Mirror of https://github.com/sqlmapproject/sqlmap.git, synced 2025-12-06 12:41:30 +00:00
Compare commits
632 Commits (eb26dd8984 … f9aaec7b4a)
.gitattributes (vendored): 5 changes

@@ -1,5 +1,8 @@
-*.py text eol=lf
 *.conf text eol=lf
+*.md text eol=lf
+*.md5 text eol=lf
+*.py text eol=lf
+*.xml text eol=lf

 *_ binary
 *.dll binary
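The rules above are standard gitattributes configuration: files matching the `text eol=lf` patterns are normalized to LF line endings, while paths matching `*_` and `*.dll` are marked `binary` and are therefore excluded from text conversion and diffing. A minimal sketch of how the effective attributes can be inspected with git itself (the file name is just an illustrative path from this repository):

    git check-attr text eol binary -- sqlmap.py
    # prints one "<path>: <attribute>: <value>" line per queried attribute, e.g. "sqlmap.py: eol: lf"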
.gitignore (vendored): 3 changes

@@ -2,4 +2,5 @@
 output/
 .sqlmap_history
 traffic.txt
 *~
+.idea/
ISSUE_TEMPLATE.md (new file): 26 changes

@@ -0,0 +1,26 @@
+## What's the problem (or question)?
+<!--- If describing a bug, tell us what happens instead of the expected behavior -->
+<!--- If suggesting a change/improvement, explain the difference from current behavior -->
+
+## Do you have an idea for a solution?
+<!--- Not obligatory, but suggest a fix/reason for the bug, -->
+<!--- or ideas how to implement the addition or change -->
+
+## How can we reproduce the issue?
+<!--- Provide unambiguous set of steps to reproduce this bug. Include command to reproduce, if relevant (you can mask the sensitive data) -->
+1.
+2.
+3.
+4.
+
+## What are the running context details?
+<!--- Include as many relevant details about the running context you experienced the bug/problem in -->
+* Installation method (e.g. `pip`, `apt-get`, `git clone` or `zip`/`tar.gz`):
+* Client OS (e.g. `Microsoft Windows 10`)
+* Program version (`python sqlmap.py --version` or `sqlmap --version` depending on installation):
+* Target DBMS (e.g. `Microsoft SQL Server`):
+* Detected WAF/IDS/IPS protection (e.g. `ModSecurity` or `unknown`):
+* SQLi techniques found by sqlmap (e.g. `error-based` and `boolean-based blind`):
+* Results of manual target assessment (e.g. found that the payload `query=test' AND 4113 IN ((SELECT 'foobar'))-- qKLV` works):
+* Relevant console output (if any):
+* Exception traceback (if any):
README.md: 12 changes

@@ -18,7 +18,7 @@ You can download the latest tarball by clicking [here](https://github.com/sqlmap

 Preferably, you can download sqlmap by cloning the [Git](https://github.com/sqlmapproject/sqlmap) repository:

-    git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
+    git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev

 sqlmap works out of the box with [Python](http://www.python.org/download/) version **2.6.x** and **2.7.x** on any platform.

@@ -33,8 +33,8 @@ To get a list of all options and switches use:

     python sqlmap.py -hh

-You can find a sample run [here](https://gist.github.com/stamparm/5335217).
-To get an overview of sqlmap capabilities, list of supported features and description of all options and switches, along with examples, you are advised to consult the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki).
+You can find a sample run [here](https://asciinema.org/a/46601).
+To get an overview of sqlmap capabilities, list of supported features and description of all options and switches, along with examples, you are advised to consult the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki/Usage).

 Links
 ----
@@ -45,9 +45,6 @@ Links
 * Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
 * User's manual: https://github.com/sqlmapproject/sqlmap/wiki
 * Frequently Asked Questions (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
-* Mailing list subscription: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
-* Mailing list RSS feed: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
-* Mailing list archive: http://news.gmane.org/gmane.comp.security.sqlmap
 * Twitter: [@sqlmap](https://twitter.com/sqlmap)
 * Demos: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
 * Screenshots: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
@@ -57,8 +54,11 @@ Translations

 * [Chinese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-zh-CN.md)
 * [Croatian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-hr-HR.md)
+* [French](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-fr-FR.md)
 * [Greek](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-gr-GR.md)
 * [Indonesian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-id-ID.md)
+* [Italian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-it-IT.md)
+* [Japanese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-ja-JP.md)
 * [Portuguese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-pt-BR.md)
 * [Spanish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-es-MX.md)
 * [Turkish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-tr-TR.md)
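The `git clone --depth 1` form introduced above performs a shallow clone, fetching only the most recent revision instead of the full history, which keeps the initial download small. A minimal sketch of the resulting workflow, using only standard git commands (the `sqlmap-dev` directory name is the one used in the README):

    git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
    cd sqlmap-dev
    git pull                # update the shallow clone to the latest revision
    git fetch --unshallow   # optionally fetch the complete history later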
@@ -76,14 +76,14 @@
 * Added option `--safe-post` to set POST data for sending to safe URL.
 * Added option `--safe-req` for loading HTTP request from a file that will be used during sending to safe URL.
 * Added option `--skip` to skip testing of given parameter(s).
-* Added switch `--skip-static` to skip testing parameters that not appear dynamic.
+* Added switch `--skip-static` to skip testing parameters that not appear to be dynamic.
 * Added switch `--skip-urlencode` to skip URL encoding of payload data.
 * Added switch `--skip-waf` to skip heuristic detection of WAF/IPS/IDS protection.
 * Added switch `--smart` to conduct thorough tests only if positive heuristic(s).
 * Added option `--sql-file` for setting file(s) holding SQL statements to be executed (in case of stacked SQLi).
 * Added switch `--sqlmap-shell` to turn on interactive sqlmap shell prompt.
 * Added option `--test-filter` for test filtration by payloads and/or titles (e.g. `ROW`).
-* Added option `--test-skip` for skiping tests by payloads and/or titles (e.g. `BENCHMARK`).
+* Added option `--test-skip` for skipping tests by payloads and/or titles (e.g. `BENCHMARK`).
 * Added switch `--titles` to turn on comparison of pages based only on their titles.
 * Added option `--tor-port` to explicitly set Tor proxy port.
 * Added option `--tor-type` to set Tor proxy type (`HTTP` (default), `SOCKS4` or `SOCKS5`).
@@ -149,7 +149,7 @@
 * Major bugs fixed.
 * Cleanup of UDF source code repository, https://svn.sqlmap.org/sqlmap/trunk/sqlmap/extra/udfhack.
 * Major code cleanup.
-* Added simple file encryption/compression utility, extra/cloak/cloak.py, used by sqlmap to decrypt on the fly Churrasco, UPX executable and web shells consequently reducing drastically the number of anti-virus softwares that mistakenly mark sqlmap as a malware.
+* Added simple file encryption/compression utility, extra/cloak/cloak.py, used by sqlmap to decrypt on the fly Churrasco, UPX executable and web shells consequently reducing drastically the number of anti-virus software that mistakenly mark sqlmap as a malware.
 * Updated user's manual.
 * Created several demo videos, hosted on YouTube (http://www.youtube.com/user/inquisb) and linked from http://sqlmap.org/demo.html.

@@ -302,7 +302,7 @@
 * Added support to extract database users password hash on Microsoft SQL Server;
 * Added a fuzzer function with the aim to parse HTML page looking for standard database error messages consequently improving database fingerprinting;
 * Added support for SQL injection on HTTP Cookie and User-Agent headers;
-* Reviewed HTTP request library (lib/request.py) to support the extended inband SQL injection functionality. Splitted getValue() into getInband() and getBlind();
+* Reviewed HTTP request library (lib/request.py) to support the extended inband SQL injection functionality. Split getValue() into getInband() and getBlind();
 * Major enhancements in common library and added checkForBrackets() method to check if the bracket(s) are needed to perform a UNION query SQL injection attack;
 * Implemented `--dump-all` functionality to dump entire DBMS data from all databases tables;
 * Added support to exclude DBMS system databases' when enumeration tables and dumping their entries (`--exclude-sysdbs`);
@@ -335,7 +335,7 @@
 * Added inband SQL injection (UNION query) support (`--union-use`);
 * Complete code refactoring, a lot of minor and some major fixes in libraries, many minor improvements;
 * Reviewed the directory tree structure;
-* Splitted lib/common.py: inband injection functionalities now are moved to lib/union.py;
+* Split lib/common.py: inband injection functionalities now are moved to lib/union.py;
 * Updated documentation files.

 # Version 0.3 (2007-01-20)
@@ -1,38 +1,37 @@
 # Contributing to sqlmap

 ## Reporting bugs

 **Bug reports are welcome**!
 Please report all bugs on the [issue tracker](https://github.com/sqlmapproject/sqlmap/issues).

 ### Guidelines

 * Before you submit a bug report, search both [open](https://github.com/sqlmapproject/sqlmap/issues?q=is%3Aopen+is%3Aissue) and [closed](https://github.com/sqlmapproject/sqlmap/issues?q=is%3Aissue+is%3Aclosed) issues to make sure the issue has not come up before. Also, check the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki) for anything relevant.
 * Make sure you can reproduce the bug with the latest development version of sqlmap.
 * Your report should give detailed instructions on how to reproduce the problem. If sqlmap raises an unhandled exception, the entire traceback is needed. Details of the unexpected behaviour are welcome too. A small test case (just a few lines) is ideal.
 * If you are making an enhancement request, lay out the rationale for the feature you are requesting. *Why would this feature be useful?*
-* If you are not sure whether something is a bug, or want to discuss a potential new feature before putting in an enhancement request, the [mailing list](https://lists.sourceforge.net/lists/listinfo/sqlmap-users) is a good place to bring it up.

 ## Submitting code changes

 All code contributions are greatly appreciated. First off, clone the [Git repository](https://github.com/sqlmapproject/sqlmap), read the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki) carefully, go through the code yourself and [drop us an email](mailto:dev@sqlmap.org) if you are having a hard time grasping its structure and meaning. We apologize for not commenting the code enough - you could take a chance to read it through and [improve it](https://github.com/sqlmapproject/sqlmap/issues/37).

 Our preferred method of patch submission is via a Git [pull request](https://help.github.com/articles/using-pull-requests).
 Many [people](https://raw.github.com/sqlmapproject/sqlmap/master/doc/THANKS.md) have contributed in different ways to the sqlmap development. **You** can be the next!

 ### Guidelines

 In order to maintain consistency and readability throughout the code, we ask that you adhere to the following instructions:

 * Each patch should make one logical change.
 * Wrap code to 76 columns when possible.
 * Avoid tabbing, use four blank spaces instead.
-* Before you put time into a non-trivial patch, it is worth discussing it on the [mailing list](https://lists.sourceforge.net/lists/listinfo/sqlmap-users) or privately by [email](mailto:dev@sqlmap.org).
+* Before you put time into a non-trivial patch, it is worth discussing it privately by [email](mailto:dev@sqlmap.org).
 * Do not change style on numerous files in one single pull request, we can [discuss](mailto:dev@sqlmap.org) about those before doing any major restyling, but be sure that personal preferences not having a strong support in [PEP 8](http://www.python.org/dev/peps/pep-0008/) will likely to be rejected.
 * Make changes on less than five files per single pull request - there is rarely a good reason to have more than five files changed on one pull request, as this dramatically increases the review time required to land (commit) any of those pull requests.
 * Style that is too different from main branch will be ''adapted'' by the developers side.
 * Do not touch anything inside `thirdparty/` and `extra/` folders.

 ### Licensing

-By submitting code contributions to the sqlmap developers, to the mailing list, or via Git pull request, checking them into the sqlmap source code repository, it is understood (unless you specify otherwise) that you are offering the sqlmap copyright holders the unlimited, non-exclusive right to reuse, modify, and relicense the code. This is important because the inability to relicense code has caused devastating problems for other software projects (such as KDE and NASM). If you wish to specify special license conditions of your contributions, just say so when you send them.
+By submitting code contributions to the sqlmap developers or via Git pull request, checking them into the sqlmap source code repository, it is understood (unless you specify otherwise) that you are offering the sqlmap copyright holders the unlimited, non-exclusive right to reuse, modify, and relicense the code. This is important because the inability to relicense code has caused devastating problems for other software projects (such as KDE and NASM). If you wish to specify special license conditions of your contributions, just say so when you send them.
doc/COPYING: 19 changes

@@ -1,7 +1,7 @@
 COPYING -- Describes the terms under which sqlmap is distributed. A copy
 of the GNU General Public License (GPL) is appended to this file.

-sqlmap is (C) 2006-2016 Bernardo Damele Assumpcao Guimaraes, Miroslav Stampar.
+sqlmap is (C) 2006-2017 Bernardo Damele Assumpcao Guimaraes, Miroslav Stampar.

 This program is free software; you may redistribute and/or modify it under
 the terms of the GNU General Public License as published by the Free
@@ -46,14 +46,14 @@ to know exactly what a program is going to do before they run it.
 Source code also allows you to fix bugs and add new features. You are
 highly encouraged to send your changes to dev@sqlmap.org for possible
 incorporation into the main distribution. By sending these changes to the
-sqlmap developers, to the mailing lists, or via Git pull request, checking
-them into the sqlmap source code repository, it is understood (unless you
-specify otherwise) that you are offering the sqlmap project the unlimited,
-non-exclusive right to reuse, modify, and relicense the code. sqlmap will
-always be available Open Source, but this is important because the
-inability to relicense code has caused devastating problems for other Free
-Software projects (such as KDE and NASM). If you wish to specify special
-license conditions of your contributions, just say so when you send them.
+sqlmap developers or via Git pull request, checking them into the sqlmap
+source code repository, it is understood (unless you specify otherwise)
+that you are offering the sqlmap project the unlimited, non-exclusive
+right to reuse, modify, and relicense the code. sqlmap will always be
+available Open Source, but this is important because the inability to
+relicense code has caused devastating problems for other Free Software
+projects (such as KDE and NASM). If you wish to specify special license
+conditions of your contributions, just say so when you send them.

 This program is distributed in the hope that it will be useful, but
 WITHOUT ANY WARRANTY; without even the implied warranty of
@@ -361,7 +361,6 @@ This license does not apply to the following components:
 * The MultipartPost library located under thirdparty/multipartpost/.
 * The Odict library located under thirdparty/odict/.
 * The Oset library located under thirdparty/oset/.
-* The PageRank library located under thirdparty/pagerank/.
 * The PrettyPrint library located under thirdparty/prettyprint/.
 * The PyDes library located under thirdparty/pydes/.
 * The SocksiPy library located under thirdparty/socks/.
@@ -139,7 +139,7 @@ Jim Forster, <jimforster(at)goldenwest.com>
 * for reporting a bug

 Rong-En Fan, <rafan(at)freebsd.org>
-* for commiting the sqlmap 0.5 port to the official FreeBSD project repository
+* for committing the sqlmap 0.5 port to the official FreeBSD project repository

 Giorgio Fedon, <giorgio.fedon(at)gmail.com>
 * for suggesting a speed improvement for bisection algorithm
@@ -562,7 +562,7 @@ Kazim Bugra Tombul, <mhackmail(at)gmail.com>
 * for reporting a minor bug

 Efrain Torres, <et(at)metasploit.com>
-* for helping out to improve the Metasploit Framework sqlmap auxiliary module and for commiting it on the Metasploit official subversion repository
+* for helping out to improve the Metasploit Framework sqlmap auxiliary module and for committing it on the Metasploit official subversion repository
 * for his great Metasploit WMAP Framework

 Sandro Tosi, <matrixhasu(at)gmail.com>
@@ -281,8 +281,6 @@ be bound by the terms and conditions of this License Agreement.

 * The bottle web framework library located under thirdparty/bottle/.
 Copyright (C) 2012, Marcel Hellkamp.
-* The PageRank library located under thirdparty/pagerank/.
-Copyright (C) 2010, Corey Goldberg.
 * The Termcolor library located under thirdparty/termcolor/.
 Copyright (C) 2008-2011, Volvox Development Team.

@@ -312,3 +310,5 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

 * The PyDes library located under thirdparty/pydes/.
 Copyleft 2009, Todd Whiteman.
+* The win_inet_pton library located under thirdparty/wininetpton/.
+Copyleft 2014, Ryan Vennell.
@@ -17,7 +17,7 @@ Se puede descargar el "tarball" más actual haciendo clic [aquí](https://github

 Preferentemente, se puede descargar sqlmap clonando el repositorio [Git](https://github.com/sqlmapproject/sqlmap):

-    git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
+    git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev

 sqlmap funciona con las siguientes versiones de [Python](http://www.python.org/download/) ** 2.6.x** y ** 2.7.x** en cualquier plataforma.

@@ -26,14 +26,14 @@ Uso

 Para obtener una lista de opciones básicas:

     python sqlmap.py -h

 Para obtener una lista de todas las opciones:

     python sqlmap.py -hh

-Se puede encontrar una muestra de su funcionamiento [aquí](https://gist.github.com/stamparm/5335217).
-Para obtener una visión general de las capacidades de sqlmap, así como un listado funciones soportadas y descripción de todas las opciones y modificadores, junto con ejemplos, se recomienda consultar el [manual de usuario](https://github.com/sqlmapproject/sqlmap/wiki).
+Se puede encontrar una muestra de su funcionamiento [aquí](https://asciinema.org/a/46601).
+Para obtener una visión general de las capacidades de sqlmap, así como un listado funciones soportadas y descripción de todas las opciones y modificadores, junto con ejemplos, se recomienda consultar el [manual de usuario](https://github.com/sqlmapproject/sqlmap/wiki/Usage).

 Enlaces
 ---
@@ -44,9 +44,6 @@ Enlaces
 * Seguimiento de problemas "Issue tracker": https://github.com/sqlmapproject/sqlmap/issues
 * Manual de usuario: https://github.com/sqlmapproject/sqlmap/wiki
 * Preguntas frecuentes (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
-* Subscripción a la lista de correo: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
-* Fuente de la lista de correo "RSS feed": http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
-* Archivos de lista de correo: http://news.gmane.org/gmane.comp.security.sqlmap
 * Twitter: [@sqlmap](https://twitter.com/sqlmap)
 * Demostraciones: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
 * Imágenes: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
doc/translations/README-fr-FR.md (new file): 49 changes

@@ -0,0 +1,49 @@
+# sqlmap
+
+[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
+
+**sqlmap** est un outil Open Source de test d'intrusion. Cet outil permet d'automatiser le processus de détection et d'exploitation des failles d'injection SQL afin de prendre le contrôle des serveurs de base de données. __sqlmap__ dispose d'un puissant moteur de détection utilisant les techniques les plus récentes et les plus dévastatrices de tests d'intrusion comme L'Injection SQL, qui permet d'accéder à la base de données, au système de fichiers sous-jacent et permet aussi l'exécution des commandes sur le système d'exploitation.
+
+----
+
+
+
+Les captures d'écran disponible [ici](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) démontrent des fonctionnalités de __sqlmap__.
+
+Installation
+----
+
+Vous pouvez télécharger le plus récent fichier tarball en cliquant [ici](https://github.com/sqlmapproject/sqlmap/tarball/master). Vous pouvez aussi télécharger le plus récent archive zip [ici](https://github.com/sqlmapproject/sqlmap/zipball/master).
+
+De préférence, télécharger __sqlmap__ en le [clonant](https://github.com/sqlmapproject/sqlmap):
+
+    git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
+
+sqlmap fonctionne sur n'importe quel système d'exploitation avec la version **2.6.x** et **2.7.x** de [Python](http://www.python.org/download/)
+
+Usage
+----
+
+Pour afficher une liste des fonctions de bases et des commutateurs (switches), tapez:
+
+    python sqlmap.py -h
+
+Pour afficher une liste complète des options et des commutateurs (switches), tapez:
+
+    python sqlmap.py -hh
+
+Vous pouvez regarder un vidéo [ici](https://asciinema.org/a/46601) pour plus d'exemples.
+Pour obtenir un aperçu des ressources de __sqlmap__, une liste des fonctionnalités prises en charge et la description de toutes les options, ainsi que des exemples , nous vous recommandons de consulter [le wiki](https://github.com/sqlmapproject/sqlmap/wiki/Usage).
+
+Liens
+----
+
+* Page d'acceuil: http://sqlmap.org
+* Téléchargement: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) ou [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
+* Commits RSS feed: https://github.com/sqlmapproject/sqlmap/commits/master.atom
+* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
+* Manuel de l'utilisateur: https://github.com/sqlmapproject/sqlmap/wiki
+* Foire aux questions (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
+* Twitter: [@sqlmap](https://twitter.com/sqlmap)
+* Démonstrations: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
+* Les captures d'écran: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
@@ -18,7 +18,7 @@

 Κατά προτίμηση, μπορείτε να κατεβάσετε το sqlmap κάνοντας κλώνο το [Git](https://github.com/sqlmapproject/sqlmap) αποθετήριο:

-    git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
+    git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev

 Το sqlmap λειτουργεί χωρίς περαιτέρω κόπο με την [Python](http://www.python.org/download/) έκδοσης **2.6.x** και **2.7.x** σε όποια πλατφόρμα.

@@ -33,8 +33,8 @@

     python sqlmap.py -hh

-Μπορείτε να δείτε ένα δείγμα λειτουργίας του προγράμματος [εδώ](https://gist.github.com/stamparm/5335217).
-Για μια γενικότερη άποψη των δυνατοτήτων του sqlmap, μια λίστα των υποστηριζόμενων χαρακτηριστικών και περιγραφή για όλες τις επιλογές, μαζί με παραδείγματα, καλείστε να συμβουλευτείτε το [εγχειρίδιο χρήστη](https://github.com/sqlmapproject/sqlmap/wiki).
+Μπορείτε να δείτε ένα δείγμα λειτουργίας του προγράμματος [εδώ](https://asciinema.org/a/46601).
+Για μια γενικότερη άποψη των δυνατοτήτων του sqlmap, μια λίστα των υποστηριζόμενων χαρακτηριστικών και περιγραφή για όλες τις επιλογές, μαζί με παραδείγματα, καλείστε να συμβουλευτείτε το [εγχειρίδιο χρήστη](https://github.com/sqlmapproject/sqlmap/wiki/Usage).

 Σύνδεσμοι
 ----
@@ -45,9 +45,6 @@
 * Προβλήματα: https://github.com/sqlmapproject/sqlmap/issues
 * Εγχειρίδιο Χρήστη: https://github.com/sqlmapproject/sqlmap/wiki
 * Συχνές Ερωτήσεις (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
-* Εγγραφή σε Mailing list: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
-* Mailing list RSS feed: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
-* Mailing list αρχείο: http://news.gmane.org/gmane.comp.security.sqlmap
 * Twitter: [@sqlmap](https://twitter.com/sqlmap)
 * Demos: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
 * Εικόνες: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
@@ -18,7 +18,7 @@ Možete preuzeti zadnji tarball klikom [ovdje](https://github.com/sqlmapproject/

 Po mogućnosti, možete preuzeti sqlmap kloniranjem [Git](https://github.com/sqlmapproject/sqlmap) repozitorija:

-    git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
+    git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev

 sqlmap radi bez posebnih zahtjeva korištenjem [Python](http://www.python.org/download/) verzije **2.6.x** i/ili **2.7.x** na bilo kojoj platformi.

@@ -33,8 +33,8 @@ Kako biste dobili listu svih opcija i prekidača koristite:

     python sqlmap.py -hh

-Možete pronaći primjer izvršavanja [ovdje](https://gist.github.com/stamparm/5335217).
-Kako biste dobili pregled mogućnosti sqlmap-a, liste podržanih značajki te opis svih opcija i prekidača, zajedno s primjerima, preporučen je uvid u [korisnički priručnik](https://github.com/sqlmapproject/sqlmap/wiki).
+Možete pronaći primjer izvršavanja [ovdje](https://asciinema.org/a/46601).
+Kako biste dobili pregled mogućnosti sqlmap-a, liste podržanih značajki te opis svih opcija i prekidača, zajedno s primjerima, preporučen je uvid u [korisnički priručnik](https://github.com/sqlmapproject/sqlmap/wiki/Usage).

 Poveznice
 ----
@@ -45,9 +45,6 @@ Poveznice
 * Prijava problema: https://github.com/sqlmapproject/sqlmap/issues
 * Korisnički priručnik: https://github.com/sqlmapproject/sqlmap/wiki
 * Najčešće postavljena pitanja (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
-* Pretplata na mailing listu: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
-* RSS feed mailing liste: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
-* Arhiva mailing liste: http://news.gmane.org/gmane.comp.security.sqlmap
 * Twitter: [@sqlmap](https://twitter.com/sqlmap)
 * Demo: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
 * Slike zaslona: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
@@ -19,7 +19,7 @@ Anda dapat mengunduh tarball versi terbaru [di sini]

 Sebagai alternatif, Anda dapat mengunduh sqlmap dengan men-_clone_ repositori [Git](https://github.com/sqlmapproject/sqlmap):

-    git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
+    git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev

 sqlmap berfungsi langsung pada [Python](http://www.python.org/download/) versi **2.6.x** dan **2.7.x** pada platform apapun.

@@ -34,8 +34,8 @@ Untuk mendapatkan daftar opsi lanjut gunakan:

     python sqlmap.py -hh

-Anda dapat mendapatkan contoh penggunaan [di sini](https://gist.github.com/stamparm/5335217).
-Untuk mendapatkan gambaran singkat kemampuan sqlmap, daftar fitur yang didukung, deskripsi dari semua opsi, berikut dengan contohnya, Anda disarankan untuk membaca [manual pengguna](https://github.com/sqlmapproject/sqlmap/wiki).
+Anda dapat mendapatkan contoh penggunaan [di sini](https://asciinema.org/a/46601).
+Untuk mendapatkan gambaran singkat kemampuan sqlmap, daftar fitur yang didukung, deskripsi dari semua opsi, berikut dengan contohnya, Anda disarankan untuk membaca [Panduan Pengguna](https://github.com/sqlmapproject/sqlmap/wiki/Usage).

 Tautan
 ----
@@ -46,9 +46,6 @@ Tautan
 * Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
 * Wiki Manual Penggunaan: https://github.com/sqlmapproject/sqlmap/wiki
 * Pertanyaan yang Sering Ditanyakan (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
-* Berlangganan milis: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
-* RSS feed dari milis: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
-* Arsip milis: http://news.gmane.org/gmane.comp.security.sqlmap
 * Twitter: [@sqlmap](https://twitter.com/sqlmap)
 * Video Demo [#1](http://www.youtube.com/user/inquisb/videos) dan [#2](http://www.youtube.com/user/stamparm/videos)
 * Tangkapan Layar: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
doc/translations/README-it-IT.md (new file): 50 changes

@@ -0,0 +1,50 @@
+# sqlmap
+
+[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
+
+sqlmap è uno strumento open source per il penetration testing. Il suo scopo è quello di rendere automatico il processo di scoperta ed exploit di vulnerabilità di tipo SQL injection al fine di compromettere database online. Dispone di un potente motore per la ricerca di vulnerabilità, molti strumenti di nicchia anche per il più esperto penetration tester ed un'ampia gamma di controlli che vanno dal fingerprinting di database allo scaricamento di dati, fino all'accesso al file system sottostante e l'esecuzione di comandi nel sistema operativo attraverso connessioni out-of-band.
+
+Screenshot
+----
+
+
+
+Nella wiki puoi visitare [l'elenco di screenshot](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) che mostrano il funzionamento di alcune delle funzionalità del programma.
+
+Installazione
+----
+
+Puoi scaricare l'ultima tarball cliccando [qui](https://github.com/sqlmapproject/sqlmap/tarball/master) oppure l'ultima zipball cliccando [qui](https://github.com/sqlmapproject/sqlmap/zipball/master).
+
+La cosa migliore sarebbe però scaricare sqlmap clonando la repository [Git](https://github.com/sqlmapproject/sqlmap):
+
+    git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
+
+sqlmap è in grado di funzionare con le versioni **2.6.x** e **2.7.x** di [Python](http://www.python.org/download/) su ogni piattaforma.
+
+Utilizzo
+----
+
+Per una lista delle opzioni e dei controlli di base:
+
+    python sqlmap.py -h
+
+Per una lista di tutte le opzioni e di tutti i controlli:
+
+    python sqlmap.py -hh
+
+Puoi trovare un esempio di esecuzione [qui](https://asciinema.org/a/46601).
+Per una panoramica delle capacità di sqlmap, una lista delle sue funzionalità e la descrizione di tutte le sue opzioni e controlli, insieme ad un gran numero di esempi, siete pregati di visitare lo [user's manual](https://github.com/sqlmapproject/sqlmap/wiki/Usage) (disponibile solo in inglese).
+
+Link
+----
+
+* Sito: http://sqlmap.org
+* Download: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) or [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
+* RSS feed dei commit: https://github.com/sqlmapproject/sqlmap/commits/master.atom
+* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
+* Manuale dell'utente: https://github.com/sqlmapproject/sqlmap/wiki
+* Domande più frequenti (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
+* Twitter: [@sqlmap](https://twitter.com/sqlmap)
+* Dimostrazioni: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
+* Screenshot: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
51
doc/translations/README-ja-JP.md
Normal file
51
doc/translations/README-ja-JP.md
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
# sqlmap
|
||||||
|
|
||||||
|
[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
|
||||||
|
|
||||||
|
sqlmapはオープンソースのペネトレーションテスティングツールです。SQLインジェクションの脆弱性の検出、活用、そしてデータベースサーバ奪取のプロセスを自動化します。
|
||||||
|
強力な検出エンジン、ペネトレーションテスターのための多くのニッチ機能、持続的なデータベースのフィンガープリンティングから、データベースのデータ取得やアウトオブバンド接続を介したオペレーティング・システム上でのコマンド実行、ファイルシステムへのアクセスなどの広範囲に及ぶスイッチを提供します。
|
||||||
|
|
||||||
|
スクリーンショット
|
||||||
|
----
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
wikiに載っているいくつかの機能のデモをスクリーンショットで見ることができます。 [スクリーンショット集](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots)
|
||||||
|
|
||||||
|
インストール
|
||||||
|
----
|
||||||
|
|
||||||
|
最新のtarballを [こちら](https://github.com/sqlmapproject/sqlmap/tarball/master) から、最新のzipballを [こちら](https://github.com/sqlmapproject/sqlmap/zipball/master) からダウンロードできます。
|
||||||
|
|
||||||
|
[Git](https://github.com/sqlmapproject/sqlmap) レポジトリをクローンして、sqlmapをダウンロードすることも可能です。:
|
||||||
|
|
||||||
|
git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
|
||||||
|
|
||||||
|
sqlmapは、 [Python](http://www.python.org/download/) バージョン **2.6.x** または **2.7.x** がインストールされていれば、全てのプラットフォームですぐに使用できます。
|
||||||
|
|
||||||
|
使用法
|
||||||
|
----
|
||||||
|
|
||||||
|
基本的なオプションとスイッチの使用法をリストするには:
|
||||||
|
|
||||||
|
python sqlmap.py -h
|
||||||
|
|
||||||
|
全てのオプションとスイッチの使用法をリストするには:
|
||||||
|
|
||||||
|
python sqlmap.py -hh
|
||||||
|
|
||||||
|
実行例を [こちら](https://asciinema.org/a/46601) で見ることができます。
|
||||||
|
sqlmapの概要、機能の一覧、全てのオプションやスイッチの使用法を例とともに、 [ユーザーマニュアル](https://github.com/sqlmapproject/sqlmap/wiki/Usage) で確認することができます。
|
||||||
|
|
||||||
|
リンク
|
||||||
|
----
|
||||||
|
|
||||||
|
* ホームページ: http://sqlmap.org
|
||||||
|
* ダウンロード: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) or [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
|
||||||
|
* コミットのRSSフィード: https://github.com/sqlmapproject/sqlmap/commits/master.atom
|
||||||
|
* 課題管理: https://github.com/sqlmapproject/sqlmap/issues
|
||||||
|
* ユーザーマニュアル: https://github.com/sqlmapproject/sqlmap/wiki
|
||||||
|
* よくある質問 (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
|
||||||
|
* Twitter: [@sqlmap](https://twitter.com/sqlmap)
|
||||||
|
* デモ: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
|
||||||
|
* スクリーンショット: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
|
||||||
@@ -19,7 +19,7 @@ Você pode baixar o arquivo tar mais recente clicando [aqui]
 
 De preferência, você pode baixar o sqlmap clonando o repositório [Git](https://github.com/sqlmapproject/sqlmap):
 
-git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
+git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
 
 sqlmap funciona em [Python](http://www.python.org/download/) nas versões **2.6.x** e **2.7.x** em todas as plataformas.
 
@@ -34,7 +34,7 @@ Para obter a lista completa de opções faça:
 
 python sqlmap.py -hh
 
-Você pode encontrar alguns exemplos [aqui](https://gist.github.com/stamparm/5335217).
+Você pode encontrar alguns exemplos [aqui](https://asciinema.org/a/46601).
 Para ter uma visão geral dos recursos do sqlmap, lista de recursos suportados e a descrição de todas as opções, juntamente com exemplos, aconselhamos que você consulte o [manual do usuário](https://github.com/sqlmapproject/sqlmap/wiki).
 
 Links
@@ -46,9 +46,6 @@ Links
 * Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
 * Manual do Usuário: https://github.com/sqlmapproject/sqlmap/wiki
 * Perguntas frequentes (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
-* Mailing list subscription: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
-* Mailing list RSS feed: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
-* Mailing list archive: http://news.gmane.org/gmane.comp.security.sqlmap
 * Twitter: [@sqlmap](https://twitter.com/sqlmap)
 * Demonstrações: [#1](http://www.youtube.com/user/inquisb/videos) e [#2](http://www.youtube.com/user/stamparm/videos)
 * Imagens: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
 
@@ -21,7 +21,7 @@ Kurulum
 
 Veya tercihen, [Git](https://github.com/sqlmapproject/sqlmap) reposunu klonlayarak indirebilirsiniz
 
-git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
+git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
 
 sqlmap [Python](http://www.python.org/download/) sitesinde bulunan **2.6.x** and **2.7.x** versiyonları ile bütün platformlarda çalışabilmektedir.
 
@@ -37,7 +37,7 @@ Bütün seçenekleri gösterir
 
 python sqlmap.py -hh
 
-Program ile ilgili örnekleri [burada](https://gist.github.com/stamparm/5335217) bulabilirsiniz. Daha fazlası içinsqlmap'in bütün açıklamaları ile birlikte bütün özelliklerinin, örnekleri ile bulunduğu [manuel sayfamıza](https://github.com/sqlmapproject/sqlmap/wiki) bakmanızı tavsiye ediyoruz
+Program ile ilgili örnekleri [burada](https://asciinema.org/a/46601) bulabilirsiniz. Daha fazlası içinsqlmap'in bütün açıklamaları ile birlikte bütün özelliklerinin, örnekleri ile bulunduğu [manuel sayfamıza](https://github.com/sqlmapproject/sqlmap/wiki/Usage) bakmanızı tavsiye ediyoruz
 
 Links
 ----
@@ -48,9 +48,6 @@ Links
 * Hata takip etme sistemi: https://github.com/sqlmapproject/sqlmap/issues
 * Kullanıcı Manueli: https://github.com/sqlmapproject/sqlmap/wiki
 * Sıkça Sorulan Sorular(SSS): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
-* Mail listesi: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
-* Mail RSS takibi: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
-* Mail listesi arşivi: http://news.gmane.org/gmane.comp.security.sqlmap
 * Twitter: [@sqlmap](https://twitter.com/sqlmap)
 * Demolar: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
 * Ekran görüntüleri: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
 
@@ -18,7 +18,7 @@ sqlmap 是一个开源的渗透测试工具,可以用来自动化的检测,
 
 推荐你从 [Git](https://github.com/sqlmapproject/sqlmap) 仓库获取最新的源代码:
 
-git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
+git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
 
 sqlmap 可以运行在 [Python](http://www.python.org/download/) **2.6.x** 和 **2.7.x** 版本的任何平台上
 
@@ -33,7 +33,7 @@ sqlmap 可以运行在 [Python](http://www.python.org/download/) **2.6.x** 和
 
 python sqlmap.py -hh
 
-你可以从 [这里](https://gist.github.com/stamparm/5335217) 看到一个sqlmap 的使用样例。除此以外,你还可以查看 [使用手册](https://github.com/sqlmapproject/sqlmap/wiki)。获取sqlmap所有支持的特性、参数、命令行选项开关及说明的使用帮助。
+你可以从 [这里](https://asciinema.org/a/46601) 看到一个sqlmap 的使用样例。除此以外,你还可以查看 [使用手册](https://github.com/sqlmapproject/sqlmap/wiki/Usage)。获取sqlmap所有支持的特性、参数、命令行选项开关及说明的使用帮助。
 
 链接
 ----
@@ -44,9 +44,6 @@ sqlmap 可以运行在 [Python](http://www.python.org/download/) **2.6.x** 和
 * Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
 * 使用手册: https://github.com/sqlmapproject/sqlmap/wiki
 * 常见问题 (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
-* 邮件讨论列表: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
-* 邮件列表 RSS 订阅: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
-* 邮件列表归档: http://news.gmane.org/gmane.comp.security.sqlmap
 * Twitter: [@sqlmap](https://twitter.com/sqlmap)
 * 教程: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
 * 截图: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -3,7 +3,7 @@
 """
 beep.py - Make a beep sound
 
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -3,7 +3,7 @@
 """
 cloak.py - Simple file encryption/compression utility
 
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -3,7 +3,7 @@
 """
 dbgtool.py - Portable executable to ASCII debug script converter
 
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -99,7 +99,7 @@ void usage(char *path)
 printf(" -h this screen\n");
 printf(" -b num maximal number of blanks (unanswered icmp requests)\n");
 printf(" before quitting\n");
-printf(" -s bytes maximal data buffer size in bytes (default is 64 bytes)\n\n", DEFAULT_MAX_DATA_SIZE);
+printf(" -s bytes maximal data buffer size in bytes (default is %u bytes)\n\n", DEFAULT_MAX_DATA_SIZE);
 printf("In order to improve the speed, lower the delay (-d) between requests or\n");
 printf("increase the size (-s) of the data buffer\n");
 }
@@ -203,8 +203,6 @@ int main(int argc, char **argv)
 PROCESS_INFORMATION pi;
 int status;
 unsigned int max_data_size;
-struct hostent *he;
-
 
 // set defaults
 target = 0;
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -43,7 +43,7 @@ def updateMSSQLXML():
 
 return
 
-releases = re.findall("class=\"BCC_DV_01DarkBlueTitle\">SQL Server\s(.+?)\sBuilds", mssqlVersionsHtmlString, re.I | re.M)
+releases = re.findall("class=\"BCC_DV_01DarkBlueTitle\">SQL Server\s(.+?)\sBuilds", mssqlVersionsHtmlString, re.I)
 releasesCount = len(releases)
 
 # Create the minidom document
@@ -74,7 +74,7 @@ def updateMSSQLXML():
 stopIdx = mssqlVersionsHtmlString.index("SQL Server %s Builds" % releases[index + 1])
 
 mssqlVersionsReleaseString = mssqlVersionsHtmlString[startIdx:stopIdx]
-servicepackVersion = re.findall("</td><td>[7\.0|2000|2005|2008|2008 R2]*(.*?)</td><td.*?([\d\.]+)</td>[\r]*\n", mssqlVersionsReleaseString, re.I | re.M)
+servicepackVersion = re.findall("</td><td>(7\.0|2000|2005|2008|2008 R2)*(.*?)</td><td.*?([\d\.]+)</td>[\r]*\n", mssqlVersionsReleaseString, re.I)
 
 for servicePack, version in servicepackVersion:
 if servicePack.startswith(" "):
 
@@ -1,3 +1,3 @@
-Files in this folder can be used to compile auxiliary program that can
-be used for running command prompt commands skipping standard "cmd /c" way.
-They are licensed under the terms of the GNU Lesser General Public License.
+runcmd.exe is an auxiliary program that can be used for running command prompt
+commands skipping standard "cmd /c" way. It is licensed under the terms of the
+GNU Lesser General Public License.
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -3,7 +3,7 @@
 """
 safe2bin.py - Simple safe(hex) to binary format converter
 
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -23,7 +23,7 @@ HEX_ENCODED_CHAR_REGEX = r"(?P<result>\\x[0-9A-Fa-f]{2})"
 SAFE_ENCODE_SLASH_REPLACEMENTS = "\t\n\r\x0b\x0c"
 
 # Characters that don't need to be safe encoded
-SAFE_CHARS = "".join(filter(lambda x: x not in SAFE_ENCODE_SLASH_REPLACEMENTS, string.printable.replace('\\', '')))
+SAFE_CHARS = "".join(filter(lambda _: _ not in SAFE_ENCODE_SLASH_REPLACEMENTS, string.printable.replace('\\', '')))
 
 # Prefix used for hex encoded values
 HEX_ENCODED_PREFIX = r"\x"
@@ -47,7 +47,7 @@ def safecharencode(value):
 retVal = value
 
 if isinstance(value, basestring):
-if any(_ not in SAFE_CHARS for _ in value):
+if any([_ not in SAFE_CHARS for _ in value]):
 retVal = retVal.replace(HEX_ENCODED_PREFIX, HEX_ENCODED_PREFIX_MARKER)
 retVal = retVal.replace('\\', SLASH_MARKER)
 
|||||||
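Aside on the hunk above: a minimal, illustrative sketch of the safe(hex) encoding idea behind SAFE_CHARS. The original module targets Python 2, but the sketch runs on either; the hex_escape_unsafe() helper is hypothetical and only mirrors the filter shown in the hunk, it is not the project's safecharencode() implementation.

import string

# Escape-sequence characters kept out of the safe set (same constant as in the hunk)
SAFE_ENCODE_SLASH_REPLACEMENTS = "\t\n\r\x0b\x0c"

# Printable characters that don't need to be safe encoded (backslash excluded)
SAFE_CHARS = "".join(filter(lambda _: _ not in SAFE_ENCODE_SLASH_REPLACEMENTS, string.printable.replace('\\', '')))

def hex_escape_unsafe(value):
    # Hypothetical helper: hex-escape everything outside SAFE_CHARS
    return "".join(_ if _ in SAFE_CHARS else "\\x%02x" % ord(_) for _ in value)

print(hex_escape_unsafe("foo\tbar\x00"))   # foo\x09bar\x00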
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+# Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 # See the file 'doc/COPYING' for copying permission
 
 # Removes duplicate entries in wordlist like files
 
@@ -8,14 +8,16 @@ FULLPATH=${SCRIPTPATH%/*}/$SETTINGS
 
 if [ -f $FULLPATH ]
 then
-LINE=$(grep -o ${FULLPATH} -e 'VERSION = "[0-9.]*"');
+LINE=$(grep -o ${FULLPATH} -e 'VERSION = "[0-9.]*"')
-declare -a LINE;
+declare -a LINE
 NEW_TAG=$(python -c "import re, sys, time; version = re.search('\"([0-9.]*)\"', sys.argv[1]).group(1); _ = version.split('.'); print '.'.join(_[:-1]) if len(_) == 4 and _[-1] == '0' else ''" "$LINE")
 if [ -n "$NEW_TAG" ]
 then
-git commit -am "Automatic monthly tagging"
+#git commit -am "Automatic monthly tagging"
-echo "Creating new tag ${NEW_TAG}";
+echo "Creating new tag ${NEW_TAG}"
-git tag $NEW_TAG;
+git tag $NEW_TAG
 git push origin $NEW_TAG
+echo "Going to push PyPI package"
+/bin/bash ${SCRIPTPATH%/*}/pypi.sh
 fi
-fi;
+fi
||||||
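The NEW_TAG one-liner above is dense, so here is the same derivation restated as a readable sketch (the sample VERSION strings are made up, not taken from the repository): a tag is produced only for four-part versions whose last component is '0', and the tag simply drops that trailing '.0'.

import re

def new_tag(settings_line):
    # settings_line looks like: VERSION = "1.1.2.0"
    version = re.search(r'"([0-9.]*)"', settings_line).group(1)
    parts = version.split('.')
    # Tag only the monthly "x.y.z.0" snapshots, as "x.y.z"
    return '.'.join(parts[:-1]) if len(parts) == 4 and parts[-1] == '0' else ''

print(new_tag('VERSION = "1.1.2.0"'))   # 1.1.2
print(new_tag('VERSION = "1.1.2.5"'))   # empty string, so no tag is created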
@@ -1,22 +0,0 @@
-#!/bin/bash
-
-SETTINGS="../../lib/core/settings.py"
-
-declare -x SCRIPTPATH="${0}"
-
-FULLPATH=${SCRIPTPATH%/*}/$SETTINGS
-
-if [ -f $FULLPATH ]
-then
-LINE=$(grep -o ${FULLPATH} -e 'VERSION = "[0-9.]*"');
-declare -a LINE;
-INCREMENTED=$(python -c "import re, sys, time; version = re.search('\"([0-9.]*)\"', sys.argv[1]).group(1); _ = version.split('.'); _.append(0) if len(_) < 3 else _; _[-1] = str(int(_[-1]) + 1); month = str(time.gmtime().tm_mon); _[-1] = '0' if _[-2] != month else _[-1]; _[-2] = month; print sys.argv[1].replace(version, '.'.join(_))" "$LINE")
-if [ -n "$INCREMENTED" ]
-then
-sed "s/${LINE}/${INCREMENTED}/" $FULLPATH > $FULLPATH.tmp && mv $FULLPATH.tmp $FULLPATH
-echo "Updated ${INCREMENTED} in ${FULLPATH}";
-else
-echo "Something went wrong in VERSION increment"
-exit 1
-fi
-fi;
32
extra/shutils/precommit-hook.sh
Normal file
32
extra/shutils/precommit-hook.sh
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
PROJECT="../../"
|
||||||
|
SETTINGS="../../lib/core/settings.py"
|
||||||
|
CHECKSUM="../../txt/checksum.md5"
|
||||||
|
|
||||||
|
declare -x SCRIPTPATH="${0}"
|
||||||
|
|
||||||
|
PROJECT_FULLPATH=${SCRIPTPATH%/*}/$PROJECT
|
||||||
|
SETTINGS_FULLPATH=${SCRIPTPATH%/*}/$SETTINGS
|
||||||
|
CHECKSUM_FULLPATH=${SCRIPTPATH%/*}/$CHECKSUM
|
||||||
|
|
||||||
|
git diff $SETTINGS_FULLPATH | grep "VERSION =" > /dev/null && exit 0
|
||||||
|
|
||||||
|
if [ -f $SETTINGS_FULLPATH ]
|
||||||
|
then
|
||||||
|
LINE=$(grep -o ${SETTINGS_FULLPATH} -e 'VERSION = "[0-9.]*"')
|
||||||
|
declare -a LINE
|
||||||
|
INCREMENTED=$(python -c "import re, sys, time; version = re.search('\"([0-9.]*)\"', sys.argv[1]).group(1); _ = version.split('.'); _.append(0) if len(_) < 3 else _; _[-1] = str(int(_[-1]) + 1); month = str(time.gmtime().tm_mon); _[-1] = '0' if _[-2] != month else _[-1]; _[-2] = month; print sys.argv[1].replace(version, '.'.join(_))" "$LINE")
|
||||||
|
if [ -n "$INCREMENTED" ]
|
||||||
|
then
|
||||||
|
sed -i "s/${LINE}/${INCREMENTED}/" $SETTINGS_FULLPATH
|
||||||
|
echo "Updated ${INCREMENTED} in ${SETTINGS_FULLPATH}"
|
||||||
|
else
|
||||||
|
echo "Something went wrong in VERSION increment"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
git add "$SETTINGS_FULLPATH"
|
||||||
|
fi
|
||||||
|
|
||||||
|
truncate -s 0 "$CHECKSUM_FULLPATH"
|
||||||
|
cd $PROJECT_FULLPATH && for i in $(find . -name "*.py" -o -name "*.xml" -o -iname "*_" | sort); do git ls-files $i --error-unmatch &>/dev/null && md5sum $i | stdbuf -i0 -o0 -e0 sed 's/\.\///' >> "$CHECKSUM_FULLPATH"; git add "$CHECKSUM_FULLPATH"; done
|
||||||
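The INCREMENTED one-liner used by the new pre-commit hook above packs several steps into a single expression; a readable sketch of that same logic follows (the example VERSION value is made up):

import re
import time

def incremented_line(settings_line):
    # settings_line looks like: VERSION = "1.1.2.34"
    version = re.search(r'"([0-9.]*)"', settings_line).group(1)
    parts = version.split('.')
    if len(parts) < 3:
        parts.append('0')
    parts[-1] = str(int(parts[-1]) + 1)      # bump the per-commit counter
    month = str(time.gmtime().tm_mon)
    if parts[-2] != month:                   # first commit of a new month: counter restarts
        parts[-1] = '0'
    parts[-2] = month                        # second-to-last component tracks the month
    return settings_line.replace(version, '.'.join(parts))

print(incremented_line('VERSION = "1.1.2.34"'))   # e.g. VERSION = "1.1.2.35", or "1.1.<month>.0" after a month change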
7
extra/shutils/pydiatra.sh
Normal file
7
extra/shutils/pydiatra.sh
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
|
||||||
|
# See the file 'doc/COPYING' for copying permission
|
||||||
|
|
||||||
|
# Runs py2diatra on all python files (prerequisite: pip install pydiatra)
|
||||||
|
find . -wholename "./thirdparty" -prune -o -type f -iname "*.py" -exec py2diatra '{}' \; | grep -v bare-except
|
||||||
@@ -20,8 +20,8 @@ def check(module):
 print "CHECKING ", module
 pout = os.popen("pylint --rcfile=/dev/null %s" % module, 'r')
 for line in pout:
-if re.match("E....:.", line):
+if re.match("\AE:", line):
-print line
+print line.strip()
 if __RATING__ and "Your code has been rated at" in line:
 print line
 score = re.findall("\d.\d\d", line)[0]
|
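The hunk above swaps the pylint error filter from the pattern "E....:." to "\AE:". A small sketch of what each regex accepts; the sample lines are illustrative only, since the exact pylint output format depends on its version and configuration:

import re

samples = [
    "E0602:  7:check: Undefined variable 'foo'",    # line starting with a full message id
    "E:  7, 0: Undefined variable 'foo'",           # line starting with a bare "E:" category
    "W0611:  3:check: Unused import re",            # warning, matched by neither pattern
]

for line in samples:
    old = bool(re.match("E....:.", line))
    new = bool(re.match(r"\AE:", line))
    print("%-45s old=%s new=%s" % (line, old, new))

# old=True new=False for the first sample, old=False new=True for the second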
|||||||
171
extra/shutils/pypi.sh
Normal file
171
extra/shutils/pypi.sh
Normal file
@@ -0,0 +1,171 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
declare -x SCRIPTPATH="${0}"
|
||||||
|
SETTINGS="${SCRIPTPATH%/*}/../../lib/core/settings.py"
|
||||||
|
VERSION=$(cat $SETTINGS | grep -E "^VERSION =" | cut -d '"' -f 2 | cut -d '.' -f 1-3)
|
||||||
|
TYPE=pip
|
||||||
|
TMP_DIR=/tmp/pypi
|
||||||
|
mkdir $TMP_DIR
|
||||||
|
cd $TMP_DIR
|
||||||
|
cat > $TMP_DIR/setup.py << EOF
|
||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
"""
|
||||||
|
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
|
||||||
|
See the file 'doc/COPYING' for copying permission
|
||||||
|
"""
|
||||||
|
|
||||||
|
from setuptools import setup, find_packages
|
||||||
|
|
||||||
|
setup(
|
||||||
|
name='sqlmap',
|
||||||
|
version='$VERSION',
|
||||||
|
description="Automatic SQL injection and database takeover tool",
|
||||||
|
author='Bernardo Damele Assumpcao Guimaraes, Miroslav Stampar',
|
||||||
|
author_email='bernardo@sqlmap.org, miroslav@sqlmap.org',
|
||||||
|
url='https://sqlmap.org',
|
||||||
|
download_url='https://github.com/sqlmapproject/sqlmap/archive/$VERSION.zip',
|
||||||
|
license='GNU General Public License v2 (GPLv2)',
|
||||||
|
packages=find_packages(),
|
||||||
|
include_package_data=True,
|
||||||
|
zip_safe=False,
|
||||||
|
# https://pypi.python.org/pypi?%3Aaction=list_classifiers
|
||||||
|
classifiers=[
|
||||||
|
'Development Status :: 5 - Production/Stable',
|
||||||
|
'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
|
||||||
|
'Natural Language :: English',
|
||||||
|
'Operating System :: OS Independent',
|
||||||
|
'Programming Language :: Python',
|
||||||
|
'Environment :: Console',
|
||||||
|
'Topic :: Database',
|
||||||
|
'Topic :: Security',
|
||||||
|
],
|
||||||
|
entry_points={
|
||||||
|
'console_scripts': [
|
||||||
|
'sqlmap = sqlmap.sqlmap:main',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
EOF
|
||||||
|
wget "https://github.com/sqlmapproject/sqlmap/archive/$VERSION.zip" -O sqlmap.zip
|
||||||
|
unzip sqlmap.zip
|
||||||
|
rm sqlmap.zip
|
||||||
|
mv "sqlmap-$VERSION" sqlmap
|
||||||
|
cat > sqlmap/__init__.py << EOF
|
||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
"""
|
||||||
|
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
|
||||||
|
See the file 'doc/COPYING' for copying permission
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
sys.dont_write_bytecode = True
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
||||||
|
EOF
|
||||||
|
cat > README.rst << "EOF"
|
||||||
|
sqlmap
|
||||||
|
======
|
||||||
|
|
||||||
|
|Build Status| |Python 2.6|2.7| |License| |Twitter|
|
||||||
|
|
||||||
|
sqlmap is an open source penetration testing tool that automates the
|
||||||
|
process of detecting and exploiting SQL injection flaws and taking over
|
||||||
|
of database servers. It comes with a powerful detection engine, many
|
||||||
|
niche features for the ultimate penetration tester and a broad range of
|
||||||
|
switches lasting from database fingerprinting, over data fetching from
|
||||||
|
the database, to accessing the underlying file system and executing
|
||||||
|
commands on the operating system via out-of-band connections.
|
||||||
|
|
||||||
|
Screenshots
|
||||||
|
-----------
|
||||||
|
|
||||||
|
.. figure:: https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png
|
||||||
|
:alt: Screenshot
|
||||||
|
|
||||||
|
|
||||||
|
You can visit the `collection of
|
||||||
|
screenshots <https://github.com/sqlmapproject/sqlmap/wiki/Screenshots>`__
|
||||||
|
demonstrating some of features on the wiki.
|
||||||
|
|
||||||
|
Installation
|
||||||
|
------------
|
||||||
|
|
||||||
|
You can use pip to install and/or upgrade the sqlmap to latest (monthly) tagged version with: ::
|
||||||
|
|
||||||
|
pip install --upgrade sqlmap
|
||||||
|
|
||||||
|
Alternatively, you can download the latest tarball by clicking
|
||||||
|
`here <https://github.com/sqlmapproject/sqlmap/tarball/master>`__ or
|
||||||
|
latest zipball by clicking
|
||||||
|
`here <https://github.com/sqlmapproject/sqlmap/zipball/master>`__.
|
||||||
|
|
||||||
|
If you prefer fetching daily updates, you can download sqlmap by cloning the
|
||||||
|
`Git <https://github.com/sqlmapproject/sqlmap>`__ repository:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
|
||||||
|
|
||||||
|
sqlmap works out of the box with
|
||||||
|
`Python <http://www.python.org/download/>`__ version **2.6.x** and
|
||||||
|
**2.7.x** on any platform.
|
||||||
|
|
||||||
|
Usage
|
||||||
|
-----
|
||||||
|
|
||||||
|
To get a list of basic options and switches use:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
python sqlmap.py -h
|
||||||
|
|
||||||
|
To get a list of all options and switches use:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
python sqlmap.py -hh
|
||||||
|
|
||||||
|
You can find a sample run `here <https://asciinema.org/a/46601>`__. To
|
||||||
|
get an overview of sqlmap capabilities, list of supported features and
|
||||||
|
description of all options and switches, along with examples, you are
|
||||||
|
advised to consult the `user's
|
||||||
|
manual <https://github.com/sqlmapproject/sqlmap/wiki/Usage>`__.
|
||||||
|
|
||||||
|
Links
|
||||||
|
-----
|
||||||
|
|
||||||
|
- Homepage: http://sqlmap.org
|
||||||
|
- Download:
|
||||||
|
`.tar.gz <https://github.com/sqlmapproject/sqlmap/tarball/master>`__
|
||||||
|
or `.zip <https://github.com/sqlmapproject/sqlmap/zipball/master>`__
|
||||||
|
- Commits RSS feed:
|
||||||
|
https://github.com/sqlmapproject/sqlmap/commits/master.atom
|
||||||
|
- Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
|
||||||
|
- User's manual: https://github.com/sqlmapproject/sqlmap/wiki
|
||||||
|
- Frequently Asked Questions (FAQ):
|
||||||
|
https://github.com/sqlmapproject/sqlmap/wiki/FAQ
|
||||||
|
- Twitter: [@sqlmap](https://twitter.com/sqlmap)
|
||||||
|
- Demos: http://www.youtube.com/user/inquisb/videos
|
||||||
|
- Screenshots: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
|
||||||
|
|
||||||
|
.. |Build Status| image:: https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master
|
||||||
|
:target: https://api.travis-ci.org/sqlmapproject/sqlmap
|
||||||
|
.. |Python 2.6|2.7| image:: https://img.shields.io/badge/python-2.6|2.7-yellow.svg
|
||||||
|
:target: https://www.python.org/
|
||||||
|
.. |License| image:: https://img.shields.io/badge/license-GPLv2-red.svg
|
||||||
|
:target: https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING
|
||||||
|
.. |Twitter| image:: https://img.shields.io/badge/twitter-@sqlmap-blue.svg
|
||||||
|
:target: https://twitter.com/sqlmap
|
||||||
|
|
||||||
|
.. pandoc --from=markdown --to=rst --output=README.rst sqlmap/README.md
|
||||||
|
.. http://rst.ninjs.org/
|
||||||
|
EOF
|
||||||
|
sed -i "s/^VERSION =.*/VERSION = \"$VERSION\"/g" sqlmap/lib/core/settings.py
|
||||||
|
sed -i "s/^TYPE =.*/TYPE = \"$TYPE\"/g" sqlmap/lib/core/settings.py
|
||||||
|
sed -i "s/.*lib\/core\/settings\.py/`md5sum sqlmap/lib/core/settings.py | cut -d ' ' -f 1` lib\/core\/settings\.py/g" sqlmap/txt/checksum.md5
|
||||||
|
for file in $(find sqlmap -type f | grep -v -E "\.(git|yml)"); do echo include $file >> MANIFEST.in; done
|
||||||
|
python setup.py sdist upload
|
||||||
|
rm -rf $TMP_DIR
|
||||||
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+# Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 # See the file 'doc/COPYING' for copying permission
 
 import codecs
@@ -22,7 +22,6 @@ from lib.core.revision import getRevisionNumber
 
 START_TIME = time.strftime("%H:%M:%S %d-%m-%Y", time.gmtime())
 SQLMAP_HOME = "/opt/sqlmap"
-REVISION = getRevisionNumber()
 
 SMTP_SERVER = "127.0.0.1"
 SMTP_PORT = 25
@@ -30,7 +29,7 @@ SMTP_TIMEOUT = 30
 FROM = "regressiontest@sqlmap.org"
 #TO = "dev@sqlmap.org"
 TO = ["bernardo.damele@gmail.com", "miroslav.stampar@gmail.com"]
-SUBJECT = "regression test started on %s using revision %s" % (START_TIME, REVISION)
+SUBJECT = "regression test started on %s using revision %s" % (START_TIME, getRevisionNumber())
 TARGET = "debian"
 
 def prepare_email(content):
@@ -41,7 +40,7 @@ def prepare_email(content):
 msg = MIMEMultipart()
 msg["Subject"] = SUBJECT
 msg["From"] = FROM
-msg["To"] = TO if isinstance(TO, basestring) else ",".join(TO)
+msg["To"] = TO if isinstance(TO, basestring) else ','.join(TO)
 
 msg.attach(MIMEText(content))
 
@@ -84,7 +83,7 @@ def main():
 if stderr:
 failure_email("Execution of regression test failed with error:\n\n%s" % stderr)
 
-failed_tests = re.findall("running live test case: (.+?) \((\d+)\/\d+\)[\r]*\n.+test failed (at parsing items: (.+))?\s*\- scan folder: (\/.+) \- traceback: (.*?)( - SQL injection not detected)?[\r]*\n", stdout, re.M)
+failed_tests = re.findall("running live test case: (.+?) \((\d+)\/\d+\)[\r]*\n.+test failed (at parsing items: (.+))?\s*\- scan folder: (\/.+) \- traceback: (.*?)( - SQL injection not detected)?[\r]*\n", stdout)
 
 for failed_test in failed_tests:
 title = failed_test[0]
 
15
extra/shutils/strip.sh
Normal file
15
extra/shutils/strip.sh
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# References: http://www.thegeekstuff.com/2012/09/strip-command-examples/
|
||||||
|
# http://www.muppetlabs.com/~breadbox/software/elfkickers.html
|
||||||
|
# https://ptspts.blogspot.hr/2013/12/how-to-make-smaller-c-and-c-binaries.html
|
||||||
|
|
||||||
|
# For example:
|
||||||
|
# python ../../../../../extra/cloak/cloak.py -d -i lib_postgresqludf_sys.so_
|
||||||
|
# ../../../../../extra/shutils/strip.sh lib_postgresqludf_sys.so
|
||||||
|
# python ../../../../../extra/cloak/cloak.py -i lib_postgresqludf_sys.so
|
||||||
|
# rm lib_postgresqludf_sys.so
|
||||||
|
|
||||||
|
strip -S --strip-unneeded --remove-section=.note.gnu.gold-version --remove-section=.comment --remove-section=.note --remove-section=.note.gnu.build-id --remove-section=.note.ABI-tag $*
|
||||||
|
sstrip $*
|
||||||
|
|
||||||
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -16,8 +16,8 @@ from lib.core.enums import CONTENT_TYPE
 from lib.core.exception import SqlmapNoneDataException
 from lib.core.exception import SqlmapUnsupportedDBMSException
 from lib.core.settings import SUPPORTED_DBMS
-from lib.techniques.brute.use import columnExists
+from lib.utils.brute import columnExists
-from lib.techniques.brute.use import tableExists
+from lib.utils.brute import tableExists
 
 def action():
 """
@@ -48,9 +48,6 @@ def action():
 elif kb.nullConnection:
 errMsg += ". You can try to rerun without using optimization "
 errMsg += "switch '%s'" % ("-o" if conf.optimize else "--null-connection")
-else:
-errMsg += ". Support for this DBMS will be implemented at "
-errMsg += "some point"
 
 raise SqlmapUnsupportedDBMSException(errMsg)
 
@@ -77,8 +74,7 @@ def action():
 
 if conf.getPasswordHashes:
 try:
-conf.dumper.userSettings("database management system users password hashes",
-conf.dbmsHandler.getPasswordHashes(), "password hash", CONTENT_TYPE.PASSWORDS)
+conf.dumper.userSettings("database management system users password hashes", conf.dbmsHandler.getPasswordHashes(), "password hash", CONTENT_TYPE.PASSWORDS)
 except SqlmapNoneDataException, ex:
 logger.critical(ex)
 except:
@@ -86,8 +82,7 @@ def action():
 
 if conf.getPrivileges:
 try:
-conf.dumper.userSettings("database management system users privileges",
-conf.dbmsHandler.getPrivileges(), "privilege", CONTENT_TYPE.PRIVILEGES)
+conf.dumper.userSettings("database management system users privileges", conf.dbmsHandler.getPrivileges(), "privilege", CONTENT_TYPE.PRIVILEGES)
 except SqlmapNoneDataException, ex:
 logger.critical(ex)
 except:
@@ -95,8 +90,7 @@ def action():
 
 if conf.getRoles:
 try:
-conf.dumper.userSettings("database management system users roles",
-conf.dbmsHandler.getRoles(), "role", CONTENT_TYPE.ROLES)
+conf.dumper.userSettings("database management system users roles", conf.dbmsHandler.getRoles(), "role", CONTENT_TYPE.ROLES)
 except SqlmapNoneDataException, ex:
 logger.critical(ex)
 except:
 
|
|||||||
@@ -1,18 +1,18 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
 import copy
 import httplib
+import random
 import re
 import socket
+import subprocess
 import time
 
-from subprocess import Popen as execute
-
 from extra.beep.beep import beep
 from lib.core.agent import agent
 from lib.core.common import Backend
@@ -20,6 +20,7 @@ from lib.core.common import extractRegexResult
|
|||||||
from lib.core.common import extractTextTagContent
|
from lib.core.common import extractTextTagContent
|
||||||
from lib.core.common import findDynamicContent
|
from lib.core.common import findDynamicContent
|
||||||
from lib.core.common import Format
|
from lib.core.common import Format
|
||||||
|
from lib.core.common import getFilteredPageContent
|
||||||
from lib.core.common import getLastRequestHTTPError
|
from lib.core.common import getLastRequestHTTPError
|
||||||
from lib.core.common import getPublicTypeMembers
|
from lib.core.common import getPublicTypeMembers
|
||||||
from lib.core.common import getSafeExString
|
from lib.core.common import getSafeExString
|
||||||
@@ -54,6 +55,7 @@ from lib.core.enums import HASHDB_KEYS
|
|||||||
from lib.core.enums import HEURISTIC_TEST
|
from lib.core.enums import HEURISTIC_TEST
|
||||||
from lib.core.enums import HTTP_HEADER
|
from lib.core.enums import HTTP_HEADER
|
||||||
from lib.core.enums import HTTPMETHOD
|
from lib.core.enums import HTTPMETHOD
|
||||||
|
from lib.core.enums import NOTE
|
||||||
from lib.core.enums import NULLCONNECTION
|
from lib.core.enums import NULLCONNECTION
|
||||||
from lib.core.enums import PAYLOAD
|
from lib.core.enums import PAYLOAD
|
||||||
from lib.core.enums import PLACE
|
from lib.core.enums import PLACE
|
||||||
@@ -62,21 +64,27 @@ from lib.core.exception import SqlmapConnectionException
|
|||||||
from lib.core.exception import SqlmapNoneDataException
|
from lib.core.exception import SqlmapNoneDataException
|
||||||
from lib.core.exception import SqlmapSilentQuitException
|
from lib.core.exception import SqlmapSilentQuitException
|
||||||
from lib.core.exception import SqlmapUserQuitException
|
from lib.core.exception import SqlmapUserQuitException
|
||||||
from lib.core.settings import CLOUDFLARE_SERVER_HEADER
|
from lib.core.settings import CANDIDATE_SENTENCE_MIN_LENGTH
|
||||||
|
from lib.core.settings import CHECK_INTERNET_ADDRESS
|
||||||
|
from lib.core.settings import CHECK_INTERNET_VALUE
|
||||||
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
|
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
|
||||||
from lib.core.settings import DUMMY_NON_SQLI_CHECK_APPENDIX
|
from lib.core.settings import DUMMY_NON_SQLI_CHECK_APPENDIX
|
||||||
|
from lib.core.settings import FI_ERROR_REGEX
|
||||||
from lib.core.settings import FORMAT_EXCEPTION_STRINGS
|
from lib.core.settings import FORMAT_EXCEPTION_STRINGS
|
||||||
from lib.core.settings import HEURISTIC_CHECK_ALPHABET
|
from lib.core.settings import HEURISTIC_CHECK_ALPHABET
|
||||||
from lib.core.settings import IDS_WAF_CHECK_PAYLOAD
|
from lib.core.settings import IDS_WAF_CHECK_PAYLOAD
|
||||||
from lib.core.settings import IDS_WAF_CHECK_RATIO
|
from lib.core.settings import IDS_WAF_CHECK_RATIO
|
||||||
from lib.core.settings import IDS_WAF_CHECK_TIMEOUT
|
from lib.core.settings import IDS_WAF_CHECK_TIMEOUT
|
||||||
|
from lib.core.settings import MAX_DIFFLIB_SEQUENCE_LENGTH
|
||||||
from lib.core.settings import NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH
|
from lib.core.settings import NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH
|
||||||
|
from lib.core.settings import SLEEP_TIME_MARKER
|
||||||
from lib.core.settings import SUHOSIN_MAX_VALUE_LENGTH
|
from lib.core.settings import SUHOSIN_MAX_VALUE_LENGTH
|
||||||
from lib.core.settings import SUPPORTED_DBMS
|
from lib.core.settings import SUPPORTED_DBMS
|
||||||
from lib.core.settings import URI_HTTP_HEADER
|
from lib.core.settings import URI_HTTP_HEADER
|
||||||
from lib.core.settings import UPPER_RATIO_BOUND
|
from lib.core.settings import UPPER_RATIO_BOUND
|
||||||
from lib.core.threads import getCurrentThreadData
|
from lib.core.threads import getCurrentThreadData
|
||||||
from lib.request.connect import Connect as Request
|
from lib.request.connect import Connect as Request
|
||||||
|
from lib.request.comparison import comparison
|
||||||
from lib.request.inject import checkBooleanExpression
|
from lib.request.inject import checkBooleanExpression
|
||||||
from lib.request.templates import getPageTemplate
|
from lib.request.templates import getPageTemplate
|
||||||
from lib.techniques.union.test import unionTest
|
from lib.techniques.union.test import unionTest
|
||||||
@@ -90,6 +98,13 @@ def checkSqlInjection(place, parameter, value):
 # Localized thread data needed for some methods
 threadData = getCurrentThreadData()
 
+# Favoring non-string specific boundaries in case of digit-like parameter values
+if value.isdigit():
+kb.cache.intBoundaries = kb.cache.intBoundaries or sorted(copy.deepcopy(conf.boundaries), key=lambda boundary: any(_ in (boundary.prefix or "") or _ in (boundary.suffix or "") for _ in ('"', '\'')))
+boundaries = kb.cache.intBoundaries
+else:
+boundaries = conf.boundaries
+
 # Set the flag for SQL injection test mode
 kb.testMode = True
 
@@ -97,6 +112,9 @@ def checkSqlInjection(place, parameter, value):
 tests = getSortedInjectionTests()
 seenPayload = set()
 
+kb.data.setdefault("randomInt", str(randomInt(10)))
+kb.data.setdefault("randomStr", str(randomStr(10)))
+
 while tests:
 test = tests.pop(0)
 
||||||
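The sort key introduced above orders boundaries so that entries whose prefix or suffix contains a quote character come last when the parameter value is purely numeric, and caches the result in kb.cache.intBoundaries. A minimal sketch of that ordering with made-up boundary objects (the Boundary tuple below is a stand-in, not sqlmap's own boundary class):

from collections import namedtuple

# Hypothetical stand-in for sqlmap's parsed boundary entries
Boundary = namedtuple("Boundary", "prefix suffix")

boundaries = [
    Boundary("'", ")-- "),   # string-context boundary (contains a quote)
    Boundary(")", "-- "),    # numeric-context boundary
    Boundary("", ""),        # bare boundary
]

# Same key as in the hunk: False (no quote anywhere) sorts before True (quote present)
ordered = sorted(boundaries, key=lambda boundary: any(_ in (boundary.prefix or "") or _ in (boundary.suffix or "") for _ in ('"', "'")))

print([b.prefix for b in ordered])   # [')', '', "'"] -- quote-free boundaries are tried first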
@@ -110,7 +128,7 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
# then attempt to identify with a simple DBMS specific boolean-based
|
# then attempt to identify with a simple DBMS specific boolean-based
|
||||||
# test what the DBMS may be
|
# test what the DBMS may be
|
||||||
if not injection.dbms and PAYLOAD.TECHNIQUE.BOOLEAN in injection.data:
|
if not injection.dbms and PAYLOAD.TECHNIQUE.BOOLEAN in injection.data:
|
||||||
if not Backend.getIdentifiedDbms() and kb.heuristicDbms is None:
|
if not Backend.getIdentifiedDbms() and kb.heuristicDbms is None and not kb.droppingRequests:
|
||||||
kb.heuristicDbms = heuristicCheckDbms(injection)
|
kb.heuristicDbms = heuristicCheckDbms(injection)
|
||||||
|
|
||||||
# If the DBMS has already been fingerprinted (via DBMS-specific
|
# If the DBMS has already been fingerprinted (via DBMS-specific
|
||||||
@@ -121,7 +139,7 @@ def checkSqlInjection(place, parameter, value):
 SUPPORTED_DBMS, True) or kb.heuristicDbms or injection.dbms):
 msg = "it looks like the back-end DBMS is '%s'. " % (Format.getErrorParsedDBMSes() or kb.heuristicDbms or injection.dbms)
 msg += "Do you want to skip test payloads specific for other DBMSes? [Y/n]"
-kb.reduceTests = (Backend.getErrorParsedDBMSes() or [kb.heuristicDbms]) if readInput(msg, default='Y').upper() == 'Y' else []
+kb.reduceTests = (Backend.getErrorParsedDBMSes() or [kb.heuristicDbms]) if readInput(msg, default='Y', boolean=True) else []
 
 # If the DBMS has been fingerprinted (via DBMS-specific error
 # message, via simple heuristic check or via DBMS-specific
@@ -136,12 +154,13 @@ def checkSqlInjection(place, parameter, value):
 msg += " and " if conf.level < 5 and conf.risk < 3 else ""
 msg += "risk (%d)" % conf.risk if conf.risk < 3 else ""
 msg += " values? [Y/n]" if conf.level < 5 and conf.risk < 3 else " value? [Y/n]"
-kb.extendTests = (Backend.getErrorParsedDBMSes() or [kb.heuristicDbms]) if readInput(msg, default='Y').upper() == 'Y' else []
+kb.extendTests = (Backend.getErrorParsedDBMSes() or [kb.heuristicDbms]) if readInput(msg, default='Y', boolean=True) else []
 
 title = test.title
 kb.testType = stype = test.stype
 clause = test.clause
 unionExtended = False
+trueCode, falseCode = None, None
 
 if stype == PAYLOAD.TECHNIQUE.UNION:
 configUnion(test.request.char)
@@ -173,17 +192,18 @@ def checkSqlInjection(place, parameter, value):
 lower, upper = int(match.group(1)), int(match.group(2))
 for _ in (lower, upper):
 if _ > 1:
+__ = 2 * (_ - 1) + 1 if _ == lower else 2 * _
 unionExtended = True
-test.request.columns = re.sub(r"\b%d\b" % _, str(2 * _), test.request.columns)
+test.request.columns = re.sub(r"\b%d\b" % _, str(__), test.request.columns)
-title = re.sub(r"\b%d\b" % _, str(2 * _), title)
+title = re.sub(r"\b%d\b" % _, str(__), title)
-test.title = re.sub(r"\b%d\b" % _, str(2 * _), test.title)
+test.title = re.sub(r"\b%d\b" % _, str(__), test.title)
 
 # Skip test if the user's wants to test only for a specific
 # technique
 if conf.tech and isinstance(conf.tech, list) and stype not in conf.tech:
 debugMsg = "skipping test '%s' because the user " % title
 debugMsg += "specified to test only for "
-debugMsg += "%s techniques" % " & ".join(map(lambda x: PAYLOAD.SQLINJECTION[x], conf.tech))
+debugMsg += "%s techniques" % " & ".join(PAYLOAD.SQLINJECTION[_] for _ in conf.tech)
 logger.debug(debugMsg)
 continue
 
||||||
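What the new __ expression above changes for extended UNION column ranges: bounds greater than 1 used to be doubled on both ends, while now the lower bound maps to 2 * (lower - 1) + 1, so consecutive extended ranges stay contiguous. A short worked sketch (the 11-20 and 1-10 ranges are only examples):

def extend_bounds(lower, upper):
    # mirrors the loop in the hunk: bounds <= 1 are left untouched
    def old(_):
        return 2 * _ if _ > 1 else _
    def new(_):
        if _ <= 1:
            return _
        return 2 * (_ - 1) + 1 if _ == lower else 2 * _
    return (old(lower), old(upper)), (new(lower), new(upper))

print(extend_bounds(11, 20))   # ((22, 40), (21, 40)) -- the new range starts at 21, leaving no gap
print(extend_bounds(1, 10))    # ((1, 20), (1, 20))   -- unchanged when the lower bound is 1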
@@ -303,12 +323,6 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
comment = agent.getComment(test.request) if len(conf.boundaries) > 1 else None
|
comment = agent.getComment(test.request) if len(conf.boundaries) > 1 else None
|
||||||
fstPayload = agent.cleanupPayload(test.request.payload, origValue=value if place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER) else None)
|
fstPayload = agent.cleanupPayload(test.request.payload, origValue=value if place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER) else None)
|
||||||
|
|
||||||
# Favoring non-string specific boundaries in case of digit-like parameter values
|
|
||||||
if value.isdigit():
|
|
||||||
boundaries = sorted(copy.deepcopy(conf.boundaries), key=lambda x: any(_ in (x.prefix or "") or _ in (x.suffix or "") for _ in ('"', '\'')))
|
|
||||||
else:
|
|
||||||
boundaries = conf.boundaries
|
|
||||||
|
|
||||||
for boundary in boundaries:
|
for boundary in boundaries:
|
||||||
injectable = False
|
injectable = False
|
||||||
|
|
||||||
@@ -380,8 +394,6 @@ def checkSqlInjection(place, parameter, value):
 # Use different page template than the original
 # one as we are changing parameters value, which
 # will likely result in a different content
-kb.data.setdefault("randomInt", str(randomInt(10)))
-kb.data.setdefault("randomStr", str(randomStr(10)))

 if conf.invalidLogical:
 _ = int(kb.data.randomInt[:2])
@@ -441,11 +453,13 @@ def checkSqlInjection(place, parameter, value):
 kb.matchRatio = None
 kb.negativeLogic = (where == PAYLOAD.WHERE.NEGATIVE)
 Request.queryPage(genCmpPayload(), place, raise404=False)
-falsePage = threadData.lastComparisonPage or ""
+falsePage, falseHeaders, falseCode = threadData.lastComparisonPage or "", threadData.lastComparisonHeaders, threadData.lastComparisonCode
+falseRawResponse = "%s%s" % (falseHeaders, falsePage)

 # Perform the test's True request
 trueResult = Request.queryPage(reqPayload, place, raise404=False)
-truePage = threadData.lastComparisonPage or ""
+truePage, trueHeaders, trueCode = threadData.lastComparisonPage or "", threadData.lastComparisonHeaders, threadData.lastComparisonCode
+trueRawResponse = "%s%s" % (trueHeaders, truePage)

 if trueResult and not(truePage == falsePage and not kb.nullConnection):
 # Perform the test's False request
@@ -460,23 +474,78 @@ def checkSqlInjection(place, parameter, value):
 errorResult = Request.queryPage(errorPayload, place, raise404=False)
 if errorResult:
 continue
+elif not any((conf.string, conf.notString, conf.regexp, conf.code, kb.nullConnection)):
-infoMsg = "%s parameter '%s' seems to be '%s' injectable " % (paramType, parameter, title)
-logger.info(infoMsg)
+_ = comparison(kb.heuristicPage, None, getRatioValue=True)
+if _ > kb.matchRatio:
+kb.matchRatio = _
+logger.debug("adjusting match ratio for current parameter to %.3f" % kb.matchRatio)

 injectable = True

-if not injectable and not any((conf.string, conf.notString, conf.regexp)) and kb.pageStable:
-trueSet = set(extractTextTagContent(truePage))
-falseSet = set(extractTextTagContent(falsePage))
-candidates = filter(None, (_.strip() if _.strip() in (kb.pageTemplate or "") and _.strip() not in falsePage and _.strip() not in threadData.lastComparisonHeaders else None for _ in (trueSet - falseSet)))
+elif threadData.lastComparisonRatio > UPPER_RATIO_BOUND and not any((conf.string, conf.notString, conf.regexp, conf.code, kb.nullConnection)):
+originalSet = set(getFilteredPageContent(kb.pageTemplate, True, "\n").split("\n"))
+trueSet = set(getFilteredPageContent(truePage, True, "\n").split("\n"))
+falseSet = set(getFilteredPageContent(falsePage, True, "\n").split("\n"))

-if candidates:
-conf.string = candidates[0]
-infoMsg = "%s parameter '%s' seems to be '%s' injectable (with --string=\"%s\")" % (paramType, parameter, title, repr(conf.string).lstrip('u').strip("'"))
-logger.info(infoMsg)
+if originalSet == trueSet != falseSet:
+candidates = trueSet - falseSet

-injectable = True
+if candidates:
+candidates = sorted(candidates, key=lambda _: len(_))
+for candidate in candidates:
+if re.match(r"\A[\w.,! ]+\Z", candidate) and ' ' in candidate and candidate.strip() and len(candidate) > CANDIDATE_SENTENCE_MIN_LENGTH:
+conf.string = candidate
+injectable = True

+infoMsg = "%s parameter '%s' appears to be '%s' injectable (with --string=\"%s\")" % (paramType, parameter, title, repr(conf.string).lstrip('u').strip("'"))
+logger.info(infoMsg)

+break

+if injectable:
+if kb.pageStable and not any((conf.string, conf.notString, conf.regexp, conf.code, kb.nullConnection)):
+if all((falseCode, trueCode)) and falseCode != trueCode:
+conf.code = trueCode

+infoMsg = "%s parameter '%s' appears to be '%s' injectable (with --code=%d)" % (paramType, parameter, title, conf.code)
+logger.info(infoMsg)
+else:
+trueSet = set(extractTextTagContent(trueRawResponse))
+trueSet = trueSet.union(__ for _ in trueSet for __ in _.split())

+falseSet = set(extractTextTagContent(falseRawResponse))
+falseSet = falseSet.union(__ for _ in falseSet for __ in _.split())

+candidates = filter(None, (_.strip() if _.strip() in trueRawResponse and _.strip() not in falseRawResponse else None for _ in (trueSet - falseSet)))

+if candidates:
+candidates = sorted(candidates, key=lambda _: len(_))
+for candidate in candidates:
+if re.match(r"\A\w+\Z", candidate):
+break

+conf.string = candidate

+infoMsg = "%s parameter '%s' appears to be '%s' injectable (with --string=\"%s\")" % (paramType, parameter, title, repr(conf.string).lstrip('u').strip("'"))
+logger.info(infoMsg)

+if not any((conf.string, conf.notString)):
+candidates = filter(None, (_.strip() if _.strip() in falseRawResponse and _.strip() not in trueRawResponse else None for _ in (falseSet - trueSet)))

+if candidates:
+candidates = sorted(candidates, key=lambda _: len(_))
+for candidate in candidates:
+if re.match(r"\A\w+\Z", candidate):
+break

+conf.notString = candidate

+infoMsg = "%s parameter '%s' appears to be '%s' injectable (with --not-string=\"%s\")" % (paramType, parameter, title, repr(conf.notString).lstrip('u').strip("'"))
+logger.info(infoMsg)

+if not any((conf.string, conf.notString, conf.code)):
+infoMsg = "%s parameter '%s' appears to be '%s' injectable " % (paramType, parameter, title)
+singleTimeLogMessage(infoMsg)

 # In case of error-based SQL injection
 elif method == PAYLOAD.METHOD.GREP:
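The rewritten block above derives `--string`, `--not-string`, or `--code` automatically by comparing the raw TRUE and FALSE responses (status code first, then word-level set differences of the page text). A minimal standalone sketch of that idea, using plain strings instead of sqlmap's kb/conf objects; `tokenize` below is a simplified stand-in for extractTextTagContent and is not sqlmap's API:

```python
import re

def pick_boolean_markers(true_raw, false_raw, true_code=None, false_code=None):
    """Suggest detection helpers for a boolean-blind injection point.

    Simplified version of the logic above: prefer a differing HTTP status
    code, otherwise look for a short word that appears only in the TRUE
    (or only in the FALSE) response.
    """
    result = {"code": None, "string": None, "not_string": None}

    # 1) A stable status-code difference is the cheapest discriminator
    if true_code and false_code and true_code != false_code:
        result["code"] = true_code
        return result

    tokenize = lambda raw: {tok for chunk in re.findall(r">([^<]+)<", raw) for tok in chunk.split()}
    true_set, false_set = tokenize(true_raw), tokenize(false_raw)

    # 2) Shortest "word" present only in the TRUE response -> --string
    for candidate in sorted(true_set - false_set, key=len):
        if re.match(r"\A\w+\Z", candidate) and candidate in true_raw and candidate not in false_raw:
            result["string"] = candidate
            break

    # 3) Otherwise a word present only in the FALSE response -> --not-string
    if not result["string"]:
        for candidate in sorted(false_set - true_set, key=len):
            if re.match(r"\A\w+\Z", candidate) and candidate in false_raw and candidate not in true_raw:
                result["not_string"] = candidate
                break

    return result

print(pick_boolean_markers("<p>Welcome back admin</p>", "<p>Login failed</p>"))
# -> {'code': None, 'string': 'back', 'not_string': None}
```

Sorting candidates by length, as the hunk does, biases the choice toward short, stable tokens rather than long sentences that are more likely to contain dynamic content.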
@@ -485,12 +554,9 @@ def checkSqlInjection(place, parameter, value):
 try:
 page, headers = Request.queryPage(reqPayload, place, content=True, raise404=False)
 output = extractRegexResult(check, page, re.DOTALL | re.IGNORECASE) \
-or extractRegexResult(check, listToStrValue( \
-[headers[key] for key in headers.keys() if key.lower() != URI_HTTP_HEADER.lower()] \
-if headers else None), re.DOTALL | re.IGNORECASE) \
-or extractRegexResult(check, threadData.lastRedirectMsg[1] \
-if threadData.lastRedirectMsg and threadData.lastRedirectMsg[0] == \
-threadData.lastRequestUID else None, re.DOTALL | re.IGNORECASE)
+or extractRegexResult(check, threadData.lastHTTPError[2] if wasLastResponseHTTPError() else None, re.DOTALL | re.IGNORECASE) \
+or extractRegexResult(check, listToStrValue([headers[key] for key in headers.keys() if key.lower() != URI_HTTP_HEADER.lower()] if headers else None), re.DOTALL | re.IGNORECASE) \
+or extractRegexResult(check, threadData.lastRedirectMsg[1] if threadData.lastRedirectMsg and threadData.lastRedirectMsg[0] == threadData.lastRequestUID else None, re.DOTALL | re.IGNORECASE)

 if output:
 result = output == "1"
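The GREP (error-based) branch above now also searches the body of the last HTTP error, in addition to the page, the response headers, and any redirect message. A small standalone sketch of searching several response artifacts for a marked result; the helper and parameter names are illustrative, not sqlmap's API:

```python
import re

def extract_marked_result(check_regex, page=None, http_error_body=None, headers=None, redirect_msg=None):
    """Return the first regex capture found in any available response artifact.

    Mirrors the chained extractRegexResult(...) calls above: page body first,
    then the last HTTP error body, then headers, then the redirect message.
    """
    header_blob = "\n".join("%s: %s" % (k, v) for k, v in (headers or {}).items())
    for blob in (page, http_error_body, header_blob, redirect_msg):
        if not blob:
            continue
        match = re.search(check_regex, blob, re.DOTALL | re.IGNORECASE)
        if match:
            return match.group(1) if match.groups() else match.group(0)
    return None

# The DBMS error message carries the marked subquery result between known delimiters
print(extract_marked_result(r"###(.*?)###", http_error_body="XPATH syntax error: '###1###'"))
# -> 1
```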
@@ -512,13 +578,20 @@ def checkSqlInjection(place, parameter, value):
 elif method == PAYLOAD.METHOD.TIME:
 # Perform the test's request
 trueResult = Request.queryPage(reqPayload, place, timeBasedCompare=True, raise404=False)
+trueCode = threadData.lastCode

 if trueResult:
+# Extra validation step (e.g. to check for DROP protection mechanisms)
+if SLEEP_TIME_MARKER in reqPayload:
+falseResult = Request.queryPage(reqPayload.replace(SLEEP_TIME_MARKER, "0"), place, timeBasedCompare=True, raise404=False)
+if falseResult:
+continue

 # Confirm test's results
 trueResult = Request.queryPage(reqPayload, place, timeBasedCompare=True, raise404=False)

 if trueResult:
-infoMsg = "%s parameter '%s' seems to be '%s' injectable " % (paramType, parameter, title)
+infoMsg = "%s parameter '%s' appears to be '%s' injectable " % (paramType, parameter, title)
 logger.info(infoMsg)

 injectable = True
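The time-based branch above now re-runs the payload with its sleep time zeroed (via SLEEP_TIME_MARKER) and expects no delay, which filters out endpoints that are simply slow, before re-confirming the delayed behaviour. A standalone sketch of that three-step confirmation; `send` stands in for Request.queryPage with timeBasedCompare, and the marker name and delays are local assumptions:

```python
import time

SLEEP_MARKER = "[SLEEPTIME]"  # stand-in for sqlmap's SLEEP_TIME_MARKER

def confirm_time_based(send, payload_template, delay=1.0):
    """Confirm a time-based blind finding.

    send(payload) -> observed response time in seconds (assumed helper).
    1) the payload with the real delay must be slow,
    2) the same payload with the delay zeroed must be fast (extra validation),
    3) the slow behaviour must be reproducible (confirmation request).
    """
    slow = payload_template.replace(SLEEP_MARKER, str(delay))
    fast = payload_template.replace(SLEEP_MARKER, "0")
    threshold = delay * 0.8

    if send(slow) < threshold:
        return False
    if send(fast) >= threshold:
        return False
    return send(slow) >= threshold

# Simulated target: honours the injected SLEEP() value
def send(payload):
    seconds = float(payload.rsplit("SLEEP(", 1)[1].rstrip(")"))
    time.sleep(seconds)
    return seconds

print(confirm_time_based(send, "1 AND SLEEP(%s)" % SLEEP_MARKER, delay=0.2))  # -> True
```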
@@ -538,7 +611,7 @@ def checkSqlInjection(place, parameter, value):
 warnMsg = "using unescaped version of the test "
 warnMsg += "because of zero knowledge of the "
 warnMsg += "back-end DBMS. You can try to "
-warnMsg += "explicitly set it using option '--dbms'"
+warnMsg += "explicitly set it with option '--dbms'"
 singleTimeWarnMessage(warnMsg)
 else:
 Backend.forceDbms(kb.heuristicDbms)
@@ -557,7 +630,8 @@ def checkSqlInjection(place, parameter, value):
 msg += "extended UNION tests if there is not "
 msg += "at least one other (potential) "
 msg += "technique found. Do you want to skip? [Y/n] "
-kb.futileUnion = readInput(msg, default="Y").strip().upper() == 'N'
+kb.futileUnion = not readInput(msg, default='Y', boolean=True)
 if kb.futileUnion is False:
 continue

@@ -598,20 +672,20 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
|
|
||||||
# Feed with test details every time a test is successful
|
# Feed with test details every time a test is successful
|
||||||
if hasattr(test, "details"):
|
if hasattr(test, "details"):
|
||||||
for dKey, dValue in test.details.items():
|
for key, value in test.details.items():
|
||||||
if dKey == "dbms":
|
if key == "dbms":
|
||||||
injection.dbms = dValue
|
injection.dbms = value
|
||||||
|
|
||||||
if not isinstance(dValue, list):
|
if not isinstance(value, list):
|
||||||
Backend.setDbms(dValue)
|
Backend.setDbms(value)
|
||||||
else:
|
else:
|
||||||
Backend.forceDbms(dValue[0], True)
|
Backend.forceDbms(value[0], True)
|
||||||
|
|
||||||
elif dKey == "dbms_version" and injection.dbms_version is None and not conf.testFilter:
|
elif key == "dbms_version" and injection.dbms_version is None and not conf.testFilter:
|
||||||
injection.dbms_version = Backend.setVersion(dValue)
|
injection.dbms_version = Backend.setVersion(value)
|
||||||
|
|
||||||
elif dKey == "os" and injection.os is None:
|
elif key == "os" and injection.os is None:
|
||||||
injection.os = Backend.setOs(dValue)
|
injection.os = Backend.setOs(value)
|
||||||
|
|
||||||
if vector is None and "vector" in test and test.vector is not None:
|
if vector is None and "vector" in test and test.vector is not None:
|
||||||
vector = test.vector
|
vector = test.vector
|
||||||
@@ -624,9 +698,12 @@ def checkSqlInjection(place, parameter, value):
 injection.data[stype].comment = comment
 injection.data[stype].templatePayload = templatePayload
 injection.data[stype].matchRatio = kb.matchRatio
+injection.data[stype].trueCode = trueCode
+injection.data[stype].falseCode = falseCode

 injection.conf.textOnly = conf.textOnly
 injection.conf.titles = conf.titles
+injection.conf.code = conf.code
 injection.conf.string = conf.string
 injection.conf.notString = conf.notString
 injection.conf.regexp = conf.regexp
@@ -640,7 +717,7 @@ def checkSqlInjection(place, parameter, value):
 infoMsg = "executing alerting shell command(s) ('%s')" % conf.alert
 logger.info(infoMsg)

-process = execute(conf.alert, shell=True)
+process = subprocess.Popen(conf.alert, shell=True)
 process.wait()

 kb.alerted = True
@@ -661,25 +738,23 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
logger.warn(warnMsg)
|
logger.warn(warnMsg)
|
||||||
|
|
||||||
msg = "how do you want to proceed? [(S)kip current test/(e)nd detection phase/(n)ext parameter/(c)hange verbosity/(q)uit]"
|
msg = "how do you want to proceed? [(S)kip current test/(e)nd detection phase/(n)ext parameter/(c)hange verbosity/(q)uit]"
|
||||||
choice = readInput(msg, default="S", checkBatch=False)
|
choice = readInput(msg, default='S', checkBatch=False).upper()
|
||||||
|
|
||||||
if choice[0] in ("s", "S"):
|
if choice == 'C':
|
||||||
pass
|
|
||||||
elif choice[0] in ("c", "C"):
|
|
||||||
choice = None
|
choice = None
|
||||||
while not ((choice or "").isdigit() and 0 <= int(choice) <= 6):
|
while not ((choice or "").isdigit() and 0 <= int(choice) <= 6):
|
||||||
if choice:
|
if choice:
|
||||||
logger.warn("invalid value")
|
logger.warn("invalid value")
|
||||||
msg = "enter new verbosity level: [0-6] "
|
msg = "enter new verbosity level: [0-6] "
|
||||||
choice = readInput(msg, default=str(conf.verbose), checkBatch=False).strip()
|
choice = readInput(msg, default=str(conf.verbose), checkBatch=False)
|
||||||
conf.verbose = int(choice)
|
conf.verbose = int(choice)
|
||||||
setVerbosity()
|
setVerbosity()
|
||||||
tests.insert(0, test)
|
tests.insert(0, test)
|
||||||
elif choice[0] in ("n", "N"):
|
elif choice == 'N':
|
||||||
return None
|
return None
|
||||||
elif choice[0] in ("e", "E"):
|
elif choice == 'E':
|
||||||
kb.endDetection = True
|
kb.endDetection = True
|
||||||
elif choice[0] in ("q", "Q"):
|
elif choice == 'Q':
|
||||||
raise SqlmapUserQuitException
|
raise SqlmapUserQuitException
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
@@ -691,19 +766,20 @@ def checkSqlInjection(place, parameter, value):
 # Return the injection object
 if injection.place is not None and injection.parameter is not None:
 if not conf.dropSetCookie and PAYLOAD.TECHNIQUE.BOOLEAN in injection.data and injection.data[PAYLOAD.TECHNIQUE.BOOLEAN].vector.startswith('OR'):
-warnMsg = "in OR boolean-based injections, please consider usage "
+warnMsg = "in OR boolean-based injection cases, please consider usage "
 warnMsg += "of switch '--drop-set-cookie' if you experience any "
 warnMsg += "problems during data retrieval"
 logger.warn(warnMsg)

-injection = checkFalsePositives(injection)
+if not checkFalsePositives(injection):

-if not injection:
 kb.vulnHosts.remove(conf.hostname)
+if NOTE.FALSE_POSITIVE_OR_UNEXPLOITABLE not in injection.notes:
+injection.notes.append(NOTE.FALSE_POSITIVE_OR_UNEXPLOITABLE)

 else:
 injection = None

-if injection:
+if injection and NOTE.FALSE_POSITIVE_OR_UNEXPLOITABLE not in injection.notes:
 checkSuhosinPatch(injection)
 checkFilteredChars(injection)

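With the change above, checkFalsePositives() returns a boolean and the caller tags the injection with NOTE.FALSE_POSITIVE_OR_UNEXPLOITABLE instead of discarding it. The false-positive test itself (later hunks) asks the suspected injection point a few arithmetic questions with random integers and rejects the finding if the answers are inconsistent. A standalone sketch of that consistency check; `check_boolean_expression` stands in for sqlmap's checkBooleanExpression and is an assumed callback:

```python
import random

def looks_like_false_positive(check_boolean_expression):
    """Return True when a suspected blind injection answers inconsistently.

    check_boolean_expression(expr) is expected to evaluate an SQL boolean
    expression through the injection point and report the page's verdict.
    """
    a, b, c = random.sample(range(10, 100), 3)

    probes = (
        ("%d=%d" % (a, a), True),    # must hold
        ("%d=%d" % (a, c), False),   # must not hold
        ("%d=%d" % (c, b), False),   # must not hold
        ("%d=%d" % (b, b), True),    # must hold
        ("%d %d" % (c, b), False),   # invalid statement, must not "hold"
    )
    return any(check_boolean_expression(expr) != expected for expr, expected in probes)

# A well-behaved oracle passes the check; a page that always answers "true" does not
honest = lambda expr: ("=" in expr) and (lambda l, r: l == r)(*expr.split("="))
print(looks_like_false_positive(honest))             # -> False
print(looks_like_false_positive(lambda expr: True))  # -> True
```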
@@ -748,7 +824,7 @@ def checkFalsePositives(injection):
|
|||||||
Checks for false positives (only in single special cases)
|
Checks for false positives (only in single special cases)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
retVal = injection
|
retVal = True
|
||||||
|
|
||||||
if all(_ in (PAYLOAD.TECHNIQUE.BOOLEAN, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED) for _ in injection.data) or\
|
if all(_ in (PAYLOAD.TECHNIQUE.BOOLEAN, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED) for _ in injection.data) or\
|
||||||
(len(injection.data) == 1 and PAYLOAD.TECHNIQUE.UNION in injection.data and "Generic" in injection.data[PAYLOAD.TECHNIQUE.UNION].title):
|
(len(injection.data) == 1 and PAYLOAD.TECHNIQUE.UNION in injection.data and "Generic" in injection.data[PAYLOAD.TECHNIQUE.UNION].title):
|
||||||
@@ -774,7 +850,7 @@ def checkFalsePositives(injection):
|
|||||||
break
|
break
|
||||||
|
|
||||||
if not checkBooleanExpression("%d=%d" % (randInt1, randInt1)):
|
if not checkBooleanExpression("%d=%d" % (randInt1, randInt1)):
|
||||||
retVal = None
|
retVal = False
|
||||||
break
|
break
|
||||||
|
|
||||||
# Just in case if DBMS hasn't properly recovered from previous delayed request
|
# Just in case if DBMS hasn't properly recovered from previous delayed request
|
||||||
@@ -782,22 +858,22 @@ def checkFalsePositives(injection):
|
|||||||
checkBooleanExpression("%d=%d" % (randInt1, randInt2))
|
checkBooleanExpression("%d=%d" % (randInt1, randInt2))
|
||||||
|
|
||||||
if checkBooleanExpression("%d=%d" % (randInt1, randInt3)): # this must not be evaluated to True
|
if checkBooleanExpression("%d=%d" % (randInt1, randInt3)): # this must not be evaluated to True
|
||||||
retVal = None
|
retVal = False
|
||||||
break
|
break
|
||||||
|
|
||||||
elif checkBooleanExpression("%d=%d" % (randInt3, randInt2)): # this must not be evaluated to True
|
elif checkBooleanExpression("%d=%d" % (randInt3, randInt2)): # this must not be evaluated to True
|
||||||
retVal = None
|
retVal = False
|
||||||
break
|
break
|
||||||
|
|
||||||
elif not checkBooleanExpression("%d=%d" % (randInt2, randInt2)): # this must be evaluated to True
|
elif not checkBooleanExpression("%d=%d" % (randInt2, randInt2)): # this must be evaluated to True
|
||||||
retVal = None
|
retVal = False
|
||||||
break
|
break
|
||||||
|
|
||||||
elif checkBooleanExpression("%d %d" % (randInt3, randInt2)): # this must not be evaluated to True (invalid statement)
|
elif checkBooleanExpression("%d %d" % (randInt3, randInt2)): # this must not be evaluated to True (invalid statement)
|
||||||
retVal = None
|
retVal = False
|
||||||
break
|
break
|
||||||
|
|
||||||
if retVal is None:
|
if not retVal:
|
||||||
warnMsg = "false positive or unexploitable injection point detected"
|
warnMsg = "false positive or unexploitable injection point detected"
|
||||||
logger.warn(warnMsg)
|
logger.warn(warnMsg)
|
||||||
|
|
||||||
@@ -864,8 +940,10 @@ def heuristicCheckSqlInjection(place, parameter):
|
|||||||
|
|
||||||
origValue = conf.paramDict[place][parameter]
|
origValue = conf.paramDict[place][parameter]
|
||||||
paramType = conf.method if conf.method not in (None, HTTPMETHOD.GET, HTTPMETHOD.POST) else place
|
paramType = conf.method if conf.method not in (None, HTTPMETHOD.GET, HTTPMETHOD.POST) else place
|
||||||
|
|
||||||
prefix = ""
|
prefix = ""
|
||||||
suffix = ""
|
suffix = ""
|
||||||
|
randStr = ""
|
||||||
|
|
||||||
if conf.prefix or conf.suffix:
|
if conf.prefix or conf.suffix:
|
||||||
if conf.prefix:
|
if conf.prefix:
|
||||||
@@ -874,9 +952,7 @@ def heuristicCheckSqlInjection(place, parameter):
|
|||||||
if conf.suffix:
|
if conf.suffix:
|
||||||
suffix = conf.suffix
|
suffix = conf.suffix
|
||||||
|
|
||||||
randStr = ""
|
while randStr.count('\'') != 1 or randStr.count('\"') != 1:
|
||||||
|
|
||||||
while '\'' not in randStr:
|
|
||||||
randStr = randomStr(length=10, alphabet=HEURISTIC_CHECK_ALPHABET)
|
randStr = randomStr(length=10, alphabet=HEURISTIC_CHECK_ALPHABET)
|
||||||
|
|
||||||
kb.heuristicMode = True
|
kb.heuristicMode = True
|
||||||
@@ -885,6 +961,7 @@ def heuristicCheckSqlInjection(place, parameter):
|
|||||||
payload = agent.payload(place, parameter, newValue=payload)
|
payload = agent.payload(place, parameter, newValue=payload)
|
||||||
page, _ = Request.queryPage(payload, place, content=True, raise404=False)
|
page, _ = Request.queryPage(payload, place, content=True, raise404=False)
|
||||||
|
|
||||||
|
kb.heuristicPage = page
|
||||||
kb.heuristicMode = False
|
kb.heuristicMode = False
|
||||||
|
|
||||||
parseFilePaths(page)
|
parseFilePaths(page)
|
||||||
@@ -906,7 +983,7 @@ def heuristicCheckSqlInjection(place, parameter):
|
|||||||
|
|
||||||
if not result:
|
if not result:
|
||||||
randStr = randomStr()
|
randStr = randomStr()
|
||||||
payload = "%s%s%s" % (prefix, "%s%s" % (origValue, randStr), suffix)
|
payload = "%s%s%s" % (prefix, "%s.%d%s" % (origValue, random.randint(1, 9), randStr), suffix)
|
||||||
payload = agent.payload(place, parameter, newValue=payload, where=PAYLOAD.WHERE.REPLACE)
|
payload = agent.payload(place, parameter, newValue=payload, where=PAYLOAD.WHERE.REPLACE)
|
||||||
casting = Request.queryPage(payload, place, raise404=False)
|
casting = Request.queryPage(payload, place, raise404=False)
|
||||||
|
|
||||||
@@ -920,7 +997,7 @@ def heuristicCheckSqlInjection(place, parameter):
|
|||||||
|
|
||||||
if kb.ignoreCasted is None:
|
if kb.ignoreCasted is None:
|
||||||
message = "do you want to skip those kind of cases (and save scanning time)? %s " % ("[Y/n]" if conf.multipleTargets else "[y/N]")
|
message = "do you want to skip those kind of cases (and save scanning time)? %s " % ("[Y/n]" if conf.multipleTargets else "[y/N]")
|
||||||
kb.ignoreCasted = readInput(message, default='Y' if conf.multipleTargets else 'N').upper() != 'N'
|
kb.ignoreCasted = readInput(message, default='Y' if conf.multipleTargets else 'N', boolean=True)
|
||||||
|
|
||||||
elif result:
|
elif result:
|
||||||
infoMsg += "be injectable"
|
infoMsg += "be injectable"
|
||||||
@@ -947,7 +1024,7 @@ def heuristicCheckSqlInjection(place, parameter):
|
|||||||
infoMsg += "'%s' might be vulnerable to cross-site scripting attacks" % parameter
|
infoMsg += "'%s' might be vulnerable to cross-site scripting attacks" % parameter
|
||||||
logger.info(infoMsg)
|
logger.info(infoMsg)
|
||||||
|
|
||||||
for match in re.finditer("(?i)[^\n]*(no such file|failed (to )?open)[^\n]*", page or ""):
|
for match in re.finditer(FI_ERROR_REGEX, page or ""):
|
||||||
if randStr1.lower() in match.group(0).lower():
|
if randStr1.lower() in match.group(0).lower():
|
||||||
infoMsg = "heuristic (FI) test shows that %s parameter " % paramType
|
infoMsg = "heuristic (FI) test shows that %s parameter " % paramType
|
||||||
infoMsg += "'%s' might be vulnerable to file inclusion attacks" % parameter
|
infoMsg += "'%s' might be vulnerable to file inclusion attacks" % parameter
|
||||||
@@ -1013,12 +1090,22 @@ def checkDynamicContent(firstPage, secondPage):
 logger.critical(warnMsg)
 return

-seqMatcher = getCurrentThreadData().seqMatcher
-seqMatcher.set_seq1(firstPage)
-seqMatcher.set_seq2(secondPage)
+if firstPage and secondPage and any(len(_) > MAX_DIFFLIB_SEQUENCE_LENGTH for _ in (firstPage, secondPage)):
+ratio = None
+else:
+try:
+seqMatcher = getCurrentThreadData().seqMatcher
+seqMatcher.set_seq1(firstPage)
+seqMatcher.set_seq2(secondPage)
+ratio = seqMatcher.quick_ratio()
+except MemoryError:
+ratio = None

+if ratio is None:
+kb.skipSeqMatcher = True

 # In case of an intolerable difference turn on dynamicity removal engine
-if seqMatcher.quick_ratio() <= UPPER_RATIO_BOUND:
+elif ratio <= UPPER_RATIO_BOUND:
 findDynamicContent(firstPage, secondPage)

 count = 0
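The rewritten checkDynamicContent() guards difflib against oversized pages and MemoryError, falling back to skipping the sequence matcher entirely. A standalone sketch of the same guard; MAX_LEN is an assumed local cap, not the value of sqlmap's MAX_DIFFLIB_SEQUENCE_LENGTH:

```python
import difflib

MAX_LEN = 10 * 1024 * 1024  # assumed cap; sqlmap keeps its own MAX_DIFFLIB_SEQUENCE_LENGTH

def safe_quick_ratio(first, second, max_len=MAX_LEN):
    """Return difflib's quick_ratio() or None when the comparison is not feasible."""
    if not first or not second or any(len(_) > max_len for _ in (first, second)):
        return None
    try:
        matcher = difflib.SequenceMatcher(None, first, second)
        return matcher.quick_ratio()
    except MemoryError:
        return None

ratio = safe_quick_ratio("<html>stable page</html>", "<html>stable page!</html>")
print(ratio is None or ratio > 0.9)  # -> True; a None result means "skip the matcher"
```

Returning None instead of raising keeps the caller's logic simple: None maps to "skip sequence matching", anything at or below the ratio bound triggers dynamic-content removal.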
@@ -1088,19 +1175,19 @@ def checkStability():
|
|||||||
logger.warn(warnMsg)
|
logger.warn(warnMsg)
|
||||||
|
|
||||||
message = "how do you want to proceed? [(C)ontinue/(s)tring/(r)egex/(q)uit] "
|
message = "how do you want to proceed? [(C)ontinue/(s)tring/(r)egex/(q)uit] "
|
||||||
test = readInput(message, default="C")
|
choice = readInput(message, default='C').upper()
|
||||||
|
|
||||||
if test and test[0] in ("q", "Q"):
|
if choice == 'Q':
|
||||||
raise SqlmapUserQuitException
|
raise SqlmapUserQuitException
|
||||||
|
|
||||||
elif test and test[0] in ("s", "S"):
|
elif choice == 'S':
|
||||||
showStaticWords(firstPage, secondPage)
|
showStaticWords(firstPage, secondPage)
|
||||||
|
|
||||||
message = "please enter value for parameter 'string': "
|
message = "please enter value for parameter 'string': "
|
||||||
test = readInput(message)
|
string = readInput(message)
|
||||||
|
|
||||||
if test:
|
if string:
|
||||||
conf.string = test
|
conf.string = string
|
||||||
|
|
||||||
if kb.nullConnection:
|
if kb.nullConnection:
|
||||||
debugMsg = "turning off NULL connection "
|
debugMsg = "turning off NULL connection "
|
||||||
@@ -1112,12 +1199,12 @@ def checkStability():
|
|||||||
errMsg = "Empty value supplied"
|
errMsg = "Empty value supplied"
|
||||||
raise SqlmapNoneDataException(errMsg)
|
raise SqlmapNoneDataException(errMsg)
|
||||||
|
|
||||||
elif test and test[0] in ("r", "R"):
|
elif choice == 'R':
|
||||||
message = "please enter value for parameter 'regex': "
|
message = "please enter value for parameter 'regex': "
|
||||||
test = readInput(message)
|
regex = readInput(message)
|
||||||
|
|
||||||
if test:
|
if regex:
|
||||||
conf.regex = test
|
conf.regex = regex
|
||||||
|
|
||||||
if kb.nullConnection:
|
if kb.nullConnection:
|
||||||
debugMsg = "turning off NULL connection "
|
debugMsg = "turning off NULL connection "
|
||||||
@@ -1218,9 +1305,8 @@ def checkWaf():
|
|||||||
if not conf.identifyWaf:
|
if not conf.identifyWaf:
|
||||||
message = "do you want sqlmap to try to detect backend "
|
message = "do you want sqlmap to try to detect backend "
|
||||||
message += "WAF/IPS/IDS? [y/N] "
|
message += "WAF/IPS/IDS? [y/N] "
|
||||||
output = readInput(message, default="N")
|
|
||||||
|
|
||||||
if output and output[0] in ("Y", "y"):
|
if readInput(message, default='N', boolean=True):
|
||||||
conf.identifyWaf = True
|
conf.identifyWaf = True
|
||||||
|
|
||||||
if conf.timeout == defaults.timeout:
|
if conf.timeout == defaults.timeout:
|
||||||
@@ -1235,6 +1321,9 @@ def identifyWaf():
|
|||||||
if not conf.identifyWaf:
|
if not conf.identifyWaf:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
if not kb.wafFunctions:
|
||||||
|
setWafFunctions()
|
||||||
|
|
||||||
kb.testMode = True
|
kb.testMode = True
|
||||||
|
|
||||||
infoMsg = "using WAF scripts to detect "
|
infoMsg = "using WAF scripts to detect "
|
||||||
@@ -1258,11 +1347,11 @@ def identifyWaf():
|
|||||||
kb.redirectChoice = popValue()
|
kb.redirectChoice = popValue()
|
||||||
return page or "", headers or {}, code
|
return page or "", headers or {}, code
|
||||||
|
|
||||||
retVal = False
|
retVal = []
|
||||||
|
|
||||||
for function, product in kb.wafFunctions:
|
for function, product in kb.wafFunctions:
|
||||||
try:
|
try:
|
||||||
logger.debug("checking for WAF/IDS/IPS product '%s'" % product)
|
logger.debug("checking for WAF/IPS/IDS product '%s'" % product)
|
||||||
found = function(_)
|
found = function(_)
|
||||||
except Exception, ex:
|
except Exception, ex:
|
||||||
errMsg = "exception occurred while running "
|
errMsg = "exception occurred while running "
|
||||||
@@ -1272,22 +1361,24 @@ def identifyWaf():
 found = False

 if found:
-retVal = product
-break
+errMsg = "WAF/IPS/IDS identified as '%s'" % product
+logger.critical(errMsg)
+retVal.append(product)

 if retVal:
-errMsg = "WAF/IDS/IPS identified '%s'. Please " % retVal
-errMsg += "consider usage of tamper scripts (option '--tamper')"
-logger.critical(errMsg)

 message = "are you sure that you want to "
 message += "continue with further target testing? [y/N] "
-output = readInput(message, default="N")
+choice = readInput(message, default='N', boolean=True)

-if output and output[0] not in ("Y", "y"):
+if not conf.tamper:
+warnMsg = "please consider usage of tamper scripts (option '--tamper')"
+singleTimeWarnMessage(warnMsg)

+if not choice:
 raise SqlmapUserQuitException
 else:
-warnMsg = "no WAF/IDS/IPS product has been identified (this doesn't mean that there is none)"
+warnMsg = "WAF/IPS/IDS product hasn't been identified"
 logger.warn(warnMsg)

 kb.testType = None
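identifyWaf() now collects every matching product into a list instead of stopping at the first hit, and only suggests --tamper when no tamper script is already configured. A standalone sketch of running a set of fingerprint functions and gathering all matches; the fingerprints and the probe below are invented examples, not sqlmap's WAF scripts:

```python
def identify_waf_products(fingerprints, probe):
    """Run each (name, check) fingerprint against a probe callable and
    return every product that matched, logging failures but not aborting."""
    identified = []
    for name, check in fingerprints:
        try:
            found = check(probe)
        except Exception as ex:
            print("exception while running the check for '%s' ('%s')" % (name, ex))
            found = False
        if found:
            print("WAF/IPS/IDS identified as '%s'" % name)
            identified.append(name)
    return identified

# Illustrative fingerprints keyed on response headers returned by probe()
fingerprints = [
    ("Generic (www.example.com)", lambda probe: "X-Blocked" in probe()),
    ("CloudSomething", lambda probe: probe().get("Server", "").startswith("cloud")),
]
print(identify_waf_products(fingerprints, lambda: {"Server": "cloudsomething/1.0"}))
# -> ['CloudSomething']
```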
@@ -1315,7 +1406,7 @@ def checkNullConnection():
|
|||||||
if not page and HTTP_HEADER.CONTENT_LENGTH in (headers or {}):
|
if not page and HTTP_HEADER.CONTENT_LENGTH in (headers or {}):
|
||||||
kb.nullConnection = NULLCONNECTION.HEAD
|
kb.nullConnection = NULLCONNECTION.HEAD
|
||||||
|
|
||||||
infoMsg = "NULL connection is supported with HEAD header"
|
infoMsg = "NULL connection is supported with HEAD method (Content-Length)"
|
||||||
logger.info(infoMsg)
|
logger.info(infoMsg)
|
||||||
else:
|
else:
|
||||||
page, headers, _ = Request.getPage(auxHeaders={HTTP_HEADER.RANGE: "bytes=-1"})
|
page, headers, _ = Request.getPage(auxHeaders={HTTP_HEADER.RANGE: "bytes=-1"})
|
||||||
@@ -1323,7 +1414,7 @@ def checkNullConnection():
|
|||||||
if page and len(page) == 1 and HTTP_HEADER.CONTENT_RANGE in (headers or {}):
|
if page and len(page) == 1 and HTTP_HEADER.CONTENT_RANGE in (headers or {}):
|
||||||
kb.nullConnection = NULLCONNECTION.RANGE
|
kb.nullConnection = NULLCONNECTION.RANGE
|
||||||
|
|
||||||
infoMsg = "NULL connection is supported with GET header "
|
infoMsg = "NULL connection is supported with GET method (Range)"
|
||||||
infoMsg += "'%s'" % kb.nullConnection
|
infoMsg += "'%s'" % kb.nullConnection
|
||||||
logger.info(infoMsg)
|
logger.info(infoMsg)
|
||||||
else:
|
else:
|
||||||
@@ -1383,10 +1474,6 @@ def checkConnection(suppressOutput=False):
|
|||||||
else:
|
else:
|
||||||
kb.errorIsNone = True
|
kb.errorIsNone = True
|
||||||
|
|
||||||
if headers and headers.get("Server", "") == CLOUDFLARE_SERVER_HEADER:
|
|
||||||
warnMsg = "CloudFlare response detected"
|
|
||||||
logger.warn(warnMsg)
|
|
||||||
|
|
||||||
except SqlmapConnectionException, ex:
|
except SqlmapConnectionException, ex:
|
||||||
if conf.ipv6:
|
if conf.ipv6:
|
||||||
warnMsg = "check connection to a provided "
|
warnMsg = "check connection to a provided "
|
||||||
@@ -1404,7 +1491,7 @@ def checkConnection(suppressOutput=False):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
msg = "it is not recommended to continue in this kind of cases. Do you want to quit and make sure that everything is set up properly? [Y/n] "
|
msg = "it is not recommended to continue in this kind of cases. Do you want to quit and make sure that everything is set up properly? [Y/n] "
|
||||||
if readInput(msg, default="Y") not in ("n", "N"):
|
if readInput(msg, default='Y', boolean=True):
|
||||||
raise SqlmapSilentQuitException
|
raise SqlmapSilentQuitException
|
||||||
else:
|
else:
|
||||||
kb.ignoreNotFound = True
|
kb.ignoreNotFound = True
|
||||||
@@ -1413,5 +1500,12 @@ def checkConnection(suppressOutput=False):

 return True

+def checkInternet():
+content = Request.getPage(url=CHECK_INTERNET_ADDRESS, checking=True)[0]
+return CHECK_INTERNET_VALUE in (content or "")

 def setVerbosity():  # Cross-linked function
 raise NotImplementedError

+def setWafFunctions():  # Cross-linked function
+raise NotImplementedError
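The new checkInternet() fetches a known address and looks for an expected marker in the body; CHECK_INTERNET_ADDRESS and CHECK_INTERNET_VALUE are sqlmap settings that are not shown in this diff. A standalone approximation using only the standard library; the probe URL and expected text below are placeholders, not sqlmap's actual values:

```python
import urllib.request

PROBE_URL = "https://example.com/"   # placeholder for CHECK_INTERNET_ADDRESS
EXPECTED_MARKER = "Example Domain"   # placeholder for CHECK_INTERNET_VALUE

def check_internet(url=PROBE_URL, marker=EXPECTED_MARKER, timeout=10):
    """Return True when the probe page is reachable and contains the marker."""
    try:
        with urllib.request.urlopen(url, timeout=timeout) as response:
            content = response.read().decode("utf-8", errors="replace")
    except OSError:
        content = ""
    return marker in content

print(check_internet())
```

Checking for a known marker, rather than just a successful HTTP status, avoids false positives from captive portals that answer every request with their own page.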
|
|||||||
@@ -1,12 +1,13 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
|
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
|
||||||
See the file 'doc/COPYING' for copying permission
|
See the file 'doc/COPYING' for copying permission
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
import time
|
||||||
|
|
||||||
from lib.controller.action import action
|
from lib.controller.action import action
|
||||||
from lib.controller.checks import checkSqlInjection
|
from lib.controller.checks import checkSqlInjection
|
||||||
@@ -15,6 +16,7 @@ from lib.controller.checks import checkStability
|
|||||||
from lib.controller.checks import checkString
|
from lib.controller.checks import checkString
|
||||||
from lib.controller.checks import checkRegexp
|
from lib.controller.checks import checkRegexp
|
||||||
from lib.controller.checks import checkConnection
|
from lib.controller.checks import checkConnection
|
||||||
|
from lib.controller.checks import checkInternet
|
||||||
from lib.controller.checks import checkNullConnection
|
from lib.controller.checks import checkNullConnection
|
||||||
from lib.controller.checks import checkWaf
|
from lib.controller.checks import checkWaf
|
||||||
from lib.controller.checks import heuristicCheckSqlInjection
|
from lib.controller.checks import heuristicCheckSqlInjection
|
||||||
@@ -45,6 +47,7 @@ from lib.core.enums import CONTENT_TYPE
|
|||||||
from lib.core.enums import HASHDB_KEYS
|
from lib.core.enums import HASHDB_KEYS
|
||||||
from lib.core.enums import HEURISTIC_TEST
|
from lib.core.enums import HEURISTIC_TEST
|
||||||
from lib.core.enums import HTTPMETHOD
|
from lib.core.enums import HTTPMETHOD
|
||||||
|
from lib.core.enums import NOTE
|
||||||
from lib.core.enums import PAYLOAD
|
from lib.core.enums import PAYLOAD
|
||||||
from lib.core.enums import PLACE
|
from lib.core.enums import PLACE
|
||||||
from lib.core.exception import SqlmapBaseException
|
from lib.core.exception import SqlmapBaseException
|
||||||
@@ -64,7 +67,6 @@ from lib.core.settings import REFERER_ALIASES
|
|||||||
from lib.core.settings import USER_AGENT_ALIASES
|
from lib.core.settings import USER_AGENT_ALIASES
|
||||||
from lib.core.target import initTargetEnv
|
from lib.core.target import initTargetEnv
|
||||||
from lib.core.target import setupTargetEnv
|
from lib.core.target import setupTargetEnv
|
||||||
from thirdparty.pagerank.pagerank import get_pagerank
|
|
||||||
|
|
||||||
def _selectInjection():
|
def _selectInjection():
|
||||||
"""
|
"""
|
||||||
@@ -116,11 +118,11 @@ def _selectInjection():
|
|||||||
message += "\n"
|
message += "\n"
|
||||||
|
|
||||||
message += "[q] Quit"
|
message += "[q] Quit"
|
||||||
select = readInput(message, default="0")
|
choice = readInput(message, default='0').upper()
|
||||||
|
|
||||||
if select.isdigit() and int(select) < len(kb.injections) and int(select) >= 0:
|
if choice.isdigit() and int(choice) < len(kb.injections) and int(choice) >= 0:
|
||||||
index = int(select)
|
index = int(choice)
|
||||||
elif select[0] in ("Q", "q"):
|
elif choice == 'Q':
|
||||||
raise SqlmapUserQuitException
|
raise SqlmapUserQuitException
|
||||||
else:
|
else:
|
||||||
errMsg = "invalid choice"
|
errMsg = "invalid choice"
|
||||||
@@ -140,7 +142,7 @@ def _formatInjection(inj):
|
|||||||
if inj.place == PLACE.CUSTOM_HEADER:
|
if inj.place == PLACE.CUSTOM_HEADER:
|
||||||
payload = payload.split(',', 1)[1]
|
payload = payload.split(',', 1)[1]
|
||||||
if stype == PAYLOAD.TECHNIQUE.UNION:
|
if stype == PAYLOAD.TECHNIQUE.UNION:
|
||||||
count = re.sub(r"(?i)(\(.+\))|(\blimit[^A-Za-z]+)", "", sdata.payload).count(',') + 1
|
count = re.sub(r"(?i)(\(.+\))|(\blimit[^a-z]+)", "", sdata.payload).count(',') + 1
|
||||||
title = re.sub(r"\d+ to \d+", str(count), title)
|
title = re.sub(r"\d+ to \d+", str(count), title)
|
||||||
vector = agent.forgeUnionQuery("[QUERY]", vector[0], vector[1], vector[2], None, None, vector[5], vector[6])
|
vector = agent.forgeUnionQuery("[QUERY]", vector[0], vector[1], vector[2], None, None, vector[5], vector[6])
|
||||||
if count == 1:
|
if count == 1:
|
||||||
@@ -161,10 +163,11 @@ def _showInjections():
|
|||||||
else:
|
else:
|
||||||
header = "sqlmap resumed the following injection point(s) from stored session"
|
header = "sqlmap resumed the following injection point(s) from stored session"
|
||||||
|
|
||||||
if hasattr(conf, "api"):
|
if conf.api:
|
||||||
|
conf.dumper.string("", {"url": conf.url, "query": conf.parameters.get(PLACE.GET), "data": conf.parameters.get(PLACE.POST)}, content_type=CONTENT_TYPE.TARGET)
|
||||||
conf.dumper.string("", kb.injections, content_type=CONTENT_TYPE.TECHNIQUES)
|
conf.dumper.string("", kb.injections, content_type=CONTENT_TYPE.TECHNIQUES)
|
||||||
else:
|
else:
|
||||||
data = "".join(set(map(lambda x: _formatInjection(x), kb.injections))).rstrip("\n")
|
data = "".join(set(_formatInjection(_) for _ in kb.injections)).rstrip("\n")
|
||||||
conf.dumper.string(header, data)
|
conf.dumper.string(header, data)
|
||||||
|
|
||||||
if conf.tamper:
|
if conf.tamper:
|
||||||
@@ -182,8 +185,8 @@ def _randomFillBlankFields(value):
|
|||||||
|
|
||||||
if extractRegexResult(EMPTY_FORM_FIELDS_REGEX, value):
|
if extractRegexResult(EMPTY_FORM_FIELDS_REGEX, value):
|
||||||
message = "do you want to fill blank fields with random values? [Y/n] "
|
message = "do you want to fill blank fields with random values? [Y/n] "
|
||||||
test = readInput(message, default="Y")
|
|
||||||
if not test or test[0] in ("y", "Y"):
|
if readInput(message, default='Y', boolean=True):
|
||||||
for match in re.finditer(EMPTY_FORM_FIELDS_REGEX, retVal):
|
for match in re.finditer(EMPTY_FORM_FIELDS_REGEX, retVal):
|
||||||
item = match.group("result")
|
item = match.group("result")
|
||||||
if not any(_ in item for _ in IGNORE_PARAMETERS) and not re.search(ASP_NET_CONTROL_REGEX, item):
|
if not any(_ in item for _ in IGNORE_PARAMETERS) and not re.search(ASP_NET_CONTROL_REGEX, item):
|
||||||
@@ -223,25 +226,25 @@ def _saveToResultsFile():
 return

 results = {}
-techniques = dict(map(lambda x: (x[1], x[0]), getPublicTypeMembers(PAYLOAD.TECHNIQUE)))
+techniques = dict((_[1], _[0]) for _ in getPublicTypeMembers(PAYLOAD.TECHNIQUE))

-for inj in kb.injections:
-if inj.place is None or inj.parameter is None:
+for injection in kb.injections + kb.falsePositives:
+if injection.place is None or injection.parameter is None:
 continue

-key = (inj.place, inj.parameter)
+key = (injection.place, injection.parameter, ';'.join(injection.notes))
 if key not in results:
 results[key] = []

-results[key].extend(inj.data.keys())
+results[key].extend(injection.data.keys())

 for key, value in results.items():
-place, parameter = key
+place, parameter, notes = key
-line = "%s,%s,%s,%s%s" % (safeCSValue(kb.originalUrls.get(conf.url) or conf.url), place, parameter, "".join(map(lambda x: techniques[x][0].upper(), sorted(value))), os.linesep)
+line = "%s,%s,%s,%s,%s%s" % (safeCSValue(kb.originalUrls.get(conf.url) or conf.url), place, parameter, "".join(techniques[_][0].upper() for _ in sorted(value)), notes, os.linesep)
 conf.resultsFP.writelines(line)

 if not results:
-line = "%s,,,%s" % (conf.url, os.linesep)
+line = "%s,,,,%s" % (conf.url, os.linesep)
 conf.resultsFP.writelines(line)

 def start():
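_saveToResultsFile() now writes one extra CSV column for the injection notes (e.g. the false-positive marker) and groups findings by (place, parameter, notes), also including entries from kb.falsePositives. A standalone sketch of producing such lines with the csv module; the header row and the sample data are illustrative assumptions, not sqlmap's exact output format:

```python
import csv
import io

def write_results(rows):
    """rows: iterable of (url, place, parameter, technique_letters, notes)."""
    buffer = io.StringIO()
    writer = csv.writer(buffer)
    writer.writerow(("Target URL", "Place", "Parameter", "Technique(s)", "Note(s)"))
    for row in rows:
        writer.writerow(row)
    return buffer.getvalue()

print(write_results([
    ("http://example.com/vuln.php?id=1", "GET", "id", "BEUT", ""),
    ("http://example.com/other.php?q=x", "GET", "q", "B", "false positive or unexploitable"),
]))
```

Using the csv module (rather than manual string joins) also takes care of quoting values that themselves contain commas, which is what safeCSValue() handles in the hunk above.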
@@ -275,6 +278,21 @@ def start():

 for targetUrl, targetMethod, targetData, targetCookie, targetHeaders in kb.targets:
 try:
+if conf.checkInternet:
+infoMsg = "[INFO] checking for Internet connection"
+logger.info(infoMsg)

+if not checkInternet():
+warnMsg = "[%s] [WARNING] no connection detected" % time.strftime("%X")
+dataToStdout(warnMsg)

+while not checkInternet():
+dataToStdout('.')
+time.sleep(5)

+dataToStdout("\n")

 conf.url = targetUrl
 conf.method = targetMethod.upper() if targetMethod else targetMethod
 conf.data = targetData
@@ -304,7 +322,9 @@ def start():
|
|||||||
message = "SQL injection vulnerability has already been detected "
|
message = "SQL injection vulnerability has already been detected "
|
||||||
message += "against '%s'. Do you want to skip " % conf.hostname
|
message += "against '%s'. Do you want to skip " % conf.hostname
|
||||||
message += "further tests involving it? [Y/n]"
|
message += "further tests involving it? [Y/n]"
|
||||||
kb.skipVulnHost = readInput(message, default="Y").upper() != 'N'
|
|
||||||
|
kb.skipVulnHost = readInput(message, default='Y', boolean=True)
|
||||||
|
|
||||||
testSqlInj = not kb.skipVulnHost
|
testSqlInj = not kb.skipVulnHost
|
||||||
|
|
||||||
if not testSqlInj:
|
if not testSqlInj:
|
||||||
@@ -318,7 +338,7 @@ def start():
|
|||||||
if conf.forms and conf.method:
|
if conf.forms and conf.method:
|
||||||
message = "[#%d] form:\n%s %s" % (hostCount, conf.method, targetUrl)
|
message = "[#%d] form:\n%s %s" % (hostCount, conf.method, targetUrl)
|
||||||
else:
|
else:
|
||||||
message = "URL %d:\n%s %s%s" % (hostCount, HTTPMETHOD.GET, targetUrl, " (PageRank: %s)" % get_pagerank(targetUrl) if conf.googleDork and conf.pageRank else "")
|
message = "URL %d:\n%s %s" % (hostCount, HTTPMETHOD.GET, targetUrl)
|
||||||
|
|
||||||
if conf.cookie:
|
if conf.cookie:
|
||||||
message += "\nCookie: %s" % conf.cookie
|
message += "\nCookie: %s" % conf.cookie
|
||||||
@@ -331,9 +351,13 @@ def start():
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
message += "\ndo you want to test this form? [Y/n/q] "
|
message += "\ndo you want to test this form? [Y/n/q] "
|
||||||
test = readInput(message, default="Y")
|
choice = readInput(message, default='Y').upper()
|
||||||
|
|
||||||
if not test or test[0] in ("y", "Y"):
|
if choice == 'N':
|
||||||
|
continue
|
||||||
|
elif choice == 'Q':
|
||||||
|
break
|
||||||
|
else:
|
||||||
if conf.method != HTTPMETHOD.GET:
|
if conf.method != HTTPMETHOD.GET:
|
||||||
message = "Edit %s data [default: %s]%s: " % (conf.method, urlencode(conf.data) if conf.data else "None", " (Warning: blank fields detected)" if conf.data and extractRegexResult(EMPTY_FORM_FIELDS_REGEX, conf.data) else "")
|
message = "Edit %s data [default: %s]%s: " % (conf.method, urlencode(conf.data) if conf.data else "None", " (Warning: blank fields detected)" if conf.data and extractRegexResult(EMPTY_FORM_FIELDS_REGEX, conf.data) else "")
|
||||||
conf.data = readInput(message, default=conf.data)
|
conf.data = readInput(message, default=conf.data)
|
||||||
@@ -351,21 +375,14 @@ def start():
|
|||||||
|
|
||||||
parseTargetUrl()
|
parseTargetUrl()
|
||||||
|
|
||||||
elif test[0] in ("n", "N"):
|
|
||||||
continue
|
|
||||||
elif test[0] in ("q", "Q"):
|
|
||||||
break
|
|
||||||
|
|
||||||
else:
|
else:
|
||||||
message += "\ndo you want to test this URL? [Y/n/q]"
|
message += "\ndo you want to test this URL? [Y/n/q]"
|
||||||
test = readInput(message, default="Y")
|
choice = readInput(message, default='Y').upper()
|
||||||
|
|
||||||
if not test or test[0] in ("y", "Y"):
|
if choice == 'N':
|
||||||
pass
|
|
||||||
elif test[0] in ("n", "N"):
|
|
||||||
dataToStdout(os.linesep)
|
dataToStdout(os.linesep)
|
||||||
continue
|
continue
|
||||||
elif test[0] in ("q", "Q"):
|
elif choice == 'Q':
|
||||||
break
|
break
|
||||||
|
|
||||||
infoMsg = "testing URL '%s'" % targetUrl
|
infoMsg = "testing URL '%s'" % targetUrl
|
||||||
@@ -463,7 +480,13 @@ def start():
|
|||||||
infoMsg = "skipping randomizing %s parameter '%s'" % (paramType, parameter)
|
infoMsg = "skipping randomizing %s parameter '%s'" % (paramType, parameter)
|
||||||
logger.info(infoMsg)
|
logger.info(infoMsg)
|
||||||
|
|
||||||
elif parameter in conf.skip:
|
elif parameter in conf.skip or kb.postHint and parameter.split(' ')[-1] in conf.skip:
|
||||||
|
testSqlInj = False
|
||||||
|
|
||||||
|
infoMsg = "skipping %s parameter '%s'" % (paramType, parameter)
|
||||||
|
logger.info(infoMsg)
|
||||||
|
|
||||||
|
elif conf.paramExclude and (re.search(conf.paramExclude, parameter, re.I) or kb.postHint and re.search(conf.paramExclude, parameter.split(' ')[-1], re.I)):
|
||||||
testSqlInj = False
|
testSqlInj = False
|
||||||
|
|
||||||
infoMsg = "skipping %s parameter '%s'" % (paramType, parameter)
|
infoMsg = "skipping %s parameter '%s'" % (paramType, parameter)
|
||||||
@@ -486,7 +509,7 @@ def start():
|
|||||||
check = checkDynParam(place, parameter, value)
|
check = checkDynParam(place, parameter, value)
|
||||||
|
|
||||||
if not check:
|
if not check:
|
||||||
warnMsg = "%s parameter '%s' does not appear dynamic" % (paramType, parameter)
|
warnMsg = "%s parameter '%s' does not appear to be dynamic" % (paramType, parameter)
|
||||||
logger.warn(warnMsg)
|
logger.warn(warnMsg)
|
||||||
|
|
||||||
if conf.skipStatic:
|
if conf.skipStatic:
|
||||||
@@ -520,24 +543,30 @@ def start():
|
|||||||
|
|
||||||
injection = checkSqlInjection(place, parameter, value)
|
injection = checkSqlInjection(place, parameter, value)
|
||||||
proceed = not kb.endDetection
|
proceed = not kb.endDetection
|
||||||
|
injectable = False
|
||||||
|
|
||||||
if injection is not None and injection.place is not None:
|
if getattr(injection, "place", None) is not None:
|
||||||
kb.injections.append(injection)
|
if NOTE.FALSE_POSITIVE_OR_UNEXPLOITABLE in injection.notes:
|
||||||
|
kb.falsePositives.append(injection)
|
||||||
|
else:
|
||||||
|
injectable = True
|
||||||
|
|
||||||
# In case when user wants to end detection phase (Ctrl+C)
|
kb.injections.append(injection)
|
||||||
if not proceed:
|
|
||||||
break
|
|
||||||
|
|
||||||
msg = "%s parameter '%s' " % (injection.place, injection.parameter)
|
# In case when user wants to end detection phase (Ctrl+C)
|
||||||
msg += "is vulnerable. Do you want to keep testing the others (if any)? [y/N] "
|
if not proceed:
|
||||||
test = readInput(msg, default="N")
|
break
|
||||||
|
|
||||||
if test[0] not in ("y", "Y"):
|
msg = "%s parameter '%s' " % (injection.place, injection.parameter)
|
||||||
proceed = False
|
msg += "is vulnerable. Do you want to keep testing the others (if any)? [y/N] "
|
||||||
paramKey = (conf.hostname, conf.path, None, None)
|
|
||||||
kb.testedParams.add(paramKey)
|
if not readInput(msg, default='N', boolean=True):
|
||||||
else:
|
proceed = False
|
||||||
warnMsg = "%s parameter '%s' is not " % (paramType, parameter)
|
paramKey = (conf.hostname, conf.path, None, None)
|
||||||
|
kb.testedParams.add(paramKey)
|
||||||
|
|
||||||
|
if not injectable:
|
||||||
|
warnMsg = "%s parameter '%s' does not seem to be " % (paramType, parameter)
|
||||||
warnMsg += "injectable"
|
warnMsg += "injectable"
|
||||||
logger.warn(warnMsg)
|
logger.warn(warnMsg)
|
||||||
|
|
||||||
@@ -585,24 +614,24 @@ def start():
|
|||||||
if not conf.string and not conf.notString and not conf.regexp:
|
if not conf.string and not conf.notString and not conf.regexp:
|
||||||
errMsg += " Also, you can try to rerun by providing "
|
errMsg += " Also, you can try to rerun by providing "
|
||||||
errMsg += "either a valid value for option '--string' "
|
errMsg += "either a valid value for option '--string' "
|
||||||
errMsg += "(or '--regexp')"
|
errMsg += "(or '--regexp')."
|
||||||
elif conf.string:
|
elif conf.string:
|
||||||
errMsg += " Also, you can try to rerun by providing a "
|
errMsg += " Also, you can try to rerun by providing a "
|
||||||
errMsg += "valid value for option '--string' as perhaps the string you "
|
errMsg += "valid value for option '--string' as perhaps the string you "
|
||||||
errMsg += "have chosen does not match "
|
errMsg += "have chosen does not match "
|
||||||
errMsg += "exclusively True responses"
|
errMsg += "exclusively True responses."
|
||||||
elif conf.regexp:
|
elif conf.regexp:
|
||||||
errMsg += " Also, you can try to rerun by providing a "
|
errMsg += " Also, you can try to rerun by providing a "
|
||||||
errMsg += "valid value for option '--regexp' as perhaps the regular "
|
errMsg += "valid value for option '--regexp' as perhaps the regular "
|
||||||
errMsg += "expression that you have chosen "
|
errMsg += "expression that you have chosen "
|
||||||
errMsg += "does not match exclusively True responses"
|
errMsg += "does not match exclusively True responses."
|
||||||
|
|
||||||
if not conf.tamper:
|
if not conf.tamper:
|
||||||
errMsg += " If you suspect that there is some kind of protection mechanism "
|
errMsg += " If you suspect that there is some kind of protection mechanism "
|
||||||
errMsg += "involved (e.g. WAF) maybe you could retry "
|
errMsg += "involved (e.g. WAF) maybe you could retry "
|
||||||
errMsg += "with an option '--tamper' (e.g. '--tamper=space2comment')"
|
errMsg += "with an option '--tamper' (e.g. '--tamper=space2comment')"
|
||||||
|
|
||||||
raise SqlmapNotVulnerableException(errMsg)
|
raise SqlmapNotVulnerableException(errMsg.rstrip('.'))
|
||||||
else:
|
else:
|
||||||
# Flush the flag
|
# Flush the flag
|
||||||
kb.testMode = False
|
kb.testMode = False
|
||||||
@@ -615,9 +644,7 @@ def start():
|
|||||||
if kb.injection.place is not None and kb.injection.parameter is not None:
|
if kb.injection.place is not None and kb.injection.parameter is not None:
|
||||||
if conf.multipleTargets:
|
if conf.multipleTargets:
|
||||||
message = "do you want to exploit this SQL injection? [Y/n] "
|
message = "do you want to exploit this SQL injection? [Y/n] "
|
||||||
exploit = readInput(message, default="Y")
|
condition = readInput(message, default='Y', boolean=True)
|
||||||
|
|
||||||
condition = not exploit or exploit[0] in ("y", "Y")
|
|
||||||
else:
|
else:
|
||||||
condition = True
|
condition = True
|
||||||
|
|
||||||
@@ -630,13 +657,11 @@ def start():
|
|||||||
logger.warn(warnMsg)
|
logger.warn(warnMsg)
|
||||||
|
|
||||||
message = "do you want to skip to the next target in list? [Y/n/q]"
|
message = "do you want to skip to the next target in list? [Y/n/q]"
|
||||||
test = readInput(message, default="Y")
|
choice = readInput(message, default='Y').upper()
|
||||||
|
|
||||||
if not test or test[0] in ("y", "Y"):
|
if choice == 'N':
|
||||||
pass
|
|
||||||
elif test[0] in ("n", "N"):
|
|
||||||
return False
|
return False
|
||||||
elif test[0] in ("q", "Q"):
|
elif choice == 'Q':
|
||||||
raise SqlmapUserQuitException
|
raise SqlmapUserQuitException
|
||||||
else:
|
else:
|
||||||
raise
|
raise
|
||||||
@@ -651,8 +676,10 @@ def start():
|
|||||||
errMsg = getSafeExString(ex)
|
errMsg = getSafeExString(ex)
|
||||||
|
|
||||||
if conf.multipleTargets:
|
if conf.multipleTargets:
|
||||||
|
_saveToResultsFile()
|
||||||
|
|
||||||
errMsg += ", skipping to the next %s" % ("form" if conf.forms else "URL")
|
errMsg += ", skipping to the next %s" % ("form" if conf.forms else "URL")
|
||||||
logger.error(errMsg)
|
logger.error(errMsg.lstrip(", "))
|
||||||
else:
|
else:
|
||||||
logger.critical(errMsg)
|
logger.critical(errMsg)
|
||||||
return False
|
return False
|
||||||
@@ -669,9 +696,10 @@ def start():
|
|||||||
if kb.dataOutputFlag and not conf.multipleTargets:
|
if kb.dataOutputFlag and not conf.multipleTargets:
|
||||||
logger.info("fetched data logged to text files under '%s'" % conf.outputPath)
|
logger.info("fetched data logged to text files under '%s'" % conf.outputPath)
|
||||||
|
|
||||||
if conf.multipleTargets and conf.resultsFilename:
|
if conf.multipleTargets:
|
||||||
infoMsg = "you can find results of scanning in multiple targets "
|
if conf.resultsFilename:
|
||||||
infoMsg += "mode inside the CSV file '%s'" % conf.resultsFilename
|
infoMsg = "you can find results of scanning in multiple targets "
|
||||||
logger.info(infoMsg)
|
infoMsg += "mode inside the CSV file '%s'" % conf.resultsFilename
|
||||||
|
logger.info(infoMsg)
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|||||||
@@ -1,13 +1,13 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

 from lib.core.common import Backend
 from lib.core.data import conf
-from lib.core.data import logger
+from lib.core.data import kb
 from lib.core.dicts import DBMS_DICT
 from lib.core.enums import DBMS
 from lib.core.settings import MSSQL_ALIASES
@@ -21,6 +21,7 @@ from lib.core.settings import MAXDB_ALIASES
 from lib.core.settings import SYBASE_ALIASES
 from lib.core.settings import DB2_ALIASES
 from lib.core.settings import HSQLDB_ALIASES
+from lib.core.settings import INFORMIX_ALIASES
 from lib.utils.sqlalchemy import SQLAlchemy

 from plugins.dbms.mssqlserver import MSSQLServerMap
@@ -45,6 +46,8 @@ from plugins.dbms.db2 import DB2Map
 from plugins.dbms.db2.connector import Connector as DB2Conn
 from plugins.dbms.hsqldb import HSQLDBMap
 from plugins.dbms.hsqldb.connector import Connector as HSQLDBConn
+from plugins.dbms.informix import InformixMap
+from plugins.dbms.informix.connector import Connector as InformixConn

 def setHandler():
     """
@@ -64,6 +67,7 @@ def setHandler():
         (DBMS.SYBASE, SYBASE_ALIASES, SybaseMap, SybaseConn),
         (DBMS.DB2, DB2_ALIASES, DB2Map, DB2Conn),
         (DBMS.HSQLDB, HSQLDB_ALIASES, HSQLDBMap, HSQLDBConn),
+        (DBMS.INFORMIX, INFORMIX_ALIASES, InformixMap, InformixConn),
     ]

     _ = max(_ if (Backend.getIdentifiedDbms() or "").lower() in _[1] else None for _ in items)
@@ -72,18 +76,10 @@ def setHandler():
         items.insert(0, _)

     for dbms, aliases, Handler, Connector in items:
-        if conf.dbms and conf.dbms.lower() != dbms and conf.dbms.lower() not in aliases:
-            debugMsg = "skipping test for %s" % dbms
-            logger.debug(debugMsg)
-            continue
-
         handler = Handler()
         conf.dbmsConnector = Connector()

         if conf.direct:
-            logger.debug("forcing timeout to 10 seconds")
-            conf.timeout = 10
-
             dialect = DBMS_DICT[dbms][3]

             if dialect:
@@ -101,7 +97,12 @@ def setHandler():
                 conf.dbmsConnector.connect()

         if handler.checkDbms():
-            conf.dbmsHandler = handler
+            if kb.resolutionDbms:
+                conf.dbmsHandler = max(_ for _ in items if _[0] == kb.resolutionDbms)[2]()
+            else:
+                conf.dbmsHandler = handler
+
+            conf.dbmsHandler._dbms = dbms
             break
         else:
             conf.dbmsConnector = None
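The hunk above changes how the concrete DBMS handler gets picked: the identified back-end is moved to the front of the candidate list, each entry is probed, and the resolved handler (possibly a "fork" resolved via kb.resolutionDbms) is instantiated. As a rough, self-contained sketch of that alias-based selection pattern — the class names and registry below are invented for illustration, not sqlmap's real plugin classes:

```python
# Minimal sketch of alias-based handler selection, assuming a simple
# (name, aliases, Handler) registry; all names here are illustrative only.

class MySQLHandler(object):
    def check(self):
        return True  # pretend fingerprinting succeeded

class PostgreSQLHandler(object):
    def check(self):
        return False

REGISTRY = [
    ("MySQL", ("mysql", "mariadb"), MySQLHandler),
    ("PostgreSQL", ("postgresql", "pgsql", "psql"), PostgreSQLHandler),
]

def set_handler(identified=None):
    # Move a previously identified DBMS to the front so it is probed first,
    # mirroring the items.insert(0, _) step in the diff above.
    items = sorted(REGISTRY, key=lambda item: (identified or "").lower() not in item[1])

    for name, aliases, Handler in items:
        handler = Handler()
        if handler.check():
            return name, handler

    return None, None

print(set_handler("mariadb"))  # ('MySQL', <MySQLHandler instance>)
```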
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -35,12 +35,15 @@ from lib.core.enums import PLACE
 from lib.core.enums import POST_HINT
 from lib.core.exception import SqlmapNoneDataException
 from lib.core.settings import BOUNDARY_BACKSLASH_MARKER
+from lib.core.settings import BOUNDED_INJECTION_MARKER
 from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
 from lib.core.settings import DEFAULT_COOKIE_DELIMITER
 from lib.core.settings import DEFAULT_GET_POST_DELIMITER
 from lib.core.settings import GENERIC_SQL_COMMENT
+from lib.core.settings import NULL
 from lib.core.settings import PAYLOAD_DELIMITER
 from lib.core.settings import REPLACEMENT_MARKER
+from lib.core.settings import SLEEP_TIME_MARKER
 from lib.core.unescaper import unescaper

 class Agent(object):
@@ -60,7 +63,7 @@ class Agent(object):

         if Backend.getIdentifiedDbms() in (DBMS.ORACLE,):  # non-standard object(s) make problems to a database connector while returned (e.g. XMLTYPE)
             _, _, _, _, _, _, fieldsToCastStr, _ = self.getFields(query)
-            for field in fieldsToCastStr.split(","):
+            for field in fieldsToCastStr.split(','):
                 query = query.replace(field, self.nullAndCastField(field))

         if kb.tamperFunctions:
@@ -95,9 +98,12 @@ class Agent(object):
             paramDict = conf.paramDict[place]
             origValue = getUnicode(paramDict[parameter])

-            if place == PLACE.URI:
+            if place == PLACE.URI or BOUNDED_INJECTION_MARKER in origValue:
                 paramString = origValue
-                origValue = origValue.split(CUSTOM_INJECTION_MARK_CHAR)[0]
+                if place == PLACE.URI:
+                    origValue = origValue.split(CUSTOM_INJECTION_MARK_CHAR)[0]
+                else:
+                    origValue = filter(None, (re.search(_, origValue.split(BOUNDED_INJECTION_MARKER)[0]) for _ in (r"\w+\Z", r"[^\"'><]+\Z", r"[^ ]+\Z")))[0].group(0)
                 origValue = origValue[origValue.rfind('/') + 1:]
                 for char in ('?', '=', ':'):
                     if char in origValue:
@@ -115,7 +121,7 @@ class Agent(object):
             elif place == PLACE.CUSTOM_HEADER:
                 paramString = origValue
                 origValue = origValue.split(CUSTOM_INJECTION_MARK_CHAR)[0]
-                origValue = origValue[origValue.index(',') + 1:]
+                origValue = origValue[origValue.find(',') + 1:]
                 match = re.search(r"([^;]+)=(?P<value>[^;]+);?\Z", origValue)
                 if match:
                     origValue = match.group("value")
@@ -161,6 +167,9 @@ class Agent(object):
                 newValue = newValue.replace(CUSTOM_INJECTION_MARK_CHAR, REPLACEMENT_MARKER)
                 retVal = paramString.replace(_, self.addPayloadDelimiters(newValue))
                 retVal = retVal.replace(CUSTOM_INJECTION_MARK_CHAR, "").replace(REPLACEMENT_MARKER, CUSTOM_INJECTION_MARK_CHAR)
+            elif BOUNDED_INJECTION_MARKER in paramDict[parameter]:
+                _ = "%s%s" % (origValue, BOUNDED_INJECTION_MARKER)
+                retVal = "%s=%s" % (re.sub(r" (\#\d\*|\(.+\))\Z", "", parameter), paramString.replace(_, self.addPayloadDelimiters(newValue)))
             elif place in (PLACE.USER_AGENT, PLACE.REFERER, PLACE.HOST):
                 retVal = paramString.replace(origValue, self.addPayloadDelimiters(newValue))
             else:
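The hunks above introduce a "bounded" injection point: the payload lands inside the original parameter value at a marker, rather than being appended to it. A rough sketch of that placement idea — BOUNDED_MARKER and the helper below are stand-ins for illustration, not sqlmap's actual constants:

```python
# Sketch of "bounded" payload placement: the injection point sits inside the
# original value, so the payload replaces a marker instead of being appended.

import re

BOUNDED_MARKER = "__BOUNDED__"  # stand-in for BOUNDED_INJECTION_MARKER

def place_payload(param, orig_value, payload):
    if BOUNDED_MARKER in orig_value:
        # keep everything around the marker and drop any "#1*"-style suffix
        # that may have been appended to the parameter name internally
        clean_param = re.sub(r" (\#\d\*|\(.+\))\Z", "", param)
        return "%s=%s" % (clean_param, orig_value.replace(BOUNDED_MARKER, payload))
    return "%s=%s" % (param, payload)

print(place_payload("id #1*", "abc%sdef" % BOUNDED_MARKER, "1 AND 1=1"))
# id=abc1 AND 1=1def
```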
@@ -273,7 +282,7 @@ class Agent(object):
         where = kb.injection.data[kb.technique].where if where is None else where
         comment = kb.injection.data[kb.technique].comment if comment is None else comment

-        if Backend.getIdentifiedDbms() == DBMS.ACCESS and comment == GENERIC_SQL_COMMENT:
+        if Backend.getIdentifiedDbms() == DBMS.ACCESS and any((comment or "").startswith(_) for _ in ("--", "[GENERIC_SQL_COMMENT]")):
             comment = queries[DBMS.ACCESS].comment.query

         if comment is not None:
@@ -287,7 +296,7 @@ class Agent(object):
         elif suffix and not comment:
             expression += suffix.replace('\\', BOUNDARY_BACKSLASH_MARKER)

-        return re.sub(r"(?s);\W*;", ";", expression)
+        return re.sub(r";\W*;", ";", expression)

     def cleanupPayload(self, payload, origValue=None):
         if payload is None:
@@ -296,7 +305,7 @@ class Agent(object):
         _ = (
             ("[DELIMITER_START]", kb.chars.start), ("[DELIMITER_STOP]", kb.chars.stop),\
             ("[AT_REPLACE]", kb.chars.at), ("[SPACE_REPLACE]", kb.chars.space), ("[DOLLAR_REPLACE]", kb.chars.dollar),\
-            ("[HASH_REPLACE]", kb.chars.hash_),
+            ("[HASH_REPLACE]", kb.chars.hash_), ("[GENERIC_SQL_COMMENT]", GENERIC_SQL_COMMENT)
         )
         payload = reduce(lambda x, y: x.replace(y[0], y[1]), _, payload)

@@ -307,6 +316,7 @@ class Agent(object):
             payload = payload.replace(_, randomStr())

         if origValue is not None and "[ORIGVALUE]" in payload:
+            origValue = getUnicode(origValue)
             payload = getUnicode(payload).replace("[ORIGVALUE]", origValue if origValue.isdigit() else unescaper.escape("'%s'" % origValue))

         if "[INFERENCE]" in payload:
@@ -335,7 +345,7 @@ class Agent(object):
         """

         if payload:
-            payload = payload.replace("[SLEEPTIME]", str(conf.timeSec))
+            payload = payload.replace(SLEEP_TIME_MARKER, str(conf.timeSec))

         return payload

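The cleanupPayload() hunks above all follow the same pattern: bracketed placeholders in the payload template ("[DELIMITER_START]", "[SLEEPTIME]", now also "[GENERIC_SQL_COMMENT]") are rewritten to runtime values with a single reduce() pass. A minimal sketch of that substitution pattern, with example marker names and values (not the real kb.chars set):

```python
# Sketch of placeholder substitution via reduce(): every "[MARKER]" in a
# payload template is mapped to a runtime value. Marker names and defaults
# below are illustrative assumptions.

from functools import reduce  # built-in in Python 2, imported here for Python 3

def cleanup(payload, time_sec=5, start="QqZz", stop="XxYy"):
    replacements = (
        ("[DELIMITER_START]", start),
        ("[DELIMITER_STOP]", stop),
        ("[SLEEPTIME]", str(time_sec)),
    )
    # apply each (marker, value) pair in turn
    return reduce(lambda acc, pair: acc.replace(pair[0], pair[1]), replacements, payload)

print(cleanup("AND SLEEP([SLEEPTIME])"))                      # AND SLEEP(5)
print(cleanup("'[DELIMITER_START]'||x||'[DELIMITER_STOP]'"))  # 'QqZz'||x||'XxYy'
```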
@@ -443,7 +453,7 @@ class Agent(object):
         @rtype: C{str}
         """

-        if not Backend.getDbms():
+        if not Backend.getIdentifiedDbms():
             return fields

         if fields.startswith("(CASE") or fields.startswith("(IIF") or fields.startswith("SUBSTR") or fields.startswith("MID(") or re.search(r"\A'[^']+'\Z", fields):
@@ -478,7 +488,7 @@ class Agent(object):
         @rtype: C{str}
         """

-        prefixRegex = r"(?:\s+(?:FIRST|SKIP|LIMIT \d+)\s+\d+)*"
+        prefixRegex = r"(?:\s+(?:FIRST|SKIP|LIMIT(?: \d+)?)\s+\d+)*"
         fieldsSelectTop = re.search(r"\ASELECT\s+TOP\s+[\d]+\s+(.+?)\s+FROM", query, re.I)
         fieldsSelectRownum = re.search(r"\ASELECT\s+([^()]+?),\s*ROWNUM AS LIMIT FROM", query, re.I)
         fieldsSelectDistinct = re.search(r"\ASELECT%s\s+DISTINCT\((.+?)\)\s+FROM" % prefixRegex, query, re.I)
@@ -499,26 +509,28 @@ class Agent(object):
         if fieldsSubstr:
             fieldsToCastStr = query
         elif fieldsMinMaxstr:
-            fieldsToCastStr = fieldsMinMaxstr.groups()[0]
+            fieldsToCastStr = fieldsMinMaxstr.group(1)
         elif fieldsExists:
             if fieldsSelect:
-                fieldsToCastStr = fieldsSelect.groups()[0]
+                fieldsToCastStr = fieldsSelect.group(1)
         elif fieldsSelectTop:
-            fieldsToCastStr = fieldsSelectTop.groups()[0]
+            fieldsToCastStr = fieldsSelectTop.group(1)
         elif fieldsSelectRownum:
-            fieldsToCastStr = fieldsSelectRownum.groups()[0]
+            fieldsToCastStr = fieldsSelectRownum.group(1)
        elif fieldsSelectDistinct:
             if Backend.getDbms() in (DBMS.HSQLDB,):
                 fieldsToCastStr = fieldsNoSelect
             else:
-                fieldsToCastStr = fieldsSelectDistinct.groups()[0]
+                fieldsToCastStr = fieldsSelectDistinct.group(1)
         elif fieldsSelectCase:
-            fieldsToCastStr = fieldsSelectCase.groups()[0]
+            fieldsToCastStr = fieldsSelectCase.group(1)
         elif fieldsSelectFrom:
             fieldsToCastStr = query[:unArrayizeValue(_)] if _ else query
             fieldsToCastStr = re.sub(r"\ASELECT%s\s+" % prefixRegex, "", fieldsToCastStr)
         elif fieldsSelect:
-            fieldsToCastStr = fieldsSelect.groups()[0]
+            fieldsToCastStr = fieldsSelect.group(1)

+        fieldsToCastStr = fieldsToCastStr or ""
+
         # Function
         if re.search("\A\w+\(.*\)", fieldsToCastStr, re.I) or (fieldsSelectCase and "WHEN use" not in query) or fieldsSubstr:
@@ -659,24 +671,23 @@ class Agent(object):
             concatenatedQuery = "'%s'&%s&'%s'" % (kb.chars.start, concatenatedQuery, kb.chars.stop)

         else:
-            warnMsg = "applying generic concatenation with double pipes ('||')"
+            warnMsg = "applying generic concatenation (CONCAT)"
             singleTimeWarnMessage(warnMsg)

             if fieldsExists:
-                concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1)
-                concatenatedQuery += "||'%s'" % kb.chars.stop
+                concatenatedQuery = concatenatedQuery.replace("SELECT ", "CONCAT(CONCAT('%s'," % kb.chars.start, 1)
+                concatenatedQuery += "),'%s')" % kb.chars.stop
             elif fieldsSelectCase:
-                concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||(SELECT " % kb.chars.start, 1)
-                concatenatedQuery += ")||'%s'" % kb.chars.stop
+                concatenatedQuery = concatenatedQuery.replace("SELECT ", "CONCAT(CONCAT('%s'," % kb.chars.start, 1)
+                concatenatedQuery += "),'%s')" % kb.chars.stop
             elif fieldsSelectFrom:
-                concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1)
                 _ = unArrayizeValue(zeroDepthSearch(concatenatedQuery, " FROM "))
-                concatenatedQuery = "%s||'%s'%s" % (concatenatedQuery[:_], kb.chars.stop, concatenatedQuery[_:])
+                concatenatedQuery = "%s),'%s')%s" % (concatenatedQuery[:_].replace("SELECT ", "CONCAT(CONCAT('%s'," % kb.chars.start, 1), kb.chars.stop, concatenatedQuery[_:])
             elif fieldsSelect:
-                concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1)
-                concatenatedQuery += "||'%s'" % kb.chars.stop
+                concatenatedQuery = concatenatedQuery.replace("SELECT ", "CONCAT(CONCAT('%s'," % kb.chars.start, 1)
+                concatenatedQuery += "),'%s')" % kb.chars.stop
             elif fieldsNoSelect:
-                concatenatedQuery = "'%s'||%s||'%s'" % (kb.chars.start, concatenatedQuery, kb.chars.stop)
+                concatenatedQuery = "CONCAT(CONCAT('%s',%s),'%s')" % (kb.chars.start, concatenatedQuery, kb.chars.stop)

         return concatenatedQuery

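The hunk above swaps the generic fallback from the '||' string-concatenation operator to nested CONCAT() calls. A tiny, self-contained sketch of the two styles being contrasted (delimiter values are made-up examples):

```python
# Two ways to wrap a subquery's output between known delimiter strings:
# '||' works on Oracle/PostgreSQL-style backends, while nested CONCAT() is a
# more portable fallback (e.g. MySQL in its default SQL mode).

def concat_pipes(start, query, stop):
    return "'%s'||(%s)||'%s'" % (start, query, stop)

def concat_concat(start, query, stop):
    return "CONCAT(CONCAT('%s',(%s)),'%s')" % (start, query, stop)

QUERY = "SELECT user FROM dual"
print(concat_pipes("QqZz", QUERY, "XxYy"))
print(concat_concat("QqZz", QUERY, "XxYy"))
```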
@@ -713,8 +724,11 @@ class Agent(object):

         if conf.uFrom:
             fromTable = " FROM %s" % conf.uFrom
-        else:
-            fromTable = fromTable or FROM_DUMMY_TABLE.get(Backend.getIdentifiedDbms(), "")
+        elif not fromTable:
+            if kb.tableFrom:
+                fromTable = " FROM %s" % kb.tableFrom
+            else:
+                fromTable = FROM_DUMMY_TABLE.get(Backend.getIdentifiedDbms(), "")

         if query.startswith("SELECT "):
             query = query[len("SELECT "):]
@@ -747,6 +761,9 @@ class Agent(object):
             intoRegExp = intoRegExp.group(1)
             query = query[:query.index(intoRegExp)]

+        position = 0
+        char = NULL
+
         for element in xrange(0, count):
             if element > 0:
                 unionQuery += ','
@@ -841,7 +858,7 @@ class Agent(object):
             if expression.find(queries[Backend.getIdentifiedDbms()].limitstring.query) > 0:
                 _ = expression.index(queries[Backend.getIdentifiedDbms()].limitstring.query)
             else:
-                _ = expression.index("LIMIT ")
+                _ = re.search(r"\bLIMIT\b", expression, re.I).start()
             expression = expression[:_]

         elif Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE):
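The first hunk above refines the fromTable fallback: an explicitly forced table wins, then a table already known from the current session, and only then the per-DBMS dummy table so that a bare "SELECT <expr>" stays syntactically valid. A short sketch of that decision chain, using a trimmed dummy-table map (the values themselves are the standard dummy tables per DBMS):

```python
# Sketch of the fromTable fallback order: forced table > known table > dummy table.

FROM_DUMMY = {
    "Oracle": " FROM DUAL",
    "IBM DB2": " FROM SYSIBM.SYSDUMMY1",
    "Firebird": " FROM RDB$DATABASE",
}

def forge_select(expression, dbms, forced_table=None, known_table=None):
    if forced_table:                      # user-supplied (e.g. --union-from)
        from_table = " FROM %s" % forced_table
    elif known_table:                     # table learned earlier in the session
        from_table = " FROM %s" % known_table
    else:
        from_table = FROM_DUMMY.get(dbms, "")
    return "SELECT %s%s" % (expression, from_table)

print(forge_select("banner", "Oracle"))                 # SELECT banner FROM DUAL
print(forge_select("1", "MySQL"))                       # SELECT 1
print(forge_select("1", "MySQL", known_table="users"))  # SELECT 1 FROM users
```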
@@ -985,12 +1002,13 @@ class Agent(object):

     def forgeQueryOutputLength(self, expression):
         lengthQuery = queries[Backend.getIdentifiedDbms()].length.query
-        select = re.search("\ASELECT\s+", expression, re.I)
-        selectTopExpr = re.search("\ASELECT\s+TOP\s+[\d]+\s+(.+?)\s+FROM", expression, re.I)
+        select = re.search(r"\ASELECT\s+", expression, re.I)
+        selectTopExpr = re.search(r"\ASELECT\s+TOP\s+[\d]+\s+(.+?)\s+FROM", expression, re.I)
+        selectMinMaxExpr = re.search(r"\ASELECT\s+(MIN|MAX)\(.+?\)\s+FROM", expression, re.I)

         _, _, _, _, _, _, fieldsStr, _ = self.getFields(expression)

-        if selectTopExpr:
+        if selectTopExpr or selectMinMaxExpr:
             lengthExpr = lengthQuery % ("(%s)" % expression)
         elif select:
             lengthExpr = expression.replace(fieldsStr, lengthQuery % fieldsStr, 1)
@@ -1062,5 +1080,20 @@ class Agent(object):

         return query

+    def whereQuery(self, query):
+        if conf.dumpWhere and query:
+            prefix, suffix = query.split(" ORDER BY ") if " ORDER BY " in query else (query, "")
+
+            if "%s)" % conf.tbl.upper() in prefix.upper():
+                prefix = re.sub(r"(?i)%s\)" % re.escape(conf.tbl), "%s WHERE %s)" % (conf.tbl, conf.dumpWhere), prefix)
+            elif re.search(r"(?i)\bWHERE\b", prefix):
+                prefix += " AND %s" % conf.dumpWhere
+            else:
+                prefix += " WHERE %s" % conf.dumpWhere
+
+            query = "%s ORDER BY %s" % (prefix, suffix) if suffix else prefix
+
+        return query
+
 # SQL agent
 agent = Agent()
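The newly added whereQuery() method folds a user-supplied WHERE condition into an existing dump query while keeping any ORDER BY intact. A standalone sketch of the same logic, written here as a free function under simplified assumptions (no conf object, table and condition passed in directly):

```python
# Sketch of folding a "--where"-style condition into an existing dump query.

import re

def where_query(query, where_clause, table):
    if not (where_clause and query):
        return query

    prefix, suffix = query.split(" ORDER BY ") if " ORDER BY " in query else (query, "")

    if "%s)" % table.upper() in prefix.upper():
        # query of the form "... FROM (... FROM table)" - inject inside the parenthesis
        prefix = re.sub(r"(?i)%s\)" % re.escape(table), "%s WHERE %s)" % (table, where_clause), prefix)
    elif re.search(r"(?i)\bWHERE\b", prefix):
        prefix += " AND %s" % where_clause
    else:
        prefix += " WHERE %s" % where_clause

    return "%s ORDER BY %s" % (prefix, suffix) if suffix else prefix

print(where_query("SELECT name, surname FROM users ORDER BY id", "id>3", "users"))
# SELECT name, surname FROM users WHERE id>3 ORDER BY id
```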
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -15,6 +15,7 @@ import os
 import sys
 import tempfile

+from lib.core.enums import MKSTEMP_PREFIX
 from lib.core.exception import SqlmapSystemException
 from lib.core.settings import BIGARRAY_CHUNK_SIZE

@@ -91,7 +92,7 @@ class BigArray(list):

     def _dump(self, chunk):
         try:
-            handle, filename = tempfile.mkstemp()
+            handle, filename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.BIG_ARRAY)
             self.filenames.add(filename)
             os.close(handle)
             with open(filename, "w+b") as fp:
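The change above only adds a prefix to tempfile.mkstemp(), but it is what makes the tool's scratch files recognizable (and cleanable) in the system temp directory. A small demonstration of the same call, with an assumed prefix string:

```python
# Demonstrates why a prefix is passed to tempfile.mkstemp(): the resulting
# file name is immediately attributable to its producer.

import os
import tempfile

handle, filename = tempfile.mkstemp(prefix="sqlmapbigarray-", suffix=".tmp")
os.close(handle)

print(filename)  # e.g. /tmp/sqlmapbigarray-a1b2c3.tmp
print(os.path.basename(filename).startswith("sqlmapbigarray-"))  # True

os.remove(filename)  # clean up the demo file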
(File diff suppressed because it is too large — Load Diff)

lib/core/convert.py — 28 changes (Normal file → Executable file)
@@ -1,17 +1,22 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

+try:
+    import cPickle as pickle
+except:
+    import pickle
+finally:
+    import pickle as picklePy
+
 import base64
 import json
-import pickle
 import re
 import StringIO
 import sys
-import types

 from lib.core.settings import IS_WIN
 from lib.core.settings import UNICODE_ENCODING
@@ -42,7 +47,7 @@ def base64pickle(value):
     Serializes (with pickle) and encodes to Base64 format supplied (binary) value

     >>> base64pickle('foobar')
-    'gAJVBmZvb2JhcnEALg=='
+    'gAJVBmZvb2JhcnEBLg=='
     """

     retVal = None
@@ -61,11 +66,11 @@ def base64pickle(value):

     return retVal

-def base64unpickle(value):
+def base64unpickle(value, unsafe=False):
     """
     Decodes value from Base64 to plain format and deserializes (with pickle) its content

-    >>> base64unpickle('gAJVBmZvb2JhcnEALg==')
+    >>> base64unpickle('gAJVBmZvb2JhcnEBLg==')
     'foobar'
     """

@@ -79,9 +84,12 @@ def base64unpickle(value):
         self.load_reduce()

     def loads(str):
-        file = StringIO.StringIO(str)
-        unpickler = pickle.Unpickler(file)
-        unpickler.dispatch[pickle.REDUCE] = _
+        f = StringIO.StringIO(str)
+        if unsafe:
+            unpickler = picklePy.Unpickler(f)
+            unpickler.dispatch[picklePy.REDUCE] = _
+        else:
+            unpickler = pickle.Unpickler(f)
         return unpickler.load()

     try:
@@ -161,7 +169,7 @@ def htmlunescape(value):
         codes = (('&lt;', '<'), ('&gt;', '>'), ('&quot;', '"'), ('&nbsp;', ' '), ('&amp;', '&'))
         retVal = reduce(lambda x, y: x.replace(y[0], y[1]), codes, retVal)
         try:
-            retVal = re.sub(r"&#x([^;]+);", lambda match: unichr(int(match.group(1), 16)), retVal)
+            retVal = re.sub(r"&#x([^ ;]+);", lambda match: unichr(int(match.group(1), 16)), retVal)
         except ValueError:
             pass
     return retVal
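The base64unpickle() change above is security-motivated: unless the caller explicitly passes unsafe=True, the pure-Python Unpickler is used with its REDUCE handling overridden, so a crafted pickle cannot call arbitrary callables during deserialization. A sketch of the same idea in modern Python 3 spelling (which uses a restricted Unpickler subclass rather than the Python 2 dispatch-table trick shown in the diff):

```python
# Sketch of "safe" unpickling: refuse every global lookup, which is what the
# REDUCE opcode needs in order to execute code on load.

import io
import os
import pickle


class RestrictedUnpickler(pickle.Unpickler):
    """Reject all global references, allowing only plain data structures."""

    def find_class(self, module, name):
        raise pickle.UnpicklingError("global '%s.%s' is forbidden" % (module, name))


def safe_loads(data):
    return RestrictedUnpickler(io.BytesIO(data)).load()


class Evil(object):
    # a classic malicious payload: asks pickle to call os.system on load
    def __reduce__(self):
        return (os.system, ("echo pwned",))


print(safe_loads(pickle.dumps({"user": "admin", "ids": [1, 2, 3]})))  # plain data is fine

try:
    safe_loads(pickle.dumps(Evil()))
except pickle.UnpicklingError as ex:
    print("blocked:", ex)
```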
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,15 +1,13 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

 import copy
 import types

-from lib.core.exception import SqlmapDataException
-
 class AttribDict(dict):
     """
     This class defines the sqlmap object, inheriting from Python data
@@ -43,7 +41,7 @@ class AttribDict(dict):
         try:
             return self.__getitem__(item)
         except KeyError:
-            raise SqlmapDataException("unable to access item '%s'" % item)
+            raise AttributeError("unable to access item '%s'" % item)

     def __setattr__(self, item, value):
         """
@@ -93,6 +91,7 @@ class InjectionDict(AttribDict):
         self.prefix = None
         self.suffix = None
        self.clause = None
+        self.notes = []  # Note: https://github.com/sqlmapproject/sqlmap/issues/1888

         # data is a dict with various stype, each which is a dict with
         # all the information specific for that stype
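The __getattr__ change above matters because hasattr() and getattr() only treat AttributeError as "attribute missing"; a custom exception type would leak out of ordinary attribute probing. A minimal sketch of the dict-with-attribute-access pattern and why the exception type makes a difference:

```python
# Minimal attribute-access dict: missing keys raise AttributeError so that
# hasattr()/getattr() behave as expected.

class AttribDict(dict):
    def __getattr__(self, item):
        try:
            return self[item]
        except KeyError:
            raise AttributeError("unable to access item '%s'" % item)

    def __setattr__(self, item, value):
        self[item] = value

d = AttribDict()
d.technique = "UNION"

print(d.technique)               # UNION
print(hasattr(d, "missing"))     # False - only works because AttributeError is raised
print(getattr(d, "missing", 0))  # 0
```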
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -15,10 +15,13 @@ def cachedmethod(f, cache={}):
     def _(*args, **kwargs):
         try:
             key = (f, tuple(args), frozenset(kwargs.items()))
+            if key not in cache:
+                cache[key] = f(*args, **kwargs)
         except:
             key = "".join(str(_) for _ in (f, args, kwargs))
             if key not in cache:
                 cache[key] = f(*args, **kwargs)

         return cache[key]

     return _
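The hunk above makes the hashable-key path of cachedmethod() actually populate the cache (previously only the string-key fallback did). A self-contained sketch of the same memoization pattern, with the bare except narrowed to TypeError for clarity (an assumption on my part; the original keeps a bare except):

```python
# Sketch of a memoizing decorator keyed on the function and its arguments,
# with a string-key fallback for unhashable arguments.

def cachedmethod(f, cache={}):
    def _(*args, **kwargs):
        try:
            key = (f, tuple(args), frozenset(kwargs.items()))
            if key not in cache:
                cache[key] = f(*args, **kwargs)
        except TypeError:
            # unhashable arguments (e.g. lists) - fall back to a string key
            key = "".join(str(_) for _ in (f, args, kwargs))
            if key not in cache:
                cache[key] = f(*args, **kwargs)

        return cache[key]

    return _

@cachedmethod
def slow_square(x):
    print("computing %d^2" % x)
    return x * x

print(slow_square(4))  # computes
print(slow_square(4))  # served from cache, no "computing" line printed
```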
@@ -1,17 +1,16 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

 from lib.core.datatype import AttribDict

 _defaults = {
-    "csvDel": ",",
+    "csvDel": ',',
     "timeSec": 5,
     "googlePage": 1,
-    "cpuThrottle": 5,
     "verbose": 1,
     "delay": 0,
     "timeout": 30,
@@ -22,7 +21,7 @@ _defaults = {
     "risk": 1,
     "dumpFormat": "CSV",
     "tech": "BEUSTQ",
-    "torType": "HTTP",
+    "torType": "SOCKS5",
 }

 defaults = AttribDict(_defaults)
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -21,219 +21,268 @@ from lib.core.settings import MAXDB_ALIASES
 from lib.core.settings import SYBASE_ALIASES
 from lib.core.settings import DB2_ALIASES
 from lib.core.settings import HSQLDB_ALIASES
+from lib.core.settings import INFORMIX_ALIASES

 FIREBIRD_TYPES = {
     261: "BLOB",
     14: "CHAR",
     40: "CSTRING",
     11: "D_FLOAT",
     27: "DOUBLE",
     10: "FLOAT",
     16: "INT64",
     8: "INTEGER",
     9: "QUAD",
     7: "SMALLINT",
     12: "DATE",
     13: "TIME",
     35: "TIMESTAMP",
     37: "VARCHAR",
 }

+INFORMIX_TYPES = {
+    0: "CHAR",
+    1: "SMALLINT",
+    2: "INTEGER",
+    3: "FLOAT",
+    4: "SMALLFLOAT",
+    5: "DECIMAL",
+    6: "SERIAL",
+    7: "DATE",
+    8: "MONEY",
+    9: "NULL",
+    10: "DATETIME",
+    11: "BYTE",
+    12: "TEXT",
+    13: "VARCHAR",
+    14: "INTERVAL",
+    15: "NCHAR",
+    16: "NVARCHAR",
+    17: "INT8",
+    18: "SERIAL8",
+    19: "SET",
+    20: "MULTISET",
+    21: "LIST",
+    22: "ROW (unnamed)",
+    23: "COLLECTION",
+    40: "Variable-length opaque type",
+    41: "Fixed-length opaque type",
+    43: "LVARCHAR",
+    45: "BOOLEAN",
+    52: "BIGINT",
+    53: "BIGSERIAL",
+    2061: "IDSSECURITYLABEL",
+    4118: "ROW (named)",
+}
+
 SYBASE_TYPES = {
     14: "floatn",
     8: "float",
     15: "datetimn",
     12: "datetime",
     23: "real",
     28: "numericn",
     10: "numeric",
     27: "decimaln",
     26: "decimal",
     17: "moneyn",
     11: "money",
     21: "smallmoney",
     22: "smalldatetime",
     13: "intn",
     7: "int",
     6: "smallint",
     5: "tinyint",
     16: "bit",
     2: "varchar",
     18: "sysname",
     25: "nvarchar",
     1: "char",
     24: "nchar",
     4: "varbinary",
     80: "timestamp",
     3: "binary",
     19: "text",
     20: "image",
 }

 MYSQL_PRIVS = {
     1: "select_priv",
     2: "insert_priv",
     3: "update_priv",
     4: "delete_priv",
     5: "create_priv",
     6: "drop_priv",
     7: "reload_priv",
     8: "shutdown_priv",
     9: "process_priv",
     10: "file_priv",
     11: "grant_priv",
     12: "references_priv",
     13: "index_priv",
     14: "alter_priv",
     15: "show_db_priv",
     16: "super_priv",
     17: "create_tmp_table_priv",
     18: "lock_tables_priv",
     19: "execute_priv",
     20: "repl_slave_priv",
     21: "repl_client_priv",
     22: "create_view_priv",
     23: "show_view_priv",
     24: "create_routine_priv",
     25: "alter_routine_priv",
     26: "create_user_priv",
 }

 PGSQL_PRIVS = {
     1: "createdb",
     2: "super",
     3: "catupd",
 }

 # Reference(s): http://stackoverflow.com/a/17672504
 #               http://docwiki.embarcadero.com/InterBase/XE7/en/RDB$USER_PRIVILEGES

 FIREBIRD_PRIVS = {
     "S": "SELECT",
     "I": "INSERT",
     "U": "UPDATE",
     "D": "DELETE",
     "R": "REFERENCE",
-    "E": "EXECUTE",
     "X": "EXECUTE",
     "A": "ALL",
     "M": "MEMBER",
     "T": "DECRYPT",
     "E": "ENCRYPT",
     "B": "SUBSCRIBE",
 }

+# Reference(s): https://www.ibm.com/support/knowledgecenter/SSGU8G_12.1.0/com.ibm.sqls.doc/ids_sqs_0147.htm
+#               https://www.ibm.com/support/knowledgecenter/SSGU8G_11.70.0/com.ibm.sqlr.doc/ids_sqr_077.htm
+
+INFORMIX_PRIVS = {
+    "D": "DBA (all privileges)",
+    "R": "RESOURCE (create UDRs, UDTs, permanent tables and indexes)",
+    "C": "CONNECT (work with existing tables)",
+    "G": "ROLE",
+    "U": "DEFAULT (implicit connection)",
+}
+
 DB2_PRIVS = {
     1: "CONTROLAUTH",
     2: "ALTERAUTH",
     3: "DELETEAUTH",
     4: "INDEXAUTH",
     5: "INSERTAUTH",
     6: "REFAUTH",
     7: "SELECTAUTH",
     8: "UPDATEAUTH",
 }

 DUMP_REPLACEMENTS = {" ": NULL, "": BLANK}

 DBMS_DICT = {
-    DBMS.MSSQL: (MSSQL_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/", "mssql+pymssql"),
-    DBMS.MYSQL: (MYSQL_ALIASES, "python pymysql", "https://github.com/petehunt/PyMySQL/", "mysql"),
+    DBMS.MSSQL: (MSSQL_ALIASES, "python-pymssql", "https://github.com/pymssql/pymssql", "mssql+pymssql"),
+    DBMS.MYSQL: (MYSQL_ALIASES, "python-pymysql", "https://github.com/petehunt/PyMySQL/", "mysql"),
     DBMS.PGSQL: (PGSQL_ALIASES, "python-psycopg2", "http://initd.org/psycopg/", "postgresql"),
     DBMS.ORACLE: (ORACLE_ALIASES, "python cx_Oracle", "http://cx-oracle.sourceforge.net/", "oracle"),
     DBMS.SQLITE: (SQLITE_ALIASES, "python-sqlite", "http://packages.ubuntu.com/quantal/python-sqlite", "sqlite"),
-    DBMS.ACCESS: (ACCESS_ALIASES, "python-pyodbc", "http://pyodbc.googlecode.com/", "access"),
+    DBMS.ACCESS: (ACCESS_ALIASES, "python-pyodbc", "https://github.com/mkleehammer/pyodbc", "access"),
     DBMS.FIREBIRD: (FIREBIRD_ALIASES, "python-kinterbasdb", "http://kinterbasdb.sourceforge.net/", "firebird"),
     DBMS.MAXDB: (MAXDB_ALIASES, None, None, "maxdb"),
-    DBMS.SYBASE: (SYBASE_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/", "sybase"),
-    DBMS.DB2: (DB2_ALIASES, "python ibm-db", "http://code.google.com/p/ibm-db/", "ibm_db_sa"),
+    DBMS.SYBASE: (SYBASE_ALIASES, "python-pymssql", "https://github.com/pymssql/pymssql", "sybase"),
+    DBMS.DB2: (DB2_ALIASES, "python ibm-db", "https://github.com/ibmdb/python-ibmdb", "ibm_db_sa"),
     DBMS.HSQLDB: (HSQLDB_ALIASES, "python jaydebeapi & python-jpype", "https://pypi.python.org/pypi/JayDeBeApi/ & http://jpype.sourceforge.net/", None),
+    DBMS.INFORMIX: (INFORMIX_ALIASES, "python ibm-db", "https://github.com/ibmdb/python-ibmdb", "ibm_db_sa"),
 }

 FROM_DUMMY_TABLE = {
     DBMS.ORACLE: " FROM DUAL",
     DBMS.ACCESS: " FROM MSysAccessObjects",
     DBMS.FIREBIRD: " FROM RDB$DATABASE",
     DBMS.MAXDB: " FROM VERSIONS",
     DBMS.DB2: " FROM SYSIBM.SYSDUMMY1",
-    DBMS.HSQLDB: " FROM INFORMATION_SCHEMA.SYSTEM_USERS"
+    DBMS.HSQLDB: " FROM INFORMATION_SCHEMA.SYSTEM_USERS",
+    DBMS.INFORMIX: " FROM SYSMASTER:SYSDUAL"
 }

 SQL_STATEMENTS = {
     "SQL SELECT statement": (
         "select ",
         "show ",
         " top ",
         " distinct ",
         " from ",
         " from dual",
         " where ",
         " group by ",
         " order by ",
         " having ",
         " limit ",
         " offset ",
         " union all ",
         " rownum as ",
         "(case ", ),

     "SQL data definition": (
         "create ",
         "declare ",
         "drop ",
         "truncate ",
         "alter ", ),

     "SQL data manipulation": (
         "bulk ",
         "insert ",
         "update ",
         "delete ",
         "merge ",
         "load ", ),

     "SQL data control": (
         "grant ",
         "revoke ", ),

     "SQL data execution": (
         "exec ",
         "execute ",
         "values ",
         "call ", ),

     "SQL transaction": (
         "start transaction ",
         "begin work ",
         "begin transaction ",
         "commit ",
         "rollback ", ),
 }

 POST_HINT_CONTENT_TYPES = {
     POST_HINT.JSON: "application/json",
     POST_HINT.JSON_LIKE: "application/json",
     POST_HINT.MULTIPART: "multipart/form-data",
     POST_HINT.SOAP: "application/soap+xml",
     POST_HINT.XML: "application/xml",
     POST_HINT.ARRAY_LIKE: "application/x-www-form-urlencoded; charset=utf-8",
 }

 DEPRECATED_OPTIONS = {
     "--replicate": "use '--dump-format=SQLITE' instead",
     "--no-unescape": "use '--no-escape' instead",
     "--binary": "use '--binary-fields' instead",
     "--auth-private": "use '--auth-file' instead",
     "--check-payload": None,
     "--check-waf": None,
+    "--pickled-options": "use '--api -c ...' instead",
 }

 DUMP_DATA_PREPROCESS = {
     DBMS.ORACLE: {"XMLTYPE": "(%s).getStringVal()"},  # Reference: https://www.tibcommunity.com/docs/DOC-3643
     DBMS.MSSQL: {"IMAGE": "CONVERT(VARBINARY(MAX),%s)"},
 }

 DEFAULT_DOC_ROOTS = {
-    OS.WINDOWS: ("C:/xampp/htdocs/", "C:/Inetpub/wwwroot/"),
-    OS.LINUX: ("/var/www/", "/var/www/html", "/usr/local/apache2/htdocs", "/var/www/nginx-default")  # Reference: https://wiki.apache.org/httpd/DistrosDefaultLayout
+    OS.WINDOWS: ("C:/xampp/htdocs/", "C:/wamp/www/", "C:/Inetpub/wwwroot/"),
+    OS.LINUX: ("/var/www/", "/var/www/html", "/usr/local/apache2/htdocs", "/var/www/nginx-default", "/srv/www")  # Reference: https://wiki.apache.org/httpd/DistrosDefaultLayout
 }
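Most of the additions above register Informix in the existing lookup tables (aliases, column types, privileges, dummy table, and the DBMS_DICT entry whose fourth element is the SQLAlchemy dialect used for direct connections). A short sketch of how such a dialect string can be turned into a connection URI — the trimmed registry and helper below are illustrative, not the full DBMS_DICT:

```python
# Sketch: map a DBMS name to its SQLAlchemy dialect and build a direct-connection URI.

DIALECTS = {
    "MySQL": "mysql",
    "PostgreSQL": "postgresql",
    "Microsoft SQL Server": "mssql+pymssql",
    "IBM DB2": "ibm_db_sa",
}

def connection_uri(dbms, user, password, host, port, db):
    dialect = DIALECTS.get(dbms)
    if dialect is None:
        raise ValueError("no SQLAlchemy dialect registered for %s" % dbms)
    return "%s://%s:%s@%s:%d/%s" % (dialect, user, password, host, port, db)

print(connection_uri("MySQL", "root", "secret", "127.0.0.1", 3306, "testdb"))
# mysql://root:secret@127.0.0.1:3306/testdb
```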
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -9,6 +9,7 @@ import cgi
 import hashlib
 import os
 import re
+import shutil
 import tempfile
 import threading

@@ -62,7 +63,7 @@ class Dump(object):
         self._lock = threading.Lock()

     def _write(self, data, newline=True, console=True, content_type=None):
-        if hasattr(conf, "api"):
+        if conf.api:
             dataToStdout(data, content_type=content_type, status=CONTENT_STATUS.COMPLETE)
             return

@@ -109,7 +110,7 @@ class Dump(object):
     def string(self, header, data, content_type=None, sort=True):
         kb.stickyLevel = None

-        if hasattr(conf, "api"):
+        if conf.api:
             self._write(data, content_type=content_type)
             return

@@ -118,9 +119,15 @@ class Dump(object):
         elif data is not None:
             _ = getUnicode(data)

-            if _ and _[-1] == '\n':
+            if _.endswith("\r\n"):
+                _ = _[:-2]
+
+            elif _.endswith("\n"):
                 _ = _[:-1]

+            if _.strip(' '):
+                _ = _.strip(' ')
+
             if "\n" in _:
                 self._write("%s:\n---\n%s\n---" % (header, _))
             else:
@@ -137,7 +144,7 @@ class Dump(object):
         except:
             pass

-        if hasattr(conf, "api"):
+        if conf.api:
             self._write(elements, content_type=content_type)
             return

@@ -186,7 +193,7 @@ class Dump(object):
         users = userSettings.keys()
         users.sort(key=lambda x: x.lower() if isinstance(x, basestring) else x)

-        if hasattr(conf, "api"):
+        if conf.api:
             self._write(userSettings, content_type=content_type)
             return

@@ -220,7 +227,7 @@ class Dump(object):

     def dbTables(self, dbTables):
         if isinstance(dbTables, dict) and len(dbTables) > 0:
-            if hasattr(conf, "api"):
+            if conf.api:
                 self._write(dbTables, content_type=CONTENT_TYPE.TABLES)
                 return

@@ -263,7 +270,7 @@ class Dump(object):

     def dbTableColumns(self, tableColumns, content_type=None):
         if isinstance(tableColumns, dict) and len(tableColumns) > 0:
-            if hasattr(conf, "api"):
+            if conf.api:
                 self._write(tableColumns, content_type=content_type)
                 return

@@ -337,7 +344,7 @@ class Dump(object):

     def dbTablesCount(self, dbTables):
         if isinstance(dbTables, dict) and len(dbTables) > 0:
-            if hasattr(conf, "api"):
+            if conf.api:
                 self._write(dbTables, content_type=CONTENT_TYPE.COUNT)
                 return

@@ -396,7 +403,7 @@ class Dump(object):
             db = "All"
         table = tableValues["__infos__"]["table"]

-        if hasattr(conf, "api"):
+        if conf.api:
             self._write(tableValues, content_type=CONTENT_TYPE.DUMP_TABLE)
             return

@@ -449,8 +456,23 @@ class Dump(object):
                 dumpFileName = os.path.join(dumpDbPath, "%s-%s.%s" % (_, hashlib.md5(unicodeencode(table)).hexdigest()[:8], conf.dumpFormat.lower()))
             else:
                 dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (_, conf.dumpFormat.lower()))
+        else:
+            appendToFile = any((conf.limitStart, conf.limitStop))

+            if not appendToFile:
+                count = 1
+                while True:
+                    candidate = "%s.%d" % (dumpFileName, count)
+                    if not checkFile(candidate, False):
+                        try:
+                            shutil.copyfile(dumpFileName, candidate)
+                        except IOError:
+                            pass
+                        finally:
+                            break
+                    else:
+                        count += 1
+
-        appendToFile = any((conf.limitStart, conf.limitStop)) and checkFile(dumpFileName, False)
         dumpFP = openFile(dumpFileName, "wb" if not appendToFile else "ab", buffering=DUMP_FILE_BUFFER_SIZE)

         count = int(tableValues["__infos__"]["count"])
@@ -644,7 +666,7 @@ class Dump(object):
             logger.warn(msg)

     def dbColumns(self, dbColumnsDict, colConsider, dbs):
-        if hasattr(conf, "api"):
+        if conf.api:
             self._write(dbColumnsDict, content_type=CONTENT_TYPE.COLUMNS)
             return

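The largest hunk above adds dump-file rotation: before an existing dump is overwritten, the old file is preserved as "<name>.1", "<name>.2", ... using the first free suffix. A standalone sketch of that rotation logic under simplified assumptions (plain os/shutil calls instead of sqlmap's checkFile helper):

```python
# Sketch of dump-file rotation: copy the current file to the first free
# numbered suffix before it gets overwritten.

import os
import shutil
import tempfile

def rotate(path):
    if not os.path.isfile(path):
        return None
    count = 1
    while True:
        candidate = "%s.%d" % (path, count)
        if not os.path.isfile(candidate):
            shutil.copyfile(path, candidate)
            return candidate
        count += 1

# demo in a throwaway directory
workdir = tempfile.mkdtemp()
dump = os.path.join(workdir, "users.csv")
with open(dump, "w") as fp:
    fp.write("id,name\n1,admin\n")

print(rotate(dump))   # .../users.csv.1
print(rotate(dump))   # .../users.csv.2
```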
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -34,6 +34,7 @@ class DBMS:
     SQLITE = "SQLite"
     SYBASE = "Sybase"
     HSQLDB = "HSQLDB"
+    INFORMIX = "Informix"

 class DBMS_DIRECTORY_NAME:
     ACCESS = "access"
@@ -47,6 +48,7 @@ class DBMS_DIRECTORY_NAME:
     SQLITE = "sqlite"
     SYBASE = "sybase"
     HSQLDB = "hsqldb"
+    INFORMIX = "informix"

 class CUSTOM_LOGGING:
     PAYLOAD = 9
@@ -174,6 +176,7 @@ class HTTP_HEADER:
     PROXY_CONNECTION = "Proxy-Connection"
     RANGE = "Range"
     REFERER = "Referer"
+    REFRESH = "Refresh"  # Reference: http://stackoverflow.com/a/283794
     SERVER = "Server"
     SET_COOKIE = "Set-Cookie"
     TRANSFER_ENCODING = "Transfer-Encoding"
@@ -194,6 +197,7 @@ class OPTION_TYPE:

 class HASHDB_KEYS:
     DBMS = "DBMS"
+    DBMS_FORK = "DBMS_FORK"
     CHECK_WAF_RESULT = "CHECK_WAF_RESULT"
     CONF_TMP_PATH = "CONF_TMP_PATH"
     KB_ABS_FILE_PATHS = "KB_ABS_FILE_PATHS"
@@ -283,31 +287,32 @@ class WEB_API:
     JSP = "jsp"

 class CONTENT_TYPE:
-    TECHNIQUES = 0
-    DBMS_FINGERPRINT = 1
-    BANNER = 2
-    CURRENT_USER = 3
-    CURRENT_DB = 4
-    HOSTNAME = 5
-    IS_DBA = 6
-    USERS = 7
-    PASSWORDS = 8
-    PRIVILEGES = 9
-    ROLES = 10
-    DBS = 11
-    TABLES = 12
-    COLUMNS = 13
-    SCHEMA = 14
-    COUNT = 15
-    DUMP_TABLE = 16
-    SEARCH = 17
-    SQL_QUERY = 18
-    COMMON_TABLES = 19
-    COMMON_COLUMNS = 20
-    FILE_READ = 21
-    FILE_WRITE = 22
-    OS_CMD = 23
-    REG_READ = 24
+    TARGET = 0
+    TECHNIQUES = 1
+    DBMS_FINGERPRINT = 2
+    BANNER = 3
+    CURRENT_USER = 4
+    CURRENT_DB = 5
+    HOSTNAME = 6
+    IS_DBA = 7
+    USERS = 8
+    PASSWORDS = 9
+    PRIVILEGES = 10
+    ROLES = 11
+    DBS = 12
+    TABLES = 13
+    COLUMNS = 14
+    SCHEMA = 15
+    COUNT = 16
+    DUMP_TABLE = 17
+    SEARCH = 18
+    SQL_QUERY = 19
+    COMMON_TABLES = 20
+    COMMON_COLUMNS = 21
+    FILE_READ = 22
+    FILE_WRITE = 23
+    OS_CMD = 24
+    REG_READ = 25

 PART_RUN_CONTENT_TYPES = {
     "checkDbms": CONTENT_TYPE.TECHNIQUES,
@@ -351,3 +356,21 @@ class AUTOCOMPLETE_TYPE:
     SQL = 0
     OS = 1
     SQLMAP = 2
+
+class NOTE:
+    FALSE_POSITIVE_OR_UNEXPLOITABLE = "false positive or unexploitable"
+
+class MKSTEMP_PREFIX:
+    HASHES = "sqlmaphashes-"
+    CRAWLER = "sqlmapcrawler-"
+    IPC = "sqlmapipc-"
+    CONFIG = "sqlmapconfig-"
+    TESTING = "sqlmaptesting-"
+    RESULTS = "sqlmapresults-"
+    COOKIE_JAR = "sqlmapcookiejar-"
+    BIG_ARRAY = "sqlmapbigarray-"
+
+class TIMEOUT_STATE:
+    NORMAL = 0
+    EXCEPTION = 1
+    TIMEOUT = 2
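Note that inserting TARGET = 0 shifts every later CONTENT_TYPE value by one, which matters to any API consumer that stored the raw integers. A small sketch of the class-as-enum style used above, with a reverse lookup so callers can resolve names at runtime instead of hard-coding numbers (the helper is illustrative, not part of sqlmap):

```python
# Class-as-enum pattern plus a reverse lookup helper (illustrative).

class CONTENT_TYPE:
    TARGET = 0
    TECHNIQUES = 1
    DBMS_FINGERPRINT = 2
    BANNER = 3

def name_of(enum_cls, value):
    # reverse lookup: integer value -> attribute name
    for attr in dir(enum_cls):
        if not attr.startswith("_") and getattr(enum_cls, attr) == value:
            return attr
    return None

print(name_of(CONTENT_TYPE, 2))         # DBMS_FINGERPRINT
print(getattr(CONTENT_TYPE, "BANNER"))  # 3
```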
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

lib/core/option.py — 413 changes (Normal file → Executable file)
@@ -1,10 +1,11 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

+import binascii
 import cookielib
 import glob
 import inspect
@@ -38,12 +39,12 @@ from lib.core.common import getPublicTypeMembers
 from lib.core.common import getSafeExString
 from lib.core.common import extractRegexResult
 from lib.core.common import filterStringValue
+from lib.core.common import findLocalPort
 from lib.core.common import findPageForms
 from lib.core.common import getConsoleWidth
 from lib.core.common import getFileItems
 from lib.core.common import getFileType
 from lib.core.common import getUnicode
-from lib.core.common import isListLike
 from lib.core.common import normalizePath
 from lib.core.common import ntToPosixSlashes
 from lib.core.common import openFile
@@ -56,12 +57,11 @@ from lib.core.common import readInput
 from lib.core.common import resetCookieJar
 from lib.core.common import runningAsAdmin
 from lib.core.common import safeExpandUser
+from lib.core.common import saveConfig
 from lib.core.common import setOptimize
 from lib.core.common import setPaths
 from lib.core.common import singleTimeWarnMessage
-from lib.core.common import UnicodeRawConfigParser
 from lib.core.common import urldecode
-from lib.core.convert import base64unpickle
 from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
@@ -108,9 +108,8 @@ from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
 from lib.core.settings import DBMS_ALIASES
 from lib.core.settings import DEFAULT_PAGE_ENCODING
 from lib.core.settings import DEFAULT_TOR_HTTP_PORTS
-from lib.core.settings import DEFAULT_TOR_SOCKS_PORT
+from lib.core.settings import DEFAULT_TOR_SOCKS_PORTS
 from lib.core.settings import DUMMY_URL
-from lib.core.settings import IGNORE_SAVE_OPTIONS
 from lib.core.settings import INJECT_HERE_MARK
 from lib.core.settings import IS_WIN
 from lib.core.settings import KB_CHARS_BOUNDARY_CHAR
@@ -120,6 +119,7 @@ from lib.core.settings import MAX_CONNECT_RETRIES
 from lib.core.settings import MAX_NUMBER_OF_THREADS
 from lib.core.settings import NULL
 from lib.core.settings import PARAMETER_SPLITTING_REGEX
+from lib.core.settings import PRECONNECT_CANDIDATE_TIMEOUT
 from lib.core.settings import PROBLEMATIC_CUSTOM_INJECTION_PATTERNS
 from lib.core.settings import SITE
 from lib.core.settings import SOCKET_PRE_CONNECT_QUEUE_SIZE
@@ -127,12 +127,14 @@ from lib.core.settings import SQLMAP_ENVIRONMENT_PREFIX
 from lib.core.settings import SUPPORTED_DBMS
 from lib.core.settings import SUPPORTED_OS
 from lib.core.settings import TIME_DELAY_CANDIDATES
+from lib.core.settings import UNICODE_ENCODING
 from lib.core.settings import UNION_CHAR_REGEX
 from lib.core.settings import UNKNOWN_DBMS_VERSION
 from lib.core.settings import URI_INJECTABLE_REGEX
 from lib.core.settings import VERSION_STRING
 from lib.core.settings import WEBSCARAB_SPLITTER
 from lib.core.threads import getCurrentThreadData
+from lib.core.threads import setDaemon
 from lib.core.update import update
 from lib.parse.configfile import configFileParser
 from lib.parse.payloads import loadBoundaries
@@ -151,8 +153,8 @@ from lib.utils.crawler import crawl
 from lib.utils.deps import checkDependencies
 from lib.utils.search import search
 from lib.utils.purge import purge
-from thirdparty.colorama.initialise import init as coloramainit
 from thirdparty.keepalive import keepalive
+from thirdparty.multipart import multipartpost
 from thirdparty.oset.pyoset import oset
 from thirdparty.socks import socks
 from xml.etree.ElementTree import ElementTree
@@ -163,6 +165,13 @@ keepAliveHandler = keepalive.HTTPHandler()
 proxyHandler = urllib2.ProxyHandler()
 redirectHandler = SmartRedirectHandler()
 rangeHandler = HTTPRangeHandler()
+multipartPostHandler = multipartpost.MultipartPostHandler()
+
+# Reference: https://mail.python.org/pipermail/python-list/2009-November/558615.html
+try:
+WindowsError
+except NameError:
+WindowsError = None

 def _feedTargetsDict(reqFile, addedTargetUrls):
 """
@@ -207,7 +216,10 @@ def _feedTargetsDict(reqFile, addedTargetUrls):
 reqResList = []
 for match in re.finditer(BURP_XML_HISTORY_REGEX, content, re.I | re.S):
 port, request = match.groups()
-request = request.decode("base64")
+try:
+request = request.decode("base64")
+except binascii.Error:
+continue
 _ = re.search(r"%s:.+" % re.escape(HTTP_HEADER.HOST), request)
 if _:
 host = _.group(0).strip()
@@ -228,6 +240,7 @@ def _feedTargetsDict(reqFile, addedTargetUrls):
 if schemePort:
 scheme = schemePort.group(1)
 port = schemePort.group(2)
+request = re.sub(r"\n=+\Z", "", request.split(schemePort.group(0))[-1].lstrip())
 else:
 scheme, port = None, None

@@ -385,7 +398,7 @@ def _loadQueries():
 try:
 tree.parse(paths.QUERIES_XML)
 except Exception, ex:
-errMsg = "something seems to be wrong with "
+errMsg = "something appears to be wrong with "
 errMsg += "the file '%s' ('%s'). Please make " % (paths.QUERIES_XML, getSafeExString(ex))
 errMsg += "sure that you haven't made any changes to it"
 raise SqlmapInstallationException, errMsg
@@ -471,14 +484,14 @@ def _setRequestFromFile():

 conf.requestFile = safeExpandUser(conf.requestFile)

-infoMsg = "parsing HTTP request from '%s'" % conf.requestFile
-logger.info(infoMsg)
-
 if not os.path.isfile(conf.requestFile):
-errMsg = "the specified HTTP request file "
+errMsg = "specified HTTP request file '%s' " % conf.requestFile
 errMsg += "does not exist"
 raise SqlmapFilePathException(errMsg)

+infoMsg = "parsing HTTP request from '%s'" % conf.requestFile
+logger.info(infoMsg)
+
 _feedTargetsDict(conf.requestFile, addedTargetUrls)

 def _setCrawler():
@@ -530,8 +543,7 @@ def _doSearch():
 elif re.search(URI_INJECTABLE_REGEX, link, re.I):
 if kb.data.onlyGETs is None and conf.data is None and not conf.googleDork:
 message = "do you want to scan only results containing GET parameters? [Y/n] "
-test = readInput(message, default="Y")
-kb.data.onlyGETs = test.lower() != 'n'
+kb.data.onlyGETs = readInput(message, default='Y', boolean=True)
 if not kb.data.onlyGETs or conf.googleDork:
 kb.targets.add((link, conf.method, conf.data, conf.cookie, None))

@@ -558,9 +570,8 @@ def _doSearch():
 message += "for your search dork expression, but none of them "
 message += "have GET parameters to test for SQL injection. "
 message += "Do you want to skip to the next result page? [Y/n]"
-test = readInput(message, default="Y")

-if test[0] in ("n", "N"):
+if not readInput(message, default='Y', boolean=True):
 raise SqlmapSilentQuitException
 else:
 conf.googlePage += 1
@@ -879,32 +890,37 @@ def _setTamperingFunctions():
 resolve_priorities = False
 priorities = []

-for tfile in re.split(PARAMETER_SPLITTING_REGEX, conf.tamper):
+for script in re.split(PARAMETER_SPLITTING_REGEX, conf.tamper):
 found = False

-tfile = tfile.strip()
+path = paths.SQLMAP_TAMPER_PATH.encode(sys.getfilesystemencoding() or UNICODE_ENCODING)
+script = script.strip().encode(sys.getfilesystemencoding() or UNICODE_ENCODING)

-if not tfile:
-continue
+try:
+if not script:
+continue

-elif os.path.exists(os.path.join(paths.SQLMAP_TAMPER_PATH, tfile if tfile.endswith('.py') else "%s.py" % tfile)):
-tfile = os.path.join(paths.SQLMAP_TAMPER_PATH, tfile if tfile.endswith('.py') else "%s.py" % tfile)
+elif os.path.exists(os.path.join(path, script if script.endswith(".py") else "%s.py" % script)):
+script = os.path.join(path, script if script.endswith(".py") else "%s.py" % script)

-elif not os.path.exists(tfile):
-errMsg = "tamper script '%s' does not exist" % tfile
+elif not os.path.exists(script):
+errMsg = "tamper script '%s' does not exist" % script
 raise SqlmapFilePathException(errMsg)

-elif not tfile.endswith('.py'):
-errMsg = "tamper script '%s' should have an extension '.py'" % tfile
+elif not script.endswith(".py"):
+errMsg = "tamper script '%s' should have an extension '.py'" % script
+raise SqlmapSyntaxException(errMsg)
+except UnicodeDecodeError:
+errMsg = "invalid character provided in option '--tamper'"
 raise SqlmapSyntaxException(errMsg)

-dirname, filename = os.path.split(tfile)
+dirname, filename = os.path.split(script)
 dirname = os.path.abspath(dirname)

 infoMsg = "loading tamper script '%s'" % filename[:-3]
 logger.info(infoMsg)

-if not os.path.exists(os.path.join(dirname, '__init__.py')):
+if not os.path.exists(os.path.join(dirname, "__init__.py")):
 errMsg = "make sure that there is an empty file '__init__.py' "
 errMsg += "inside of tamper scripts directory '%s'" % dirname
 raise SqlmapGenericException(errMsg)
@@ -913,11 +929,11 @@ def _setTamperingFunctions():
 sys.path.insert(0, dirname)

 try:
-module = __import__(filename[:-3].encode(sys.getfilesystemencoding()))
-except (ImportError, SyntaxError), msg:
-raise SqlmapSyntaxException("cannot import tamper script '%s' (%s)" % (filename[:-3], msg))
+module = __import__(filename[:-3].encode(sys.getfilesystemencoding() or UNICODE_ENCODING))
+except (ImportError, SyntaxError), ex:
+raise SqlmapSyntaxException("cannot import tamper script '%s' (%s)" % (filename[:-3], getSafeExString(ex)))

-priority = PRIORITY.NORMAL if not hasattr(module, '__priority__') else module.__priority__
+priority = PRIORITY.NORMAL if not hasattr(module, "__priority__") else module.__priority__

 for name, function in inspect.getmembers(module, inspect.isfunction):
 if name == "tamper" and inspect.getargspec(function).args and inspect.getargspec(function).keywords == "kwargs":
@@ -926,17 +942,17 @@ def _setTamperingFunctions():
 function.func_name = module.__name__

 if check_priority and priority > last_priority:
-message = "it seems that you might have mixed "
+message = "it appears that you might have mixed "
 message += "the order of tamper scripts. "
 message += "Do you want to auto resolve this? [Y/n/q] "
-test = readInput(message, default="Y")
+choice = readInput(message, default='Y').upper()

-if not test or test[0] in ("y", "Y"):
-resolve_priorities = True
-elif test[0] in ("n", "N"):
+if choice == 'N':
 resolve_priorities = False
-elif test[0] in ("q", "Q"):
+elif choice == 'Q':
 raise SqlmapUserQuitException
+else:
+resolve_priorities = True

 check_priority = False

@@ -949,7 +965,7 @@ def _setTamperingFunctions():

 if not found:
 errMsg = "missing function 'tamper(payload, **kwargs)' "
-errMsg += "in tamper script '%s'" % tfile
+errMsg += "in tamper script '%s'" % script
 raise SqlmapGenericException(errMsg)

 if kb.tamperFunctions and len(kb.tamperFunctions) > 3:
@@ -966,7 +982,7 @@ def _setTamperingFunctions():

 def _setWafFunctions():
 """
-Loads WAF/IDS/IPS detecting functions from script(s)
+Loads WAF/IPS/IDS detecting functions from script(s)
 """

 if conf.identifyWaf:
@@ -986,7 +1002,7 @@ def _setWafFunctions():
 try:
 if filename[:-3] in sys.modules:
 del sys.modules[filename[:-3]]
-module = __import__(filename[:-3])
+module = __import__(filename[:-3].encode(sys.getfilesystemencoding() or UNICODE_ENCODING))
 except ImportError, msg:
 raise SqlmapSyntaxException("cannot import WAF script '%s' (%s)" % (filename[:-3], msg))

@@ -998,6 +1014,8 @@ def _setWafFunctions():
 else:
 kb.wafFunctions.append((_["detect"], _.get("__product__", filename[:-3])))

+kb.wafFunctions = sorted(kb.wafFunctions, key=lambda _: "generic" in _[1].lower())
+
 def _setThreads():
 if not isinstance(conf.threads, int) or conf.threads <= 0:
 conf.threads = 1
@@ -1008,12 +1026,12 @@ def _setDNSCache():
 """

 def _getaddrinfo(*args, **kwargs):
-if args in kb.cache:
-return kb.cache[args]
+if args in kb.cache.addrinfo:
+return kb.cache.addrinfo[args]

 else:
-kb.cache[args] = socket._getaddrinfo(*args, **kwargs)
-return kb.cache[args]
+kb.cache.addrinfo[args] = socket._getaddrinfo(*args, **kwargs)
+return kb.cache.addrinfo[args]

 if not hasattr(socket, "_getaddrinfo"):
 socket._getaddrinfo = socket.getaddrinfo
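The _setDNSCache() hunk above moves cached getaddrinfo() results from the flat kb.cache dictionary into a dedicated kb.cache.addrinfo sub-dictionary. A minimal self-contained sketch of the same monkey-patching idea, using a plain module-level dict instead of sqlmap's kb object (names here are illustrative only):

import socket

_addrinfo_cache = {}

def _set_dns_cache():
    # Cache socket.getaddrinfo() results so repeated lookups of the same
    # host/port pair are answered from memory instead of the resolver.
    def _getaddrinfo(*args, **kwargs):
        if args not in _addrinfo_cache:
            _addrinfo_cache[args] = socket._getaddrinfo(*args, **kwargs)
        return _addrinfo_cache[args]

    # Patch only once, keeping a reference to the original implementation
    if not hasattr(socket, "_getaddrinfo"):
        socket._getaddrinfo = socket.getaddrinfo
        socket.getaddrinfo = _getaddrinfo

_set_dns_cache()
print(socket.getaddrinfo("localhost", 80)[0][4])  # a second identical call would hit the cache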
@@ -1028,7 +1046,7 @@ def _setSocketPreConnect():
 return

 def _():
-while kb.threadContinue and not conf.disablePrecon:
+while kb.get("threadContinue") and not conf.get("disablePrecon"):
 try:
 for key in socket._ready:
 if len(socket._ready[key]) < SOCKET_PRE_CONNECT_QUEUE_SIZE:
@@ -1036,7 +1054,7 @@ def _setSocketPreConnect():
 s = socket.socket(family, type, proto)
 s._connect(address)
 with kb.locks.socket:
-socket._ready[key].append(s._sock)
+socket._ready[key].append((s._sock, time.time()))
 except KeyboardInterrupt:
 break
 except:
@@ -1051,9 +1069,17 @@ def _setSocketPreConnect():
 with kb.locks.socket:
 if key not in socket._ready:
 socket._ready[key] = []
-if len(socket._ready[key]) > 0:
-self._sock = socket._ready[key].pop(0)
-found = True
+while len(socket._ready[key]) > 0:
+candidate, created = socket._ready[key].pop(0)
+if (time.time() - created) < PRECONNECT_CANDIDATE_TIMEOUT:
+self._sock = candidate
+found = True
+break
+else:
+try:
+candidate.close()
+except socket.error:
+pass

 if not found:
 self._connect(address)
@@ -1064,6 +1090,7 @@ def _setSocketPreConnect():
 socket.socket.connect = connect

 thread = threading.Thread(target=_)
+setDaemon(thread)
 thread.start()

 def _setHTTPHandlers():
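The pre-connect changes above start storing each pooled socket together with its creation time and skip candidates older than PRECONNECT_CANDIDATE_TIMEOUT when a connect() is intercepted. A rough standalone illustration of that freshness check (the pool layout and timeout value are simplified stand-ins, not sqlmap's internals):

import socket
import time

PRECONNECT_CANDIDATE_TIMEOUT = 30  # seconds; illustrative value only

# maps (host, port) -> list of (socket, created_timestamp) pairs
pool = {}

def take_fresh_socket(key):
    # Pop pooled candidates for `key` until one is still young enough;
    # stale ones are closed and discarded. Returns None when the pool is
    # exhausted, so the caller falls back to a regular connect().
    while pool.get(key):
        candidate, created = pool[key].pop(0)
        if time.time() - created < PRECONNECT_CANDIDATE_TIMEOUT:
            return candidate
        try:
            candidate.close()
        except socket.error:
            pass
    return None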
@@ -1151,7 +1178,7 @@ def _setHTTPHandlers():
 debugMsg = "creating HTTP requests opener object"
 logger.debug(debugMsg)

-handlers = filter(None, [proxyHandler if proxyHandler.proxies else None, authHandler, redirectHandler, rangeHandler, httpsHandler])
+handlers = filter(None, [multipartPostHandler, proxyHandler if proxyHandler.proxies else None, authHandler, redirectHandler, rangeHandler, httpsHandler])

 if not conf.dropSetCookie:
 if not conf.loadCookies:
@@ -1183,7 +1210,7 @@ def _setSafeVisit():
 """
 Check and set the safe visit options.
 """
-if not any ((conf.safeUrl, conf.safeReqFile)):
+if not any((conf.safeUrl, conf.safeReqFile)):
 return

 if conf.safeReqFile:
@@ -1309,17 +1336,17 @@ def _setHTTPAuthentication():
 debugMsg = "setting the HTTP authentication type and credentials"
 logger.debug(debugMsg)

-aTypeLower = conf.authType.lower()
+authType = conf.authType.lower()

-if aTypeLower in (AUTH_TYPE.BASIC, AUTH_TYPE.DIGEST):
+if authType in (AUTH_TYPE.BASIC, AUTH_TYPE.DIGEST):
 regExp = "^(.*?):(.*?)$"
-errMsg = "HTTP %s authentication credentials " % aTypeLower
+errMsg = "HTTP %s authentication credentials " % authType
 errMsg += "value must be in format 'username:password'"
-elif aTypeLower == AUTH_TYPE.NTLM:
+elif authType == AUTH_TYPE.NTLM:
 regExp = "^(.*\\\\.*):(.*?)$"
 errMsg = "HTTP NTLM authentication credentials value must "
 errMsg += "be in format 'DOMAIN\username:password'"
-elif aTypeLower == AUTH_TYPE.PKI:
+elif authType == AUTH_TYPE.PKI:
 errMsg = "HTTP PKI authentication require "
 errMsg += "usage of option `--auth-pki`"
 raise SqlmapSyntaxException(errMsg)
@@ -1336,13 +1363,13 @@ def _setHTTPAuthentication():

 _setAuthCred()

-if aTypeLower == AUTH_TYPE.BASIC:
+if authType == AUTH_TYPE.BASIC:
 authHandler = SmartHTTPBasicAuthHandler(kb.passwordMgr)

-elif aTypeLower == AUTH_TYPE.DIGEST:
+elif authType == AUTH_TYPE.DIGEST:
 authHandler = urllib2.HTTPDigestAuthHandler(kb.passwordMgr)

-elif aTypeLower == AUTH_TYPE.NTLM:
+elif authType == AUTH_TYPE.NTLM:
 try:
 from ntlm import HTTPNtlmAuthHandler
 except ImportError:
@@ -1381,16 +1408,12 @@ def _setHTTPExtraHeaders():
 raise SqlmapSyntaxException(errMsg)

 elif not conf.requestFile and len(conf.httpHeaders or []) < 2:
-conf.httpHeaders.append((HTTP_HEADER.ACCEPT_LANGUAGE, "en-us,en;q=0.5"))
-if not conf.charset:
-conf.httpHeaders.append((HTTP_HEADER.ACCEPT_CHARSET, "ISO-8859-15,utf-8;q=0.7,*;q=0.7"))
-else:
+if conf.charset:
 conf.httpHeaders.append((HTTP_HEADER.ACCEPT_CHARSET, "%s;q=0.7,*;q=0.1" % conf.charset))

 # Invalidating any caching mechanism in between
-# Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
-conf.httpHeaders.append((HTTP_HEADER.CACHE_CONTROL, "no-cache,no-store"))
-conf.httpHeaders.append((HTTP_HEADER.PRAGMA, "no-cache"))
+# Reference: http://stackoverflow.com/a/1383359
+conf.httpHeaders.append((HTTP_HEADER.CACHE_CONTROL, "no-cache"))

 def _defaultHTTPUserAgent():
 """
@@ -1400,13 +1423,6 @@ def _defaultHTTPUserAgent():

 return "%s (%s)" % (VERSION_STRING, SITE)

-# Firefox 3 running on Ubuntu 9.04 updated at April 2009
-#return "Mozilla/5.0 (X11; U; Linux i686; en-GB; rv:1.9.0.9) Gecko/2009042113 Ubuntu/9.04 (jaunty) Firefox/3.0.9"
-
-# Internet Explorer 7.0 running on Windows 2003 Service Pack 2 english
-# updated at March 2009
-#return "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.2; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30; .NET CLR 3.0.04506.648; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729)"
-
 def _setHTTPUserAgent():
 """
 Set the HTTP User-Agent header.
@@ -1545,24 +1561,51 @@ def _createTemporaryDirectory():
 Creates temporary directory for this run.
 """

-try:
-if not os.path.isdir(tempfile.gettempdir()):
-os.makedirs(tempfile.gettempdir())
-except IOError, ex:
-errMsg = "there has been a problem while accessing "
-errMsg += "system's temporary directory location(s) ('%s'). Please " % getSafeExString(ex)
-errMsg += "make sure that there is enough disk space left. If problem persists, "
-errMsg += "try to set environment variable 'TEMP' to a location "
-errMsg += "writeable by the current user"
-raise SqlmapSystemException, errMsg
-
-if "sqlmap" not in (tempfile.tempdir or ""):
-tempfile.tempdir = tempfile.mkdtemp(prefix="sqlmap", suffix=str(os.getpid()))
+if conf.tmpDir:
+try:
+if not os.path.isdir(conf.tmpDir):
+os.makedirs(conf.tmpDir)
+_ = os.path.join(conf.tmpDir, randomStr())
+open(_, "w+b").close()
+os.remove(_)
+
+tempfile.tempdir = conf.tmpDir
+
+warnMsg = "using '%s' as the temporary directory" % conf.tmpDir
+logger.warn(warnMsg)
+except (OSError, IOError), ex:
+errMsg = "there has been a problem while accessing "
+errMsg += "temporary directory location(s) ('%s')" % getSafeExString(ex)
+raise SqlmapSystemException, errMsg
+else:
+try:
+if not os.path.isdir(tempfile.gettempdir()):
+os.makedirs(tempfile.gettempdir())
+except (OSError, IOError, WindowsError), ex:
+warnMsg = "there has been a problem while accessing "
+warnMsg += "system's temporary directory location(s) ('%s'). Please " % getSafeExString(ex)
+warnMsg += "make sure that there is enough disk space left. If problem persists, "
+warnMsg += "try to set environment variable 'TEMP' to a location "
+warnMsg += "writeable by the current user"
+logger.warn(warnMsg)
+
+if "sqlmap" not in (tempfile.tempdir or "") or conf.tmpDir and tempfile.tempdir == conf.tmpDir:
+try:
+tempfile.tempdir = tempfile.mkdtemp(prefix="sqlmap", suffix=str(os.getpid()))
+except (OSError, IOError, WindowsError):
+tempfile.tempdir = os.path.join(paths.SQLMAP_HOME_PATH, "tmp", "sqlmap%s%d" % (randomStr(6), os.getpid()))

 kb.tempDir = tempfile.tempdir

 if not os.path.isdir(tempfile.tempdir):
-os.makedirs(tempfile.tempdir)
+try:
+os.makedirs(tempfile.tempdir)
+except (OSError, IOError, WindowsError), ex:
+errMsg = "there has been a problem while setting "
+errMsg += "temporary directory location ('%s')" % getSafeExString(ex)
+raise SqlmapSystemException, errMsg

 def _cleanupOptions():
 """
@@ -1590,6 +1633,9 @@ def _cleanupOptions():
 else:
 conf.testParameter = []

+if conf.agent:
+conf.agent = re.sub(r"[\r\n]", "", conf.agent)
+
 if conf.user:
 conf.user = conf.user.replace(" ", "")

@@ -1639,10 +1685,10 @@ def _cleanupOptions():
 setOptimize()

 if conf.data:
-conf.data = re.sub(INJECT_HERE_MARK.replace(" ", r"[^A-Za-z]*"), CUSTOM_INJECTION_MARK_CHAR, conf.data, re.I)
+conf.data = re.sub("(?i)%s" % INJECT_HERE_MARK.replace(" ", r"[^A-Za-z]*"), CUSTOM_INJECTION_MARK_CHAR, conf.data)

 if conf.url:
-conf.url = re.sub(INJECT_HERE_MARK.replace(" ", r"[^A-Za-z]*"), CUSTOM_INJECTION_MARK_CHAR, conf.url, re.I)
+conf.url = re.sub("(?i)%s" % INJECT_HERE_MARK.replace(" ", r"[^A-Za-z]*"), CUSTOM_INJECTION_MARK_CHAR, conf.url)

 if conf.os:
 conf.os = conf.os.capitalize()
@@ -1654,10 +1700,20 @@ def _cleanupOptions():
 conf.testFilter = conf.testFilter.strip('*+')
 conf.testFilter = re.sub(r"([^.])([*+])", "\g<1>.\g<2>", conf.testFilter)

+try:
+re.compile(conf.testFilter)
+except re.error:
+conf.testFilter = re.escape(conf.testFilter)
+
 if conf.testSkip:
 conf.testSkip = conf.testSkip.strip('*+')
 conf.testSkip = re.sub(r"([^.])([*+])", "\g<1>.\g<2>", conf.testSkip)

+try:
+re.compile(conf.testSkip)
+except re.error:
+conf.testSkip = re.escape(conf.testSkip)
+
 if "timeSec" not in kb.explicitSettings:
 if conf.tor:
 conf.timeSec = 2 * conf.timeSec
|
|||||||
|
|
||||||
if conf.outputDir:
|
if conf.outputDir:
|
||||||
paths.SQLMAP_OUTPUT_PATH = os.path.realpath(os.path.expanduser(conf.outputDir))
|
paths.SQLMAP_OUTPUT_PATH = os.path.realpath(os.path.expanduser(conf.outputDir))
|
||||||
setPaths()
|
setPaths(paths.SQLMAP_ROOT_PATH)
|
||||||
|
|
||||||
if conf.string:
|
if conf.string:
|
||||||
try:
|
try:
|
||||||
@@ -1711,23 +1767,40 @@ def _cleanupOptions():
|
|||||||
conf.torType = conf.torType.upper()
|
conf.torType = conf.torType.upper()
|
||||||
|
|
||||||
if conf.col:
|
if conf.col:
|
||||||
conf.col = re.sub(r"\s*,\s*", ",", conf.col)
|
conf.col = re.sub(r"\s*,\s*", ',', conf.col)
|
||||||
|
|
||||||
if conf.excludeCol:
|
if conf.excludeCol:
|
||||||
conf.excludeCol = re.sub(r"\s*,\s*", ",", conf.excludeCol)
|
conf.excludeCol = re.sub(r"\s*,\s*", ',', conf.excludeCol)
|
||||||
|
|
||||||
if conf.binaryFields:
|
if conf.binaryFields:
|
||||||
conf.binaryFields = re.sub(r"\s*,\s*", ",", conf.binaryFields)
|
conf.binaryFields = re.sub(r"\s*,\s*", ',', conf.binaryFields)
|
||||||
|
|
||||||
|
if any((conf.proxy, conf.proxyFile, conf.tor)):
|
||||||
|
conf.disablePrecon = True
|
||||||
|
|
||||||
threadData = getCurrentThreadData()
|
threadData = getCurrentThreadData()
|
||||||
threadData.reset()
|
threadData.reset()
|
||||||
|
|
||||||
|
def _cleanupEnvironment():
|
||||||
|
"""
|
||||||
|
Cleanup environment (e.g. from leftovers after --sqlmap-shell).
|
||||||
|
"""
|
||||||
|
|
||||||
|
if issubclass(urllib2.socket.socket, socks.socksocket):
|
||||||
|
socks.unwrapmodule(urllib2)
|
||||||
|
|
||||||
|
if hasattr(socket, "_ready"):
|
||||||
|
socket._ready.clear()
|
||||||
|
|
||||||
def _dirtyPatches():
|
def _dirtyPatches():
|
||||||
"""
|
"""
|
||||||
Place for "dirty" Python related patches
|
Place for "dirty" Python related patches
|
||||||
"""
|
"""
|
||||||
|
|
||||||
httplib._MAXLINE = 1 * 1024 * 1024 # to accept overly long result lines (e.g. SQLi results in HTTP header responses)
|
httplib._MAXLINE = 1 * 1024 * 1024 # accept overly long result lines (e.g. SQLi results in HTTP header responses)
|
||||||
|
|
||||||
|
if IS_WIN:
|
||||||
|
from thirdparty.wininetpton import win_inet_pton # add support for inet_pton() on Windows OS
|
||||||
|
|
||||||
def _purgeOutput():
|
def _purgeOutput():
|
||||||
"""
|
"""
|
||||||
@@ -1795,10 +1868,16 @@ def _setKnowledgeBaseAttributes(flushAll=True):
 kb.bruteMode = False

 kb.cache = AttribDict()
+kb.cache.addrinfo = {}
 kb.cache.content = {}
+kb.cache.encoding = {}
+kb.cache.intBoundaries = None
+kb.cache.parsedDbms = {}
 kb.cache.regex = {}
 kb.cache.stdev = {}

+kb.captchaDetected = None
+
 kb.chars = AttribDict()
 kb.chars.delimiter = randomStr(length=6, lowercase=True)
 kb.chars.start = "%s%s%s" % (KB_CHARS_BOUNDARY_CHAR, randomStr(length=3, alphabet=KB_CHARS_LOW_FREQUENCY_ALPHABET), KB_CHARS_BOUNDARY_CHAR)
@@ -1807,6 +1886,9 @@ def _setKnowledgeBaseAttributes(flushAll=True):

 kb.columnExistsChoice = None
 kb.commonOutputs = None
+kb.connErrorChoice = None
+kb.connErrorCounter = 0
+kb.cookieEncodeChoice = None
 kb.counters = {}
 kb.data = AttribDict()
 kb.dataOutputFlag = False
@@ -1820,6 +1902,8 @@ def _setKnowledgeBaseAttributes(flushAll=True):
 kb.dnsMode = False
 kb.dnsTest = None
 kb.docRoot = None
+kb.droppingRequests = False
+kb.dumpColumns = None
 kb.dumpTable = None
 kb.dumpKeyboardInterrupt = False
 kb.dynamicMarkings = []
@@ -1829,6 +1913,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
 kb.extendTests = None
 kb.errorChunkLength = None
 kb.errorIsNone = True
+kb.falsePositives = []
 kb.fileReadMode = False
 kb.followSitemapRecursion = None
 kb.forcedDbms = None
@@ -1838,6 +1923,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
 kb.headersFp = {}
 kb.heuristicDbms = None
 kb.heuristicMode = False
+kb.heuristicPage = False
 kb.heuristicTest = None
 kb.hintValue = None
 kb.htmlFp = []
@@ -1852,7 +1938,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
 kb.lastParserStatus = None

 kb.locks = AttribDict()
-for _ in ("cache", "count", "index", "io", "limit", "log", "socket", "redirect", "request", "value"):
+for _ in ("cache", "connError", "count", "index", "io", "limit", "log", "socket", "redirect", "request", "value"):
 kb.locks[_] = threading.Lock()

 kb.matchRatio = None
@@ -1896,13 +1982,16 @@ def _setKnowledgeBaseAttributes(flushAll=True):
 kb.reflectiveCounters = {REFLECTIVE_COUNTER.MISS: 0, REFLECTIVE_COUNTER.HIT: 0}
 kb.requestCounter = 0
 kb.resendPostOnRedirect = None
+kb.resolutionDbms = None
 kb.responseTimes = {}
 kb.responseTimeMode = None
 kb.responseTimePayload = None
 kb.resumeValues = True
+kb.rowXmlMode = False
 kb.safeCharEncode = False
 kb.safeReq = AttribDict()
 kb.singleLogFlags = set()
+kb.skipSeqMatcher = False
 kb.reduceTests = None
 kb.tlsSNI = {}
 kb.stickyDBMS = False
@@ -1910,6 +1999,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
 kb.storeCrawlingChoice = None
 kb.storeHashesChoice = None
 kb.suppressResumeInfo = False
+kb.tableFrom = None
 kb.technique = None
 kb.tempDir = None
 kb.testMode = False
@@ -1919,7 +2009,6 @@ def _setKnowledgeBaseAttributes(flushAll=True):
 kb.threadContinue = True
 kb.threadException = False
 kb.tableExistsChoice = None
-kb.timeValidCharsRun = 0
 kb.uChar = NULL
 kb.unionDuplicates = False
 kb.xpCmdshellAvailable = False
@@ -2015,53 +2104,7 @@ def _saveConfig():
 debugMsg = "saving command line options to a sqlmap configuration INI file"
 logger.debug(debugMsg)

-config = UnicodeRawConfigParser()
-userOpts = {}
-
-for family in optDict.keys():
-userOpts[family] = []
-
-for option, value in conf.items():
-for family, optionData in optDict.items():
-if option in optionData:
-userOpts[family].append((option, value, optionData[option]))
-
-for family, optionData in userOpts.items():
-config.add_section(family)
-
-optionData.sort()
-
-for option, value, datatype in optionData:
-if datatype and isListLike(datatype):
-datatype = datatype[0]
-
-if option in IGNORE_SAVE_OPTIONS:
-continue
-
-if value is None:
-if datatype == OPTION_TYPE.BOOLEAN:
-value = "False"
-elif datatype in (OPTION_TYPE.INTEGER, OPTION_TYPE.FLOAT):
-if option in defaults:
-value = str(defaults[option])
-else:
-value = "0"
-elif datatype == OPTION_TYPE.STRING:
-value = ""
-
-if isinstance(value, basestring):
-value = value.replace("\n", "\n ")
-
-config.set(family, option, value)
-
-confFP = openFile(conf.saveConfig, "wb")
-
-try:
-config.write(confFP)
-except IOError, ex:
-errMsg = "something went wrong while trying "
-errMsg += "to write to the configuration file '%s' ('%s')" % (conf.saveConfig, getSafeExString(ex))
-raise SqlmapSystemException(errMsg)
+saveConfig(conf, conf.saveConfig)

 infoMsg = "saved command line options to the configuration file '%s'" % conf.saveConfig
 logger.info(infoMsg)
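The removed block above is the old inline INI-serialization logic; the diff replaces it with a single call to saveConfig(conf, conf.saveConfig), now imported from lib.core.common. That helper's body is not part of this diff, so the following is only a much-reduced sketch of the general idea, with a hypothetical option layout:

try:
    from configparser import RawConfigParser  # Python 3
except ImportError:
    from ConfigParser import RawConfigParser  # Python 2

def save_config(options, path, layout):
    # Write a flat `options` dict into an INI file, grouping each option
    # under the section given by `layout` ({section: [option, ...]}).
    config = RawConfigParser()
    for section, names in layout.items():
        config.add_section(section)
        for name in names:
            value = options.get(name)
            config.set(section, name, "" if value is None else str(value))
    with open(path, "w") as fp:
        config.write(fp)

save_config({"url": "http://www.target.com/vuln.php?id=1", "batch": True},
            "sqlmap.conf", {"Target": ["url"], "General": ["batch"]})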
@@ -2137,15 +2180,6 @@ def _mergeOptions(inputOptions, overrideOptions):
 @type inputOptions: C{instance}
 """

-if inputOptions.pickledOptions:
-try:
-inputOptions = base64unpickle(inputOptions.pickledOptions)
-_normalizeOptions(inputOptions)
-except Exception, ex:
-errMsg = "provided invalid value '%s' for option '--pickled-options'" % inputOptions.pickledOptions
-errMsg += " ('%s')" % ex if ex.message else ""
-raise SqlmapSyntaxException(errMsg)
-
 if inputOptions.configFile:
 configFileParser(inputOptions.configFile)

@@ -2158,9 +2192,10 @@ def _mergeOptions(inputOptions, overrideOptions):
 if key not in conf or value not in (None, False) or overrideOptions:
 conf[key] = value

-for key, value in conf.items():
-if value is not None:
-kb.explicitSettings.add(key)
+if not conf.api:
+for key, value in conf.items():
+if value is not None:
+kb.explicitSettings.add(key)

 for key, value in defaults.items():
 if hasattr(conf, key) and conf[key] is None:
@@ -2192,7 +2227,7 @@ def _setTrafficOutputFP():
 conf.trafficFP = openFile(conf.trafficFile, "w+")

 def _setDNSServer():
-if not conf.dnsName:
+if not conf.dnsDomain:
 return

 infoMsg = "setting up DNS server instance"
@@ -2220,7 +2255,7 @@ def _setProxyList():
 return

 conf.proxyList = []
-for match in re.finditer(r"(?i)((http[^:]*|socks[^:]*)://)?([\w.]+):(\d+)", readCachedFileContent(conf.proxyFile)):
+for match in re.finditer(r"(?i)((http[^:]*|socks[^:]*)://)?([\w\-.]+):(\d+)", readCachedFileContent(conf.proxyFile)):
 _, type_, address, port = match.groups()
 conf.proxyList.append("%s://%s:%s" % (type_ or "http", address, port))

@@ -2237,26 +2272,14 @@ def _setTorHttpProxySettings():
 infoMsg = "setting Tor HTTP proxy settings"
 logger.info(infoMsg)

-found = None
-
-for port in (DEFAULT_TOR_HTTP_PORTS if not conf.torPort else (conf.torPort,)):
-try:
-s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-s.connect((LOCALHOST, port))
-found = port
-break
-except socket.error:
-pass
-
-s.close()
-
-if found:
-conf.proxy = "http://%s:%d" % (LOCALHOST, found)
+port = findLocalPort(DEFAULT_TOR_HTTP_PORTS if not conf.torPort else (conf.torPort,))
+
+if port:
+conf.proxy = "http://%s:%d" % (LOCALHOST, port)
 else:
-errMsg = "can't establish connection with the Tor proxy. "
-errMsg += "Please make sure that you have Vidalia, Privoxy or "
-errMsg += "Polipo bundle installed for you to be able to "
-errMsg += "successfully use switch '--tor' "
+errMsg = "can't establish connection with the Tor HTTP proxy. "
+errMsg += "Please make sure that you have Tor (bundle) installed and setup "
+errMsg += "so you could be able to successfully use switch '--tor' "

 raise SqlmapConnectionException(errMsg)

@@ -2272,8 +2295,17 @@ def _setTorSocksProxySettings():
 infoMsg = "setting Tor SOCKS proxy settings"
 logger.info(infoMsg)

-# Has to be SOCKS5 to prevent DNS leaks (http://en.wikipedia.org/wiki/Tor_%28anonymity_network%29)
-socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if conf.torType == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, LOCALHOST, conf.torPort or DEFAULT_TOR_SOCKS_PORT)
+port = findLocalPort(DEFAULT_TOR_SOCKS_PORTS if not conf.torPort else (conf.torPort,))
+
+if not port:
+errMsg = "can't establish connection with the Tor SOCKS proxy. "
+errMsg += "Please make sure that you have Tor service installed and setup "
+errMsg += "so you could be able to successfully use switch '--tor' "
+
+raise SqlmapConnectionException(errMsg)
+
+# SOCKS5 to prevent DNS leaks (http://en.wikipedia.org/wiki/Tor_%28anonymity_network%29)
+socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if conf.torType == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, LOCALHOST, port)
 socks.wrapmodule(urllib2)

 def _checkWebSocket():
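Both Tor helpers above now delegate port discovery to findLocalPort(), whose implementation is outside this diff. Judging from the inline probing loop it replaces, it presumably does something along these lines (a hedged approximation, not sqlmap's actual code):

import socket

def find_local_port(ports, host="127.0.0.1"):
    # Return the first port in `ports` that accepts a TCP connection on
    # `host`, or None when none of the candidates is reachable.
    for port in ports:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.settimeout(1)
        try:
            s.connect((host, port))
            return port
        except socket.error:
            pass
        finally:
            s.close()
    return None

print(find_local_port((9050, 9150)))  # common Tor SOCKS ports; None if Tor is not running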
@@ -2298,7 +2330,7 @@ def _checkTor():
 page = None

 if not page or 'Congratulations' not in page:
-errMsg = "it seems that Tor is not properly set. Please try using options '--tor-type' and/or '--tor-port'"
+errMsg = "it appears that Tor is not properly set. Please try using options '--tor-type' and/or '--tor-port'"
 raise SqlmapConnectionException(errMsg)
 else:
 infoMsg = "Tor is properly being used"
@@ -2331,14 +2363,14 @@ def _basicOptionValidation():
 errMsg = "value for option '--first' (firstChar) must be smaller than or equal to value for --last (lastChar) option"
 raise SqlmapSyntaxException(errMsg)

-if isinstance(conf.cpuThrottle, int) and (conf.cpuThrottle > 100 or conf.cpuThrottle < 0):
-errMsg = "value for option '--cpu-throttle' (cpuThrottle) must be in range [0,100]"
-raise SqlmapSyntaxException(errMsg)
-
 if conf.textOnly and conf.nullConnection:
 errMsg = "switch '--text-only' is incompatible with switch '--null-connection'"
 raise SqlmapSyntaxException(errMsg)

+if conf.eta and conf.verbose > defaults.verbose:
+errMsg = "switch '--eta' is incompatible with option '-v'"
+raise SqlmapSyntaxException(errMsg)
+
 if conf.direct and conf.url:
 errMsg = "option '-d' is incompatible with option '-u' ('--url')"
 raise SqlmapSyntaxException(errMsg)
@@ -2355,6 +2387,10 @@ def _basicOptionValidation():
 errMsg = "switch '--dump' is incompatible with switch '--search'"
 raise SqlmapSyntaxException(errMsg)

+if conf.api and not conf.configFile:
+errMsg = "switch '--api' requires usage of option '-c'"
+raise SqlmapSyntaxException(errMsg)
+
 if conf.data and conf.nullConnection:
 errMsg = "option '--data' is incompatible with switch '--null-connection'"
 raise SqlmapSyntaxException(errMsg)
@@ -2386,14 +2422,14 @@ def _basicOptionValidation():
 if conf.regexp:
 try:
 re.compile(conf.regexp)
-except re.error, ex:
+except Exception, ex:
 errMsg = "invalid regular expression '%s' ('%s')" % (conf.regexp, getSafeExString(ex))
 raise SqlmapSyntaxException(errMsg)

 if conf.crawlExclude:
 try:
 re.compile(conf.crawlExclude)
-except re.error, ex:
+except Exception, ex:
 errMsg = "invalid regular expression '%s' ('%s')" % (conf.crawlExclude, getSafeExString(ex))
 raise SqlmapSyntaxException(errMsg)

@@ -2534,11 +2570,9 @@ def _resolveCrossReferences():
 lib.request.connect.setHTTPHandlers = _setHTTPHandlers
 lib.utils.search.setHTTPHandlers = _setHTTPHandlers
 lib.controller.checks.setVerbosity = setVerbosity
+lib.controller.checks.setWafFunctions = _setWafFunctions

 def initOptions(inputOptions=AttribDict(), overrideOptions=False):
-if IS_WIN:
-coloramainit()
-
 _setConfAttributes()
 _setKnowledgeBaseAttributes()
 _mergeOptions(inputOptions, overrideOptions)
@@ -2554,6 +2588,7 @@ def init():
 _saveConfig()
 _setRequestFromFile()
 _cleanupOptions()
+_cleanupEnvironment()
 _dirtyPatches()
 _purgeOutput()
 _checkDependencies()
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """

@@ -38,10 +38,13 @@ optDict = {
|
|||||||
"authType": "string",
|
"authType": "string",
|
||||||
"authCred": "string",
|
"authCred": "string",
|
||||||
"authFile": "string",
|
"authFile": "string",
|
||||||
|
"ignore401": "boolean",
|
||||||
|
"ignoreProxy": "boolean",
|
||||||
|
"ignoreRedirects": "boolean",
|
||||||
|
"ignoreTimeouts": "boolean",
|
||||||
"proxy": "string",
|
"proxy": "string",
|
||||||
"proxyCred": "string",
|
"proxyCred": "string",
|
||||||
"proxyFile": "string",
|
"proxyFile": "string",
|
||||||
"ignoreProxy": "boolean",
|
|
||||||
"tor": "boolean",
|
"tor": "boolean",
|
||||||
"torPort": "integer",
|
"torPort": "integer",
|
||||||
"torType": "string",
|
"torType": "string",
|
||||||
@@ -74,7 +77,8 @@ optDict = {
|
|||||||
"testParameter": "string",
|
"testParameter": "string",
|
||||||
"skip": "string",
|
"skip": "string",
|
||||||
"skipStatic": "boolean",
|
"skipStatic": "boolean",
|
||||||
"dbms": "string",
|
"skip": "string",
|
||||||
|
"paramExclude": "string",
|
||||||
"dbmsCred": "string",
|
"dbmsCred": "string",
|
||||||
"os": "string",
|
"os": "string",
|
||||||
"invalidBignum": "boolean",
|
"invalidBignum": "boolean",
|
||||||
@@ -104,7 +108,7 @@ optDict = {
|
|||||||
"uCols": "string",
|
"uCols": "string",
|
||||||
"uChar": "string",
|
"uChar": "string",
|
||||||
"uFrom": "string",
|
"uFrom": "string",
|
||||||
"dnsName": "string",
|
"dnsDomain": "string",
|
||||||
"secondOrder": "string",
|
"secondOrder": "string",
|
||||||
},
|
},
|
||||||
|
|
||||||
@@ -136,6 +140,7 @@ optDict = {
|
|||||||
"tbl": "string",
|
"tbl": "string",
|
||||||
"col": "string",
|
"col": "string",
|
||||||
"excludeCol": "string",
|
"excludeCol": "string",
|
||||||
|
"pivotColumn": "string",
|
||||||
"dumpWhere": "string",
|
"dumpWhere": "string",
|
||||||
"user": "string",
|
"user": "string",
|
||||||
"excludeSysDbs": "boolean",
|
"excludeSysDbs": "boolean",
|
||||||
@@ -189,7 +194,9 @@ optDict = {
|
|||||||
#"xmlFile": "string",
|
#"xmlFile": "string",
|
||||||
"trafficFile": "string",
|
"trafficFile": "string",
|
||||||
"batch": "boolean",
|
"batch": "boolean",
|
||||||
|
"binaryFields": "string",
|
||||||
"charset": "string",
|
"charset": "string",
|
||||||
|
"checkInternet": "boolean",
|
||||||
"crawlDepth": "integer",
|
"crawlDepth": "integer",
|
||||||
"crawlExclude": "string",
|
"crawlExclude": "string",
|
||||||
"csvDel": "string",
|
"csvDel": "string",
|
||||||
@@ -201,7 +208,6 @@ optDict = {
|
|||||||
"hexConvert": "boolean",
|
"hexConvert": "boolean",
|
||||||
"outputDir": "string",
|
"outputDir": "string",
|
||||||
"parseErrors": "boolean",
|
"parseErrors": "boolean",
|
||||||
"pivotColumn": "string",
|
|
||||||
"saveConfig": "string",
|
"saveConfig": "string",
|
||||||
"scope": "string",
|
"scope": "string",
|
||||||
"testFilter": "string",
|
"testFilter": "string",
|
||||||
@@ -217,27 +223,31 @@ optDict = {
         "dependencies": "boolean",
         "disableColoring": "boolean",
         "googlePage": "integer",
+        "identifyWaf": "boolean",
         "mobile": "boolean",
         "offline": "boolean",
-        "pageRank": "boolean",
         "purgeOutput": "boolean",
+        "skipWaf": "boolean",
         "smart": "boolean",
+        "tmpDir": "string",
+        "webRoot": "string",
         "wizard": "boolean",
         "verbose": "integer",
     },
     "Hidden": {
         "dummy": "boolean",
         "disablePrecon": "boolean",
-        "binaryFields": "string",
         "profile": "boolean",
-        "cpuThrottle": "integer",
         "forceDns": "boolean",
-        "identifyWaf": "boolean",
-        "skipWaf": "boolean",
-        "ignore401": "boolean",
+        "murphyRate": "integer",
         "smokeTest": "boolean",
         "liveTest": "boolean",
         "stopFail": "boolean",
         "runCase": "string",
+    },
+    "API": {
+        "api": "boolean",
+        "taskid": "string",
+        "database": "string",
     }
 }
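The optDict structure above simply maps each option name to a coarse data type per section. A minimal sketch of how such a name-to-type map can be used to coerce parsed command-line values (the helper and the trimmed-down dictionary below are illustrative only, not sqlmap's own API):

    # Hypothetical helper mirroring the option-name -> type layout of optDict
    OPTION_TYPES = {"batch": "boolean", "crawlDepth": "integer", "dbms": "string"}

    def coerce_option(name, value):
        kind = OPTION_TYPES.get(name, "string")
        if kind == "boolean":
            return str(value).lower() in ("1", "true", "yes", "y")
        if kind == "integer":
            return int(value)
        return str(value)

    print(coerce_option("crawlDepth", "2"))   # 2
    print(coerce_option("batch", "true"))     # True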
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -75,6 +75,11 @@ def profile(profileOutputFile=None, dotOutputFile=None, imageOutputFile=None):
     # Create graph image (png) by using pydot (python-pydot)
     # http://code.google.com/p/pydot/
     pydotGraph = pydot.graph_from_dot_file(dotOutputFile)
+
+    # Reference: http://stackoverflow.com/questions/38176472/graph-write-pdfiris-pdf-attributeerror-list-object-has-no-attribute-writ
+    if isinstance(pydotGraph, list):
+        pydotGraph = pydotGraph[0]
+
     pydotGraph.write_png(imageOutputFile)
 
     infoMsg = "displaying interactive graph with xdot library"
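The added isinstance() guard accounts for newer pydot releases, where graph_from_dot_file() returns a list of graphs instead of a single graph object (see the Stack Overflow reference in the hunk). A minimal standalone sketch of the same guard; the file names are placeholders and python-pydot plus Graphviz are assumed to be installed:

    import pydot

    # Single graph object in old pydot releases, list of graphs in newer ones
    result = pydot.graph_from_dot_file("profile.dot")
    graph = result[0] if isinstance(result, list) else result
    graph.write_png("profile.png")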
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -10,6 +10,7 @@ import sqlite3
 from extra.safe2bin.safe2bin import safechardecode
 from lib.core.common import getSafeExString
 from lib.core.common import unsafeSQLIdentificatorNaming
+from lib.core.exception import SqlmapConnectionException
 from lib.core.exception import SqlmapGenericException
 from lib.core.exception import SqlmapValueException
 from lib.core.settings import UNICODE_ENCODING
@@ -21,10 +22,15 @@ class Replication(object):
     """
 
     def __init__(self, dbpath):
-        self.dbpath = dbpath
-        self.connection = sqlite3.connect(dbpath)
-        self.connection.isolation_level = None
-        self.cursor = self.connection.cursor()
+        try:
+            self.dbpath = dbpath
+            self.connection = sqlite3.connect(dbpath)
+            self.connection.isolation_level = None
+            self.cursor = self.connection.cursor()
+        except sqlite3.OperationalError, ex:
+            errMsg = "error occurred while opening a replication "
+            errMsg += "file '%s' ('%s')" % (self.filepath, getSafeExString(ex))
+            raise SqlmapConnectionException(errMsg)
 
 class DataType:
     """
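The rewritten constructor wraps the SQLite connection setup so that a broken or unwritable replication file surfaces as a sqlmap connection error rather than a raw traceback. A standalone sketch of the same idea (function name and the generic RuntimeError stand in for sqlmap's own exception class):

    import sqlite3

    def open_replication_db(dbpath):
        try:
            connection = sqlite3.connect(dbpath)
            connection.isolation_level = None
            return connection, connection.cursor()
        except sqlite3.OperationalError as ex:
            # Re-raise with a readable message instead of the bare sqlite3 error
            raise RuntimeError("error occurred while opening a replication file '%s' ('%s')" % (dbpath, ex))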
@@ -1,15 +1,13 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
 import os
 import re
-
-from subprocess import PIPE
-from subprocess import Popen as execute
+import subprocess
 
 def getRevisionNumber():
     """
@@ -46,7 +44,7 @@ def getRevisionNumber():
             break
 
     if not retVal:
-        process = execute("git rev-parse --verify HEAD", shell=True, stdout=PIPE, stderr=PIPE)
+        process = subprocess.Popen("git rev-parse --verify HEAD", shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
         stdout, _ = process.communicate()
         match = re.search(r"(?i)[0-9a-f]{32}", stdout or "")
         retVal = match.group(0) if match else None
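The fallback now calls subprocess.Popen directly instead of the old "Popen as execute" alias. A minimal standalone equivalent, assuming it is run inside a git checkout (the decode step is only there to keep the sketch working on both Python 2 and 3):

    import re
    import subprocess

    process = subprocess.Popen("git rev-parse --verify HEAD", shell=True,
                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, _ = process.communicate()
    # Mirror the 32-hex-digit match used above (a prefix of the 40-char SHA-1)
    match = re.search(r"(?i)[0-9a-f]{32}", stdout.decode("ascii", "replace") if stdout else "")
    revision = match.group(0) if match else None
    print(revision)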
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -32,6 +32,8 @@ def setDbms(dbms):
         dbms = _.group(1)
 
     Backend.setDbms(dbms)
+    if kb.resolutionDbms:
+        hashDBWrite(HASHDB_KEYS.DBMS, kb.resolutionDbms)
 
     logger.info("the back-end DBMS is %s" % Backend.getDbms())
 
lib/core/settings.py  (252)  Normal file → Executable file
@@ -1,29 +1,28 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
 import os
+import random
 import re
 import subprocess
 import string
 import sys
-import time
 import types
 
 from lib.core.datatype import AttribDict
 from lib.core.enums import DBMS
 from lib.core.enums import DBMS_DIRECTORY_NAME
 from lib.core.enums import OS
-from lib.core.revision import getRevisionNumber
 
 # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
-VERSION = "1.0.4.0"
-REVISION = getRevisionNumber()
-STABLE = VERSION.count('.') <= 2
-VERSION_STRING = "sqlmap/%s#%s" % (VERSION, "stable" if STABLE else "dev")
+VERSION = "1.1.6.0"
+TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
+TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
+VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
 DESCRIPTION = "automatic SQL injection and database takeover tool"
 SITE = "http://sqlmap.org"
 ISSUES_PAGE = "https://github.com/sqlmapproject/sqlmap/issues/new"
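The REVISION/STABLE pair is replaced by a TYPE string derived from the version number itself (a trailing non-zero component marks a "dev" build) plus a TYPE_COLORS lookup for the banner. A small self-contained sketch of that derivation:

    VERSION = "1.1.6.0"

    TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
    VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)

    print(VERSION_STRING)   # sqlmap/1.1.6#stable  (a value such as "1.1.6.12" would yield sqlmap/1.1.6.12#dev)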
@@ -31,21 +30,23 @@ GIT_REPOSITORY = "git://github.com/sqlmapproject/sqlmap.git"
 GIT_PAGE = "https://github.com/sqlmapproject/sqlmap"
 
 # colorful banner
-BANNER = """\033[01;33m    _
- ___ ___| |_____ ___ ___  \033[01;37m{\033[01;%dm%s\033[01;37m}\033[01;33m
-|_ -| . |   |     | .'| . |
-|___|_  |_|_|_|_|__,|  _|
-      |_|           |_|   \033[0m\033[4;37m%s\033[0m\n
-""" % ((31 + hash(VERSION) % 6) if not STABLE else 30, VERSION_STRING.split('/')[-1], SITE)
+BANNER = """\033[01;33m\
+        ___
+       __H__
+ ___ ___[.]_____ ___ ___  \033[01;37m{\033[01;%dm%s\033[01;37m}\033[01;33m
+|_ -| . [.]     | .'| . |
+|___|_  [.]_|_|_|__,|  _|
+      |_|V          |_|   \033[0m\033[4;37m%s\033[0m\n
+""" % (TYPE_COLORS.get(TYPE, 31), VERSION_STRING.split('/')[-1], SITE)
 
 # Minimum distance of ratio from kb.matchRatio to result in True
 DIFF_TOLERANCE = 0.05
 CONSTANT_RATIO = 0.9
 
-# Ratio used in heuristic check for WAF/IDS/IPS protected targets
+# Ratio used in heuristic check for WAF/IPS/IDS protected targets
 IDS_WAF_CHECK_RATIO = 0.5
 
-# Timeout used in heuristic check for WAF/IDS/IPS protected targets
+# Timeout used in heuristic check for WAF/IPS/IDS protected targets
 IDS_WAF_CHECK_TIMEOUT = 10
 
 # Lower and upper values for match ratio in case of stable page
@@ -61,14 +62,19 @@ PARTIAL_HEX_VALUE_MARKER = "__PARTIAL_HEX_VALUE__"
 URI_QUESTION_MARKER = "__QUESTION_MARK__"
 ASTERISK_MARKER = "__ASTERISK_MARK__"
 REPLACEMENT_MARKER = "__REPLACEMENT_MARK__"
+BOUNDED_INJECTION_MARKER = "__BOUNDED_INJECTION_MARK__"
 
 RANDOM_INTEGER_MARKER = "[RANDINT]"
 RANDOM_STRING_MARKER = "[RANDSTR]"
+SLEEP_TIME_MARKER = "[SLEEPTIME]"
 
 PAYLOAD_DELIMITER = "__PAYLOAD_DELIMITER__"
 CHAR_INFERENCE_MARK = "%c"
 PRINTABLE_CHAR_REGEX = r"[^\x00-\x1f\x7f-\xff]"
 
+# Regular expression used for extraction of table names (useful for (e.g.) MsAccess)
+SELECT_FROM_TABLE_REGEX = r"\bSELECT .+? FROM (?P<result>([\w.]|`[^`<>]+`)+)"
+
 # Regular expression used for recognition of textual content-type
 TEXT_CONTENT_TYPE_REGEX = r"(?i)(text|form|message|xml|javascript|ecmascript|json)"
 
@@ -78,6 +84,15 @@ PERMISSION_DENIED_REGEX = r"(command|permission|access)\s*(was|is)?\s*denied"
 # Regular expression used for recognition of generic maximum connection messages
 MAX_CONNECTIONS_REGEX = r"max.+connections"
 
+# Maximum consecutive connection errors before asking the user if he wants to continue
+MAX_CONSECUTIVE_CONNECTION_ERRORS = 15
+
+# Timeout before the pre-connection candidate is being disposed (because of high probability that the web server will reset it)
+PRECONNECT_CANDIDATE_TIMEOUT = 10
+
+# Maximum sleep time in "Murphy" (testing) mode
+MAX_MURPHY_SLEEP_TIME = 3
+
 # Regular expression used for extracting results from Google search
 GOOGLE_REGEX = r"webcache\.googleusercontent\.com/search\?q=cache:[^:]+:([^+]+)\+&cd=|url\?\w+=((?![^>]+webcache\.googleusercontent\.com)http[^>]+)&(sa=U|rct=j)"
 
@@ -88,13 +103,13 @@ DUCKDUCKGO_REGEX = r'"u":"([^"]+)'
 DISCONNECT_SEARCH_REGEX = r'<p class="url wrapword">([^<]+)</p>'
 
 # Dummy user agent for search (if default one returns different results)
-DUMMY_SEARCH_USER_AGENT = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:40.0) Gecko/20100101 Firefox/40.0"
+DUMMY_SEARCH_USER_AGENT = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:49.0) Gecko/20100101 Firefox/49.0"
 
 # Regular expression used for extracting content from "textual" tags
 TEXT_TAG_REGEX = r"(?si)<(abbr|acronym|b|blockquote|br|center|cite|code|dt|em|font|h\d|i|li|p|pre|q|strong|sub|sup|td|th|title|tt|u)(?!\w).*?>(?P<result>[^<]+)"
 
 # Regular expression used for recognition of IP addresses
-IP_ADDRESS_REGEX = r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b"
+IP_ADDRESS_REGEX = r"\b(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\b"
 
 # Regular expression used for recognition of generic "your ip has been blocked" messages
 BLOCKED_IP_REGEX = r"(?i)(\A|\b)ip\b.*\b(banned|blocked|block list|firewall)"
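The tightened IP_ADDRESS_REGEX constrains every octet to the 0-255 range, so strings such as "999.999.999.999" no longer count as IP addresses. A quick comparison of the old and new patterns:

    import re

    OLD_IP_ADDRESS_REGEX = r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b"
    NEW_IP_ADDRESS_REGEX = r"\b(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\b"

    sample = "connection from 999.999.999.999 and 10.0.0.254"
    print(re.findall(OLD_IP_ADDRESS_REGEX, sample))                          # ['999.999.999.999', '10.0.0.254']
    print([m.group(0) for m in re.finditer(NEW_IP_ADDRESS_REGEX, sample)])   # ['10.0.0.254']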
@@ -122,7 +137,7 @@ UNION_STDEV_COEFF = 7
 TIME_DELAY_CANDIDATES = 3
 
 # Default value for HTTP Accept header
-HTTP_ACCEPT_HEADER_VALUE = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"
+HTTP_ACCEPT_HEADER_VALUE = "*/*"
 
 # Default value for HTTP Accept-Encoding header
 HTTP_ACCEPT_ENCODING_HEADER_VALUE = "gzip,deflate"
@@ -130,6 +145,9 @@ HTTP_ACCEPT_ENCODING_HEADER_VALUE = "gzip,deflate"
 # Default timeout for running commands over backdoor
 BACKDOOR_RUN_CMD_TIMEOUT = 5
 
+# Number of seconds to wait for thread finalization at program end
+THREAD_FINALIZATION_TIMEOUT = 1
+
 # Maximum number of techniques used in inject.py/getValue() per one value
 MAX_TECHNIQUES_PER_VALUE = 2
 
@@ -190,26 +208,20 @@ PYVERSION = sys.version.split()[0]
 
 # DBMS system databases
 MSSQL_SYSTEM_DBS = ("Northwind", "master", "model", "msdb", "pubs", "tempdb")
-MYSQL_SYSTEM_DBS = ("information_schema", "mysql")  # Before MySQL 5.0 only "mysql"
-PGSQL_SYSTEM_DBS = ("information_schema", "pg_catalog", "pg_toast")
-ORACLE_SYSTEM_DBS = ("CTXSYS", "DBSNMP", "DMSYS", "EXFSYS", "MDSYS", "OLAPSYS", "ORDSYS", "OUTLN", "SYS", "SYSAUX", "SYSMAN", "SYSTEM", "TSMSYS", "WMSYS", "XDB")  # These are TABLESPACE_NAME
+MYSQL_SYSTEM_DBS = ("information_schema", "mysql", "performance_schema")
+PGSQL_SYSTEM_DBS = ("information_schema", "pg_catalog", "pg_toast", "pgagent")
+ORACLE_SYSTEM_DBS = ("ANONYMOUS", "APEX_PUBLIC_USER", "CTXSYS", "DBSNMP", "DIP", "EXFSYS", "FLOWS_%", "FLOWS_FILES", "LBACSYS", "MDDATA", "MDSYS", "MGMT_VIEW", "OLAPSYS", "ORACLE_OCM", "ORDDATA", "ORDPLUGINS", "ORDSYS", "OUTLN", "OWBSYS", "SI_INFORMTN_SCHEMA", "SPATIAL_CSW_ADMIN_USR", "SPATIAL_WFS_ADMIN_USR", "SYS", "SYSMAN", "SYSTEM", "WKPROXY", "WKSYS", "WK_TEST", "WMSYS", "XDB", "XS$NULL")  # Reference: https://blog.vishalgupta.com/2011/06/19/predefined-oracle-system-schemas/
 SQLITE_SYSTEM_DBS = ("sqlite_master", "sqlite_temp_master")
-ACCESS_SYSTEM_DBS = ("MSysAccessObjects", "MSysACEs", "MSysObjects", "MSysQueries", "MSysRelationships", "MSysAccessStorage",\
-                     "MSysAccessXML", "MSysModules", "MSysModules2")
-FIREBIRD_SYSTEM_DBS = ("RDB$BACKUP_HISTORY", "RDB$CHARACTER_SETS", "RDB$CHECK_CONSTRAINTS", "RDB$COLLATIONS", "RDB$DATABASE",\
-                       "RDB$DEPENDENCIES", "RDB$EXCEPTIONS", "RDB$FIELDS", "RDB$FIELD_DIMENSIONS", " RDB$FILES", "RDB$FILTERS",\
-                       "RDB$FORMATS", "RDB$FUNCTIONS", "RDB$FUNCTION_ARGUMENTS", "RDB$GENERATORS", "RDB$INDEX_SEGMENTS", "RDB$INDICES",\
-                       "RDB$LOG_FILES", "RDB$PAGES", "RDB$PROCEDURES", "RDB$PROCEDURE_PARAMETERS", "RDB$REF_CONSTRAINTS", "RDB$RELATIONS",\
-                       "RDB$RELATION_CONSTRAINTS", "RDB$RELATION_FIELDS", "RDB$ROLES", "RDB$SECURITY_CLASSES", "RDB$TRANSACTIONS", "RDB$TRIGGERS",\
-                       "RDB$TRIGGER_MESSAGES", "RDB$TYPES", "RDB$USER_PRIVILEGES", "RDB$VIEW_RELATIONS")
+ACCESS_SYSTEM_DBS = ("MSysAccessObjects", "MSysACEs", "MSysObjects", "MSysQueries", "MSysRelationships", "MSysAccessStorage", "MSysAccessXML", "MSysModules", "MSysModules2")
+FIREBIRD_SYSTEM_DBS = ("RDB$BACKUP_HISTORY", "RDB$CHARACTER_SETS", "RDB$CHECK_CONSTRAINTS", "RDB$COLLATIONS", "RDB$DATABASE", "RDB$DEPENDENCIES", "RDB$EXCEPTIONS", "RDB$FIELDS", "RDB$FIELD_DIMENSIONS", " RDB$FILES", "RDB$FILTERS", "RDB$FORMATS", "RDB$FUNCTIONS", "RDB$FUNCTION_ARGUMENTS", "RDB$GENERATORS", "RDB$INDEX_SEGMENTS", "RDB$INDICES", "RDB$LOG_FILES", "RDB$PAGES", "RDB$PROCEDURES", "RDB$PROCEDURE_PARAMETERS", "RDB$REF_CONSTRAINTS", "RDB$RELATIONS", "RDB$RELATION_CONSTRAINTS", "RDB$RELATION_FIELDS", "RDB$ROLES", "RDB$SECURITY_CLASSES", "RDB$TRANSACTIONS", "RDB$TRIGGERS", "RDB$TRIGGER_MESSAGES", "RDB$TYPES", "RDB$USER_PRIVILEGES", "RDB$VIEW_RELATIONS")
 MAXDB_SYSTEM_DBS = ("SYSINFO", "DOMAIN")
 SYBASE_SYSTEM_DBS = ("master", "model", "sybsystemdb", "sybsystemprocs")
-DB2_SYSTEM_DBS = ("NULLID", "SQLJ", "SYSCAT", "SYSFUN", "SYSIBM", "SYSIBMADM", "SYSIBMINTERNAL", "SYSIBMTS",\
-                  "SYSPROC", "SYSPUBLIC", "SYSSTAT", "SYSTOOLS")
+DB2_SYSTEM_DBS = ("NULLID", "SQLJ", "SYSCAT", "SYSFUN", "SYSIBM", "SYSIBMADM", "SYSIBMINTERNAL", "SYSIBMTS", "SYSPROC", "SYSPUBLIC", "SYSSTAT", "SYSTOOLS")
 HSQLDB_SYSTEM_DBS = ("INFORMATION_SCHEMA", "SYSTEM_LOB")
+INFORMIX_SYSTEM_DBS = ("sysmaster", "sysutils", "sysuser", "sysadmin")
 
 MSSQL_ALIASES = ("microsoft sql server", "mssqlserver", "mssql", "ms")
-MYSQL_ALIASES = ("mysql", "my")
+MYSQL_ALIASES = ("mysql", "my", "mariadb", "maria")
 PGSQL_ALIASES = ("postgresql", "postgres", "pgsql", "psql", "pg")
 ORACLE_ALIASES = ("oracle", "orcl", "ora", "or")
 SQLITE_ALIASES = ("sqlite", "sqlite3")
@@ -219,10 +231,11 @@ MAXDB_ALIASES = ("maxdb", "sap maxdb", "sap db")
 SYBASE_ALIASES = ("sybase", "sybase sql server")
 DB2_ALIASES = ("db2", "ibm db2", "ibmdb2")
 HSQLDB_ALIASES = ("hsql", "hsqldb", "hs", "hypersql")
+INFORMIX_ALIASES = ("informix", "ibm informix", "ibminformix")
 
 DBMS_DIRECTORY_DICT = dict((getattr(DBMS, _), getattr(DBMS_DIRECTORY_NAME, _)) for _ in dir(DBMS) if not _.startswith("_"))
 
-SUPPORTED_DBMS = MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES + HSQLDB_ALIASES
+SUPPORTED_DBMS = MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES + HSQLDB_ALIASES + INFORMIX_ALIASES
 SUPPORTED_OS = ("linux", "windows")
 
 DBMS_ALIASES = ((DBMS.MSSQL, MSSQL_ALIASES), (DBMS.MYSQL, MYSQL_ALIASES), (DBMS.PGSQL, PGSQL_ALIASES), (DBMS.ORACLE, ORACLE_ALIASES), (DBMS.SQLITE, SQLITE_ALIASES), (DBMS.ACCESS, ACCESS_ALIASES), (DBMS.FIREBIRD, FIREBIRD_ALIASES), (DBMS.MAXDB, MAXDB_ALIASES), (DBMS.SYBASE, SYBASE_ALIASES), (DBMS.DB2, DB2_ALIASES), (DBMS.HSQLDB, HSQLDB_ALIASES))
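The alias tuples let user input such as "maria" or "psql" resolve to a canonical backend, which is how the new MariaDB and Informix spellings become usable with --dbms. A small sketch of that lookup; the helper and canonical labels here are illustrative, not sqlmap's own API:

    DBMS_ALIAS_MAP = {
        "MySQL": ("mysql", "my", "mariadb", "maria"),
        "PostgreSQL": ("postgresql", "postgres", "pgsql", "psql", "pg"),
        "Microsoft SQL Server": ("microsoft sql server", "mssqlserver", "mssql", "ms"),
    }

    def resolve_dbms(name):
        name = (name or "").strip().lower()
        for canonical, aliases in DBMS_ALIAS_MAP.items():
            if name in aliases:
                return canonical
        return None

    print(resolve_dbms("maria"))   # MySQL
    print(resolve_dbms("psql"))    # PostgreSQL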
@@ -238,39 +251,39 @@ WINDOWS_RESERVED_NAMES = ("CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "C
 
 # Items displayed in basic help (-h) output
 BASIC_HELP_ITEMS = (
     "url",
     "googleDork",
     "data",
     "cookie",
     "randomAgent",
     "proxy",
     "testParameter",
     "dbms",
     "level",
     "risk",
     "tech",
     "getAll",
     "getBanner",
     "getCurrentUser",
     "getCurrentDb",
     "getPasswordHashes",
     "getTables",
     "getColumns",
     "getSchema",
     "dumpTable",
     "dumpAll",
     "db",
     "tbl",
     "col",
     "osShell",
     "osPwn",
     "batch",
     "checkTor",
     "flushSession",
     "tor",
     "sqlmapShell",
     "wizard",
 )
 
 # String representation for NULL value
 NULL = "NULL"
@@ -281,13 +294,19 @@ BLANK = "<blank>"
 # String representation for current database
 CURRENT_DB = "CD"
 
+# Regular expressions used for finding file paths in error messages
+FILE_PATH_REGEXES = (r" in (file )?<b>(?P<result>.*?)</b> on line \d+", r"in (?P<result>[^<>]+?) on line \d+", r"(?:[>(\[\s])(?P<result>[A-Za-z]:[\\/][\w. \\/-]*)", r"(?:[>(\[\s])(?P<result>/\w[/\w.-]+)", r"href=['\"]file://(?P<result>/[^'\"]+)")
+
 # Regular expressions used for parsing error messages (--parse-errors)
 ERROR_PARSING_REGEXES = (
     r"<b>[^<]*(fatal|error|warning|exception)[^<]*</b>:?\s*(?P<result>.+?)<br\s*/?\s*>",
-    r"(?m)^(fatal|error|warning|exception):?\s*(?P<result>.+?)$",
+    r"(?m)^(fatal|error|warning|exception):?\s*(?P<result>[^\n]+?)$",
+    r"(?P<result>[^\n>]*SQL Syntax[^\n<]+)",
     r"<li>Error Type:<br>(?P<result>.+?)</li>",
+    r"CDbCommand (?P<result>[^<>\n]*SQL[^<>\n]+)",
     r"error '[0-9a-f]{8}'((<[^>]+>)|\s)+(?P<result>[^<>]+)",
+    r"\[[^\n\]]+(ODBC|JDBC)[^\n\]]+\](\[[^\]]+\])?(?P<result>[^\n]+(in query expression|\(SQL| at /[^ ]+pdo)[^\n<]+)"
 )
 
 # Regular expression used for parsing charset info from meta html headers
 META_CHARSET_REGEX = r'(?si)<head>.*<meta[^>]+charset="?(?P<result>[^"> ]+).*</head>'
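These patterns back the --parse-errors switch: each one lifts the DBMS error text out of a differently formatted page. The snippet below only demonstrates the first pattern against a made-up PHP error fragment; the exact flags and iteration order sqlmap uses internally may differ:

    import re

    ERROR_PARSING_REGEX = r"<b>[^<]*(fatal|error|warning|exception)[^<]*</b>:?\s*(?P<result>.+?)<br\s*/?\s*>"

    page = "<b>Fatal error</b>: Uncaught exception 'PDOException' with message 'SQLSTATE[42000]: Syntax error'<br/>"
    match = re.search(ERROR_PARSING_REGEX, page)
    if match:
        print(match.group("result"))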
@@ -313,9 +332,6 @@ BURP_REQUEST_REGEX = r"={10,}\s+[^=]+={10,}\s(.+?)\s={10,}"
 # Regex used for parsing XML Burp saved history items
 BURP_XML_HISTORY_REGEX = r'<port>(\d+)</port>.+?<request base64="true"><!\[CDATA\[([^]]+)'
 
-# Server header in CloudFlare responses
-CLOUDFLARE_SERVER_HEADER = "cloudflare-nginx"
-
 # Encoding used for Unicode data
 UNICODE_ENCODING = "utf8"
 
@@ -328,6 +344,9 @@ URI_INJECTABLE_REGEX = r"//[^/]*/([^\.*?]+)\Z"
 # Regex used for masking sensitive data
 SENSITIVE_DATA_REGEX = "(\s|=)(?P<result>[^\s=]*%s[^\s]*)\s"
 
+# Options to explicitly mask in anonymous (unhandled exception) reports (along with anything carrying the <hostname> inside)
+SENSITIVE_OPTIONS = ("hostname", "data", "dnsDomain", "googleDork", "authCred", "proxyCred", "tbl", "db", "col", "user", "cookie", "proxy", "rFile", "wFile", "dFile", "testParameter", "authCred")
+
 # Maximum number of threads (avoiding connection issues and/or DoS)
 MAX_NUMBER_OF_THREADS = 10
 
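SENSITIVE_DATA_REGEX is a template: the %s placeholder receives each sensitive value before substitution, so the whole token carrying that value can be blanked out of a report. A rough sketch of masking a saved command line with it (the mask() helper is illustrative; sqlmap's own masking routine lives elsewhere and may behave differently):

    import re

    SENSITIVE_DATA_REGEX = "(\s|=)(?P<result>[^\s=]*%s[^\s]*)\s"

    def mask(command_line, secret):
        regex = SENSITIVE_DATA_REGEX % re.escape(secret)
        return re.sub(regex, lambda m: m.group(0).replace(m.group("result"), '*' * 8), command_line)

    print(mask("sqlmap.py -u http://target/?id=1 --auth-cred=admin:s3cret --batch", "s3cret"))
    # sqlmap.py -u http://target/?id=1 --auth-cred=******** --batch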
@@ -340,6 +359,9 @@ MIN_RATIO = 0.0
 # Maximum value for comparison ratio
 MAX_RATIO = 1.0
 
+# Minimum length of sentence for automatic choosing of --string (in case of high matching ratio)
+CANDIDATE_SENTENCE_MIN_LENGTH = 10
+
 # Character used for marking injectable position inside provided data
 CUSTOM_INJECTION_MARK_CHAR = '*'
 
@@ -364,6 +386,9 @@ REFLECTED_BORDER_REGEX = r"[^A-Za-z]+"
 # Regular expression used for replacing non-alphanum characters
 REFLECTED_REPLACEMENT_REGEX = r".+"
 
+# Maximum time (in seconds) spent per reflective value(s) replacement
+REFLECTED_REPLACEMENT_TIMEOUT = 3
+
 # Maximum number of alpha-numerical parts in reflected regex (for speed purposes)
 REFLECTED_MAX_REGEX_PARTS = 10
 
@@ -383,10 +408,10 @@ HASH_MOD_ITEM_DISPLAY = 11
 MAX_INT = sys.maxint
 
 # Options that need to be restored in multiple targets run mode
-RESTORE_MERGED_OPTIONS = ("col", "db", "dnsName", "privEsc", "tbl", "regexp", "string", "textOnly", "threads", "timeSec", "tmpPath", "uChar", "user")
+RESTORE_MERGED_OPTIONS = ("col", "db", "dnsDomain", "privEsc", "tbl", "regexp", "string", "textOnly", "threads", "timeSec", "tmpPath", "uChar", "user")
 
 # Parameters to be ignored in detection phase (upper case)
-IGNORE_PARAMETERS = ("__VIEWSTATE", "__VIEWSTATEENCRYPTED", "__EVENTARGUMENT", "__EVENTTARGET", "__EVENTVALIDATION", "ASPSESSIONID", "ASP.NET_SESSIONID", "JSESSIONID", "CFID", "CFTOKEN")
+IGNORE_PARAMETERS = ("__VIEWSTATE", "__VIEWSTATEENCRYPTED", "__VIEWSTATEGENERATOR", "__EVENTARGUMENT", "__EVENTTARGET", "__EVENTVALIDATION", "ASPSESSIONID", "ASP.NET_SESSIONID", "JSESSIONID", "CFID", "CFTOKEN")
 
 # Regular expression used for recognition of ASP.NET control parameters
 ASP_NET_CONTROL_REGEX = r"(?i)\Actl\d+\$"
@@ -415,10 +440,10 @@ IGNORE_SAVE_OPTIONS = ("saveConfig",)
 # IP address of the localhost
 LOCALHOST = "127.0.0.1"
 
-# Default port used by Tor
-DEFAULT_TOR_SOCKS_PORT = 9050
+# Default SOCKS ports used by Tor
+DEFAULT_TOR_SOCKS_PORTS = (9050, 9150)
 
-# Default ports used in Tor proxy bundles
+# Default HTTP ports used by Tor
 DEFAULT_TOR_HTTP_PORTS = (8123, 8118)
 
 # Percentage below which comparison engine could have problems
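With two default SOCKS ports (9050 for the system Tor service, 9150 for the Tor Browser bundle), a client has to probe which one is actually listening. A minimal sketch of that probe; the helper name is illustrative and sqlmap's own Tor handling is more involved:

    import socket

    DEFAULT_TOR_SOCKS_PORTS = (9050, 9150)

    def find_tor_socks_port(host="127.0.0.1", timeout=1.0):
        for port in DEFAULT_TOR_SOCKS_PORTS:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.settimeout(timeout)
            try:
                s.connect((host, port))
                return port
            except socket.error:
                pass
            finally:
                s.close()
        return None

    print(find_tor_socks_port())   # 9050, 9150 or None if Tor is not running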
@@ -448,7 +473,7 @@ DUMMY_SQL_INJECTION_CHARS = ";()'"
 DUMMY_USER_INJECTION = r"(?i)[^\w](AND|OR)\s+[^\s]+[=><]|\bUNION\b.+\bSELECT\b|\bSELECT\b.+\bFROM\b|\b(CONCAT|information_schema|SLEEP|DELAY)\b"
 
 # Extensions skipped by crawler
-CRAWL_EXCLUDE_EXTENSIONS = ("gif", "jpg", "jpeg", "image", "jar", "tif", "bmp", "war", "ear", "mpg", "mpeg", "wmv", "mpeg", "scm", "iso", "dmp", "dll", "cab", "so", "avi", "mkv", "bin", "iso", "tar", "png", "pdf", "ps", "wav", "mp3", "mp4", "au", "aiff", "aac", "zip", "rar", "7z", "gz", "flv", "mov", "doc", "docx", "xls", "dot", "dotx", "xlt", "xlsx", "ppt", "pps", "pptx")
+CRAWL_EXCLUDE_EXTENSIONS = ("3ds", "3g2", "3gp", "7z", "DS_Store", "a", "aac", "adp", "ai", "aif", "aiff", "apk", "ar", "asf", "au", "avi", "bak", "bin", "bk", "bmp", "btif", "bz2", "cab", "caf", "cgm", "cmx", "cpio", "cr2", "dat", "deb", "djvu", "dll", "dmg", "dmp", "dng", "doc", "docx", "dot", "dotx", "dra", "dsk", "dts", "dtshd", "dvb", "dwg", "dxf", "ear", "ecelp4800", "ecelp7470", "ecelp9600", "egg", "eol", "eot", "epub", "exe", "f4v", "fbs", "fh", "fla", "flac", "fli", "flv", "fpx", "fst", "fvt", "g3", "gif", "gz", "h261", "h263", "h264", "ico", "ief", "image", "img", "ipa", "iso", "jar", "jpeg", "jpg", "jpgv", "jpm", "jxr", "ktx", "lvp", "lz", "lzma", "lzo", "m3u", "m4a", "m4v", "mar", "mdi", "mid", "mj2", "mka", "mkv", "mmr", "mng", "mov", "movie", "mp3", "mp4", "mp4a", "mpeg", "mpg", "mpga", "mxu", "nef", "npx", "o", "oga", "ogg", "ogv", "otf", "pbm", "pcx", "pdf", "pea", "pgm", "pic", "png", "pnm", "ppm", "pps", "ppt", "pptx", "ps", "psd", "pya", "pyc", "pyo", "pyv", "qt", "rar", "ras", "raw", "rgb", "rip", "rlc", "rz", "s3m", "s7z", "scm", "scpt", "sgi", "shar", "sil", "smv", "so", "sub", "swf", "tar", "tbz2", "tga", "tgz", "tif", "tiff", "tlz", "ts", "ttf", "uvh", "uvi", "uvm", "uvp", "uvs", "uvu", "viv", "vob", "war", "wav", "wax", "wbmp", "wdp", "weba", "webm", "webp", "whl", "wm", "wma", "wmv", "wmx", "woff", "woff2", "wvx", "xbm", "xif", "xls", "xlsx", "xlt", "xm", "xpi", "xpm", "xwd", "xz", "z", "zip", "zipx")
 
 # Patterns often seen in HTTP headers containing custom injection marking character
 PROBLEMATIC_CUSTOM_INJECTION_PATTERNS = r"(;q=[^;']+)|(\*/\*)"
@@ -459,20 +484,26 @@ BRUTE_TABLE_EXISTS_TEMPLATE = "EXISTS(SELECT %d FROM %s)"
 # Template used for common column existence check
 BRUTE_COLUMN_EXISTS_TEMPLATE = "EXISTS(SELECT %s FROM %s)"
 
-# Payload used for checking of existence of IDS/WAF (dummier the better)
-IDS_WAF_CHECK_PAYLOAD = "AND 1=1 UNION ALL SELECT 1,2,3,table_name FROM information_schema.tables WHERE 2>1-- ../../../etc/passwd"
+# Payload used for checking of existence of IDS/IPS/WAF (dummier the better)
+IDS_WAF_CHECK_PAYLOAD = "AND 1=1 UNION ALL SELECT 1,NULL,'<script>alert(\"XSS\")</script>',table_name FROM information_schema.tables WHERE 2>1--/**/; EXEC xp_cmdshell('cat ../../../etc/passwd')#"
 
 # Data inside shellcodeexec to be filled with random string
 SHELLCODEEXEC_RANDOM_STRING_MARKER = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
 
-# Vectors used for provoking specific WAF/IDS/IPS behavior(s)
+# Generic address for checking the Internet connection while using switch --check-internet
+CHECK_INTERNET_ADDRESS = "http://ipinfo.io/"
+
+# Value to look for in response to CHECK_INTERNET_ADDRESS
+CHECK_INTERNET_VALUE = "IP Address Details"
+
+# Vectors used for provoking specific WAF/IPS/IDS behavior(s)
 WAF_ATTACK_VECTORS = (
     "",  # NIL
     "search=<script>alert(1)</script>",
     "file=../../../../etc/passwd",
     "q=<invalid>foobar",
     "id=1 %s" % IDS_WAF_CHECK_PAYLOAD
 )
 
 # Used for status representation in dictionary attack phase
 ROTATING_CHARS = ('\\', '|', '|', '/', '-')
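The two new constants support the --check-internet switch: fetch a known page and look for a fixed marker in the body. A rough standalone equivalent (sqlmap's own request machinery differs; the compatibility import only keeps the sketch runnable on both Python 2 and 3):

    try:
        from urllib.request import urlopen   # Python 3
    except ImportError:
        from urllib2 import urlopen          # Python 2

    CHECK_INTERNET_ADDRESS = "http://ipinfo.io/"
    CHECK_INTERNET_VALUE = "IP Address Details"

    def check_internet(timeout=5):
        try:
            content = urlopen(CHECK_INTERNET_ADDRESS, timeout=timeout).read()
            return CHECK_INTERNET_VALUE in str(content)
        except Exception:
            return False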
@@ -496,20 +527,20 @@ PARSE_HEADERS_LIMIT = 3
 # Step used in ORDER BY technique used for finding the right number of columns in UNION query injections
 ORDER_BY_STEP = 10
 
-# Maximum number of times for revalidation of a character in time-based injections
-MAX_TIME_REVALIDATION_STEPS = 5
+# Maximum number of times for revalidation of a character in inference (as required)
+MAX_REVALIDATION_STEPS = 5
 
 # Characters that can be used to split parameter values in provided command line (e.g. in --tamper)
-PARAMETER_SPLITTING_REGEX = r'[,|;]'
+PARAMETER_SPLITTING_REGEX = r"[,|;]"
 
 # Regular expression describing possible union char value (e.g. used in --union-char)
-UNION_CHAR_REGEX = r'\A\w+\Z'
+UNION_CHAR_REGEX = r"\A\w+\Z"
 
 # Attribute used for storing original parameter value in special cases (e.g. POST)
-UNENCODED_ORIGINAL_VALUE = 'original'
+UNENCODED_ORIGINAL_VALUE = "original"
 
 # Common column names containing usernames (used for hash cracking in some cases)
-COMMON_USER_COLUMNS = ('user', 'username', 'user_name', 'benutzername', 'benutzer', 'utilisateur', 'usager', 'consommateur', 'utente', 'utilizzatore', 'usufrutuario', 'korisnik', 'usuario', 'consumidor')
+COMMON_USER_COLUMNS = ("login", "user", "username", "user_name", "user_login", "benutzername", "benutzer", "utilisateur", "usager", "consommateur", "utente", "utilizzatore", "usufrutuario", "korisnik", "usuario", "consumidor", "client", "cuser")
 
 # Default delimiter in GET/POST values
 DEFAULT_GET_POST_DELIMITER = '&'
@@ -521,7 +552,7 @@ DEFAULT_COOKIE_DELIMITER = ';'
 FORCE_COOKIE_EXPIRATION_TIME = "9999999999"
 
 # Github OAuth token used for creating an automatic Issue for unhandled exceptions
-GITHUB_REPORT_OAUTH_TOKEN = "YzNkYTgyMTdjYzdjNjZjMjFjMWE5ODI5OGQyNzk2ODM1M2M0MzUyOA=="
+GITHUB_REPORT_OAUTH_TOKEN = "NTMyNWNkMmZkMzRlMDZmY2JkMmY0MGI4NWI0MzVlM2Q5YmFjYWNhYQ=="
 
 # Skip unforced HashDB flush requests below the threshold number of cached items
 HASHDB_FLUSH_THRESHOLD = 32
@@ -529,11 +560,14 @@ HASHDB_FLUSH_THRESHOLD = 32
 # Number of retries for unsuccessful HashDB flush attempts
 HASHDB_FLUSH_RETRIES = 3
 
+# Number of retries for unsuccessful HashDB retrieve attempts
+HASHDB_RETRIEVE_RETRIES = 3
+
 # Number of retries for unsuccessful HashDB end transaction attempts
 HASHDB_END_TRANSACTION_RETRIES = 3
 
 # Unique milestone value used for forced deprecation of old HashDB values (e.g. when changing hash/pickle mechanism)
-HASHDB_MILESTONE_VALUE = "JHjrBugdDA"  # "".join(random.sample(string.ascii_letters, 10))
+HASHDB_MILESTONE_VALUE = "dPHoJRQYvs"  # python -c 'import random, string; print "".join(random.sample(string.ascii_letters, 10))'
 
 # Warn user of possible delay due to large page dump in full UNION query injections
 LARGE_OUTPUT_THRESHOLD = 1024 ** 2
@@ -559,9 +593,15 @@ DNS_BOUNDARIES_ALPHABET = re.sub("[a-fA-F]", "", string.ascii_letters)
 # Alphabet used for heuristic checks
 HEURISTIC_CHECK_ALPHABET = ('"', '\'', ')', '(', ',', '.')
 
+# Minor artistic touch
+BANNER = re.sub(r"\[.\]", lambda _: "[\033[01;41m%s\033[01;49m]" % random.sample(HEURISTIC_CHECK_ALPHABET, 1)[0], BANNER)
+
 # String used for dummy non-SQLi (e.g. XSS) heuristic checks of a tested parameter value
 DUMMY_NON_SQLI_CHECK_APPENDIX = "<'\">"
 
+# Regular expression used for recognition of file inclusion errors
+FI_ERROR_REGEX = "(?i)[^\n]{0,100}(no such file|failed (to )?open)[^\n]{0,100}"
+
 # Length of prefix and suffix used in non-SQLI heuristic checks
 NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH = 6
 
@@ -569,7 +609,10 @@ NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH = 6
 MAX_CONNECTION_CHUNK_SIZE = 10 * 1024 * 1024
 
 # Maximum response total page size (trimmed if larger)
-MAX_CONNECTION_TOTAL_SIZE = 100 * 1024 * 1024
+MAX_CONNECTION_TOTAL_SIZE = 50 * 1024 * 1024
 
+# For preventing MemoryError exceptions (caused when using large sequences in difflib.SequenceMatcher)
+MAX_DIFFLIB_SEQUENCE_LENGTH = 10 * 1024 * 1024
+
 # Maximum (multi-threaded) length of entry in bisection algorithm
 MAX_BISECTION_LENGTH = 50 * 1024 * 1024
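MAX_DIFFLIB_SEQUENCE_LENGTH exists because difflib.SequenceMatcher over very large pages can exhaust memory; trimming both inputs to the cap keeps the comparison bounded. A sketch of that guard (how sqlmap itself applies the cap may differ):

    import difflib

    MAX_DIFFLIB_SEQUENCE_LENGTH = 10 * 1024 * 1024

    def page_ratio(first, second):
        # Trim both sequences so SequenceMatcher never sees more than the cap
        first = (first or "")[:MAX_DIFFLIB_SEQUENCE_LENGTH]
        second = (second or "")[:MAX_DIFFLIB_SEQUENCE_LENGTH]
        return difflib.SequenceMatcher(None, first, second).quick_ratio()

    print(page_ratio("<html>foo</html>", "<html>bar</html>"))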
@@ -578,7 +621,7 @@ MAX_BISECTION_LENGTH = 50 * 1024 * 1024
 LARGE_CHUNK_TRIM_MARKER = "__TRIMMED_CONTENT__"
 
 # Generic SQL comment formation
-GENERIC_SQL_COMMENT = "-- -"
+GENERIC_SQL_COMMENT = "-- [RANDSTR]"
 
 # Threshold value for turning back on time auto-adjustment mechanism
 VALID_TIME_CHARS_RUN_THRESHOLD = 100
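The trailing comment now carries the [RANDSTR] marker (RANDOM_STRING_MARKER above), so each generated payload can end with a different comment instead of the fixed "-- -". A sketch of the substitution step; how sqlmap performs it internally may differ:

    import random
    import string

    GENERIC_SQL_COMMENT = "-- [RANDSTR]"
    RANDOM_STRING_MARKER = "[RANDSTR]"

    def randomized_comment():
        filler = "".join(random.sample(string.ascii_lowercase, 4))
        return GENERIC_SQL_COMMENT.replace(RANDOM_STRING_MARKER, filler)

    print("' OR 1=1" + randomized_comment())   # e.g. ' OR 1=1-- qxbe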
@@ -587,7 +630,7 @@ VALID_TIME_CHARS_RUN_THRESHOLD = 100
 CHECK_ZERO_COLUMNS_THRESHOLD = 10
 
 # Boldify all logger messages containing these "patterns"
-BOLD_PATTERNS = ("' injectable", "provided empty", "leftover chars", "might be injectable", "' is vulnerable", "is not injectable", "test failed", "test passed", "live test final result", "test shows that", "the back-end DBMS is", "created Github", "blocked by the target server", "protection is involved", "CloudFlare")
+BOLD_PATTERNS = ("' injectable", "provided empty", "leftover chars", "might be injectable", "' is vulnerable", "is not injectable", "does not seem to be", "test failed", "test passed", "live test final result", "test shows that", "the back-end DBMS is", "created Github", "blocked by the target server", "protection is involved", "CAPTCHA")
 
 # Generic www root directory names
 GENERIC_DOC_ROOT_DIRECTORY_NAMES = ("htdocs", "httpdocs", "public", "wwwroot", "www")
@@ -599,7 +642,7 @@ MAX_HELP_OPTION_LENGTH = 18
 MAX_CONNECT_RETRIES = 100
 
 # Strings for detecting formatting errors
-FORMAT_EXCEPTION_STRINGS = ("Type mismatch", "Error converting", "Failed to convert", "System.FormatException", "java.lang.NumberFormatException", "ValueError: invalid literal")
+FORMAT_EXCEPTION_STRINGS = ("Type mismatch", "Error converting", "Conversion failed", "String or binary data would be truncated", "Failed to convert", "unable to interpret text value", "Input string was not in a correct format", "System.FormatException", "java.lang.NumberFormatException", "ValueError: invalid literal", "DataTypeMismatchException", "CF_SQL_INTEGER", " for CFSQLTYPE ", "cfqueryparam cfsqltype", "InvalidParamTypeException", "Invalid parameter type", "is not of type numeric", "<cfif Not IsNumeric(", "invalid input syntax for integer", "invalid input syntax for type", "invalid number", "character to number conversion error", "unable to interpret text value", "String was not recognized as a valid", "Convert.ToInt", "cannot be converted to a ", "InvalidDataException")
 
 # Regular expression used for extracting ASP.NET view state values
 VIEWSTATE_REGEX = r'(?i)(?P<name>__VIEWSTATE[^"]*)[^>]+value="(?P<result>[^"]+)'
@@ -649,6 +692,9 @@ SUHOSIN_MAX_VALUE_LENGTH = 512
 # Minimum size of an (binary) entry before it can be considered for dumping to disk
 MIN_BINARY_DISK_DUMP_SIZE = 100
 
+# Filenames of payloads xml files (in order of loading)
+PAYLOAD_XML_FILES = ("boolean_blind.xml", "error_based.xml", "inline_query.xml", "stacked_queries.xml", "time_blind.xml", "union_query.xml")
+
 # Regular expression used for extracting form tags
 FORM_SEARCH_REGEX = r"(?si)<form(?!.+<form).+?</form>"
 
@@ -659,7 +705,7 @@ MAX_HISTORY_LENGTH = 1000
 MIN_ENCODED_LEN_CHECK = 5
 
 # Timeout in seconds in which Metasploit remote session has to be initialized
-METASPLOIT_SESSION_TIMEOUT = 300
+METASPLOIT_SESSION_TIMEOUT = 120
 
 # Reference: http://www.postgresql.org/docs/9.0/static/catalog-pg-largeobject.html
 LOBLKSIZE = 2048
@@ -680,7 +726,7 @@ BRUTE_DOC_ROOT_PREFIXES = {
 }
 
 # Suffixes used in brute force search for web server document root
-BRUTE_DOC_ROOT_SUFFIXES = ("", "html", "htdocs", "httpdocs", "php", "public", "src", "site", "build", "web", "data", "sites/all", "www/build")
+BRUTE_DOC_ROOT_SUFFIXES = ("", "html", "htdocs", "httpdocs", "php", "public", "src", "site", "build", "web", "www", "data", "sites/all", "www/build")
 
 # String used for marking target name inside used brute force web server document root
 BRUTE_DOC_ROOT_TARGET_MARK = "%TARGET%"
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -21,6 +21,7 @@ from lib.core.common import intersect
 from lib.core.common import normalizeUnicode
 from lib.core.common import openFile
 from lib.core.common import paramToDict
+from lib.core.common import randomStr
 from lib.core.common import readInput
 from lib.core.common import resetCookieJar
 from lib.core.common import urldecode
@@ -35,6 +36,7 @@ from lib.core.dump import dumper
 from lib.core.enums import HASHDB_KEYS
 from lib.core.enums import HTTP_HEADER
 from lib.core.enums import HTTPMETHOD
+from lib.core.enums import MKSTEMP_PREFIX
 from lib.core.enums import PLACE
 from lib.core.enums import POST_HINT
 from lib.core.exception import SqlmapFilePathException
@@ -66,7 +68,6 @@ from lib.core.settings import URI_INJECTABLE_REGEX
 from lib.core.settings import USER_AGENT_ALIASES
 from lib.core.settings import XML_RECOGNITION_REGEX
 from lib.utils.hashdb import HashDB
-from lib.core.xmldump import dumper as xmldumper
 from thirdparty.odict.odict import OrderedDict
 
 def _setRequestParams():
@@ -117,11 +118,12 @@ def _setRequestParams():
         if kb.processUserMarks is None and CUSTOM_INJECTION_MARK_CHAR in conf.data:
             message = "custom injection marking character ('%s') found in option " % CUSTOM_INJECTION_MARK_CHAR
             message += "'--data'. Do you want to process it? [Y/n/q] "
-            test = readInput(message, default="Y")
-            if test and test[0] in ("q", "Q"):
+            choice = readInput(message, default='Y')
+
+            if choice == 'Q':
                 raise SqlmapUserQuitException
             else:
-                kb.processUserMarks = not test or test[0] not in ("n", "N")
+                kb.processUserMarks = choice == 'Y'
 
             if kb.processUserMarks:
                 kb.testOnlyCustom = True
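The same refactoring repeats through the following hunks: the raw test/test[0] checks are replaced by a single normalized choice compared against 'Q', 'Y' or 'N'. A condensed sketch of the pattern outside sqlmap (prompt_choice() is a stand-in for readInput() and normalizes only the first answer character):

    def prompt_choice(message, default='Y'):
        # Stand-in for sqlmap's readInput(): return the normalized first answer character
        try:
            raw = raw_input(message)        # Python 2
        except NameError:
            raw = input(message)            # Python 3
        return (raw.strip() or default).upper()[0]

    choice = prompt_choice("Do you want to process it? [Y/n/q] ")
    if choice == 'Q':
        raise SystemExit("user quit")
    elif choice == 'Y':
        print("processing")
    else:
        print("skipped")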
@@ -130,10 +132,11 @@ def _setRequestParams():
             if re.search(JSON_RECOGNITION_REGEX, conf.data):
                 message = "JSON data found in %s data. " % conf.method
                 message += "Do you want to process it? [Y/n/q] "
-                test = readInput(message, default="Y")
-                if test and test[0] in ("q", "Q"):
+                choice = readInput(message, default='Y')
+
+                if choice == 'Q':
                     raise SqlmapUserQuitException
-                elif test[0] not in ("n", "N"):
+                elif choice == 'Y':
                     conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data)
                     conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
                     conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*"[^"]+)"', functools.partial(process, repl=r'\g<1>%s"' % CUSTOM_INJECTION_MARK_CHAR), conf.data)
@@ -149,10 +152,11 @@ def _setRequestParams():
             elif re.search(JSON_LIKE_RECOGNITION_REGEX, conf.data):
                 message = "JSON-like data found in %s data. " % conf.method
                 message += "Do you want to process it? [Y/n/q] "
-                test = readInput(message, default="Y")
-                if test and test[0] in ("q", "Q"):
+                choice = readInput(message, default='Y').upper()
+
+                if choice == 'Q':
                     raise SqlmapUserQuitException
-                elif test[0] not in ("n", "N"):
+                elif choice == 'Y':
                     conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data)
                     conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
                     conf.data = re.sub(r"('(?P<name>[^']+)'\s*:\s*'[^']+)'", functools.partial(process, repl=r"\g<1>%s'" % CUSTOM_INJECTION_MARK_CHAR), conf.data)
@@ -162,10 +166,11 @@ def _setRequestParams():
             elif re.search(ARRAY_LIKE_RECOGNITION_REGEX, conf.data):
                 message = "Array-like data found in %s data. " % conf.method
                 message += "Do you want to process it? [Y/n/q] "
-                test = readInput(message, default="Y")
-                if test and test[0] in ("q", "Q"):
+                choice = readInput(message, default='Y').upper()
+
+                if choice == 'Q':
                     raise SqlmapUserQuitException
-                elif test[0] not in ("n", "N"):
+                elif choice == 'Y':
                     conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
                     conf.data = re.sub(r"(=[^%s]+)" % DEFAULT_GET_POST_DELIMITER, r"\g<1>%s" % CUSTOM_INJECTION_MARK_CHAR, conf.data)
                     kb.postHint = POST_HINT.ARRAY_LIKE
@@ -173,10 +178,11 @@ def _setRequestParams():
             elif re.search(XML_RECOGNITION_REGEX, conf.data):
                 message = "SOAP/XML data found in %s data. " % conf.method
                 message += "Do you want to process it? [Y/n/q] "
-                test = readInput(message, default="Y")
-                if test and test[0] in ("q", "Q"):
+                choice = readInput(message, default='Y').upper()
+
+                if choice == 'Q':
                     raise SqlmapUserQuitException
-                elif test[0] not in ("n", "N"):
+                elif choice == 'Y':
                     conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data)
                     conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
                     conf.data = re.sub(r"(<(?P<name>[^>]+)( [^<]*)?>)([^<]+)(</\2)", functools.partial(process, repl=r"\g<1>\g<4>%s\g<5>" % CUSTOM_INJECTION_MARK_CHAR), conf.data)
@@ -185,10 +191,11 @@ def _setRequestParams():
|
|||||||
elif re.search(MULTIPART_RECOGNITION_REGEX, conf.data):
|
elif re.search(MULTIPART_RECOGNITION_REGEX, conf.data):
|
||||||
message = "Multipart-like data found in %s data. " % conf.method
|
message = "Multipart-like data found in %s data. " % conf.method
|
||||||
message += "Do you want to process it? [Y/n/q] "
|
message += "Do you want to process it? [Y/n/q] "
|
||||||
test = readInput(message, default="Y")
|
choice = readInput(message, default='Y').upper()
|
||||||
if test and test[0] in ("q", "Q"):
|
|
||||||
|
if choice == 'Q':
|
||||||
raise SqlmapUserQuitException
|
raise SqlmapUserQuitException
|
||||||
elif test[0] not in ("n", "N"):
|
elif choice == 'Y':
|
||||||
conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data)
|
conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data)
|
||||||
conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
|
conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
|
||||||
conf.data = re.sub(r"(?si)((Content-Disposition[^\n]+?name\s*=\s*[\"'](?P<name>[^\n]+?)[\"']).+?)(((\r)?\n)+--)", functools.partial(process, repl=r"\g<1>%s\g<4>" % CUSTOM_INJECTION_MARK_CHAR), conf.data)
|
conf.data = re.sub(r"(?si)((Content-Disposition[^\n]+?name\s*=\s*[\"'](?P<name>[^\n]+?)[\"']).+?)(((\r)?\n)+--)", functools.partial(process, repl=r"\g<1>%s\g<4>" % CUSTOM_INJECTION_MARK_CHAR), conf.data)
|
||||||
@@ -214,18 +221,18 @@ def _setRequestParams():
|
|||||||
|
|
||||||
if re.search(URI_INJECTABLE_REGEX, conf.url, re.I) and not any(place in conf.parameters for place in (PLACE.GET, PLACE.POST)) and not kb.postHint and not CUSTOM_INJECTION_MARK_CHAR in (conf.data or "") and conf.url.startswith("http"):
|
if re.search(URI_INJECTABLE_REGEX, conf.url, re.I) and not any(place in conf.parameters for place in (PLACE.GET, PLACE.POST)) and not kb.postHint and not CUSTOM_INJECTION_MARK_CHAR in (conf.data or "") and conf.url.startswith("http"):
|
||||||
warnMsg = "you've provided target URL without any GET "
|
warnMsg = "you've provided target URL without any GET "
|
||||||
warnMsg += "parameters (e.g. www.site.com/article.php?id=1) "
|
warnMsg += "parameters (e.g. 'http://www.site.com/article.php?id=1') "
|
||||||
warnMsg += "and without providing any POST parameters "
|
warnMsg += "and without providing any POST parameters "
|
||||||
warnMsg += "through --data option"
|
warnMsg += "through option '--data'"
|
||||||
logger.warn(warnMsg)
|
logger.warn(warnMsg)
|
||||||
|
|
||||||
message = "do you want to try URI injections "
|
message = "do you want to try URI injections "
|
||||||
message += "in the target URL itself? [Y/n/q] "
|
message += "in the target URL itself? [Y/n/q] "
|
||||||
test = readInput(message, default="Y")
|
choice = readInput(message, default='Y').upper()
|
||||||
|
|
||||||
if test and test[0] in ("q", "Q"):
|
if choice == 'Q':
|
||||||
raise SqlmapUserQuitException
|
raise SqlmapUserQuitException
|
||||||
elif not test or test[0] not in ("n", "N"):
|
elif choice == 'Y':
|
||||||
conf.url = "%s%s" % (conf.url, CUSTOM_INJECTION_MARK_CHAR)
|
conf.url = "%s%s" % (conf.url, CUSTOM_INJECTION_MARK_CHAR)
|
||||||
kb.processUserMarks = True
|
kb.processUserMarks = True
|
||||||
|
|
||||||
@@ -236,11 +243,12 @@ def _setRequestParams():
|
|||||||
lut = {PLACE.URI: '-u', PLACE.CUSTOM_POST: '--data', PLACE.CUSTOM_HEADER: '--headers/--user-agent/--referer/--cookie'}
|
lut = {PLACE.URI: '-u', PLACE.CUSTOM_POST: '--data', PLACE.CUSTOM_HEADER: '--headers/--user-agent/--referer/--cookie'}
|
||||||
message = "custom injection marking character ('%s') found in option " % CUSTOM_INJECTION_MARK_CHAR
|
message = "custom injection marking character ('%s') found in option " % CUSTOM_INJECTION_MARK_CHAR
|
||||||
message += "'%s'. Do you want to process it? [Y/n/q] " % lut[place]
|
message += "'%s'. Do you want to process it? [Y/n/q] " % lut[place]
|
||||||
test = readInput(message, default="Y")
|
choice = readInput(message, default='Y').upper()
|
||||||
if test and test[0] in ("q", "Q"):
|
|
||||||
|
if choice == 'Q':
|
||||||
raise SqlmapUserQuitException
|
raise SqlmapUserQuitException
|
||||||
else:
|
else:
|
||||||
kb.processUserMarks = not test or test[0] not in ("n", "N")
|
kb.processUserMarks = choice == 'Y'
|
||||||
|
|
||||||
if kb.processUserMarks:
|
if kb.processUserMarks:
|
||||||
kb.testOnlyCustom = True
|
kb.testOnlyCustom = True
|
||||||
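All of the hunks above converge on the same prompt idiom: read the answer once, normalize it, and branch on a single upper-cased character instead of poking at test[0]. A minimal standalone sketch of that idiom follows; readInput here is a stand-in for sqlmap's helper in lib.core.common and the names are illustrative, not the project's exact implementation.

    def readInput(message, default=None):
        # stand-in for lib.core.common.readInput: prompt and fall back to the default
        reply = raw_input(message)          # Python 2, matching the code base shown above
        return reply or default

    def confirm_processing(message):
        # normalize once, then branch on a single canonical value
        choice = readInput(message, default='Y').upper()

        if choice == 'Q':
            raise SystemExit("user quit")   # sqlmap raises SqlmapUserQuitException here
        return choice == 'Y'

    if confirm_processing("JSON-like data found. Do you want to process it? [Y/n/q] "):
        pass  # mark injection points in the POST data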
@@ -370,7 +378,7 @@ def _setRequestParams():
 raise SqlmapGenericException(errMsg)
 
 if conf.csrfToken:
-if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))) and not conf.csrfToken in set(_[0].lower() for _ in conf.httpHeaders) and not conf.csrfToken in conf.paramDict.get(PLACE.COOKIE, {}):
+if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))) and not re.search(r"\b%s\b" % re.escape(conf.csrfToken), conf.data or "") and not conf.csrfToken in set(_[0].lower() for _ in conf.httpHeaders) and not conf.csrfToken in conf.paramDict.get(PLACE.COOKIE, {}):
 errMsg = "anti-CSRF token parameter '%s' not " % conf.csrfToken
 errMsg += "found in provided GET, POST, Cookie or header values"
 raise SqlmapGenericException(errMsg)

@@ -380,8 +388,8 @@ def _setRequestParams():
 if any(parameter.lower().count(_) for _ in CSRF_TOKEN_PARAMETER_INFIXES):
 message = "%s parameter '%s' appears to hold anti-CSRF token. " % (place, parameter)
 message += "Do you want sqlmap to automatically update it in further requests? [y/N] "
-test = readInput(message, default="N")
-if test and test[0] in ("y", "Y"):
+
+if readInput(message, default='N', boolean=True):
 conf.csrfToken = parameter
 break
 
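The tightened anti-CSRF check above also greps the raw POST body for the token name; re.escape() keeps regex metacharacters in the parameter name from being interpreted, and the \b word boundaries avoid partial-name matches. A small illustration with made-up values:

    import re

    def token_in_data(token, data):
        # word boundaries so "csrf" does not match inside "xcsrfx", and
        # re.escape() neutralizes characters like '[' or '.' in the token name
        return bool(re.search(r"\b%s\b" % re.escape(token), data or ""))

    print(token_in_data("csrf_token", "id=1&csrf_token=deadbeef"))   # True
    print(token_in_data("csrf_token", None))                         # False, empty body tolerated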
@@ -430,7 +438,7 @@ def _resumeHashDBValues():
 
 if not conf.tech or intersect(conf.tech, injection.data.keys()):
 if intersect(conf.tech, injection.data.keys()):
-injection.data = dict(filter(lambda (key, item): key in conf.tech, injection.data.items()))
+injection.data = dict(_ for _ in injection.data.items() if _[0] in conf.tech)
 
 if injection not in kb.injections:
 kb.injections.append(injection)
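The replaced line relies on lambda (key, item): ..., a tuple-parameter form that only Python 2 accepts; the generator expression keeps the same filtering behaviour in a form that also parses on Python 3. A small equivalence sketch with invented data:

    data = {"boolean-based blind": 1, "time-based blind": 2, "UNION query": 3}
    allowed = ("time-based blind", "UNION query")

    # old style (Python 2 only): dict(filter(lambda (key, item): key in allowed, data.items()))
    filtered = dict(_ for _ in data.items() if _[0] in allowed)

    assert filtered == {"time-based blind": 2, "UNION query": 3}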
@@ -470,9 +478,8 @@ def _resumeDBMS():
 message += "sqlmap assumes the back-end DBMS is '%s'. " % dbms
 message += "Do you really want to force the back-end "
 message += "DBMS value? [y/N] "
-test = readInput(message, default="N")
 
-if not test or test[0] in ("n", "N"):
+if not readInput(message, default='N', boolean=True):
 conf.dbms = None
 Backend.setDbms(dbms)
 Backend.setVersionList(dbmsVersion)

@@ -506,9 +513,8 @@ def _resumeOS():
 message += "operating system is %s. " % os
 message += "Do you really want to force the back-end DBMS "
 message += "OS value? [y/N] "
-test = readInput(message, default="N")
 
-if not test or test[0] in ("n", "N"):
+if not readInput(message, default='N', boolean=True):
 conf.os = os
 else:
 conf.os = os
@@ -531,7 +537,8 @@ def _setResultsFile():
 except (OSError, IOError), ex:
 try:
 warnMsg = "unable to create results file '%s' ('%s'). " % (conf.resultsFilename, getUnicode(ex))
-conf.resultsFilename = tempfile.mkstemp(prefix="sqlmapresults-", suffix=".csv")[1]
+handle, conf.resultsFilename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.RESULTS, suffix=".csv")
+os.close(handle)
 conf.resultsFP = openFile(conf.resultsFilename, "w+", UNICODE_ENCODING, buffering=0)
 warnMsg += "Using temporary file '%s' instead" % conf.resultsFilename
 logger.warn(warnMsg)

@@ -542,7 +549,7 @@ def _setResultsFile():
 errMsg += "create temporary files and/or directories"
 raise SqlmapSystemException(errMsg)
 
-conf.resultsFP.writelines("Target URL,Place,Parameter,Techniques%s" % os.linesep)
+conf.resultsFP.writelines("Target URL,Place,Parameter,Technique(s),Note(s)%s" % os.linesep)
 
 logger.info("using '%s' as the CSV results file in multiple targets mode" % conf.resultsFilename)
 
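tempfile.mkstemp() returns an already-open OS-level file descriptor together with the path; the new code keeps both and closes the descriptor instead of discarding it with [1], which would leak the handle. A short sketch (MKSTEMP_PREFIX.RESULTS is sqlmap's own prefix constant; the literal prefix below is only a stand-in):

    import os
    import tempfile

    handle, filename = tempfile.mkstemp(prefix="sqlmapresults-", suffix=".csv")
    os.close(handle)                            # release the descriptor mkstemp opened

    with open(filename, "w+b") as results_fp:   # reopen however the caller prefers
        results_fp.write("Target URL,Place,Parameter,Technique(s),Note(s)\n")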
@@ -591,11 +598,7 @@ def _createDumpDir():
 conf.dumpPath = tempDir
 
 def _configureDumper():
-if hasattr(conf, 'xmlFile') and conf.xmlFile:
-conf.dumper = xmldumper
-else:
-conf.dumper = dumper
+conf.dumper = dumper
 
 conf.dumper.setOutputFile()
 
 def _createTargetDirs():

@@ -603,28 +606,33 @@ def _createTargetDirs():
 Create the output directory.
 """
 
-if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH):
-try:
-if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH):
-os.makedirs(paths.SQLMAP_OUTPUT_PATH, 0755)
+try:
+if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH):
+os.makedirs(paths.SQLMAP_OUTPUT_PATH, 0755)
+
+_ = os.path.join(paths.SQLMAP_OUTPUT_PATH, randomStr())
+open(_, "w+b").close()
+os.remove(_)
+
+if conf.outputDir:
 warnMsg = "using '%s' as the output directory" % paths.SQLMAP_OUTPUT_PATH
 logger.warn(warnMsg)
 except (OSError, IOError), ex:
 try:
 tempDir = tempfile.mkdtemp(prefix="sqlmapoutput")
 except Exception, _:
 errMsg = "unable to write to the temporary directory ('%s'). " % _
 errMsg += "Please make sure that your disk is not full and "
 errMsg += "that you have sufficient write permissions to "
 errMsg += "create temporary files and/or directories"
 raise SqlmapSystemException(errMsg)
 
-warnMsg = "unable to create regular output directory "
+warnMsg = "unable to %s output directory " % ("create" if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH) else "write to the")
 warnMsg += "'%s' (%s). " % (paths.SQLMAP_OUTPUT_PATH, getUnicode(ex))
 warnMsg += "Using temporary directory '%s' instead" % getUnicode(tempDir)
 logger.warn(warnMsg)
 
 paths.SQLMAP_OUTPUT_PATH = tempDir
 
 conf.outputPath = os.path.join(getUnicode(paths.SQLMAP_OUTPUT_PATH), normalizeUnicode(getUnicode(conf.hostname)))
 
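Beyond creating the directory, the reworked _createTargetDirs() probes that the output directory is actually writable by touching and removing a throwaway file, so permission problems surface inside the same try/except as the makedirs call. A condensed sketch of the probe, assuming nothing beyond the standard library (randomStr in the diff is sqlmap's helper; random_name below is just an illustration):

    import os
    import random
    import string
    import tempfile

    def ensure_writable_dir(path):
        try:
            if not os.path.isdir(path):
                os.makedirs(path, 0755)

            # probe: create, then delete, a randomly named file inside the directory
            random_name = "".join(random.sample(string.ascii_letters, 8))
            probe = os.path.join(path, random_name)
            open(probe, "w+b").close()
            os.remove(probe)
            return path
        except (OSError, IOError):
            # fall back to a throwaway directory, mirroring the diff above
            return tempfile.mkdtemp(prefix="sqlmapoutput")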
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -25,6 +25,7 @@ from lib.core.common import readXmlFile
 from lib.core.data import conf
 from lib.core.data import logger
 from lib.core.data import paths
+from lib.core.enums import MKSTEMP_PREFIX
 from lib.core.exception import SqlmapBaseException
 from lib.core.exception import SqlmapNotVulnerableException
 from lib.core.log import LOGGER_HANDLER

@@ -40,6 +41,8 @@ class Failures(object):
 failedParseOn = None
 failedTraceBack = None
 
+_failures = Failures()
+
 def smokeTest():
 """
 Runs the basic smoke testing of a program

@@ -52,16 +55,17 @@ def smokeTest():
 if any(_ in root for _ in ("thirdparty", "extra")):
 continue
 
-for ifile in files:
-length += 1
+for filename in files:
+if os.path.splitext(filename)[1].lower() == ".py" and filename != "__init__.py":
+length += 1
 
 for root, _, files in os.walk(paths.SQLMAP_ROOT_PATH):
 if any(_ in root for _ in ("thirdparty", "extra")):
 continue
 
-for ifile in files:
-if os.path.splitext(ifile)[1].lower() == ".py" and ifile != "__init__.py":
-path = os.path.join(root, os.path.splitext(ifile)[0])
+for filename in files:
+if os.path.splitext(filename)[1].lower() == ".py" and filename != "__init__.py":
+path = os.path.join(root, os.path.splitext(filename)[0])
 path = path.replace(paths.SQLMAP_ROOT_PATH, '.')
 path = path.replace(os.sep, '.').lstrip('.')
 try:

@@ -70,7 +74,7 @@ def smokeTest():
 except Exception, msg:
 retVal = False
 dataToStdout("\r")
-errMsg = "smoke test failed at importing module '%s' (%s):\n%s" % (path, os.path.join(root, ifile), msg)
+errMsg = "smoke test failed at importing module '%s' (%s):\n%s" % (path, os.path.join(root, filename), msg)
 logger.error(errMsg)
 else:
 # Run doc tests

@@ -79,9 +83,9 @@ def smokeTest():
 if failure_count > 0:
 retVal = False
 
 count += 1
 status = '%d/%d (%d%%) ' % (count, length, round(100.0 * count / length))
 dataToStdout("\r[%s] [INFO] complete: %s" % (time.strftime("%X"), status))
 
 clearConsoleLine()
 if retVal:
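The smokeTest() change makes the first os.walk() pass count only importable modules (*.py, excluding __init__.py), so the length used for the progress percentage matches what the second pass actually imports. Roughly, and assuming only the standard library:

    import os

    def count_importable_modules(root_path):
        length = 0
        for root, _, files in os.walk(root_path):
            if any(_ in root for _ in ("thirdparty", "extra")):
                continue
            for filename in files:
                # mirror the import loop's filter so count/length never exceeds 100%
                if os.path.splitext(filename)[1].lower() == ".py" and filename != "__init__.py":
                    length += 1
        return length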
@@ -191,11 +195,11 @@ def liveTest():
 else:
 errMsg = "test failed"
 
-if Failures.failedItems:
-errMsg += " at parsing items: %s" % ", ".join(i for i in Failures.failedItems)
+if _failures.failedItems:
+errMsg += " at parsing items: %s" % ", ".join(i for i in _failures.failedItems)
 
 errMsg += " - scan folder: %s" % paths.SQLMAP_OUTPUT_PATH
-errMsg += " - traceback: %s" % bool(Failures.failedTraceBack)
+errMsg += " - traceback: %s" % bool(_failures.failedTraceBack)
 
 if not vulnerable:
 errMsg += " - SQL injection not detected"

@@ -203,14 +207,14 @@ def liveTest():
 logger.error(errMsg)
 test_case_fd.write("%s\n" % errMsg)
 
-if Failures.failedParseOn:
+if _failures.failedParseOn:
 console_output_fd = codecs.open(os.path.join(paths.SQLMAP_OUTPUT_PATH, "console_output"), "wb", UNICODE_ENCODING)
-console_output_fd.write(Failures.failedParseOn)
+console_output_fd.write(_failures.failedParseOn)
 console_output_fd.close()
 
-if Failures.failedTraceBack:
+if _failures.failedTraceBack:
 traceback_fd = codecs.open(os.path.join(paths.SQLMAP_OUTPUT_PATH, "traceback"), "wb", UNICODE_ENCODING)
-traceback_fd.write(Failures.failedTraceBack)
+traceback_fd.write(_failures.failedTraceBack)
 traceback_fd.close()
 
 beep()

@@ -231,11 +235,11 @@ def liveTest():
 return retVal
 
 def initCase(switches, count):
-Failures.failedItems = []
-Failures.failedParseOn = None
-Failures.failedTraceBack = None
+_failures.failedItems = []
+_failures.failedParseOn = None
+_failures.failedTraceBack = None
 
-paths.SQLMAP_OUTPUT_PATH = tempfile.mkdtemp(prefix="sqlmaptest-%d-" % count)
+paths.SQLMAP_OUTPUT_PATH = tempfile.mkdtemp(prefix="%s%d-" % (MKSTEMP_PREFIX.TESTING, count))
 paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump")
 paths.SQLMAP_FILES_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "files")
 

@@ -277,10 +281,10 @@ def runCase(parse):
 LOGGER_HANDLER.stream = sys.stdout = sys.__stdout__
 
 if unhandled_exception:
-Failures.failedTraceBack = "unhandled exception: %s" % str(traceback.format_exc())
+_failures.failedTraceBack = "unhandled exception: %s" % str(traceback.format_exc())
 retVal = None
 elif handled_exception:
-Failures.failedTraceBack = "handled exception: %s" % str(traceback.format_exc())
+_failures.failedTraceBack = "handled exception: %s" % str(traceback.format_exc())
 retVal = None
 elif result is False: # this means no SQL injection has been detected - if None, ignore
 retVal = False

@@ -297,17 +301,17 @@ def runCase(parse):
 if item.startswith("r'") and item.endswith("'"):
 if not re.search(item[2:-1], parse_on, re.DOTALL):
 retVal = None
-Failures.failedItems.append(item)
+_failures.failedItems.append(item)
 
 elif item not in parse_on:
 retVal = None
-Failures.failedItems.append(item)
+_failures.failedItems.append(item)
 
-if Failures.failedItems:
-Failures.failedParseOn = console
+if _failures.failedItems:
+_failures.failedParseOn = console
 
 elif retVal is False:
-Failures.failedParseOn = console
+_failures.failedParseOn = console
 
 return retVal
 
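The rename from the Failures class to a module-level _failures instance keeps the same shared mutable state but stops mutating class attributes directly; every function holding the instance sees the updates. A toy version of the pattern, with an invented failure item:

    class Failures(object):
        failedItems = None
        failedParseOn = None
        failedTraceBack = None

    _failures = Failures()

    def init_case():
        # reset the shared holder before each test case
        _failures.failedItems = []
        _failures.failedParseOn = None
        _failures.failedTraceBack = None

    init_case()
    _failures.failedItems.append("r'current user:'")
    assert _failures.failedItems == ["r'current user:'"]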
@@ -1,17 +1,16 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
 import difflib
+import random
 import threading
 import time
 import traceback
 
-from thread import error as ThreadError
-
 from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger

@@ -19,6 +18,7 @@ from lib.core.datatype import AttribDict
 from lib.core.enums import PAYLOAD
 from lib.core.exception import SqlmapConnectionException
 from lib.core.exception import SqlmapThreadException
+from lib.core.exception import SqlmapUserQuitException
 from lib.core.exception import SqlmapValueException
 from lib.core.settings import MAX_NUMBER_OF_THREADS
 from lib.core.settings import PYVERSION

@@ -41,8 +41,11 @@ class _ThreadData(threading.local):
 self.disableStdOut = False
 self.hashDBCursor = None
 self.inTransaction = False
+self.lastCode = None
 self.lastComparisonPage = None
 self.lastComparisonHeaders = None
+self.lastComparisonCode = None
+self.lastComparisonRatio = None
 self.lastErrorPage = None
 self.lastHTTPError = None
 self.lastRedirectMsg = None

@@ -51,10 +54,12 @@ class _ThreadData(threading.local):
 self.lastRequestMsg = None
 self.lastRequestUID = 0
 self.lastRedirectURL = None
+self.random = random.WichmannHill()
 self.resumed = False
 self.retriesCount = 0
 self.seqMatcher = difflib.SequenceMatcher(None)
 self.shared = shared
+self.validationRun = 0
 self.valueStack = []
 
 ThreadData = _ThreadData()
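_ThreadData subclasses threading.local, so every attribute set in __init__ (including the newly added lastCode, lastComparisonCode, lastComparisonRatio, the per-thread random generator and validationRun) gets a fresh copy in each thread. A compact illustration with the field list trimmed to two examples:

    import random
    import threading

    class ThreadData(threading.local):
        def __init__(self):
            self.lastCode = None
            self.random = random.Random()   # the Python 2 code base uses random.WichmannHill()

    thread_data = ThreadData()

    def worker():
        thread_data.lastCode = 200          # visible only inside this thread

    t = threading.Thread(target=worker)
    t.start()
    t.join()
    assert thread_data.lastCode is None     # the main thread keeps its own copy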
@@ -62,7 +67,7 @@ ThreadData = _ThreadData()
 def getCurrentThreadUID():
 return hash(threading.currentThread())
 
-def readInput(message, default=None):
+def readInput(message, default=None, checkBatch=True, boolean=False):
 # It will be overwritten by original from lib.core.common
 pass
 

@@ -82,7 +87,7 @@ def getCurrentThreadName():
 
 return threading.current_thread().getName()
 
-def exceptionHandledFunction(threadFunction):
+def exceptionHandledFunction(threadFunction, silent=False):
 try:
 threadFunction()
 except KeyboardInterrupt:

@@ -90,8 +95,8 @@ def exceptionHandledFunction(threadFunction):
 kb.threadException = True
 raise
 except Exception, ex:
-# thread is just going to be silently killed
+if not silent:
 logger.error("thread %s: %s" % (threading.currentThread().getName(), ex.message))
 
 def setDaemon(thread):
 # Reference: http://stackoverflow.com/questions/190010/daemon-threads-explanation

@@ -145,7 +150,7 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardExceptio
 
 try:
 thread.start()
-except ThreadError, ex:
+except Exception, ex:
 errMsg = "error occurred while starting new thread ('%s')" % ex.message
 logger.critical(errMsg)
 break

@@ -161,13 +166,13 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardExceptio
 alive = True
 time.sleep(0.1)
 
-except KeyboardInterrupt:
+except (KeyboardInterrupt, SqlmapUserQuitException), ex:
 print
 kb.threadContinue = False
 kb.threadException = True
 
 if numThreads > 1:
-logger.info("waiting for threads to finish (Ctrl+C was pressed)")
+logger.info("waiting for threads to finish%s" % (" (Ctrl+C was pressed)" if isinstance(ex, KeyboardInterrupt) else ""))
 try:
 while (threading.activeCount() > 1):
 pass

@@ -199,10 +204,10 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardExceptio
 kb.threadException = False
 
 for lock in kb.locks.values():
-if lock.locked_lock():
+if lock.locked():
 try:
 lock.release()
-except thread.error:
+except:
 pass
 
 if conf.get("hashDB"):
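locked_lock() is a CPython-internal alias on Python 2 lock objects, while locked() is the documented way to ask whether a lock is held; the broadened except keeps the cleanup from dying if another thread already released the lock. A sketch of the cleanup idiom, using illustrative lock names:

    import threading

    locks = {"cache": threading.Lock(), "io": threading.Lock()}
    locks["cache"].acquire()

    for lock in locks.values():
        if lock.locked():        # public API; locked_lock() is only an undocumented alias
            try:
                lock.release()
            except Exception:    # releasing a lock freed elsewhere raises an error
                pass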
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,18 +1,16 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
 import locale
 import os
 import re
+import subprocess
 import time
 
-from subprocess import PIPE
-from subprocess import Popen as execute
-
 from lib.core.common import dataToStdout
 from lib.core.common import getSafeExString
 from lib.core.common import pollProcess

@@ -31,7 +29,7 @@ def update():
 
 if not os.path.exists(os.path.join(paths.SQLMAP_ROOT_PATH, ".git")):
 errMsg = "not a git repository. Please checkout the 'sqlmapproject/sqlmap' repository "
-errMsg += "from GitHub (e.g. 'git clone https://github.com/sqlmapproject/sqlmap.git sqlmap')"
+errMsg += "from GitHub (e.g. 'git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap')"
 logger.error(errMsg)
 else:
 infoMsg = "updating sqlmap to the latest development version from the "

@@ -44,7 +42,7 @@ def update():
 dataToStdout("\r[%s] [INFO] update in progress " % time.strftime("%X"))
 
 try:
-process = execute("git checkout . && git pull %s HEAD" % GIT_REPOSITORY, shell=True, stdout=PIPE, stderr=PIPE, cwd=paths.SQLMAP_ROOT_PATH.encode(locale.getpreferredencoding())) # Reference: http://blog.stastnarodina.com/honza-en/spot/python-unicodeencodeerror/
+process = subprocess.Popen("git checkout . && git pull %s HEAD" % GIT_REPOSITORY, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=paths.SQLMAP_ROOT_PATH.encode(locale.getpreferredencoding())) # Reference: http://blog.stastnarodina.com/honza-en/spot/python-unicodeencodeerror/
 pollProcess(process, True)
 stdout, stderr = process.communicate()
 success = not process.returncode

@@ -53,13 +51,11 @@ def update():
 stderr = getSafeExString(ex)
 
 if success:
-import lib.core.settings
-_ = lib.core.settings.REVISION = getRevisionNumber()
-logger.info("%s the latest revision '%s'" % ("already at" if "Already" in stdout else "updated to", _))
+logger.info("%s the latest revision '%s'" % ("already at" if "Already" in stdout else "updated to", getRevisionNumber()))
 else:
 if "Not a git repository" in stderr:
 errMsg = "not a valid git repository. Please checkout the 'sqlmapproject/sqlmap' repository "
-errMsg += "from GitHub (e.g. 'git clone https://github.com/sqlmapproject/sqlmap.git sqlmap')"
+errMsg += "from GitHub (e.g. 'git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap')"
 logger.error(errMsg)
 else:
 logger.error("update could not be completed ('%s')" % re.sub(r"\W+", " ", stderr).strip())
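The update routine now imports subprocess as a module and spells out subprocess.Popen/subprocess.PIPE instead of aliasing Popen to execute, which made the call read like a custom helper. The invocation itself is unchanged in spirit; a trimmed sketch (the repository URL below is illustrative, sqlmap substitutes its GIT_REPOSITORY constant):

    import subprocess

    process = subprocess.Popen("git checkout . && git pull https://github.com/sqlmapproject/sqlmap.git HEAD",
                               shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = process.communicate()
    success = not process.returncode    # git exits non-zero on failure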
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -11,7 +11,6 @@ import zipfile
 from lib.core.common import getSafeExString
 from lib.core.exception import SqlmapDataException
 from lib.core.exception import SqlmapInstallationException
-from lib.core.settings import UNICODE_ENCODING
 
 class Wordlist(object):
 """

@@ -45,7 +44,7 @@ class Wordlist(object):
 try:
 _ = zipfile.ZipFile(self.current, 'r')
 except zipfile.error, ex:
-errMsg = "something seems to be wrong with "
+errMsg = "something appears to be wrong with "
 errMsg += "the file '%s' ('%s'). Please make " % (self.current, getSafeExString(ex))
 errMsg += "sure that you haven't made any changes to it"
 raise SqlmapInstallationException, errMsg

@@ -71,7 +70,7 @@ class Wordlist(object):
 try:
 retVal = self.iter.next().rstrip()
 except zipfile.error, ex:
-errMsg = "something seems to be wrong with "
+errMsg = "something appears to be wrong with "
 errMsg += "the file '%s' ('%s'). Please make " % (self.current, getSafeExString(ex))
 errMsg += "sure that you haven't made any changes to it"
 raise SqlmapInstallationException, errMsg
|
|||||||
@@ -1,536 +0,0 @@
|
|||||||
#!/usr/bin/env python
|
|
||||||
|
|
||||||
import codecs
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import xml
|
|
||||||
|
|
||||||
import xml.sax.saxutils as saxutils
|
|
||||||
|
|
||||||
from lib.core.common import getUnicode
|
|
||||||
from lib.core.data import conf
|
|
||||||
from lib.core.data import kb
|
|
||||||
from lib.core.data import logger
|
|
||||||
from lib.core.exception import SqlmapFilePathException
|
|
||||||
from lib.core.settings import UNICODE_ENCODING
|
|
||||||
from thirdparty.prettyprint import prettyprint
|
|
||||||
from xml.dom.minidom import Document
|
|
||||||
from xml.parsers.expat import ExpatError
|
|
||||||
|
|
||||||
TECHNIC_ELEM_NAME = "Technic"
|
|
||||||
TECHNICS_ELEM_NAME = "Technics"
|
|
||||||
BANNER_ELEM_NAME = "Banner"
|
|
||||||
COLUMNS_ELEM_NAME = "DatabaseColumns"
|
|
||||||
COLUMN_ELEM_NAME = "Column"
|
|
||||||
CELL_ELEM_NAME = "Cell"
|
|
||||||
COLUMN_ATTR = "column"
|
|
||||||
ROW_ELEM_NAME = "Row"
|
|
||||||
TABLES_ELEM_NAME = "tables"
|
|
||||||
DATABASE_COLUMNS_ELEM = "DB"
|
|
||||||
DB_TABLES_ELEM_NAME = "DBTables"
|
|
||||||
DB_TABLE_ELEM_NAME = "DBTable"
|
|
||||||
IS_DBA_ELEM_NAME = "isDBA"
|
|
||||||
FILE_CONTENT_ELEM_NAME = "FileContent"
|
|
||||||
DB_ATTR = "db"
|
|
||||||
UNKNOWN_COLUMN_TYPE = "unknown"
|
|
||||||
USER_SETTINGS_ELEM_NAME = "UserSettings"
|
|
||||||
USER_SETTING_ELEM_NAME = "UserSetting"
|
|
||||||
USERS_ELEM_NAME = "Users"
|
|
||||||
USER_ELEM_NAME = "User"
|
|
||||||
DB_USER_ELEM_NAME = "DBUser"
|
|
||||||
SETTINGS_ELEM_NAME = "Settings"
|
|
||||||
DBS_ELEM_NAME = "DBs"
|
|
||||||
DB_NAME_ELEM_NAME = "DBName"
|
|
||||||
DATABASE_ELEM_NAME = "Database"
|
|
||||||
TABLE_ELEM_NAME = "Table"
|
|
||||||
DB_TABLE_VALUES_ELEM_NAME = "DBTableValues"
|
|
||||||
DB_VALUES_ELEM = "DBValues"
|
|
||||||
QUERIES_ELEM_NAME = "Queries"
|
|
||||||
QUERY_ELEM_NAME = "Query"
|
|
||||||
REGISTERY_ENTRIES_ELEM_NAME = "RegistryEntries"
|
|
||||||
REGISTER_DATA_ELEM_NAME = "RegisterData"
|
|
||||||
DEFAULT_DB = "All"
|
|
||||||
MESSAGE_ELEM = "Message"
|
|
||||||
MESSAGES_ELEM_NAME = "Messages"
|
|
||||||
ERROR_ELEM_NAME = "Error"
|
|
||||||
LST_ELEM_NAME = "List"
|
|
||||||
LSTS_ELEM_NAME = "Lists"
|
|
||||||
CURRENT_USER_ELEM_NAME = "CurrentUser"
|
|
||||||
CURRENT_DB_ELEM_NAME = "CurrentDB"
|
|
||||||
MEMBER_ELEM = "Member"
|
|
||||||
ADMIN_USER = "Admin"
|
|
||||||
REGULAR_USER = "User"
|
|
||||||
STATUS_ELEM_NAME = "Status"
|
|
||||||
RESULTS_ELEM_NAME = "Results"
|
|
||||||
UNHANDLED_PROBLEM_TYPE = "Unhandled"
|
|
||||||
NAME_ATTR = "name"
|
|
||||||
TYPE_ATTR = "type"
|
|
||||||
VALUE_ATTR = "value"
|
|
||||||
SUCESS_ATTR = "success"
|
|
||||||
NAME_SPACE_ATTR = 'http://www.w3.org/2001/XMLSchema-instance'
|
|
||||||
XMLNS_ATTR = "xmlns:xsi"
|
|
||||||
SCHEME_NAME = "sqlmap.xsd"
|
|
||||||
SCHEME_NAME_ATTR = "xsi:noNamespaceSchemaLocation"
|
|
||||||
CHARACTERS_TO_ENCODE = range(32) + range(127, 256)
|
|
||||||
ENTITIES = {'"': '"', "'": "'"}
|
|
||||||
|
|
||||||
class XMLDump(object):
|
|
||||||
'''
|
|
||||||
This class purpose is to dump the data into an xml Format.
|
|
||||||
The format of the xml file is described in the scheme file xml/sqlmap.xsd
|
|
||||||
'''
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self._outputFile = None
|
|
||||||
self._outputFP = None
|
|
||||||
self.__root = None
|
|
||||||
self.__doc = Document()
|
|
||||||
|
|
||||||
def _addToRoot(self, element):
|
|
||||||
'''
|
|
||||||
Adds element to the root element
|
|
||||||
'''
|
|
||||||
self.__root.appendChild(element)
|
|
||||||
|
|
||||||
def __write(self, data, n=True):
|
|
||||||
'''
|
|
||||||
Writes the data into the file
|
|
||||||
'''
|
|
||||||
if n:
|
|
||||||
self._outputFP.write("%s\n" % data)
|
|
||||||
else:
|
|
||||||
self._outputFP.write("%s " % data)
|
|
||||||
|
|
||||||
self._outputFP.flush()
|
|
||||||
|
|
||||||
kb.dataOutputFlag = True
|
|
||||||
|
|
||||||
def _getRootChild(self, elemName):
|
|
||||||
'''
|
|
||||||
Returns the child of the root with the described name
|
|
||||||
'''
|
|
||||||
elements = self.__root.getElementsByTagName(elemName)
|
|
||||||
if elements:
|
|
||||||
return elements[0]
|
|
||||||
|
|
||||||
return elements
|
|
||||||
|
|
||||||
def _createTextNode(self, data):
|
|
||||||
'''
|
|
||||||
Creates a text node with utf8 data inside.
|
|
||||||
The text is escaped to an fit the xml text Format.
|
|
||||||
'''
|
|
||||||
if data is None:
|
|
||||||
return self.__doc.createTextNode(u'')
|
|
||||||
else:
|
|
||||||
escaped_data = saxutils.escape(data, ENTITIES)
|
|
||||||
return self.__doc.createTextNode(escaped_data)
|
|
||||||
|
|
||||||
def _createAttribute(self, attrName, attrValue):
|
|
||||||
'''
|
|
||||||
Creates an attribute node with utf8 data inside.
|
|
||||||
The text is escaped to an fit the xml text Format.
|
|
||||||
'''
|
|
||||||
attr = self.__doc.createAttribute(attrName)
|
|
||||||
if attrValue is None:
|
|
||||||
attr.nodeValue = u''
|
|
||||||
else:
|
|
||||||
attr.nodeValue = getUnicode(attrValue)
|
|
||||||
return attr
|
|
||||||
|
|
||||||
def string(self, header, data, sort=True):
|
|
||||||
'''
|
|
||||||
Adds string element to the xml.
|
|
||||||
'''
|
|
||||||
if isinstance(data, (list, tuple, set)):
|
|
||||||
self.lister(header, data, sort)
|
|
||||||
return
|
|
||||||
|
|
||||||
messagesElem = self._getRootChild(MESSAGES_ELEM_NAME)
|
|
||||||
if (not(messagesElem)):
|
|
||||||
messagesElem = self.__doc.createElement(MESSAGES_ELEM_NAME)
|
|
||||||
self._addToRoot(messagesElem)
|
|
||||||
|
|
||||||
if data:
|
|
||||||
data = self._formatString(data)
|
|
||||||
else:
|
|
||||||
data = ""
|
|
||||||
|
|
||||||
elem = self.__doc.createElement(MESSAGE_ELEM)
|
|
||||||
elem.setAttributeNode(self._createAttribute(TYPE_ATTR, header))
|
|
||||||
elem.appendChild(self._createTextNode(data))
|
|
||||||
messagesElem.appendChild(elem)
|
|
||||||
|
|
||||||
def lister(self, header, elements, sort=True):
|
|
||||||
'''
|
|
||||||
Adds information formatted as list element
|
|
||||||
'''
|
|
||||||
lstElem = self.__doc.createElement(LST_ELEM_NAME)
|
|
||||||
lstElem.setAttributeNode(self._createAttribute(TYPE_ATTR, header))
|
|
||||||
if elements:
|
|
||||||
if sort:
|
|
||||||
try:
|
|
||||||
elements = set(elements)
|
|
||||||
elements = list(elements)
|
|
||||||
elements.sort(key=lambda x: x.lower())
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
|
|
||||||
for element in elements:
|
|
||||||
memberElem = self.__doc.createElement(MEMBER_ELEM)
|
|
||||||
lstElem.appendChild(memberElem)
|
|
||||||
if isinstance(element, basestring):
|
|
||||||
memberElem.setAttributeNode(self._createAttribute(TYPE_ATTR, "string"))
|
|
||||||
memberElem.appendChild(self._createTextNode(element))
|
|
||||||
elif isinstance(element, (list, tuple, set)):
|
|
||||||
memberElem.setAttributeNode(self._createAttribute(TYPE_ATTR, "list"))
|
|
||||||
for e in element:
|
|
||||||
memberElemStr = self.__doc.createElement(MEMBER_ELEM)
|
|
||||||
memberElemStr.setAttributeNode(self._createAttribute(TYPE_ATTR, "string"))
|
|
||||||
memberElemStr.appendChild(self._createTextNode(getUnicode(e)))
|
|
||||||
memberElem.appendChild(memberElemStr)
|
|
||||||
listsElem = self._getRootChild(LSTS_ELEM_NAME)
|
|
||||||
if not(listsElem):
|
|
||||||
listsElem = self.__doc.createElement(LSTS_ELEM_NAME)
|
|
||||||
self._addToRoot(listsElem)
|
|
||||||
listsElem.appendChild(lstElem)
|
|
||||||
|
|
||||||
def technic(self, technicType, data):
|
|
||||||
'''
|
|
||||||
Adds information about the technic used to extract data from the db
|
|
||||||
'''
|
|
||||||
technicElem = self.__doc.createElement(TECHNIC_ELEM_NAME)
|
|
||||||
technicElem.setAttributeNode(self._createAttribute(TYPE_ATTR, technicType))
|
|
||||||
textNode = self._createTextNode(data)
|
|
||||||
technicElem.appendChild(textNode)
|
|
||||||
technicsElem = self._getRootChild(TECHNICS_ELEM_NAME)
|
|
||||||
if not(technicsElem):
|
|
||||||
technicsElem = self.__doc.createElement(TECHNICS_ELEM_NAME)
|
|
||||||
self._addToRoot(technicsElem)
|
|
||||||
technicsElem.appendChild(technicElem)
|
|
||||||
|
|
||||||
def banner(self, data):
|
|
||||||
'''
|
|
||||||
Adds information about the database banner to the xml.
|
|
||||||
The banner contains information about the type and the version of the database.
|
|
||||||
'''
|
|
||||||
bannerElem = self.__doc.createElement(BANNER_ELEM_NAME)
|
|
||||||
bannerElem.appendChild(self._createTextNode(data))
|
|
||||||
self._addToRoot(bannerElem)
|
|
||||||
|
|
||||||
def currentUser(self, data):
|
|
||||||
'''
|
|
||||||
Adds information about the current database user to the xml
|
|
||||||
'''
|
|
||||||
currentUserElem = self.__doc.createElement(CURRENT_USER_ELEM_NAME)
|
|
||||||
textNode = self._createTextNode(data)
|
|
||||||
currentUserElem.appendChild(textNode)
|
|
||||||
self._addToRoot(currentUserElem)
|
|
||||||
|
|
||||||
def currentDb(self, data):
|
|
||||||
'''
|
|
||||||
Adds information about the current database is use to the xml
|
|
||||||
'''
|
|
||||||
currentDBElem = self.__doc.createElement(CURRENT_DB_ELEM_NAME)
|
|
||||||
textNode = self._createTextNode(data)
|
|
||||||
currentDBElem.appendChild(textNode)
|
|
||||||
self._addToRoot(currentDBElem)
|
|
||||||
|
|
||||||
def dba(self, isDBA):
|
|
||||||
'''
|
|
||||||
Adds information to the xml that indicates whether the user has DBA privileges
|
|
||||||
'''
|
|
||||||
isDBAElem = self.__doc.createElement(IS_DBA_ELEM_NAME)
|
|
||||||
isDBAElem.setAttributeNode(self._createAttribute(VALUE_ATTR, getUnicode(isDBA)))
|
|
||||||
self._addToRoot(isDBAElem)
|
|
||||||
|
|
||||||
def users(self, users):
|
|
||||||
'''
|
|
||||||
Adds a list of the existing users to the xml
|
|
||||||
'''
|
|
||||||
usersElem = self.__doc.createElement(USERS_ELEM_NAME)
|
|
||||||
if isinstance(users, basestring):
|
|
||||||
users = [users]
|
|
||||||
if users:
|
|
||||||
for user in users:
|
|
||||||
userElem = self.__doc.createElement(DB_USER_ELEM_NAME)
|
|
||||||
usersElem.appendChild(userElem)
|
|
||||||
userElem.appendChild(self._createTextNode(user))
|
|
||||||
self._addToRoot(usersElem)
|
|
||||||
|
|
||||||
def dbs(self, dbs):
|
|
||||||
'''
|
|
||||||
Adds a list of the existing databases to the xml
|
|
||||||
'''
|
|
||||||
dbsElem = self.__doc.createElement(DBS_ELEM_NAME)
|
|
||||||
if dbs:
|
|
||||||
for db in dbs:
|
|
||||||
dbElem = self.__doc.createElement(DB_NAME_ELEM_NAME)
|
|
||||||
dbsElem.appendChild(dbElem)
|
|
||||||
dbElem.appendChild(self._createTextNode(db))
|
|
||||||
self._addToRoot(dbsElem)
|
|
||||||
|
|
||||||
def userSettings(self, header, userSettings, subHeader):
|
|
||||||
'''
|
|
||||||
Adds information about the user's settings to the xml.
|
|
||||||
The information can be user's passwords, privileges and etc..
|
|
||||||
'''
|
|
||||||
self._areAdmins = set()
|
|
||||||
userSettingsElem = self._getRootChild(USER_SETTINGS_ELEM_NAME)
|
|
||||||
if (not(userSettingsElem)):
|
|
||||||
userSettingsElem = self.__doc.createElement(USER_SETTINGS_ELEM_NAME)
|
|
||||||
self._addToRoot(userSettingsElem)
|
|
||||||
|
|
||||||
userSettingElem = self.__doc.createElement(USER_SETTING_ELEM_NAME)
|
|
||||||
userSettingElem.setAttributeNode(self._createAttribute(TYPE_ATTR, header))
|
|
||||||
|
|
||||||
if isinstance(userSettings, (tuple, list, set)):
|
|
||||||
self._areAdmins = userSettings[1]
|
|
||||||
userSettings = userSettings[0]
|
|
||||||
|
|
||||||
users = userSettings.keys()
|
|
||||||
users.sort(key=lambda x: x.lower())
|
|
||||||
|
|
||||||
for user in users:
|
|
||||||
userElem = self.__doc.createElement(USER_ELEM_NAME)
|
|
||||||
userSettingElem.appendChild(userElem)
|
|
||||||
if user in self._areAdmins:
|
|
||||||
userElem.setAttributeNode(self._createAttribute(TYPE_ATTR, ADMIN_USER))
|
|
||||||
else:
|
|
||||||
userElem.setAttributeNode(self._createAttribute(TYPE_ATTR, REGULAR_USER))
|
|
||||||
|
|
||||||
settings = userSettings[user]
|
|
||||||
|
|
||||||
settings.sort()
|
|
||||||
|
|
||||||
for setting in settings:
|
|
||||||
settingsElem = self.__doc.createElement(SETTINGS_ELEM_NAME)
|
|
||||||
settingsElem.setAttributeNode(self._createAttribute(TYPE_ATTR, subHeader))
|
|
||||||
settingTextNode = self._createTextNode(setting)
|
|
||||||
settingsElem.appendChild(settingTextNode)
|
|
||||||
userElem.appendChild(settingsElem)
|
|
||||||
userSettingsElem.appendChild(userSettingElem)
|
|
||||||
|
|
||||||
def dbTables(self, dbTables):
|
|
||||||
'''
|
|
||||||
Adds information of the existing db tables to the xml
|
|
||||||
'''
|
|
||||||
if not isinstance(dbTables, dict):
|
|
||||||
self.string(TABLES_ELEM_NAME, dbTables)
|
|
||||||
return
|
|
||||||
|
|
||||||
dbTablesElem = self.__doc.createElement(DB_TABLES_ELEM_NAME)
|
|
||||||
|
|
||||||
for db, tables in dbTables.items():
|
|
||||||
tables.sort(key=lambda x: x.lower())
|
|
||||||
dbElem = self.__doc.createElement(DATABASE_ELEM_NAME)
|
|
||||||
dbElem.setAttributeNode(self._createAttribute(NAME_ATTR, db))
|
|
||||||
dbTablesElem.appendChild(dbElem)
|
|
||||||
for table in tables:
|
|
||||||
tableElem = self.__doc.createElement(DB_TABLE_ELEM_NAME)
|
|
||||||
tableElem.appendChild(self._createTextNode(table))
|
|
||||||
dbElem.appendChild(tableElem)
|
|
||||||
self._addToRoot(dbTablesElem)
|
|
||||||
|
|
||||||
def dbTableColumns(self, tableColumns):
|
|
||||||
'''
|
|
||||||
Adds information about the columns of the existing tables to the xml
|
|
||||||
'''
|
|
||||||
|
|
||||||
columnsElem = self._getRootChild(COLUMNS_ELEM_NAME)
|
|
||||||
if not(columnsElem):
|
|
||||||
columnsElem = self.__doc.createElement(COLUMNS_ELEM_NAME)
|
|
||||||
|
|
||||||
for db, tables in tableColumns.items():
|
|
||||||
if not db:
|
|
||||||
db = DEFAULT_DB
|
|
||||||
dbElem = self.__doc.createElement(DATABASE_COLUMNS_ELEM)
|
|
||||||
dbElem.setAttributeNode(self._createAttribute(NAME_ATTR, db))
|
|
||||||
columnsElem.appendChild(dbElem)
|
|
||||||
|
|
||||||
for table, columns in tables.items():
|
|
||||||
tableElem = self.__doc.createElement(TABLE_ELEM_NAME)
|
|
||||||
tableElem.setAttributeNode(self._createAttribute(NAME_ATTR, table))
|
|
||||||
|
|
||||||
colList = columns.keys()
|
|
||||||
colList.sort(key=lambda x: x.lower())
|
|
||||||
|
|
||||||
for column in colList:
|
|
||||||
colType = columns[column]
|
|
||||||
colElem = self.__doc.createElement(COLUMN_ELEM_NAME)
|
|
||||||
if colType is not None:
|
|
||||||
colElem.setAttributeNode(self._createAttribute(TYPE_ATTR, colType))
|
|
||||||
else:
|
|
||||||
colElem.setAttributeNode(self._createAttribute(TYPE_ATTR, UNKNOWN_COLUMN_TYPE))
|
|
||||||
colElem.appendChild(self._createTextNode(column))
|
|
||||||
tableElem.appendChild(colElem)
|
|
||||||
|
|
||||||
self._addToRoot(columnsElem)
|
|
||||||
|
|
||||||
def dbTableValues(self, tableValues):
|
|
||||||
'''
|
|
||||||
Adds the values of specific table to the xml.
|
|
||||||
The values are organized according to the relevant row and column.
|
|
||||||
'''
|
|
||||||
tableElem = self.__doc.createElement(DB_TABLE_VALUES_ELEM_NAME)
|
|
||||||
if (tableValues is not None):
|
|
||||||
db = tableValues["__infos__"]["db"]
|
|
||||||
if not db:
|
|
||||||
db = "All"
|
|
||||||
table = tableValues["__infos__"]["table"]
|
|
||||||
|
|
||||||
count = int(tableValues["__infos__"]["count"])
|
|
||||||
columns = tableValues.keys()
|
|
||||||
columns.sort(key=lambda x: x.lower())
|
|
||||||
|
|
||||||
tableElem.setAttributeNode(self._createAttribute(DB_ATTR, db))
|
|
||||||
tableElem.setAttributeNode(self._createAttribute(NAME_ATTR, table))
|
|
||||||
|
|
||||||
for i in range(count):
|
|
||||||
rowElem = self.__doc.createElement(ROW_ELEM_NAME)
|
|
||||||
tableElem.appendChild(rowElem)
|
|
||||||
for column in columns:
|
|
||||||
if column != "__infos__":
|
|
||||||
info = tableValues[column]
|
|
||||||
value = info["values"][i]
|
|
||||||
|
|
||||||
if re.search("^[\ *]*$", value):
|
|
||||||
value = "NULL"
|
|
||||||
|
|
||||||
cellElem = self.__doc.createElement(CELL_ELEM_NAME)
|
|
||||||
cellElem.setAttributeNode(self._createAttribute(COLUMN_ATTR, column))
|
|
||||||
cellElem.appendChild(self._createTextNode(value))
|
|
||||||
rowElem.appendChild(cellElem)
|
|
||||||
|
|
||||||
dbValuesElem = self._getRootChild(DB_VALUES_ELEM)
|
|
||||||
if (not(dbValuesElem)):
|
|
||||||
dbValuesElem = self.__doc.createElement(DB_VALUES_ELEM)
|
|
||||||
self._addToRoot(dbValuesElem)
|
|
||||||
|
|
||||||
dbValuesElem.appendChild(tableElem)
|
|
||||||
|
|
||||||
logger.info("Table '%s.%s' dumped to XML file" % (db, table))
|
|
||||||
|
|
||||||
def dbColumns(self, dbColumns, colConsider, dbs):
|
|
||||||
'''
|
|
||||||
Adds information about the columns
|
|
||||||
'''
|
|
||||||
for column in dbColumns.keys():
|
|
||||||
printDbs = {}
|
|
||||||
for db, tblData in dbs.items():
|
|
||||||
for tbl, colData in tblData.items():
|
|
||||||
for col, dataType in colData.items():
|
|
||||||
if column in col:
|
|
||||||
if db in printDbs:
|
|
||||||
if tbl in printDbs[db]:
|
|
||||||
printDbs[db][tbl][col] = dataType
|
|
||||||
else:
|
|
||||||
printDbs[db][tbl] = {col: dataType}
|
|
||||||
else:
|
|
||||||
printDbs[db] = {}
|
|
||||||
printDbs[db][tbl] = {col: dataType}
|
|
||||||
|
|
||||||
continue
|
|
||||||
|
|
||||||
self.dbTableColumns(printDbs)
|
|
||||||
|
|
||||||
def query(self, query, queryRes):
|
|
||||||
'''
|
|
||||||
Adds details of an executed query to the xml.
|
|
||||||
The query details are the query itself and its results.
|
|
||||||
'''
|
|
||||||
queryElem = self.__doc.createElement(QUERY_ELEM_NAME)
|
|
||||||
        queryElem.setAttributeNode(self._createAttribute(VALUE_ATTR, query))
        queryElem.appendChild(self._createTextNode(queryRes))
        queriesElem = self._getRootChild(QUERIES_ELEM_NAME)
        if (not(queriesElem)):
            queriesElem = self.__doc.createElement(QUERIES_ELEM_NAME)
            self._addToRoot(queriesElem)
        queriesElem.appendChild(queryElem)

    def registerValue(self, registerData):
        '''
        Adds information about an extracted registry key to the xml
        '''
        registerElem = self.__doc.createElement(REGISTER_DATA_ELEM_NAME)
        registerElem.appendChild(self._createTextNode(registerData))
        registriesElem = self._getRootChild(REGISTERY_ENTRIES_ELEM_NAME)
        if (not(registriesElem)):
            registriesElem = self.__doc.createElement(REGISTERY_ENTRIES_ELEM_NAME)
            self._addToRoot(registriesElem)
        registriesElem.appendChild(registerElem)

    def rFile(self, filePath, data):
        '''
        Adds an extracted file's content to the xml
        '''
        fileContentElem = self.__doc.createElement(FILE_CONTENT_ELEM_NAME)
        fileContentElem.setAttributeNode(self._createAttribute(NAME_ATTR, filePath))
        fileContentElem.appendChild(self._createTextNode(data))
        self._addToRoot(fileContentElem)

    def setOutputFile(self):
        '''
        Initiates the xml file from the configuration.
        '''
        if (conf.xmlFile):
            try:
                self._outputFile = conf.xmlFile
                self.__root = None

                if os.path.exists(self._outputFile):
                    try:
                        self.__doc = xml.dom.minidom.parse(self._outputFile)
                        self.__root = self.__doc.childNodes[0]
                    except ExpatError:
                        self.__doc = Document()

                self._outputFP = codecs.open(self._outputFile, "w+", UNICODE_ENCODING)

                if self.__root is None:
                    self.__root = self.__doc.createElementNS(NAME_SPACE_ATTR, RESULTS_ELEM_NAME)
                    self.__root.setAttributeNode(self._createAttribute(XMLNS_ATTR, NAME_SPACE_ATTR))
                    self.__root.setAttributeNode(self._createAttribute(SCHEME_NAME_ATTR, SCHEME_NAME))
                    self.__doc.appendChild(self.__root)
            except IOError:
                raise SqlmapFilePathException("Wrong filename provided for saving the xml file: %s" % conf.xmlFile)

    def getOutputFile(self):
        return self._outputFile

    def finish(self, resultStatus, resultMsg=""):
        '''
        Finishes the dumper operation:
        1. Adds the session status to the xml
        2. Writes the xml to the file
        3. Closes the xml file
        '''
        if ((self._outputFP is not None) and not(self._outputFP.closed)):
            statusElem = self.__doc.createElement(STATUS_ELEM_NAME)
            statusElem.setAttributeNode(self._createAttribute(SUCESS_ATTR, getUnicode(resultStatus)))

            if not resultStatus:
                errorElem = self.__doc.createElement(ERROR_ELEM_NAME)

                if isinstance(resultMsg, Exception):
                    errorElem.setAttributeNode(self._createAttribute(TYPE_ATTR, type(resultMsg).__name__))
                else:
                    errorElem.setAttributeNode(self._createAttribute(TYPE_ATTR, UNHANDLED_PROBLEM_TYPE))

                errorElem.appendChild(self._createTextNode(getUnicode(resultMsg)))
                statusElem.appendChild(errorElem)

            self._addToRoot(statusElem)
            self.__write(prettyprint.formatXML(self.__doc, encoding=UNICODE_ENCODING))
            self._outputFP.close()

def closeDumper(status, msg=""):
    """
    Closes the dumper of the session
    """

    if hasattr(conf, "dumper") and hasattr(conf.dumper, "finish"):
        conf.dumper.finish(status, msg)

dumper = XMLDump()
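The dumper above builds its report with xml.dom.minidom. As an aside, here is a minimal, self-contained sketch of that same pattern (the element and attribute names are illustrative stand-ins for constants such as RESULTS_ELEM_NAME and NAME_ATTR, and plain toprettyxml() stands in for prettyprint.formatXML(); this is not sqlmap code):

import codecs
import xml.dom.minidom

doc = xml.dom.minidom.Document()
root = doc.createElement("results")          # cf. RESULTS_ELEM_NAME
doc.appendChild(root)

fileElem = doc.createElement("fileContent")  # cf. FILE_CONTENT_ELEM_NAME
attr = doc.createAttribute("name")           # cf. NAME_ATTR
attr.value = "/tmp/example.txt"
fileElem.setAttributeNode(attr)
fileElem.appendChild(doc.createTextNode("extracted data"))
root.appendChild(fileElem)

with codecs.open("results.xml", "w", "utf-8") as fp:
    fp.write(doc.toprettyxml())              # the real code uses prettyprint.formatXML()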
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -17,6 +17,7 @@ from optparse import SUPPRESS_HELP
 
 from lib.core.common import checkDeprecatedOptions
 from lib.core.common import checkSystemEncoding
+from lib.core.common import dataToStdout
 from lib.core.common import expandMnemonics
 from lib.core.common import getUnicode
 from lib.core.data import cmdLineOptions
@@ -30,6 +31,7 @@ from lib.core.settings import BASIC_HELP_ITEMS
 from lib.core.settings import DUMMY_URL
 from lib.core.settings import IS_WIN
 from lib.core.settings import MAX_HELP_OPTION_LENGTH
+from lib.core.settings import UNICODE_ENCODING
 from lib.core.settings import VERSION_STRING
 from lib.core.shell import autoCompletion
 from lib.core.shell import clearHistory
@@ -46,7 +48,7 @@ def cmdLineParser(argv=None):
 
     checkSystemEncoding()
 
-    _ = getUnicode(os.path.basename(argv[0]), encoding=sys.getfilesystemencoding())
+    _ = getUnicode(os.path.basename(argv[0]), encoding=sys.getfilesystemencoding() or UNICODE_ENCODING)
 
     usage = "%s%s [options]" % ("python " if not IS_WIN else "", \
         "\"%s\"" % _ if " " in _ else _)
@@ -150,6 +152,15 @@ def cmdLineParser(argv=None):
         request.add_option("--ignore-401", dest="ignore401", action="store_true",
                            help="Ignore HTTP Error 401 (Unauthorized)")
 
+        request.add_option("--ignore-proxy", dest="ignoreProxy", action="store_true",
+                           help="Ignore system default proxy settings")
+
+        request.add_option("--ignore-redirects", dest="ignoreRedirects", action="store_true",
+                           help="Ignore redirection attempts")
+
+        request.add_option("--ignore-timeouts", dest="ignoreTimeouts", action="store_true",
+                           help="Ignore connection timeouts")
+
         request.add_option("--proxy", dest="proxy",
                            help="Use a proxy to connect to the target URL")
 
@@ -160,9 +171,6 @@ def cmdLineParser(argv=None):
         request.add_option("--proxy-file", dest="proxyFile",
                            help="Load proxy list from a file")
 
-        request.add_option("--ignore-proxy", dest="ignoreProxy", action="store_true",
-                           help="Ignore system default proxy settings")
-
         request.add_option("--tor", dest="tor",
                            action="store_true",
                            help="Use Tor anonymity network")
@@ -171,7 +179,7 @@ def cmdLineParser(argv=None):
                            help="Set Tor proxy port other than default")
 
         request.add_option("--tor-type", dest="torType",
-                           help="Set Tor proxy type (HTTP (default), SOCKS4 or SOCKS5)")
+                           help="Set Tor proxy type (HTTP, SOCKS4 or SOCKS5 (default))")
 
         request.add_option("--check-tor", dest="checkTor",
                            action="store_true",
@@ -259,7 +267,10 @@ def cmdLineParser(argv=None):
                              help="Skip testing for given parameter(s)")
 
         injection.add_option("--skip-static", dest="skipStatic", action="store_true",
-                             help="Skip testing parameters that not appear dynamic")
+                             help="Skip testing parameters that not appear to be dynamic")
 
+        injection.add_option("--param-exclude", dest="paramExclude",
+                             help="Regexp to exclude parameters from testing (e.g. \"ses\")")
+
         injection.add_option("--dbms", dest="dbms",
                              help="Force back-end DBMS to this value")
@@ -359,7 +370,7 @@ def cmdLineParser(argv=None):
         techniques.add_option("--union-from", dest="uFrom",
                               help="Table to use in FROM part of UNION query SQL injection")
 
-        techniques.add_option("--dns-domain", dest="dnsName",
+        techniques.add_option("--dns-domain", dest="dnsDomain",
                               help="Domain name used for DNS exfiltration attack")
 
         techniques.add_option("--second-order", dest="secondOrder",
@@ -464,14 +475,17 @@ def cmdLineParser(argv=None):
                                help="Exclude DBMS system databases when "
                                     "enumerating tables")
 
+        enumeration.add_option("--pivot-column", dest="pivotColumn",
+                               help="Pivot column name")
+
         enumeration.add_option("--where", dest="dumpWhere",
                                help="Use WHERE condition while table dumping")
 
         enumeration.add_option("--start", dest="limitStart", type="int",
-                               help="First query output entry to retrieve")
+                               help="First dump table entry to retrieve")
 
         enumeration.add_option("--stop", dest="limitStop", type="int",
-                               help="Last query output entry to retrieve")
+                               help="Last dump table entry to retrieve")
 
         enumeration.add_option("--first", dest="firstChar", type="int",
                                help="First query output word character to retrieve")
@@ -617,9 +631,16 @@ def cmdLineParser(argv=None):
                            action="store_true",
                            help="Never ask for user input, use the default behaviour")
 
+        general.add_option("--binary-fields", dest="binaryFields",
+                           help="Result fields having binary values (e.g. \"digest\")")
+
         general.add_option("--charset", dest="charset",
                            help="Force character encoding used for data retrieval")
 
+        general.add_option("--check-internet", dest="checkInternet",
+                           action="store_true",
+                           help="Check Internet connection before assessing the target")
+
         general.add_option("--crawl", dest="crawlDepth", type="int",
                            help="Crawl the website starting from the target URL")
 
@@ -662,9 +683,6 @@ def cmdLineParser(argv=None):
                            action="store_true",
                            help="Parse and display DBMS error messages from responses")
 
-        general.add_option("--pivot-column", dest="pivotColumn",
-                           help="Pivot column name")
-
         general.add_option("--save", dest="saveConfig",
                            help="Save options to a configuration INI file")
 
@@ -716,10 +734,6 @@ def cmdLineParser(argv=None):
                                  action="store_true",
                                  help="Make a thorough testing for a WAF/IPS/IDS protection")
 
-        miscellaneous.add_option("--skip-waf", dest="skipWaf",
-                                 action="store_true",
-                                 help="Skip heuristic detection of WAF/IPS/IDS protection")
-
         miscellaneous.add_option("--mobile", dest="mobile",
                                  action="store_true",
                                  help="Imitate smartphone through HTTP User-Agent header")
@@ -728,20 +742,26 @@ def cmdLineParser(argv=None):
                                  action="store_true",
                                  help="Work in offline mode (only use session data)")
 
-        miscellaneous.add_option("--page-rank", dest="pageRank",
-                                 action="store_true",
-                                 help="Display page rank (PR) for Google dork results")
-
         miscellaneous.add_option("--purge-output", dest="purgeOutput",
                                  action="store_true",
                                  help="Safely remove all content from output directory")
 
+        miscellaneous.add_option("--skip-waf", dest="skipWaf",
+                                 action="store_true",
+                                 help="Skip heuristic detection of WAF/IPS/IDS protection")
+
         miscellaneous.add_option("--smart", dest="smart",
                                  action="store_true",
                                  help="Conduct thorough tests only if positive heuristic(s)")
 
         miscellaneous.add_option("--sqlmap-shell", dest="sqlmapShell", action="store_true",
                                  help="Prompt for an interactive sqlmap shell")
 
+        miscellaneous.add_option("--tmp-dir", dest="tmpDir",
+                                 help="Local directory for storing temporary files")
+
+        miscellaneous.add_option("--web-root", dest="webRoot",
+                                 help="Web server document root directory (e.g. \"/var/www\")")
+
         miscellaneous.add_option("--wizard", dest="wizard",
                                  action="store_true",
@@ -751,21 +771,18 @@ def cmdLineParser(argv=None):
         parser.add_option("--dummy", dest="dummy", action="store_true",
                           help=SUPPRESS_HELP)
 
-        parser.add_option("--pickled-options", dest="pickledOptions",
+        parser.add_option("--murphy-rate", dest="murphyRate", type="int",
                           help=SUPPRESS_HELP)
 
         parser.add_option("--disable-precon", dest="disablePrecon", action="store_true",
                           help=SUPPRESS_HELP)
 
+        parser.add_option("--disable-stats", dest="disableStats", action="store_true",
+                          help=SUPPRESS_HELP)
+
         parser.add_option("--profile", dest="profile", action="store_true",
                           help=SUPPRESS_HELP)
 
-        parser.add_option("--binary-fields", dest="binaryFields",
-                          help=SUPPRESS_HELP)
-
-        parser.add_option("--cpu-throttle", dest="cpuThrottle", type="int",
-                          help=SUPPRESS_HELP)
-
         parser.add_option("--force-dns", dest="forceDns", action="store_true",
                           help=SUPPRESS_HELP)
 
@@ -783,6 +800,14 @@ def cmdLineParser(argv=None):
 
         parser.add_option("--run-case", dest="runCase", help=SUPPRESS_HELP)
 
+        # API options
+        parser.add_option("--api", dest="api", action="store_true",
+                          help=SUPPRESS_HELP)
+
+        parser.add_option("--taskid", dest="taskid", help=SUPPRESS_HELP)
+
+        parser.add_option("--database", dest="database", help=SUPPRESS_HELP)
+
         parser.add_option_group(target)
         parser.add_option_group(request)
         parser.add_option_group(optimization)
@@ -809,12 +834,12 @@ def cmdLineParser(argv=None):
         parser.formatter._format_option_strings = parser.formatter.format_option_strings
         parser.formatter.format_option_strings = type(parser.formatter.format_option_strings)(_, parser, type(parser))
 
-        # Dirty hack for making a short option -hh
+        # Dirty hack for making a short option '-hh'
         option = parser.get_option("--hh")
         option._short_opts = ["-hh"]
         option._long_opts = []
 
-        # Dirty hack for inherent help message of switch -h
+        # Dirty hack for inherent help message of switch '-h'
         option = parser.get_option("-h")
         option.help = option.help.capitalize().replace("this help", "basic help")
 
@@ -824,7 +849,7 @@ def cmdLineParser(argv=None):
         extraHeaders = []
 
         for arg in argv:
-            _.append(getUnicode(arg, encoding=sys.getfilesystemencoding()))
+            _.append(getUnicode(arg, encoding=sys.getfilesystemencoding() or UNICODE_ENCODING))
 
         argv = _
         checkDeprecatedOptions(argv)
@@ -862,13 +887,13 @@ def cmdLineParser(argv=None):
                     continue
                 elif command.lower() == "clear":
                     clearHistory()
-                    print "[i] history cleared"
+                    dataToStdout("[i] history cleared\n")
                     saveHistory(AUTOCOMPLETE_TYPE.SQLMAP)
                 elif command.lower() in ("x", "q", "exit", "quit"):
                     raise SqlmapShellQuitException
                 elif command[0] != '-':
-                    print "[!] invalid option(s) provided"
-                    print "[i] proper example: '-u http://www.site.com/vuln.php?id=1 --banner'"
+                    dataToStdout("[!] invalid option(s) provided\n")
+                    dataToStdout("[i] proper example: '-u http://www.site.com/vuln.php?id=1 --banner'\n")
                 else:
                     saveHistory(AUTOCOMPLETE_TYPE.SQLMAP)
                     loadHistory(AUTOCOMPLETE_TYPE.SQLMAP)
@@ -880,12 +905,18 @@ def cmdLineParser(argv=None):
             except ValueError, ex:
                 raise SqlmapSyntaxException, "something went wrong during command line parsing ('%s')" % ex.message
 
-        # Hide non-basic options in basic help case
        for i in xrange(len(argv)):
             if argv[i] == "-hh":
                 argv[i] = "-h"
+            elif len(argv[i]) > 1 and all(ord(_) in xrange(0x2018, 0x2020) for _ in ((argv[i].split('=', 1)[-1].strip() or ' ')[0], argv[i][-1])):
+                dataToStdout("[!] copy-pasting illegal (non-console) quote characters from Internet is, well, illegal (%s)\n" % argv[i])
+                raise SystemExit
+            elif len(argv[i]) > 1 and u"\uff0c" in argv[i].split('=', 1)[-1]:
+                dataToStdout("[!] copy-pasting illegal (non-console) comma characters from Internet is, well, illegal (%s)\n" % argv[i])
+                raise SystemExit
             elif re.search(r"\A-\w=.+", argv[i]):
-                print "[!] potentially miswritten (illegal '=') short option detected ('%s')" % argv[i]
+                dataToStdout("[!] potentially miswritten (illegal '=') short option detected ('%s')\n" % argv[i])
+                raise SystemExit
             elif argv[i] == "-H":
                 if i + 1 < len(argv):
                     extraHeaders.append(argv[i + 1])
@@ -895,7 +926,7 @@ def cmdLineParser(argv=None):
             elif argv[i] == "--version":
                 print VERSION_STRING.split('/')[-1]
                 raise SystemExit
-            elif argv[i] == "-h":
+            elif argv[i] in ("-h", "--help"):
                 advancedHelp = False
                 for group in parser.option_groups[:]:
                     found = False
@@ -907,14 +938,22 @@ def cmdLineParser(argv=None):
                     if not found:
                         parser.option_groups.remove(group)
 
+        for verbosity in (_ for _ in argv if re.search(r"\A\-v+\Z", _)):
+            try:
+                if argv.index(verbosity) == len(argv) - 1 or not argv[argv.index(verbosity) + 1].isdigit():
+                    conf.verbose = verbosity.count('v') + 1
+                    del argv[argv.index(verbosity)]
+            except (IndexError, ValueError):
+                pass
+
         try:
             (args, _) = parser.parse_args(argv)
         except UnicodeEncodeError, ex:
-            print "\n[!] %s" % ex.object.encode("unicode-escape")
+            dataToStdout("\n[!] %s\n" % ex.object.encode("unicode-escape"))
             raise SystemExit
         except SystemExit:
             if "-h" in argv and not advancedHelp:
-                print "\n[!] to see full list of options run with '-hh'"
+                dataToStdout("\n[!] to see full list of options run with '-hh'\n")
             raise
 
         if extraHeaders:
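The new verbosity handling above folds stacked "-v" switches (e.g. "-vv") into a numeric level before optparse ever sees them. As a rough, standalone sketch of that behaviour (values below are illustrative only, not sqlmap's actual helper):

import re

def collapse_verbosity(argv):
    """Turn a bare -v/-vv/-vvv switch into a verbosity level and drop it from argv."""
    verbose = None
    for flag in [_ for _ in argv if re.search(r"\A-v+\Z", _)]:
        i = argv.index(flag)
        # only when the switch is NOT followed by an explicit number (e.g. "-v 3")
        if i == len(argv) - 1 or not argv[i + 1].isdigit():
            verbose = flag.count('v') + 1
            del argv[i]
    return verbose

args = ["-u", "http://www.site.com/vuln.php?id=1", "-vv"]
print(collapse_verbosity(args))   # 3
print(args)                       # ['-u', 'http://www.site.com/vuln.php?id=1']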
@@ -933,9 +972,9 @@ def cmdLineParser(argv=None):
 
         if not any((args.direct, args.url, args.logFile, args.bulkFile, args.googleDork, args.configFile, \
             args.requestFile, args.updateAll, args.smokeTest, args.liveTest, args.wizard, args.dependencies, \
-            args.purgeOutput, args.pickledOptions, args.sitemapUrl)):
+            args.purgeOutput, args.sitemapUrl)):
             errMsg = "missing a mandatory option (-d, -u, -l, -m, -r, -g, -c, -x, --wizard, --update, --purge-output or --dependencies), "
-            errMsg += "use -h for basic or -hh for advanced help"
+            errMsg += "use -h for basic or -hh for advanced help\n"
             parser.error(errMsg)
 
         return args
@@ -946,7 +985,7 @@ def cmdLineParser(argv=None):
     except SystemExit:
         # Protection against Windows dummy double clicking
         if IS_WIN:
-            print "\nPress Enter to continue...",
+            dataToStdout("\nPress Enter to continue...")
             raw_input()
         raise
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -14,13 +14,14 @@ from lib.core.common import UnicodeRawConfigParser
 from lib.core.data import cmdLineOptions
 from lib.core.data import conf
 from lib.core.data import logger
+from lib.core.enums import OPTION_TYPE
 from lib.core.exception import SqlmapMissingMandatoryOptionException
 from lib.core.exception import SqlmapSyntaxException
 from lib.core.optiondict import optDict
 
 config = None
 
-def configFileProxy(section, option, boolean=False, integer=False):
+def configFileProxy(section, option, datatype):
     """
     Parse configuration file and save settings into the configuration
     advanced dictionary.
@@ -30,10 +31,12 @@ def configFileProxy(section, option, boolean=False, integer=False):
 
     if config.has_option(section, option):
         try:
-            if boolean:
+            if datatype == OPTION_TYPE.BOOLEAN:
                 value = config.getboolean(section, option) if config.get(section, option) else False
-            elif integer:
+            elif datatype == OPTION_TYPE.INTEGER:
                 value = config.getint(section, option) if config.get(section, option) else 0
+            elif datatype == OPTION_TYPE.FLOAT:
+                value = config.getfloat(section, option) if config.get(section, option) else 0.0
             else:
                 value = config.get(section, option)
         except ValueError, ex:
@@ -91,8 +94,4 @@ def configFileParser(configFile):
     for family, optionData in optDict.items():
         for option, datatype in optionData.items():
             datatype = unArrayizeValue(datatype)
-
-            boolean = datatype == "boolean"
-            integer = datatype == "integer"
-
-            configFileProxy(family, option, boolean, integer)
+            configFileProxy(family, option, datatype)
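configFileProxy() now receives the option's declared datatype instead of separate boolean/integer flags, which is what makes the new FLOAT branch possible. A rough, self-contained sketch of the same dispatch against a plain RawConfigParser (the OPTION_TYPE values mirror lib/core/enums.py; the section and option names are made up):

try:
    from ConfigParser import RawConfigParser   # Python 2, as used by sqlmap at the time
except ImportError:
    from configparser import RawConfigParser   # fallback for trying the sketch on Python 3

class OPTION_TYPE:
    BOOLEAN, INTEGER, FLOAT, STRING = "boolean", "integer", "float", "string"

def read_option(config, section, option, datatype):
    # empty values fall back to a per-type default, mirroring the diff above
    if not config.get(section, option):
        return {OPTION_TYPE.BOOLEAN: False, OPTION_TYPE.INTEGER: 0, OPTION_TYPE.FLOAT: 0.0}.get(datatype, None)
    if datatype == OPTION_TYPE.BOOLEAN:
        return config.getboolean(section, option)
    elif datatype == OPTION_TYPE.INTEGER:
        return config.getint(section, option)
    elif datatype == OPTION_TYPE.FLOAT:
        return config.getfloat(section, option)
    return config.get(section, option)

config = RawConfigParser()
config.add_section("Request")
config.set("Request", "delay", "0.5")
config.set("Request", "randomAgent", "True")
print(read_option(config, "Request", "delay", OPTION_TYPE.FLOAT))          # 0.5
print(read_option(config, "Request", "randomAgent", OPTION_TYPE.BOOLEAN))  # True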
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -24,7 +24,8 @@ class HTMLHandler(ContentHandler):
         ContentHandler.__init__(self)
 
         self._dbms = None
-        self._page = page
+        self._page = (page or "")
+        self._lower_page = self._page.lower()
 
         self.dbms = None
 
@@ -33,11 +34,20 @@ class HTMLHandler(ContentHandler):
         threadData.lastErrorPage = (threadData.lastRequestUID, self._page)
 
     def startElement(self, name, attrs):
+        if self.dbms:
+            return
+
         if name == "dbms":
             self._dbms = attrs.get("value")
 
         elif name == "error":
-            if re.search(attrs.get("regexp"), self._page, re.I):
+            regexp = attrs.get("regexp")
+            if regexp not in kb.cache.regex:
+                keywords = re.findall("\w+", re.sub(r"\\.", " ", regexp))
+                keywords = sorted(keywords, key=len)
+                kb.cache.regex[regexp] = keywords[-1].lower()
+
+            if kb.cache.regex[regexp] in self._lower_page and re.search(regexp, self._page, re.I):
                 self.dbms = self._dbms
                 self._markAsErrorPage()
 
@@ -49,6 +59,13 @@ def htmlParser(page):
 
     xmlfile = paths.ERRORS_XML
     handler = HTMLHandler(page)
+    key = hash(page)
+
+    if key in kb.cache.parsedDbms:
+        retVal = kb.cache.parsedDbms[key]
+        if retVal:
+            handler._markAsErrorPage()
+        return retVal
 
     parseXmlFile(xmlfile, handler)
 
@@ -58,6 +75,8 @@ def htmlParser(page):
     else:
         kb.lastParserStatus = None
 
+    kb.cache.parsedDbms[key] = handler.dbms
+
     # generic SQL warning/error messages
     if re.search(r"SQL (warning|error|syntax)", page, re.I):
         handler._markAsErrorPage()
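The handler changes above add a per-regexp cache: the longest literal keyword of each error regexp is stored once, and the full regular expression only runs when that keyword occurs in the lowercased page. A standalone sketch of the idea (the sample pattern and page are made up):

import re

_keyword_cache = {}

def regexp_matches(regexp, page, lower_page):
    # cache the longest literal word of the pattern as a cheap pre-filter
    if regexp not in _keyword_cache:
        keywords = re.findall(r"\w+", re.sub(r"\\.", " ", regexp))
        _keyword_cache[regexp] = sorted(keywords, key=len)[-1].lower()
    # run the real regex only when the keyword is present at all
    return _keyword_cache[regexp] in lower_page and re.search(regexp, page, re.I) is not None

page = "Warning: mysql_fetch_array() expects parameter 1 to be resource"
print(regexp_matches(r"mysql_fetch_array\(\)", page, page.lower()))   # True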
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -14,6 +14,7 @@ from lib.core.data import conf
 from lib.core.data import paths
 from lib.core.datatype import AttribDict
 from lib.core.exception import SqlmapInstallationException
+from lib.core.settings import PAYLOAD_XML_FILES
 
 def cleanupVals(text, tag):
     if tag in ("clause", "where"):
@@ -74,7 +75,7 @@ def loadBoundaries():
     try:
         doc = et.parse(paths.BOUNDARIES_XML)
     except Exception, ex:
-        errMsg = "something seems to be wrong with "
+        errMsg = "something appears to be wrong with "
         errMsg += "the file '%s' ('%s'). Please make " % (paths.BOUNDARIES_XML, getSafeExString(ex))
         errMsg += "sure that you haven't made any changes to it"
         raise SqlmapInstallationException, errMsg
@@ -83,16 +84,13 @@ def loadBoundaries():
     parseXmlNode(root)
 
 def loadPayloads():
-    payloadFiles = os.listdir(paths.SQLMAP_XML_PAYLOADS_PATH)
-    payloadFiles.sort()
-
-    for payloadFile in payloadFiles:
+    for payloadFile in PAYLOAD_XML_FILES:
         payloadFilePath = os.path.join(paths.SQLMAP_XML_PAYLOADS_PATH, payloadFile)
 
         try:
             doc = et.parse(payloadFilePath)
         except Exception, ex:
-            errMsg = "something seems to be wrong with "
+            errMsg = "something appears to be wrong with "
             errMsg += "the file '%s' ('%s'). Please make " % (payloadFilePath, getSafeExString(ex))
             errMsg += "sure that you haven't made any changes to it"
             raise SqlmapInstallationException, errMsg
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -41,8 +41,7 @@ def parseSitemap(url, retVal=None):
         if url.endswith(".xml") and "sitemap" in url.lower():
             if kb.followSitemapRecursion is None:
                 message = "sitemap recursion detected. Do you want to follow? [y/N] "
-                test = readInput(message, default="N")
-                kb.followSitemapRecursion = test[0] in ("y", "Y")
+                kb.followSitemapRecursion = readInput(message, default='N', boolean=True)
             if kb.followSitemapRecursion:
                 parseSitemap(url, retVal)
             else:
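Several prompts in these diffs switch to readInput(message, default=..., boolean=True) instead of inspecting the first character of the reply. The actual helper lives in lib/core/common.py and is not shown here; the following is only a rough approximation of what the boolean mode is assumed to do:

def read_input(message, default=None, boolean=False):
    # hypothetical stand-in for sqlmap's readInput(); batch mode, logging etc. are omitted
    reply = raw_input(message).strip() or (default or "")
    if boolean:
        return reply.lower().startswith('y')   # "y"/"Y"/"yes" -> True, anything else -> False
    return reply

# e.g. read_input("... follow? [y/N] ", default='N', boolean=True) -> False unless the user answers "y"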
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -13,6 +13,7 @@ import StringIO
 import struct
 import zlib
 
+from lib.core.common import Backend
 from lib.core.common import extractErrorMessage
 from lib.core.common import extractRegexResult
 from lib.core.common import getPublicTypeMembers
@@ -25,6 +26,8 @@ from lib.core.common import singleTimeWarnMessage
 from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
+from lib.core.decorators import cachedmethod
+from lib.core.enums import DBMS
 from lib.core.enums import HTTP_HEADER
 from lib.core.enums import PLACE
 from lib.core.exception import SqlmapCompressionException
@@ -34,6 +37,7 @@ from lib.core.settings import EVENTVALIDATION_REGEX
 from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
 from lib.core.settings import META_CHARSET_REGEX
 from lib.core.settings import PARSE_HEADERS_LIMIT
+from lib.core.settings import SELECT_FROM_TABLE_REGEX
 from lib.core.settings import UNICODE_ENCODING
 from lib.core.settings import VIEWSTATE_REGEX
 from lib.parse.headers import headersParser
@@ -91,19 +95,19 @@ def forgeHeaders(items=None):
             if cookie.domain_specified and not conf.hostname.endswith(cookie.domain):
                 continue
 
-            if ("%s=" % cookie.name) in headers[HTTP_HEADER.COOKIE]:
+            if ("%s=" % getUnicode(cookie.name)) in getUnicode(headers[HTTP_HEADER.COOKIE]):
                 if conf.loadCookies:
                     conf.httpHeaders = filter(None, ((item if item[0] != HTTP_HEADER.COOKIE else None) for item in conf.httpHeaders))
                 elif kb.mergeCookies is None:
                     message = "you provided a HTTP %s header value. " % HTTP_HEADER.COOKIE
                     message += "The target URL provided its own cookies within "
                     message += "the HTTP %s header which intersect with yours. " % HTTP_HEADER.SET_COOKIE
-                    message += "Do you want to merge them in futher requests? [Y/n] "
-                    _ = readInput(message, default="Y")
-                    kb.mergeCookies = not _ or _[0] in ("y", "Y")
+                    message += "Do you want to merge them in further requests? [Y/n] "
+                    kb.mergeCookies = readInput(message, default='Y', boolean=True)
 
                 if kb.mergeCookies and kb.injection.place != PLACE.COOKIE:
-                    _ = lambda x: re.sub(r"(?i)\b%s=[^%s]+" % (re.escape(cookie.name), conf.cookieDel or DEFAULT_COOKIE_DELIMITER), ("%s=%s" % (cookie.name, getUnicode(cookie.value))).replace('\\', r'\\'), x)
+                    _ = lambda x: re.sub(r"(?i)\b%s=[^%s]+" % (re.escape(getUnicode(cookie.name)), conf.cookieDel or DEFAULT_COOKIE_DELIMITER), ("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value))).replace('\\', r'\\'), x)
                     headers[HTTP_HEADER.COOKIE] = _(headers[HTTP_HEADER.COOKIE])
 
                     if PLACE.COOKIE in conf.parameters:
@@ -112,14 +116,14 @@ def forgeHeaders(items=None):
                         conf.httpHeaders = [(item[0], item[1] if item[0] != HTTP_HEADER.COOKIE else _(item[1])) for item in conf.httpHeaders]
 
             elif not kb.testMode:
-                headers[HTTP_HEADER.COOKIE] += "%s %s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, cookie.name, getUnicode(cookie.value))
+                headers[HTTP_HEADER.COOKIE] += "%s %s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, getUnicode(cookie.name), getUnicode(cookie.value))
 
         if kb.testMode and not any((conf.csrfToken, conf.safeUrl)):
             resetCookieJar(conf.cj)
 
     return headers
 
-def parseResponse(page, headers):
+def parseResponse(page, headers, status=None):
     """
     @param page: the page to parse to feed the knowledge base htmlFp
     (back-end DBMS fingerprint based upon DBMS error messages return
@@ -131,8 +135,9 @@ def parseResponse(page, headers):
     headersParser(headers)
 
     if page:
-        htmlParser(page)
+        htmlParser(page if not status else "%s\n\n%s" % (status, page))
 
+@cachedmethod
 def checkCharEncoding(encoding, warn=True):
     """
     Checks encoding name, repairs common misspellings and adjusts to
@@ -150,7 +155,7 @@ def checkCharEncoding(encoding, warn=True):
         return encoding
 
     # Reference: http://www.destructor.de/charsets/index.htm
-    translate = {"windows-874": "iso-8859-11", "utf-8859-1": "utf8", "en_us": "utf8", "macintosh": "iso-8859-1", "euc_tw": "big5_tw", "th": "tis-620", "unicode": "utf8", "utc8": "utf8", "ebcdic": "ebcdic-cp-be", "iso-8859": "iso8859-1", "ansi": "ascii", "gbk2312": "gbk", "windows-31j": "cp932"}
+    translate = {"windows-874": "iso-8859-11", "utf-8859-1": "utf8", "en_us": "utf8", "macintosh": "iso-8859-1", "euc_tw": "big5_tw", "th": "tis-620", "unicode": "utf8", "utc8": "utf8", "ebcdic": "ebcdic-cp-be", "iso-8859": "iso8859-1", "ansi": "ascii", "gbk2312": "gbk", "windows-31j": "cp932", "en": "us"}
 
     for delimiter in (';', ',', '('):
         if delimiter in encoding:
@@ -163,6 +168,8 @@ def checkCharEncoding(encoding, warn=True):
         encoding = encoding.replace("8858", "8859")  # iso-8858 -> iso-8859
     elif "8559" in encoding:
         encoding = encoding.replace("8559", "8859")  # iso-8559 -> iso-8859
+    elif "8895" in encoding:
+        encoding = encoding.replace("8895", "8859")  # iso-8895 -> iso-8859
     elif "5889" in encoding:
         encoding = encoding.replace("5889", "8859")  # iso-5889 -> iso-8859
     elif "5589" in encoding:
@@ -197,7 +204,7 @@ def checkCharEncoding(encoding, warn=True):
     # Reference: http://philip.html5.org/data/charsets-2.html
     if encoding in translate:
         encoding = translate[encoding]
-    elif encoding in ("null", "{charset}", "*") or not re.search(r"\w", encoding):
+    elif encoding in ("null", "{charset}", "charset", "*") or not re.search(r"\w", encoding):
         return None
 
     # Reference: http://www.iana.org/assignments/character-sets
@@ -227,7 +234,10 @@ def getHeuristicCharEncoding(page):
     Returns page encoding charset detected by usage of heuristics
     Reference: http://chardet.feedparser.org/docs/
     """
-    retVal = detect(page)["encoding"]
+
+    key = hash(page)
+    retVal = kb.cache.encoding.get(key) or detect(page)["encoding"]
+    kb.cache.encoding[key] = retVal
 
     if retVal:
         infoMsg = "heuristics detected web page charset '%s'" % retVal
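getHeuristicCharEncoding() above starts memoising chardet results by hash(page). A plain-dict sketch of that caching (assuming the chardet package, which sqlmap bundles under thirdparty/, is importable as "chardet"):

from chardet import detect

_encoding_cache = {}

def heuristic_charset(page):
    key = hash(page)
    if key not in _encoding_cache:
        _encoding_cache[key] = detect(page)["encoding"]   # the expensive detection runs once per page
    return _encoding_cache[key]

print(heuristic_charset(b"\xc5\xbeut\xc4\x87o"))   # most likely reports a UTF-8 variant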
@@ -258,15 +268,16 @@ def decodePage(page, contentEncoding, contentType):
 
             page = data.read()
         except Exception, msg:
-            errMsg = "detected invalid data for declared content "
-            errMsg += "encoding '%s' ('%s')" % (contentEncoding, msg)
-            singleTimeLogMessage(errMsg, logging.ERROR)
+            if "<html" not in page:  # in some cases, invalid "Content-Encoding" appears for plain HTML (should be ignored)
+                errMsg = "detected invalid data for declared content "
+                errMsg += "encoding '%s' ('%s')" % (contentEncoding, msg)
+                singleTimeLogMessage(errMsg, logging.ERROR)
 
             warnMsg = "turning off page compression"
             singleTimeWarnMessage(warnMsg)
 
             kb.pageCompress = False
             raise SqlmapCompressionException
 
     if not conf.charset:
         httpCharset, metaCharset = None, None
@@ -305,6 +316,12 @@ def decodePage(page, contentEncoding, contentType):
             page = re.sub(r"&([^;]+);", lambda _: chr(htmlEntities[_.group(1)]) if htmlEntities.get(_.group(1), 256) < 256 else _.group(0), page)
 
         kb.pageEncoding = kb.pageEncoding or checkCharEncoding(getHeuristicCharEncoding(page))
 
+        if kb.pageEncoding and kb.pageEncoding.lower() == "utf-8-sig":
+            kb.pageEncoding = "utf-8"
+            if page and page.startswith("\xef\xbb\xbf"):  # Reference: https://docs.python.org/2/library/codecs.html (Note: noticed problems when "utf-8-sig" is left to Python for handling)
+                page = page[3:]
+
         page = getUnicode(page, kb.pageEncoding)
 
         # e.g. ’…™
@@ -323,18 +340,23 @@ def decodePage(page, contentEncoding, contentType):
 
     return page
 
-def processResponse(page, responseHeaders):
+def processResponse(page, responseHeaders, status=None):
     kb.processResponseCounter += 1
 
     page = page or ""
 
-    parseResponse(page, responseHeaders if kb.processResponseCounter < PARSE_HEADERS_LIMIT else None)
+    parseResponse(page, responseHeaders if kb.processResponseCounter < PARSE_HEADERS_LIMIT else None, status)
+
+    if not kb.tableFrom and Backend.getIdentifiedDbms() in (DBMS.ACCESS,):
+        kb.tableFrom = extractRegexResult(SELECT_FROM_TABLE_REGEX, page)
+    else:
+        kb.tableFrom = None
 
     if conf.parseErrors:
         msg = extractErrorMessage(page)
 
         if msg:
-            logger.warning("parsed DBMS error message: '%s'" % msg)
+            logger.warning("parsed DBMS error message: '%s'" % msg.rstrip('.'))
 
     if kb.originalPage is None:
         for regex in (EVENTVALIDATION_REGEX, VIEWSTATE_REGEX):
@@ -344,9 +366,25 @@ def processResponse(page, responseHeaders):
                 if PLACE.POST in conf.paramDict and name in conf.paramDict[PLACE.POST]:
                     if conf.paramDict[PLACE.POST][name] in page:
                         continue
-                    conf.paramDict[PLACE.POST][name] = value
-                    conf.parameters[PLACE.POST] = re.sub("(?i)(%s=)[^&]+" % name, r"\g<1>%s" % value, conf.parameters[PLACE.POST])
+                    else:
+                        msg = "do you want to automatically adjust the value of '%s'? [y/N]" % name
+
+                        if not readInput(msg, default='N', boolean=True):
+                            continue
+
+                        conf.paramDict[PLACE.POST][name] = value
+                        conf.parameters[PLACE.POST] = re.sub("(?i)(%s=)[^&]+" % re.escape(name), r"\g<1>%s" % re.escape(value), conf.parameters[PLACE.POST])
+
+    if not kb.captchaDetected and re.search(r"(?i)captcha", page or ""):
+        for match in re.finditer(r"(?si)<form.+?</form>", page):
+            if re.search(r"(?i)captcha", match.group(0)):
+                kb.captchaDetected = True
+                warnMsg = "potential CAPTCHA protection mechanism detected"
+                if re.search(r"(?i)<title>[^<]*CloudFlare", page):
+                    warnMsg += " (CloudFlare)"
+                singleTimeWarnMessage(warnMsg)
+                break
 
     if re.search(BLOCKED_IP_REGEX, page):
-        errMsg = "it appears that you have been blocked by the target server"
-        singleTimeLogMessage(errMsg, logging.ERROR)
+        warnMsg = "it appears that you have been blocked by the target server"
+        singleTimeWarnMessage(warnMsg)
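The processResponse() additions include a lightweight CAPTCHA heuristic: only when the word "captcha" appears at all are the page's <form> blocks scanned. As a standalone predicate sketch (the sample markup is made up):

import re

def looks_like_captcha(page):
    """True when some <form> on the page mentions "captcha" (case-insensitive)."""
    if not re.search(r"(?i)captcha", page or ""):
        return False
    for match in re.finditer(r"(?si)<form.+?</form>", page):
        if re.search(r"(?i)captcha", match.group(0)):
            return True
    return False

html = '<form action="/verify"><img src="/captcha.png"><input name="answer"></form>'
print(looks_like_captcha(html))   # True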
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
 See the file 'doc/COPYING' for copying permission
 """
 
@@ -21,10 +21,12 @@ from lib.core.settings import DEFAULT_PAGE_ENCODING
 from lib.core.settings import DIFF_TOLERANCE
 from lib.core.settings import HTML_TITLE_REGEX
 from lib.core.settings import MIN_RATIO
+from lib.core.settings import MAX_DIFFLIB_SEQUENCE_LENGTH
 from lib.core.settings import MAX_RATIO
 from lib.core.settings import REFLECTED_VALUE_MARKER
 from lib.core.settings import LOWER_RATIO_BOUND
 from lib.core.settings import UPPER_RATIO_BOUND
+from lib.core.settings import URI_HTTP_HEADER
 from lib.core.threads import getCurrentThreadData
 
 def comparison(page, headers, code=None, getRatioValue=False, pageLength=None):
@@ -47,19 +49,15 @@ def _comparison(page, headers, code, getRatioValue, pageLength):
     threadData = getCurrentThreadData()
 
     if kb.testMode:
-        threadData.lastComparisonHeaders = listToStrValue(headers.headers) if headers else ""
+        threadData.lastComparisonHeaders = listToStrValue([_ for _ in headers.headers if not _.startswith("%s:" % URI_HTTP_HEADER)]) if headers else ""
         threadData.lastComparisonPage = page
+        threadData.lastComparisonCode = code
 
     if page is None and pageLength is None:
         return None
 
-    count = 0
-
-    seqMatcher = threadData.seqMatcher
-    seqMatcher.set_seq1(kb.pageTemplate)
-
     if any((conf.string, conf.notString, conf.regexp)):
-        rawResponse = "%s%s" % (listToStrValue(headers.headers) if headers else "", page)
+        rawResponse = "%s%s" % (listToStrValue([_ for _ in headers.headers if not _.startswith("%s:" % URI_HTTP_HEADER)]) if headers else "", page)
 
         # String to match in page when the query is True and/or valid
         if conf.string:
@@ -77,6 +75,9 @@ def _comparison(page, headers, code, getRatioValue, pageLength):
     if conf.code:
         return conf.code == code
 
+    seqMatcher = threadData.seqMatcher
+    seqMatcher.set_seq1(kb.pageTemplate)
+
     if page:
         # In case of an DBMS error page return None
         if kb.errorIsNone and (wasLastResponseDBMSError() or wasLastResponseHTTPError()) and not kb.negativeLogic:
@@ -109,62 +110,43 @@ def _comparison(page, headers, code, getRatioValue, pageLength):
         elif isinstance(seqMatcher.a, unicode) and isinstance(page, str):
             seqMatcher.a = seqMatcher.a.encode(kb.pageEncoding or DEFAULT_PAGE_ENCODING, 'ignore')
 
-    seq1, seq2 = None, None
-
-    if conf.titles:
-        seq1 = extractRegexResult(HTML_TITLE_REGEX, seqMatcher.a)
-        seq2 = extractRegexResult(HTML_TITLE_REGEX, page)
-    else:
-        seq1 = getFilteredPageContent(seqMatcher.a, True) if conf.textOnly else seqMatcher.a
-        seq2 = getFilteredPageContent(page, True) if conf.textOnly else page
-
-    if seq1 is None or seq2 is None:
-        return None
-
-    seq1 = seq1.replace(REFLECTED_VALUE_MARKER, "")
-    seq2 = seq2.replace(REFLECTED_VALUE_MARKER, "")
-
-    while count < min(len(seq1), len(seq2)):
-        if seq1[count] == seq2[count]:
-            count += 1
-        else:
-            break
-
-    if count:
-        try:
-            _seq1 = seq1[count:]
-            _seq2 = seq2[count:]
-        except MemoryError:
-            pass
-        else:
-            seq1 = _seq1
-            seq2 = _seq2
-
-    while True:
-        try:
-            seqMatcher.set_seq1(seq1)
-        except MemoryError:
-            seq1 = seq1[:len(seq1) / 1024]
-        else:
-            break
-
-    while True:
-        try:
-            seqMatcher.set_seq2(seq2)
-        except MemoryError:
-            seq2 = seq2[:len(seq2) / 1024]
-        else:
-            break
-
-    ratio = round(seqMatcher.quick_ratio(), 3)
+    if seqMatcher.a and page and seqMatcher.a == page:
+        ratio = 1
+    elif kb.skipSeqMatcher or seqMatcher.a and page and any(len(_) > MAX_DIFFLIB_SEQUENCE_LENGTH for _ in (seqMatcher.a, page)):
+        ratio = 1.0 * len(seqMatcher.a) / len(page)
+        if ratio > 1:
+            ratio = 1. / ratio
+    else:
+        seq1, seq2 = None, None
+
+        if conf.titles:
+            seq1 = extractRegexResult(HTML_TITLE_REGEX, seqMatcher.a)
+            seq2 = extractRegexResult(HTML_TITLE_REGEX, page)
+        else:
+            seq1 = getFilteredPageContent(seqMatcher.a, True) if conf.textOnly else seqMatcher.a
+            seq2 = getFilteredPageContent(page, True) if conf.textOnly else page
+
+        if seq1 is None or seq2 is None:
+            return None
+
+        seq1 = seq1.replace(REFLECTED_VALUE_MARKER, "")
+        seq2 = seq2.replace(REFLECTED_VALUE_MARKER, "")
+
+        seqMatcher.set_seq1(seq1)
+        seqMatcher.set_seq2(seq2)
+
+        ratio = round(seqMatcher.quick_ratio(), 3)
 
     # If the url is stable and we did not set yet the match ratio and the
     # current injected value changes the url page content
     if kb.matchRatio is None:
-        if (count or ratio >= LOWER_RATIO_BOUND) and ratio <= UPPER_RATIO_BOUND:
+        if ratio >= LOWER_RATIO_BOUND and ratio <= UPPER_RATIO_BOUND:
             kb.matchRatio = ratio
             logger.debug("setting match ratio for current parameter to %.3f" % kb.matchRatio)
 
+    if kb.testMode:
+        threadData.lastComparisonRatio = ratio
+
    # If it has been requested to return the ratio and not a comparison
     # response
     if getRatioValue:
@@ -1,7 +1,7 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
|
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
|
||||||
See the file 'doc/COPYING' for copying permission
|
See the file 'doc/COPYING' for copying permission
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@@ -31,8 +31,8 @@ from extra.safe2bin.safe2bin import safecharencode
|
|||||||
from lib.core.agent import agent
|
from lib.core.agent import agent
|
||||||
from lib.core.common import asciifyUrl
|
from lib.core.common import asciifyUrl
|
||||||
from lib.core.common import calculateDeltaSeconds
|
from lib.core.common import calculateDeltaSeconds
|
||||||
|
from lib.core.common import checkSameHost
|
||||||
from lib.core.common import clearConsoleLine
|
from lib.core.common import clearConsoleLine
|
||||||
from lib.core.common import cpuThrottle
|
|
||||||
from lib.core.common import dataToStdout
|
from lib.core.common import dataToStdout
|
||||||
from lib.core.common import evaluateCode
|
from lib.core.common import evaluateCode
|
||||||
from lib.core.common import extractRegexResult
|
from lib.core.common import extractRegexResult
|
||||||
@@ -91,6 +91,8 @@ from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
|
|||||||
from lib.core.settings import MAX_CONNECTION_CHUNK_SIZE
|
from lib.core.settings import MAX_CONNECTION_CHUNK_SIZE
|
||||||
from lib.core.settings import MAX_CONNECTIONS_REGEX
|
from lib.core.settings import MAX_CONNECTIONS_REGEX
|
||||||
from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
|
from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
|
||||||
|
from lib.core.settings import MAX_CONSECUTIVE_CONNECTION_ERRORS
|
||||||
|
from lib.core.settings import MAX_MURPHY_SLEEP_TIME
|
||||||
from lib.core.settings import META_REFRESH_REGEX
|
from lib.core.settings import META_REFRESH_REGEX
|
||||||
from lib.core.settings import MIN_TIME_RESPONSES
|
from lib.core.settings import MIN_TIME_RESPONSES
|
||||||
from lib.core.settings import IS_WIN
|
from lib.core.settings import IS_WIN
|
||||||
@@ -103,6 +105,7 @@ from lib.core.settings import RANDOM_STRING_MARKER
|
|||||||
from lib.core.settings import REPLACEMENT_MARKER
|
from lib.core.settings import REPLACEMENT_MARKER
|
||||||
from lib.core.settings import TEXT_CONTENT_TYPE_REGEX
|
from lib.core.settings import TEXT_CONTENT_TYPE_REGEX
|
||||||
from lib.core.settings import UNENCODED_ORIGINAL_VALUE
|
from lib.core.settings import UNENCODED_ORIGINAL_VALUE
|
||||||
|
from lib.core.settings import UNICODE_ENCODING
|
||||||
from lib.core.settings import URI_HTTP_HEADER
|
from lib.core.settings import URI_HTTP_HEADER
|
||||||
from lib.core.settings import WARN_TIME_STDEV
|
from lib.core.settings import WARN_TIME_STDEV
|
||||||
from lib.request.basic import decodePage
|
from lib.request.basic import decodePage
|
||||||
@@ -111,7 +114,6 @@ from lib.request.basic import processResponse
|
|||||||
from lib.request.direct import direct
|
from lib.request.direct import direct
|
||||||
from lib.request.comparison import comparison
|
from lib.request.comparison import comparison
|
||||||
from lib.request.methodrequest import MethodRequest
|
from lib.request.methodrequest import MethodRequest
|
||||||
from thirdparty.multipart import multipartpost
|
|
||||||
from thirdparty.odict.odict import OrderedDict
|
from thirdparty.odict.odict import OrderedDict
|
||||||
from thirdparty.socks.socks import ProxyError
|
from thirdparty.socks.socks import ProxyError
|
||||||
|
|
||||||
@@ -123,7 +125,10 @@ class Connect(object):
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _getPageProxy(**kwargs):
|
def _getPageProxy(**kwargs):
|
||||||
return Connect.getPage(**kwargs)
|
try:
|
||||||
|
return Connect.getPage(**kwargs)
|
||||||
|
except RuntimeError:
|
||||||
|
return None, None, None
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _retryProxy(**kwargs):
|
def _retryProxy(**kwargs):
|
||||||
@@ -142,10 +147,10 @@ class Connect(object):
|
|||||||
if kb.testMode and kb.previousMethod == PAYLOAD.METHOD.TIME:
|
if kb.testMode and kb.previousMethod == PAYLOAD.METHOD.TIME:
|
||||||
# timed based payloads can cause web server unresponsiveness
|
# timed based payloads can cause web server unresponsiveness
|
||||||
# if the injectable piece of code is some kind of JOIN-like query
|
# if the injectable piece of code is some kind of JOIN-like query
|
||||||
warnMsg = "most probably web server instance hasn't recovered yet "
|
warnMsg = "most likely web server instance hasn't recovered yet "
|
||||||
warnMsg += "from previous timed based payload. If the problem "
|
warnMsg += "from previous timed based payload. If the problem "
|
||||||
warnMsg += "persists please wait for few minutes and rerun "
|
warnMsg += "persists please wait for a few minutes and rerun "
|
||||||
warnMsg += "without flag T in option '--technique' "
|
warnMsg += "without flag 'T' in option '--technique' "
|
||||||
warnMsg += "(e.g. '--flush-session --technique=BEUS') or try to "
|
warnMsg += "(e.g. '--flush-session --technique=BEUS') or try to "
|
||||||
warnMsg += "lower the value of option '--time-sec' (e.g. '--time-sec=2')"
|
warnMsg += "lower the value of option '--time-sec' (e.g. '--time-sec=2')"
|
||||||
singleTimeWarnMessage(warnMsg)
|
singleTimeWarnMessage(warnMsg)
|
||||||
@@ -220,13 +225,13 @@ class Connect(object):
|
|||||||
|
|
||||||
if isinstance(conf.delay, (int, float)) and conf.delay > 0:
|
if isinstance(conf.delay, (int, float)) and conf.delay > 0:
|
||||||
time.sleep(conf.delay)
|
time.sleep(conf.delay)
|
||||||
elif conf.cpuThrottle:
|
|
||||||
cpuThrottle(conf.cpuThrottle)
|
|
||||||
|
|
||||||
if conf.offline:
|
if conf.offline:
|
||||||
return None, None, None
|
return None, None, None
|
||||||
elif conf.dummy:
|
elif conf.dummy or conf.murphyRate and randomInt() % conf.murphyRate == 0:
|
||||||
return getUnicode(randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())), None, None
|
if conf.murphyRate:
|
||||||
|
time.sleep(randomInt() % (MAX_MURPHY_SLEEP_TIME + 1))
|
||||||
|
return getUnicode(randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())), None, None if not conf.murphyRate else randomInt(3)
|
||||||
|
|
||||||
threadData = getCurrentThreadData()
|
threadData = getCurrentThreadData()
|
||||||
with kb.locks.request:
|
with kb.locks.request:
|
||||||
@@ -242,25 +247,29 @@ class Connect(object):
|
|||||||
referer = kwargs.get("referer", None) or conf.referer
|
referer = kwargs.get("referer", None) or conf.referer
|
||||||
host = kwargs.get("host", None) or conf.host
|
host = kwargs.get("host", None) or conf.host
|
||||||
direct_ = kwargs.get("direct", False)
|
direct_ = kwargs.get("direct", False)
|
||||||
multipart = kwargs.get("multipart", False)
|
multipart = kwargs.get("multipart", None)
|
||||||
silent = kwargs.get("silent", False)
|
silent = kwargs.get("silent", False)
|
||||||
raise404 = kwargs.get("raise404", True)
|
raise404 = kwargs.get("raise404", True)
|
||||||
timeout = kwargs.get("timeout", None) or conf.timeout
|
timeout = kwargs.get("timeout", None) or conf.timeout
|
||||||
auxHeaders = kwargs.get("auxHeaders", None)
|
auxHeaders = kwargs.get("auxHeaders", None)
|
||||||
response = kwargs.get("response", False)
|
response = kwargs.get("response", False)
|
||||||
ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout
|
ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout or conf.ignoreTimeouts
|
||||||
refreshing = kwargs.get("refreshing", False)
|
refreshing = kwargs.get("refreshing", False)
|
||||||
retrying = kwargs.get("retrying", False)
|
retrying = kwargs.get("retrying", False)
|
||||||
crawling = kwargs.get("crawling", False)
|
crawling = kwargs.get("crawling", False)
|
||||||
|
checking = kwargs.get("checking", False)
|
||||||
skipRead = kwargs.get("skipRead", False)
|
skipRead = kwargs.get("skipRead", False)
|
||||||
|
|
||||||
|
if multipart:
|
||||||
|
post = multipart
|
||||||
|
|
||||||
websocket_ = url.lower().startswith("ws")
|
websocket_ = url.lower().startswith("ws")
|
||||||
|
|
||||||
if not urlparse.urlsplit(url).netloc:
|
if not urlparse.urlsplit(url).netloc:
|
||||||
url = urlparse.urljoin(conf.url, url)
|
url = urlparse.urljoin(conf.url, url)
|
||||||
|
|
||||||
# flag to know if we are dealing with the same target host
|
# flag to know if we are dealing with the same target host
|
||||||
target = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url or ""]))
|
target = checkSameHost(url, conf.url)
|
||||||
|
|
||||||
if not retrying:
|
if not retrying:
|
||||||
# Reset the number of connection retries
|
# Reset the number of connection retries
|
||||||
@@ -270,13 +279,17 @@ class Connect(object):
|
|||||||
# url splitted with space char while urlencoding it in the later phase
|
# url splitted with space char while urlencoding it in the later phase
|
||||||
url = url.replace(" ", "%20")
|
url = url.replace(" ", "%20")
|
||||||
|
|
||||||
|
if "://" not in url:
|
||||||
|
url = "http://%s" % url
|
||||||
|
|
||||||
conn = None
|
conn = None
|
||||||
code = None
|
|
||||||
page = None
|
page = None
|
||||||
|
code = None
|
||||||
|
status = None
|
||||||
|
|
||||||
_ = urlparse.urlsplit(url)
|
_ = urlparse.urlsplit(url)
|
||||||
requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET))
|
requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET))
|
||||||
requestMsg += ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling)) else url
|
requestMsg += ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling, checking)) else url
|
||||||
responseMsg = u"HTTP response "
|
responseMsg = u"HTTP response "
|
||||||
requestHeaders = u""
|
requestHeaders = u""
|
||||||
responseHeaders = None
|
responseHeaders = None
|
||||||
@@ -298,27 +311,13 @@ class Connect(object):
|
|||||||
params = urlencode(params)
|
params = urlencode(params)
|
||||||
url = "%s?%s" % (url, params)
|
url = "%s?%s" % (url, params)
|
||||||
|
|
||||||
elif multipart:
|
elif any((refreshing, crawling, checking)):
|
||||||
# Needed in this form because of potential circle dependency
|
|
||||||
# problem (option -> update -> connect -> option)
|
|
||||||
from lib.core.option import proxyHandler
|
|
||||||
|
|
||||||
multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
|
|
||||||
conn = multipartOpener.open(unicodeencode(url), multipart)
|
|
||||||
page = Connect._connReadProxy(conn) if not skipRead else None
|
|
||||||
responseHeaders = conn.info()
|
|
||||||
responseHeaders[URI_HTTP_HEADER] = conn.geturl()
|
|
||||||
page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
|
|
||||||
|
|
||||||
return page
|
|
||||||
|
|
||||||
elif any((refreshing, crawling)):
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
elif target:
|
elif target:
|
||||||
if conf.forceSSL and urlparse.urlparse(url).scheme != "https":
|
if conf.forceSSL and urlparse.urlparse(url).scheme != "https":
|
||||||
url = re.sub("\Ahttp:", "https:", url, re.I)
|
url = re.sub("(?i)\Ahttp:", "https:", url)
|
||||||
url = re.sub(":80/", ":443/", url, re.I)
|
url = re.sub("(?i):80/", ":443/", url)
|
||||||
|
|
||||||
if PLACE.GET in conf.parameters and not get:
|
if PLACE.GET in conf.parameters and not get:
|
||||||
get = conf.parameters[PLACE.GET]
|
get = conf.parameters[PLACE.GET]
|
||||||
@@ -364,7 +363,7 @@ class Connect(object):
|
|||||||
if not getHeader(headers, HTTP_HEADER.ACCEPT_ENCODING):
|
if not getHeader(headers, HTTP_HEADER.ACCEPT_ENCODING):
|
||||||
headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"
|
headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"
|
||||||
|
|
||||||
if post is not None and not getHeader(headers, HTTP_HEADER.CONTENT_TYPE):
|
if post is not None and not multipart and not getHeader(headers, HTTP_HEADER.CONTENT_TYPE):
|
||||||
headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)
|
headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)
|
||||||
|
|
||||||
if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]:
|
if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]:
|
||||||
@@ -376,11 +375,12 @@ class Connect(object):
|
|||||||
if boundary:
|
if boundary:
|
||||||
headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary)
|
headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary)
|
||||||
|
|
||||||
|
if conf.keepAlive:
|
||||||
|
headers[HTTP_HEADER.CONNECTION] = "keep-alive"
|
||||||
|
|
||||||
# Reset header values to original in case of provided request file
|
# Reset header values to original in case of provided request file
|
||||||
if target and conf.requestFile:
|
if target and conf.requestFile:
|
||||||
headers = OrderedDict(conf.httpHeaders)
|
headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie})
|
||||||
if cookie:
|
|
||||||
headers[HTTP_HEADER.COOKIE] = cookie
|
|
||||||
|
|
||||||
if auxHeaders:
|
if auxHeaders:
|
||||||
for key, value in auxHeaders.items():
|
for key, value in auxHeaders.items():
|
||||||
@@ -401,6 +401,7 @@ class Connect(object):
|
|||||||
|
|
||||||
if websocket_:
|
if websocket_:
|
||||||
ws = websocket.WebSocket()
|
ws = websocket.WebSocket()
|
||||||
|
ws.settimeout(timeout)
|
||||||
ws.connect(url, header=("%s: %s" % _ for _ in headers.items() if _[0] not in ("Host",)), cookie=cookie) # WebSocket will add Host field of headers automatically
|
ws.connect(url, header=("%s: %s" % _ for _ in headers.items() if _[0] not in ("Host",)), cookie=cookie) # WebSocket will add Host field of headers automatically
|
||||||
ws.send(urldecode(post or ""))
|
ws.send(urldecode(post or ""))
|
||||||
page = ws.recv()
|
page = ws.recv()
|
||||||
@@ -412,7 +413,7 @@ class Connect(object):
|
|||||||
responseHeaders = _(ws.getheaders())
|
responseHeaders = _(ws.getheaders())
|
||||||
responseHeaders.headers = ["%s: %s\r\n" % (_[0].capitalize(), _[1]) for _ in responseHeaders.items()]
|
responseHeaders.headers = ["%s: %s\r\n" % (_[0].capitalize(), _[1]) for _ in responseHeaders.items()]
|
||||||
|
|
||||||
requestHeaders += "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())
|
requestHeaders += "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
|
||||||
requestMsg += "\n%s" % requestHeaders
|
requestMsg += "\n%s" % requestHeaders
|
||||||
|
|
||||||
if post is not None:
|
if post is not None:
|
||||||
@@ -431,7 +432,7 @@ class Connect(object):
|
|||||||
else:
|
else:
|
||||||
req = urllib2.Request(url, post, headers)
|
req = urllib2.Request(url, post, headers)
|
||||||
|
|
||||||
requestHeaders += "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items())
|
requestHeaders += "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items()])
|
||||||
|
|
||||||
if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
|
if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
|
||||||
conf.cj._policy._now = conf.cj._now = int(time.time())
|
conf.cj._policy._now = conf.cj._now = int(time.time())
|
||||||
@@ -443,7 +444,7 @@ class Connect(object):
|
|||||||
requestHeaders += "\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post))
|
requestHeaders += "\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post))
|
||||||
|
|
||||||
if not getRequestHeader(req, HTTP_HEADER.CONNECTION):
|
if not getRequestHeader(req, HTTP_HEADER.CONNECTION):
|
||||||
requestHeaders += "\n%s: close" % HTTP_HEADER.CONNECTION
|
requestHeaders += "\n%s: %s" % (HTTP_HEADER.CONNECTION, "close" if not conf.keepAlive else "keep-alive")
|
||||||
|
|
||||||
requestMsg += "\n%s" % requestHeaders
|
requestMsg += "\n%s" % requestHeaders
|
||||||
|
|
||||||
@@ -452,9 +453,10 @@ class Connect(object):
|
|||||||
|
|
||||||
requestMsg += "\n"
|
requestMsg += "\n"
|
||||||
|
|
||||||
threadData.lastRequestMsg = requestMsg
|
if not multipart:
|
||||||
|
threadData.lastRequestMsg = requestMsg
|
||||||
|
|
||||||
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
|
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
|
||||||
|
|
||||||
if conf.cj:
|
if conf.cj:
|
||||||
for cookie in conf.cj:
|
for cookie in conf.cj:
|
||||||
@@ -477,7 +479,7 @@ class Connect(object):
|
|||||||
return conn, None, None
|
return conn, None, None
|
||||||
|
|
||||||
# Get HTTP response
|
# Get HTTP response
|
||||||
if hasattr(conn, 'redurl'):
|
if hasattr(conn, "redurl"):
|
||||||
page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\
|
page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\
|
||||||
else Connect._connReadProxy(conn)) if not skipRead else None
|
else Connect._connReadProxy(conn)) if not skipRead else None
|
||||||
skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
|
skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
|
||||||
@@ -485,43 +487,53 @@ class Connect(object):
|
|||||||
else:
|
else:
|
||||||
page = Connect._connReadProxy(conn) if not skipRead else None
|
page = Connect._connReadProxy(conn) if not skipRead else None
|
||||||
|
|
||||||
code = code or conn.code
|
if conn:
|
||||||
responseHeaders = conn.info()
|
code = conn.code
|
||||||
responseHeaders[URI_HTTP_HEADER] = conn.geturl()
|
responseHeaders = conn.info()
|
||||||
|
responseHeaders[URI_HTTP_HEADER] = conn.geturl()
|
||||||
|
else:
|
||||||
|
code = None
|
||||||
|
responseHeaders = {}
|
||||||
|
|
||||||
page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
|
page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
|
||||||
status = getUnicode(conn.msg)
|
status = getUnicode(conn.msg) if conn else None
|
||||||
|
|
||||||
if extractRegexResult(META_REFRESH_REGEX, page) and not refreshing:
|
kb.connErrorCounter = 0
|
||||||
refresh = extractRegexResult(META_REFRESH_REGEX, page)
|
|
||||||
|
|
||||||
debugMsg = "got HTML meta refresh header"
|
if not refreshing:
|
||||||
logger.debug(debugMsg)
|
refresh = responseHeaders.get(HTTP_HEADER.REFRESH, "").split("url=")[-1].strip()
|
||||||
|
|
||||||
if kb.alwaysRefresh is None:
|
if extractRegexResult(META_REFRESH_REGEX, page):
|
||||||
msg = "sqlmap got a refresh request "
|
refresh = extractRegexResult(META_REFRESH_REGEX, page)
|
||||||
msg += "(redirect like response common to login pages). "
|
|
||||||
msg += "Do you want to apply the refresh "
|
|
||||||
msg += "from now on (or stay on the original page)? [Y/n]"
|
|
||||||
choice = readInput(msg, default="Y")
|
|
||||||
|
|
||||||
kb.alwaysRefresh = choice not in ("n", "N")
|
debugMsg = "got HTML meta refresh header"
|
||||||
|
logger.debug(debugMsg)
|
||||||
|
|
||||||
if kb.alwaysRefresh:
|
if refresh:
|
||||||
if re.search(r"\Ahttps?://", refresh, re.I):
|
if kb.alwaysRefresh is None:
|
||||||
url = refresh
|
msg = "sqlmap got a refresh request "
|
||||||
else:
|
msg += "(redirect like response common to login pages). "
|
||||||
url = urlparse.urljoin(url, refresh)
|
msg += "Do you want to apply the refresh "
|
||||||
|
msg += "from now on (or stay on the original page)? [Y/n]"
|
||||||
|
|
||||||
threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
|
kb.alwaysRefresh = readInput(msg, default='Y', boolean=True)
|
||||||
kwargs['refreshing'] = True
|
|
||||||
kwargs['url'] = url
|
|
||||||
kwargs['get'] = None
|
|
||||||
kwargs['post'] = None
|
|
||||||
|
|
||||||
try:
|
if kb.alwaysRefresh:
|
||||||
return Connect._getPageProxy(**kwargs)
|
if re.search(r"\Ahttps?://", refresh, re.I):
|
||||||
except SqlmapSyntaxException:
|
url = refresh
|
||||||
pass
|
else:
|
||||||
|
url = urlparse.urljoin(url, refresh)
|
||||||
|
|
||||||
|
threadData.lastRedirectMsg = (threadData.lastRequestUID, page)
|
||||||
|
kwargs["refreshing"] = True
|
||||||
|
kwargs["url"] = url
|
||||||
|
kwargs["get"] = None
|
||||||
|
kwargs["post"] = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
return Connect._getPageProxy(**kwargs)
|
||||||
|
except SqlmapSyntaxException:
|
||||||
|
pass
|
||||||
|
|
||||||
# Explicit closing of connection object
|
# Explicit closing of connection object
|
||||||
if conn and not conf.keepAlive:
|
if conn and not conf.keepAlive:
|
||||||
@@ -537,6 +549,9 @@ class Connect(object):
|
|||||||
page = None
|
page = None
|
||||||
responseHeaders = None
|
responseHeaders = None
|
||||||
|
|
||||||
|
if checking:
|
||||||
|
return None, None, None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
page = ex.read() if not skipRead else None
|
page = ex.read() if not skipRead else None
|
||||||
responseHeaders = ex.info()
|
responseHeaders = ex.info()
|
||||||
@@ -555,16 +570,16 @@ class Connect(object):
|
|||||||
page = page if isinstance(page, unicode) else getUnicode(page)
|
page = page if isinstance(page, unicode) else getUnicode(page)
|
||||||
|
|
||||||
code = ex.code
|
code = ex.code
|
||||||
|
status = getUnicode(ex.msg)
|
||||||
|
|
||||||
kb.originalCode = kb.originalCode or code
|
kb.originalCode = kb.originalCode or code
|
||||||
threadData.lastHTTPError = (threadData.lastRequestUID, code)
|
threadData.lastHTTPError = (threadData.lastRequestUID, code, status)
|
||||||
kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1
|
kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1
|
||||||
|
|
||||||
status = getUnicode(ex.msg)
|
|
||||||
responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)
|
responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)
|
||||||
|
|
||||||
if responseHeaders:
|
if responseHeaders:
|
||||||
logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())
|
logHeaders = "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
|
||||||
|
|
||||||
logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))
|
logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))
|
||||||
|
|
||||||
@@ -575,7 +590,8 @@ class Connect(object):
|
|||||||
elif conf.verbose > 5:
|
elif conf.verbose > 5:
|
||||||
responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])
|
responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])
|
||||||
|
|
||||||
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
|
if not multipart:
|
||||||
|
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
|
||||||
|
|
||||||
if ex.code == httplib.UNAUTHORIZED and not conf.ignore401:
|
if ex.code == httplib.UNAUTHORIZED and not conf.ignore401:
|
||||||
errMsg = "not authorized, try to provide right HTTP "
|
errMsg = "not authorized, try to provide right HTTP "
|
||||||
@@ -588,10 +604,9 @@ class Connect(object):
|
|||||||
else:
|
else:
|
||||||
debugMsg = "page not found (%d)" % code
|
debugMsg = "page not found (%d)" % code
|
||||||
singleTimeLogMessage(debugMsg, logging.DEBUG)
|
singleTimeLogMessage(debugMsg, logging.DEBUG)
|
||||||
processResponse(page, responseHeaders)
|
|
||||||
elif ex.code == httplib.GATEWAY_TIMEOUT:
|
elif ex.code == httplib.GATEWAY_TIMEOUT:
|
||||||
if ignoreTimeout:
|
if ignoreTimeout:
|
||||||
return None, None, None
|
return None if not conf.ignoreTimeouts else "", None, None
|
||||||
else:
|
else:
|
||||||
warnMsg = "unable to connect to the target URL (%d - %s)" % (ex.code, httplib.responses[ex.code])
|
warnMsg = "unable to connect to the target URL (%d - %s)" % (ex.code, httplib.responses[ex.code])
|
||||||
if threadData.retriesCount < conf.retries and not kb.threadException:
|
if threadData.retriesCount < conf.retries and not kb.threadException:
|
||||||
@@ -610,20 +625,39 @@ class Connect(object):
|
|||||||
except (urllib2.URLError, socket.error, socket.timeout, httplib.HTTPException, struct.error, binascii.Error, ProxyError, SqlmapCompressionException, WebSocketException, TypeError):
|
except (urllib2.URLError, socket.error, socket.timeout, httplib.HTTPException, struct.error, binascii.Error, ProxyError, SqlmapCompressionException, WebSocketException, TypeError):
|
||||||
tbMsg = traceback.format_exc()
|
tbMsg = traceback.format_exc()
|
||||||
|
|
||||||
if "no host given" in tbMsg:
|
if checking:
|
||||||
|
return None, None, None
|
||||||
|
elif "no host given" in tbMsg:
|
||||||
warnMsg = "invalid URL address used (%s)" % repr(url)
|
warnMsg = "invalid URL address used (%s)" % repr(url)
|
||||||
raise SqlmapSyntaxException(warnMsg)
|
raise SqlmapSyntaxException(warnMsg)
|
||||||
elif "forcibly closed" in tbMsg or "Connection is already closed" in tbMsg:
|
elif "forcibly closed" in tbMsg or "Connection is already closed" in tbMsg:
|
||||||
warnMsg = "connection was forcibly closed by the target URL"
|
warnMsg = "connection was forcibly closed by the target URL"
|
||||||
elif "timed out" in tbMsg:
|
elif "timed out" in tbMsg:
|
||||||
singleTimeWarnMessage("turning off pre-connect mechanism because of connection time out(s)")
|
if not conf.disablePrecon:
|
||||||
conf.disablePrecon = True
|
singleTimeWarnMessage("turning off pre-connect mechanism because of connection time out(s)")
|
||||||
|
conf.disablePrecon = True
|
||||||
|
|
||||||
|
if kb.testMode and kb.testType not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED):
|
||||||
|
kb.responseTimes.clear()
|
||||||
|
|
||||||
if kb.testMode and kb.testType not in (None, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED):
|
if kb.testMode and kb.testType not in (None, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED):
|
||||||
singleTimeWarnMessage("there is a possibility that the target (or WAF) is dropping 'suspicious' requests")
|
singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS/IDS) is dropping 'suspicious' requests")
|
||||||
|
kb.droppingRequests = True
|
||||||
warnMsg = "connection timed out to the target URL"
|
warnMsg = "connection timed out to the target URL"
|
||||||
|
elif "Connection reset" in tbMsg:
|
||||||
|
if not conf.disablePrecon:
|
||||||
|
singleTimeWarnMessage("turning off pre-connect mechanism because of connection reset(s)")
|
||||||
|
conf.disablePrecon = True
|
||||||
|
|
||||||
|
if kb.testMode:
|
||||||
|
singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS/IDS) is resetting 'suspicious' requests")
|
||||||
|
kb.droppingRequests = True
|
||||||
|
warnMsg = "connection reset to the target URL"
|
||||||
elif "URLError" in tbMsg or "error" in tbMsg:
|
elif "URLError" in tbMsg or "error" in tbMsg:
|
||||||
warnMsg = "unable to connect to the target URL"
|
warnMsg = "unable to connect to the target URL"
|
||||||
|
match = re.search(r"Errno \d+\] ([^>]+)", tbMsg)
|
||||||
|
if match:
|
||||||
|
warnMsg += " ('%s')" % match.group(1).strip()
|
||||||
elif "NTLM" in tbMsg:
|
elif "NTLM" in tbMsg:
|
||||||
warnMsg = "there has been a problem with NTLM authentication"
|
warnMsg = "there has been a problem with NTLM authentication"
|
||||||
elif "BadStatusLine" in tbMsg:
|
elif "BadStatusLine" in tbMsg:
|
||||||
@@ -642,16 +676,29 @@ class Connect(object):
|
|||||||
else:
|
else:
|
||||||
warnMsg = "unable to connect to the target URL"
|
warnMsg = "unable to connect to the target URL"
|
||||||
|
|
||||||
if "BadStatusLine" not in tbMsg:
|
if "BadStatusLine" not in tbMsg and any((conf.proxy, conf.tor)):
|
||||||
warnMsg += " or proxy"
|
warnMsg += " or proxy"
|
||||||
|
|
||||||
|
with kb.locks.connError:
|
||||||
|
kb.connErrorCounter += 1
|
||||||
|
|
||||||
|
if kb.connErrorCounter >= MAX_CONSECUTIVE_CONNECTION_ERRORS and kb.connErrorChoice is None:
|
||||||
|
message = "there seems to be a continuous problem with connection to the target. "
|
||||||
|
message += "Are you sure that you want to continue "
|
||||||
|
message += "with further target testing? [y/N] "
|
||||||
|
|
||||||
|
kb.connErrorChoice = readInput(message, default='N', boolean=True)
|
||||||
|
|
||||||
|
if kb.connErrorChoice is False:
|
||||||
|
raise SqlmapConnectionException(warnMsg)
|
||||||
|
|
||||||
if silent:
|
if silent:
|
||||||
return None, None, None
|
return None, None, None
|
||||||
elif "forcibly closed" in tbMsg:
|
elif "forcibly closed" in tbMsg:
|
||||||
logger.critical(warnMsg)
|
logger.critical(warnMsg)
|
||||||
return None, None, None
|
return None, None, None
|
||||||
elif ignoreTimeout and any(_ in tbMsg for _ in ("timed out", "IncompleteRead")):
|
elif ignoreTimeout and any(_ in tbMsg for _ in ("timed out", "IncompleteRead")):
|
||||||
return None, None, None
|
return None if not conf.ignoreTimeouts else "", None, None
|
||||||
elif threadData.retriesCount < conf.retries and not kb.threadException:
|
elif threadData.retriesCount < conf.retries and not kb.threadException:
|
||||||
warnMsg += ". sqlmap is going to retry the request"
|
warnMsg += ". sqlmap is going to retry the request"
|
||||||
if not retrying:
|
if not retrying:
|
||||||
@@ -674,7 +721,7 @@ class Connect(object):
|
|||||||
page = getUnicode(page)
|
page = getUnicode(page)
|
||||||
socket.setdefaulttimeout(conf.timeout)
|
socket.setdefaulttimeout(conf.timeout)
|
||||||
|
|
||||||
processResponse(page, responseHeaders)
|
processResponse(page, responseHeaders, status)
|
||||||
|
|
||||||
if conn and getattr(conn, "redurl", None):
|
if conn and getattr(conn, "redurl", None):
|
||||||
_ = urlparse.urlsplit(conn.redurl)
|
_ = urlparse.urlsplit(conn.redurl)
|
||||||
@@ -691,7 +738,7 @@ class Connect(object):
|
|||||||
responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)
|
responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)
|
||||||
|
|
||||||
if responseHeaders:
|
if responseHeaders:
|
||||||
logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())
|
logHeaders = "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
|
||||||
|
|
||||||
if not skipLogTraffic:
|
if not skipLogTraffic:
|
||||||
logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))
|
logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))
|
||||||
@@ -701,7 +748,8 @@ class Connect(object):
|
|||||||
elif conf.verbose > 5:
|
elif conf.verbose > 5:
|
||||||
responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])
|
responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])
|
||||||
|
|
||||||
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
|
if not multipart:
|
||||||
|
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
|
||||||
|
|
||||||
return page, responseHeaders, code
|
return page, responseHeaders, code
|
||||||
|
|
||||||
@@ -789,9 +837,20 @@ class Connect(object):
|
|||||||
value = agent.replacePayload(value, payload)
|
value = agent.replacePayload(value, payload)
|
||||||
else:
|
else:
|
||||||
# GET, POST, URI and Cookie payload needs to be thoroughly URL encoded
|
# GET, POST, URI and Cookie payload needs to be thoroughly URL encoded
|
||||||
if place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and kb.postUrlEncode:
|
if (place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) or place == PLACE.CUSTOM_HEADER and value.split(',')[0] == HTTP_HEADER.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and kb.postUrlEncode:
|
||||||
payload = urlencode(payload, '%', False, place != PLACE.URI) # spaceplus is handled down below
|
skip = False
|
||||||
value = agent.replacePayload(value, payload)
|
|
||||||
|
if place == PLACE.COOKIE or place == PLACE.CUSTOM_HEADER and value.split(',')[0] == HTTP_HEADER.COOKIE:
|
||||||
|
if kb.cookieEncodeChoice is None:
|
||||||
|
msg = "do you want to URL encode cookie values (implementation specific)? %s" % ("[Y/n]" if not conf.url.endswith(".aspx") else "[y/N]") # Reference: https://support.microsoft.com/en-us/kb/313282
|
||||||
|
choice = readInput(msg, default='Y' if not conf.url.endswith(".aspx") else 'N')
|
||||||
|
kb.cookieEncodeChoice = choice.upper().strip() == 'Y'
|
||||||
|
if not kb.cookieEncodeChoice:
|
||||||
|
skip = True
|
||||||
|
|
||||||
|
if not skip:
|
||||||
|
payload = urlencode(payload, '%', False, place != PLACE.URI) # spaceplus is handled down below
|
||||||
|
value = agent.replacePayload(value, payload)
|
||||||
|
|
||||||
if conf.hpp:
|
if conf.hpp:
|
||||||
if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_API.ASP, WEB_API.ASPX)):
|
if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_API.ASP, WEB_API.ASPX)):
|
||||||
@@ -828,9 +887,13 @@ class Connect(object):
|
|||||||
|
|
||||||
if PLACE.GET in conf.parameters:
|
if PLACE.GET in conf.parameters:
|
||||||
get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value
|
get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value
|
||||||
|
elif place == PLACE.GET: # Note: for (e.g.) checkWaf() when there are no GET parameters
|
||||||
|
get = value
|
||||||
|
|
||||||
if PLACE.POST in conf.parameters:
|
if PLACE.POST in conf.parameters:
|
||||||
post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value
|
post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value
|
||||||
|
elif place == PLACE.POST:
|
||||||
|
post = value
|
||||||
|
|
||||||
if PLACE.CUSTOM_POST in conf.parameters:
|
if PLACE.CUSTOM_POST in conf.parameters:
|
||||||
post = conf.parameters[PLACE.CUSTOM_POST].replace(CUSTOM_INJECTION_MARK_CHAR, "") if place != PLACE.CUSTOM_POST or not value else value
|
post = conf.parameters[PLACE.CUSTOM_POST].replace(CUSTOM_INJECTION_MARK_CHAR, "") if place != PLACE.CUSTOM_POST or not value else value
|
||||||
@@ -854,20 +917,31 @@ class Connect(object):
|
|||||||
uri = conf.url
|
uri = conf.url
|
||||||
|
|
||||||
if value and place == PLACE.CUSTOM_HEADER:
|
if value and place == PLACE.CUSTOM_HEADER:
|
||||||
auxHeaders[value.split(',')[0]] = value.split(',', 1)[1]
|
if value.split(',')[0].capitalize() == PLACE.COOKIE:
|
||||||
|
cookie = value.split(',', 1)[1]
|
||||||
|
else:
|
||||||
|
auxHeaders[value.split(',')[0]] = value.split(',', 1)[1]
|
||||||
|
|
||||||
if conf.csrfToken:
|
if conf.csrfToken:
|
||||||
def _adjustParameter(paramString, parameter, newValue):
|
def _adjustParameter(paramString, parameter, newValue):
|
||||||
retVal = paramString
|
retVal = paramString
|
||||||
match = re.search("%s=(?P<value>[^&]*)" % re.escape(parameter), paramString)
|
match = re.search("%s=[^&]*" % re.escape(parameter), paramString)
|
||||||
if match:
|
if match:
|
||||||
retVal = re.sub("%s=[^&]*" % re.escape(parameter), "%s=%s" % (parameter, newValue), paramString)
|
retVal = re.sub(re.escape(match.group(0)), "%s=%s" % (parameter, newValue), paramString)
|
||||||
|
else:
|
||||||
|
match = re.search("(%s[\"']:[\"'])([^\"']+)" % re.escape(parameter), paramString)
|
||||||
|
if match:
|
||||||
|
retVal = re.sub(re.escape(match.group(0)), "%s%s" % (match.group(1), newValue), paramString)
|
||||||
return retVal
|
return retVal
|
||||||
|
|
||||||
page, headers, code = Connect.getPage(url=conf.csrfUrl or conf.url, data=conf.data if conf.csrfUrl == conf.url else None, method=conf.method if conf.csrfUrl == conf.url else None, cookie=conf.parameters.get(PLACE.COOKIE), direct=True, silent=True, ua=conf.parameters.get(PLACE.USER_AGENT), referer=conf.parameters.get(PLACE.REFERER), host=conf.parameters.get(PLACE.HOST))
|
page, headers, code = Connect.getPage(url=conf.csrfUrl or conf.url, data=conf.data if conf.csrfUrl == conf.url else None, method=conf.method if conf.csrfUrl == conf.url else None, cookie=conf.parameters.get(PLACE.COOKIE), direct=True, silent=True, ua=conf.parameters.get(PLACE.USER_AGENT), referer=conf.parameters.get(PLACE.REFERER), host=conf.parameters.get(PLACE.HOST))
|
||||||
match = re.search(r"<input[^>]+name=[\"']?%s[\"']?\s[^>]*value=(\"([^\"]+)|'([^']+)|([^ >]+))" % re.escape(conf.csrfToken), page or "")
|
match = re.search(r"<input[^>]+name=[\"']?%s[\"']?\s[^>]*value=(\"([^\"]+)|'([^']+)|([^ >]+))" % re.escape(conf.csrfToken), page or "")
|
||||||
token = (match.group(2) or match.group(3) or match.group(4)) if match else None
|
token = (match.group(2) or match.group(3) or match.group(4)) if match else None
|
||||||
|
|
||||||
|
if not token:
|
||||||
|
match = re.search(r"%s[\"']:[\"']([^\"']+)" % re.escape(conf.csrfToken), page or "")
|
||||||
|
token = match.group(1) if match else None
|
||||||
|
|
||||||
if not token:
|
if not token:
|
||||||
if conf.csrfUrl != conf.url and code == httplib.OK:
|
if conf.csrfUrl != conf.url and code == httplib.OK:
|
||||||
if headers and "text/plain" in headers.get(HTTP_HEADER.CONTENT_TYPE, ""):
|
if headers and "text/plain" in headers.get(HTTP_HEADER.CONTENT_TYPE, ""):
|
||||||
@@ -877,7 +951,7 @@ class Connect(object):
|
|||||||
for _ in conf.cj:
|
for _ in conf.cj:
|
||||||
if _.name == conf.csrfToken:
|
if _.name == conf.csrfToken:
|
||||||
token = _.value
|
token = _.value
|
||||||
if not any (conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))):
|
if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))):
|
||||||
if post:
|
if post:
|
||||||
post = "%s%s%s=%s" % (post, conf.paramDel or DEFAULT_GET_POST_DELIMITER, conf.csrfToken, token)
|
post = "%s%s%s=%s" % (post, conf.paramDel or DEFAULT_GET_POST_DELIMITER, conf.csrfToken, token)
|
||||||
elif get:
|
elif get:
|
||||||
@@ -959,7 +1033,7 @@ class Connect(object):
|
|||||||
|
|
||||||
while True:
|
while True:
|
||||||
try:
|
try:
|
||||||
compiler.parse(conf.evalCode.replace(';', '\n'))
|
compiler.parse(unicodeencode(conf.evalCode.replace(';', '\n')))
|
||||||
except SyntaxError, ex:
|
except SyntaxError, ex:
|
||||||
original = replacement = ex.text.strip()
|
original = replacement = ex.text.strip()
|
||||||
for _ in re.findall(r"[A-Za-z_]+", original)[::-1]:
|
for _ in re.findall(r"[A-Za-z_]+", original)[::-1]:
|
||||||
@@ -970,7 +1044,7 @@ class Connect(object):
|
|||||||
conf.evalCode = conf.evalCode.replace(EVALCODE_KEYWORD_SUFFIX, "")
|
conf.evalCode = conf.evalCode.replace(EVALCODE_KEYWORD_SUFFIX, "")
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
conf.evalCode = conf.evalCode.replace(ex.text.strip(), replacement)
|
conf.evalCode = conf.evalCode.replace(getUnicode(ex.text.strip(), UNICODE_ENCODING), replacement)
|
||||||
else:
|
else:
|
||||||
break
|
break
|
||||||
|
|
||||||
@@ -989,17 +1063,31 @@ class Connect(object):
|
|||||||
if name != "__builtins__" and originals.get(name, "") != value:
|
if name != "__builtins__" and originals.get(name, "") != value:
|
||||||
if isinstance(value, (basestring, int)):
|
if isinstance(value, (basestring, int)):
|
||||||
found = False
|
found = False
|
||||||
value = getUnicode(value)
|
value = getUnicode(value, UNICODE_ENCODING)
|
||||||
|
|
||||||
|
if kb.postHint and re.search(r"\b%s\b" % re.escape(name), post or ""):
|
||||||
|
if kb.postHint in (POST_HINT.XML, POST_HINT.SOAP):
|
||||||
|
if re.search(r"<%s\b" % re.escape(name), post):
|
||||||
|
found = True
|
||||||
|
post = re.sub(r"(?s)(<%s\b[^>]*>)(.*?)(</%s)" % (re.escape(name), re.escape(name)), "\g<1>%s\g<3>" % value, post)
|
||||||
|
elif re.search(r"\b%s>" % re.escape(name), post):
|
||||||
|
found = True
|
||||||
|
post = re.sub(r"(?s)(\b%s>)(.*?)(</[^<]*\b%s>)" % (re.escape(name), re.escape(name)), "\g<1>%s\g<3>" % value, post)
|
||||||
|
|
||||||
|
regex = r"\b(%s)\b([^\w]+)(\w+)" % re.escape(name)
|
||||||
|
if not found and re.search(regex, (post or "")):
|
||||||
|
found = True
|
||||||
|
post = re.sub(regex, "\g<1>\g<2>%s" % value, post)
|
||||||
|
|
||||||
regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), re.escape(name), re.escape(delimiter))
|
regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), re.escape(name), re.escape(delimiter))
|
||||||
|
if not found and re.search(regex, (post or "")):
|
||||||
|
found = True
|
||||||
|
post = re.sub(regex, "\g<1>%s\g<3>" % value, post)
|
||||||
|
|
||||||
if re.search(regex, (get or "")):
|
if re.search(regex, (get or "")):
|
||||||
found = True
|
found = True
|
||||||
get = re.sub(regex, "\g<1>%s\g<3>" % value, get)
|
get = re.sub(regex, "\g<1>%s\g<3>" % value, get)
|
||||||
|
|
||||||
if re.search(regex, (post or "")):
|
|
||||||
found = True
|
|
||||||
post = re.sub(regex, "\g<1>%s\g<3>" % value, post)
|
|
||||||
|
|
||||||
if re.search(regex, (query or "")):
|
if re.search(regex, (query or "")):
|
||||||
found = True
|
found = True
|
||||||
uri = re.sub(regex.replace(r"\A", r"\?"), "\g<1>%s\g<3>" % value, uri)
|
uri = re.sub(regex.replace(r"\A", r"\?"), "\g<1>%s\g<3>" % value, uri)
|
||||||
@@ -1026,7 +1114,7 @@ class Connect(object):
|
|||||||
elif kb.postUrlEncode:
|
elif kb.postUrlEncode:
|
||||||
post = urlencode(post, spaceplus=kb.postSpaceToPlus)
|
post = urlencode(post, spaceplus=kb.postSpaceToPlus)
|
||||||
|
|
||||||
if timeBasedCompare:
|
if timeBasedCompare and not conf.disableStats:
|
||||||
if len(kb.responseTimes.get(kb.responseTimeMode, [])) < MIN_TIME_RESPONSES:
|
if len(kb.responseTimes.get(kb.responseTimeMode, [])) < MIN_TIME_RESPONSES:
|
||||||
clearConsoleLine()
|
clearConsoleLine()
|
||||||
|
|
||||||
@@ -1049,7 +1137,7 @@ class Connect(object):
|
|||||||
dataToStdout(" (done)\n")
|
dataToStdout(" (done)\n")
|
||||||
|
|
||||||
elif not kb.testMode:
|
elif not kb.testMode:
|
||||||
warnMsg = "it is very important to not stress the network adapter "
|
warnMsg = "it is very important to not stress the network connection "
|
||||||
warnMsg += "during usage of time-based payloads to prevent potential "
|
warnMsg += "during usage of time-based payloads to prevent potential "
|
||||||
warnMsg += "disruptions "
|
warnMsg += "disruptions "
|
||||||
singleTimeWarnMessage(warnMsg)
|
singleTimeWarnMessage(warnMsg)
|
||||||
@@ -1108,7 +1196,7 @@ class Connect(object):
|
|||||||
warnMsg = "site returned insanely large response"
|
warnMsg = "site returned insanely large response"
|
||||||
if kb.testMode:
|
if kb.testMode:
|
||||||
warnMsg += " in testing phase. This is a common "
|
warnMsg += " in testing phase. This is a common "
|
||||||
warnMsg += "behavior in custom WAF/IDS/IPS solutions"
|
warnMsg += "behavior in custom WAF/IPS/IDS solutions"
|
||||||
singleTimeWarnMessage(warnMsg)
|
singleTimeWarnMessage(warnMsg)
|
||||||
|
|
||||||
if conf.secondOrder:
|
if conf.secondOrder:
|
||||||
@@ -1116,6 +1204,7 @@ class Connect(object):
|
|||||||
|
|
||||||
threadData.lastQueryDuration = calculateDeltaSeconds(start)
|
threadData.lastQueryDuration = calculateDeltaSeconds(start)
|
||||||
threadData.lastPage = page
|
threadData.lastPage = page
|
||||||
|
threadData.lastCode = code
|
||||||
|
|
||||||
kb.originalCode = kb.originalCode or code
|
kb.originalCode = kb.originalCode or code
|
||||||
|
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff.