Mirror of https://github.com/sqlmapproject/sqlmap.git (synced 2025-12-06 12:41:30 +00:00)
Compare commits
278 Commits
Commit SHA1s:
e3134cc965 5957aad738 4c4de54ad8 590e8ed5ae 12427ff6f8 45d0010323 e18b41fc82 586c461ae6
c799e794f2 b31c264a59 a569f21ad7 8092551fa9 83972d405f de0df99d8e ddee027afb 6ce0350abc
37de01e993 63cca82288 7d1c2633c9 9564c8e8b1 5e099144d3 a007cd30e5 4c9e0b9f1e 0c3fbc46df
107d9f90ad daafe9b74a 9727f0d691 61e0459ec8 c5802a5367 01d5da18e3 b288bfdbc3 1e508547d8
4fe4c582c1 28e7c8f378 c497aa98ed 4ba9e9397c eedfa8c888 c224ea0e37 f544554475 c7c7e30130
d1426a023f 83a1b9b2e7 38684ec220 2e5edce8b9 a02662f03f 2c95b65eac e47c1aa61b f81062d595
ef911b6be4 4f1b0787ed 070e173067 f04584bb68 66d26f67bf 101d1f0d49 843126702d 65f227fe65
536d9a597e 0ce2128a9b e7ed2bbcbb 560ff4154b 1d0d5f1675 f1a3c81aec 277a4fa402 037db0f4a0
3f1bf742fc abb911d741 dc5edf1a86 e11febdcbc f2af8861f9 2895e5c20f b1e8c75672 6cc52cc12a
ddf353b86e e6535d359d 90e381a5a5 e99e9919cd dbdfbcc425 1d6832a84a 73d83280fe 1bd8c519c3
a4fdbf1343 130879fbf3 db5ae9ae0b cc4833429f 703b7079a4 ef52ee977f ba1b4c50be 1e7dfe11b4
92febd22a8 83081b5e14 f2035145fe 48b407c0fa 4466504f30 dc65afe65a 132e963b53 f52beff7c3
feb93dce44 e52422900e c045afd842 0d2db32539 77f4fd93e7 68f5597b4a 411f56e710 fb95ab8c17
9f6e04b141 1f2bdf5a3d 465a1e1a86 6af127cb64 880d438418 5efe3228f8 e005ba3f77 f2b4dc3ffc
d1022f3f59 3984b94297 eba01ee74e 36b660309b fd89fdf40b 2e53096962 79e45bd8d7 ed5f4abebd
03bbfdbc56 1b6365b195 d38a0542d8 9182b90b2b 80af22435a 065c5e8157 932aa8dd94 71208e891c
3b369920a1 68a83098ab f4a0820dcb 459e1dd9a4 4b698748f7 e697354765 721046831b a4068f9abf
245c5e64e9 cd08d13647 8abae02111 dd9bfd13f2 0c7eecee9f 3e72da66f9 ca9a56c0ff 2d2b20344d
a8a7dee800 35d9ed8476 a5e3dce26f 71448b1c16 a633bc7f32 6697e49f75 db8bcd1d2e 16c052ef13
a8c0722631 c9a73aeed1 470b68a83c f01ae291f8 c36749c3bb 63b84c31e5 ec253dd5bd 4c25a20efc
2b56bdfaa6 c37014b8e8 349e9b9fa5 ac481492c0 91c5151770 ad5a731999 95be19a692 dbcf030743
fa3f3baf1e f125f64a80 12012b36b1 43c9e21c56 a831865633 578c41f6de dc01f2e773 db327a8538
aefb815064 014978cebc 287371337d 62a3618353 366a3f9336 74d2b60cf3 9e892e93f3 0bbf5f9467
8be4b29fd1 0507234add c3d9a1c2d4 9e8b28be7c f3f4a4cb37 2280f3ff2d d6cf038e48 2dfc383700
f20e7b403a 36e62fe8a7 2542b6d241 bc13d8923b e51db6b355 6d28ca1f93 03e4741a69 b899ab9eb3
2e017eee99 a296d22195 ad11749b15 75a64245c5 9e00202823 df977d93d4 b0ca52086a af89137f2c
1f9bf587b5 f0e4c20004 cef416559a ce47b6c76e 39108bc100 f63ceaa0c1 1e60378fb2 22c7bc54b4
5f1bae86b0 a0cbf6991d 9f2bc00426 6bb486c1bf 741ce9e3f0 a479655097 4846d85ccd 3c439c3929
5cc36a5736 29dcdd3bef 53eadb0af8 7b705b94e3 558484644a e84142b6a9 b44551230e 4ecf6eee05
57be1856a6 a424e4ab59 4660b816d5 f92e1ebc40 48cd0421a6 4b4f728d8e e8336ecfe1 38ea0686a8
73b0de67b5 fae97b3937 c0947846f4 5e2d0bd320 4badb54607 29aaec8925 27ff5d6fec 72ff6e24ff
717c451b8c e5968cae31 2b55ae3e2a 8f4488d608 f1254fef4b ccda26a567 099110bc1f 0265b3fcfa
961d2b24d1 53578bcb7c 756f02fb0e 17c170e1f8 220c1be162 6b06332896 c268663bd9 a97fd1dede
b93284530e cf4c263a4e 23777143b6 9b397f00be d47c16e196 e0c7b5c63c 091c8ab2dd 86303bde55
c89f119e1a 25369ca591 a399b65033 ed37ae1562 5381d4d5be c1825b2651
.github/ISSUE_TEMPLATE.md | 2 (vendored)
@@ -19,7 +19,7 @@
* Client OS (e.g. `Microsoft Windows 10`)
* Program version (`python sqlmap.py --version` or `sqlmap --version` depending on installation):
* Target DBMS (e.g. `Microsoft SQL Server`):
-* Detected WAF/IDS/IPS protection (e.g. `ModSecurity` or `unknown`):
+* Detected WAF/IPS protection (e.g. `ModSecurity` or `unknown`):
* SQLi techniques found by sqlmap (e.g. `error-based` and `boolean-based blind`):
* Results of manual target assessment (e.g. found that the payload `query=test' AND 4113 IN ((SELECT 'foobar'))-- qKLV` works):
* Relevant console output (if any):
LICENSE | 2
@@ -1,7 +1,7 @@
COPYING -- Describes the terms under which sqlmap is distributed. A copy
of the GNU General Public License (GPL) is appended to this file.

-sqlmap is (C) 2006-2018 Bernardo Damele Assumpcao Guimaraes, Miroslav Stampar.
+sqlmap is (C) 2006-2019 Bernardo Damele Assumpcao Guimaraes, Miroslav Stampar.

This program is free software; you may redistribute and/or modify it under
the terms of the GNU General Public License as published by the Free
@@ -1,10 +1,10 @@
# sqlmap

-[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)
+[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://badge.fury.io/py/sqlmap) [](https://github.com/sqlmapproject/sqlmap/issues?q=is%3Aissue+is%3Aclosed) [](https://twitter.com/sqlmap)

sqlmap is an open source penetration testing tool that automates the process of detecting and exploiting SQL injection flaws and taking over of database servers. It comes with a powerful detection engine, many niche features for the ultimate penetration tester and a broad range of switches lasting from database fingerprinting, over data fetching from the database, to accessing the underlying file system and executing commands on the operating system via out-of-band connections.

-**The sqlmap project is sponsored by [Netsparker Web Application Security Scanner](https://www.netsparker.com/?utm_source=github.com&utm_medium=referral&utm_content=sqlmap+repo&utm_campaign=generic+advert).**
+**The sqlmap project is sponsored by [Netsparker Web Application Security Scanner](https://www.netsparker.com/scan-website-security-issues/?utm_source=sqlmap.org&utm_medium=banner&utm_campaign=github).**

Screenshots
----
@@ -67,3 +67,4 @@ Translations
* [Russian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-ru-RUS.md)
* [Spanish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-es-MX.md)
* [Turkish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-tr-TR.md)
+* [Ukrainian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-uk-UA.md)
@@ -3,7 +3,7 @@
* Implemented support for automatic decoding of page content through detected charset.
* Implemented mechanism for proper data dumping on DBMSes not supporting `LIMIT/OFFSET` like mechanism(s) (e.g. Microsoft SQL Server, Sybase, etc.).
* Major improvements to program stabilization based on user reports.
-* Added new tampering scripts avoiding popular WAF/IPS/IDS mechanisms.
+* Added new tampering scripts avoiding popular WAF/IPS mechanisms.
* Fixed major bug with DNS leaking in Tor mode.
* Added wordlist compilation made of the most popular cracking dictionaries.
* Implemented multi-processor hash cracking routine(s).
@@ -23,7 +23,7 @@
* Added option `--csv-del` for manually setting delimiting character used in CSV output.
* Added switch `--hex` for using DBMS hex conversion function(s) for data retrieval.
* Added switch `--smart` for conducting through tests only in case of positive heuristic(s).
-* Added switch `--check-waf` for checking of existence of WAF/IPS/IDS protection.
+* Added switch `--check-waf` for checking of existence of WAF/IPS protection.
* Added switch `--schema` to enumerate DBMS schema: shows all columns of all databases' tables.
* Added switch `--count` to count the number of entries for a specific table or all database(s) tables.
* Major improvements to switches `--tables` and `--columns`.
@@ -55,7 +55,7 @@
* Added option `--host` to set the HTTP Host header value.
* Added switch `--hostname` to turn on retrieval of DBMS server hostname.
* Added switch `--hpp` to turn on the usage of HTTP parameter pollution WAF bypass method.
-* Added switch `--identify-waf` for turning on the thorough testing of WAF/IPS/IDS protection.
+* Added switch `--identify-waf` for turning on the thorough testing of WAF/IPS protection.
* Added switch `--ignore-401` to ignore HTTP Error Code 401 (Unauthorized).
* Added switch `--invalid-bignum` for usage of big numbers while invalidating values.
* Added switch `--invalid-logical` for usage of logical operations while invalidating values.
@@ -78,7 +78,7 @@
* Added option `--skip` to skip testing of given parameter(s).
* Added switch `--skip-static` to skip testing parameters that not appear to be dynamic.
* Added switch `--skip-urlencode` to skip URL encoding of payload data.
-* Added switch `--skip-waf` to skip heuristic detection of WAF/IPS/IDS protection.
+* Added switch `--skip-waf` to skip heuristic detection of WAF/IPS protection.
* Added switch `--smart` to conduct thorough tests only if positive heuristic(s).
* Added option `--sql-file` for setting file(s) holding SQL statements to be executed (in case of stacked SQLi).
* Added switch `--sqlmap-shell` to turn on interactive sqlmap shell prompt.
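As a hedged illustration of the switches documented in the changelog entries above (the target URL is a placeholder, not taken from this page), an invocation combining a few of them could look like:

    # placeholder URL; --smart, --hex, --schema and --count are the switches described above
    python sqlmap.py -u "http://target.example/page.php?id=1" --smart --hex --schema --count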
@@ -565,6 +565,9 @@ Efrain Torres, <et(at)metasploit.com>
* for helping out to improve the Metasploit Framework sqlmap auxiliary module and for committing it on the Metasploit official subversion repository
* for his great Metasploit WMAP Framework

+Jennifer Torres, <jtorresf42(at)gmail.com>
+* for contributing a tamper script luanginx.py
+
Sandro Tosi, <matrixhasu(at)gmail.com>
* for helping to create sqlmap Debian package correctly

@@ -597,6 +600,7 @@ Carlos Gabriel Vergara, <carlosgabrielvergara(at)gmail.com>

Patrick Webster, <patrick(at)aushack.com>
* for suggesting an enhancement
+* for donating to sqlmap development (from OSI.Security)

Ed Williams, <ed.williams(at)ngssecure.com>
* for suggesting a minor enhancement
doc/translations/README-uk-UA.md | 50 (new file)
@@ -0,0 +1,50 @@
# sqlmap

[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://badge.fury.io/py/sqlmap) [](https://github.com/sqlmapproject/sqlmap/issues?q=is%3Aissue+is%3Aclosed) [](https://twitter.com/sqlmap)

sqlmap is an open source penetration testing tool that automates the process of detecting and exploiting SQL injection flaws, as well as taking over database servers. It comes with a powerful detection engine, many handy features for the professional penetration tester, and a broad range of scripts that simplify working with databases, from database fingerprinting to accessing the underlying file system and executing commands on the operating system via out-of-band connections.

Screenshots
----

(https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png)

You can have a look at the [collection of screenshots](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) demonstrating some of the features on the wiki.

Installation
----

You can download the latest tarball by clicking [here](https://github.com/sqlmapproject/sqlmap/tarball/master) or the latest zipball by clicking [here](https://github.com/sqlmapproject/sqlmap/zipball/master).

Preferably, you can download sqlmap by cloning the [Git](https://github.com/sqlmapproject/sqlmap) repository:

    git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev

sqlmap works out of the box with [Python](http://www.python.org/download/) version **2.6.x** and **2.7.x** on any platform.

Usage
----

To get a list of basic options and switches use:

    python sqlmap.py -h

To get a list of all options and switches use:

    python sqlmap.py -hh

You can find a sample run [here](https://asciinema.org/a/46601).
To get an overview of sqlmap's capabilities, a list of supported features, and a description of all options and switches, along with examples, you are advised to consult the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki/Usage).

Links
----

* Homepage: http://sqlmap.org
* Download: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) or [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
* Commits RSS feed: https://github.com/sqlmapproject/sqlmap/commits/master.atom
* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
* User's manual: https://github.com/sqlmapproject/sqlmap/wiki
* Frequently Asked Questions (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
* Twitter: [@sqlmap](https://twitter.com/sqlmap)
* Demos: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
* Screenshots: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -3,7 +3,7 @@
"""
beep.py - Make a beep sound

-Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -3,7 +3,7 @@
"""
cloak.py - Simple file encryption/compression utility

-Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -3,7 +3,7 @@
"""
dbgtool.py - Portable executable to ASCII debug script converter

-Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -80,7 +80,7 @@ def main(src, dst):
cmd = ''

# Wait for incoming replies
-if sock in select.select([ sock ], [], [])[0]:
+if sock in select.select([sock], [], [])[0]:
buff = sock.recv(4096)

if 0 == len(buff):
@@ -1,137 +0,0 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

import codecs
import os
import re
import urllib2
import urlparse

from xml.dom.minidom import Document

# Path to the XML file with signatures
MSSQL_XML = os.path.abspath("../../xml/banner/mssql.xml")

# Url to update Microsoft SQL Server XML versions file from
MSSQL_VERSIONS_URL = "http://www.sqlsecurity.com/FAQs/SQLServerVersionDatabase/tabid/63/Default.aspx"

def updateMSSQLXML():
    if not os.path.exists(MSSQL_XML):
        errMsg = "[ERROR] file '%s' does not exist. Please run the script from its parent directory" % MSSQL_XML
        print errMsg
        return

    infoMsg = "[INFO] retrieving data from '%s'" % MSSQL_VERSIONS_URL
    print infoMsg

    try:
        req = urllib2.Request(MSSQL_VERSIONS_URL)
        f = urllib2.urlopen(req)
        mssqlVersionsHtmlString = f.read()
        f.close()
    except urllib2.URLError:
        __mssqlPath = urlparse.urlsplit(MSSQL_VERSIONS_URL)
        __mssqlHostname = __mssqlPath[1]

        warnMsg = "[WARNING] sqlmap was unable to connect to %s," % __mssqlHostname
        warnMsg += " check your Internet connection and retry"
        print warnMsg

        return

    releases = re.findall("class=\"BCC_DV_01DarkBlueTitle\">SQL Server\s(.+?)\sBuilds", mssqlVersionsHtmlString, re.I)
    releasesCount = len(releases)

    # Create the minidom document
    doc = Document()

    # Create the <root> base element
    root = doc.createElement("root")
    doc.appendChild(root)

    for index in xrange(0, releasesCount):
        release = releases[index]

        # Skip Microsoft SQL Server 6.5 because the HTML
        # table is in another format
        if release == "6.5":
            continue

        # Create the <signatures> base element
        signatures = doc.createElement("signatures")
        signatures.setAttribute("release", release)
        root.appendChild(signatures)

        startIdx = mssqlVersionsHtmlString.index("SQL Server %s Builds" % releases[index])

        if index == releasesCount - 1:
            stopIdx = len(mssqlVersionsHtmlString)
        else:
            stopIdx = mssqlVersionsHtmlString.index("SQL Server %s Builds" % releases[index + 1])

        mssqlVersionsReleaseString = mssqlVersionsHtmlString[startIdx:stopIdx]
        servicepackVersion = re.findall("</td><td>(7\.0|2000|2005|2008|2008 R2)*(.*?)</td><td.*?([\d\.]+)</td>[\r]*\n", mssqlVersionsReleaseString, re.I)

        for servicePack, version in servicepackVersion:
            if servicePack.startswith(" "):
                servicePack = servicePack[1:]
            if "/" in servicePack:
                servicePack = servicePack[:servicePack.index("/")]
            if "(" in servicePack:
                servicePack = servicePack[:servicePack.index("(")]
            if "-" in servicePack:
                servicePack = servicePack[:servicePack.index("-")]
            if "*" in servicePack:
                servicePack = servicePack[:servicePack.index("*")]
            if servicePack.startswith("+"):
                servicePack = "0%s" % servicePack

            servicePack = servicePack.replace("\t", " ")
            servicePack = servicePack.replace("No SP", "0")
            servicePack = servicePack.replace("RTM", "0")
            servicePack = servicePack.replace("TM", "0")
            servicePack = servicePack.replace("SP", "")
            servicePack = servicePack.replace("Service Pack", "")
            servicePack = servicePack.replace("<a href=\"http:", "")
            servicePack = servicePack.replace(" ", " ")
            servicePack = servicePack.replace("+ ", "+")
            servicePack = servicePack.replace(" +", "+")

            if servicePack.endswith(" "):
                servicePack = servicePack[:-1]

            if servicePack and version:
                # Create the main <card> element
                signature = doc.createElement("signature")
                signatures.appendChild(signature)

                # Create a <version> element
                versionElement = doc.createElement("version")
                signature.appendChild(versionElement)

                # Give the <version> elemenet some text
                versionText = doc.createTextNode(version)
                versionElement.appendChild(versionText)

                # Create a <servicepack> element
                servicepackElement = doc.createElement("servicepack")
                signature.appendChild(servicepackElement)

                # Give the <servicepack> elemenet some text
                servicepackText = doc.createTextNode(servicePack)
                servicepackElement.appendChild(servicepackText)

    # Save our newly created XML to the signatures file
    mssqlXml = codecs.open(MSSQL_XML, "w", "utf8")
    doc.writexml(writer=mssqlXml, addindent=" ", newl="\n")
    mssqlXml.close()

    infoMsg = "[INFO] done. retrieved data parsed and saved into '%s'" % MSSQL_XML
    print infoMsg

if __name__ == "__main__":
    updateMSSQLXML()
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -3,7 +3,7 @@
"""
safe2bin.py - Simple safe(hex) to binary format converter

-Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -1,6 +1,6 @@
#!/usr/bin/env python

-# Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+# Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
# See the file 'LICENSE' for copying permission

# Removes duplicate entries in wordlist like files

@@ -4,16 +4,15 @@
# Reference: http://rowinggolfer.blogspot.com/2009/08/pylint-recursively.html

import os
import re
import sys

def check(filepath):
if filepath.endswith(".py"):
content = open(filepath, "rb").read()
#if re.search(r"\r?\n\r?\n", content):
content = open(filepath, "rb").read()

if "\n\n\n" in content:
index = content.find("\n\n\n")
print filepath, repr(content[index-30:index+30])
print filepath, repr(content[index - 30:index + 30])

if __name__ == "__main__":
try:
@@ -1,7 +0,0 @@
#!/bin/bash

# Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
# See the file 'LICENSE' for copying permission

# Runs pep8 on all python files (prerequisite: apt-get install pep8)
find . -wholename "./thirdparty" -prune -o -type f -iname "*.py" -exec pep8 '{}' \;
@@ -1,5 +1,15 @@
#!/bin/bash

+: '
+cat > .git/hooks/post-commit << EOF
+#!/bin/bash
+
+source ./extra/shutils/postcommit-hook.sh
+EOF
+
+chmod +x .git/hooks/post-commit
+'
+
SETTINGS="../../lib/core/settings.py"

declare -x SCRIPTPATH="${0}"

@@ -1,5 +1,15 @@
#!/bin/bash

+: '
+cat > .git/hooks/pre-commit << EOF
+#!/bin/bash
+
+source ./extra/shutils/precommit-hook.sh
+EOF
+
+chmod +x .git/hooks/pre-commit
+'
+
PROJECT="../../"
SETTINGS="../../lib/core/settings.py"
CHECKSUM="../../txt/checksum.md5"
extra/shutils/pycodestyle.sh | 7 (new executable file)
@@ -0,0 +1,7 @@
#!/bin/bash

# Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
# See the file 'LICENSE' for copying permission

# Runs pycodestyle on all python files (prerequisite: pip install pycodestyle)
find . -wholename "./thirdparty" -prune -o -type f -iname "*.py" -exec pycodestyle --ignore=E501,E302,E305,E722,E402 '{}' \;
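A minimal usage sketch for the new helper script above, assuming it is invoked from the repository root (the relative `find .` suggests this, though the page does not state it explicitly):

    # run the style check over the whole source tree, skipping bundled third-party code
    bash extra/shutils/pycodestyle.sh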
@@ -16,7 +16,7 @@ cat > $TMP_DIR/setup.py << EOF
#!/usr/bin/env python

"""
-Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -25,10 +25,11 @@ from setuptools import setup, find_packages
setup(
    name='sqlmap',
    version='$VERSION',
-    description="Automatic SQL injection and database takeover tool",
+    description='Automatic SQL injection and database takeover tool',
+    long_description='sqlmap is an open source penetration testing tool that automates the process of detecting and exploiting SQL injection flaws and taking over of database servers. It comes with a powerful detection engine, many niche features for the ultimate penetration tester and a broad range of switches lasting from database fingerprinting, over data fetching from the database, to accessing the underlying file system and executing commands on the operating system via out-of-band connections.',
    author='Bernardo Damele Assumpcao Guimaraes, Miroslav Stampar',
    author_email='bernardo@sqlmap.org, miroslav@sqlmap.org',
-    url='https://sqlmap.org',
+    url='http://sqlmap.org',
    download_url='https://github.com/sqlmapproject/sqlmap/archive/$VERSION.zip',
    license='GNU General Public License v2 (GPLv2)',
    packages=find_packages(),
@@ -60,7 +61,7 @@ cat > sqlmap/__init__.py << EOF
#!/usr/bin/env python

"""
-Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
@@ -1,6 +1,6 @@
#!/usr/bin/env python

-# Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+# Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
# See the file 'LICENSE' for copying permission

import codecs
@@ -27,7 +27,7 @@ SMTP_SERVER = "127.0.0.1"
SMTP_PORT = 25
SMTP_TIMEOUT = 30
FROM = "regressiontest@sqlmap.org"
-#TO = "dev@sqlmap.org"
+# TO = "dev@sqlmap.org"
TO = ["bernardo.damele@gmail.com", "miroslav.stampar@gmail.com"]
SUBJECT = "regression test started on %s using revision %s" % (START_TIME, getRevisionNumber())
TARGET = "debian"
@@ -83,7 +83,7 @@ def main():
if stderr:
failure_email("Execution of regression test failed with error:\n\n%s" % stderr)

-failed_tests = re.findall("running live test case: (.+?) \((\d+)\/\d+\)[\r]*\n.+test failed (at parsing items: (.+))?\s*\- scan folder: (\/.+) \- traceback: (.*?)( - SQL injection not detected)?[\r]*\n", stdout)
+failed_tests = re.findall(r"running live test case: (.+?) \((\d+)\/\d+\)[\r]*\n.+test failed (at parsing items: (.+))?\s*\- scan folder: (\/.+) \- traceback: (.*?)( - SQL injection not detected)?[\r]*\n", stdout)

for failed_test in failed_tests:
title = failed_test[0]
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
extra/wafdetectify/__init__.py | 8 (new file)
@@ -0,0 +1,8 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

pass
extra/wafdetectify/wafdetectify.py | 121 (new file)
@@ -0,0 +1,121 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

import cookielib
import glob
import httplib
import inspect
import os
import re
import subprocess
import sys
import urllib2

sys.dont_write_bytecode = True

NAME, VERSION, AUTHOR = "WAF Detectify", "0.1", "sqlmap developers (@sqlmap)"
TIMEOUT = 10
HEADERS = {"User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:61.0) Gecko/20100101 Firefox/61.0", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Cache-Control": "max-age=0"}
SQLMAP_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
SCRIPTS_DIR = os.path.join(SQLMAP_DIR, "waf")
LEVEL_COLORS = {"o": "\033[00;94m", "x": "\033[00;91m", "!": "\033[00;93m", "i": "\033[00;92m"}
CACHE = {}
WAF_FUNCTIONS = []

def get_page(get=None, url=None, host=None, data=None):
    key = (get, url, host, data)

    if key in CACHE:
        return CACHE[key]

    page, headers, code = None, {}, httplib.OK

    url = url or ("%s%s%s" % (sys.argv[1], '?' if '?' not in sys.argv[1] else '&', get) if get else sys.argv[1])
    if not url.startswith("http"):
        url = "http://%s" % url

    try:
        req = urllib2.Request("".join(url[_].replace(' ', "%20") if _ > url.find('?') else url[_] for _ in xrange(len(url))), data, HEADERS)
        conn = urllib2.urlopen(req, timeout=TIMEOUT)
        page = conn.read()
        headers = conn.info()
    except Exception, ex:
        code = getattr(ex, "code", None)
        page = ex.read() if hasattr(ex, "read") else getattr(ex, "msg", "")
        headers = ex.info() if hasattr(ex, "info") else {}

    result = CACHE[key] = page, headers, code

    return result

def colorize(message):
    if not subprocess.mswindows and sys.stdout.isatty():
        message = re.sub(r"\[(.)\]", lambda match: "[%s%s\033[00;49m]" % (LEVEL_COLORS[match.group(1)], match.group(1)), message)
        message = message.replace("@sqlmap", "\033[00;96m@sqlmap\033[00;49m")
        message = message.replace(NAME, "\033[00;93m%s\033[00;49m" % NAME)

    return message

def main():
    global WAF_FUNCTIONS

    print colorize("%s #v%s\n by: %s\n" % (NAME, VERSION, AUTHOR))

    if len(sys.argv) < 2:
        exit(colorize("[x] usage: python %s <hostname>" % os.path.split(__file__)[-1]))

    cookie_jar = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie_jar))
    urllib2.install_opener(opener)

    sys.path.insert(0, SQLMAP_DIR)

    for found in glob.glob(os.path.join(SCRIPTS_DIR, "*.py")):
        dirname, filename = os.path.split(found)
        dirname = os.path.abspath(dirname)

        if filename == "__init__.py":
            continue

        if dirname not in sys.path:
            sys.path.insert(0, dirname)

        try:
            if filename[:-3] in sys.modules:
                del sys.modules[filename[:-3]]
            module = __import__(filename[:-3].encode(sys.getfilesystemencoding() or "utf8"))
        except ImportError, msg:
            exit(colorize("[x] cannot import WAF script '%s' (%s)" % (filename[:-3], msg)))

        _ = dict(inspect.getmembers(module))
        if "detect" not in _:
            exit(colorize("[x] missing function 'detect(get_page)' in WAF script '%s'" % found))
        else:
            WAF_FUNCTIONS.append((_["detect"], _.get("__product__", filename[:-3])))

    WAF_FUNCTIONS = sorted(WAF_FUNCTIONS, key=lambda _: "generic" in _[1].lower())

    print colorize("[i] checking '%s'..." % sys.argv[1])

    found = False
    for function, product in WAF_FUNCTIONS:
        if found and "unknown" in product.lower():
            continue

        if function(get_page):
            print colorize("[!] WAF/IPS identified as '%s'" % product)
            found = True

    if not found:
        print colorize("[o] nothing found")

    print

    exit(int(not found))

if __name__ == "__main__":
    main()
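Based only on the code above, a hedged usage sketch for the new script: it takes a single hostname argument (the one below is a placeholder), loads the detection functions from sqlmap's waf/*.py scripts, and exits with status 0 when a WAF/IPS is identified, 1 otherwise.

    # placeholder hostname; per the script's own usage message it expects "python wafdetectify.py <hostname>"
    python extra/wafdetectify/wafdetectify.py www.example.com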
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
@@ -140,11 +140,11 @@ def action():
conf.dbmsHandler.udfInjectCustom()

# File system options
-if conf.rFile:
-conf.dumper.rFile(conf.dbmsHandler.readFile(conf.rFile))
+if conf.fileRead:
+conf.dumper.rFile(conf.dbmsHandler.readFile(conf.fileRead))

-if conf.wFile:
-conf.dbmsHandler.writeFile(conf.wFile, conf.dFile, conf.wFileType)
+if conf.fileWrite:
+conf.dbmsHandler.writeFile(conf.fileWrite, conf.fileDest, conf.fileWriteType)

# Operating system options
if conf.osCmd:
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -13,6 +13,7 @@ import random
import re
import socket
import subprocess
import sys
import tempfile
import time

@@ -68,11 +69,14 @@ from lib.core.enums import NULLCONNECTION
from lib.core.enums import PAYLOAD
from lib.core.enums import PLACE
from lib.core.enums import REDIRECTION
+from lib.core.enums import WEB_PLATFORM
from lib.core.exception import SqlmapConnectionException
+from lib.core.exception import SqlmapDataException
from lib.core.exception import SqlmapNoneDataException
from lib.core.exception import SqlmapSilentQuitException
from lib.core.exception import SqlmapSkipTargetException
from lib.core.exception import SqlmapUserQuitException
+from lib.core.settings import BOUNDED_INJECTION_MARKER
from lib.core.settings import CANDIDATE_SENTENCE_MIN_LENGTH
from lib.core.settings import CHECK_INTERNET_ADDRESS
from lib.core.settings import CHECK_INTERNET_VALUE
@@ -87,9 +91,12 @@ from lib.core.settings import IDS_WAF_CHECK_RATIO
from lib.core.settings import IDS_WAF_CHECK_TIMEOUT
from lib.core.settings import MAX_DIFFLIB_SEQUENCE_LENGTH
from lib.core.settings import NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH
from lib.core.settings import PRECONNECT_INCOMPATIBLE_SERVERS
from lib.core.settings import SINGLE_QUOTE_MARKER
from lib.core.settings import SLEEP_TIME_MARKER
from lib.core.settings import SUHOSIN_MAX_VALUE_LENGTH
from lib.core.settings import SUPPORTED_DBMS
from lib.core.settings import UNICODE_ENCODING
from lib.core.settings import URI_HTTP_HEADER
from lib.core.settings import UPPER_RATIO_BOUND
from lib.core.threads import getCurrentThreadData
@@ -357,7 +364,7 @@ def checkSqlInjection(place, parameter, value):

# Parse test's <request>
comment = agent.getComment(test.request) if len(conf.boundaries) > 1 else None
-fstPayload = agent.cleanupPayload(test.request.payload, origValue=value if place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER) else None)
+fstPayload = agent.cleanupPayload(test.request.payload, origValue=value if place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER) and BOUNDED_INJECTION_MARKER not in (value or "") else None)

for boundary in boundaries:
injectable = False
@@ -433,7 +440,7 @@ def checkSqlInjection(place, parameter, value):

if conf.invalidLogical:
_ = int(kb.data.randomInt[:2])
-origValue = "%s AND %s=%s" % (value, _, _ + 1)
+origValue = "%s AND %s LIKE %s" % (value, _, _ + 1)
elif conf.invalidBignum:
origValue = kb.data.randomInt[:6]
elif conf.invalidString:
@@ -468,13 +475,13 @@ def checkSqlInjection(place, parameter, value):
# payload was successful
# Parse test's <response>
for method, check in test.response.items():
-check = agent.cleanupPayload(check, origValue=value if place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER) else None)
+check = agent.cleanupPayload(check, origValue=value if place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER) and BOUNDED_INJECTION_MARKER not in (value or "") else None)

# In case of boolean-based blind SQL injection
if method == PAYLOAD.METHOD.COMPARISON:
# Generate payload used for comparison
def genCmpPayload():
-sndPayload = agent.cleanupPayload(test.response.comparison, origValue=value if place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER) else None)
+sndPayload = agent.cleanupPayload(test.response.comparison, origValue=value if place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER) and BOUNDED_INJECTION_MARKER not in (value or "") else None)

# Forge response payload by prepending with
# boundary's prefix and appending the boundary's
@@ -558,14 +565,14 @@ def checkSqlInjection(place, parameter, value):
logger.info(infoMsg)
else:
trueSet = set(extractTextTagContent(trueRawResponse))
-trueSet = trueSet.union(__ for _ in trueSet for __ in _.split())
+trueSet |= set(__ for _ in trueSet for __ in _.split())

falseSet = set(extractTextTagContent(falseRawResponse))
-falseSet = falseSet.union(__ for _ in falseSet for __ in _.split())
+falseSet |= set(__ for _ in falseSet for __ in _.split())

if threadData.lastErrorPage and threadData.lastErrorPage[1]:
errorSet = set(extractTextTagContent(threadData.lastErrorPage[1]))
-errorSet = errorSet.union(__ for _ in errorSet for __ in _.split())
+errorSet |= set(__ for _ in errorSet for __ in _.split())
else:
errorSet = set()

@@ -761,7 +768,7 @@ def checkSqlInjection(place, parameter, value):
infoMsg = "executing alerting shell command(s) ('%s')" % conf.alert
logger.info(infoMsg)

-process = subprocess.Popen(conf.alert, shell=True)
+process = subprocess.Popen(conf.alert.encode(sys.getfilesystemencoding() or UNICODE_ENCODING), shell=True)
process.wait()

kb.alerted = True
@@ -856,8 +863,8 @@ def heuristicCheckDbms(injection):
if conf.noEscape and dbms not in FROM_DUMMY_TABLE:
continue

-if checkBooleanExpression("(SELECT '%s'%s)='%s'" % (randStr1, FROM_DUMMY_TABLE.get(dbms, ""), randStr1)):
-if not checkBooleanExpression("(SELECT '%s'%s)='%s'" % (randStr1, FROM_DUMMY_TABLE.get(dbms, ""), randStr2)):
+if checkBooleanExpression("(SELECT '%s'%s)=%s%s%s" % (randStr1, FROM_DUMMY_TABLE.get(dbms, ""), SINGLE_QUOTE_MARKER, randStr1, SINGLE_QUOTE_MARKER)):
+if not checkBooleanExpression("(SELECT '%s'%s)=%s%s%s" % (randStr1, FROM_DUMMY_TABLE.get(dbms, ""), SINGLE_QUOTE_MARKER, randStr2, SINGLE_QUOTE_MARKER)):
retVal = dbms
break

@@ -893,7 +900,7 @@ def checkFalsePositives(injection):

kb.injection = injection

-for i in xrange(conf.level):
+for level in xrange(conf.level):
while True:
randInt1, randInt2, randInt3 = (_() for j in xrange(3))

@@ -989,11 +996,6 @@ def checkFilteredChars(injection):
kb.injection = popValue()

def heuristicCheckSqlInjection(place, parameter):
-if kb.nullConnection:
-debugMsg = "heuristic check skipped because NULL connection used"
-logger.debug(debugMsg)
-return None
-
if kb.heavilyDynamic:
debugMsg = "heuristic check skipped because of heavy dynamicity"
logger.debug(debugMsg)
@@ -1051,9 +1053,19 @@ def heuristicCheckSqlInjection(place, parameter):
kb.heuristicTest = HEURISTIC_TEST.CASTED if casting else HEURISTIC_TEST.NEGATIVE if not result else HEURISTIC_TEST.POSITIVE

if casting:
-errMsg = "possible %s casting " % ("integer" if origValue.isdigit() else "type")
-errMsg += "detected (e.g. \"$%s=intval($_REQUEST['%s'])\") " % (parameter, parameter)
-errMsg += "at the back-end web application"
+errMsg = "possible %s casting detected (e.g. '" % ("integer" if origValue.isdigit() else "type")
+
+platform = conf.url.split('.')[-1].lower()
+if platform == WEB_PLATFORM.ASP:
+errMsg += "%s=CInt(request.querystring(\"%s\"))" % (parameter, parameter)
+elif platform == WEB_PLATFORM.ASPX:
+errMsg += "int.TryParse(Request.QueryString[\"%s\"], out %s)" % (parameter, parameter)
+elif platform == WEB_PLATFORM.JSP:
+errMsg += "%s=Integer.parseInt(request.getParameter(\"%s\"))" % (parameter, parameter)
+else:
+errMsg += "$%s=intval($_REQUEST[\"%s\"])" % (parameter, parameter)
+
+errMsg += "') at the back-end web application"
logger.error(errMsg)

if kb.ignoreCasted is None:
@@ -1118,14 +1130,6 @@ def checkDynParam(place, parameter, value):
try:
payload = agent.payload(place, parameter, value, getUnicode(randInt))
dynResult = Request.queryPage(payload, place, raise404=False)
-
-if not dynResult:
-infoMsg = "confirming that %s parameter '%s' is dynamic" % (paramType, parameter)
-logger.info(infoMsg)
-
-randInt = randomInt()
-payload = agent.payload(place, parameter, value, getUnicode(randInt))
-dynResult = Request.queryPage(payload, place, raise404=False)
except SqlmapConnectionException:
pass

@@ -1229,7 +1233,7 @@ def checkStability():
logger.error(errMsg)

else:
-warnMsg = "target URL content is not stable. sqlmap will base the page "
+warnMsg = "target URL content is not stable (i.e. content differs). sqlmap will base the page "
warnMsg += "comparison on a sequence matcher. If no dynamic nor "
warnMsg += "injectable parameters are detected, or in case of "
warnMsg += "junk results, refer to user's manual paragraph "
@@ -1314,9 +1318,8 @@ def checkRegexp():
rawResponse = "%s%s" % (listToStrValue(headers.headers if headers else ""), page)

if not re.search(conf.regexp, rawResponse, re.I | re.M):
-warnMsg = "you provided '%s' as the regular expression to " % conf.regexp
-warnMsg += "match, but such a regular expression does not have any "
-warnMsg += "match within the target URL raw response, sqlmap "
+warnMsg = "you provided '%s' as the regular expression " % conf.regexp
+warnMsg += "which does not have any match within the target URL raw response. sqlmap "
warnMsg += "will carry on anyway"
logger.warn(warnMsg)

@@ -1335,7 +1338,7 @@ def checkWaf():
if _ is not None:
if _:
warnMsg = "previous heuristics detected that the target "
-warnMsg += "is protected by some kind of WAF/IPS/IDS"
+warnMsg += "is protected by some kind of WAF/IPS"
logger.critical(warnMsg)
return _

@@ -1343,7 +1346,7 @@ def checkWaf():
return None

infoMsg = "checking if the target is protected by "
-infoMsg += "some kind of WAF/IPS/IDS"
+infoMsg += "some kind of WAF/IPS"
logger.info(infoMsg)

retVal = False
@@ -1357,7 +1360,12 @@ def checkWaf():
value = "" if not conf.parameters.get(PLACE.GET) else conf.parameters[PLACE.GET] + DEFAULT_GET_POST_DELIMITER
value += "%s=%s" % (randomStr(), agent.addPayloadDelimiters(payload))

pushValue(kb.redirectChoice)
+pushValue(kb.resendPostOnRedirect)
+pushValue(conf.timeout)

kb.redirectChoice = REDIRECTION.YES
+kb.resendPostOnRedirect = False
+conf.timeout = IDS_WAF_CHECK_TIMEOUT

try:
@@ -1366,16 +1374,19 @@ def checkWaf():
retVal = True
finally:
kb.matchRatio = None

+conf.timeout = popValue()
+kb.resendPostOnRedirect = popValue()
kb.redirectChoice = popValue()

if retVal:
warnMsg = "heuristics detected that the target "
-warnMsg += "is protected by some kind of WAF/IPS/IDS"
+warnMsg += "is protected by some kind of WAF/IPS"
logger.critical(warnMsg)

if not conf.identifyWaf:
message = "do you want sqlmap to try to detect backend "
-message += "WAF/IPS/IDS? [y/N] "
+message += "WAF/IPS? [y/N] "

if readInput(message, default='N', boolean=True):
conf.identifyWaf = True
@@ -1399,7 +1410,7 @@ def identifyWaf():
kb.testMode = True

infoMsg = "using WAF scripts to detect "
-infoMsg += "backend WAF/IPS/IDS protection"
+infoMsg += "backend WAF/IPS protection"
logger.info(infoMsg)

@cachedmethod
@@ -1426,7 +1437,7 @@ def identifyWaf():
continue

try:
-logger.debug("checking for WAF/IPS/IDS product '%s'" % product)
+logger.debug("checking for WAF/IPS product '%s'" % product)
found = function(_)
except Exception, ex:
errMsg = "exception occurred while running "
@@ -1436,19 +1447,19 @@ def identifyWaf():
found = False

if found:
-errMsg = "WAF/IPS/IDS identified as '%s'" % product
+errMsg = "WAF/IPS identified as '%s'" % product
logger.critical(errMsg)

retVal.append(product)

if retVal:
-if kb.wafSpecificResponse and len(retVal) == 1 and "unknown" in retVal[0].lower():
+if kb.wafSpecificResponse and "You don't have permission to access" not in kb.wafSpecificResponse and len(retVal) == 1 and "unknown" in retVal[0].lower():
handle, filename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.SPECIFIC_RESPONSE)
os.close(handle)
with openFile(filename, "w+b") as f:
f.write(kb.wafSpecificResponse)

-message = "WAF/IPS/IDS specific response can be found in '%s'. " % filename
+message = "WAF/IPS specific response can be found in '%s'. " % filename
message += "If you know the details on used protection please "
message += "report it along with specific response "
message += "to '%s'" % DEV_EMAIL_ADDRESS
@@ -1465,7 +1476,7 @@ def identifyWaf():
if not choice:
raise SqlmapUserQuitException
else:
-warnMsg = "WAF/IPS/IDS product hasn't been identified"
+warnMsg = "WAF/IPS product hasn't been identified"
logger.warn(warnMsg)

kb.testType = None
@@ -1535,6 +1546,10 @@ def checkConnection(suppressOutput=False):
errMsg = "problem occurred while "
errMsg += "resolving a host name '%s' ('%s')" % (conf.hostname, getSafeExString(ex))
raise SqlmapConnectionException(errMsg)
+except UnicodeError, ex:
+errMsg = "problem occurred while "
+errMsg += "handling a host name '%s' ('%s')" % (conf.hostname, getSafeExString(ex))
+raise SqlmapDataException(errMsg)

if not suppressOutput and not conf.dummy and not conf.offline:
infoMsg = "testing connection to the target URL"
@@ -1547,6 +1562,10 @@ def checkConnection(suppressOutput=False):

kb.errorIsNone = False

+if any(_ in (kb.serverHeader or "") for _ in PRECONNECT_INCOMPATIBLE_SERVERS):
+singleTimeWarnMessage("turning off pre-connect mechanism because of incompatible server ('%s')" % kb.serverHeader)
+conf.disablePrecon = True
+
if not kb.originalPage and wasLastResponseHTTPError():
errMsg = "unable to retrieve page content"
raise SqlmapConnectionException(errMsg)
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -56,9 +56,11 @@ from lib.core.exception import SqlmapNoneDataException
from lib.core.exception import SqlmapNotVulnerableException
from lib.core.exception import SqlmapSilentQuitException
from lib.core.exception import SqlmapSkipTargetException
+from lib.core.exception import SqlmapSystemException
from lib.core.exception import SqlmapValueException
from lib.core.exception import SqlmapUserQuitException
from lib.core.settings import ASP_NET_CONTROL_REGEX
+from lib.core.settings import CSRF_TOKEN_PARAMETER_INFIXES
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
from lib.core.settings import EMPTY_FORM_FIELDS_REGEX
from lib.core.settings import IGNORE_PARAMETERS
@@ -69,6 +71,7 @@ from lib.core.settings import REFERER_ALIASES
from lib.core.settings import USER_AGENT_ALIASES
from lib.core.target import initTargetEnv
from lib.core.target import setupTargetEnv
+from lib.utils.hash import crackHashFile

def _selectInjection():
"""
@@ -243,16 +246,20 @@ def _saveToResultsFile():

results[key].extend(injection.data.keys())

for key, value in results.items():
place, parameter, notes = key
line = "%s,%s,%s,%s,%s%s" % (safeCSValue(kb.originalUrls.get(conf.url) or conf.url), place, parameter, "".join(techniques[_][0].upper() for _ in sorted(value)), notes, os.linesep)
conf.resultsFP.write(line)
try:
for key, value in results.items():
place, parameter, notes = key
line = "%s,%s,%s,%s,%s%s" % (safeCSValue(kb.originalUrls.get(conf.url) or conf.url), place, parameter, "".join(techniques[_][0].upper() for _ in sorted(value)), notes, os.linesep)
conf.resultsFP.write(line)

if not results:
line = "%s,,,,%s" % (conf.url, os.linesep)
conf.resultsFP.write(line)
if not results:
line = "%s,,,,%s" % (conf.url, os.linesep)
conf.resultsFP.write(line)

conf.resultsFP.flush()
conf.resultsFP.flush()
except IOError, ex:
errMsg = "unable to write to the results file '%s' ('%s'). " % (conf.resultsFilename, getSafeExString(ex))
raise SqlmapSystemException(errMsg)

@stackedmethod
def start():
@@ -262,6 +269,9 @@ def start():
check if they are dynamic and SQL injection affected
"""

+if conf.hashFile:
+crackHashFile(conf.hashFile)
+
if conf.direct:
initTargetEnv()
setupTargetEnv()
@@ -307,6 +317,7 @@ def start():
conf.cookie = targetCookie
conf.httpHeaders = list(initialHeaders)
conf.httpHeaders.extend(targetHeaders or [])
+conf.httpHeaders = [conf.httpHeaders[i] for i in xrange(len(conf.httpHeaders)) if conf.httpHeaders[i][0].upper() not in (__[0].upper() for __ in conf.httpHeaders[i + 1:])]

initTargetEnv()
parseTargetUrl()
@@ -498,14 +509,14 @@ def start():
infoMsg = "skipping %s parameter '%s'" % (paramType, parameter)
logger.info(infoMsg)

-elif parameter == conf.csrfToken:
+elif conf.csrfToken and re.search(conf.csrfToken, parameter, re.I):
testSqlInj = False

infoMsg = "skipping anti-CSRF token parameter '%s'" % parameter
logger.info(infoMsg)

# Ignore session-like parameters for --level < 4
-elif conf.level < 4 and (parameter.upper() in IGNORE_PARAMETERS or parameter.upper().startswith(GOOGLE_ANALYTICS_COOKIE_PREFIX)):
+elif conf.level < 4 and (parameter.upper() in IGNORE_PARAMETERS or any(_ in parameter.lower() for _ in CSRF_TOKEN_PARAMETER_INFIXES) or parameter.upper().startswith(GOOGLE_ANALYTICS_COOKIE_PREFIX)):
testSqlInj = False

infoMsg = "ignoring %s parameter '%s'" % (paramType, parameter)
@@ -524,7 +535,7 @@ def start():

testSqlInj = False
else:
-infoMsg = "%s parameter '%s' is dynamic" % (paramType, parameter)
+infoMsg = "%s parameter '%s' appears to be dynamic" % (paramType, parameter)
logger.info(infoMsg)

kb.testedParams.add(paramKey)
@@ -631,6 +642,9 @@ def start():
errMsg += "involved (e.g. WAF) maybe you could try to use "
errMsg += "option '--tamper' (e.g. '--tamper=space2comment')"

+if not conf.randomAgent:
+errMsg += " and/or switch '--random-agent'"
+
raise SqlmapNotVulnerableException(errMsg.rstrip('.'))
else:
# Flush the flag
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -10,6 +10,7 @@ from lib.core.data import conf
from lib.core.data import kb
from lib.core.dicts import DBMS_DICT
from lib.core.enums import DBMS
+from lib.core.exception import SqlmapConnectionException
from lib.core.settings import MSSQL_ALIASES
from lib.core.settings import MYSQL_ALIASES
from lib.core.settings import ORACLE_ALIASES
@@ -21,6 +22,7 @@ from lib.core.settings import MAXDB_ALIASES
from lib.core.settings import SYBASE_ALIASES
from lib.core.settings import DB2_ALIASES
from lib.core.settings import HSQLDB_ALIASES
+from lib.core.settings import H2_ALIASES
from lib.core.settings import INFORMIX_ALIASES
from lib.utils.sqlalchemy import SQLAlchemy

@@ -46,6 +48,8 @@ from plugins.dbms.db2 import DB2Map
from plugins.dbms.db2.connector import Connector as DB2Conn
from plugins.dbms.hsqldb import HSQLDBMap
from plugins.dbms.hsqldb.connector import Connector as HSQLDBConn
+from plugins.dbms.h2 import H2Map
+from plugins.dbms.h2.connector import Connector as H2Conn
from plugins.dbms.informix import InformixMap
from plugins.dbms.informix.connector import Connector as InformixConn

@@ -67,6 +71,7 @@ def setHandler():
(DBMS.SYBASE, SYBASE_ALIASES, SybaseMap, SybaseConn),
(DBMS.DB2, DB2_ALIASES, DB2Map, DB2Conn),
(DBMS.HSQLDB, HSQLDB_ALIASES, HSQLDBMap, HSQLDBConn),
+(DBMS.H2, H2_ALIASES, H2Map, H2Conn),
(DBMS.INFORMIX, INFORMIX_ALIASES, InformixMap, InformixConn),
]
@@ -90,29 +95,41 @@ def setHandler():
conf.dbmsConnector = Connector()

if conf.direct:
exception = None
dialect = DBMS_DICT[dbms][3]

if dialect:
sqlalchemy = SQLAlchemy(dialect=dialect)
sqlalchemy.connect()
try:
sqlalchemy = SQLAlchemy(dialect=dialect)
sqlalchemy.connect()

if sqlalchemy.connector:
conf.dbmsConnector = sqlalchemy
else:
try:
conf.dbmsConnector.connect()
except NameError:
pass
else:
conf.dbmsConnector.connect()
if sqlalchemy.connector:
conf.dbmsConnector = sqlalchemy
except Exception, ex:
exception = ex

if not dialect or exception:
try:
conf.dbmsConnector.connect()
except Exception, ex:
if exception:
raise exception
else:
if not isinstance(ex, NameError):
raise
else:
msg = "support for direct connection to '%s' is not available. " % dbms
msg += "Please rerun with '--dependencies'"
raise SqlmapConnectionException(msg)

if conf.forceDbms == dbms or handler.checkDbms():
if kb.resolutionDbms:
conf.dbmsHandler = max(_ for _ in items if _[0] == kb.resolutionDbms)[2]()
conf.dbmsHandler._dbms = kb.resolutionDbms
else:
conf.dbmsHandler = handler
conf.dbmsHandler._dbms = dbms

conf.dbmsHandler._dbms = dbms
break
else:
conf.dbmsConnector = None

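The reworked setHandler() hunk above changes the direct-connection logic: when a SQLAlchemy dialect is registered for the DBMS it is tried first, any failure is remembered, the native connector is tried next, and if that also fails the original exception is re-raised. A minimal sketch of that fallback ordering, assuming two illustrative callables rather than sqlmap's actual connector objects:

def connect_direct(dialect_connect, native_connect):
    # try the SQLAlchemy dialect first (when one exists), remember its failure,
    # then fall back to the native connector; surface the first error if both fail
    exception = None

    if dialect_connect is not None:
        try:
            return dialect_connect()
        except Exception as ex:
            exception = ex

    try:
        return native_connect()
    except Exception:
        if exception is not None:
            raise exception
        raise

For instance, connect_direct(None, lambda: "native handle") skips straight to the native path and returns "native handle".
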
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
@@ -43,6 +43,7 @@ from lib.core.settings import INFERENCE_MARKER
|
||||
from lib.core.settings import NULL
|
||||
from lib.core.settings import PAYLOAD_DELIMITER
|
||||
from lib.core.settings import REPLACEMENT_MARKER
|
||||
from lib.core.settings import SINGLE_QUOTE_MARKER
|
||||
from lib.core.settings import SLEEP_TIME_MARKER
|
||||
from lib.core.unescaper import unescaper
|
||||
|
||||
@@ -142,7 +143,7 @@ class Agent(object):
|
||||
match = re.search(r"\A[^ ]+", newValue)
|
||||
newValue = newValue[len(match.group() if match else ""):]
|
||||
_ = randomInt(2)
|
||||
value = "%s%s AND %s=%s" % (origValue, match.group() if match else "", _, _ + 1)
|
||||
value = "%s%s AND %s LIKE %s" % (origValue, match.group() if match else "", _, _ + 1)
|
||||
elif conf.invalidBignum:
|
||||
value = randomInt(6)
|
||||
elif conf.invalidString:
|
||||
@@ -198,7 +199,7 @@ class Agent(object):
|
||||
regex = r"(\A|\b)%s=%s%s" % (re.escape(parameter), re.escape(origValue), r"(\Z|\b)" if origValue[-1].isalnum() else "")
|
||||
retVal = _(regex, "%s=%s" % (parameter, self.addPayloadDelimiters(newValue)), paramString)
|
||||
else:
|
||||
retVal = _(r"(\A|\b)%s=%s(\Z|%s|%s|\s)" % (re.escape(parameter), re.escape(origValue), DEFAULT_GET_POST_DELIMITER, DEFAULT_COOKIE_DELIMITER), "%s=%s\g<2>" % (parameter, self.addPayloadDelimiters(newValue)), paramString)
|
||||
retVal = _(r"(\A|\b)%s=%s(\Z|%s|%s|\s)" % (re.escape(parameter), re.escape(origValue), DEFAULT_GET_POST_DELIMITER, DEFAULT_COOKIE_DELIMITER), r"%s=%s\g<2>" % (parameter, self.addPayloadDelimiters(newValue)), paramString)
|
||||
|
||||
if retVal == paramString and urlencode(parameter) != parameter:
|
||||
retVal = _(r"(\A|\b)%s=%s" % (re.escape(urlencode(parameter)), re.escape(origValue)), "%s=%s" % (urlencode(parameter), self.addPayloadDelimiters(newValue)), paramString)
|
||||
@@ -246,6 +247,9 @@ class Agent(object):
|
||||
else:
|
||||
query = kb.injection.prefix or prefix or ""
|
||||
|
||||
if "SELECT '[RANDSTR]'" in query: # escaping of pre-WHERE prefixes
|
||||
query = query.replace("'[RANDSTR]'", unescaper.escape(randomStr(), quote=False))
|
||||
|
||||
if not (expression and expression[0] == ';') and not (query and query[-1] in ('(', ')') and expression and expression[0] in ('(', ')')) and not (query and query[-1] == '('):
|
||||
query += " "
|
||||
|
||||
@@ -311,9 +315,12 @@ class Agent(object):
|
||||
for _ in set(re.findall(r"(?i)\[RANDSTR(?:\d+)?\]", payload)):
|
||||
payload = payload.replace(_, randomStr())
|
||||
|
||||
if origValue is not None and "[ORIGVALUE]" in payload:
|
||||
if origValue is not None:
|
||||
origValue = getUnicode(origValue)
|
||||
payload = getUnicode(payload).replace("[ORIGVALUE]", origValue if origValue.isdigit() else unescaper.escape("'%s'" % origValue))
|
||||
if "[ORIGVALUE]" in payload:
|
||||
payload = getUnicode(payload).replace("[ORIGVALUE]", origValue if origValue.isdigit() else unescaper.escape("'%s'" % origValue))
|
||||
if "[ORIGINAL]" in payload:
|
||||
payload = getUnicode(payload).replace("[ORIGINAL]", origValue)
|
||||
|
||||
if INFERENCE_MARKER in payload:
|
||||
if Backend.getIdentifiedDbms() is not None:
|
||||
@@ -342,6 +349,7 @@ class Agent(object):
|
||||
|
||||
if payload:
|
||||
payload = payload.replace(SLEEP_TIME_MARKER, str(conf.timeSec))
|
||||
payload = payload.replace(SINGLE_QUOTE_MARKER, "'")
|
||||
|
||||
for _ in set(re.findall(r"\[RANDNUM(?:\d+)?\]", payload, re.I)):
|
||||
payload = payload.replace(_, str(randomInt()))
|
||||
@@ -535,7 +543,7 @@ class Agent(object):
|
||||
fieldsToCastStr = fieldsToCastStr or ""
|
||||
|
||||
# Function
|
||||
if re.search("\A\w+\(.*\)", fieldsToCastStr, re.I) or (fieldsSelectCase and "WHEN use" not in query) or fieldsSubstr:
|
||||
if re.search(r"\A\w+\(.*\)", fieldsToCastStr, re.I) or (fieldsSelectCase and "WHEN use" not in query) or fieldsSubstr:
|
||||
fieldsToCastList = [fieldsToCastStr]
|
||||
else:
|
||||
fieldsToCastList = splitFields(fieldsToCastStr)
|
||||
@@ -616,7 +624,7 @@ class Agent(object):
|
||||
elif fieldsNoSelect:
|
||||
concatenatedQuery = "CONCAT('%s',%s,'%s')" % (kb.chars.start, concatenatedQuery, kb.chars.stop)
|
||||
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.ORACLE, DBMS.SQLITE, DBMS.DB2, DBMS.FIREBIRD, DBMS.HSQLDB):
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.ORACLE, DBMS.SQLITE, DBMS.DB2, DBMS.FIREBIRD, DBMS.HSQLDB, DBMS.H2):
|
||||
if fieldsExists:
|
||||
concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1)
|
||||
concatenatedQuery += "||'%s'" % kb.chars.stop
|
||||
@@ -627,7 +635,7 @@ class Agent(object):
|
||||
concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1)
|
||||
_ = unArrayizeValue(zeroDepthSearch(concatenatedQuery, " FROM "))
|
||||
concatenatedQuery = "%s||'%s'%s" % (concatenatedQuery[:_], kb.chars.stop, concatenatedQuery[_:])
|
||||
concatenatedQuery = re.sub(r"('%s'\|\|)(.+)(%s)" % (kb.chars.start, re.escape(castedFields)), "\g<2>\g<1>\g<3>", concatenatedQuery)
|
||||
concatenatedQuery = re.sub(r"('%s'\|\|)(.+)(%s)" % (kb.chars.start, re.escape(castedFields)), r"\g<2>\g<1>\g<3>", concatenatedQuery)
|
||||
elif fieldsSelect:
|
||||
concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1)
|
||||
concatenatedQuery += "||'%s'" % kb.chars.stop
|
||||
@@ -639,7 +647,7 @@ class Agent(object):
|
||||
concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'+" % kb.chars.start, 1)
|
||||
concatenatedQuery += "+'%s'" % kb.chars.stop
|
||||
elif fieldsSelectTop:
|
||||
topNum = re.search("\ASELECT\s+TOP\s+([\d]+)\s+", concatenatedQuery, re.I).group(1)
|
||||
topNum = re.search(r"\ASELECT\s+TOP\s+([\d]+)\s+", concatenatedQuery, re.I).group(1)
|
||||
concatenatedQuery = concatenatedQuery.replace("SELECT TOP %s " % topNum, "TOP %s '%s'+" % (topNum, kb.chars.start), 1)
|
||||
concatenatedQuery = concatenatedQuery.replace(" FROM ", "+'%s' FROM " % kb.chars.stop, 1)
|
||||
elif fieldsSelectCase:
|
||||
@@ -815,7 +823,7 @@ class Agent(object):
|
||||
limitRegExp2 = None
|
||||
|
||||
if (limitRegExp or limitRegExp2) or (Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE) and topLimit):
|
||||
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.SQLITE):
|
||||
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.SQLITE, DBMS.H2):
|
||||
limitGroupStart = queries[Backend.getIdentifiedDbms()].limitgroupstart.query
|
||||
limitGroupStop = queries[Backend.getIdentifiedDbms()].limitgroupstop.query
|
||||
|
||||
@@ -905,14 +913,14 @@ class Agent(object):
|
||||
fromFrom = limitedQuery[fromIndex + 1:]
|
||||
orderBy = None
|
||||
|
||||
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.SQLITE):
|
||||
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.SQLITE, DBMS.H2):
|
||||
limitStr = queries[Backend.getIdentifiedDbms()].limit.query % (num, 1)
|
||||
limitedQuery += " %s" % limitStr
|
||||
|
||||
elif Backend.isDbms(DBMS.HSQLDB):
|
||||
match = re.search(r"ORDER BY [^ ]+", limitedQuery)
|
||||
if match:
|
||||
limitedQuery = re.sub(r"\s*%s\s*" % match.group(0), " ", limitedQuery).strip()
|
||||
limitedQuery = re.sub(r"\s*%s\s*" % re.escape(match.group(0)), " ", limitedQuery).strip()
|
||||
limitedQuery += " %s" % match.group(0)
|
||||
|
||||
if query.startswith("SELECT "):
|
||||
@@ -1086,7 +1094,7 @@ class Agent(object):
|
||||
if conf.dumpWhere and query:
|
||||
prefix, suffix = query.split(" ORDER BY ") if " ORDER BY " in query else (query, "")
|
||||
|
||||
if "%s)" % conf.tbl.upper() in prefix.upper():
|
||||
if conf.tbl and "%s)" % conf.tbl.upper() in prefix.upper():
|
||||
prefix = re.sub(r"(?i)%s\)" % re.escape(conf.tbl), "%s WHERE %s)" % (conf.tbl, conf.dumpWhere), prefix)
|
||||
elif re.search(r"(?i)\bWHERE\b", prefix):
|
||||
prefix += " AND %s" % conf.dumpWhere
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
@@ -126,7 +126,7 @@ class BigArray(list):
|
||||
try:
|
||||
with open(self.chunks[index], "rb") as f:
|
||||
self.cache = Cache(index, pickle.loads(bz2.decompress(f.read())), False)
|
||||
except IOError, ex:
|
||||
except Exception, ex:
|
||||
errMsg = "exception occurred while retrieving data "
|
||||
errMsg += "from a temporary file ('%s')" % ex.message
|
||||
raise SqlmapSystemException(errMsg)
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
import binascii
|
||||
import codecs
|
||||
import contextlib
|
||||
import cookielib
|
||||
@@ -101,7 +102,10 @@ from lib.core.settings import BOUNDED_INJECTION_MARKER
|
||||
from lib.core.settings import BRUTE_DOC_ROOT_PREFIXES
|
||||
from lib.core.settings import BRUTE_DOC_ROOT_SUFFIXES
|
||||
from lib.core.settings import BRUTE_DOC_ROOT_TARGET_MARK
|
||||
from lib.core.settings import BURP_REQUEST_REGEX
|
||||
from lib.core.settings import BURP_XML_HISTORY_REGEX
|
||||
from lib.core.settings import DBMS_DIRECTORY_DICT
|
||||
from lib.core.settings import CRAWL_EXCLUDE_EXTENSIONS
|
||||
from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
|
||||
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
|
||||
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
|
||||
@@ -139,6 +143,7 @@ from lib.core.settings import PARTIAL_VALUE_MARKER
|
||||
from lib.core.settings import PAYLOAD_DELIMITER
|
||||
from lib.core.settings import PLATFORM
|
||||
from lib.core.settings import PRINTABLE_CHAR_REGEX
|
||||
from lib.core.settings import PROBLEMATIC_CUSTOM_INJECTION_PATTERNS
|
||||
from lib.core.settings import PUSH_VALUE_EXCEPTION_RETRY_COUNT
|
||||
from lib.core.settings import PYVERSION
|
||||
from lib.core.settings import REFERER_ALIASES
|
||||
@@ -161,6 +166,7 @@ from lib.core.settings import URLENCODE_CHAR_LIMIT
|
||||
from lib.core.settings import URLENCODE_FAILSAFE_CHARS
|
||||
from lib.core.settings import USER_AGENT_ALIASES
|
||||
from lib.core.settings import VERSION_STRING
|
||||
from lib.core.settings import WEBSCARAB_SPLITTER
|
||||
from lib.core.threads import getCurrentThreadData
|
||||
from lib.utils.sqlalchemy import _sqlalchemy
|
||||
from thirdparty.clientform.clientform import ParseResponse
|
||||
@@ -594,7 +600,7 @@ def paramToDict(place, parameters=None):
|
||||
|
||||
if condition:
|
||||
testableParameters[parameter] = "=".join(parts[1:])
|
||||
if not conf.multipleTargets and not (conf.csrfToken and parameter == conf.csrfToken):
|
||||
if not conf.multipleTargets and not (conf.csrfToken and re.search(conf.csrfToken, parameter, re.I)):
|
||||
_ = urldecode(testableParameters[parameter], convall=True)
|
||||
if (_.endswith("'") and _.count("'") == 1 or re.search(r'\A9{3,}', _) or re.search(r'\A-\d+\Z', _) or re.search(DUMMY_USER_INJECTION, _)) and not parameter.upper().startswith(GOOGLE_ANALYTICS_COOKIE_PREFIX):
|
||||
warnMsg = "it appears that you have provided tainted parameter values "
|
||||
@@ -639,7 +645,7 @@ def paramToDict(place, parameters=None):
|
||||
current[key] = "%s%s" % (getUnicode(value).lower(), BOUNDED_INJECTION_MARKER)
|
||||
else:
|
||||
current[key] = "%s%s" % (value, BOUNDED_INJECTION_MARKER)
|
||||
candidates["%s (%s)" % (parameter, key)] = re.sub(r"\b(%s\s*=\s*)%s" % (re.escape(parameter), re.escape(testableParameters[parameter])), r"\g<1>%s" % json.dumps(deserialized), parameters)
|
||||
candidates["%s (%s)" % (parameter, key)] = re.sub(r"\b(%s\s*=\s*)%s" % (re.escape(parameter), re.escape(testableParameters[parameter])), r"\g<1>%s" % json.dumps(deserialized, separators=(',', ':') if ", " not in testableParameters[parameter] else None), parameters)
|
||||
current[key] = original
|
||||
|
||||
deserialized = json.loads(testableParameters[parameter])
|
||||
@@ -862,11 +868,11 @@ def boldifyMessage(message):
retVal = message

if any(_ in message for _ in BOLD_PATTERNS):
retVal = setColor(message, True)
retVal = setColor(message, bold=True)

return retVal

def setColor(message, bold=False):
def setColor(message, color=None, bold=False):
retVal = message
level = extractRegexResult(r"\[(?P<result>%s)\]" % '|'.join(_[0] for _ in getPublicTypeMembers(LOGGING_LEVELS)), message) or kb.get("stickyLevel")

@@ -874,15 +880,11 @@ def setColor(message, bold=False):
level = unicodeencode(level)

if message and getattr(LOGGER_HANDLER, "is_tty", False): # colorizing handler
if bold:
retVal = colored(message, color=None, on_color=None, attrs=("bold",))
if bold or color:
retVal = colored(message, color=color, on_color=None, attrs=("bold",) if bold else None)
elif level:
level = getattr(logging, level, None) if isinstance(level, basestring) else level
_ = LOGGER_HANDLER.level_map.get(level)
if _:
background, foreground, bold = _
retVal = colored(message, color=foreground, on_color="on_%s" % background if background else None, attrs=("bold",) if bold else None)

retVal = LOGGER_HANDLER.colorize(message, level)
kb.stickyLevel = level if message and message[-1] != "\n" else None

return retVal
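setColor() now accepts an explicit color in addition to the bold flag and forwards both to termcolor's colored(). A minimal sketch of the equivalent call using the public termcolor package (sqlmap bundles its own copy under thirdparty); the messages are illustrative:

from termcolor import colored  # pip install termcolor

def set_color(message, color=None, bold=False):
    # simplified version of the new signature: honour an explicit color and/or bold
    if bold or color:
        return colored(message, color=color, on_color=None, attrs=("bold",) if bold else None)
    return message

print(set_color("[INFO] testing connection to the target URL", color="green"))
print(set_color("[CRITICAL] connection failed", color="red", bold=True))
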
@@ -897,7 +899,7 @@ def clearColors(message):
|
||||
|
||||
retVal = message
|
||||
|
||||
if message:
|
||||
if isinstance(message, str):
|
||||
retVal = re.sub(r"\x1b\[[\d;]+m", "", message)
|
||||
|
||||
return retVal
|
||||
@@ -921,9 +923,9 @@ def dataToStdout(data, forceOutput=False, bold=False, content_type=None, status=
|
||||
|
||||
try:
|
||||
if conf.get("api"):
|
||||
sys.stdout.write(message, status, content_type)
|
||||
sys.stdout.write(clearColors(message), status, content_type)
|
||||
else:
|
||||
sys.stdout.write(setColor(message, bold))
|
||||
sys.stdout.write(setColor(message, bold=bold))
|
||||
|
||||
sys.stdout.flush()
|
||||
except IOError:
|
||||
@@ -1007,6 +1009,9 @@ def readInput(message, default=None, checkBatch=True, boolean=False):
|
||||
kb.prependFlag = False
|
||||
|
||||
if conf.get("answers"):
|
||||
if not any(_ in conf.answers for _ in ",="):
|
||||
return conf.answers
|
||||
|
||||
for item in conf.answers.split(','):
|
||||
question = item.split('=')[0].strip()
|
||||
answer = item.split('=')[1] if len(item.split('=')) > 1 else None
|
||||
@@ -1025,7 +1030,7 @@ def readInput(message, default=None, checkBatch=True, boolean=False):
|
||||
logger.debug(debugMsg)
|
||||
|
||||
if retVal is None:
|
||||
if checkBatch and conf.get("batch"):
|
||||
if checkBatch and conf.get("batch") or conf.get("api"):
|
||||
if isListLike(default):
|
||||
options = ','.join(getUnicode(opt, UNICODE_ENCODING) for opt in default)
|
||||
elif default:
|
||||
@@ -1160,6 +1165,9 @@ def getHeader(headers, key):
|
||||
def checkFile(filename, raiseOnError=True):
|
||||
"""
|
||||
Checks for file existence and readability
|
||||
|
||||
>>> checkFile(__file__)
|
||||
True
|
||||
"""
|
||||
|
||||
valid = True
|
||||
@@ -1170,7 +1178,7 @@ def checkFile(filename, raiseOnError=True):
|
||||
try:
|
||||
if filename is None or not os.path.isfile(filename):
|
||||
valid = False
|
||||
except UnicodeError:
|
||||
except:
|
||||
valid = False
|
||||
|
||||
if valid:
|
||||
@@ -1190,7 +1198,7 @@ def banner():
|
||||
This function prints sqlmap banner with its version
|
||||
"""
|
||||
|
||||
if not any(_ in sys.argv for _ in ("--version", "--api")):
|
||||
if not any(_ in sys.argv for _ in ("--version", "--api")) and not conf.get("disableBanner"):
|
||||
_ = BANNER
|
||||
|
||||
if not getattr(LOGGER_HANDLER, "is_tty", False) or "--disable-coloring" in sys.argv:
|
||||
@@ -1267,11 +1275,15 @@ def setPaths(rootPath):
|
||||
paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump")
|
||||
paths.SQLMAP_FILES_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "files")
|
||||
|
||||
# history files
|
||||
paths.SQLMAP_HISTORY_PATH = getUnicode(os.path.join(_, "history"), encoding=sys.getfilesystemencoding() or UNICODE_ENCODING)
|
||||
paths.API_SHELL_HISTORY = os.path.join(paths.SQLMAP_HISTORY_PATH, "api.hst")
|
||||
paths.OS_SHELL_HISTORY = os.path.join(paths.SQLMAP_HISTORY_PATH, "os.hst")
|
||||
paths.SQL_SHELL_HISTORY = os.path.join(paths.SQLMAP_HISTORY_PATH, "sql.hst")
|
||||
paths.SQLMAP_SHELL_HISTORY = os.path.join(paths.SQLMAP_HISTORY_PATH, "sqlmap.hst")
|
||||
paths.GITHUB_HISTORY = os.path.join(paths.SQLMAP_HISTORY_PATH, "github.hst")
|
||||
|
||||
# sqlmap files
|
||||
paths.OS_SHELL_HISTORY = os.path.join(_, "os.hst")
|
||||
paths.SQL_SHELL_HISTORY = os.path.join(_, "sql.hst")
|
||||
paths.SQLMAP_SHELL_HISTORY = os.path.join(_, "sqlmap.hst")
|
||||
paths.GITHUB_HISTORY = os.path.join(_, "github.hst")
|
||||
paths.CHECKSUM_MD5 = os.path.join(paths.SQLMAP_TXT_PATH, "checksum.md5")
|
||||
paths.COMMON_COLUMNS = os.path.join(paths.SQLMAP_TXT_PATH, "common-columns.txt")
|
||||
paths.COMMON_TABLES = os.path.join(paths.SQLMAP_TXT_PATH, "common-tables.txt")
|
||||
@@ -1291,7 +1303,7 @@ def setPaths(rootPath):
|
||||
paths.PGSQL_XML = os.path.join(paths.SQLMAP_XML_BANNER_PATH, "postgresql.xml")
|
||||
|
||||
for path in paths.values():
|
||||
if any(path.endswith(_) for _ in (".txt", ".xml", ".zip")):
|
||||
if any(path.endswith(_) for _ in (".md5", ".txt", ".xml", ".zip")):
|
||||
checkFile(path)
|
||||
|
||||
def weAreFrozen():
|
||||
@@ -1311,13 +1323,11 @@ def parseTargetDirect():
|
||||
if not conf.direct:
|
||||
return
|
||||
|
||||
conf.direct = conf.direct.encode(UNICODE_ENCODING) # some DBMS connectors (e.g. pymssql) don't like Unicode with non-US letters
|
||||
|
||||
details = None
|
||||
remote = False
|
||||
|
||||
for dbms in SUPPORTED_DBMS:
|
||||
details = re.search("^(?P<dbms>%s)://(?P<credentials>(?P<user>.+?)\:(?P<pass>.*)\@)?(?P<remote>(?P<hostname>[\w.-]+?)\:(?P<port>[\d]+)\/)?(?P<db>[\w\d\ \:\.\_\-\/\\\\]+?)$" % dbms, conf.direct, re.I)
|
||||
details = re.search(r"^(?P<dbms>%s)://(?P<credentials>(?P<user>.+?)\:(?P<pass>.*)\@)?(?P<remote>(?P<hostname>[\w.-]+?)\:(?P<port>[\d]+)\/)?(?P<db>[\w\d\ \:\.\_\-\/\\]+?)$" % dbms, conf.direct, re.I)
|
||||
|
||||
if details:
|
||||
conf.dbms = details.group("dbms")
|
||||
@@ -1343,7 +1353,7 @@ def parseTargetDirect():
|
||||
conf.hostname = "localhost"
|
||||
conf.port = 0
|
||||
|
||||
conf.dbmsDb = details.group("db")
|
||||
conf.dbmsDb = details.group("db").strip() if details.group("db") is not None else None
|
||||
conf.parameters[None] = "direct connection"
|
||||
|
||||
break
|
||||
@@ -1397,8 +1407,10 @@ def parseTargetDirect():
|
||||
__import__("pyodbc")
|
||||
elif dbmsName == DBMS.FIREBIRD:
|
||||
__import__("kinterbasdb")
|
||||
except (SqlmapSyntaxException, SqlmapMissingDependence):
|
||||
raise
|
||||
except:
|
||||
if _sqlalchemy and data[3] in _sqlalchemy.dialects.__all__:
|
||||
if _sqlalchemy and data[3] and any(_ in _sqlalchemy.dialects.__all__ for _ in (data[3], data[3].split('+')[0])):
|
||||
pass
|
||||
else:
|
||||
errMsg = "sqlmap requires '%s' third-party library " % data[1]
|
||||
@@ -1423,7 +1435,7 @@ def parseTargetUrl():
|
||||
errMsg += "on this platform"
|
||||
raise SqlmapGenericException(errMsg)
|
||||
|
||||
if not re.search(r"^http[s]*://", conf.url, re.I) and not re.search(r"^ws[s]*://", conf.url, re.I):
|
||||
if not re.search(r"^https?://", conf.url, re.I) and not re.search(r"^wss?://", conf.url, re.I):
|
||||
if re.search(r":443\b", conf.url):
|
||||
conf.url = "https://%s" % conf.url
|
||||
else:
|
||||
@@ -1440,7 +1452,7 @@ def parseTargetUrl():
|
||||
errMsg += "in the hostname part"
|
||||
raise SqlmapGenericException(errMsg)
|
||||
|
||||
hostnamePort = urlSplit.netloc.split(":") if not re.search(r"\[.+\]", urlSplit.netloc) else filter(None, (re.search("\[.+\]", urlSplit.netloc).group(0), re.search(r"\](:(?P<port>\d+))?", urlSplit.netloc).group("port")))
|
||||
hostnamePort = urlSplit.netloc.split(":") if not re.search(r"\[.+\]", urlSplit.netloc) else filter(None, (re.search(r"\[.+\]", urlSplit.netloc).group(0), re.search(r"\](:(?P<port>\d+))?", urlSplit.netloc).group("port")))
|
||||
|
||||
conf.scheme = (urlSplit.scheme.strip().lower() or "http") if not conf.forceSSL else "https"
|
||||
conf.path = urlSplit.path.strip()
|
||||
@@ -1524,14 +1536,14 @@ def expandAsteriskForColumns(expression):
|
||||
the SQL query string (expression)
|
||||
"""
|
||||
|
||||
asterisk = re.search(r"(?i)\ASELECT(\s+TOP\s+[\d]+)?\s+\*\s+FROM\s+`?([^`\s()]+)", expression)
|
||||
match = re.search(r"(?i)\ASELECT(\s+TOP\s+[\d]+)?\s+\*\s+FROM\s+`?([^`\s()]+)", expression)
|
||||
|
||||
if asterisk:
|
||||
if match:
|
||||
infoMsg = "you did not provide the fields in your query. "
|
||||
infoMsg += "sqlmap will retrieve the column names itself"
|
||||
logger.info(infoMsg)
|
||||
|
||||
_ = asterisk.group(2).replace("..", '.').replace(".dbo.", '.')
|
||||
_ = match.group(2).replace("..", '.').replace(".dbo.", '.')
|
||||
db, conf.tbl = _.split('.', 1) if '.' in _ else (None, _)
|
||||
|
||||
if db is None:
|
||||
@@ -1640,6 +1652,9 @@ def parseUnionPage(page):
|
||||
def parseFilePaths(page):
|
||||
"""
|
||||
Detects (possible) absolute system paths inside the provided page content
|
||||
|
||||
>>> _ = "/var/www/html/index.php"; parseFilePaths("<html>Error occurred at line 207 of: %s<br>Please contact your administrator</html>" % _); _ in kb.absFilePaths
|
||||
True
|
||||
"""
|
||||
|
||||
if page:
|
||||
@@ -1866,8 +1881,7 @@ def getFilteredPageContent(page, onlyText=True, split=" "):
|
||||
# only if the page's charset has been successfully identified
|
||||
if isinstance(page, unicode):
|
||||
retVal = re.sub(r"(?si)<script.+?</script>|<!--.+?-->|<style.+?</style>%s" % (r"|<[^>]+>|\t|\n|\r" if onlyText else ""), split, page)
|
||||
while retVal.find(2 * split) != -1:
|
||||
retVal = retVal.replace(2 * split, split)
|
||||
retVal = re.sub(r"%s{2,}" % split, split, retVal)
|
||||
retVal = htmlunescape(retVal.strip().strip(split))
|
||||
|
||||
return retVal
|
||||
@@ -2033,6 +2047,9 @@ def parseXmlFile(xmlFile, handler):
|
||||
def getSQLSnippet(dbms, sfile, **variables):
|
||||
"""
|
||||
Returns content of SQL snippet located inside 'procs/' directory
|
||||
|
||||
>>> 'RECONFIGURE' in getSQLSnippet(DBMS.MSSQL, "activate_sp_oacreate")
|
||||
True
|
||||
"""
|
||||
|
||||
if sfile.endswith('.sql') and os.path.exists(sfile):
|
||||
@@ -2072,9 +2089,12 @@ def getSQLSnippet(dbms, sfile, **variables):
|
||||
|
||||
return retVal
|
||||
|
||||
def readCachedFileContent(filename, mode='rb'):
|
||||
def readCachedFileContent(filename, mode="rb"):
|
||||
"""
|
||||
Cached reading of file content (avoiding multiple same file reading)
|
||||
|
||||
>>> "readCachedFileContent" in readCachedFileContent(__file__)
|
||||
True
|
||||
"""
|
||||
|
||||
if filename not in kb.cache.content:
|
||||
@@ -2101,6 +2121,16 @@ def readXmlFile(xmlFile):
|
||||
|
||||
return retVal
|
||||
|
||||
def average(values):
|
||||
"""
|
||||
Computes the arithmetic mean of a list of numbers.
|
||||
|
||||
>>> average([0.9, 0.9, 0.9, 1.0, 0.8, 0.9])
|
||||
0.9
|
||||
"""
|
||||
|
||||
return (sum(values) / len(values)) if values else None
|
||||
|
||||
@cachedmethod
def stdev(values):
"""
@@ -2115,22 +2145,15 @@ def stdev(values):
return None
else:
avg = average(values)
_ = reduce(lambda x, y: x + pow((y or 0) - avg, 2), values, 0.0)
_ = 1.0 * sum(pow((_ or 0) - avg, 2) for _ in values)
return sqrt(_ / (len(values) - 1))

def average(values):
"""
Computes the arithmetic mean of a list of numbers.

>>> average([0.9, 0.9, 0.9, 1.0, 0.8, 0.9])
0.9
"""

return (sum(values) / len(values)) if values else None

def calculateDeltaSeconds(start):
"""
Returns elapsed time from start till now

>>> calculateDeltaSeconds(0) > 1151721660
True
"""

return time.time() - start
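The relocated average() and the rewritten stdev() above compute the arithmetic mean and the sample standard deviation (Bessel's correction, n - 1 in the denominator). A self-contained restatement, assuming only the behaviour visible in the hunk:

from math import sqrt

def average(values):
    # arithmetic mean, None for an empty sequence
    return (sum(values) / len(values)) if values else None

def stdev(values):
    # sample standard deviation (n - 1 denominator), None for fewer than two values
    if not values or len(values) < 2:
        return None
    avg = average(values)
    total = 1.0 * sum(pow((value or 0) - avg, 2) for value in values)
    return sqrt(total / (len(values) - 1))

print(average([0.9, 0.9, 0.9, 1.0, 0.8, 0.9]))           # approximately 0.9
print(round(stdev([0.9, 0.9, 0.9, 1.0, 0.8, 0.9]), 4))   # small spread around the mean
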
@@ -2138,6 +2161,9 @@ def calculateDeltaSeconds(start):
|
||||
def initCommonOutputs():
|
||||
"""
|
||||
Initializes dictionary containing common output values used by "good samaritan" feature
|
||||
|
||||
>>> initCommonOutputs(); "information_schema" in kb.commonOutputs["Databases"]
|
||||
True
|
||||
"""
|
||||
|
||||
kb.commonOutputs = {}
|
||||
@@ -2905,15 +2931,15 @@ def filterStringValue(value, charRegex, replacement=""):
|
||||
|
||||
return retVal
|
||||
|
||||
def filterControlChars(value):
def filterControlChars(value, replacement=' '):
"""
Returns string value with control chars being substituted with ' '
Returns string value with control chars being substituted with replacement character

>>> filterControlChars(u'AND 1>(2+3)\\n--')
u'AND 1>(2+3) --'
"""

return filterStringValue(value, PRINTABLE_CHAR_REGEX, ' ')
return filterStringValue(value, PRINTABLE_CHAR_REGEX, replacement)
|
||||
|
||||
def isDBMSVersionAtLeast(version):
|
||||
"""
|
||||
@@ -3342,6 +3368,25 @@ def unhandledExceptionMessage():
|
||||
|
||||
return errMsg
|
||||
|
||||
def getLatestRevision():
|
||||
"""
|
||||
Retrieves latest revision from the official repository
|
||||
|
||||
>>> from lib.core.settings import VERSION; getLatestRevision() == VERSION
|
||||
True
|
||||
"""
|
||||
|
||||
retVal = None
|
||||
req = urllib2.Request(url="https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/lib/core/settings.py")
|
||||
|
||||
try:
|
||||
content = urllib2.urlopen(req).read()
|
||||
retVal = extractRegexResult(r"VERSION\s*=\s*[\"'](?P<result>[\d.]+)", content)
|
||||
except:
|
||||
pass
|
||||
|
||||
return retVal
|
||||
|
||||
def createGithubIssue(errMsg, excMsg):
|
||||
"""
|
||||
Automatically create a Github issue with unhandled exception information
|
||||
@@ -3356,7 +3401,7 @@ def createGithubIssue(errMsg, excMsg):
|
||||
|
||||
_ = re.sub(r"'[^']+'", "''", excMsg)
|
||||
_ = re.sub(r"\s+line \d+", "", _)
|
||||
_ = re.sub(r'File ".+?/(\w+\.py)', "\g<1>", _)
|
||||
_ = re.sub(r'File ".+?/(\w+\.py)', r"\g<1>", _)
|
||||
_ = re.sub(r".+\Z", "", _)
|
||||
key = hashlib.md5(_).hexdigest()[:8]
|
||||
|
||||
@@ -3435,10 +3480,12 @@ def maskSensitiveData(msg):
retVal = retVal.replace(value, '*' * len(value))

# Just in case (for problematic parameters regarding user encoding)
match = re.search(r"(?i)[ -]-(u|url|data|cookie)( |=)(.*?)( -?-[a-z]|\Z)", retVal)
if match:
for match in re.finditer(r"(?i)[ -]-(u|url|data|cookie)( |=)(.*?)(?= -?-[a-z]|\Z)", retVal):
retVal = retVal.replace(match.group(3), '*' * len(match.group(3)))

# Fail-safe substitution
retVal = re.sub(r"(?i)\bhttps?://[^ ]+", lambda match: '*' * len(match.group(0)), retVal)

if getpass.getuser():
retVal = re.sub(r"(?i)\b%s\b" % re.escape(getpass.getuser()), '*' * len(getpass.getuser()), retVal)

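maskSensitiveData() now masks every value passed to -u/--url/--data/--cookie (re.finditer over all matches instead of a single re.search) and adds a fail-safe that stars out any remaining URL. A small sketch of just that part, using the regexes shown above on a made-up command line:

import re

def mask_sensitive(command_line):
    # star out every -u/--url/--data/--cookie value, then any leftover URL as a fail-safe
    retval = command_line
    for match in re.finditer(r"(?i)[ -]-(u|url|data|cookie)( |=)(.*?)(?= -?-[a-z]|\Z)", retval):
        retval = retval.replace(match.group(3), '*' * len(match.group(3)))
    retval = re.sub(r"(?i)\bhttps?://[^ ]+", lambda match: '*' * len(match.group(0)), retval)
    return retval

# the target URL and the cookie value are replaced with asterisks of equal length
print(mask_sensitive("sqlmap.py -u http://www.example.com/vuln.php?id=1 --cookie 'PHPSESSID=abc' --batch"))
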
@@ -3462,9 +3509,9 @@ def listToStrValue(value):
|
||||
|
||||
return retVal
|
||||
|
||||
def intersect(valueA, valueB, lowerCase=False):
|
||||
def intersect(containerA, containerB, lowerCase=False):
|
||||
"""
|
||||
Returns intersection of the array-ized values
|
||||
Returns intersection of the container-ized values
|
||||
|
||||
>>> intersect([1, 2, 3], set([1,3]))
|
||||
[1, 3]
|
||||
@@ -3472,15 +3519,15 @@ def intersect(valueA, valueB, lowerCase=False):
|
||||
|
||||
retVal = []
|
||||
|
||||
if valueA and valueB:
|
||||
valueA = arrayizeValue(valueA)
|
||||
valueB = arrayizeValue(valueB)
|
||||
if containerA and containerB:
|
||||
containerA = arrayizeValue(containerA)
|
||||
containerB = arrayizeValue(containerB)
|
||||
|
||||
if lowerCase:
|
||||
valueA = [val.lower() if isinstance(val, basestring) else val for val in valueA]
|
||||
valueB = [val.lower() if isinstance(val, basestring) else val for val in valueB]
|
||||
containerA = [val.lower() if isinstance(val, basestring) else val for val in containerA]
|
||||
containerB = [val.lower() if isinstance(val, basestring) else val for val in containerB]
|
||||
|
||||
retVal = [val for val in valueA if val in valueB]
|
||||
retVal = [val for val in containerA if val in containerB]
|
||||
|
||||
return retVal
|
||||
|
||||
@@ -3500,7 +3547,7 @@ def removeReflectiveValues(content, payload, suppressWarning=False):
|
||||
return value
|
||||
|
||||
payload = getUnicode(urldecode(payload.replace(PAYLOAD_DELIMITER, ""), convall=True))
|
||||
regex = _(filterStringValue(payload, r"[A-Za-z0-9]", REFLECTED_REPLACEMENT_REGEX.encode("string-escape")))
|
||||
regex = _(filterStringValue(payload, r"[A-Za-z0-9]", REFLECTED_REPLACEMENT_REGEX.encode("string_escape")))
|
||||
|
||||
if regex != payload:
|
||||
if all(part.lower() in content.lower() for part in filter(None, regex.split(REFLECTED_REPLACEMENT_REGEX))[1:]): # fast optimization check
|
||||
@@ -3523,6 +3570,7 @@ def removeReflectiveValues(content, payload, suppressWarning=False):
|
||||
regex = r"%s\b" % regex
|
||||
|
||||
_retVal = [retVal]
|
||||
|
||||
def _thread(regex):
|
||||
try:
|
||||
_retVal[0] = re.sub(r"(?i)%s" % regex, REFLECTED_VALUE_MARKER, _retVal[0])
|
||||
@@ -3606,16 +3654,20 @@ def safeSQLIdentificatorNaming(name, isTable=False):
|
||||
|
||||
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.ACCESS):
|
||||
retVal = "`%s`" % retVal
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.DB2, DBMS.SQLITE, DBMS.INFORMIX, DBMS.HSQLDB):
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.DB2, DBMS.SQLITE, DBMS.HSQLDB, DBMS.H2, DBMS.INFORMIX):
|
||||
retVal = "\"%s\"" % retVal
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.ORACLE,):
|
||||
retVal = "\"%s\"" % retVal.upper()
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE):
|
||||
parts = retVal.split('.', 1)
|
||||
for i in xrange(len(parts)):
|
||||
if ((parts[i] or " ")[0].isdigit() or not re.match(r"\A\w+\Z", parts[i], re.U)):
|
||||
parts[i] = "[%s]" % parts[i]
|
||||
retVal = '.'.join(parts)
|
||||
if isTable:
|
||||
parts = retVal.split('.', 1)
|
||||
for i in xrange(len(parts)):
|
||||
if parts[i] and (re.search(r"\A\d|[^\w]", parts[i], re.U) or parts[i].upper() in kb.keywords):
|
||||
parts[i] = "[%s]" % parts[i]
|
||||
retVal = '.'.join(parts)
|
||||
else:
|
||||
if re.search(r"\A\d|[^\w]", retVal, re.U) or retVal.upper() in kb.keywords:
|
||||
retVal = "[%s]" % retVal
|
||||
|
||||
if _ and DEFAULT_MSSQL_SCHEMA not in retVal and '.' not in re.sub(r"\[[^]]+\]", "", retVal):
|
||||
retVal = "%s.%s" % (DEFAULT_MSSQL_SCHEMA, retVal)
|
||||
@@ -3743,7 +3795,7 @@ def expandMnemonics(mnemonics, parser, args):
|
||||
logger.debug(debugMsg)
|
||||
else:
|
||||
found = sorted(options.keys(), key=lambda x: len(x))[0]
|
||||
warnMsg = "detected ambiguity (mnemonic '%s' can be resolved to: %s). " % (name, ", ".join("'%s'" % key for key in options.keys()))
|
||||
warnMsg = "detected ambiguity (mnemonic '%s' can be resolved to any of: %s). " % (name, ", ".join("'%s'" % key for key in options.keys()))
|
||||
warnMsg += "Resolved to shortest of those ('%s')" % found
|
||||
logger.warn(warnMsg)
|
||||
|
||||
@@ -3958,6 +4010,7 @@ def findPageForms(content, url, raise_=False, addToTargets=False):
|
||||
def __init__(self, content, url):
|
||||
StringIO.__init__(self, unicodeencode(content, kb.pageEncoding) if isinstance(content, unicode) else content)
|
||||
self._url = url
|
||||
|
||||
def geturl(self):
|
||||
return self._url
|
||||
|
||||
@@ -4067,7 +4120,12 @@ def checkSameHost(*urls):
elif len(urls) == 1:
return True
else:
return all(re.sub(r"(?i)\Awww\.", "", urlparse.urlparse(url or "").netloc.split(':')[0]) == re.sub(r"(?i)\Awww\.", "", urlparse.urlparse(urls[0] or "").netloc.split(':')[0]) for url in urls[1:])
def _(value):
if value and not re.search(r"\A\w+://", value):
value = "http://%s" % value
return value

return all(re.sub(r"(?i)\Awww\.", "", urlparse.urlparse(_(url) or "").netloc.split(':')[0]) == re.sub(r"(?i)\Awww\.", "", urlparse.urlparse(_(urls[0]) or "").netloc.split(':')[0]) for url in urls[1:])

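The new inner helper in checkSameHost() prepends a scheme before urlparse() is applied, because without one the hostname ends up in the path component and the netloc comparison silently fails. A short demonstration of why (the URLs are illustrative):

import re

try:
    from urllib.parse import urlparse  # Python 3
except ImportError:
    from urlparse import urlparse      # Python 2

def _(value):
    # helper from the hunk above: make sure a scheme is present before parsing
    if value and not re.search(r"\A\w+://", value):
        value = "http://%s" % value
    return value

print(urlparse("www.target.com/page.php").netloc)     # empty - hostname was parsed as a path
print(urlparse(_("www.target.com/page.php")).netloc)  # www.target.com

def same_host(*urls):
    # simplified comparison: ignore an optional leading "www." and the port
    hosts = set(re.sub(r"(?i)\Awww\.", "", urlparse(_(url) or "").netloc.split(':')[0]) for url in urls)
    return len(hosts) <= 1

print(same_host("www.target.com/a", "http://target.com:80/b"))  # True
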
def getHostHeader(url):
|
||||
"""
|
||||
@@ -4083,7 +4141,7 @@ def getHostHeader(url):
|
||||
retVal = urlparse.urlparse(url).netloc
|
||||
|
||||
if re.search(r"http(s)?://\[.+\]", url, re.I):
|
||||
retVal = extractRegexResult("http(s)?://\[(?P<result>.+)\]", url)
|
||||
retVal = extractRegexResult(r"http(s)?://\[(?P<result>.+)\]", url)
|
||||
elif any(retVal.endswith(':%d' % _) for _ in (80, 443)):
|
||||
retVal = retVal.split(':')[0]
|
||||
|
||||
@@ -4095,6 +4153,7 @@ def checkDeprecatedOptions(args):
|
||||
"""
|
||||
|
||||
for _ in args:
|
||||
_ = _.split('=')[0].strip()
|
||||
if _ in DEPRECATED_OPTIONS:
|
||||
errMsg = "switch/option '%s' is deprecated" % _
|
||||
if DEPRECATED_OPTIONS[_]:
|
||||
@@ -4125,6 +4184,9 @@ def checkSystemEncoding():
|
||||
def evaluateCode(code, variables=None):
|
||||
"""
|
||||
Executes given python code given in a string form
|
||||
|
||||
>>> _ = {}; evaluateCode("a = 1; b = 2; c = a", _); _["c"]
|
||||
1
|
||||
"""
|
||||
|
||||
try:
|
||||
@@ -4178,6 +4240,9 @@ def incrementCounter(technique):
|
||||
def getCounter(technique):
|
||||
"""
|
||||
Returns query counter for a given technique
|
||||
|
||||
>>> resetCounter(PAYLOAD.TECHNIQUE.STACKED); incrementCounter(PAYLOAD.TECHNIQUE.STACKED); getCounter(PAYLOAD.TECHNIQUE.STACKED)
|
||||
1
|
||||
"""
|
||||
|
||||
return kb.counters.get(technique, 0)
|
||||
@@ -4224,7 +4289,7 @@ def decodeHexValue(value, raw=False):
|
||||
retVal = retVal.decode("utf-16-le")
|
||||
except UnicodeDecodeError:
|
||||
pass
|
||||
elif Backend.isDbms(DBMS.HSQLDB):
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.HSQLDB, DBMS.H2):
|
||||
try:
|
||||
retVal = retVal.decode("utf-16-be")
|
||||
except UnicodeDecodeError:
|
||||
@@ -4263,9 +4328,11 @@ def extractExpectedValue(value, expected):
|
||||
value = value.strip().lower()
|
||||
if value in ("true", "false"):
|
||||
value = value == "true"
|
||||
elif value in ('t', 'f'):
|
||||
value = value == 't'
|
||||
elif value in ("1", "-1"):
|
||||
value = True
|
||||
elif value == "0":
|
||||
elif value == '0':
|
||||
value = False
|
||||
else:
|
||||
value = None
|
||||
@@ -4280,19 +4347,23 @@ def hashDBWrite(key, value, serialize=False):
Helper function for writing session data to HashDB
"""

_ = "%s%s%s" % (conf.url or "%s%s" % (conf.hostname, conf.port), key, HASHDB_MILESTONE_VALUE)
conf.hashDB.write(_, value, serialize)
if conf.hashDB:
_ = '|'.join((str(_) if not isinstance(_, basestring) else _) for _ in (conf.hostname, conf.path.strip('/') if conf.path is not None else conf.port, key, HASHDB_MILESTONE_VALUE))
conf.hashDB.write(_, value, serialize)

def hashDBRetrieve(key, unserialize=False, checkConf=False):
"""
Helper function for restoring session data from HashDB
"""

_ = "%s%s%s" % (conf.url or "%s%s" % (conf.hostname, conf.port), key, HASHDB_MILESTONE_VALUE)
retVal = conf.hashDB.retrieve(_, unserialize) if kb.resumeValues and not (checkConf and any((conf.flushSession, conf.freshQueries))) else None
retVal = None

if not kb.inferenceMode and not kb.fileReadMode and isinstance(retVal, basestring) and any(_ in retVal for _ in (PARTIAL_VALUE_MARKER, PARTIAL_HEX_VALUE_MARKER)):
retVal = None
if conf.hashDB:
_ = '|'.join((str(_) if not isinstance(_, basestring) else _) for _ in (conf.hostname, conf.path.strip('/') if conf.path is not None else conf.port, key, HASHDB_MILESTONE_VALUE))
retVal = conf.hashDB.retrieve(_, unserialize) if kb.resumeValues and not (checkConf and any((conf.flushSession, conf.freshQueries))) else None

if not kb.inferenceMode and not kb.fileReadMode and isinstance(retVal, basestring) and any(_ in retVal for _ in (PARTIAL_VALUE_MARKER, PARTIAL_HEX_VALUE_MARKER)):
retVal = None

return retVal

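After this change, HashDB session keys are derived from the hostname plus the path (or, when no path is set, the port) joined with '|' and the milestone constant, instead of the full URL, and both helpers bail out early when no hashDB is open. A sketch of the key construction only; the milestone value and the key name below are made up for illustration, the real constant lives in lib/core/settings.py:

HASHDB_MILESTONE_VALUE = "dPHoJRQYvs"  # illustrative placeholder only

def hashdb_key(hostname, path, port, key):
    # mirrors the '|'.join(...) shown above: the path wins over the port when it is set
    parts = (hostname, path.strip('/') if path is not None else port, key, HASHDB_MILESTONE_VALUE)
    return '|'.join(str(part) for part in parts)

print(hashdb_key("target.com", "/news.php", 80, "KB_INJECTIONS"))
print(hashdb_key("192.168.1.5", None, 3306, "KB_INJECTIONS"))
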
@@ -4340,7 +4411,7 @@ def resetCookieJar(cookieJar):
|
||||
|
||||
except cookielib.LoadError, msg:
|
||||
errMsg = "there was a problem loading "
|
||||
errMsg += "cookies file ('%s')" % re.sub(r"(cookies) file '[^']+'", "\g<1>", str(msg))
|
||||
errMsg += "cookies file ('%s')" % re.sub(r"(cookies) file '[^']+'", r"\g<1>", str(msg))
|
||||
raise SqlmapGenericException(errMsg)
|
||||
|
||||
def decloakToTemp(filename):
|
||||
@@ -4386,7 +4457,7 @@ def getRequestHeader(request, name):
|
||||
|
||||
retVal = None
|
||||
|
||||
if request and name:
|
||||
if request and request.headers and name:
|
||||
_ = name.upper()
|
||||
retVal = max(value if _ == key.upper() else None for key, value in request.header_items())
|
||||
|
||||
@@ -4415,6 +4486,9 @@ def zeroDepthSearch(expression, value):
"""
Searches occurrences of value inside expression at 0-depth level
regarding the parentheses

>>> _ = "SELECT (SELECT id FROM users WHERE 2>1) AS result FROM DUAL"; _[zeroDepthSearch(_, "FROM")[0]:]
'FROM DUAL'
"""

retVal = []
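zeroDepthSearch() returns the offsets of a substring that sit outside any parentheses, which is how the Agent helpers earlier in this diff splice markers around the top-level FROM of a query. The implementation is cut off in this hunk, so the following is only a minimal depth-counting sketch that satisfies the doctest shown above:

def zero_depth_search(expression, value):
    # collect start offsets of `value` that are at parenthesis depth 0
    retval = []
    depth = 0
    for index in range(len(expression)):
        if expression[index] == '(':
            depth += 1
        elif expression[index] == ')':
            depth -= 1
        elif depth == 0 and expression[index:index + len(value)] == value:
            retval.append(index)
    return retval

_ = "SELECT (SELECT id FROM users WHERE 2>1) AS result FROM DUAL"
print(_[zero_depth_search(_, "FROM")[0]:])  # FROM DUAL
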
@@ -4450,7 +4524,7 @@ def pollProcess(process, suppress_errors=False):
|
||||
Checks for process status (prints . if still running)
|
||||
"""
|
||||
|
||||
while True:
|
||||
while process:
|
||||
dataToStdout(".")
|
||||
time.sleep(1)
|
||||
|
||||
@@ -4467,6 +4541,195 @@ def pollProcess(process, suppress_errors=False):
|
||||
|
||||
break
|
||||
|
||||
def parseRequestFile(reqFile, checkParams=True):
|
||||
"""
|
||||
Parses WebScarab and Burp logs and adds results to the target URL list
|
||||
"""
|
||||
|
||||
def _parseWebScarabLog(content):
|
||||
"""
|
||||
Parses WebScarab logs (POST method not supported)
|
||||
"""
|
||||
|
||||
reqResList = content.split(WEBSCARAB_SPLITTER)
|
||||
|
||||
for request in reqResList:
|
||||
url = extractRegexResult(r"URL: (?P<result>.+?)\n", request, re.I)
|
||||
method = extractRegexResult(r"METHOD: (?P<result>.+?)\n", request, re.I)
|
||||
cookie = extractRegexResult(r"COOKIE: (?P<result>.+?)\n", request, re.I)
|
||||
|
||||
if not method or not url:
|
||||
logger.debug("not a valid WebScarab log data")
|
||||
continue
|
||||
|
||||
if method.upper() == HTTPMETHOD.POST:
|
||||
warnMsg = "POST requests from WebScarab logs aren't supported "
|
||||
warnMsg += "as their body content is stored in separate files. "
|
||||
warnMsg += "Nevertheless you can use -r to load them individually."
|
||||
logger.warning(warnMsg)
|
||||
continue
|
||||
|
||||
if not(conf.scope and not re.search(conf.scope, url, re.I)):
|
||||
yield (url, method, None, cookie, tuple())
|
||||
|
||||
def _parseBurpLog(content):
|
||||
"""
|
||||
Parses Burp logs
|
||||
"""
|
||||
|
||||
if not re.search(BURP_REQUEST_REGEX, content, re.I | re.S):
|
||||
if re.search(BURP_XML_HISTORY_REGEX, content, re.I | re.S):
|
||||
reqResList = []
|
||||
for match in re.finditer(BURP_XML_HISTORY_REGEX, content, re.I | re.S):
|
||||
port, request = match.groups()
|
||||
try:
|
||||
request = request.decode("base64")
|
||||
except binascii.Error:
|
||||
continue
|
||||
_ = re.search(r"%s:.+" % re.escape(HTTP_HEADER.HOST), request)
|
||||
if _:
|
||||
host = _.group(0).strip()
|
||||
if not re.search(r":\d+\Z", host):
|
||||
request = request.replace(host, "%s:%d" % (host, int(port)))
|
||||
reqResList.append(request)
|
||||
else:
|
||||
reqResList = [content]
|
||||
else:
|
||||
reqResList = re.finditer(BURP_REQUEST_REGEX, content, re.I | re.S)
|
||||
|
||||
for match in reqResList:
|
||||
request = match if isinstance(match, basestring) else match.group(0)
|
||||
request = re.sub(r"\A[^\w]+", "", request)
|
||||
|
||||
schemePort = re.search(r"(http[\w]*)\:\/\/.*?\:([\d]+).+?={10,}", request, re.I | re.S)
|
||||
|
||||
if schemePort:
|
||||
scheme = schemePort.group(1)
|
||||
port = schemePort.group(2)
|
||||
request = re.sub(r"\n=+\Z", "", request.split(schemePort.group(0))[-1].lstrip())
|
||||
else:
|
||||
scheme, port = None, None
|
||||
|
||||
if not re.search(r"^[\n]*(%s).*?\sHTTP\/" % "|".join(getPublicTypeMembers(HTTPMETHOD, True)), request, re.I | re.M):
|
||||
continue
|
||||
|
||||
if re.search(r"^[\n]*%s.*?\.(%s)\sHTTP\/" % (HTTPMETHOD.GET, "|".join(CRAWL_EXCLUDE_EXTENSIONS)), request, re.I | re.M):
|
||||
continue
|
||||
|
||||
getPostReq = False
|
||||
url = None
|
||||
host = None
|
||||
method = None
|
||||
data = None
|
||||
cookie = None
|
||||
params = False
|
||||
newline = None
|
||||
lines = request.split('\n')
|
||||
headers = []
|
||||
|
||||
for index in xrange(len(lines)):
|
||||
line = lines[index]
|
||||
|
||||
if not line.strip() and index == len(lines) - 1:
|
||||
break
|
||||
|
||||
newline = "\r\n" if line.endswith('\r') else '\n'
|
||||
line = line.strip('\r')
|
||||
match = re.search(r"\A(%s) (.+) HTTP/[\d.]+\Z" % "|".join(getPublicTypeMembers(HTTPMETHOD, True)), line) if not method else None
|
||||
|
||||
if len(line.strip()) == 0 and method and method != HTTPMETHOD.GET and data is None:
|
||||
data = ""
|
||||
params = True
|
||||
|
||||
elif match:
|
||||
method = match.group(1)
|
||||
url = match.group(2)
|
||||
|
||||
if any(_ in line for _ in ('?', '=', kb.customInjectionMark)):
|
||||
params = True
|
||||
|
||||
getPostReq = True
|
||||
|
||||
# POST parameters
|
||||
elif data is not None and params:
|
||||
data += "%s%s" % (line, newline)
|
||||
|
||||
# GET parameters
|
||||
elif "?" in line and "=" in line and ": " not in line:
|
||||
params = True
|
||||
|
||||
# Headers
|
||||
elif re.search(r"\A\S+:", line):
|
||||
key, value = line.split(":", 1)
|
||||
value = value.strip().replace("\r", "").replace("\n", "")
|
||||
|
||||
# Cookie and Host headers
|
||||
if key.upper() == HTTP_HEADER.COOKIE.upper():
|
||||
cookie = value
|
||||
elif key.upper() == HTTP_HEADER.HOST.upper():
|
||||
if '://' in value:
|
||||
scheme, value = value.split('://')[:2]
|
||||
splitValue = value.split(":")
|
||||
host = splitValue[0]
|
||||
|
||||
if len(splitValue) > 1:
|
||||
port = filterStringValue(splitValue[1], "[0-9]")
|
||||
|
||||
# Avoid to add a static content length header to
|
||||
# headers and consider the following lines as
|
||||
# POSTed data
|
||||
if key.upper() == HTTP_HEADER.CONTENT_LENGTH.upper():
|
||||
params = True
|
||||
|
||||
# Avoid proxy and connection type related headers
|
||||
elif key not in (HTTP_HEADER.PROXY_CONNECTION, HTTP_HEADER.CONNECTION):
|
||||
headers.append((getUnicode(key), getUnicode(value)))
|
||||
|
||||
if kb.customInjectionMark in re.sub(PROBLEMATIC_CUSTOM_INJECTION_PATTERNS, "", value or ""):
|
||||
params = True
|
||||
|
||||
data = data.rstrip("\r\n") if data else data
|
||||
|
||||
if getPostReq and (params or cookie or not checkParams):
|
||||
if not port and isinstance(scheme, basestring) and scheme.lower() == "https":
|
||||
port = "443"
|
||||
elif not scheme and port == "443":
|
||||
scheme = "https"
|
||||
|
||||
if conf.forceSSL:
|
||||
scheme = "https"
|
||||
port = port or "443"
|
||||
|
||||
if not host:
|
||||
errMsg = "invalid format of a request file"
|
||||
raise SqlmapSyntaxException(errMsg)
|
||||
|
||||
if not url.startswith("http"):
|
||||
url = "%s://%s:%s%s" % (scheme or "http", host, port or "80", url)
|
||||
scheme = None
|
||||
port = None
|
||||
|
||||
if not(conf.scope and not re.search(conf.scope, url, re.I)):
|
||||
yield (url, conf.method or method, data, cookie, tuple(headers))
|
||||
|
||||
checkFile(reqFile)
try:
with openFile(reqFile, "rb") as f:
content = f.read()
except (IOError, OSError, MemoryError), ex:
errMsg = "something went wrong while trying "
errMsg += "to read the content of file '%s' ('%s')" % (reqFile, getSafeExString(ex))
raise SqlmapSystemException(errMsg)

if conf.scope:
logger.info("using regular expression '%s' for filtering targets" % conf.scope)

for target in _parseBurpLog(content):
yield target

for target in _parseWebScarabLog(content):
yield target

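parseRequestFile() above walks each captured request line by line: the request line gives the method and URL, "Header: value" lines are collected (with Cookie and Host handled specially), and everything after the blank line is treated as POST data. The following is only a compact, hedged sketch of that per-request parsing; the sample request and helper name are made up and many of the original's corner cases (ports, schemes, excluded extensions) are omitted:

import re

RAW_REQUEST = (
    "POST /search.php?test=1 HTTP/1.1\n"
    "Host: www.example.com:8080\n"
    "Cookie: PHPSESSID=abcdef\n"
    "Content-Type: application/x-www-form-urlencoded\n"
    "\n"
    "q=apples&submit=Search"
)

def parse_raw_request(raw):
    # simplified version of the loop above: request line, then headers, then body
    method = url = host = port = cookie = None
    data = None
    headers = []
    for line in raw.split('\n'):
        match = re.search(r"\A(GET|POST|PUT|DELETE) (.+) HTTP/[\d.]+\Z", line) if not method else None
        if match:
            method, url = match.group(1), match.group(2)
        elif not line.strip() and data is None and method != "GET":
            data = ""
        elif data is not None:
            data += line
        elif re.search(r"\A\S+:", line):
            key, value = line.split(":", 1)
            value = value.strip()
            if key.upper() == "COOKIE":
                cookie = value
            elif key.upper() == "HOST":
                host, _, port = value.partition(":")
            headers.append((key, value))
    return method, url, host, port or None, cookie, data, headers

print(parse_raw_request(RAW_REQUEST)[:5])
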
def getSafeExString(ex, encoding=None):
"""
Safe way to get the proper exception representation as a string
@@ -4482,11 +4745,44 @@ def getSafeExString(ex, encoding=None):
retVal = ex.message
elif getattr(ex, "msg", None):
retVal = ex.msg
elif isinstance(ex, (list, tuple)) and len(ex) > 1 and isinstance(ex[1], basestring):
retVal = ex[1]

return getUnicode(retVal or "", encoding=encoding).strip()

def safeVariableNaming(value):
"""
Returns escaped safe-representation of a given variable name that can be used in Python evaluated code

>>> safeVariableNaming("foo bar")
'foo__SAFE__20bar'
"""

return re.sub(r"[^\w]", lambda match: "%s%02x" % (SAFE_VARIABLE_MARKER, ord(match.group(0))), value)

def unsafeVariableNaming(value):
"""
Returns unescaped safe-representation of a given variable name

>>> unsafeVariableNaming("foo__SAFE__20bar")
'foo bar'
"""

return re.sub(r"%s([0-9a-f]{2})" % SAFE_VARIABLE_MARKER, lambda match: match.group(1).decode("hex"), value)

def firstNotNone(*args):
"""
Returns first not-None value from a given list of arguments

>>> firstNotNone(None, None, 1, 2, 3)
1
"""

retVal = None

for _ in args:
if _ is not None:
retVal = _
break

return retVal

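safeVariableNaming() escapes every non-word character as SAFE_VARIABLE_MARKER plus its two-digit hex ordinal so the result is a legal Python identifier, and unsafeVariableNaming() reverses it. Judging by the doctests the marker is "__SAFE__"; a round-trip sketch under that assumption (hex decoding done with chr/int so it runs on both Python 2 and 3):

import re

SAFE_VARIABLE_MARKER = "__SAFE__"  # inferred from the doctests above

def safe_variable_naming(value):
    # escape non-word characters as <marker><hex ordinal> to obtain a valid identifier
    return re.sub(r"[^\w]", lambda match: "%s%02x" % (SAFE_VARIABLE_MARKER, ord(match.group(0))), value)

def unsafe_variable_naming(value):
    # reverse the escaping done by safe_variable_naming()
    return re.sub(r"%s([0-9a-f]{2})" % SAFE_VARIABLE_MARKER, lambda match: chr(int(match.group(1), 16)), value)

name = "user-agent value"
escaped = safe_variable_naming(name)
print(escaped)                                   # user__SAFE__2dagent__SAFE__20value
print(unsafe_variable_naming(escaped) == name)   # True
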
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
import functools
import hashlib

from lib.core.threads import getCurrentThreadData
@@ -16,6 +17,7 @@ def cachedmethod(f, cache={}):
Reference: http://code.activestate.com/recipes/325205-cache-decorator-in-python-24/
"""

@functools.wraps(f)
def _(*args, **kwargs):
key = int(hashlib.md5("|".join(str(_) for _ in (f, args, kwargs))).hexdigest(), 16) & 0x7fffffffffffffff
if key not in cache:
@@ -26,6 +28,11 @@ def cachedmethod(f, cache={}):
return _

def stackedmethod(f):
"""
Method using pushValue/popValue functions (fallback function for stack realignment)
"""

@functools.wraps(f)
def _(*args, **kwargs):
threadData = getCurrentThreadData()
originalLevel = len(threadData.valueStack)
@@ -38,4 +45,4 @@ def stackedmethod(f):

return result

return _
return _

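stackedmethod() is a decorator that notes the size of the per-thread value stack before calling the wrapped function and realigns it afterwards, so a function that pushes values but bails out early cannot leave the stack misaligned. Its body is trimmed in the hunk above, so the following is only a self-contained sketch of the idea, with a plain module-level list standing in for threadData.valueStack:

import functools

value_stack = []  # stand-in for the per-thread threadData.valueStack

def stackedmethod(f):
    # trim the stack back to its original depth once the wrapped call finishes
    @functools.wraps(f)
    def _(*args, **kwargs):
        original_level = len(value_stack)
        try:
            result = f(*args, **kwargs)
        finally:
            del value_stack[original_level:]
        return result
    return _

@stackedmethod
def sloppy():
    value_stack.append("pushed but never popped")
    return 42

print(sloppy())           # 42
print(len(value_stack))   # 0 - the leftover push was trimmed
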
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
from lib.core.enums import CONTENT_TYPE
|
||||
from lib.core.enums import DBMS
|
||||
from lib.core.enums import OS
|
||||
from lib.core.enums import POST_HINT
|
||||
@@ -21,6 +22,7 @@ from lib.core.settings import MAXDB_ALIASES
|
||||
from lib.core.settings import SYBASE_ALIASES
|
||||
from lib.core.settings import DB2_ALIASES
|
||||
from lib.core.settings import HSQLDB_ALIASES
|
||||
from lib.core.settings import H2_ALIASES
|
||||
from lib.core.settings import INFORMIX_ALIASES
|
||||
|
||||
FIREBIRD_TYPES = {
|
||||
@@ -194,6 +196,7 @@ DBMS_DICT = {
|
||||
DBMS.SYBASE: (SYBASE_ALIASES, "python-pymssql", "https://github.com/pymssql/pymssql", "sybase"),
|
||||
DBMS.DB2: (DB2_ALIASES, "python ibm-db", "https://github.com/ibmdb/python-ibmdb", "ibm_db_sa"),
|
||||
DBMS.HSQLDB: (HSQLDB_ALIASES, "python jaydebeapi & python-jpype", "https://pypi.python.org/pypi/JayDeBeApi/ & http://jpype.sourceforge.net/", None),
|
||||
DBMS.H2: (H2_ALIASES, None, None, None),
|
||||
DBMS.INFORMIX: (INFORMIX_ALIASES, "python ibm-db", "https://github.com/ibmdb/python-ibmdb", "ibm_db_sa"),
|
||||
}
|
||||
|
||||
@@ -262,6 +265,10 @@ SQL_STATEMENTS = {
|
||||
"commit ",
|
||||
"rollback ",
|
||||
),
|
||||
|
||||
"SQL administration": (
|
||||
"set ",
|
||||
),
|
||||
}
|
||||
|
||||
POST_HINT_CONTENT_TYPES = {
|
||||
@@ -279,6 +286,8 @@ DEPRECATED_OPTIONS = {
|
||||
"--binary": "use '--binary-fields' instead",
|
||||
"--auth-private": "use '--auth-file' instead",
|
||||
"--ignore-401": "use '--ignore-code' instead",
|
||||
"--second-order": "use '--second-url' instead",
|
||||
"--purge-output": "use '--purge' instead",
|
||||
"--check-payload": None,
|
||||
"--check-waf": None,
|
||||
"--pickled-options": "use '--api -c ...' instead",
|
||||
@@ -293,3 +302,31 @@ DEFAULT_DOC_ROOTS = {
|
||||
OS.WINDOWS: ("C:/xampp/htdocs/", "C:/wamp/www/", "C:/Inetpub/wwwroot/"),
|
||||
OS.LINUX: ("/var/www/", "/var/www/html", "/usr/local/apache2/htdocs", "/var/www/nginx-default", "/srv/www") # Reference: https://wiki.apache.org/httpd/DistrosDefaultLayout
|
||||
}
|
||||
|
||||
PART_RUN_CONTENT_TYPES = {
|
||||
"checkDbms": CONTENT_TYPE.TECHNIQUES,
|
||||
"getFingerprint": CONTENT_TYPE.DBMS_FINGERPRINT,
|
||||
"getBanner": CONTENT_TYPE.BANNER,
|
||||
"getCurrentUser": CONTENT_TYPE.CURRENT_USER,
|
||||
"getCurrentDb": CONTENT_TYPE.CURRENT_DB,
|
||||
"getHostname": CONTENT_TYPE.HOSTNAME,
|
||||
"isDba": CONTENT_TYPE.IS_DBA,
|
||||
"getUsers": CONTENT_TYPE.USERS,
|
||||
"getPasswordHashes": CONTENT_TYPE.PASSWORDS,
|
||||
"getPrivileges": CONTENT_TYPE.PRIVILEGES,
|
||||
"getRoles": CONTENT_TYPE.ROLES,
|
||||
"getDbs": CONTENT_TYPE.DBS,
|
||||
"getTables": CONTENT_TYPE.TABLES,
|
||||
"getColumns": CONTENT_TYPE.COLUMNS,
|
||||
"getSchema": CONTENT_TYPE.SCHEMA,
|
||||
"getCount": CONTENT_TYPE.COUNT,
|
||||
"dumpTable": CONTENT_TYPE.DUMP_TABLE,
|
||||
"search": CONTENT_TYPE.SEARCH,
|
||||
"sqlQuery": CONTENT_TYPE.SQL_QUERY,
|
||||
"tableExists": CONTENT_TYPE.COMMON_TABLES,
|
||||
"columnExists": CONTENT_TYPE.COMMON_COLUMNS,
|
||||
"readFile": CONTENT_TYPE.FILE_READ,
|
||||
"writeFile": CONTENT_TYPE.FILE_WRITE,
|
||||
"osCmd": CONTENT_TYPE.OS_CMD,
|
||||
"regRead": CONTENT_TYPE.REG_READ
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
@@ -47,6 +47,7 @@ from lib.core.settings import MIN_BINARY_DISK_DUMP_SIZE
|
||||
from lib.core.settings import TRIM_STDOUT_DUMP_SIZE
|
||||
from lib.core.settings import UNICODE_ENCODING
|
||||
from lib.core.settings import UNSAFE_DUMP_FILEPATH_REPLACEMENT
|
||||
from lib.core.settings import VERSION_STRING
|
||||
from lib.core.settings import WINDOWS_RESERVED_NAMES
|
||||
from thirdparty.magic import magic
|
||||
|
||||
@@ -170,7 +171,7 @@ class Dump(object):
|
||||
def currentDb(self, data):
|
||||
if Backend.isDbms(DBMS.MAXDB):
|
||||
self.string("current database (no practical usage on %s)" % Backend.getIdentifiedDbms(), data, content_type=CONTENT_TYPE.CURRENT_DB)
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.PGSQL, DBMS.HSQLDB):
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.PGSQL, DBMS.HSQLDB, DBMS.H2):
|
||||
self.string("current schema (equivalent to database on %s)" % Backend.getIdentifiedDbms(), data, content_type=CONTENT_TYPE.CURRENT_DB)
|
||||
else:
|
||||
self.string("current database", data, content_type=CONTENT_TYPE.CURRENT_DB)
|
||||
@@ -532,6 +533,7 @@ class Dump(object):
|
||||
elif conf.dumpFormat == DUMP_FORMAT.HTML:
|
||||
dataToDumpFile(dumpFP, "<!DOCTYPE html>\n<html>\n<head>\n")
|
||||
dataToDumpFile(dumpFP, "<meta http-equiv=\"Content-type\" content=\"text/html;charset=%s\">\n" % UNICODE_ENCODING)
|
||||
dataToDumpFile(dumpFP, "<meta name=\"generator\" content=\"%s\" />\n" % VERSION_STRING)
|
||||
dataToDumpFile(dumpFP, "<title>%s</title>\n" % ("%s%s" % ("%s." % db if METADB_SUFFIX not in db else "", table)))
|
||||
dataToDumpFile(dumpFP, HTML_DUMP_CSS_STYLE)
|
||||
dataToDumpFile(dumpFP, "\n</head>\n<body>\n<table>\n<thead>\n<tr>\n")
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
@@ -43,6 +43,7 @@ class DBMS:
|
||||
SQLITE = "SQLite"
|
||||
SYBASE = "Sybase"
|
||||
HSQLDB = "HSQLDB"
|
||||
H2 = "H2"
|
||||
INFORMIX = "Informix"
|
||||
|
||||
class DBMS_DIRECTORY_NAME:
|
||||
@@ -57,6 +58,7 @@ class DBMS_DIRECTORY_NAME:
|
||||
SQLITE = "sqlite"
|
||||
SYBASE = "sybase"
|
||||
HSQLDB = "hsqldb"
|
||||
H2 = "h2"
|
||||
INFORMIX = "informix"
|
||||
|
||||
class CUSTOM_LOGGING:
|
||||
@@ -256,6 +258,7 @@ class PAYLOAD:
|
||||
3: "LIKE single quoted string",
|
||||
4: "Double quoted string",
|
||||
5: "LIKE double quoted string",
|
||||
6: "Identifier (e.g. column name)",
|
||||
}
|
||||
|
||||
RISK = {
|
||||
@@ -275,6 +278,7 @@ class PAYLOAD:
|
||||
6: "TOP",
|
||||
7: "Table name",
|
||||
8: "Column name",
|
||||
9: "Pre-WHERE (non-query)",
|
||||
}
|
||||
|
||||
class METHOD:
|
||||
@@ -306,7 +310,7 @@ class ADJUST_TIME_DELAY:
|
||||
NO = 0
|
||||
YES = 1
|
||||
|
||||
class WEB_API:
|
||||
class WEB_PLATFORM:
|
||||
PHP = "php"
|
||||
ASP = "asp"
|
||||
ASPX = "aspx"
|
||||
@@ -340,34 +344,6 @@ class CONTENT_TYPE:
|
||||
OS_CMD = 24
|
||||
REG_READ = 25
|
||||
|
||||
PART_RUN_CONTENT_TYPES = {
|
||||
"checkDbms": CONTENT_TYPE.TECHNIQUES,
|
||||
"getFingerprint": CONTENT_TYPE.DBMS_FINGERPRINT,
|
||||
"getBanner": CONTENT_TYPE.BANNER,
|
||||
"getCurrentUser": CONTENT_TYPE.CURRENT_USER,
|
||||
"getCurrentDb": CONTENT_TYPE.CURRENT_DB,
|
||||
"getHostname": CONTENT_TYPE.HOSTNAME,
|
||||
"isDba": CONTENT_TYPE.IS_DBA,
|
||||
"getUsers": CONTENT_TYPE.USERS,
|
||||
"getPasswordHashes": CONTENT_TYPE.PASSWORDS,
|
||||
"getPrivileges": CONTENT_TYPE.PRIVILEGES,
|
||||
"getRoles": CONTENT_TYPE.ROLES,
|
||||
"getDbs": CONTENT_TYPE.DBS,
|
||||
"getTables": CONTENT_TYPE.TABLES,
|
||||
"getColumns": CONTENT_TYPE.COLUMNS,
|
||||
"getSchema": CONTENT_TYPE.SCHEMA,
|
||||
"getCount": CONTENT_TYPE.COUNT,
|
||||
"dumpTable": CONTENT_TYPE.DUMP_TABLE,
|
||||
"search": CONTENT_TYPE.SEARCH,
|
||||
"sqlQuery": CONTENT_TYPE.SQL_QUERY,
|
||||
"tableExists": CONTENT_TYPE.COMMON_TABLES,
|
||||
"columnExists": CONTENT_TYPE.COMMON_COLUMNS,
|
||||
"readFile": CONTENT_TYPE.FILE_READ,
|
||||
"writeFile": CONTENT_TYPE.FILE_WRITE,
|
||||
"osCmd": CONTENT_TYPE.OS_CMD,
|
||||
"regRead": CONTENT_TYPE.REG_READ
|
||||
}
|
||||
|
||||
class CONTENT_STATUS:
|
||||
IN_PROGRESS = 0
|
||||
COMPLETE = 1
|
||||
@@ -382,6 +358,7 @@ class AUTOCOMPLETE_TYPE:
|
||||
SQL = 0
|
||||
OS = 1
|
||||
SQLMAP = 2
|
||||
API = 3
|
||||
|
||||
class NOTE:
|
||||
FALSE_POSITIVE_OR_UNEXPLOITABLE = "false positive or unexploitable"
|
||||
@@ -401,3 +378,7 @@ class TIMEOUT_STATE:
|
||||
NORMAL = 0
|
||||
EXCEPTION = 1
|
||||
TIMEOUT = 2
|
||||
|
||||
class HINT:
|
||||
PREPEND = 0
|
||||
APPEND = 1
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,16 +1,14 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
import binascii
|
||||
import cookielib
|
||||
import glob
|
||||
import inspect
|
||||
import logging
|
||||
import httplib
|
||||
import os
|
||||
import random
|
||||
import re
|
||||
@@ -37,19 +35,16 @@ from lib.core.common import checkFile
|
||||
from lib.core.common import dataToStdout
|
||||
from lib.core.common import getPublicTypeMembers
|
||||
from lib.core.common import getSafeExString
|
||||
from lib.core.common import extractRegexResult
|
||||
from lib.core.common import filterStringValue
|
||||
from lib.core.common import findLocalPort
|
||||
from lib.core.common import findPageForms
|
||||
from lib.core.common import getConsoleWidth
|
||||
from lib.core.common import getFileItems
|
||||
from lib.core.common import getFileType
|
||||
from lib.core.common import getUnicode
|
||||
from lib.core.common import normalizePath
|
||||
from lib.core.common import ntToPosixSlashes
|
||||
from lib.core.common import openFile
|
||||
from lib.core.common import parseRequestFile
|
||||
from lib.core.common import parseTargetDirect
|
||||
from lib.core.common import parseTargetUrl
|
||||
from lib.core.common import paths
|
||||
from lib.core.common import randomStr
|
||||
from lib.core.common import readCachedFileContent
|
||||
@@ -58,6 +53,7 @@ from lib.core.common import resetCookieJar
|
||||
from lib.core.common import runningAsAdmin
|
||||
from lib.core.common import safeExpandUser
|
||||
from lib.core.common import saveConfig
|
||||
from lib.core.common import setColor
|
||||
from lib.core.common import setOptimize
|
||||
from lib.core.common import setPaths
|
||||
from lib.core.common import singleTimeWarnMessage
|
||||
@@ -86,6 +82,7 @@ from lib.core.enums import PROXY_TYPE
|
||||
from lib.core.enums import REFLECTIVE_COUNTER
|
||||
from lib.core.enums import WIZARD
|
||||
from lib.core.exception import SqlmapConnectionException
|
||||
from lib.core.exception import SqlmapDataException
|
||||
from lib.core.exception import SqlmapFilePathException
|
||||
from lib.core.exception import SqlmapGenericException
|
||||
from lib.core.exception import SqlmapInstallationException
|
||||
@@ -100,17 +97,14 @@ from lib.core.exception import SqlmapUnsupportedDBMSException
|
||||
from lib.core.exception import SqlmapUserQuitException
|
||||
from lib.core.log import FORMATTER
|
||||
from lib.core.optiondict import optDict
|
||||
from lib.core.settings import BURP_REQUEST_REGEX
|
||||
from lib.core.settings import BURP_XML_HISTORY_REGEX
|
||||
from lib.core.settings import CODECS_LIST_PAGE
|
||||
from lib.core.settings import CRAWL_EXCLUDE_EXTENSIONS
|
||||
from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
|
||||
from lib.core.settings import DBMS_ALIASES
|
||||
from lib.core.settings import DEFAULT_PAGE_ENCODING
|
||||
from lib.core.settings import DEFAULT_TOR_HTTP_PORTS
|
||||
from lib.core.settings import DEFAULT_TOR_SOCKS_PORTS
|
||||
from lib.core.settings import DEFAULT_USER_AGENT
|
||||
from lib.core.settings import DUMMY_URL
|
||||
from lib.core.settings import INJECT_HERE_REGEX
|
||||
from lib.core.settings import IS_WIN
|
||||
from lib.core.settings import KB_CHARS_BOUNDARY_CHAR
|
||||
from lib.core.settings import KB_CHARS_LOW_FREQUENCY_ALPHABET
|
||||
@@ -120,8 +114,6 @@ from lib.core.settings import MAX_NUMBER_OF_THREADS
|
||||
from lib.core.settings import NULL
|
||||
from lib.core.settings import PARAMETER_SPLITTING_REGEX
|
||||
from lib.core.settings import PRECONNECT_CANDIDATE_TIMEOUT
|
||||
from lib.core.settings import PROBLEMATIC_CUSTOM_INJECTION_PATTERNS
|
||||
from lib.core.settings import SITE
|
||||
from lib.core.settings import SOCKET_PRE_CONNECT_QUEUE_SIZE
|
||||
from lib.core.settings import SQLMAP_ENVIRONMENT_PREFIX
|
||||
from lib.core.settings import SUPPORTED_DBMS
|
||||
@@ -131,8 +123,6 @@ from lib.core.settings import UNICODE_ENCODING
|
||||
from lib.core.settings import UNION_CHAR_REGEX
|
||||
from lib.core.settings import UNKNOWN_DBMS_VERSION
|
||||
from lib.core.settings import URI_INJECTABLE_REGEX
|
||||
from lib.core.settings import VERSION_STRING
|
||||
from lib.core.settings import WEBSCARAB_SPLITTER
|
||||
from lib.core.threads import getCurrentThreadData
|
||||
from lib.core.threads import setDaemon
|
||||
from lib.core.update import update
|
||||
@@ -174,201 +164,6 @@ try:
|
||||
except NameError:
|
||||
WindowsError = None
|
||||
|
||||
def _feedTargetsDict(reqFile, addedTargetUrls):
|
||||
"""
|
||||
Parses web scarab and burp logs and adds results to the target URL list
|
||||
"""
|
||||
|
||||
def _parseWebScarabLog(content):
|
||||
"""
|
||||
Parses web scarab logs (POST method not supported)
|
||||
"""
|
||||
|
||||
reqResList = content.split(WEBSCARAB_SPLITTER)
|
||||
|
||||
for request in reqResList:
|
||||
url = extractRegexResult(r"URL: (?P<result>.+?)\n", request, re.I)
|
||||
method = extractRegexResult(r"METHOD: (?P<result>.+?)\n", request, re.I)
|
||||
cookie = extractRegexResult(r"COOKIE: (?P<result>.+?)\n", request, re.I)
|
||||
|
||||
if not method or not url:
|
||||
logger.debug("not a valid WebScarab log data")
|
||||
continue
|
||||
|
||||
if method.upper() == HTTPMETHOD.POST:
|
||||
warnMsg = "POST requests from WebScarab logs aren't supported "
|
||||
warnMsg += "as their body content is stored in separate files. "
|
||||
warnMsg += "Nevertheless you can use -r to load them individually."
|
||||
logger.warning(warnMsg)
|
||||
continue
|
||||
|
||||
if not(conf.scope and not re.search(conf.scope, url, re.I)):
|
||||
if not kb.targets or url not in addedTargetUrls:
|
||||
kb.targets.add((url, method, None, cookie, None))
|
||||
addedTargetUrls.add(url)
|
||||
|
||||
def _parseBurpLog(content):
|
||||
"""
|
||||
Parses burp logs
|
||||
"""
|
||||
|
||||
if not re.search(BURP_REQUEST_REGEX, content, re.I | re.S):
|
||||
if re.search(BURP_XML_HISTORY_REGEX, content, re.I | re.S):
|
||||
reqResList = []
|
||||
for match in re.finditer(BURP_XML_HISTORY_REGEX, content, re.I | re.S):
|
||||
port, request = match.groups()
|
||||
try:
|
||||
request = request.decode("base64")
|
||||
except binascii.Error:
|
||||
continue
|
||||
_ = re.search(r"%s:.+" % re.escape(HTTP_HEADER.HOST), request)
|
||||
if _:
|
||||
host = _.group(0).strip()
|
||||
if not re.search(r":\d+\Z", host):
|
||||
request = request.replace(host, "%s:%d" % (host, int(port)))
|
||||
reqResList.append(request)
|
||||
else:
|
||||
reqResList = [content]
|
||||
else:
|
||||
reqResList = re.finditer(BURP_REQUEST_REGEX, content, re.I | re.S)
|
||||
|
||||
for match in reqResList:
|
||||
request = match if isinstance(match, basestring) else match.group(0)
|
||||
request = re.sub(r"\A[^\w]+", "", request)
|
||||
|
||||
schemePort = re.search(r"(http[\w]*)\:\/\/.*?\:([\d]+).+?={10,}", request, re.I | re.S)
|
||||
|
||||
if schemePort:
|
||||
scheme = schemePort.group(1)
|
||||
port = schemePort.group(2)
|
||||
request = re.sub(r"\n=+\Z", "", request.split(schemePort.group(0))[-1].lstrip())
|
||||
else:
|
||||
scheme, port = None, None
|
||||
|
||||
if not re.search(r"^[\n]*(%s).*?\sHTTP\/" % "|".join(getPublicTypeMembers(HTTPMETHOD, True)), request, re.I | re.M):
|
||||
continue
|
||||
|
||||
if re.search(r"^[\n]*%s.*?\.(%s)\sHTTP\/" % (HTTPMETHOD.GET, "|".join(CRAWL_EXCLUDE_EXTENSIONS)), request, re.I | re.M):
|
||||
continue
|
||||
|
||||
getPostReq = False
|
||||
url = None
|
||||
host = None
|
||||
method = None
|
||||
data = None
|
||||
cookie = None
|
||||
params = False
|
||||
newline = None
|
||||
lines = request.split('\n')
|
||||
headers = []
|
||||
|
||||
for index in xrange(len(lines)):
|
||||
line = lines[index]
|
||||
|
||||
if not line.strip() and index == len(lines) - 1:
|
||||
break
|
||||
|
||||
newline = "\r\n" if line.endswith('\r') else '\n'
|
||||
line = line.strip('\r')
|
||||
match = re.search(r"\A(%s) (.+) HTTP/[\d.]+\Z" % "|".join(getPublicTypeMembers(HTTPMETHOD, True)), line) if not method else None
|
||||
|
||||
if len(line.strip()) == 0 and method and method != HTTPMETHOD.GET and data is None:
|
||||
data = ""
|
||||
params = True
|
||||
|
||||
elif match:
|
||||
method = match.group(1)
|
||||
url = match.group(2)
|
||||
|
||||
if any(_ in line for _ in ('?', '=', kb.customInjectionMark)):
|
||||
params = True
|
||||
|
||||
getPostReq = True
|
||||
|
||||
# POST parameters
|
||||
elif data is not None and params:
|
||||
data += "%s%s" % (line, newline)
|
||||
|
||||
# GET parameters
|
||||
elif "?" in line and "=" in line and ": " not in line:
|
||||
params = True
|
||||
|
||||
# Headers
|
||||
elif re.search(r"\A\S+:", line):
|
||||
key, value = line.split(":", 1)
|
||||
value = value.strip().replace("\r", "").replace("\n", "")
|
||||
|
||||
# Cookie and Host headers
|
||||
if key.upper() == HTTP_HEADER.COOKIE.upper():
|
||||
cookie = value
|
||||
elif key.upper() == HTTP_HEADER.HOST.upper():
|
||||
if '://' in value:
|
||||
scheme, value = value.split('://')[:2]
|
||||
splitValue = value.split(":")
|
||||
host = splitValue[0]
|
||||
|
||||
if len(splitValue) > 1:
|
||||
port = filterStringValue(splitValue[1], "[0-9]")
|
||||
|
||||
# Avoid to add a static content length header to
|
||||
# headers and consider the following lines as
|
||||
# POSTed data
|
||||
if key.upper() == HTTP_HEADER.CONTENT_LENGTH.upper():
|
||||
params = True
|
||||
|
||||
# Avoid proxy and connection type related headers
|
||||
elif key not in (HTTP_HEADER.PROXY_CONNECTION, HTTP_HEADER.CONNECTION):
|
||||
headers.append((getUnicode(key), getUnicode(value)))
|
||||
|
||||
if kb.customInjectionMark in re.sub(PROBLEMATIC_CUSTOM_INJECTION_PATTERNS, "", value or ""):
|
||||
params = True
|
||||
|
||||
data = data.rstrip("\r\n") if data else data
|
||||
|
||||
if getPostReq and (params or cookie):
|
||||
if not port and isinstance(scheme, basestring) and scheme.lower() == "https":
|
||||
port = "443"
|
||||
elif not scheme and port == "443":
|
||||
scheme = "https"
|
||||
|
||||
if conf.forceSSL:
|
||||
scheme = "https"
|
||||
port = port or "443"
|
||||
|
||||
if not host:
|
||||
errMsg = "invalid format of a request file"
|
||||
raise SqlmapSyntaxException(errMsg)
|
||||
|
||||
if not url.startswith("http"):
|
||||
url = "%s://%s:%s%s" % (scheme or "http", host, port or "80", url)
|
||||
scheme = None
|
||||
port = None
|
||||
|
||||
if not(conf.scope and not re.search(conf.scope, url, re.I)):
|
||||
if not kb.targets or url not in addedTargetUrls:
|
||||
kb.targets.add((url, conf.method or method, data, cookie, tuple(headers)))
|
||||
addedTargetUrls.add(url)
|
||||
|
||||
checkFile(reqFile)
|
||||
try:
|
||||
with openFile(reqFile, "rb") as f:
|
||||
content = f.read()
|
||||
except (IOError, OSError, MemoryError), ex:
|
||||
errMsg = "something went wrong while trying "
|
||||
errMsg += "to read the content of file '%s' ('%s')" % (reqFile, getSafeExString(ex))
|
||||
raise SqlmapSystemException(errMsg)
|
||||
|
||||
if conf.scope:
|
||||
logger.info("using regular expression '%s' for filtering targets" % conf.scope)
|
||||
|
||||
_parseBurpLog(content)
|
||||
_parseWebScarabLog(content)
|
||||
|
||||
if not addedTargetUrls:
|
||||
errMsg = "unable to find usable request(s) "
|
||||
errMsg += "in provided file ('%s')" % reqFile
|
||||
raise SqlmapGenericException(errMsg)
|
||||
|
||||
def _loadQueries():
|
||||
"""
|
||||
Loads queries from 'xml/queries.xml' file.
|
||||
@@ -414,7 +209,7 @@ def _setMultipleTargets():
|
||||
"""
|
||||
|
||||
initialTargetsCount = len(kb.targets)
|
||||
addedTargetUrls = set()
|
||||
seen = set()
|
||||
|
||||
if not conf.logFile:
|
||||
return
|
||||
@@ -427,7 +222,11 @@ def _setMultipleTargets():
|
||||
raise SqlmapFilePathException(errMsg)
|
||||
|
||||
if os.path.isfile(conf.logFile):
|
||||
_feedTargetsDict(conf.logFile, addedTargetUrls)
|
||||
for target in parseRequestFile(conf.logFile):
|
||||
url = target[0]
|
||||
if url not in seen:
|
||||
kb.targets.add(target)
|
||||
seen.add(url)
|
||||
|
||||
elif os.path.isdir(conf.logFile):
|
||||
files = os.listdir(conf.logFile)
|
||||
@@ -437,7 +236,11 @@ def _setMultipleTargets():
|
||||
if not re.search(r"([\d]+)\-request", reqFile):
|
||||
continue
|
||||
|
||||
_feedTargetsDict(os.path.join(conf.logFile, reqFile), addedTargetUrls)
|
||||
for target in parseRequestFile(os.path.join(conf.logFile, reqFile)):
|
||||
url = target[0]
|
||||
if url not in seen:
|
||||
kb.targets.add(target)
|
||||
seen.add(url)
|
||||
|
||||
else:
|
||||
errMsg = "the specified list of targets is not a file "
|
||||
@@ -478,22 +281,37 @@ def _setRequestFromFile():
|
||||
textual file, parses it and saves the information into the knowledge base.
|
||||
"""
|
||||
|
||||
if not conf.requestFile:
|
||||
return
|
||||
if conf.requestFile:
|
||||
conf.requestFile = safeExpandUser(conf.requestFile)
|
||||
seen = set()
|
||||
|
||||
addedTargetUrls = set()
|
||||
if not os.path.isfile(conf.requestFile):
|
||||
errMsg = "specified HTTP request file '%s' " % conf.requestFile
|
||||
errMsg += "does not exist"
|
||||
raise SqlmapFilePathException(errMsg)
|
||||
|
||||
conf.requestFile = safeExpandUser(conf.requestFile)
|
||||
infoMsg = "parsing HTTP request from '%s'" % conf.requestFile
|
||||
logger.info(infoMsg)
|
||||
|
||||
if not os.path.isfile(conf.requestFile):
|
||||
errMsg = "specified HTTP request file '%s' " % conf.requestFile
|
||||
errMsg += "does not exist"
|
||||
raise SqlmapFilePathException(errMsg)
|
||||
for target in parseRequestFile(conf.requestFile):
|
||||
url = target[0]
|
||||
if url not in seen:
|
||||
kb.targets.add(target)
|
||||
seen.add(url)
|
||||
|
||||
infoMsg = "parsing HTTP request from '%s'" % conf.requestFile
|
||||
logger.info(infoMsg)
|
||||
if conf.secondReq:
|
||||
conf.secondReq = safeExpandUser(conf.secondReq)
|
||||
|
||||
_feedTargetsDict(conf.requestFile, addedTargetUrls)
|
||||
if not os.path.isfile(conf.secondReq):
|
||||
errMsg = "specified second-order HTTP request file '%s' " % conf.secondReq
|
||||
errMsg += "does not exist"
|
||||
raise SqlmapFilePathException(errMsg)
|
||||
|
||||
infoMsg = "parsing second-order HTTP request from '%s'" % conf.secondReq
|
||||
logger.info(infoMsg)
|
||||
|
||||
target = parseRequestFile(conf.secondReq, False).next()
|
||||
kb.secondReq = target
|
||||
|
||||
def _setCrawler():
|
||||
if not conf.crawlDepth:
|
||||
@@ -788,22 +606,22 @@ def _setMetasploit():
|
||||
raise SqlmapFilePathException(errMsg)
|
||||
|
||||
def _setWriteFile():
|
||||
if not conf.wFile:
|
||||
if not conf.fileWrite:
|
||||
return
|
||||
|
||||
debugMsg = "setting the write file functionality"
|
||||
logger.debug(debugMsg)
|
||||
|
||||
if not os.path.exists(conf.wFile):
|
||||
errMsg = "the provided local file '%s' does not exist" % conf.wFile
|
||||
if not os.path.exists(conf.fileWrite):
|
||||
errMsg = "the provided local file '%s' does not exist" % conf.fileWrite
|
||||
raise SqlmapFilePathException(errMsg)
|
||||
|
||||
if not conf.dFile:
|
||||
if not conf.fileDest:
|
||||
errMsg = "you did not provide the back-end DBMS absolute path "
|
||||
errMsg += "where you want to write the local file '%s'" % conf.wFile
|
||||
errMsg += "where you want to write the local file '%s'" % conf.fileWrite
|
||||
raise SqlmapMissingMandatoryOptionException(errMsg)
|
||||
|
||||
conf.wFileType = getFileType(conf.wFile)
|
||||
conf.fileWriteType = getFileType(conf.fileWrite)
|
||||
|
||||
def _setOS():
|
||||
"""
|
||||
@@ -880,6 +698,22 @@ def _setDBMS():
|
||||
|
||||
break
|
||||
|
||||
def _listTamperingFunctions():
|
||||
"""
|
||||
Lists available tamper functions
|
||||
"""
|
||||
|
||||
if conf.listTampers:
|
||||
infoMsg = "listing available tamper scripts\n"
|
||||
logger.info(infoMsg)
|
||||
|
||||
for script in sorted(glob.glob(os.path.join(paths.SQLMAP_TAMPER_PATH, "*.py"))):
|
||||
content = openFile(script, "rb").read()
|
||||
match = re.search(r'(?s)__priority__.+"""(.+)"""', content)
|
||||
if match:
|
||||
comment = match.group(1).strip()
|
||||
dataToStdout("* %s - %s\n" % (setColor(os.path.basename(script), "yellow"), re.sub(r" *\n *", " ", comment.split("\n\n")[0].strip())))
|
||||
|
||||
def _setTamperingFunctions():
|
||||
"""
|
||||
Loads tampering functions from given script(s)
|
||||
@@ -988,7 +822,7 @@ def _setTamperingFunctions():
|
||||
|
||||
def _setWafFunctions():
|
||||
"""
|
||||
Loads WAF/IPS/IDS detecting functions from script(s)
|
||||
Loads WAF/IPS detecting functions from script(s)
|
||||
"""
|
||||
|
||||
if conf.identifyWaf:
|
||||
@@ -1059,6 +893,12 @@ def _setSocketPreConnect():
|
||||
family, type, proto, address = key
|
||||
s = socket.socket(family, type, proto)
|
||||
s._connect(address)
|
||||
try:
|
||||
if type == socket.SOCK_STREAM:
|
||||
# Reference: https://www.techrepublic.com/article/tcp-ip-options-for-high-performance-data-transmission/
|
||||
s.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
except:
|
||||
pass
|
||||
with kb.locks.socket:
|
||||
socket._ready[key].append((s._sock, time.time()))
|
||||
except KeyboardInterrupt:
|
||||
@@ -1237,7 +1077,7 @@ def _setSafeVisit():
|
||||
key, value = line.split(':', 1)
|
||||
value = value.strip()
|
||||
kb.safeReq.headers[key] = value
|
||||
if key == HTTP_HEADER.HOST:
|
||||
if key.upper() == HTTP_HEADER.HOST.upper():
|
||||
if not value.startswith("http"):
|
||||
scheme = "http"
|
||||
if value.endswith(":443"):
|
||||
@@ -1422,14 +1262,6 @@ def _setHTTPExtraHeaders():
|
||||
# Reference: http://stackoverflow.com/a/1383359
|
||||
conf.httpHeaders.append((HTTP_HEADER.CACHE_CONTROL, "no-cache"))
|
||||
|
||||
def _defaultHTTPUserAgent():
|
||||
"""
|
||||
@return: default sqlmap HTTP User-Agent header
|
||||
@rtype: C{str}
|
||||
"""
|
||||
|
||||
return "%s (%s)" % (VERSION_STRING, SITE)
|
||||
|
||||
def _setHTTPUserAgent():
|
||||
"""
|
||||
Set the HTTP User-Agent header.
|
||||
@@ -1469,12 +1301,12 @@ def _setHTTPUserAgent():
|
||||
_ = True
|
||||
|
||||
for header, _ in conf.httpHeaders:
|
||||
if header == HTTP_HEADER.USER_AGENT:
|
||||
if header.upper() == HTTP_HEADER.USER_AGENT.upper():
|
||||
_ = False
|
||||
break
|
||||
|
||||
if _:
|
||||
conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, _defaultHTTPUserAgent()))
|
||||
conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, DEFAULT_USER_AGENT))
|
||||
|
||||
else:
|
||||
if not kb.userAgents:
|
||||
@@ -1489,10 +1321,10 @@ def _setHTTPUserAgent():
|
||||
warnMsg += "file '%s'" % paths.USER_AGENTS
|
||||
logger.warn(warnMsg)
|
||||
|
||||
conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, _defaultHTTPUserAgent()))
|
||||
conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, DEFAULT_USER_AGENT))
|
||||
return
|
||||
|
||||
userAgent = random.sample(kb.userAgents or [_defaultHTTPUserAgent()], 1)[0]
|
||||
userAgent = random.sample(kb.userAgents or [DEFAULT_USER_AGENT], 1)[0]
|
||||
|
||||
infoMsg = "fetched random HTTP User-Agent header value '%s' from " % userAgent
|
||||
infoMsg += "file '%s'" % paths.USER_AGENTS
|
||||
@@ -1533,6 +1365,19 @@ def _setHTTPCookies():
|
||||
|
||||
conf.httpHeaders.append((HTTP_HEADER.COOKIE, conf.cookie))
|
||||
|
||||
def _setHostname():
|
||||
"""
|
||||
Set value conf.hostname
|
||||
"""
|
||||
|
||||
if conf.url:
|
||||
try:
|
||||
conf.hostname = urlparse.urlsplit(conf.url).netloc.split(':')[0]
|
||||
except ValueError, ex:
|
||||
errMsg = "problem occurred while "
|
||||
errMsg += "parsing an URL '%s' ('%s')" % (conf.url, getSafeExString(ex))
|
||||
raise SqlmapDataException(errMsg)
|
||||
|
||||
def _setHTTPTimeout():
|
||||
"""
|
||||
Set the HTTP timeout
|
||||
@@ -1590,7 +1435,7 @@ def _createTemporaryDirectory():
|
||||
try:
|
||||
if not os.path.isdir(tempfile.gettempdir()):
|
||||
os.makedirs(tempfile.gettempdir())
|
||||
except (OSError, IOError, WindowsError), ex:
|
||||
except Exception, ex:
|
||||
warnMsg = "there has been a problem while accessing "
|
||||
warnMsg += "system's temporary directory location(s) ('%s'). Please " % getSafeExString(ex)
|
||||
warnMsg += "make sure that there is enough disk space left. If problem persists, "
|
||||
@@ -1601,7 +1446,7 @@ def _createTemporaryDirectory():
|
||||
if "sqlmap" not in (tempfile.tempdir or "") or conf.tmpDir and tempfile.tempdir == conf.tmpDir:
|
||||
try:
|
||||
tempfile.tempdir = tempfile.mkdtemp(prefix="sqlmap", suffix=str(os.getpid()))
|
||||
except (OSError, IOError, WindowsError):
|
||||
except:
|
||||
tempfile.tempdir = os.path.join(paths.SQLMAP_HOME_PATH, "tmp", "sqlmap%s%d" % (randomStr(6), os.getpid()))
|
||||
|
||||
kb.tempDir = tempfile.tempdir
|
||||
@@ -1609,7 +1454,7 @@ def _createTemporaryDirectory():
|
||||
if not os.path.isdir(tempfile.tempdir):
|
||||
try:
|
||||
os.makedirs(tempfile.tempdir)
|
||||
except (OSError, IOError, WindowsError), ex:
|
||||
except Exception, ex:
|
||||
errMsg = "there has been a problem while setting "
|
||||
errMsg += "temporary directory location ('%s')" % getSafeExString(ex)
|
||||
raise SqlmapSystemException(errMsg)
|
||||
@@ -1672,15 +1517,17 @@ def _cleanupOptions():
|
||||
|
||||
if conf.url:
|
||||
conf.url = conf.url.strip()
|
||||
if not re.search(r"\A\w+://", conf.url):
|
||||
conf.url = "http://%s" % conf.url
|
||||
|
||||
if conf.rFile:
|
||||
conf.rFile = ntToPosixSlashes(normalizePath(conf.rFile))
|
||||
if conf.fileRead:
|
||||
conf.fileRead = ntToPosixSlashes(normalizePath(conf.fileRead))
|
||||
|
||||
if conf.wFile:
|
||||
conf.wFile = ntToPosixSlashes(normalizePath(conf.wFile))
|
||||
if conf.fileWrite:
|
||||
conf.fileWrite = ntToPosixSlashes(normalizePath(conf.fileWrite))
|
||||
|
||||
if conf.dFile:
|
||||
conf.dFile = ntToPosixSlashes(normalizePath(conf.dFile))
|
||||
if conf.fileDest:
|
||||
conf.fileDest = ntToPosixSlashes(normalizePath(conf.fileDest))
|
||||
|
||||
if conf.sitemapUrl and not conf.sitemapUrl.lower().startswith("http"):
|
||||
conf.sitemapUrl = "http%s://%s" % ('s' if conf.forceSSL else '', conf.sitemapUrl)
|
||||
@@ -1697,14 +1544,6 @@ def _cleanupOptions():
|
||||
if conf.optimize:
|
||||
setOptimize()
|
||||
|
||||
match = re.search(INJECT_HERE_REGEX, conf.data or "")
|
||||
if match:
|
||||
kb.customInjectionMark = match.group(0)
|
||||
|
||||
match = re.search(INJECT_HERE_REGEX, conf.url or "")
|
||||
if match:
|
||||
kb.customInjectionMark = match.group(0)
|
||||
|
||||
if conf.os:
|
||||
conf.os = conf.os.capitalize()
|
||||
|
||||
@@ -1722,16 +1561,33 @@ def _cleanupOptions():
|
||||
|
||||
if conf.testFilter:
|
||||
conf.testFilter = conf.testFilter.strip('*+')
|
||||
conf.testFilter = re.sub(r"([^.])([*+])", "\g<1>.\g<2>", conf.testFilter)
|
||||
conf.testFilter = re.sub(r"([^.])([*+])", r"\g<1>.\g<2>", conf.testFilter)
|
||||
|
||||
try:
|
||||
re.compile(conf.testFilter)
|
||||
except re.error:
|
||||
conf.testFilter = re.escape(conf.testFilter)
|
||||
|
||||
if conf.csrfToken:
|
||||
original = conf.csrfToken
|
||||
try:
|
||||
re.compile(conf.csrfToken)
|
||||
|
||||
if re.escape(conf.csrfToken) != conf.csrfToken:
|
||||
message = "provided value for option '--csrf-token' is a regular expression? [Y/n] "
|
||||
if not readInput(message, default='Y', boolean=True):
|
||||
conf.csrfToken = re.escape(conf.csrfToken)
|
||||
except re.error:
|
||||
conf.csrfToken = re.escape(conf.csrfToken)
|
||||
finally:
|
||||
class _(unicode):
|
||||
pass
|
||||
conf.csrfToken = _(conf.csrfToken)
|
||||
conf.csrfToken._original = original
|
||||
|
||||
if conf.testSkip:
|
||||
conf.testSkip = conf.testSkip.strip('*+')
|
||||
conf.testSkip = re.sub(r"([^.])([*+])", "\g<1>.\g<2>", conf.testSkip)
|
||||
conf.testSkip = re.sub(r"([^.])([*+])", r"\g<1>.\g<2>", conf.testSkip)
|
||||
|
||||
try:
|
||||
re.compile(conf.testSkip)
|
||||
@@ -1802,6 +1658,9 @@ def _cleanupOptions():
|
||||
if any((conf.proxy, conf.proxyFile, conf.tor)):
|
||||
conf.disablePrecon = True
|
||||
|
||||
if conf.dummy:
|
||||
conf.batch = True
|
||||
|
||||
threadData = getCurrentThreadData()
|
||||
threadData.reset()
|
||||
|
||||
@@ -1816,23 +1675,13 @@ def _cleanupEnvironment():
|
||||
if hasattr(socket, "_ready"):
|
||||
socket._ready.clear()
|
||||
|
||||
def _dirtyPatches():
|
||||
def _purge():
|
||||
"""
|
||||
Place for "dirty" Python related patches
|
||||
Safely removes (purges) sqlmap data directory.
|
||||
"""
|
||||
|
||||
httplib._MAXLINE = 1 * 1024 * 1024 # accept overly long result lines (e.g. SQLi results in HTTP header responses)
|
||||
|
||||
if IS_WIN:
|
||||
from thirdparty.wininetpton import win_inet_pton # add support for inet_pton() on Windows OS
|
||||
|
||||
def _purgeOutput():
|
||||
"""
|
||||
Safely removes (purges) output directory.
|
||||
"""
|
||||
|
||||
if conf.purgeOutput:
|
||||
purge(paths.SQLMAP_OUTPUT_PATH)
|
||||
if conf.purge:
|
||||
purge(paths.SQLMAP_HOME_PATH)
|
||||
|
||||
def _setConfAttributes():
|
||||
"""
|
||||
@@ -1870,7 +1719,7 @@ def _setConfAttributes():
|
||||
conf.tests = []
|
||||
conf.trafficFP = None
|
||||
conf.HARCollectorFactory = None
|
||||
conf.wFileType = None
|
||||
conf.fileWriteType = None
|
||||
|
||||
def _setKnowledgeBaseAttributes(flushAll=True):
|
||||
"""
|
||||
@@ -1884,6 +1733,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
|
||||
kb.absFilePaths = set()
|
||||
kb.adjustTimeDelay = None
|
||||
kb.alerted = False
|
||||
kb.aliasName = randomStr()
|
||||
kb.alwaysRefresh = None
|
||||
kb.arch = None
|
||||
kb.authHeader = None
|
||||
@@ -2022,6 +1872,8 @@ def _setKnowledgeBaseAttributes(flushAll=True):
|
||||
kb.rowXmlMode = False
|
||||
kb.safeCharEncode = False
|
||||
kb.safeReq = AttribDict()
|
||||
kb.secondReq = None
|
||||
kb.serverHeader = None
|
||||
kb.singleLogFlags = set()
|
||||
kb.skipSeqMatcher = False
|
||||
kb.reduceTests = None
|
||||
@@ -2411,10 +2263,18 @@ def _basicOptionValidation():
|
||||
errMsg = "switch '--eta' is incompatible with option '-v'"
|
||||
raise SqlmapSyntaxException(errMsg)
|
||||
|
||||
if conf.secondUrl and conf.secondReq:
|
||||
errMsg = "option '--second-url' is incompatible with option '--second-req')"
|
||||
raise SqlmapSyntaxException(errMsg)
|
||||
|
||||
if conf.direct and conf.url:
|
||||
errMsg = "option '-d' is incompatible with option '-u' ('--url')"
|
||||
raise SqlmapSyntaxException(errMsg)
|
||||
|
||||
if conf.direct and conf.dbms:
|
||||
errMsg = "option '-d' is incompatible with option '--dbms'"
|
||||
raise SqlmapSyntaxException(errMsg)
|
||||
|
||||
if conf.identifyWaf and conf.skipWaf:
|
||||
errMsg = "switch '--identify-waf' is incompatible with switch '--skip-waf'"
|
||||
raise SqlmapSyntaxException(errMsg)
|
||||
@@ -2581,6 +2441,10 @@ def _basicOptionValidation():
|
||||
errMsg = "value for option '--union-char' must be an alpha-numeric value (e.g. 1)"
|
||||
raise SqlmapSyntaxException(errMsg)
|
||||
|
||||
if conf.hashFile and any((conf.direct, conf.url, conf.logFile, conf.bulkFile, conf.googleDork, conf.configFile, conf.requestFile, conf.updateAll, conf.smokeTest, conf.liveTest, conf.wizard, conf.dependencies, conf.purge, conf.sitemapUrl, conf.listTampers)):
|
||||
errMsg = "option '--crack' should be used as a standalone"
|
||||
raise SqlmapSyntaxException(errMsg)
|
||||
|
||||
if isinstance(conf.uCols, basestring):
|
||||
if not conf.uCols.isdigit() and ("-" not in conf.uCols or len(conf.uCols.split("-")) != 2):
|
||||
errMsg = "value for option '--union-cols' must be a range with hyphon "
|
||||
@@ -2633,8 +2497,7 @@ def init():
|
||||
_setRequestFromFile()
|
||||
_cleanupOptions()
|
||||
_cleanupEnvironment()
|
||||
_dirtyPatches()
|
||||
_purgeOutput()
|
||||
_purge()
|
||||
_checkDependencies()
|
||||
_createTemporaryDirectory()
|
||||
_basicOptionValidation()
|
||||
@@ -2643,6 +2506,7 @@ def init():
|
||||
_setDNSServer()
|
||||
_adjustLoggingFormatter()
|
||||
_setMultipleTargets()
|
||||
_listTamperingFunctions()
|
||||
_setTamperingFunctions()
|
||||
_setWafFunctions()
|
||||
_setTrafficOutputFP()
|
||||
@@ -2650,10 +2514,10 @@ def init():
|
||||
_resolveCrossReferences()
|
||||
_checkWebSocket()
|
||||
|
||||
parseTargetUrl()
|
||||
parseTargetDirect()
|
||||
|
||||
if any((conf.url, conf.logFile, conf.bulkFile, conf.sitemapUrl, conf.requestFile, conf.googleDork, conf.liveTest)):
|
||||
_setHostname()
|
||||
_setHTTPTimeout()
|
||||
_setHTTPExtraHeaders()
|
||||
_setHTTPCookies()
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
@@ -109,7 +109,8 @@ optDict = {
|
||||
"uChar": "string",
|
||||
"uFrom": "string",
|
||||
"dnsDomain": "string",
|
||||
"secondOrder": "string",
|
||||
"secondUrl": "string",
|
||||
"secondReq": "string",
|
||||
},
|
||||
|
||||
"Fingerprint": {
|
||||
@@ -164,9 +165,9 @@ optDict = {
|
||||
},
|
||||
|
||||
"File system": {
|
||||
"rFile": "string",
|
||||
"wFile": "string",
|
||||
"dFile": "string",
|
||||
"fileRead": "string",
|
||||
"fileWrite": "string",
|
||||
"fileDest": "string",
|
||||
},
|
||||
|
||||
"Takeover": {
|
||||
@@ -226,9 +227,10 @@ optDict = {
|
||||
"disableColoring": "boolean",
|
||||
"googlePage": "integer",
|
||||
"identifyWaf": "boolean",
|
||||
"listTampers": "boolean",
|
||||
"mobile": "boolean",
|
||||
"offline": "boolean",
|
||||
"purgeOutput": "boolean",
|
||||
"purge": "boolean",
|
||||
"skipWaf": "boolean",
|
||||
"smart": "boolean",
|
||||
"tmpDir": "string",
|
||||
|
||||
lib/core/patch.py (new file, 26 lines)
@@ -0,0 +1,26 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

import codecs
import httplib

from lib.core.settings import IS_WIN

def dirtyPatches():
    """
    Place for "dirty" Python related patches
    """

    # accept overly long result lines (e.g. SQLi results in HTTP header responses)
    httplib._MAXLINE = 1 * 1024 * 1024

    # add support for inet_pton() on Windows OS
    if IS_WIN:
        from thirdparty.wininetpton import win_inet_pton

    # Reference: https://github.com/nodejs/node/issues/12786#issuecomment-298652440
    codecs.register(lambda name: codecs.lookup("utf-8") if name == "cp65001" else None)
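A small sketch of what the registered codec fallback buys (assumed usage, not part of the new file): once the lookup function is in place, the Windows-only "cp65001" code page name resolves to UTF-8 instead of raising a LookupError on interpreters that lack it.

    import codecs

    codecs.register(lambda name: codecs.lookup("utf-8") if name == "cp65001" else None)
    print(u"\u00e9".encode("cp65001"))  # '\xc3\xa9', i.e. UTF-8 bytes, instead of an error
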
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
@@ -50,7 +50,7 @@ def profile(profileOutputFile=None, dotOutputFile=None, imageOutputFile=None):
|
||||
if os.path.exists(imageOutputFile):
|
||||
os.remove(imageOutputFile)
|
||||
|
||||
infoMsg = "profiling the execution into file %s" % profileOutputFile
|
||||
infoMsg = "profiling the execution into file '%s'" % profileOutputFile
|
||||
logger.info(infoMsg)
|
||||
|
||||
# Start sqlmap main function and generate a raw profile file
|
||||
@@ -80,15 +80,20 @@ def profile(profileOutputFile=None, dotOutputFile=None, imageOutputFile=None):
|
||||
if isinstance(pydotGraph, list):
|
||||
pydotGraph = pydotGraph[0]
|
||||
|
||||
pydotGraph.write_png(imageOutputFile)
|
||||
try:
|
||||
pydotGraph.write_png(imageOutputFile)
|
||||
except OSError:
|
||||
errMsg = "profiling requires graphviz installed "
|
||||
errMsg += "(Hint: 'sudo apt-get install graphviz')"
|
||||
logger.error(errMsg)
|
||||
else:
|
||||
infoMsg = "displaying interactive graph with xdot library"
|
||||
logger.info(infoMsg)
|
||||
|
||||
infoMsg = "displaying interactive graph with xdot library"
|
||||
logger.info(infoMsg)
|
||||
|
||||
# Display interactive Graphviz dot file by using extra/xdot/xdot.py
|
||||
# http://code.google.com/p/jrfonseca/wiki/XDot
|
||||
win = xdot.DotWindow()
|
||||
win.connect('destroy', gtk.main_quit)
|
||||
win.set_filter("dot")
|
||||
win.open_file(dotOutputFile)
|
||||
gtk.main()
|
||||
# Display interactive Graphviz dot file by using extra/xdot/xdot.py
|
||||
# http://code.google.com/p/jrfonseca/wiki/XDot
|
||||
win = xdot.DotWindow()
|
||||
win.connect('destroy', gtk.main_quit)
|
||||
win.set_filter("dot")
|
||||
win.open_file(dotOutputFile)
|
||||
gtk.main()
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
@@ -19,12 +19,13 @@ from lib.core.enums import DBMS_DIRECTORY_NAME
from lib.core.enums import OS

# sqlmap version (<major>.<minor>.<month>.<monthly commit>)
VERSION = "1.2.6.0"
VERSION = "1.3"
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
DESCRIPTION = "automatic SQL injection and database takeover tool"
SITE = "http://sqlmap.org"
DEFAULT_USER_AGENT = "%s (%s)" % (VERSION_STRING, SITE)
DEV_EMAIL_ADDRESS = "dev@sqlmap.org"
ISSUES_PAGE = "https://github.com/sqlmapproject/sqlmap/issues/new"
GIT_REPOSITORY = "https://github.com/sqlmapproject/sqlmap.git"
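A worked example of the version string derivation above (a quick check of the expressions as written, not part of the diff): a VERSION without a monthly-commit part, or one ending in ".0", is reported as "stable", anything else as "dev".

    for VERSION in ("1.3", "1.2.6.0", "1.3.1.4"):
        TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
        VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
        print(VERSION_STRING)
    # prints: sqlmap/1.3#stable, sqlmap/1.2.6#stable, sqlmap/1.3.1.4#dev
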
@@ -45,10 +46,10 @@ BANNER = """\033[01;33m\
|
||||
DIFF_TOLERANCE = 0.05
|
||||
CONSTANT_RATIO = 0.9
|
||||
|
||||
# Ratio used in heuristic check for WAF/IPS/IDS protected targets
|
||||
# Ratio used in heuristic check for WAF/IPS protected targets
|
||||
IDS_WAF_CHECK_RATIO = 0.5
|
||||
|
||||
# Timeout used in heuristic check for WAF/IPS/IDS protected targets
|
||||
# Timeout used in heuristic check for WAF/IPS protected targets
|
||||
IDS_WAF_CHECK_TIMEOUT = 10
|
||||
|
||||
# Lower and upper values for match ratio in case of stable page
|
||||
@@ -71,6 +72,7 @@ RANDOM_INTEGER_MARKER = "[RANDINT]"
|
||||
RANDOM_STRING_MARKER = "[RANDSTR]"
|
||||
SLEEP_TIME_MARKER = "[SLEEPTIME]"
|
||||
INFERENCE_MARKER = "[INFERENCE]"
|
||||
SINGLE_QUOTE_MARKER = "[SINGLE_QUOTE]"
|
||||
|
||||
PAYLOAD_DELIMITER = "__PAYLOAD_DELIMITER__"
|
||||
CHAR_INFERENCE_MARK = "%c"
|
||||
@@ -97,6 +99,9 @@ MAX_CONSECUTIVE_CONNECTION_ERRORS = 15
|
||||
# Timeout before the pre-connection candidate is being disposed (because of high probability that the web server will reset it)
|
||||
PRECONNECT_CANDIDATE_TIMEOUT = 10
|
||||
|
||||
# Servers known to cause issue with pre-connection mechanism (because of lack of multi-threaded support)
|
||||
PRECONNECT_INCOMPATIBLE_SERVERS = ("SimpleHTTP",)
|
||||
|
||||
# Maximum sleep time in "Murphy" (testing) mode
|
||||
MAX_MURPHY_SLEEP_TIME = 3
|
||||
|
||||
@@ -170,6 +175,9 @@ PUSH_VALUE_EXCEPTION_RETRY_COUNT = 3
|
||||
# Minimum time response set needed for time-comparison based on standard deviation
|
||||
MIN_TIME_RESPONSES = 30
|
||||
|
||||
# Maximum time response set used during time-comparison based on standard deviation
|
||||
MAX_TIME_RESPONSES = 200
|
||||
|
||||
# Minimum comparison ratio set needed for searching valid union column number based on standard deviation
|
||||
MIN_UNION_RESPONSES = 5
|
||||
|
||||
@@ -233,6 +241,7 @@ MAXDB_SYSTEM_DBS = ("SYSINFO", "DOMAIN")
|
||||
SYBASE_SYSTEM_DBS = ("master", "model", "sybsystemdb", "sybsystemprocs")
|
||||
DB2_SYSTEM_DBS = ("NULLID", "SQLJ", "SYSCAT", "SYSFUN", "SYSIBM", "SYSIBMADM", "SYSIBMINTERNAL", "SYSIBMTS", "SYSPROC", "SYSPUBLIC", "SYSSTAT", "SYSTOOLS")
|
||||
HSQLDB_SYSTEM_DBS = ("INFORMATION_SCHEMA", "SYSTEM_LOB")
|
||||
H2_SYSTEM_DBS = ("INFORMATION_SCHEMA")
|
||||
INFORMIX_SYSTEM_DBS = ("sysmaster", "sysutils", "sysuser", "sysadmin")
|
||||
|
||||
MSSQL_ALIASES = ("microsoft sql server", "mssqlserver", "mssql", "ms")
|
||||
@@ -246,20 +255,21 @@ MAXDB_ALIASES = ("maxdb", "sap maxdb", "sap db")
|
||||
SYBASE_ALIASES = ("sybase", "sybase sql server")
|
||||
DB2_ALIASES = ("db2", "ibm db2", "ibmdb2")
|
||||
HSQLDB_ALIASES = ("hsql", "hsqldb", "hs", "hypersql")
|
||||
H2_ALIASES = ("h2",)
|
||||
INFORMIX_ALIASES = ("informix", "ibm informix", "ibminformix")
|
||||
|
||||
DBMS_DIRECTORY_DICT = dict((getattr(DBMS, _), getattr(DBMS_DIRECTORY_NAME, _)) for _ in dir(DBMS) if not _.startswith("_"))
|
||||
|
||||
SUPPORTED_DBMS = MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES + HSQLDB_ALIASES + INFORMIX_ALIASES
|
||||
SUPPORTED_DBMS = MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES + HSQLDB_ALIASES + H2_ALIASES + INFORMIX_ALIASES
|
||||
SUPPORTED_OS = ("linux", "windows")
|
||||
|
||||
DBMS_ALIASES = ((DBMS.MSSQL, MSSQL_ALIASES), (DBMS.MYSQL, MYSQL_ALIASES), (DBMS.PGSQL, PGSQL_ALIASES), (DBMS.ORACLE, ORACLE_ALIASES), (DBMS.SQLITE, SQLITE_ALIASES), (DBMS.ACCESS, ACCESS_ALIASES), (DBMS.FIREBIRD, FIREBIRD_ALIASES), (DBMS.MAXDB, MAXDB_ALIASES), (DBMS.SYBASE, SYBASE_ALIASES), (DBMS.DB2, DB2_ALIASES), (DBMS.HSQLDB, HSQLDB_ALIASES))
|
||||
DBMS_ALIASES = ((DBMS.MSSQL, MSSQL_ALIASES), (DBMS.MYSQL, MYSQL_ALIASES), (DBMS.PGSQL, PGSQL_ALIASES), (DBMS.ORACLE, ORACLE_ALIASES), (DBMS.SQLITE, SQLITE_ALIASES), (DBMS.ACCESS, ACCESS_ALIASES), (DBMS.FIREBIRD, FIREBIRD_ALIASES), (DBMS.MAXDB, MAXDB_ALIASES), (DBMS.SYBASE, SYBASE_ALIASES), (DBMS.DB2, DB2_ALIASES), (DBMS.HSQLDB, HSQLDB_ALIASES), (DBMS.H2, H2_ALIASES), (DBMS.INFORMIX, INFORMIX_ALIASES))
|
||||
|
||||
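A minimal sketch (the helper is assumed, not part of the diff) of how the extended DBMS_ALIASES tuple resolves user-supplied names, so that the newly added H2 and Informix aliases map to their canonical DBMS entries:

    def resolveDbmsAlias(value):
        value = (value or "").lower()
        for dbms, aliases in DBMS_ALIASES:
            if value in aliases:
                return dbms
        return None

    # resolveDbmsAlias("h2")          -> DBMS.H2
    # resolveDbmsAlias("ibminformix") -> DBMS.INFORMIX
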
USER_AGENT_ALIASES = ("ua", "useragent", "user-agent")
|
||||
REFERER_ALIASES = ("ref", "referer", "referrer")
|
||||
HOST_ALIASES = ("host",)
|
||||
|
||||
HSQLDB_DEFAULT_SCHEMA = "PUBLIC"
|
||||
H2_DEFAULT_SCHEMA = HSQLDB_DEFAULT_SCHEMA = "PUBLIC"
|
||||
|
||||
# Names that can't be used to name files on Windows OS
|
||||
WINDOWS_RESERVED_NAMES = ("CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9")
|
||||
@@ -321,7 +331,8 @@ FILE_PATH_REGEXES = (r"<b>(?P<result>[^<>]+?)</b> on line \d+", r"in (?P<result>
|
||||
|
||||
# Regular expressions used for parsing error messages (--parse-errors)
|
||||
ERROR_PARSING_REGEXES = (
|
||||
r"<b>[^<]*(fatal|error|warning|exception)[^<]*</b>:?\s*(?P<result>.+?)<br\s*/?\s*>",
|
||||
r"\[Microsoft\]\[ODBC SQL Server Driver\]\[SQL Server\](?P<result>[^<]+)",
|
||||
r"<b>[^<]*(fatal|error|warning|exception)[^<]*</b>:?\s*(?P<result>[^<]+)",
|
||||
r"(?m)^\s*(fatal|error|warning|exception):?\s*(?P<result>[^\n]+?)$",
|
||||
r"(?P<result>[^\n>]*SQL Syntax[^\n<]+)",
|
||||
r"<li>Error Type:<br>(?P<result>.+?)</li>",
|
||||
@@ -364,10 +375,10 @@ URI_HTTP_HEADER = "URI"
|
||||
URI_INJECTABLE_REGEX = r"//[^/]*/([^\.*?]+)\Z"
|
||||
|
||||
# Regex used for masking sensitive data
|
||||
SENSITIVE_DATA_REGEX = "(\s|=)(?P<result>[^\s=]*%s[^\s]*)\s"
|
||||
SENSITIVE_DATA_REGEX = r"(\s|=)(?P<result>[^\s=]*%s[^\s]*)\s"
|
||||
|
||||
# Options to explicitly mask in anonymous (unhandled exception) reports (along with anything carrying the <hostname> inside)
|
||||
SENSITIVE_OPTIONS = ("hostname", "answers", "data", "dnsDomain", "googleDork", "authCred", "proxyCred", "tbl", "db", "col", "user", "cookie", "proxy", "rFile", "wFile", "dFile", "testParameter", "authCred")
|
||||
SENSITIVE_OPTIONS = ("hostname", "answers", "data", "dnsDomain", "googleDork", "authCred", "proxyCred", "tbl", "db", "col", "user", "cookie", "proxy", "fileRead", "fileWrite", "fileDest", "testParameter", "authCred")
|
||||
|
||||
# Maximum number of threads (avoiding connection issues and/or DoS)
|
||||
MAX_NUMBER_OF_THREADS = 10
|
||||
@@ -388,7 +399,7 @@ CANDIDATE_SENTENCE_MIN_LENGTH = 10
|
||||
CUSTOM_INJECTION_MARK_CHAR = '*'
|
||||
|
||||
# Other way to declare injection position
|
||||
INJECT_HERE_REGEX = '(?i)%INJECT[_ ]?HERE%'
|
||||
INJECT_HERE_REGEX = r"(?i)%INJECT[_ ]?HERE%"
|
||||
|
||||
# Minimum chunk length used for retrieving data over error based payloads
|
||||
MIN_ERROR_CHUNK_LENGTH = 8
|
||||
@@ -406,7 +417,7 @@ REFLECTED_VALUE_MARKER = "__REFLECTED_VALUE__"
|
||||
REFLECTED_BORDER_REGEX = r"[^A-Za-z]+"
|
||||
|
||||
# Regular expression used for replacing non-alphanum characters
|
||||
REFLECTED_REPLACEMENT_REGEX = r".+"
|
||||
REFLECTED_REPLACEMENT_REGEX = r"[^\n]{1,100}"
|
||||
|
||||
# Maximum time (in seconds) spent per reflective value(s) replacement
|
||||
REFLECTED_REPLACEMENT_TIMEOUT = 3
|
||||
@@ -426,6 +437,9 @@ DEFAULT_MSSQL_SCHEMA = "dbo"
|
||||
# Display hash attack info every mod number of items
|
||||
HASH_MOD_ITEM_DISPLAY = 11
|
||||
|
||||
# Display marker for (cracked) empty password
|
||||
HASH_EMPTY_PASSWORD_MARKER = "<empty>"
|
||||
|
||||
# Maximum integer value
|
||||
MAX_INT = sys.maxint
|
||||
|
||||
@@ -487,7 +501,7 @@ LEGAL_DISCLAIMER = "Usage of sqlmap for attacking targets without prior mutual c
|
||||
REFLECTIVE_MISS_THRESHOLD = 20
|
||||
|
||||
# Regular expression used for extracting HTML title
|
||||
HTML_TITLE_REGEX = "<title>(?P<result>[^<]+)</title>"
|
||||
HTML_TITLE_REGEX = r"<title>(?P<result>[^<]+)</title>"
|
||||
|
||||
# Table used for Base64 conversion in WordPress hash cracking routine
|
||||
ITOA64 = "./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
|
||||
@@ -524,7 +538,7 @@ CHECK_INTERNET_ADDRESS = "https://ipinfo.io/"
|
||||
# Value to look for in response to CHECK_INTERNET_ADDRESS
|
||||
CHECK_INTERNET_VALUE = "IP Address Details"
|
||||
|
||||
# Vectors used for provoking specific WAF/IPS/IDS behavior(s)
|
||||
# Vectors used for provoking specific WAF/IPS behavior(s)
|
||||
WAF_ATTACK_VECTORS = (
|
||||
"", # NIL
|
||||
"search=<script>alert(1)</script>",
|
||||
@@ -571,7 +585,7 @@ UNION_CHAR_REGEX = r"\A\w+\Z"
|
||||
UNENCODED_ORIGINAL_VALUE = "original"
|
||||
|
||||
# Common column names containing usernames (used for hash cracking in some cases)
|
||||
COMMON_USER_COLUMNS = ("login", "user", "username", "user_name", "user_login", "benutzername", "benutzer", "utilisateur", "usager", "consommateur", "utente", "utilizzatore", "usufrutuario", "korisnik", "usuario", "consumidor", "client", "cuser")
|
||||
COMMON_USER_COLUMNS = ("login", "user", "username", "user_name", "user_login", "benutzername", "benutzer", "utilisateur", "usager", "consommateur", "utente", "utilizzatore", "utilizator", "utilizador", "usufrutuario", "korisnik", "uporabnik", "usuario", "consumidor", "client", "cuser")
|
||||
|
||||
# Default delimiter in GET/POST values
|
||||
DEFAULT_GET_POST_DELIMITER = '&'
|
||||
@@ -583,7 +597,7 @@ DEFAULT_COOKIE_DELIMITER = ';'
|
||||
FORCE_COOKIE_EXPIRATION_TIME = "9999999999"
|
||||
|
||||
# Github OAuth token used for creating an automatic Issue for unhandled exceptions
|
||||
GITHUB_REPORT_OAUTH_TOKEN = "NTMyNWNkMmZkMzRlMDZmY2JkMmY0MGI4NWI0MzVlM2Q5YmFjYWNhYQ=="
|
||||
GITHUB_REPORT_OAUTH_TOKEN = "NTYzYjhmZWJjYzc0Njg2ODJhNzhmNDg1YzM0YzlkYjk3N2JiMzE3Nw=="
|
||||
|
||||
# Skip unforced HashDB flush requests below the threshold number of cached items
|
||||
HASHDB_FLUSH_THRESHOLD = 32
|
||||
@@ -598,7 +612,7 @@ HASHDB_RETRIEVE_RETRIES = 3
|
||||
HASHDB_END_TRANSACTION_RETRIES = 3
|
||||
|
||||
# Unique milestone value used for forced deprecation of old HashDB values (e.g. when changing hash/pickle mechanism)
|
||||
HASHDB_MILESTONE_VALUE = "dPHoJRQYvs" # python -c 'import random, string; print "".join(random.sample(string.ascii_letters, 10))'
|
||||
HASHDB_MILESTONE_VALUE = "BZzRotigLX" # python -c 'import random, string; print "".join(random.sample(string.ascii_letters, 10))'
|
||||
|
||||
# Warn user of possible delay due to large page dump in full UNION query injections
|
||||
LARGE_OUTPUT_THRESHOLD = 1024 ** 2
|
||||
@@ -631,7 +645,7 @@ BANNER = re.sub(r"\[.\]", lambda _: "[\033[01;41m%s\033[01;49m]" % random.sample
|
||||
DUMMY_NON_SQLI_CHECK_APPENDIX = "<'\">"
|
||||
|
||||
# Regular expression used for recognition of file inclusion errors
|
||||
FI_ERROR_REGEX = "(?i)[^\n]{0,100}(no such file|failed (to )?open)[^\n]{0,100}"
|
||||
FI_ERROR_REGEX = r"(?i)[^\n]{0,100}(no such file|failed (to )?open)[^\n]{0,100}"
|
||||
|
||||
# Length of prefix and suffix used in non-SQLI heuristic checks
|
||||
NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH = 6
|
||||
@@ -661,7 +675,7 @@ VALID_TIME_CHARS_RUN_THRESHOLD = 100
|
||||
CHECK_ZERO_COLUMNS_THRESHOLD = 10
|
||||
|
||||
# Boldify all logger messages containing these "patterns"
|
||||
BOLD_PATTERNS = ("' injectable", "provided empty", "leftover chars", "might be injectable", "' is vulnerable", "is not injectable", "does not seem to be", "test failed", "test passed", "live test final result", "test shows that", "the back-end DBMS is", "created Github", "blocked by the target server", "protection is involved", "CAPTCHA", "specific response", "NULL connection is supported")
|
||||
BOLD_PATTERNS = ("' injectable", "provided empty", "leftover chars", "might be injectable", "' is vulnerable", "is not injectable", "does not seem to be", "test failed", "test passed", "live test final result", "test shows that", "the back-end DBMS is", "created Github", "blocked by the target server", "protection is involved", "CAPTCHA", "specific response", "NULL connection is supported", "PASSED", "FAILED")
|
||||
|
||||
# Generic www root directory names
|
||||
GENERIC_DOC_ROOT_DIRECTORY_NAMES = ("htdocs", "httpdocs", "public", "wwwroot", "www")
|
||||
@@ -673,7 +687,7 @@ MAX_HELP_OPTION_LENGTH = 18
|
||||
MAX_CONNECT_RETRIES = 100
|
||||
|
||||
# Strings for detecting formatting errors
|
||||
FORMAT_EXCEPTION_STRINGS = ("Type mismatch", "Error converting", "Conversion failed", "String or binary data would be truncated", "Failed to convert", "unable to interpret text value", "Input string was not in a correct format", "System.FormatException", "java.lang.NumberFormatException", "ValueError: invalid literal", "DataTypeMismatchException", "CF_SQL_INTEGER", " for CFSQLTYPE ", "cfqueryparam cfsqltype", "InvalidParamTypeException", "Invalid parameter type", "is not of type numeric", "<cfif Not IsNumeric(", "invalid input syntax for integer", "invalid input syntax for type", "invalid number", "character to number conversion error", "unable to interpret text value", "String was not recognized as a valid", "Convert.ToInt", "cannot be converted to a ", "InvalidDataException")
|
||||
FORMAT_EXCEPTION_STRINGS = ("Type mismatch", "Error converting", "Please enter a", "Conversion failed", "String or binary data would be truncated", "Failed to convert", "unable to interpret text value", "Input string was not in a correct format", "System.FormatException", "java.lang.NumberFormatException", "ValueError: invalid literal", "TypeMismatchException", "CF_SQL_INTEGER", " for CFSQLTYPE ", "cfqueryparam cfsqltype", "InvalidParamTypeException", "Invalid parameter type", "is not of type numeric", "<cfif Not IsNumeric(", "invalid input syntax for integer", "invalid input syntax for type", "invalid number", "character to number conversion error", "unable to interpret text value", "String was not recognized as a valid", "Convert.ToInt", "cannot be converted to a ", "InvalidDataException")
|
||||
|
||||
# Regular expression used for extracting ASP.NET view state values
|
||||
VIEWSTATE_REGEX = r'(?i)(?P<name>__VIEWSTATE[^"]*)[^>]+value="(?P<result>[^"]+)'
|
||||
@@ -748,7 +762,7 @@ EVALCODE_KEYWORD_SUFFIX = "_KEYWORD"
|
||||
NETSCAPE_FORMAT_HEADER_COOKIES = "# Netscape HTTP Cookie File."
|
||||
|
||||
# Infixes used for automatic recognition of parameters carrying anti-CSRF tokens
|
||||
CSRF_TOKEN_PARAMETER_INFIXES = ("csrf", "xsrf")
|
||||
CSRF_TOKEN_PARAMETER_INFIXES = ("csrf", "xsrf", "token")
|
||||
|
||||
# Prefixes used in brute force search for web server document root
|
||||
BRUTE_DOC_ROOT_PREFIXES = {
|
||||
@@ -786,9 +800,9 @@ tr:nth-child(even) {
|
||||
background-color: #D3DFEE
|
||||
}
|
||||
td{
|
||||
font-size:10px;
|
||||
font-size:12px;
|
||||
}
|
||||
th{
|
||||
font-size:10px;
|
||||
font-size:12px;
|
||||
}
|
||||
</style>"""
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
@@ -53,28 +53,33 @@ def clearHistory():
|
||||
readline.clear_history()
|
||||
|
||||
def saveHistory(completion=None):
|
||||
if not readlineAvailable():
|
||||
return
|
||||
|
||||
if completion == AUTOCOMPLETE_TYPE.SQL:
|
||||
historyPath = paths.SQL_SHELL_HISTORY
|
||||
elif completion == AUTOCOMPLETE_TYPE.OS:
|
||||
historyPath = paths.OS_SHELL_HISTORY
|
||||
else:
|
||||
historyPath = paths.SQLMAP_SHELL_HISTORY
|
||||
|
||||
try:
|
||||
with open(historyPath, "w+"):
|
||||
if not readlineAvailable():
|
||||
return
|
||||
|
||||
if completion == AUTOCOMPLETE_TYPE.SQL:
|
||||
historyPath = paths.SQL_SHELL_HISTORY
|
||||
elif completion == AUTOCOMPLETE_TYPE.OS:
|
||||
historyPath = paths.OS_SHELL_HISTORY
|
||||
elif completion == AUTOCOMPLETE_TYPE.API:
|
||||
historyPath = paths.API_SHELL_HISTORY
|
||||
else:
|
||||
historyPath = paths.SQLMAP_SHELL_HISTORY
|
||||
|
||||
try:
|
||||
with open(historyPath, "w+"):
|
||||
pass
|
||||
except:
|
||||
pass
|
||||
except:
|
||||
pass
|
||||
|
||||
readline.set_history_length(MAX_HISTORY_LENGTH)
|
||||
try:
|
||||
readline.write_history_file(historyPath)
|
||||
except IOError, msg:
|
||||
warnMsg = "there was a problem writing the history file '%s' (%s)" % (historyPath, msg)
|
||||
logger.warn(warnMsg)
|
||||
readline.set_history_length(MAX_HISTORY_LENGTH)
|
||||
try:
|
||||
readline.write_history_file(historyPath)
|
||||
except IOError, msg:
|
||||
warnMsg = "there was a problem writing the history file '%s' (%s)" % (historyPath, msg)
|
||||
logger.warn(warnMsg)
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
|
||||
def loadHistory(completion=None):
|
||||
if not readlineAvailable():
|
||||
@@ -86,6 +91,8 @@ def loadHistory(completion=None):
|
||||
historyPath = paths.SQL_SHELL_HISTORY
|
||||
elif completion == AUTOCOMPLETE_TYPE.OS:
|
||||
historyPath = paths.OS_SHELL_HISTORY
|
||||
elif completion == AUTOCOMPLETE_TYPE.API:
|
||||
historyPath = paths.API_SHELL_HISTORY
|
||||
else:
|
||||
historyPath = paths.SQLMAP_SHELL_HISTORY
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,11 +1,10 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

import codecs
import functools
import os
import re
@@ -45,6 +44,7 @@ from lib.core.enums import POST_HINT
from lib.core.exception import SqlmapFilePathException
from lib.core.exception import SqlmapGenericException
from lib.core.exception import SqlmapMissingPrivileges
from lib.core.exception import SqlmapNoneDataException
from lib.core.exception import SqlmapSystemException
from lib.core.exception import SqlmapUserQuitException
from lib.core.option import _setDBMS
@@ -52,9 +52,11 @@ from lib.core.option import _setKnowledgeBaseAttributes
from lib.core.option import _setAuthCred
from lib.core.settings import ASTERISK_MARKER
from lib.core.settings import CSRF_TOKEN_PARAMETER_INFIXES
from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
from lib.core.settings import HOST_ALIASES
from lib.core.settings import ARRAY_LIKE_RECOGNITION_REGEX
from lib.core.settings import INJECT_HERE_REGEX
from lib.core.settings import JSON_RECOGNITION_REGEX
from lib.core.settings import JSON_LIKE_RECOGNITION_REGEX
from lib.core.settings import MULTIPART_RECOGNITION_REGEX
@@ -83,6 +85,7 @@ def _setRequestParams():
conf.parameters[None] = "direct connection"
return

hintNames = []
testableParameters = False

# Perform checks on GET parameters
@@ -101,7 +104,6 @@ def _setRequestParams():

if conf.data is not None:
conf.method = HTTPMETHOD.POST if not conf.method or conf.method == HTTPMETHOD.GET else conf.method
hintNames = []

def process(match, repl):
retVal = match.group(0)
@@ -148,8 +150,8 @@ def _setRequestParams():
match = re.search(r'(?P<name>[^"]+)"\s*:\s*\[([^\]]+)\]', conf.data)
if match and not (conf.testParameter and match.group("name") not in conf.testParameter):
_ = match.group(2)
_ = re.sub(r'("[^"]+)"', '\g<1>%s"' % kb.customInjectionMark, _)
_ = re.sub(r'(\A|,|\s+)(-?\d[\d\.]*\b)', '\g<0>%s' % kb.customInjectionMark, _)
_ = re.sub(r'("[^"]+)"', r'\g<1>%s"' % kb.customInjectionMark, _)
_ = re.sub(r'(\A|,|\s+)(-?\d[\d\.]*\b)', r'\g<0>%s' % kb.customInjectionMark, _)
conf.data = conf.data.replace(match.group(0), match.group(0).replace(match.group(2), _))

kb.postHint = POST_HINT.JSON
@@ -344,7 +346,7 @@ def _setRequestParams():
# Url encoding of the header values should be avoided
# Reference: http://stackoverflow.com/questions/5085904/is-ok-to-urlencode-the-value-in-headerlocation-value

if httpHeader.title() == HTTP_HEADER.USER_AGENT:
if httpHeader.upper() == HTTP_HEADER.USER_AGENT.upper():
conf.parameters[PLACE.USER_AGENT] = urldecode(headerValue)

condition = any((not conf.testParameter, intersect(conf.testParameter, USER_AGENT_ALIASES, True)))
@@ -353,7 +355,7 @@ def _setRequestParams():
conf.paramDict[PLACE.USER_AGENT] = {PLACE.USER_AGENT: headerValue}
testableParameters = True

elif httpHeader.title() == HTTP_HEADER.REFERER:
elif httpHeader.upper() == HTTP_HEADER.REFERER.upper():
conf.parameters[PLACE.REFERER] = urldecode(headerValue)

condition = any((not conf.testParameter, intersect(conf.testParameter, REFERER_ALIASES, True)))
@@ -362,7 +364,7 @@ def _setRequestParams():
conf.paramDict[PLACE.REFERER] = {PLACE.REFERER: headerValue}
testableParameters = True

elif httpHeader.title() == HTTP_HEADER.HOST:
elif httpHeader.upper() == HTTP_HEADER.HOST.upper():
conf.parameters[PLACE.HOST] = urldecode(headerValue)

condition = any((not conf.testParameter, intersect(conf.testParameter, HOST_ALIASES, True)))
@@ -391,8 +393,8 @@ def _setRequestParams():
raise SqlmapGenericException(errMsg)

if conf.csrfToken:
if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))) and not re.search(r"\b%s\b" % re.escape(conf.csrfToken), conf.data or "") and conf.csrfToken not in set(_[0].lower() for _ in conf.httpHeaders) and conf.csrfToken not in conf.paramDict.get(PLACE.COOKIE, {}):
errMsg = "anti-CSRF token parameter '%s' not " % conf.csrfToken
if not any(re.search(conf.csrfToken, ' '.join(_), re.I) for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))) and not re.search(r"\b%s\b" % re.escape(conf.csrfToken), conf.data or "") and conf.csrfToken not in set(_[0].lower() for _ in conf.httpHeaders) and conf.csrfToken not in conf.paramDict.get(PLACE.COOKIE, {}):
errMsg = "anti-CSRF token parameter '%s' not " % conf.csrfToken._original
errMsg += "found in provided GET, POST, Cookie or header values"
raise SqlmapGenericException(errMsg)
else:
@@ -406,7 +408,10 @@ def _setRequestParams():
message += "Do you want sqlmap to automatically update it in further requests? [y/N] "

if readInput(message, default='N', boolean=True):
conf.csrfToken = getUnicode(parameter)
class _(unicode):
pass
conf.csrfToken = _(re.escape(getUnicode(parameter)))
conf.csrfToken._original = getUnicode(parameter)
break

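The hunk above starts storing the auto-detected token name as a `unicode` subclass, so the working value can be regex-escaped while the untouched original remains available for user-facing messages. A rough sketch of the idea (Python 3 `str` used here for brevity; sqlmap itself subclasses `unicode`):

```python
import re

class _EscapedName(str):
    pass

def wrap_token_name(parameter):
    # regex-escape the value used for matching, keep the raw name for display
    token = _EscapedName(re.escape(parameter))
    token._original = parameter
    return token

token = wrap_token_name("csrf.token")
print(token)            # csrf\.token  (safe to embed in regular expressions)
print(token._original)  # csrf.token   (used in error/warning messages)
```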
def _setHashDB():
@@ -467,7 +472,13 @@ def _resumeDBMS():
value = hashDBRetrieve(HASHDB_KEYS.DBMS)

if not value:
return
if conf.offline:
errMsg = "unable to continue in offline mode "
errMsg += "because of lack of usable "
errMsg += "session data"
raise SqlmapNoneDataException(errMsg)
else:
return

dbms = value.lower()
dbmsVersion = [UNKNOWN_DBMS_VERSION]
@@ -571,7 +582,7 @@ def _createFilesDir():
Create the file directory.
"""

if not conf.rFile:
if not conf.fileRead:
return

conf.filePath = paths.SQLMAP_FILES_PATH % conf.hostname
@@ -619,33 +630,35 @@ def _createTargetDirs():
Create the output directory.
"""

try:
if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH):
os.makedirs(paths.SQLMAP_OUTPUT_PATH)

_ = os.path.join(paths.SQLMAP_OUTPUT_PATH, randomStr())
open(_, "w+b").close()
os.remove(_)

if conf.outputDir:
warnMsg = "using '%s' as the output directory" % paths.SQLMAP_OUTPUT_PATH
logger.warn(warnMsg)
except (OSError, IOError), ex:
for context in "output", "history":
directory = paths["SQLMAP_%s_PATH" % context.upper()]
try:
tempDir = tempfile.mkdtemp(prefix="sqlmapoutput")
except Exception, _:
errMsg = "unable to write to the temporary directory ('%s'). " % _
errMsg += "Please make sure that your disk is not full and "
errMsg += "that you have sufficient write permissions to "
errMsg += "create temporary files and/or directories"
raise SqlmapSystemException(errMsg)
if not os.path.isdir(directory):
os.makedirs(directory)

warnMsg = "unable to %s output directory " % ("create" if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH) else "write to the")
warnMsg += "'%s' (%s). " % (paths.SQLMAP_OUTPUT_PATH, getUnicode(ex))
warnMsg += "Using temporary directory '%s' instead" % getUnicode(tempDir)
logger.warn(warnMsg)
_ = os.path.join(directory, randomStr())
open(_, "w+b").close()
os.remove(_)

paths.SQLMAP_OUTPUT_PATH = tempDir
if conf.outputDir and context == "output":
warnMsg = "using '%s' as the %s directory" % (directory, context)
logger.warn(warnMsg)
except (OSError, IOError), ex:
try:
tempDir = tempfile.mkdtemp(prefix="sqlmap%s" % context)
except Exception, _:
errMsg = "unable to write to the temporary directory ('%s'). " % _
errMsg += "Please make sure that your disk is not full and "
errMsg += "that you have sufficient write permissions to "
errMsg += "create temporary files and/or directories"
raise SqlmapSystemException(errMsg)

warnMsg = "unable to %s %s directory " % ("create" if not os.path.isdir(directory) else "write to the", context)
warnMsg += "'%s' (%s). " % (directory, getUnicode(ex))
warnMsg += "Using temporary directory '%s' instead" % getUnicode(tempDir)
logger.warn(warnMsg)

paths["SQLMAP_%s_PATH" % context.upper()] = tempDir

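The rewritten `_createTargetDirs()` above loops over both the output and history directories, probes each for writability, and falls back to a throw-away temporary directory when the probe fails. A condensed sketch of that pattern (function name, prefix and probe naming are illustrative):

```python
import os
import random
import string
import tempfile

def ensure_writable_dir(directory, prefix="sqlmapoutput"):
    # Create the directory if needed and prove it is writable; otherwise fall back to a temp dir
    try:
        if not os.path.isdir(directory):
            os.makedirs(directory)
        probe = os.path.join(directory, "".join(random.choice(string.ascii_lowercase) for _ in range(8)))
        open(probe, "w+b").close()
        os.remove(probe)
        return directory
    except (OSError, IOError):
        return tempfile.mkdtemp(prefix=prefix)
```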
conf.outputPath = os.path.join(getUnicode(paths.SQLMAP_OUTPUT_PATH), normalizeUnicode(getUnicode(conf.hostname)))

@@ -669,8 +682,10 @@ def _createTargetDirs():

conf.outputPath = tempDir

conf.outputPath = getUnicode(conf.outputPath)

try:
with codecs.open(os.path.join(conf.outputPath, "target.txt"), "w+", UNICODE_ENCODING) as f:
with openFile(os.path.join(conf.outputPath, "target.txt"), "w+") as f:
f.write(kb.originalUrls.get(conf.url) or conf.url or conf.hostname)
f.write(" (%s)" % (HTTPMETHOD.POST if conf.data else HTTPMETHOD.GET))
f.write(" # %s" % getUnicode(subprocess.list2cmdline(sys.argv), encoding=sys.stdin.encoding))
@@ -689,6 +704,13 @@ def _createTargetDirs():
_createFilesDir()
_configureDumper()

def _setAuxOptions():
"""
Setup auxiliary (host-dependent) options
"""

kb.aliasName = randomStr(seed=hash(conf.hostname or ""))

def _restoreMergedOptions():
"""
Restore merged options (command line, configuration file and default values)
@@ -735,6 +757,9 @@ def initTargetEnv():
setattr(conf.data, UNENCODED_ORIGINAL_VALUE, original)
kb.postSpaceToPlus = '+' in original

match = re.search(INJECT_HERE_REGEX, conf.data or "") or re.search(INJECT_HERE_REGEX, conf.url or "")
kb.customInjectionMark = match.group(0) if match else CUSTOM_INJECTION_MARK_CHAR

def setupTargetEnv():
_createTargetDirs()
_setRequestParams()
@@ -742,3 +767,4 @@ def setupTargetEnv():
_resumeHashDBValues()
_setResultsFile()
_setAuthCred()
_setAuxOptions()
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
@@ -17,6 +17,7 @@ import traceback
|
||||
|
||||
from extra.beep.beep import beep
|
||||
from lib.controller.controller import start
|
||||
from lib.core.common import checkIntegrity
|
||||
from lib.core.common import clearConsoleLine
|
||||
from lib.core.common import dataToStdout
|
||||
from lib.core.common import getUnicode
|
||||
@@ -51,41 +52,44 @@ def smokeTest():
|
||||
retVal = True
|
||||
count, length = 0, 0
|
||||
|
||||
for root, _, files in os.walk(paths.SQLMAP_ROOT_PATH):
|
||||
if any(_ in root for _ in ("thirdparty", "extra")):
|
||||
continue
|
||||
if not checkIntegrity():
|
||||
retVal = False
|
||||
else:
|
||||
for root, _, files in os.walk(paths.SQLMAP_ROOT_PATH):
|
||||
if any(_ in root for _ in ("thirdparty", "extra")):
|
||||
continue
|
||||
|
||||
for filename in files:
|
||||
if os.path.splitext(filename)[1].lower() == ".py" and filename != "__init__.py":
|
||||
length += 1
|
||||
for filename in files:
|
||||
if os.path.splitext(filename)[1].lower() == ".py" and filename != "__init__.py":
|
||||
length += 1
|
||||
|
||||
for root, _, files in os.walk(paths.SQLMAP_ROOT_PATH):
|
||||
if any(_ in root for _ in ("thirdparty", "extra")):
|
||||
continue
|
||||
for root, _, files in os.walk(paths.SQLMAP_ROOT_PATH):
|
||||
if any(_ in root for _ in ("thirdparty", "extra")):
|
||||
continue
|
||||
|
||||
for filename in files:
|
||||
if os.path.splitext(filename)[1].lower() == ".py" and filename != "__init__.py":
|
||||
path = os.path.join(root, os.path.splitext(filename)[0])
|
||||
path = path.replace(paths.SQLMAP_ROOT_PATH, '.')
|
||||
path = path.replace(os.sep, '.').lstrip('.')
|
||||
try:
|
||||
__import__(path)
|
||||
module = sys.modules[path]
|
||||
except Exception, msg:
|
||||
retVal = False
|
||||
dataToStdout("\r")
|
||||
errMsg = "smoke test failed at importing module '%s' (%s):\n%s" % (path, os.path.join(root, filename), msg)
|
||||
logger.error(errMsg)
|
||||
else:
|
||||
# Run doc tests
|
||||
# Reference: http://docs.python.org/library/doctest.html
|
||||
(failure_count, test_count) = doctest.testmod(module)
|
||||
if failure_count > 0:
|
||||
for filename in files:
|
||||
if os.path.splitext(filename)[1].lower() == ".py" and filename != "__init__.py":
|
||||
path = os.path.join(root, os.path.splitext(filename)[0])
|
||||
path = path.replace(paths.SQLMAP_ROOT_PATH, '.')
|
||||
path = path.replace(os.sep, '.').lstrip('.')
|
||||
try:
|
||||
__import__(path)
|
||||
module = sys.modules[path]
|
||||
except Exception, msg:
|
||||
retVal = False
|
||||
dataToStdout("\r")
|
||||
errMsg = "smoke test failed at importing module '%s' (%s):\n%s" % (path, os.path.join(root, filename), msg)
|
||||
logger.error(errMsg)
|
||||
else:
|
||||
# Run doc tests
|
||||
# Reference: http://docs.python.org/library/doctest.html
|
||||
(failure_count, test_count) = doctest.testmod(module)
|
||||
if failure_count > 0:
|
||||
retVal = False
|
||||
|
||||
count += 1
|
||||
status = '%d/%d (%d%%) ' % (count, length, round(100.0 * count / length))
|
||||
dataToStdout("\r[%s] [INFO] complete: %s" % (time.strftime("%X"), status))
|
||||
count += 1
|
||||
status = '%d/%d (%d%%) ' % (count, length, round(100.0 * count / length))
|
||||
dataToStdout("\r[%s] [INFO] complete: %s" % (time.strftime("%X"), status))
|
||||
|
||||
clearConsoleLine()
|
||||
if retVal:
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
@@ -92,10 +92,10 @@ def exceptionHandledFunction(threadFunction, silent=False):
|
||||
kb.threadException = True
|
||||
raise
|
||||
except Exception, ex:
|
||||
if not silent:
|
||||
if not silent and kb.get("threadContinue"):
|
||||
logger.error("thread %s: %s" % (threading.currentThread().getName(), ex.message))
|
||||
|
||||
if conf.verbose > 1:
|
||||
if conf.get("verbose") > 1:
|
||||
traceback.print_exc()
|
||||
|
||||
def setDaemon(thread):
|
||||
@@ -168,6 +168,7 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardExceptio
|
||||
|
||||
except (KeyboardInterrupt, SqlmapUserQuitException), ex:
|
||||
print
|
||||
kb.prependFlag = False
|
||||
kb.threadContinue = False
|
||||
kb.threadException = True
|
||||
|
||||
@@ -188,7 +189,7 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardExceptio
|
||||
kb.threadException = True
|
||||
logger.error("thread %s: %s" % (threading.currentThread().getName(), ex.message))
|
||||
|
||||
if conf.verbose > 1:
|
||||
if conf.get("verbose") > 1:
|
||||
traceback.print_exc()
|
||||
|
||||
except:
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
@@ -17,6 +17,7 @@ import zipfile
|
||||
|
||||
from lib.core.common import dataToStdout
|
||||
from lib.core.common import getSafeExString
|
||||
from lib.core.common import getLatestRevision
|
||||
from lib.core.common import pollProcess
|
||||
from lib.core.common import readInput
|
||||
from lib.core.data import conf
|
||||
@@ -25,6 +26,7 @@ from lib.core.data import paths
|
||||
from lib.core.revision import getRevisionNumber
|
||||
from lib.core.settings import GIT_REPOSITORY
|
||||
from lib.core.settings import IS_WIN
|
||||
from lib.core.settings import VERSION
|
||||
from lib.core.settings import ZIPBALL_PAGE
|
||||
from lib.core.settings import UNICODE_ENCODING
|
||||
|
||||
@@ -39,6 +41,10 @@ def update():
|
||||
warnMsg += "from GitHub (e.g. 'git clone --depth 1 %s sqlmap')" % GIT_REPOSITORY
|
||||
logger.warn(warnMsg)
|
||||
|
||||
if VERSION == getLatestRevision():
|
||||
logger.info("already at the latest revision '%s'" % getRevisionNumber())
|
||||
return
|
||||
|
||||
message = "do you want to try to fetch the latest 'zipball' from repository and extract it (experimental) ? [y/N]"
|
||||
if readInput(message, default='N', boolean=True):
|
||||
directory = os.path.abspath(paths.SQLMAP_ROOT_PATH)
|
||||
@@ -49,6 +55,7 @@ def update():
|
||||
errMsg = "unable to update content of directory '%s' ('%s')" % (directory, getSafeExString(ex))
|
||||
logger.error(errMsg)
|
||||
else:
|
||||
attrs = os.stat(os.path.join(directory, "sqlmap.py")).st_mode
|
||||
for wildcard in ('*', ".*"):
|
||||
for _ in glob.glob(os.path.join(directory, wildcard)):
|
||||
try:
|
||||
@@ -83,6 +90,11 @@ def update():
|
||||
else:
|
||||
if not success:
|
||||
logger.error("update could not be completed")
|
||||
else:
|
||||
try:
|
||||
os.chmod(os.path.join(directory, "sqlmap.py"), attrs)
|
||||
except OSError:
|
||||
logger.warning("could not set the file attributes of '%s'" % os.path.join(directory, "sqlmap.py"))
|
||||
else:
|
||||
infoMsg = "updating sqlmap to the latest development revision from the "
|
||||
infoMsg += "GitHub repository"
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
@@ -99,16 +99,16 @@ def cmdLineParser(argv=None):
|
||||
help="Force usage of given HTTP method (e.g. PUT)")
|
||||
|
||||
request.add_option("--data", dest="data",
|
||||
help="Data string to be sent through POST")
|
||||
help="Data string to be sent through POST (e.g. \"id=1\")")
|
||||
|
||||
request.add_option("--param-del", dest="paramDel",
|
||||
help="Character used for splitting parameter values")
|
||||
help="Character used for splitting parameter values (e.g. &)")
|
||||
|
||||
request.add_option("--cookie", dest="cookie",
|
||||
help="HTTP Cookie header value")
|
||||
help="HTTP Cookie header value (e.g. \"PHPSESSID=a8d127e..\")")
|
||||
|
||||
request.add_option("--cookie-del", dest="cookieDel",
|
||||
help="Character used for splitting cookie values")
|
||||
help="Character used for splitting cookie values (e.g. ;)")
|
||||
|
||||
request.add_option("--load-cookies", dest="loadCookies",
|
||||
help="File containing cookies in Netscape/wget format")
|
||||
@@ -144,7 +144,7 @@ def cmdLineParser(argv=None):
|
||||
help="HTTP authentication PEM cert/private key file")
|
||||
|
||||
request.add_option("--ignore-code", dest="ignoreCode", type="int",
|
||||
help="Ignore HTTP error code (e.g. 401)")
|
||||
help="Ignore (problematic) HTTP error code (e.g. 401)")
|
||||
|
||||
request.add_option("--ignore-proxy", dest="ignoreProxy", action="store_true",
|
||||
help="Ignore system default proxy settings")
|
||||
@@ -207,7 +207,7 @@ def cmdLineParser(argv=None):
|
||||
help="Parameter used to hold anti-CSRF token")
|
||||
|
||||
request.add_option("--csrf-url", dest="csrfUrl",
|
||||
help="URL address to visit to extract anti-CSRF token")
|
||||
help="URL address to visit for extraction of anti-CSRF token")
|
||||
|
||||
request.add_option("--force-ssl", dest="forceSSL", action="store_true",
|
||||
help="Force usage of SSL/HTTPS")
|
||||
@@ -253,13 +253,13 @@ def cmdLineParser(argv=None):
|
||||
help="Regexp to exclude parameters from testing (e.g. \"ses\")")
|
||||
|
||||
injection.add_option("--dbms", dest="dbms",
|
||||
help="Force back-end DBMS to this value")
|
||||
help="Force back-end DBMS to provided value")
|
||||
|
||||
injection.add_option("--dbms-cred", dest="dbmsCred",
|
||||
help="DBMS authentication credentials (user:password)")
|
||||
|
||||
injection.add_option("--os", dest="os",
|
||||
help="Force back-end DBMS operating system to this value")
|
||||
help="Force back-end DBMS operating system to provided value")
|
||||
|
||||
injection.add_option("--invalid-bignum", dest="invalidBignum", action="store_true",
|
||||
help="Use big numbers for invalidating values")
|
||||
@@ -333,9 +333,12 @@ def cmdLineParser(argv=None):
|
||||
techniques.add_option("--dns-domain", dest="dnsDomain",
|
||||
help="Domain name used for DNS exfiltration attack")
|
||||
|
||||
techniques.add_option("--second-order", dest="secondOrder",
|
||||
techniques.add_option("--second-url", dest="secondUrl",
|
||||
help="Resulting page URL searched for second-order response")
|
||||
|
||||
techniques.add_option("--second-req", dest="secondReq",
|
||||
help="Load second-order HTTP request from file")
|
||||
|
||||
# Fingerprint options
|
||||
fingerprint = OptionGroup(parser, "Fingerprint")
|
||||
|
||||
@@ -400,7 +403,7 @@ def cmdLineParser(argv=None):
|
||||
help="Search column(s), table(s) and/or database name(s)")
|
||||
|
||||
enumeration.add_option("--comments", dest="getComments", action="store_true",
|
||||
help="Retrieve DBMS comments")
|
||||
help="Check for DBMS comments during enumeration")
|
||||
|
||||
enumeration.add_option("-D", dest="db",
|
||||
help="DBMS database to enumerate")
|
||||
@@ -468,13 +471,13 @@ def cmdLineParser(argv=None):
|
||||
# File system options
|
||||
filesystem = OptionGroup(parser, "File system access", "These options can be used to access the back-end database management system underlying file system")
|
||||
|
||||
filesystem.add_option("--file-read", dest="rFile",
|
||||
filesystem.add_option("--file-read", dest="fileRead",
|
||||
help="Read a file from the back-end DBMS file system")
|
||||
|
||||
filesystem.add_option("--file-write", dest="wFile",
|
||||
filesystem.add_option("--file-write", dest="fileWrite",
|
||||
help="Write a local file on the back-end DBMS file system")
|
||||
|
||||
filesystem.add_option("--file-dest", dest="dFile",
|
||||
filesystem.add_option("--file-dest", dest="fileDest",
|
||||
help="Back-end DBMS absolute filepath to write to")
|
||||
|
||||
# Takeover options
|
||||
@@ -581,7 +584,7 @@ def cmdLineParser(argv=None):
|
||||
help="Log all HTTP traffic into a HAR file")
|
||||
|
||||
general.add_option("--hex", dest="hexConvert", action="store_true",
|
||||
help="Use DBMS hex function(s) for data retrieval")
|
||||
help="Use hex conversion during data retrieval")
|
||||
|
||||
general.add_option("--output-dir", dest="outputDir", action="store",
|
||||
help="Custom output directory path")
|
||||
@@ -614,7 +617,7 @@ def cmdLineParser(argv=None):
|
||||
help="Run host OS command(s) when SQL injection is found")
|
||||
|
||||
miscellaneous.add_option("--answers", dest="answers",
|
||||
help="Set question answers (e.g. \"quit=N,follow=N\")")
|
||||
help="Set predefined answers (e.g. \"quit=N,follow=N\")")
|
||||
|
||||
miscellaneous.add_option("--beep", dest="beep", action="store_true",
|
||||
help="Beep on question and/or when SQL injection is found")
|
||||
@@ -623,7 +626,7 @@ def cmdLineParser(argv=None):
|
||||
help="Clean up the DBMS from sqlmap specific UDF and tables")
|
||||
|
||||
miscellaneous.add_option("--dependencies", dest="dependencies", action="store_true",
|
||||
help="Check for missing (non-core) sqlmap dependencies")
|
||||
help="Check for missing (optional) sqlmap dependencies")
|
||||
|
||||
miscellaneous.add_option("--disable-coloring", dest="disableColoring", action="store_true",
|
||||
help="Disable console output coloring")
|
||||
@@ -632,7 +635,10 @@ def cmdLineParser(argv=None):
|
||||
help="Use Google dork results from specified page number")
|
||||
|
||||
miscellaneous.add_option("--identify-waf", dest="identifyWaf", action="store_true",
|
||||
help="Make a thorough testing for a WAF/IPS/IDS protection")
|
||||
help="Make a thorough testing for a WAF/IPS protection")
|
||||
|
||||
miscellaneous.add_option("--list-tampers", dest="listTampers", action="store_true",
|
||||
help="Display list of available tamper scripts")
|
||||
|
||||
miscellaneous.add_option("--mobile", dest="mobile", action="store_true",
|
||||
help="Imitate smartphone through HTTP User-Agent header")
|
||||
@@ -640,11 +646,11 @@ def cmdLineParser(argv=None):
|
||||
miscellaneous.add_option("--offline", dest="offline", action="store_true",
|
||||
help="Work in offline mode (only use session data)")
|
||||
|
||||
miscellaneous.add_option("--purge-output", dest="purgeOutput", action="store_true",
|
||||
help="Safely remove all content from output directory")
|
||||
miscellaneous.add_option("--purge", dest="purge", action="store_true",
|
||||
help="Safely remove all content from sqlmap data directory")
|
||||
|
||||
miscellaneous.add_option("--skip-waf", dest="skipWaf", action="store_true",
|
||||
help="Skip heuristic detection of WAF/IPS/IDS protection")
|
||||
help="Skip heuristic detection of WAF/IPS protection")
|
||||
|
||||
miscellaneous.add_option("--smart", dest="smart", action="store_true",
|
||||
help="Conduct thorough tests only if positive heuristic(s)")
|
||||
@@ -662,6 +668,10 @@ def cmdLineParser(argv=None):
|
||||
help="Simple wizard interface for beginner users")
|
||||
|
||||
# Hidden and/or experimental options
|
||||
parser.add_option("--crack", dest="hashFile",
|
||||
help=SUPPRESS_HELP)
|
||||
# help="Load and crack hashes from a file (standalone)")
|
||||
|
||||
parser.add_option("--dummy", dest="dummy", action="store_true",
|
||||
help=SUPPRESS_HELP)
|
||||
|
||||
@@ -747,6 +757,7 @@ def cmdLineParser(argv=None):
prompt = False
advancedHelp = True
extraHeaders = []
tamperIndex = None

# Reference: https://stackoverflow.com/a/4012683 (Note: previously used "...sys.getfilesystemencoding() or UNICODE_ENCODING")
for arg in argv:
@@ -818,6 +829,12 @@ def cmdLineParser(argv=None):
elif re.search(r"\A-\w=.+", argv[i]):
dataToStdout("[!] potentially miswritten (illegal '=') short option detected ('%s')\n" % argv[i])
raise SystemExit
elif argv[i].startswith("--tamper"):
if tamperIndex is None:
tamperIndex = i if '=' in argv[i] else (i + 1 if i + 1 < len(argv) and not argv[i + 1].startswith('-') else None)
else:
argv[tamperIndex] = "%s,%s" % (argv[tamperIndex], argv[i].split('=')[1] if '=' in argv[i] else (argv[i + 1] if i + 1 < len(argv) and not argv[i + 1].startswith('-') else ""))
argv[i] = ""
elif argv[i] == "-H":
if i + 1 < len(argv):
extraHeaders.append(argv[i + 1])
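The new `--tamper` handling above folds repeated occurrences of the option into a single comma-separated value before optparse sees them. A simplified, standalone sketch of the same argv rewrite (without the surrounding parser state):

```python
def merge_tamper_args(argv):
    # Collapse repeated "--tamper x" / "--tamper=x" occurrences into one option
    merged, scripts = [], []
    i = 0
    while i < len(argv):
        arg = argv[i]
        if arg.startswith("--tamper"):
            if "=" in arg:
                scripts.append(arg.split("=", 1)[1])
            elif i + 1 < len(argv) and not argv[i + 1].startswith("-"):
                scripts.append(argv[i + 1])
                i += 1
        else:
            merged.append(arg)
        i += 1
    if scripts:
        merged.append("--tamper=%s" % ",".join(scripts))
    return merged

print(merge_tamper_args(["-u", "http://www.target.com/vuln.php?id=1", "--tamper", "space2comment", "--tamper=between"]))
# ['-u', 'http://www.target.com/vuln.php?id=1', '--tamper=space2comment,between']
```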
@@ -871,9 +888,9 @@ def cmdLineParser(argv=None):
if args.dummy:
args.url = args.url or DUMMY_URL

if not any((args.direct, args.url, args.logFile, args.bulkFile, args.googleDork, args.configFile, args.requestFile, args.updateAll, args.smokeTest, args.liveTest, args.wizard, args.dependencies, args.purgeOutput, args.sitemapUrl)):
errMsg = "missing a mandatory option (-d, -u, -l, -m, -r, -g, -c, -x, --wizard, --update, --purge-output or --dependencies), "
errMsg += "use -h for basic or -hh for advanced help\n"
if not any((args.direct, args.url, args.logFile, args.bulkFile, args.googleDork, args.configFile, args.requestFile, args.updateAll, args.smokeTest, args.liveTest, args.wizard, args.dependencies, args.purge, args.sitemapUrl, args.listTampers, args.hashFile)):
errMsg = "missing a mandatory option (-d, -u, -l, -m, -r, -g, -c, -x, --list-tampers, --wizard, --update, --purge or --dependencies). "
errMsg += "Use -h for basic and -hh for advanced help\n"
parser.error(errMsg)

return args

@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,11 +1,12 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

import os
import re

from xml.etree import ElementTree as et

@@ -17,6 +18,9 @@ from lib.core.exception import SqlmapInstallationException
from lib.core.settings import PAYLOAD_XML_FILES

def cleanupVals(text, tag):
if tag == "clause" and '-' in text:
text = re.sub(r"(\d+)-(\d+)", lambda match: ','.join(str(_) for _ in xrange(int(match.group(1)), int(match.group(2)) + 1)), text)

if tag in ("clause", "where"):
text = text.split(',')

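The `cleanupVals()` change above expands numeric ranges such as `1-3` found in `<clause>` values of the payload XML into explicit comma-separated lists. A standalone sketch of that substitution (Python 3 `range` in place of `xrange`):

```python
import re

def expand_clause_ranges(text):
    # "1-3,9" -> "1,2,3,9"
    return re.sub(
        r"(\d+)-(\d+)",
        lambda match: ",".join(str(_) for _ in range(int(match.group(1)), int(match.group(2)) + 1)),
        text,
    )

print(expand_clause_ranges("1-3,9"))  # 1,2,3,9
```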
@@ -36,7 +40,7 @@ def cleanupVals(text, tag):
return text

def parseXmlNode(node):
for element in node.getiterator('boundary'):
for element in node.getiterator("boundary"):
boundary = AttribDict()

for child in element.getchildren():
@@ -48,7 +52,7 @@ def parseXmlNode(node):

conf.boundaries.append(boundary)

for element in node.getiterator('test'):
for element in node.getiterator("test"):
test = AttribDict()

for child in element.getchildren():

@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
@@ -35,7 +35,6 @@ from lib.core.enums import PLACE
from lib.core.exception import SqlmapCompressionException
from lib.core.settings import BLOCKED_IP_REGEX
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
from lib.core.settings import DEV_EMAIL_ADDRESS
from lib.core.settings import EVENTVALIDATION_REGEX
from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
from lib.core.settings import META_CHARSET_REGEX
@@ -343,7 +342,7 @@ def decodePage(page, contentEncoding, contentType):
retVal = match.group(0)
try:
retVal = unichr(int(match.group(1)))
except ValueError:
except (ValueError, OverflowError):
pass
return retVal
page = re.sub(r"&#(\d+);", _, page)

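The `decodePage()` fix above also catches `OverflowError`, which `unichr()` can raise for absurdly large numeric entities. A hedged sketch of the same guard (Python 3 `chr`, keeping `OverflowError` for parity with the original):

```python
import re

def decode_numeric_entities(page):
    # Replace "&#NNN;" with the corresponding character, leaving bogus values untouched
    def _(match):
        retVal = match.group(0)
        try:
            retVal = chr(int(match.group(1)))
        except (ValueError, OverflowError):
            pass
        return retVal
    return re.sub(r"&#(\d+);", _, page)

print(decode_numeric_entities("foo&#65;bar&#99999999999999999999;"))  # fooAbar&#99999999999999999999;
```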
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -137,10 +137,14 @@ def _comparison(page, headers, code, getRatioValue, pageLength):
seq1 = seq1.replace(REFLECTED_VALUE_MARKER, "")
seq2 = seq2.replace(REFLECTED_VALUE_MARKER, "")

if kb.heavilyDynamic:
seq1 = seq1.split("\n")
seq2 = seq2.split("\n")

seqMatcher.set_seq1(seq1)
seqMatcher.set_seq2(seq2)

ratio = round(seqMatcher.quick_ratio(), 3)
ratio = round(seqMatcher.quick_ratio() if not kb.heavilyDynamic else seqMatcher.ratio(), 3)

# If the url is stable and we did not set yet the match ratio and the
# current injected value changes the url page content

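The comparison change above switches heavily dynamic pages to a line-by-line `difflib` comparison and uses the slower but exact `ratio()` there. A small sketch of the difference (illustrative pages only):

```python
from difflib import SequenceMatcher

def page_similarity(seq1, seq2, heavily_dynamic=False):
    # For heavily dynamic pages, compare lists of lines with the exact ratio();
    # otherwise compare raw strings with the cheaper quick_ratio()
    if heavily_dynamic:
        seq1, seq2 = seq1.split("\n"), seq2.split("\n")
    matcher = SequenceMatcher(None, seq1, seq2)
    return round(matcher.ratio() if heavily_dynamic else matcher.quick_ratio(), 3)

print(page_similarity("<p>1</p>\n<p>same</p>", "<p>2</p>\n<p>same</p>", heavily_dynamic=True))  # 0.5
```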
@@ -1,14 +1,13 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'LICENSE' for copying permission
|
||||
"""
|
||||
|
||||
import binascii
|
||||
import compiler
|
||||
import httplib
|
||||
import json
|
||||
import keyword
|
||||
import logging
|
||||
import re
|
||||
@@ -17,6 +16,7 @@ import string
|
||||
import struct
|
||||
import time
|
||||
import traceback
|
||||
import urllib
|
||||
import urllib2
|
||||
import urlparse
|
||||
|
||||
@@ -64,11 +64,13 @@ from lib.core.common import urlencode
|
||||
from lib.core.data import conf
|
||||
from lib.core.data import kb
|
||||
from lib.core.data import logger
|
||||
from lib.core.datatype import AttribDict
|
||||
from lib.core.decorators import stackedmethod
|
||||
from lib.core.dicts import POST_HINT_CONTENT_TYPES
|
||||
from lib.core.enums import ADJUST_TIME_DELAY
|
||||
from lib.core.enums import AUTH_TYPE
|
||||
from lib.core.enums import CUSTOM_LOGGING
|
||||
from lib.core.enums import HINT
|
||||
from lib.core.enums import HTTP_HEADER
|
||||
from lib.core.enums import HTTPMETHOD
|
||||
from lib.core.enums import NULLCONNECTION
|
||||
@@ -76,7 +78,7 @@ from lib.core.enums import PAYLOAD
|
||||
from lib.core.enums import PLACE
|
||||
from lib.core.enums import POST_HINT
|
||||
from lib.core.enums import REDIRECTION
|
||||
from lib.core.enums import WEB_API
|
||||
from lib.core.enums import WEB_PLATFORM
|
||||
from lib.core.exception import SqlmapCompressionException
|
||||
from lib.core.exception import SqlmapConnectionException
|
||||
from lib.core.exception import SqlmapGenericException
|
||||
@@ -88,6 +90,7 @@ from lib.core.settings import BOUNDARY_BACKSLASH_MARKER
|
||||
from lib.core.settings import DEFAULT_CONTENT_TYPE
|
||||
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
|
||||
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
|
||||
from lib.core.settings import DEFAULT_USER_AGENT
|
||||
from lib.core.settings import EVALCODE_KEYWORD_SUFFIX
|
||||
from lib.core.settings import HTTP_ACCEPT_HEADER_VALUE
|
||||
from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
|
||||
@@ -98,6 +101,8 @@ from lib.core.settings import MAX_CONSECUTIVE_CONNECTION_ERRORS
|
||||
from lib.core.settings import MAX_MURPHY_SLEEP_TIME
|
||||
from lib.core.settings import META_REFRESH_REGEX
|
||||
from lib.core.settings import MIN_TIME_RESPONSES
|
||||
from lib.core.settings import MAX_TIME_RESPONSES
|
||||
from lib.core.settings import IDS_WAF_CHECK_PAYLOAD
|
||||
from lib.core.settings import IS_WIN
|
||||
from lib.core.settings import LARGE_CHUNK_TRIM_MARKER
|
||||
from lib.core.settings import PAYLOAD_DELIMITER
|
||||
@@ -169,9 +174,11 @@ class Connect(object):
|
||||
warnMsg += "(e.g. 'https://help.ubuntu.com/community/Tor')"
|
||||
else:
|
||||
warnMsg = "if the problem persists please check that the provided "
|
||||
warnMsg += "target URL is valid. In case that it is, you can try to rerun "
|
||||
warnMsg += "with the switch '--random-agent' turned on "
|
||||
warnMsg += "and/or proxy switches ('--ignore-proxy', '--proxy',...)"
|
||||
warnMsg += "target URL is reachable. In case that it is, "
|
||||
warnMsg += "you can try to rerun with "
|
||||
if not conf.randomAgent:
|
||||
warnMsg += "switch '--random-agent' and/or "
|
||||
warnMsg += "proxy switches ('--ignore-proxy', '--proxy',...)"
|
||||
singleTimeWarnMessage(warnMsg)
|
||||
|
||||
elif conf.threads > 1:
|
||||
@@ -358,14 +365,21 @@ class Connect(object):
|
||||
if kb.proxyAuthHeader:
|
||||
headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader
|
||||
|
||||
if not getHeader(headers, HTTP_HEADER.ACCEPT):
|
||||
headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE
|
||||
if not conf.requestFile or not target:
|
||||
if not getHeader(headers, HTTP_HEADER.HOST):
|
||||
headers[HTTP_HEADER.HOST] = getHostHeader(url)
|
||||
|
||||
if not getHeader(headers, HTTP_HEADER.HOST) or not target:
|
||||
headers[HTTP_HEADER.HOST] = getHostHeader(url)
|
||||
if not getHeader(headers, HTTP_HEADER.ACCEPT):
|
||||
headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE
|
||||
|
||||
if not getHeader(headers, HTTP_HEADER.ACCEPT_ENCODING):
|
||||
headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"
|
||||
if not getHeader(headers, HTTP_HEADER.ACCEPT_ENCODING):
|
||||
headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"
|
||||
|
||||
elif conf.requestFile and getHeader(headers, HTTP_HEADER.USER_AGENT) == DEFAULT_USER_AGENT:
|
||||
for header in headers:
|
||||
if header.upper() == HTTP_HEADER.USER_AGENT.upper():
|
||||
del headers[header]
|
||||
break
|
||||
|
||||
if post is not None and not multipart and not getHeader(headers, HTTP_HEADER.CONTENT_TYPE):
|
||||
headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)
|
||||
@@ -382,10 +396,6 @@ class Connect(object):
|
||||
if conf.keepAlive:
|
||||
headers[HTTP_HEADER.CONNECTION] = "keep-alive"
|
||||
|
||||
# Reset header values to original in case of provided request file
|
||||
if target and conf.requestFile:
|
||||
headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie})
|
||||
|
||||
if auxHeaders:
|
||||
headers = forgeHeaders(auxHeaders, headers)
|
||||
|
||||
@@ -408,8 +418,10 @@ class Connect(object):
|
||||
ws.close()
|
||||
code = ws.status
|
||||
status = httplib.responses[code]
|
||||
|
||||
class _(dict):
|
||||
pass
|
||||
|
||||
responseHeaders = _(ws.getheaders())
|
||||
responseHeaders.headers = ["%s: %s\r\n" % (_[0].capitalize(), _[1]) for _ in responseHeaders.items()]
|
||||
|
||||
@@ -489,9 +501,10 @@ class Connect(object):
|
||||
page = Connect._connReadProxy(conn) if not skipRead else None
|
||||
|
||||
if conn:
|
||||
code = conn.code
|
||||
code = (code or conn.code) if conn.code == kb.originalCode else conn.code # do not override redirection code (for comparison purposes)
|
||||
responseHeaders = conn.info()
|
||||
responseHeaders[URI_HTTP_HEADER] = conn.geturl()
|
||||
kb.serverHeader = responseHeaders.get(HTTP_HEADER.SERVER, kb.serverHeader)
|
||||
else:
|
||||
code = None
|
||||
responseHeaders = {}
|
||||
@@ -586,7 +599,7 @@ class Connect(object):
|
||||
threadData.lastHTTPError = (threadData.lastRequestUID, code, status)
|
||||
kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1
|
||||
|
||||
responseMsg += "[#%d] (%d %s):\r\n" % (threadData.lastRequestUID, code, status)
|
||||
responseMsg += "[#%d] (%s %s):\r\n" % (threadData.lastRequestUID, code, status)
|
||||
|
||||
if responseHeaders:
|
||||
logHeaders = "\r\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
|
||||
@@ -645,7 +658,7 @@ class Connect(object):
|
||||
warnMsg = "connection was forcibly closed by the target URL"
|
||||
elif "timed out" in tbMsg:
|
||||
if kb.testMode and kb.testType not in (None, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED):
|
||||
singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS/IDS) is dropping 'suspicious' requests")
|
||||
singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS) is dropping 'suspicious' requests")
|
||||
kb.droppingRequests = True
|
||||
warnMsg = "connection timed out to the target URL"
|
||||
elif "Connection reset" in tbMsg:
|
||||
@@ -654,7 +667,7 @@ class Connect(object):
|
||||
conf.disablePrecon = True
|
||||
|
||||
if kb.testMode:
|
||||
singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS/IDS) is resetting 'suspicious' requests")
|
||||
singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS) is resetting 'suspicious' requests")
|
||||
kb.droppingRequests = True
|
||||
warnMsg = "connection reset to the target URL"
|
||||
elif "URLError" in tbMsg or "error" in tbMsg:
|
||||
@@ -736,16 +749,16 @@ class Connect(object):
|
||||
if conn and getattr(conn, "redurl", None):
|
||||
_ = urlparse.urlsplit(conn.redurl)
|
||||
_ = ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else ""))
|
||||
requestMsg = re.sub(r"(\n[A-Z]+ ).+?( HTTP/\d)", "\g<1>%s\g<2>" % getUnicode(_).replace("\\", "\\\\"), requestMsg, 1)
|
||||
requestMsg = re.sub(r"(\n[A-Z]+ ).+?( HTTP/\d)", r"\g<1>%s\g<2>" % getUnicode(_).replace("\\", "\\\\"), requestMsg, 1)
|
||||
|
||||
if kb.resendPostOnRedirect is False:
|
||||
requestMsg = re.sub(r"(\[#\d+\]:\n)POST ", "\g<1>GET ", requestMsg)
|
||||
requestMsg = re.sub(r"(\[#\d+\]:\n)POST ", r"\g<1>GET ", requestMsg)
|
||||
requestMsg = re.sub(r"(?i)Content-length: \d+\n", "", requestMsg)
|
||||
requestMsg = re.sub(r"(?s)\n\n.+", "\n", requestMsg)
|
||||
|
||||
responseMsg += "[#%d] (%d %s):\r\n" % (threadData.lastRequestUID, conn.code, status)
|
||||
else:
|
||||
responseMsg += "[#%d] (%d %s):\r\n" % (threadData.lastRequestUID, code, status)
|
||||
responseMsg += "[#%d] (%s %s):\r\n" % (threadData.lastRequestUID, code, status)
|
||||
|
||||
if responseHeaders:
|
||||
logHeaders = "\r\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
|
||||
@@ -812,10 +825,14 @@ class Connect(object):
conf.httpHeaders.append((HTTP_HEADER.CONTENT_TYPE, contentType))

if payload:
delimiter = conf.paramDel or (DEFAULT_GET_POST_DELIMITER if place != PLACE.COOKIE else DEFAULT_COOKIE_DELIMITER)

if not disableTampering and kb.tamperFunctions:
for function in kb.tamperFunctions:
hints = {}

try:
payload = function(payload=payload, headers=auxHeaders)
payload = function(payload=payload, headers=auxHeaders, delimiter=delimiter, hints=hints)
except Exception, ex:
errMsg = "error occurred while running tamper "
errMsg += "function '%s' ('%s')" % (function.func_name, getSafeExString(ex))
@@ -828,6 +845,18 @@ class Connect(object):

value = agent.replacePayload(value, payload)

if hints:
if HINT.APPEND in hints:
value = "%s%s%s" % (value, delimiter, hints[HINT.APPEND])

if HINT.PREPEND in hints:
if place == PLACE.URI:
match = re.search(r"\w+\s*=\s*%s" % PAYLOAD_DELIMITER, value) or re.search(r"[^?%s/]=\s*%s" % (re.escape(delimiter), PAYLOAD_DELIMITER), value)
if match:
value = value.replace(match.group(0), "%s%s%s" % (hints[HINT.PREPEND], delimiter, match.group(0)))
else:
value = "%s%s%s" % (hints[HINT.PREPEND], delimiter, value)

logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload.replace('\\', BOUNDARY_BACKSLASH_MARKER)).replace(BOUNDARY_BACKSLASH_MARKER, '\\'))

if place == PLACE.CUSTOM_POST and kb.postHint:
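The hunk above passes two new keyword arguments (`delimiter` and `hints`) to every tamper function, letting a script ask the engine to append or prepend extra parameters around the payload. A minimal sketch of a tamper script written against that calling convention (the appended parameter is made up for illustration; treat this as an assumption about the interface, not an official script):

```python
# Hypothetical tamper script: asks the engine to append a benign parameter after the payload
from lib.core.enums import HINT

__priority__ = 0

def dependencies():
    pass

def tamper(payload, **kwargs):
    hints = kwargs.get("hints", {})
    delimiter = kwargs.get("delimiter", "&")  # engine supplies '&' or ';' depending on the place
    hints[HINT.APPEND] = "foo=bar"            # engine expands this to "...<delimiter>foo=bar"
    return payload
```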
@@ -844,10 +873,10 @@ class Connect(object):
value = agent.replacePayload(value, payload)
else:
# GET, POST, URI and Cookie payload needs to be thoroughly URL encoded
if (place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) or place == PLACE.CUSTOM_HEADER and value.split(',')[0] == HTTP_HEADER.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and postUrlEncode:
if (place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) or place == PLACE.CUSTOM_HEADER and value.split(',')[0].upper() == HTTP_HEADER.COOKIE.upper()) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and postUrlEncode:
skip = False

if place == PLACE.COOKIE or place == PLACE.CUSTOM_HEADER and value.split(',')[0] == HTTP_HEADER.COOKIE:
if place == PLACE.COOKIE or place == PLACE.CUSTOM_HEADER and value.split(',')[0].upper() == HTTP_HEADER.COOKIE.upper():
if kb.cookieEncodeChoice is None:
msg = "do you want to URL encode cookie values (implementation specific)? %s" % ("[Y/n]" if not conf.url.endswith(".aspx") else "[y/N]") # Reference: https://support.microsoft.com/en-us/kb/313282
kb.cookieEncodeChoice = readInput(msg, default='Y' if not conf.url.endswith(".aspx") else 'N', boolean=True)
@@ -862,7 +891,7 @@ class Connect(object):
postUrlEncode = False

if conf.hpp:
if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_API.ASP, WEB_API.ASPX)):
if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_PLATFORM.ASP, WEB_PLATFORM.ASPX)):
warnMsg = "HTTP parameter pollution should work only against "
warnMsg += "ASP(.NET) targets"
singleTimeWarnMessage(warnMsg)

@@ -934,63 +963,77 @@ class Connect(object):
if conf.csrfToken:
def _adjustParameter(paramString, parameter, newValue):
retVal = paramString
match = re.search(r"%s=[^&]*" % re.escape(parameter), paramString)
match = re.search(r"%s=[^&]*" % re.escape(parameter), paramString, re.I)
if match:
retVal = re.sub(re.escape(match.group(0)), "%s=%s" % (parameter, newValue), paramString)
retVal = re.sub("(?i)%s" % re.escape(match.group(0)), ("%s=%s" % (parameter, newValue)).replace('\\', r'\\'), paramString)
else:
match = re.search(r"(%s[\"']:[\"'])([^\"']+)" % re.escape(parameter), paramString)
match = re.search(r"(%s[\"']:[\"'])([^\"']+)" % re.escape(parameter), paramString, re.I)
if match:
retVal = re.sub(re.escape(match.group(0)), "%s%s" % (match.group(1), newValue), paramString)
retVal = re.sub("(?i)%s" % re.escape(match.group(0)), "%s%s" % (match.group(1), newValue), paramString)
return retVal

token = AttribDict()
page, headers, code = Connect.getPage(url=conf.csrfUrl or conf.url, data=conf.data if conf.csrfUrl == conf.url else None, method=conf.method if conf.csrfUrl == conf.url else None, cookie=conf.parameters.get(PLACE.COOKIE), direct=True, silent=True, ua=conf.parameters.get(PLACE.USER_AGENT), referer=conf.parameters.get(PLACE.REFERER), host=conf.parameters.get(PLACE.HOST))
token = extractRegexResult(r"(?i)<input[^>]+\bname=[\"']?%s[\"']?[^>]*\bvalue=(?P<result>(\"([^\"]+)|'([^']+)|([^ >]+)))" % re.escape(conf.csrfToken), page or "")
match = re.search(r"(?i)<input[^>]+\bname=[\"']?(?P<name>%s)\b[^>]*\bvalue=[\"']?(?P<value>[^>'\"]*)" % conf.csrfToken, page or "", re.I)

if not match:
match = re.search(r"(?i)<input[^>]+\bvalue=[\"']?(?P<value>[^>'\"]*)[\"']?[^>]*\bname=[\"']?(?P<name>%s)\b" % conf.csrfToken, page or "", re.I)

if not match:
match = re.search(r"(?P<name>%s)[\"']:[\"'](?P<value>[^\"']+)" % conf.csrfToken, page or "", re.I)

if not match:
match = re.search(r"\b(?P<name>%s)\s*[:=]\s*(?P<value>\w+)" % conf.csrfToken, str(headers), re.I)

if not match:
match = re.search(r"\b(?P<name>%s)\s*=\s*['\"]?(?P<value>[^;'\"]+)" % conf.csrfToken, page or "", re.I)

if match:
token.name, token.value = match.group("name"), match.group("value")

match = re.search(r"String\.fromCharCode\(([\d+, ]+)\)", token.value)
if match:
token.value = "".join(chr(int(_)) for _ in match.group(1).replace(' ', "").split(','))

if not token:
token = extractRegexResult(r"(?i)<input[^>]+\bvalue=(?P<result>(\"([^\"]+)|'([^']+)|([^ >]+)))[^>]+\bname=[\"']?%s[\"']?" % re.escape(conf.csrfToken), page or "")

if not token:
match = re.search(r"%s[\"']:[\"']([^\"']+)" % re.escape(conf.csrfToken), page or "")
token = match.group(1) if match else None

if not token:
if conf.csrfUrl != conf.url and code == httplib.OK:
if conf.csrfUrl and conf.csrfToken and conf.csrfUrl != conf.url and code == httplib.OK:
if headers and "text/plain" in headers.get(HTTP_HEADER.CONTENT_TYPE, ""):
token = page
token.name = conf.csrfToken
token.value = page

if not token and conf.cj and any(_.name == conf.csrfToken for _ in conf.cj):
if not token and conf.cj and any(re.search(conf.csrfToken, _.name, re.I) for _ in conf.cj):
for _ in conf.cj:
if _.name == conf.csrfToken:
token = _.value
if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))):
if re.search(conf.csrfToken, _.name, re.I):
token.name, token.value = _.name, _.value
if not any(re.search(conf.csrfToken, ' '.join(_), re.I) for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))):
if post:
post = "%s%s%s=%s" % (post, conf.paramDel or DEFAULT_GET_POST_DELIMITER, conf.csrfToken, token)
post = "%s%s%s=%s" % (post, conf.paramDel or DEFAULT_GET_POST_DELIMITER, token.name, token.value)
elif get:
get = "%s%s%s=%s" % (get, conf.paramDel or DEFAULT_GET_POST_DELIMITER, conf.csrfToken, token)
get = "%s%s%s=%s" % (get, conf.paramDel or DEFAULT_GET_POST_DELIMITER, token.name, token.value)
else:
get = "%s=%s" % (conf.csrfToken, token)
get = "%s=%s" % (token.name, token.value)
break

if not token:
errMsg = "anti-CSRF token '%s' can't be found at '%s'" % (conf.csrfToken, conf.csrfUrl or conf.url)
errMsg = "anti-CSRF token '%s' can't be found at '%s'" % (conf.csrfToken._original, conf.csrfUrl or conf.url)
if not conf.csrfUrl:
errMsg += ". You can try to rerun by providing "
errMsg += "a valid value for option '--csrf-url'"
raise SqlmapTokenException(errMsg)

if token:
token = token.strip("'\"")
token.value = token.value.strip("'\"")

for place in (PLACE.GET, PLACE.POST):
if place in conf.parameters:
if place == PLACE.GET and get:
get = _adjustParameter(get, conf.csrfToken, token)
get = _adjustParameter(get, token.name, token.value)
elif place == PLACE.POST and post:
post = _adjustParameter(post, conf.csrfToken, token)
post = _adjustParameter(post, token.name, token.value)

for i in xrange(len(conf.httpHeaders)):
if conf.httpHeaders[i][0].lower() == conf.csrfToken.lower():
conf.httpHeaders[i] = (conf.httpHeaders[i][0], token)
if conf.httpHeaders[i][0].lower() == token.name.lower():
conf.httpHeaders[i] = (conf.httpHeaders[i][0], token.value)

if conf.rParam:
def _randomizeParameter(paramString, randomParameter):
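The `_adjustParameter()` changes above make the token replacement case-insensitive and escape backslashes in the replacement so `re.sub` does not mangle it. A standalone sketch of that update step (parameter string and names are illustrative):

```python
import re

def adjust_parameter(param_string, parameter, new_value):
    # Replace "name=value" case-insensitively, protecting backslashes in the replacement
    match = re.search(r"%s=[^&]*" % re.escape(parameter), param_string, re.I)
    if match:
        replacement = ("%s=%s" % (parameter, new_value)).replace("\\", r"\\")
        return re.sub("(?i)%s" % re.escape(match.group(0)), replacement, param_string)
    return param_string

print(adjust_parameter("id=1&CSRFToken=old", "csrftoken", "new"))  # id=1&csrftoken=new
```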
@@ -1104,33 +1147,33 @@ class Connect(object):
|
||||
if kb.postHint in (POST_HINT.XML, POST_HINT.SOAP):
|
||||
if re.search(r"<%s\b" % re.escape(name), post):
|
||||
found = True
|
||||
post = re.sub(r"(?s)(<%s\b[^>]*>)(.*?)(</%s)" % (re.escape(name), re.escape(name)), "\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
|
||||
post = re.sub(r"(?s)(<%s\b[^>]*>)(.*?)(</%s)" % (re.escape(name), re.escape(name)), r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
|
||||
elif re.search(r"\b%s>" % re.escape(name), post):
|
||||
found = True
|
||||
post = re.sub(r"(?s)(\b%s>)(.*?)(</[^<]*\b%s>)" % (re.escape(name), re.escape(name)), "\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
|
||||
post = re.sub(r"(?s)(\b%s>)(.*?)(</[^<]*\b%s>)" % (re.escape(name), re.escape(name)), r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
|
||||
|
||||
regex = r"\b(%s)\b([^\w]+)(\w+)" % re.escape(name)
|
||||
if not found and re.search(regex, (post or "")):
|
||||
found = True
|
||||
post = re.sub(regex, "\g<1>\g<2>%s" % value.replace('\\', r'\\'), post)
|
||||
post = re.sub(regex, r"\g<1>\g<2>%s" % value.replace('\\', r'\\'), post)
|
||||
|
||||
regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), re.escape(name), re.escape(delimiter))
|
||||
if not found and re.search(regex, (post or "")):
|
||||
found = True
|
||||
post = re.sub(regex, "\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
|
||||
post = re.sub(regex, r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
|
||||
|
||||
if re.search(regex, (get or "")):
|
||||
found = True
|
||||
get = re.sub(regex, "\g<1>%s\g<3>" % value.replace('\\', r'\\'), get)
|
||||
get = re.sub(regex, r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), get)
|
||||
|
||||
if re.search(regex, (query or "")):
|
||||
found = True
|
||||
uri = re.sub(regex.replace(r"\A", r"\?"), "\g<1>%s\g<3>" % value.replace('\\', r'\\'), uri)
|
||||
uri = re.sub(regex.replace(r"\A", r"\?"), r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), uri)
|
||||
|
||||
regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER), re.escape(name), re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER))
|
||||
if re.search(regex, (cookie or "")):
|
||||
found = True
|
||||
cookie = re.sub(regex, "\g<1>%s\g<3>" % value.replace('\\', r'\\'), cookie)
|
||||
cookie = re.sub(regex, r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), cookie)
|
||||
|
||||
if not found:
|
||||
if post is not None:
|
||||
@@ -1161,7 +1204,7 @@ class Connect(object):
|
||||
singleTimeWarnMessage(warnMsg)
|
||||
|
||||
warnMsg = "[%s] [WARNING] %stime-based comparison requires " % (time.strftime("%X"), "(case) " if kb.responseTimeMode else "")
|
||||
warnMsg += "larger statistical model, please wait"
|
||||
warnMsg += "%s statistical model, please wait" % ("larger" if len(kb.responseTimes) == 1 else "reset of")
|
||||
dataToStdout(warnMsg)
|
||||
|
||||
while len(kb.responseTimes[kb.responseTimeMode]) < MIN_TIME_RESPONSES:
|
||||
@@ -1216,14 +1259,17 @@ class Connect(object):
_, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ))

if headers:
if kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and headers.get(HTTP_HEADER.CONTENT_LENGTH):
pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH])
elif kb.nullConnection == NULLCONNECTION.RANGE and headers.get(HTTP_HEADER.CONTENT_RANGE):
pageLength = int(headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find('/') + 1:])
try:
if kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and headers.get(HTTP_HEADER.CONTENT_LENGTH):
pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH].split(',')[0])
elif kb.nullConnection == NULLCONNECTION.RANGE and headers.get(HTTP_HEADER.CONTENT_RANGE):
pageLength = int(headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find('/') + 1:])
except ValueError:
pass
finally:
kb.pageCompress = popValue()

if not pageLength:
if pageLength is None:
try:
page, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)
except MemoryError:
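Note: the change above wraps the header parsing in try/except and takes only the first value of Content-Length, guarding against servers that send duplicated values (e.g. "100, 100") or non-numeric garbage; for Range-based null connections the total size sits after the '/' in Content-Range. A self-contained sketch of the same defensive parsing (a plain dict stands in for the real header object):

    def page_length_from_headers(headers):
        try:
            if "Content-Length" in headers:
                return int(headers["Content-Length"].split(',')[0])
            if "Content-Range" in headers:
                return int(headers["Content-Range"].split('/')[-1])
        except ValueError:
            pass
        return None

    print(page_length_from_headers({"Content-Length": "100, 100"}))        # 100
    print(page_length_from_headers({"Content-Range": "bytes 0-0/54321"}))  # 54321
    print(page_length_from_headers({"Content-Length": "n/a"}))             # None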
@@ -1231,11 +1277,20 @@ class Connect(object):
warnMsg = "site returned insanely large response"
if kb.testMode:
warnMsg += " in testing phase. This is a common "
warnMsg += "behavior in custom WAF/IPS/IDS solutions"
warnMsg += "behavior in custom WAF/IPS solutions"
singleTimeWarnMessage(warnMsg)

if conf.secondOrder:
page, headers, code = Connect.getPage(url=conf.secondOrder, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)
if conf.secondUrl:
page, headers, code = Connect.getPage(url=conf.secondUrl, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)
elif kb.secondReq and IDS_WAF_CHECK_PAYLOAD not in urllib.unquote(value or ""):
def _(value):
if kb.customInjectionMark in (value or ""):
if payload is None:
value = value.replace(kb.customInjectionMark, "")
else:
value = re.sub(r"\w*%s" % re.escape(kb.customInjectionMark), payload, value)
return value
page, headers, code = Connect.getPage(url=_(kb.secondReq[0]), post=_(kb.secondReq[2]), method=kb.secondReq[1], cookie=kb.secondReq[3], silent=silent, auxHeaders=dict(auxHeaders, **dict(kb.secondReq[4])), response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)

threadData.lastQueryDuration = calculateDeltaSeconds(start)
threadData.lastPage = page
@@ -1251,6 +1306,8 @@ class Connect(object):
elif noteResponseTime:
kb.responseTimes.setdefault(kb.responseTimeMode, [])
kb.responseTimes[kb.responseTimeMode].append(threadData.lastQueryDuration)
if len(kb.responseTimes) > MAX_TIME_RESPONSES:
kb.responseTimes = kb.responseTimes[-MAX_TIME_RESPONSES:]

if not response and removeReflection:
page = removeReflectiveValues(page, payload)
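Note: the small helper _() above rewrites a stored second-order request before replaying it: with no payload the custom injection mark is simply stripped, otherwise the marked token is swapped for the payload. A hedged standalone version (the mark string below is a stand-in, not sqlmap's actual marker):

    import re

    MARK = "#INJECT#"  # stand-in for kb.customInjectionMark

    def apply_payload(template, payload=None):
        if MARK in (template or ""):
            if payload is None:
                return template.replace(MARK, "")
            return re.sub(r"\w*%s" % re.escape(MARK), payload, template)
        return template

    print(apply_payload("id=1&sort=name#INJECT#"))                # id=1&sort=name
    print(apply_payload("id=1&sort=name#INJECT#", "(SELECT 1)"))  # id=1&sort=(SELECT 1)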
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -12,6 +12,7 @@ import socket
import urllib2

from lib.core.common import getSafeExString
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.exception import SqlmapConnectionException
@@ -48,7 +49,7 @@ class HTTPSConnection(httplib.HTTPSConnection):

# Reference(s): https://docs.python.org/2/library/ssl.html#ssl.SSLContext
# https://www.mnot.net/blog/2014/12/27/python_2_and_tls_sni
if re.search(r"\A[\d.]+\Z", self.host) is None and kb.tlsSNI.get(self.host) is not False and hasattr(ssl, "SSLContext"):
if re.search(r"\A[\d.]+\Z", self.host) is None and kb.tlsSNI.get(self.host) is not False and not any((conf.proxy, conf.tor)) and hasattr(ssl, "SSLContext"):
for protocol in filter(lambda _: _ >= ssl.PROTOCOL_TLSv1, _protocols):
try:
sock = create_sock()
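Note: the condition above now also skips the SNI code path when a proxy or Tor is in use, since the TLS handshake is then made towards the tunnel rather than the target host. For reference, a minimal Python 3 sketch of an SNI-aware connection (the hostname is a placeholder):

    import socket
    import ssl

    hostname = "example.com"  # placeholder target

    context = ssl.create_default_context()
    with socket.create_connection((hostname, 443), timeout=10) as sock:
        # server_hostname is what puts the SNI extension on the wire
        with context.wrap_socket(sock, server_hostname=hostname) as tls:
            print(tls.version())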
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -440,7 +440,8 @@ def getValue(expression, blind=True, union=True, error=True, time=True, fromUser
found = (value is not None) or (value is None and expectingNone) or count >= MAX_TECHNIQUES_PER_VALUE

if time and (isTechniqueAvailable(PAYLOAD.TECHNIQUE.TIME) or isTechniqueAvailable(PAYLOAD.TECHNIQUE.STACKED)) and not found:
kb.responseTimeMode = re.sub(r"(?i)[^a-z]", "", re.sub(r"'[^']+'", "", re.sub(r"(?i)(\w+)\(.+\)", r"\g<1>", expression))) if re.search(r"(?i)SELECT.+FROM", expression) else None
match = re.search(r"\bFROM\b ([^ ]+).+ORDER BY ([^ ]+)", expression)
kb.responseTimeMode = "%s|%s" % (match.group(1), match.group(2)) if match else None

if isTechniqueAvailable(PAYLOAD.TECHNIQUE.TIME):
kb.technique = PAYLOAD.TECHNIQUE.TIME
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -9,6 +9,8 @@ import httplib
import urllib2

from lib.core.data import conf
from lib.core.common import getSafeExString
from lib.core.exception import SqlmapConnectionException

class HTTPSPKIAuthHandler(urllib2.HTTPSHandler):
def __init__(self, auth_file):
@@ -19,5 +21,10 @@ class HTTPSPKIAuthHandler(urllib2.HTTPSHandler):
return self.do_open(self.getConnection, req)

def getConnection(self, host, timeout=None):
# Reference: https://docs.python.org/2/library/ssl.html#ssl.SSLContext.load_cert_chain
return httplib.HTTPSConnection(host, cert_file=self.auth_file, key_file=self.auth_file, timeout=conf.timeout)
try:
# Reference: https://docs.python.org/2/library/ssl.html#ssl.SSLContext.load_cert_chain
return httplib.HTTPSConnection(host, cert_file=self.auth_file, key_file=self.auth_file, timeout=conf.timeout)
except IOError, ex:
errMsg = "error occurred while using key "
errMsg += "file '%s' ('%s')" % (self.auth_file, getSafeExString(ex))
raise SqlmapConnectionException(errMsg)
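Note: the handler above now converts an IOError on the client certificate/key file into a SqlmapConnectionException with a readable message; the same file is passed as both cert_file and key_file, i.e. a single PEM holding the private key and the certificate. A hedged, modern-Python equivalent of that error handling using the ssl module directly:

    import ssl

    def make_client_context(pem_path):
        ctx = ssl.create_default_context()
        try:
            # one PEM containing both the private key and the certificate
            ctx.load_cert_chain(certfile=pem_path, keyfile=pem_path)
        except (OSError, ssl.SSLError) as ex:
            raise RuntimeError("error occurred while using key file '%s' ('%s')" % (pem_path, ex))
        return ctx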
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -1,11 +1,10 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

import re
import time
import types
import urllib2
@@ -124,12 +123,21 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler):

req.headers[HTTP_HEADER.HOST] = getHostHeader(redurl)
if headers and HTTP_HEADER.SET_COOKIE in headers:
cookies = dict()
delimiter = conf.cookieDel or DEFAULT_COOKIE_DELIMITER
_ = headers[HTTP_HEADER.SET_COOKIE].split(delimiter)[0]
if HTTP_HEADER.COOKIE not in req.headers:
req.headers[HTTP_HEADER.COOKIE] = _
else:
req.headers[HTTP_HEADER.COOKIE] = re.sub(r"%s{2,}" % delimiter, delimiter, ("%s%s%s" % (re.sub(r"\b%s=[^%s]*%s?" % (re.escape(_.split('=')[0]), delimiter, delimiter), "", req.headers[HTTP_HEADER.COOKIE]), delimiter, _)).strip(delimiter))
last = None

for part in req.headers.get(HTTP_HEADER.COOKIE, "").split(delimiter) + headers.getheaders(HTTP_HEADER.SET_COOKIE):
if '=' in part:
part = part.strip()
key, value = part.split('=', 1)
cookies[key] = value
last = key
elif last:
cookies[last] += "%s%s" % (delimiter, part)

req.headers[HTTP_HEADER.COOKIE] = delimiter.join("%s=%s" % (key, cookies[key]) for key in cookies)

try:
result = urllib2.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers)
except urllib2.HTTPError, e:
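Note: instead of blindly prepending only the first Set-Cookie value, the redirect handler now folds every cookie it has seen into a dict keyed by name, so later values override earlier ones and the rebuilt Cookie header contains each name once. A simplified sketch of that merge (it assumes bare name=value strings and a "; " delimiter, which is a simplification of the real handler):

    def merge_cookies(existing_cookie, set_cookie_values, delimiter="; "):
        cookies = {}
        for part in (existing_cookie or "").split(delimiter) + list(set_cookie_values):
            part = part.strip()
            if "=" in part:
                name, _, value = part.partition("=")
                cookies[name] = value  # later values win for the same name
        return delimiter.join("%s=%s" % (name, value) for name, value in cookies.items())

    print(merge_cookies("sid=old; lang=en", ["sid=new123"]))  # sid=new123; lang=en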
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -20,7 +20,6 @@ from lib.core.common import dataToStdout
from lib.core.common import Backend
from lib.core.common import getLocalIP
from lib.core.common import getRemoteIP
from lib.core.common import getUnicode
from lib.core.common import normalizePath
from lib.core.common import ntToPosixSlashes
from lib.core.common import pollProcess
@@ -39,7 +38,6 @@ from lib.core.exception import SqlmapGenericException
from lib.core.settings import IS_WIN
from lib.core.settings import METASPLOIT_SESSION_TIMEOUT
from lib.core.settings import SHELLCODEEXEC_RANDOM_STRING_MARKER
from lib.core.settings import UNICODE_ENCODING
from lib.core.subprocessng import blockingReadFromFD
from lib.core.subprocessng import blockingWriteToFD
from lib.core.subprocessng import Popen as execute
@@ -168,19 +166,8 @@ class Metasploit:

choice = readInput(message, default="%d" % default)

if not choice:
if lst:
choice = getUnicode(default, UNICODE_ENCODING)
else:
return default

elif not choice.isdigit():
logger.warn("invalid value, only digits are allowed")
return self._skeletonSelection(msg, lst, maxValue, default)

elif int(choice) > maxValue or int(choice) < 1:
logger.warn("invalid value, it must be a digit between 1 and %d" % maxValue)
return self._skeletonSelection(msg, lst, maxValue, default)
if not choice or not choice.isdigit() or int(choice) > maxValue or int(choice) < 1:
choice = default

choice = int(choice)

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -108,7 +108,7 @@ class UDF:
return output

def udfCheckNeeded(self):
if (not conf.rFile or (conf.rFile and not Backend.isDbms(DBMS.PGSQL))) and "sys_fileread" in self.sysUdfs:
if (not conf.fileRead or (conf.fileRead and not Backend.isDbms(DBMS.PGSQL))) and "sys_fileread" in self.sysUdfs:
self.sysUdfs.pop("sys_fileread")

if not conf.osPwn:
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -43,7 +43,7 @@ from lib.core.enums import HTTP_HEADER
from lib.core.enums import OS
from lib.core.enums import PAYLOAD
from lib.core.enums import PLACE
from lib.core.enums import WEB_API
from lib.core.enums import WEB_PLATFORM
from lib.core.exception import SqlmapNoneDataException
from lib.core.settings import BACKDOOR_RUN_CMD_TIMEOUT
from lib.core.settings import EVENTVALIDATION_REGEX
@@ -60,7 +60,7 @@ class Web:
"""

def __init__(self):
self.webApi = None
self.webPlatform = None
self.webBaseUrl = None
self.webBackdoorUrl = None
self.webBackdoorFilePath = None
@@ -109,14 +109,14 @@ class Web:
except TypeError:
pass

if self.webApi in getPublicTypeMembers(WEB_API, True):
if self.webPlatform in getPublicTypeMembers(WEB_PLATFORM, True):
multipartParams = {
"upload": "1",
"file": stream,
"uploadDir": directory,
}

if self.webApi == WEB_API.ASPX:
if self.webPlatform == WEB_PLATFORM.ASPX:
multipartParams['__EVENTVALIDATION'] = kb.data.__EVENTVALIDATION
multipartParams['__VIEWSTATE'] = kb.data.__VIEWSTATE

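Note: the WEB_API to WEB_PLATFORM rename above is mechanical, but the membership checks rely on sqlmap's enum-like classes whose public attributes are enumerated at runtime via getPublicTypeMembers(). A toy re-implementation of that pattern for illustration only (not sqlmap's actual code):

    class WEB_PLATFORM(object):
        PHP = "php"
        ASP = "asp"
        ASPX = "aspx"
        JSP = "jsp"

    def public_members(type_, only_values=False):
        # yield the non-underscore class attributes, optionally values only
        for name in dir(type_):
            if not name.startswith("_"):
                value = getattr(type_, name)
                yield value if only_values else (name, value)

    print(list(public_members(WEB_PLATFORM, True)))  # ['asp', 'aspx', 'jsp', 'php']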
@@ -130,7 +130,7 @@ class Web:
else:
return True
else:
logger.error("sqlmap hasn't got a web backdoor nor a web file stager for %s" % self.webApi)
logger.error("sqlmap hasn't got a web backdoor nor a web file stager for %s" % self.webPlatform)
return False

def _webFileInject(self, fileContent, fileName, directory):
@@ -146,8 +146,7 @@ class Web:
query += "OR %d=%d " % (randInt, randInt)

query += getSQLSnippet(DBMS.MYSQL, "write_file_limit", OUTFILE=outFile, HEXSTRING=hexencode(uplQuery, conf.encoding))
query = agent.prefixQuery(query)
query = agent.suffixQuery(query)
query = agent.prefixQuery(query) # Note: No need for suffix as 'write_file_limit' already ends with comment (required)
payload = agent.payload(newValue=query)
page = Request.queryPage(payload)

@@ -159,13 +158,13 @@ class Web:
remote directory within the web server document root.
"""

if self.webBackdoorUrl is not None and self.webStagerUrl is not None and self.webApi is not None:
if self.webBackdoorUrl is not None and self.webStagerUrl is not None and self.webPlatform is not None:
return

self.checkDbmsOs()

default = None
choices = list(getPublicTypeMembers(WEB_API, True))
choices = list(getPublicTypeMembers(WEB_PLATFORM, True))

for ext in choices:
if conf.url.endswith(ext):
@@ -173,7 +172,7 @@ class Web:
break

if not default:
default = WEB_API.ASP if Backend.isOs(OS.WINDOWS) else WEB_API.PHP
default = WEB_PLATFORM.ASP if Backend.isOs(OS.WINDOWS) else WEB_PLATFORM.PHP

message = "which web application language does the web server "
message += "support?\n"
@@ -197,7 +196,7 @@ class Web:
logger.warn("invalid value, it must be between 1 and %d" % len(choices))

else:
self.webApi = choices[int(choice) - 1]
self.webPlatform = choices[int(choice) - 1]
break

if not kb.absFilePaths:
@@ -219,7 +218,7 @@ class Web:
finally:
been.add(url)

url = re.sub(r"(\.\w+)\Z", "~\g<1>", conf.url)
url = re.sub(r"(\.\w+)\Z", r"~\g<1>", conf.url)
if url not in been:
try:
page, _, _ = Request.getPage(url=url, raise404=False, silent=True)
@@ -231,7 +230,7 @@ class Web:

for place in (PLACE.GET, PLACE.POST):
if place in conf.parameters:
value = re.sub(r"(\A|&)(\w+)=", "\g<2>[]=", conf.parameters[place])
value = re.sub(r"(\A|&)(\w+)=", r"\g<2>[]=", conf.parameters[place])
if "[]" in value:
page, headers, _ = Request.queryPage(value=value, place=place, content=True, raise404=False, silent=True, noteResponseTime=False)
parseFilePaths(page)
@@ -243,12 +242,12 @@ class Web:
cookie = headers[HTTP_HEADER.SET_COOKIE]

if cookie:
value = re.sub(r"(\A|;)(\w+)=[^;]*", "\g<2>=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", cookie)
value = re.sub(r"(\A|;)(\w+)=[^;]*", r"\g<2>=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", cookie)
if value != cookie:
page, _, _ = Request.queryPage(value=value, place=PLACE.COOKIE, content=True, raise404=False, silent=True, noteResponseTime=False)
parseFilePaths(page)

value = re.sub(r"(\A|;)(\w+)=[^;]*", "\g<2>=", cookie)
value = re.sub(r"(\A|;)(\w+)=[^;]*", r"\g<2>=", cookie)
if value != cookie:
page, _, _ = Request.queryPage(value=value, place=PLACE.COOKIE, content=True, raise404=False, silent=True, noteResponseTime=False)
parseFilePaths(page)
@@ -267,16 +266,16 @@ class Web:
_.append("%s/%s" % (directory.rstrip('/'), path.strip('/')))
directories = _

backdoorName = "tmpb%s.%s" % (randomStr(lowercase=True), self.webApi)
backdoorContent = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "backdoors", "backdoor.%s_" % self.webApi))
backdoorName = "tmpb%s.%s" % (randomStr(lowercase=True), self.webPlatform)
backdoorContent = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "backdoors", "backdoor.%s_" % self.webPlatform))

stagerContent = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "stagers", "stager.%s_" % self.webApi))
stagerContent = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "stagers", "stager.%s_" % self.webPlatform))

for directory in directories:
if not directory:
continue

stagerName = "tmpu%s.%s" % (randomStr(lowercase=True), self.webApi)
stagerName = "tmpu%s.%s" % (randomStr(lowercase=True), self.webPlatform)
self.webStagerFilePath = posixpath.join(ntToPosixSlashes(directory), stagerName)

uploaded = False
@@ -318,14 +317,14 @@ class Web:
infoMsg += "via UNION method"
logger.info(infoMsg)

stagerName = "tmpu%s.%s" % (randomStr(lowercase=True), self.webApi)
stagerName = "tmpu%s.%s" % (randomStr(lowercase=True), self.webPlatform)
self.webStagerFilePath = posixpath.join(ntToPosixSlashes(directory), stagerName)

handle, filename = tempfile.mkstemp()
os.close(handle)

with open(filename, "w+b") as f:
_ = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "stagers", "stager.%s_" % self.webApi))
_ = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "stagers", "stager.%s_" % self.webPlatform))
_ = _.replace(SHELL_WRITABLE_DIR_TAG, utf8encode(directory.replace('/', '\\\\') if Backend.isOs(OS.WINDOWS) else directory))
f.write(_)

@@ -354,7 +353,7 @@ class Web:
logger.warn(warnMsg)
continue

elif self.webApi == WEB_API.ASPX:
elif self.webPlatform == WEB_PLATFORM.ASPX:
kb.data.__EVENTVALIDATION = extractRegexResult(EVENTVALIDATION_REGEX, uplPage)
kb.data.__VIEWSTATE = extractRegexResult(VIEWSTATE_REGEX, uplPage)

@@ -362,7 +361,7 @@ class Web:
infoMsg += "on '%s' - %s" % (directory, self.webStagerUrl)
logger.info(infoMsg)

if self.webApi == WEB_API.ASP:
if self.webPlatform == WEB_PLATFORM.ASP:
match = re.search(r'input type=hidden name=scriptsdir value="([^"]+)"', uplPage)

if match:

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -136,7 +136,7 @@ class XP_cmdshell:

for line in lines:
echoedLine = "echo %s " % line
echoedLine += ">> \"%s\%s\"" % (tmpPath, randDestFile)
echoedLine += ">> \"%s\\%s\"" % (tmpPath, randDestFile)
echoedLines.append(echoedLine)

for echoedLine in echoedLines:
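Note: the one-character change above replaces "\%s" with "\\%s". Both currently yield a literal backslash (since "\%" is not a recognized escape, Python keeps the backslash), but the unrecognized-escape form triggers warnings on newer interpreters, so the explicit "\\" is the correct spelling. A quick self-contained check:

    tmpPath, randDestFile = "C:\\WINDOWS\\Temp", "tmpabc.txt"
    old = ">> \"%s\%s\"" % (tmpPath, randDestFile)   # relies on an unrecognized escape
    new = ">> \"%s\\%s\"" % (tmpPath, randDestFile)  # explicit backslash
    print(old == new)  # True, but only the second form is future-proof
    print(new)         # >> "C:\WINDOWS\Temp\tmpabc.txt"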
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -472,7 +472,6 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
currentCharIndex = threadData.shared.index[0]

if kb.threadContinue:
start = time.time()
val = getChar(currentCharIndex, asciiTbl, not(charsetType is None and conf.charset))
if val is None:
val = INFERENCE_UNKNOWN_CHAR
@@ -485,7 +484,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None

if kb.threadContinue:
if showEta:
progress.progress(calculateDeltaSeconds(start), threadData.shared.index[0])
progress.progress(threadData.shared.index[0])
elif conf.verbose >= 1:
startCharIndex = 0
endCharIndex = 0
@@ -502,7 +501,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
count = threadData.shared.start

for i in xrange(startCharIndex, endCharIndex + 1):
output += '_' if currentValue[i] is None else currentValue[i]
output += '_' if currentValue[i] is None else filterControlChars(currentValue[i] if len(currentValue[i]) == 1 else ' ', replacement=' ')

for i in xrange(length):
count += 1 if currentValue[i] is not None else 0
@@ -519,7 +518,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
status = ' %d/%d (%d%%)' % (_, length, int(100.0 * _ / length))
output += status if _ != length else " " * len(status)

dataToStdout("\r[%s] [INFO] retrieved: %s" % (time.strftime("%X"), filterControlChars(output)))
dataToStdout("\r[%s] [INFO] retrieved: %s" % (time.strftime("%X"), output))

runThreads(numThreads, blindThread, startThreadMsg=False)

@@ -553,7 +552,6 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None

while True:
index += 1
start = time.time()

# Common prediction feature (a.k.a. "good samaritan")
# NOTE: to be used only when multi-threading is not set for
@@ -578,7 +576,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
# Did we have luck?
if result:
if showEta:
progress.progress(calculateDeltaSeconds(start), len(commonValue))
progress.progress(len(commonValue))
elif conf.verbose in (1, 2) or conf.api:
dataToStdout(filterControlChars(commonValue[index - 1:]))

@@ -628,7 +626,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
threadData.shared.value = partialValue = partialValue + val

if showEta:
progress.progress(calculateDeltaSeconds(start), index)
progress.progress(index)
elif conf.verbose in (1, 2) or conf.api:
dataToStdout(filterControlChars(val))

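Note: the progress.progress() calls above drop the per-call elapsed-time argument, which suggests the progress/ETA object now tracks timing itself from the reported index alone. A toy Python 3 sketch of such an interface (purely illustrative, not sqlmap's actual progress class):

    import time

    class Progress(object):
        def __init__(self, total):
            self.total = total
            self.start = time.time()  # timing is owned by the progress object

        def progress(self, index):
            elapsed = time.time() - self.start
            eta = (elapsed / index) * (self.total - index) if index else 0.0
            print("\r%d/%d (elapsed %.1fs, eta %.1fs)" % (index, self.total, elapsed, eta), end="")

    bar = Progress(total=5)
    for i in range(1, 6):
        time.sleep(0.05)
        bar.progress(i)
    print()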
Some files were not shown because too many files have changed in this diff.