Mirror of https://github.com/nmap/nmap.git (synced 2025-12-07 21:21:31 +00:00)

sql-injection with pipeline support and two new arguments:

sql-injection.start (defines the URL where crawling starts)
sql-injection.maxdepth (sets the maximum crawling depth)
This commit is contained in:
joao
2009-08-12 01:46:46 +00:00
parent 0a3a1b091c
commit ecaf3e90a9

View File

@@ -19,6 +19,8 @@ require('stdnse')
require('strbuf') require('strbuf')
require('listop') require('listop')
require('comm') require('comm')
require('http')
require('nsedebug')
author = "Eddie Bell <ejlbell@gmail.com>" author = "Eddie Bell <ejlbell@gmail.com>"
license = "Same as Nmap--See http://nmap.org/book/man-legal.html" license = "Same as Nmap--See http://nmap.org/book/man-legal.html"
@@ -35,52 +37,6 @@ local try = nmap.new_try(catch)
portrule = shortport.port_or_service({80, 443}, {"http","https"}) portrule = shortport.port_or_service({80, 443}, {"http","https"})
--[[
Download a page from host:port http server. The url is passed
straight to the get request, so shouldn't include the domain name.
Returns the raw HTTP response (status line, headers and body) as one
string.
--]]
local function get_page(host, port, httpurl)
local lines = ""
local status = true
local response = ""
-- 10 second timeout; don't read anything before we send the request
local opts = {timeout=10000, recv_before=false}
-- connect to webserver
--soc = nmap.new_socket()
--soc:set_timeout(4000)
--try(soc:connect(host.ip, port.number))
-- undo HTML entity escaping of ampersands in crawled links
httpurl = string.gsub(httpurl, "&amp;", "&")
--print(filename .. ": " .. httpurl)
-- request page
-- build the request line by line; strbuf.dump() joins with CRLF below
local query = strbuf.new()
query = query .. "GET " .. httpurl .. " HTTP/1.1"
query = query .. "Accept: */*"
query = query .. "Accept-Language: en"
query = query .. "User-Agent: Mozilla/5.0 (compatible; Nmap Scripting Engine; http://nmap.org/book/nse.html)"
query = query .. "Host: " .. host.ip .. ":" .. port.number
--try(soc:send(strbuf.dump(query, '\r\n') .. '\r\n\r\n'))
-- comm.tryssl opens the connection (SSL or plain) and sends the request.
-- NOTE(review): soc and bopt are accidental globals — should be local.
soc, response, bopt = comm.tryssl(host, port, strbuf.dump(query, '\r\n') .. '\r\n\r\n' , opts)
-- drain the socket until receive fails (peer close or timeout)
while true do
status, lines = soc:receive_lines(1)
if not status then break end
response = response .. lines
end
soc:close()
return response
end
-- Curried helper: binds host and port so crawl code can fetch by URL alone.
local function get_page_curried(host, port)
	local function fetch(httpurl)
		return get_page(host, port, httpurl)
	end
	return fetch
end
--[[
Pattern match a response from a submitted injection query to see
if it is vulnerable: accept only 2xx/5xx status lines, then look for
common SQL error strings in the (lowercased) body.
--]]
local function check_injection_response(response)
	response = string.lower(response)

	-- Only 200-class and 500-class responses are interesting.
	-- Fixed: the original pattern used '\.'; in Lua patterns the escape
	-- character is '%', so a literal dot must be written '%.'
	-- ('\.' is also an invalid string escape in Lua 5.2+).
	if not (string.find(response, 'http/%d%.%d%s*[25]00')) then
		return false
	end

	return (string.find(response, "invalid query") or
		string.find(response, "sql syntax") or
		string.find(response, "odbc drivers error"))
end
--[[
Replaces each query-string parameter value, one at a time, with a
SQL-injection probe and returns a table of the resulting URLs.
Non-string entries in urls are skipped.
]]--
local function build_injection_vector(urls)
	local all = {}

	for _, injectable in ipairs(urls) do
		if type(injectable) == "string" then
			-- all locals here: the original leaked qtab/old_qtab into
			-- wider scope and declared several unused variables.
			local utab = url.parse(injectable)
			local qtab = url.parse_query(utab.query)

			for k in pairs(qtab) do
				local old_val = qtab[k]
				qtab[k] = qtab[k] .. "'%20OR%20sqlspider"

				utab.query = url.build_query(qtab)
				table.insert(all, url.build(utab))

				-- restore so each parameter is probed in isolation
				qtab[k] = old_val
				utab.query = url.build_query(qtab)
			end
		end
	end
	return all
end
--[[
Queues one pipelined GET request per probe URL, executes the whole
batch and returns the resulting table of responses.
]]--
local function inject(host, port, injectable)
	local batch = {}
	local popts = { raw = true }
	for _, probe in pairs(injectable) do
		batch = http.pGet(host, port, probe, nil, nil, batch)
	end
	return http.pipeline(host, port, batch, popts)
end
--[[
Checks whether received responses match common SQL error messages,
which potentially means the host is vulnerable to SQL injection.
Returns the probe URLs whose responses looked vulnerable.
]]--
local function check_responses(queries, responses)
	local vulnerable = {}
	for idx, body in pairs(responses) do
		if check_injection_response(body) then
			vulnerable[#vulnerable + 1] = queries[idx]
		end
	end
	return vulnerable
end
--[[
Follow meta-refresh and HTTP 301 redirects by appending a synthetic
href="..." to the page text so find_links() picks the target up on
the next crawl pass. Returns the (possibly augmented) page.
--]]
local function check_redirects(page)
	local lpage = string.lower(page)
	local httpurl

	-- <meta http-equiv="refresh" content="N;url=..."> redirects
	if (string.find(lpage, '<%s*meta%s*http%-equiv%s*=%s*"%s*refresh%s*"')) then
		httpurl = string.match(lpage, 'content%s*=%s*"%s*%d+%s*;%s*url%s*=%s*([^"]+)"')
		if httpurl then
			page = page .. 'href="' .. httpurl .. '"'
		end
	end

	-- http redirect. Fixed: the page was lowercased above, so the
	-- original uppercase pattern 'HTTP/1.1 301 ...' could never match
	-- and 301 redirects were silently ignored.
	if (string.find(lpage, 'http/1%.1 301 moved permanently')) then
		httpurl = string.match(lpage, 'location:%s*([^\n]+)')
		if httpurl then
			page = page .. 'href="' .. httpurl .. '"'
		end
	end

	return page
end
--[[
A link is "local" when it has no authority component or its authority
matches the scanned host's name — i.e. following it does not take the
crawler away from the current site.
--]]
local function is_local_link(url_parts, host)
	if url_parts.authority and url_parts.authority ~= host.name then
		return false
	end
	return true
end
--[[
Scan a page for href="..." anchors. Every unseen, local, http link is
appended to list; links that carry a query string are also collected
into the returned table of injection candidates.
--]]
local function find_links(list, base_path, page, host)
	local injectable = {}

	for anchor in string.gmatch(page, 'href%s*=%s*"%s*[^"]+%s*"') do
		local quote = string.find(anchor, '"')
		local link = string.sub(anchor, quote + 1, string.len(anchor) - 1)

		-- parse out duplicates, otherwise we'll be here all day:
		-- linear scan stops at the first empty slot or exact match
		local idx = 1
		while list[idx] and list[idx] ~= link do
			idx = idx + 1
		end

		local parts = url.parse(link)
		if list[idx] == nil and is_local_link(parts, host) and
		   (not parts.scheme or parts.scheme == "http") then
			link = url.absolute(base_path, link)
			table.insert(list, link)
			if parts.query then
				table.insert(injectable, link)
			end
		end
	end

	return injectable
end
--[[
Main entry point: crawl the web server starting at the root (or at the
user-supplied sql-injection.start page), collect URLs with query
strings, probe each parameter through a pipelined batch of requests,
and report URLs whose responses contain SQL error messages.
--]]
action = function(host, port)
	local urllist = {}
	local injectable = {}
	local results = {}
	local links, page, injectableQs
	local i = 1

	-- start at the root, or at an argument-supplied start page
	if nmap.registry.args['sql-injection.start'] then
		table.insert(urllist, "/" .. nmap.registry.args['sql-injection.start'])
	else
		table.insert(urllist, "/")
	end

	-- check for argument supplied max depth; maxdepth is assumed to be a
	-- script-level variable with a default set above this chunk.
	-- Fixed: only accept the override when it parses as a number — the
	-- original crashed (nil concat / nil compare) on a non-numeric value.
	if nmap.registry.args['sql-injection.maxdepth'] then
		local depth = tonumber(nmap.registry.args['sql-injection.maxdepth'])
		if depth then
			maxdepth = depth
			stdnse.print_debug("maxdepth set to: " .. maxdepth)
		end
	end

	-- crawl: fetch each queued URL, follow redirects, harvest new links,
	-- and store all urls with queries for later analysis
	while not(urllist[i] == nil) and i <= maxdepth do
		page = http.get(host, port, urllist[i], nil, nil)
		page = check_redirects(page.body)
		links = find_links(urllist, urllist[i], page, host)
		injectable = listop.append(injectable, links)
		i = i + 1
	end

	if #injectable > 0 then
		stdnse.print_debug(1, "%s: Testing %d suspicious URLs", filename, #injectable )
		-- test all potentially vulnerable queries in one pipelined batch;
		-- we can get multiple probe URLs from a single query string
		injectableQs = build_injection_vector(injectable)
		local responses = inject(host, port, injectableQs)
		results = check_responses(injectableQs, responses)
	end

	if #results > 0 then
		return "Host might be vulnerable\n" .. table.concat(results, '\n')
	end
	return nil
end