mirror of https://github.com/nmap/nmap.git (synced 2025-12-08)
o [NSE] Modified the sql-injection script to use the httpspider library.
[Lauri Kokkonen]
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -1,5 +1,8 @@
 # Nmap Changelog ($Id$); -*-text-*-
 
+o [NSE] Modified the sql-injection script to use the httpspider library.
+  [Lauri Kokkonen]
+
 o [NSE] Added a rsync library and two new script:
   + rsync-list-modules - list available rsync modules
   + rsync-brute - attempts to brute force passwords against a rsync module
--- a/nselib/httpspider.lua
+++ b/nselib/httpspider.lua
@@ -81,15 +81,15 @@ Options = {
     if ( ( o.base_url:getProto() == 'https' and o.base_url:getPort() == 443 ) or
          ( o.base_url:getProto() == 'http' and o.base_url:getPort() == 80 ) ) then
       if ( o.withinhost ) then
-        host_match = ("%s://%s"):format(o.base_url:getProto(), o.base_url:getHost())
+        host_match = ("^%s://%s"):format(o.base_url:getProto(), o.base_url:getHost())
       elseif ( o.withindomain ) then
-        domain_match = ("%s://.*%s/"):format(o.base_url:getProto(), o.base_url:getDomain())
+        domain_match = ("^%s://.*%s/"):format(o.base_url:getProto(), o.base_url:getDomain())
       end
     else
       if ( o.withinhost ) then
-        host_match = ("%s://%s:%d"):format(o.base_url:getProto(), o.base_url:getHost(), o.base_url:getPort() )
+        host_match = ("^%s://%s:%d"):format(o.base_url:getProto(), o.base_url:getHost(), o.base_url:getPort() )
       elseif( o.withindomain ) then
-        domain_match = ("%s://.*%s/"):format(o.base_url:getProto(), o.base_url:getDomain() )
+        domain_match = ("^%s://.*%s/"):format(o.base_url:getProto(), o.base_url:getDomain() )
       end
     end
     -- set up the appropriate matching functions
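
The only change in the hunk above is the leading "^" on each pattern.
host_match and domain_match are used as Lua patterns against candidate
URLs, so without the anchor they match the prefix anywhere in the string,
not just at its start. A minimal sketch of the difference, assuming the
patterns are applied with Lua's string.match; the URLs are made up for
illustration:

    local host_match = ("%s://%s"):format("http", "target.example.com")
    local offsite = "http://evil.example.com/?next=http://target.example.com/"

    -- Unanchored: matches the prefix embedded inside an off-site URL.
    print(offsite:match(host_match))          --> http://target.example.com
    -- Anchored (as of this commit): nil, the URL must begin with the prefix.
    print(offsite:match("^" .. host_match))   --> nil
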
--- a/scripts/sql-injection.nse
+++ b/scripts/sql-injection.nse
@@ -6,21 +6,20 @@ The script spiders an HTTP server looking for URLs containing queries. It then
 proceeds to combine crafted SQL commands with susceptible URLs in order to
 obtain errors. The errors are analysed to see if the URL is vulnerable to
 attack. This uses the most basic form of SQL injection but anything more
-complicated is better suited to a standalone tool. Both meta-style and HTTP redirects
-are supported.
+complicated is better suited to a standalone tool.
 
 We may not have access to the target web server's true hostname, which can prevent access to
-virtually hosted sites. This script only follows absolute links when the host name component is the same as the target server's reverse-DNS name.
+virtually hosted sites.
 ]]
 
 require('url')
 require('shortport')
 require('stdnse')
 require('strbuf')
-require('listop')
 require('comm')
 require('http')
 require('nsedebug')
+require('httpspider')
 
 author = "Eddie Bell"
 license = "Same as Nmap--See http://nmap.org/book/man-legal.html"
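
The description above refers to the script's core trick: take each
crawled URL that carries a query string, append a quote-breaking payload,
and check the response for database error messages. A minimal sketch of
that idea, using the same payload that appears in the sample output in
the next hunk; craft() is an illustrative helper, not the script's actual
build_injection_vector():

    -- Append a single quote plus a marker term to the trailing query
    -- value; a vulnerable backend will emit a SQL syntax error.
    local function craft(u)
      return u .. "'%20OR%20sqlspider"
    end

    print(craft("/a_index.php?id_str=1"))
    --> /a_index.php?id_str=1'%20OR%20sqlspider
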
@@ -38,13 +37,9 @@ categories = {"intrusive", "vuln"}
 -- | /a_index.php?id_str=1'%20OR%20sqlspider
 -- | /a_index.php?id_str=2'%20OR%20sqlspider
 
--- Change this to increase depth of crawl
+-- default settings
 local maxdepth = 10
-local get_page_from_host
-
-local soc
-local catch = function() soc:close() end
-local try = nmap.new_try(catch)
+local start = '/'
 
 portrule = shortport.port_or_service({80, 443}, {"http","https"})
 
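
Both defaults above remain overridable at run time, e.g.
nmap -p80 --script sql-injection --script-args "sql-injection.start=/app/,sql-injection.maxdepth=20" <target>
(the /app/ path is a made-up example). Note that the rewritten action
function in the next hunk passes maxdepth to the crawler as maxpagecount,
so the argument now bounds the number of pages fetched rather than the
link depth of the crawl.
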
@@ -121,128 +116,48 @@ local function check_responses(queries, responses)
   return results
 end
 
---[[
-Follow redirects, Instead of adding redirects to the url list
-we just modify it's format so the parser logic can be applied to
-it in find_links()
---]]
-
-local function check_redirects(page)
-  local lpage = string.lower(page)
-  local _, httpurl = nil
-
-  -- meta redirects
-  if(string.find(lpage, '<%s*meta%s*http%-equiv%s*=%s*"%s*refresh%s*"')) then
-    _, _, httpurl = string.find(lpage, 'content%s*=%s*"%s*%d+%s*;%s*url%s*=%s*([^"]+)"')
-    if httpurl then
-      page = page .. 'href="' .. httpurl .. '"'
-    end
-  end
-
-  -- http redirect
-  if(string.find(lpage, 'HTTP/1.1 301 moved permanently')) then
-    _, _, httpurl = string.find(lpage, 'location:%s*([^\n]+)')
-    if httpurl then
-      page = page .. 'href="' .. httpurl .. '"'
-    end
-  end
-
-  return page
-end
-
---[[
-True if url is local to the site we're scanning. We never should spider
-away from current site!
---]]
-
-local function is_local_link(url_parts, host)
-  if url_parts.authority and not(url_parts.authority == host.name) then
-    return false
-  end
-  return true
-end
-
---[[
-Parse a html document looking for href links. If a local link is found
-it is added to the spider list If a link with a query is found it is
-added to the inject list, which is returned.
---]]
-
-local function find_links(list, base_path, page, host)
-  local httpurl,injectable, url_parts
-  local i, s, e
-
-  injectable = {}
-  url_parts = {}
-
-  for w in string.gmatch(page, 'href%s*=%s*"%s*[^"]+%s*"') do
-    s, e = string.find(w, '"')
-    httpurl = string.sub(w, s+1, #w-1)
-    i = 1
-
-    -- parse out duplicates, otherwise we'll be here all day
-    while list[i] and not(list[i] == httpurl) do
-      i = i + 1
-    end
-
-    url_parts = url.parse(httpurl)
-
-    if list[i] == nil and is_local_link(url_parts, host) and
-       (not url_parts.scheme or url_parts.scheme == "http") then
-      httpurl = url.absolute(base_path, httpurl)
-      table.insert(list, httpurl)
-      if url_parts.query then
-        table.insert(injectable, httpurl)
-      end
-    end
-  end
-  return injectable
-end
-
 action = function(host, port)
-  local urllist, injectable
-  local results = {}
-  local links, i, page
-  local injectableQs
-
-  i = 1
-  urllist = {}
-  injectable = {}
-
-  -- start at the root
+  -- check for script arguments
   if stdnse.get_script_args('sql-injection.start') then
-    table.insert(urllist, "/" .. stdnse.get_script_args('sql-injection.start'))
-  else
-    table.insert(urllist, "/")
+    start = stdnse.get_script_args('sql-injection.start')
   end
 
-  -- check for argument supplied max depth
   if stdnse.get_script_args('sql-injection.maxdepth') then
     maxdepth = tonumber(stdnse.get_script_args('sql-injection.maxdepth'))
     stdnse.print_debug("maxdepth set to: " .. maxdepth)
   end
 
-  while not(urllist[i] == nil) and i <= maxdepth do
-    page = http.get(host, port, urllist[i], nil, nil)
-    page = check_redirects(page.body)
-    links = find_links(urllist, urllist[i], page, host)
-    -- store all urls with queries for later analysis
-    injectable = listop.append(injectable, links)
-    i = i + 1
+  -- crawl to find injectable urls
+  local crawler = httpspider.Crawler:new(host, port, start, {scriptname = SCRIPT_NAME, maxpagecount = maxdepth})
+  local injectable = {}
+
+  while(true) do
+    local status, r = crawler:crawl()
+    if (not(status)) then
+      if (r.err) then
+        return stdnse.format_output(true, "ERROR: %s", r.reason)
+      else
+        break
+      end
+    end
+
+    local links = httpspider.LinkExtractor:new(r.url, r.response.body, crawler.options):getLinks()
+    for _,u in ipairs(links) do
+      if url.parse(u).query then
+        table.insert(injectable, u)
+      end
+    end
   end
 
+  -- try to inject
+  local results = {}
   if #injectable > 0 then
-    stdnse.print_debug(1, "%s: Testing %d suspicious URLs", SCRIPT_NAME, #injectable )
-    -- test all potentially vulnerable queries
-    injectableQs = build_injection_vector(injectable)
+    stdnse.print_debug(1, "%s: Testing %d suspicious URLs", SCRIPT_NAME, #injectable)
+    local injectableQs = build_injection_vector(injectable)
     local responses = inject(host, port, injectableQs)
     results = check_responses(injectableQs, responses)
   end
 
-  -- we can get multiple vulnerable URLS from a single query
-  --results = listop.flatten(results);
-
-  --if not listop.is_empty(results) then
   if #results > 0 then
     return "Host might be vulnerable\n" .. table.concat(results, '\n')
   end
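
For reference, the crawl loop adopted above follows the usual httpspider
consumption pattern: create a Crawler, call crawl() until it returns a
false status, and distinguish a fatal error (r.err set) from normal
exhaustion of the queue. A minimal standalone sketch of that pattern,
using only the calls that appear in this diff and assuming the script has
require()'d httpspider and stdnse as above:

    action = function(host, port)
      local crawler = httpspider.Crawler:new(host, port, "/",
        { scriptname = SCRIPT_NAME, maxpagecount = 10 })

      while true do
        local status, r = crawler:crawl()
        if not status then
          -- r.err marks a hard failure; otherwise the queue is simply empty
          if r.err then
            return stdnse.format_output(true, "ERROR: %s", r.reason)
          end
          break
        end
        -- each iteration yields r.url and r.response.body for one page
      end
    end
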