
o [NSE] Modified the sql-injection script to use the httpspider library.

[Lauri Kokkonen]
patrik
2012-02-05 13:47:31 +00:00
parent 908ac61fb0
commit 557874588f
3 changed files with 39 additions and 121 deletions


@@ -1,5 +1,8 @@
# Nmap Changelog ($Id$); -*-text-*-
o [NSE] Modified the sql-injection script to use the httpspider library.
[Lauri Kokkonen]
o [NSE] Added a rsync library and two new scripts:
+ rsync-list-modules - list available rsync modules
+ rsync-brute - attempts to brute force passwords against a rsync module


@@ -81,15 +81,15 @@ Options = {
if ( ( o.base_url:getProto() == 'https' and o.base_url:getPort() == 443 ) or
( o.base_url:getProto() == 'http' and o.base_url:getPort() == 80 ) ) then
if ( o.withinhost ) then
host_match = ("%s://%s"):format(o.base_url:getProto(), o.base_url:getHost())
host_match = ("^%s://%s"):format(o.base_url:getProto(), o.base_url:getHost())
elseif ( o.withindomain ) then
domain_match = ("%s://.*%s/"):format(o.base_url:getProto(), o.base_url:getDomain())
domain_match = ("^%s://.*%s/"):format(o.base_url:getProto(), o.base_url:getDomain())
end
else
if ( o.withinhost ) then
host_match = ("%s://%s:%d"):format(o.base_url:getProto(), o.base_url:getHost(), o.base_url:getPort() )
host_match = ("^%s://%s:%d"):format(o.base_url:getProto(), o.base_url:getHost(), o.base_url:getPort() )
elseif( o.withindomain ) then
domain_match = ("%s://.*%s/"):format(o.base_url:getProto(), o.base_url:getDomain() )
domain_match = ("^%s://.*%s/"):format(o.base_url:getProto(), o.base_url:getDomain() )
end
end
-- set up the appropriate matching functions
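The "^" anchors added in this hunk matter: without them, the host or domain pattern can match anywhere inside a crawled URL, so a page on another site that merely embeds the target's name in a query string would be treated as within scope. A minimal standalone Lua sketch of the difference (the hostnames are made up for illustration; this is not library code):

-- Illustrative only; "target.example" stands in for base_url:getHost().
local unanchored = "http://target%.example"   -- pattern without the anchor
local anchored   = "^http://target%.example"  -- pattern with the new anchor
local offsite    = "http://evil.example/?next=http://target.example/"
print(offsite:find(unanchored) ~= nil)  --> true  (would wrongly count as within the host)
print(offsite:find(anchored) ~= nil)    --> false (correctly rejected)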


@@ -6,21 +6,20 @@ The script spiders an HTTP server looking for URLs containing queries. It then
proceeds to combine crafted SQL commands with susceptible URLs in order to
obtain errors. The errors are analysed to see if the URL is vulnerable to
attack. This uses the most basic form of SQL injection but anything more
complicated is better suited to a standalone tool. Both meta-style and HTTP redirects
are supported.
complicated is better suited to a standalone tool.
We may not have access to the target web server's true hostname, which can prevent access to
virtually hosted sites. This script only follows absolute links when the host name component is the same as the target server's reverse-DNS name.
virtually hosted sites.
]]
require('url')
require('shortport')
require('stdnse')
require('strbuf')
require('listop')
require('comm')
require('http')
require('nsedebug')
require('httpspider')
author = "Eddie Bell"
license = "Same as Nmap--See http://nmap.org/book/man-legal.html"
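The description above refers to the script's existing helpers (build_injection_vector, inject, check_responses), which this commit leaves untouched and which therefore do not appear in the diff. Purely as an illustration of the error-based technique being described, a standalone sketch might look like the following; the probe suffix mirrors the @output example in the next hunk, while the error fragments are generic examples rather than the script's actual checks:

-- Illustration only; not the script's real build_injection_vector/check_responses.
local function make_probe(u)
  -- append the same marker that appears in the @output example below
  return u .. "'%20OR%20sqlspider"
end

local function looks_vulnerable(body)
  body = body:lower()
  -- generic DBMS error fragments (examples only)
  return body:find("sql syntax", 1, true) ~= nil
      or body:find("odbc", 1, true) ~= nil
      or body:find("ora%-%d+") ~= nil
end

-- make_probe("/a_index.php?id_str=1") --> "/a_index.php?id_str=1'%20OR%20sqlspider"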
@@ -38,13 +37,9 @@ categories = {"intrusive", "vuln"}
-- | /a_index.php?id_str=1'%20OR%20sqlspider
-- | /a_index.php?id_str=2'%20OR%20sqlspider
-- Change this to increase depth of crawl
-- default settings
local maxdepth = 10
local get_page_from_host
local soc
local catch = function() soc:close() end
local try = nmap.new_try(catch)
local start = '/'
portrule = shortport.port_or_service({80, 443}, {"http","https"})
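Since the rewritten action reads sql-injection.start and sql-injection.maxdepth through stdnse.get_script_args (see the last hunk), a documentation-style usage block could sit alongside the defaults above. The invocation below is an assumption based on that argument handling, not text from this commit:

-- @usage (hypothetical example)
-- nmap -p80 --script sql-injection \
--   --script-args 'sql-injection.start=/index.php,sql-injection.maxdepth=20' <target>

Note that the new code hands maxdepth to the crawler as maxpagecount, so after this commit it effectively caps the number of pages fetched rather than the link depth.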
@@ -121,128 +116,48 @@ local function check_responses(queries, responses)
return results
end
--[[
Follow redirects. Instead of adding redirects to the URL list,
we just modify its format so the parser logic can be applied to
it in find_links().
--]]
local function check_redirects(page)
local lpage = string.lower(page)
local _, httpurl = nil
-- meta redirects
if(string.find(lpage, '<%s*meta%s*http%-equiv%s*=%s*"%s*refresh%s*"')) then
_, _, httpurl = string.find(lpage, 'content%s*=%s*"%s*%d+%s*;%s*url%s*=%s*([^"]+)"')
if httpurl then
page = page .. 'href="' .. httpurl .. '"'
end
end
-- http redirect
if(string.find(lpage, 'HTTP/1.1 301 moved permanently')) then
_, _, httpurl = string.find(lpage, 'location:%s*([^\n]+)')
if httpurl then
page = page .. 'href="' .. httpurl .. '"'
end
end
return page
end
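For reference, the meta-refresh handling removed above worked by rewriting the page so that the redirect target looks like an ordinary href, which find_links() then picks up. A standalone sketch of that extraction, with a made-up sample page:

-- Illustration of the meta-refresh extraction used by the removed code.
local page = '<html><head><meta http-equiv="refresh" ' ..
             'content="3; url=http://target.example/next.php?id=1"></head></html>'
local lpage = string.lower(page)
if string.find(lpage, '<%s*meta%s*http%-equiv%s*=%s*"%s*refresh%s*"') then
  local _, _, httpurl = string.find(lpage,
    'content%s*=%s*"%s*%d+%s*;%s*url%s*=%s*([^"]+)"')
  if httpurl then
    print(httpurl)  --> http://target.example/next.php?id=1
  end
end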
--[[
True if the URL is local to the site we're scanning. We should never spider
away from the current site!
--]]
local function is_local_link(url_parts, host)
if url_parts.authority and not(url_parts.authority == host.name) then
return false
end
return true
end
--[[
Parse an HTML document looking for href links. If a local link is found,
it is added to the spider list. If a link with a query is found, it is
added to the inject list, which is returned.
--]]
local function find_links(list, base_path, page, host)
local httpurl,injectable, url_parts
local i, s, e
injectable = {}
url_parts = {}
for w in string.gmatch(page, 'href%s*=%s*"%s*[^"]+%s*"') do
s, e = string.find(w, '"')
httpurl = string.sub(w, s+1, #w-1)
i = 1
-- parse out duplicates, otherwise we'll be here all day
while list[i] and not(list[i] == httpurl) do
i = i + 1
end
url_parts = url.parse(httpurl)
if list[i] == nil and is_local_link(url_parts, host) and
(not url_parts.scheme or url_parts.scheme == "http") then
httpurl = url.absolute(base_path, httpurl)
table.insert(list, httpurl)
if url_parts.query then
table.insert(injectable, httpurl)
end
end
end
return injectable
end
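Both the removed find_links() and the new LinkExtractor-based loop lean on the url library for this filtering. As a quick reference, a sketch of the two calls used here (it assumes the require('url') at the top of the script; the commented values are what one would expect for these inputs):

-- Illustrative values only.
local parts = url.parse("http://target.example/a_index.php?id_str=1")
-- parts.scheme    --> "http"
-- parts.authority --> "target.example"
-- parts.path      --> "/a_index.php"
-- parts.query     --> "id_str=1"
-- Resolving a relative href against the page it was found on:
-- url.absolute("/dir/page.php", "other.php?id=2") --> "/dir/other.php?id=2"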
action = function(host, port)
local urllist, injectable
local results = {}
local links, i, page
local injectableQs
i = 1
urllist = {}
injectable = {}
-- start at the root
-- check for script arguments
if stdnse.get_script_args('sql-injection.start') then
table.insert(urllist, "/" .. stdnse.get_script_args('sql-injection.start'))
else
table.insert(urllist, "/")
start = stdnse.get_script_args('sql-injection.start')
end
-- check for argument supplied max depth
if stdnse.get_script_args('sql-injection.maxdepth') then
maxdepth = tonumber(stdnse.get_script_args('sql-injection.maxdepth'))
stdnse.print_debug("maxdepth set to: " .. maxdepth)
end
while not(urllist[i] == nil) and i <= maxdepth do
page = http.get(host, port, urllist[i], nil, nil)
page = check_redirects(page.body)
links = find_links(urllist, urllist[i], page, host)
-- store all urls with queries for later analysis
injectable = listop.append(injectable, links)
i = i + 1
-- crawl to find injectable urls
local crawler = httpspider.Crawler:new(host, port, start, {scriptname = SCRIPT_NAME, maxpagecount = maxdepth})
local injectable = {}
while(true) do
local status, r = crawler:crawl()
if (not(status)) then
if (r.err) then
return stdnse.format_output(true, "ERROR: %s", r.reason)
else
break
end
end
local links = httpspider.LinkExtractor:new(r.url, r.response.body, crawler.options):getLinks()
for _,u in ipairs(links) do
if url.parse(u).query then
table.insert(injectable, u)
end
end
end
-- try to inject
local results = {}
if #injectable > 0 then
stdnse.print_debug(1, "%s: Testing %d suspicious URLs", SCRIPT_NAME, #injectable )
-- test all potentially vulnerable queries
injectableQs = build_injection_vector(injectable)
stdnse.print_debug(1, "%s: Testing %d suspicious URLs", SCRIPT_NAME, #injectable)
local injectableQs = build_injection_vector(injectable)
local responses = inject(host, port, injectableQs)
results = check_responses(injectableQs, responses)
end
-- we can get multiple vulnerable URLs from a single query
--results = listop.flatten(results);
--if not listop.is_empty(results) then
if #results > 0 then
return "Host might be vulnerable\n" .. table.concat(results, '\n')
end