1
0
Mirror of https://github.com/nmap/nmap.git, synced 2026-01-07 06:59:03 +00:00.

o [NSE] Added getLimitations function to httpspider that returns any
limitations imposed on the crawler. [Patrik]

This commit is contained in:
This commit is contained in:
patrik
2011-12-10 10:11:56 +00:00
parent 42fa95c755
commit 8254da793e
3 changed files with 34 additions and 22 deletions

View File

@@ -46,13 +46,6 @@ function action(host, port)
crawler:set_timeout(10000)
local maxdepth, maxpagecount = crawler.options.maxdepth, crawler.options.maxpagecount
if ( maxdepth < 0 ) then maxdepth = nil end
if ( maxpagecount < 0 ) then maxpagecount = nil end
stdnse.print_debug(2, "%s: Running crawler maxdepth: %s; maxpagecount: %s",
SCRIPT_NAME, maxdepth or "[none]", maxpagecount or "[none]")
local emails = {}
while(true) do
local status, r = crawler:crawl()
@@ -80,20 +73,7 @@ function action(host, port)
table.insert(results, email)
end
-- Inform the user of the limitations that were used
if ( maxdepth > 0 or maxpagecount > 0 ) then
local limit = "Spidering limited to: "
if ( maxdepth > 0 ) then
limit = limit .. ("maxdepth=%d; "):format(maxdepth)
end
if ( maxpagecount > 0 ) then
limit = limit .. ("maxpagecount=%d"):format(maxpagecount)
end
if ( #results == 0 ) then
table.insert(results, limit)
else
results.name = limit
end
end
results.name = crawler:getLimitations()
return stdnse.format_output(true, results)
end