
o [NSE] Added getLimitations function to httpspider that returns any
  limitations imposed on the crawler. [Patrik]
patrik
2011-12-10 10:11:56 +00:00
parent 42fa95c755
commit 8254da793e
3 changed files with 34 additions and 22 deletions
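
For context, a minimal usage sketch of the new call follows; the action skeleton and option values are illustrative and not part of this commit.

-- Illustrative sketch only: a script hands the crawler's limitations string
-- to stdnse.format_output via the results table. Option values are made up.
local httpspider = require "httpspider"
local stdnse = require "stdnse"

action = function(host, port)
  local crawler = httpspider.Crawler:new(host, port, '/',
    { scriptname = SCRIPT_NAME, maxdepth = 3, maxpagecount = 20 })
  local results = {}
  -- ... crawl and collect entries into results here ...
  -- getLimitations() returns a string such as
  -- "Spidering limited to: maxdepth=3; maxpagecount=20",
  -- or nil when no limits are in effect.
  results.name = crawler:getLimitations()
  return stdnse.format_output(true, results)
end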

@@ -1,5 +1,8 @@
 # Nmap Changelog ($Id$); -*-text-*-
+o [NSE] Added getLimitations function to httpspider that returns any
+  limitations imposed on the crawler. [Patrik]
 o [NSE] Modified the httpspider library to prefetch links in the queue and
   change how script arguments are processed. Script and library arguments are
   now processed from within the library. [Patrik]

@@ -501,6 +501,8 @@ Crawler = {
       o:addDefaultBlacklist()
     end
+    stdnse.print_debug(2, "%s: %s", LIBRARY_NAME, o:getLimitations())
     return o
   end,
@@ -642,6 +644,33 @@ Crawler = {
     self.url = self.url or '/'
   end,
+  -- gets a string of limitations imposed on the crawl
+  getLimitations = function(self)
+    local o = self.options
+    local limits = {}
+    if ( o.maxdepth > 0 or o.maxpagecount > 0 or
+         o.withinhost or o.withindomain ) then
+      if ( o.maxdepth > 0 ) then
+        table.insert(limits, ("maxdepth=%d"):format(o.maxdepth))
+      end
+      if ( o.maxpagecount > 0 ) then
+        table.insert(limits, ("maxpagecount=%d"):format(o.maxpagecount))
+      end
+      if ( o.withindomain ) then
+        table.insert(limits, ("withindomain=%s"):format(o.base_url:getDomain()))
+      end
+      if ( o.withinhost ) then
+        table.insert(limits, ("withinhost=%s"):format(o.base_url:getHost()))
+      end
+    end
+    if ( #limits > 0 ) then
+      return ("Spidering limited to: %s"):format(stdnse.strjoin("; ", limits))
+    end
+  end,
   -- does the crawling
   crawl = function(self)
     self.response_queue = self.response_queue or {}
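
As a rough illustration of the string the new function assembles (the values below are hypothetical), each active limit becomes one entry and the entries are joined with "; " via stdnse.strjoin:

-- Hypothetical values; mirrors the joining done inside getLimitations().
local stdnse = require "stdnse"

local limits = { "maxdepth=3", "maxpagecount=20", "withinhost=scanme.nmap.org" }
local msg = ("Spidering limited to: %s"):format(stdnse.strjoin("; ", limits))
-- msg == "Spidering limited to: maxdepth=3; maxpagecount=20; withinhost=scanme.nmap.org"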

@@ -46,13 +46,6 @@ function action(host, port)
   crawler:set_timeout(10000)
-  local maxdepth, maxpagecount = crawler.options.maxdepth, crawler.options.maxpagecount
-  if ( maxdepth < 0 ) then maxdepth = nil end
-  if ( maxpagecount < 0 ) then maxpagecount = nil end
-  stdnse.print_debug(2, "%s: Running crawler maxdepth: %s; maxpagecount: %s",
-    SCRIPT_NAME, maxdepth or "[none]", maxpagecount or "[none]")
   local emails = {}
   while(true) do
     local status, r = crawler:crawl()
@@ -80,20 +73,7 @@ function action(host, port)
     table.insert(results, email)
   end
-  -- Inform the user of the limitations that were used
-  if ( maxdepth > 0 or maxpagecount > 0 ) then
-    local limit = "Spidering limited to: "
-    if ( maxdepth > 0 ) then
-      limit = limit .. ("maxdepth=%d; "):format(maxdepth)
-    end
-    if ( maxpagecount > 0 ) then
-      limit = limit .. ("maxpagecount=%d"):format(maxpagecount)
-    end
-    if ( #results == 0 ) then
-      table.insert(results, limit)
-    else
-      results.name = limit
-    end
-  end
+  results.name = crawler:getLimitations()
   return stdnse.format_output(true, results)
 end