o [NSE] Added getLimitations function to httpspider that returns any
limitations imposed on the crawler. [Patrik]
@@ -1,5 +1,8 @@
# Nmap Changelog ($Id$); -*-text-*-

o [NSE] Added getLimitations function to httpspider that returns any
  limitations imposed on the crawler. [Patrik]

o [NSE] Modified the httpspider library to prefetch links in the queue and
  change how script arguments are processed. Script and library arguments are
  now processed from within the library. [Patrik]
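For context, a minimal sketch of how an NSE script could drive the library after these changes: the crawler is created with the calling script's name so the library can process script and library arguments itself, and the new getLimitations() string can be logged. The URL, option handling, and debug level below are illustrative assumptions, not part of this commit.

    local httpspider = require "httpspider"
    local stdnse = require "stdnse"

    -- The library picks up its script/library arguments (e.g. httpspider.maxdepth)
    -- itself when it is given the calling script's name.
    local crawler = httpspider.Crawler:new(host, port, '/', { scriptname = SCRIPT_NAME })
    crawler:set_timeout(10000)

    -- getLimitations() returns a readable summary of the active crawl limits,
    -- or nil when no limits are in effect.
    stdnse.print_debug(1, "%s: %s", SCRIPT_NAME, crawler:getLimitations() or "no limits")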
@@ -501,6 +501,8 @@ Crawler = {
      o:addDefaultBlacklist()
    end

    stdnse.print_debug(2, "%s: %s", LIBRARY_NAME, o:getLimitations())

    return o
  end,
@@ -642,6 +644,33 @@ Crawler = {
    self.url = self.url or '/'
  end,

  -- gets a string of limitations imposed on the crawl
  getLimitations = function(self)
    local o = self.options
    local limits = {}

    if ( o.maxdepth > 0 or o.maxpagecount > 0 or
         o.withinhost or o.withindomain ) then
      if ( o.maxdepth > 0 ) then
        table.insert(limits, ("maxdepth=%d"):format(o.maxdepth))
      end
      if ( o.maxpagecount > 0 ) then
        table.insert(limits, ("maxpagecount=%d"):format(o.maxpagecount))
      end
      if ( o.withindomain ) then
        table.insert(limits, ("withindomain=%s"):format(o.base_url:getDomain()))
      end
      if ( o.withinhost ) then
        table.insert(limits, ("withinhost=%s"):format(o.base_url:getHost()))
      end
    end

    if ( #limits > 0 ) then
      return ("Spidering limited to: %s"):format(stdnse.strjoin("; ", limits))
    end
  end,

  -- does the crawling
  crawl = function(self)

    self.response_queue = self.response_queue or {}
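To make the format of the returned string concrete: with invented option values of maxdepth=3, maxpagecount=20 and withinhost set for a host named example.com, getLimitations() would return something along the lines of:

    Spidering limited to: maxdepth=3; maxpagecount=20; withinhost=example.com

When none of the limits are set, the function returns nil.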
@@ -46,13 +46,6 @@ function action(host, port)

  crawler:set_timeout(10000)

  local maxdepth, maxpagecount = crawler.options.maxdepth, crawler.options.maxpagecount
  if ( maxdepth < 0 ) then maxdepth = nil end
  if ( maxpagecount < 0 ) then maxpagecount = nil end

  stdnse.print_debug(2, "%s: Running crawler maxdepth: %s; maxpagecount: %s",
    SCRIPT_NAME, maxdepth or "[none]", maxpagecount or "[none]")

  local emails = {}
  while(true) do
    local status, r = crawler:crawl()
@@ -80,20 +73,7 @@ function action(host, port)
    table.insert(results, email)
  end

  -- Inform the user of the limitations that were used
  if ( maxdepth > 0 or maxpagecount > 0 ) then
    local limit = "Spidering limited to: "
    if ( maxdepth > 0 ) then
      limit = limit .. ("maxdepth=%d; "):format(maxdepth)
    end
    if ( maxpagecount > 0 ) then
      limit = limit .. ("maxpagecount=%d"):format(maxpagecount)
    end
    if ( #results == 0 ) then
      table.insert(results, limit)
    else
      results.name = limit
    end
  end
  results.name = crawler:getLimitations()

  return stdnse.format_output(true, results)
end
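The net effect on scripts is that the limitation summary no longer has to be rebuilt by hand. A hedged sketch of the tail of an action function under this pattern (variable names assumed):

    -- results is the list of findings collected by the script
    results.name = crawler:getLimitations()  -- nil when no limits are in effect
    return stdnse.format_output(true, results)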