mirror of https://github.com/nmap/nmap.git

Re-indent some scripts. Whitespace-only commit

https://secwiki.org/w/Nmap/Code_Standards
dmiller
2014-01-31 21:01:26 +00:00
parent 298be5bfaa
commit c7d4f2ec96
50 changed files with 4135 additions and 4135 deletions
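The linked code standards call for two-space indentation (spaces, not tabs) in NSE scripts, which is the style this commit applies across 50 files. A minimal before/after sketch of the change in style (hypothetical snippet, not taken from this diff; names like process and entries are made up):

-- Before: tab- or wide-space indentation (hypothetical).
if status then
        for _, entry in ipairs(entries) do
                process(entry)
        end
end

-- After: two-space indentation, per the code standards.
if status then
  for _, entry in ipairs(entries) do
    process(entry)
  end
end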


@@ -40,10 +40,10 @@ FEEDS = { RSS = { search = { '<rss(.*)>' }, version = 'version=["\'](.-)["\']' }
}
FEEDS_REFS = { "type=[\"']application/rss%+xml[\"']%s*href=[\"'](.-)[\"']",
  "type=[\"']application/rss%+xml[\"']%s*title=[\"'].-[\"']%s*href=[\"'](.-)[\"']",
  "type=[\"']application/atom%+xml[\"']%s*href=[\"'](.-)[\"']",
  "type=[\"']application/atom%+xml[\"']%s*title=[\"'].-[\"']%s*href=[\"'](.-)[\"']",
}

feedsfound = {}
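The FEEDS_REFS entries above are Lua patterns for pulling feed URLs out of <link> tags; note the escaped plus in application/rss%+xml and the lazy .- captures. A quick standalone check of the first pattern (the sample HTML is made up):

-- Sketch: apply the first FEEDS_REFS pattern to sample HTML.
local pattern = "type=[\"']application/rss%+xml[\"']%s*href=[\"'](.-)[\"']"
local body = [[<link rel="alternate" type="application/rss+xml" href="/blog/feed.xml">]]
for href in string.gmatch(body, pattern) do
  print(href)  -- prints: /blog/feed.xml
end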
@@ -52,107 +52,107 @@ checked = {}
-- Searches the resource for feeds.
local findFeeds = function(body, path)
  if body then
    for _, f in pairs(FEEDS) do
      for __, pf in pairs(f["search"]) do
        local c = string.match(body, pf)
        if c then
          local v = ""
          -- Try to find feed's version.
          if string.match(c, f["version"]) then
            v = " (version " .. string.match(c, f["version"]) .. ")"
          end
          feedsfound[path] = _ .. v .. ": "
        end
      end
    end
    checked[path] = true
  end
end
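-- Editor's sketch (not part of the diff): how the version lookup in findFeeds
-- behaves against a typical RSS opening tag, using the RSS patterns from FEEDS.
local sample = string.match('<rss version="2.0">', '<rss(.*)>')  -- ' version="2.0"'
local version = string.match(sample, 'version=["\'](.-)["\']')   -- '2.0'
print(version)  -- prints: 2.0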
action = function(host, port)

  local maxpagecount = stdnse.get_script_args("maxpagecount") or 40

  local crawler = httpspider.Crawler:new(host, port, '/', { scriptname = SCRIPT_NAME,
    maxpagecount = maxpagecount,
    maxdepth = -1,
    withinhost = 1
  })

  crawler.options.doscraping = function(url)
    if crawler:iswithinhost(url)
      and not crawler:isresource(url, "js")
      and not crawler:isresource(url, "css") then
      return true
    end
  end

  if (not(crawler)) then
    return
  end

  crawler:set_timeout(10000)

  local index, k, target, response, path
  while (true) do

    local status, r = crawler:crawl()
    -- if the crawler fails it can be due to a number of different reasons
    -- most of them are "legitimate" and should not be reason to abort
    if (not(status)) then
      if (r.err) then
        return stdnse.format_output(true, ("ERROR: %s"):format(r.reason))
      else
        break
      end
    end

    response = r.response
    path = tostring(r.url)

    if response.body then

      findFeeds(response.body, path)

      for _, p in ipairs(FEEDS_REFS) do
        for l in string.gmatch(response.body, p) do
          if not checked[l] then

            local resp
            -- If this is an absolute URL, use get_url.
            if string.match(l, "^http") then
              resp = http.get_url(l)
            else
              resp = http.get(host, port, l)
            end

            if resp.body then
              findFeeds(resp.body, l)
            end

          end
        end
      end

    end

  end

  -- If the table is empty.
  if next(feedsfound) == nil then
    return "Couldn't find any feeds."
  end

  -- Create a nice output.
  local results = {}
  for c, _ in pairs(feedsfound) do
    table.insert(results, {_ .. c } )
  end

  table.insert(results, 1, "Found the following feeds: ")

  results.name = crawler:getLimitations()

  return stdnse.format_output(true, results)

end
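One detail worth noting in the crawl loop: a discovered feed reference may be absolute or relative, so the script dispatches between http.get_url and http.get. The same branch as a standalone helper (a sketch; fetch is a made-up name, the two http calls are the ones used in the diff):

local http = require "http"

-- Fetch a discovered link, choosing the call by URL form as the loop above does.
local function fetch(host, port, link)
  if string.match(link, "^http") then
    return http.get_url(link)        -- absolute URL: host and port come from the link
  end
  return http.get(host, port, link)  -- relative path: reuse the scanned host and port
end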