Mirror of https://github.com/nmap/nmap.git, synced 2025-12-10 17:59:04 +00:00
o [NSE] Added new script http-chrono, which measures min, max and average
response times of web servers. [Ange Gutek]
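
A typical invocation, following the script's NSEdoc (the tries override below
is illustrative; the default is 5):

    nmap --script http-chrono --script-args http-chrono.tries=10 <target>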
CHANGELOG
@@ -1,5 +1,8 @@
 # Nmap Changelog ($Id$); -*-text-*-
 
+o [NSE] Added new script http-chrono, which measures min, max and average
+  response times of web servers. [Ange Gutek]
+
 o Applied a workaround to make pcap captures work better on Solaris
   10. This involves peeking at the pcap buffer to ensure that captures
   are not being lost. A symptom of behavior before this fix was that,

@@ -551,7 +554,7 @@ o [NSE] Added 51(!) NSE scripts, bringing the total up to 297. They
 
   + http-vuln-cve2011-3368 tests for the CVE-2011-3368 (Reverse Proxy
     Bypass) vulnerability in Apache HTTP server's reverse proxy
-    mode. [Ange Gutek, Patrik Karlsson"]
+    mode. [Ange Gutek, Patrik Karlsson]
 
   + ipv6-node-info obtains hostnames, IPv4 and IPv6 addresses through
     IPv6 Node Information Queries. [David Fifield]

scripts/http-chrono.nse (new file, 130 lines)
@@ -0,0 +1,130 @@
description = [[
This script measures the time a website takes to deliver a web page and returns
the maximum, minimum and average time it took to fetch a page.

Web pages that take longer to load could be abused by attackers in DoS or
DDoS attacks, as they are likely to consume more resources on the target
server. This script can help identify such pages.
]]

---
-- @usage
-- nmap --script http-chrono <ip>
--
-- @output
-- PORT   STATE SERVICE
-- 80/tcp open  http
-- |_http-chrono: Request times for /; avg: 2.98ms; min: 2.63ms; max: 3.62ms
--
-- PORT   STATE SERVICE
-- 80/tcp open  http
-- | http-chrono:
-- |   page                         avg      min      max
-- |   /admin/                      1.91ms   1.65ms   2.05ms
-- |   /manager/status              2.14ms   2.03ms   2.24ms
-- |   /manager/html                2.26ms   2.09ms   2.53ms
-- |   /examples/servlets/          2.43ms   1.97ms   3.62ms
-- |   /examples/jsp/snp/snoop.jsp  2.75ms   2.59ms   3.13ms
-- |   /                            2.78ms   2.54ms   3.36ms
-- |   /docs/                       3.14ms   2.61ms   3.53ms
-- |   /RELEASE-NOTES.txt           3.70ms   2.97ms   5.58ms
-- |   /examples/jsp/               4.93ms   3.39ms   8.30ms
-- |_  /docs/changelog.html         10.76ms  10.14ms  11.46ms
--
-- @args http-chrono.maxdepth the maximum amount of directories beneath
--       the initial url to spider. A negative value disables the limit.
--       (default: 3)
-- @args http-chrono.maxpagecount the maximum amount of pages to visit.
--       A negative value disables the limit (default: 1)
-- @args http-chrono.url the url to start spidering. This is a URL
--       relative to the scanned host eg. /default.html (default: /)
-- @args http-chrono.withinhost only spider URLs within the same host.
--       (default: true)
-- @args http-chrono.withindomain only spider URLs within the same
--       domain. This widens the scope from <code>withinhost</code> and
--       cannot be used in combination with it. (default: false)
-- @args http-chrono.tries the number of times to fetch a page on which the
--       max, min and average calculations are based. (default: 5)

author = "Ange Gutek"
license = "Same as Nmap--See http://nmap.org/book/man-legal.html"
categories = {"discovery", "intrusive"}

require 'http'
require 'shortport'
require 'stdnse'      -- used directly below: get_script_args, clock_ms
require 'url'         -- used directly below: url.parse
require 'httpspider'
require 'tab'

portrule = shortport.http

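-- Overall flow: a first pass crawls the site and notes how long each page
-- takes to fetch; a second pass re-fetches every discovered page 'tries'
-- times to derive min, max and average response times.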
action = function(host, port)

  local maxpages = stdnse.get_script_args(SCRIPT_NAME .. ".maxpagecount") or 1
  local tries = stdnse.get_script_args(SCRIPT_NAME .. ".tries") or 5

  local dump = {}
  local crawler = httpspider.Crawler:new( host, port, nil, { scriptname = SCRIPT_NAME, maxpagecount = tonumber(maxpages) } )
  crawler:set_timeout(10000)

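  -- First pass: each crawl step fetches one page; its wall-clock time in ms
  -- becomes the key for the page's URL in 'dump' (identical timings collide,
  -- keeping only the last page seen).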
  -- launch the crawler
  while(true) do
    local start = stdnse.clock_ms()
    local status, r = crawler:crawl()
    if ( not(status) ) then
      break
    end
    local chrono = stdnse.clock_ms() - start
    dump[chrono] = tostring(r.url)
  end

  -- Second pass: retest each page 'tries' times to find an average speed.
  -- A significant difference between a single fetch and the average may be
  -- evidence of a weakness either in the web server or in its database.
  local results = {}
  for result, page in pairs (dump) do
    -- split the absolute URL into host and path, and strip any :port suffix
    local url_host, url_page = page:match("//(.-)/(.*)")
    url_host = string.gsub(url_host,":%d*","")

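    -- Fetch the page 'tries' times, appending a random 'test' query
    -- parameter (and setting no_cache) so that caches cannot serve the page
    -- and flatten the timings.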
    local min, max, page_test
    local bulk_start = stdnse.clock_ms()
    for i = 1,tries do
      local start = stdnse.clock_ms()
      if ( url_page:match("%?") ) then
        page_test = http.get(url_host,port,"/"..url_page.."&test="..math.random(100), { no_cache = true })
      else
        page_test = http.get(url_host,port,"/"..url_page.."?test="..math.random(100), { no_cache = true })
      end
      local count = stdnse.clock_ms() - start
      if ( not(max) or max < count ) then
        max = count
      end
      if ( not(min) or min > count ) then
        min = count
      end
    end
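    -- The average comes from the total wall-clock time of the whole batch
    -- divided by 'tries', rather than from the individual samples above.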
    local count = stdnse.clock_ms() - bulk_start
    table.insert(results, { min = min, max = max, avg = (count / tries), page = url.parse(page).path })
  end

  local output
  if ( #results == 0 ) then
    -- nothing was crawled (e.g. every request failed or timed out)
    return nil
  elseif ( #results > 1 ) then
    table.sort(results, function(a, b) return a.avg < b.avg end)
    output = tab.new(4)
    tab.addrow(output, "page", "avg", "min", "max")
    for _, entry in ipairs(results) do
      tab.addrow(output, entry.page, ("%.2fms"):format(entry.avg), ("%.2fms"):format(entry.min), ("%.2fms"):format(entry.max))
    end
    output = "\n" .. tab.dump(output)
  else
    local entry = results[1]
    output = ("Request times for %s; avg: %.2fms; min: %.2fms; max: %.2fms"):format(entry.page, entry.avg, entry.min, entry.max)
  end
  return output
end

scripts/script.db
@@ -113,6 +113,7 @@ Entry { filename = "http-backup-finder.nse", categories = { "discovery", "safe",
 Entry { filename = "http-barracuda-dir-traversal.nse", categories = { "auth", "exploit", "intrusive", } }
 Entry { filename = "http-brute.nse", categories = { "brute", "intrusive", } }
 Entry { filename = "http-cakephp-version.nse", categories = { "discovery", "safe", } }
+Entry { filename = "http-chrono.nse", categories = { "discovery", "intrusive", } }
 Entry { filename = "http-config-backup.nse", categories = { "auth", "intrusive", } }
 Entry { filename = "http-cors.nse", categories = { "default", "discovery", "safe", } }
 Entry { filename = "http-date.nse", categories = { "discovery", "safe", } }