description = [[
Checks if various crawling utilities are allowed by the host.
]]

---
-- @usage nmap -p80 --script http-useragent-tester.nse <host>
--
-- This script sets various User-Agent headers that are used by different
-- utilities and crawling libraries (for example curl or Wget). If the request
-- is redirected to a page different from the one a (valid) browser request
-- reaches, that means the utility is banned.
--
-- @args http-useragent-tester.useragents A table with more User-Agent headers.
-- Default: nil
--
-- @output
-- PORT   STATE SERVICE REASON
-- 80/tcp open  http    syn-ack
-- | http-useragent-tester:
-- |
-- |   Allowed User Agents:
-- |
-- |     lwp-trivial
-- |     PHP/
-- |     Python-urllib/2.5
-- |     GT::WWW
-- |     Snoopy
-- |     MFC_Tear_Sample
-- |     HTTP::Lite
-- |     PHPCrawl
-- |     URI::Fetch
-- |     Zend_Http_Client
-- |     http client
-- |     PECL::HTTP
-- |     WWW-Mechanize/1.34
-- |
-- |   Forbidden User Agents:
-- |
-- |     libwww redirected to: https://www.some-random-page.com/unsupportedbrowser (different host)
-- |     libcurl-agent/1.0 redirected to: https://www.some-random-page.com/unsupportedbrowser (different host)
-- |_    Wget/1.13.4 (linux-gnu) redirected to: https://www.some-random-page.com/unsupportedbrowser (different host)
--
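-- Extra User-Agent strings can also be passed as a script argument, for
-- example (the agent names here are only placeholders):
-- nmap -p80 --script http-useragent-tester.nse --script-args 'http-useragent-tester.useragents={"UA-1","UA-2"}' <host>
--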
---

categories = {"discovery", "safe"}
author = "George Chatzisofroniou"
license = "Same as Nmap--See http://nmap.org/book/man-legal.html"

local http = require "http"
local httpspider = require "httpspider"
local shortport = require "shortport"
local stdnse = require "stdnse"
local table = require "table"
local target = require "target"
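
-- Sends a GET request for "/" with the given User-Agent header and returns
-- the last location the request was redirected to, or false if the request
-- was not redirected at all.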
local getLastLoc = function(host, port, useragent)

  -- redirect_ok returns a closure that http.get consults before following
  -- each redirect; the counter c caps the redirect chain at 3 hops.
  local options = {header={}, no_cache=true, redirect_ok=function(host,port)
    local c = 3
    return function(url)
      if ( c==0 ) then return false end
      c = c - 1
      return true
    end
  end }

  options['header']['User-Agent'] = useragent

  stdnse.print_debug(2, "Making a request with User-Agent: " .. useragent)

  local response = http.get(host, port, '/', options)

  if response.location then
    -- response.location holds the whole redirect chain; return its last entry.
    return response.location[#response.location] or false
  end

  return false

end
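
-- Run against any open service on ports 80 or 443, or any service detected
-- as http or https.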
portrule = shortport.port_or_service( {80, 443}, {"http", "https"}, "tcp", "open")
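
-- The action compares where a normal browser request ends up against where
-- each library/crawler User-Agent ends up; an agent that is redirected
-- somewhere different is reported as forbidden.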
action = function(host, port)

  local moreagents = stdnse.get_script_args("http-useragent-tester.useragents")
  local newtargets = stdnse.get_script_args("newtargets")

  -- We don't crawl any site. We initialize a crawler only to use its
  -- iswithinhost method.
  local crawler = httpspider.Crawler:new(host, port, '/', { scriptname = SCRIPT_NAME } )

  local HTTPlibs = {
    "libwww",
    "lwp-trivial",
    "libcurl-agent/1.0",
    "PHP/",
    "Python-urllib/2.5",
    "GT::WWW",
    "Snoopy",
    "MFC_Tear_Sample",
    "HTTP::Lite",
    "PHPCrawl",
    "URI::Fetch",
    "Zend_Http_Client",
    "http client",
    "PECL::HTTP",
    "Wget/1.13.4 (linux-gnu)",
    "WWW-Mechanize/1.34"
  }

  -- Append any user-supplied User-Agent strings.
  if moreagents then
    for _, l in ipairs(moreagents) do
      table.insert(HTTPlibs, l)
    end
  end

  -- Perform a request with a normal browser User-Agent and record the final
  -- location it is redirected to (false if it is not redirected).
  local loc = getLastLoc(host, port, "Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.57 Safari/537.17")

  local allowed, forb = {}, {}

  for _, l in ipairs(HTTPlibs) do

    local libloc = getLastLoc(host, port, l)

    -- If the library's request ended up at a different location than the
    -- browser's, it was redirected somewhere else, hence is forbidden.
    if loc ~= libloc then
      local msg = l
      -- libloc is false when this particular request was not redirected,
      -- so only report and parse a target location when one exists.
      if libloc then
        msg = msg .. " redirected to: " .. libloc
        local libhost = http.parse_url(libloc)
        if not crawler:iswithinhost(libhost.host) then
          msg = msg .. " (different host)"
          if newtargets then
            target.add(libhost.host)
          end
        end
      end
      table.insert(forb, msg)
    else
      table.insert(allowed, l)
    end

  end

  if next(allowed) ~= nil then
    table.insert(allowed, 1, "Allowed User Agents:")
  end

  if next(forb) ~= nil then
    table.insert(forb, 1, "Forbidden User Agents:")
  end

  return {allowed, forb}

end