From 7819471a54e7a2f8470b4b3e22aea9914d59a7b0 Mon Sep 17 00:00:00 2001
From: gyani
Date: Fri, 14 Aug 2015 12:38:03 +0000
Subject: [PATCH] Removed http-email-harvest and made changes to CHANGELOG and
 script.db to show the same. The new http-grep committed earlier does what
 http-email-harvest does by default (no patterns given).

---
 CHANGELOG                      |  3 ++
 scripts/http-email-harvest.nse | 88 ----------------------------------
 scripts/script.db              |  1 -
 3 files changed, 3 insertions(+), 89 deletions(-)
 delete mode 100644 scripts/http-email-harvest.nse

diff --git a/CHANGELOG b/CHANGELOG
index a1b54c4d3..7ed7e07c0 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -1,5 +1,8 @@
 # Nmap Changelog ($Id$); -*-text-*-
 
+o [NSE] Removed http-email-harvest as the new http-grep does email address
+  scraping by default. [Gyanendra Mishra]
+
 o [NSE] Added script http-fetch. This script can be used to fetch all files
   from the target, specific files from the target or files that match a given
   pattern. [Gyanendra Mishra]
diff --git a/scripts/http-email-harvest.nse b/scripts/http-email-harvest.nse
deleted file mode 100644
index d978d4b8c..000000000
--- a/scripts/http-email-harvest.nse
+++ /dev/null
@@ -1,88 +0,0 @@
-local httpspider = require "httpspider"
-local shortport = require "shortport"
-local stdnse = require "stdnse"
-local table = require "table"
-
-description = [[
-Spiders a web site and collects e-mail addresses.
-]]
-
----
--- @usage
--- nmap --script=http-email-harvest <target>
---
--- @output
--- PORT   STATE SERVICE REASON
--- 80/tcp open  http    syn-ack
--- | http-email-harvest:
--- | Spidering limited to: maxdepth=3; maxpagecount=20
--- |   root@examplec.com
--- |_  postmaster@example.com
---
--- @args http-email-harvest.maxdepth the maximum amount of directories beneath
---       the initial url to spider. A negative value disables the limit.
---       (default: 3)
--- @args http-email-harvest.maxpagecount the maximum amount of pages to visit.
---       A negative value disables the limit (default: 20)
--- @args http-email-harvest.url the url to start spidering. This is a URL
---       relative to the scanned host eg. /default.html (default: /)
--- @args http-email-harvest.withinhost only spider URLs within the same host.
---       (default: true)
--- @args http-email-harvest.withindomain only spider URLs within the same
---       domain. This widens the scope from withinhost and can
---       not be used in combination. (default: false)
---
-
-author = "Patrik Karlsson"
-license = "Same as Nmap--See http://nmap.org/book/man-legal.html"
-categories = {"discovery", "safe"}
-
-
-portrule = shortport.http
-
-function action(host, port)
-  local EMAIL_PATTERN = "[A-Za-z0-9%.%%%+%-]+@[A-Za-z0-9%.%%%+%-]+%.%w%w%w?%w?"
-
-  local crawler = httpspider.Crawler:new(host, port, nil, {
-      scriptname = SCRIPT_NAME
-    }
-  )
-
-  if ( not(crawler) ) then
-    return
-  end
-  crawler:set_timeout(10000)
-
-  local emails = {}
-  while(true) do
-    local status, r = crawler:crawl()
-    -- if the crawler fails it can be due to a number of different reasons
-    -- most of them are "legitimate" and should not be reason to abort
-    if ( not(status) ) then
-      if ( r.err ) then
-        return stdnse.format_output(true, ("ERROR: %s"):format(r.reason))
-      else
-        break
-      end
-    end
-
-    -- Collect each e-mail address and build a unique index of them
-    if r.response.body then
-      for email in r.response.body:gmatch(EMAIL_PATTERN) do
-        emails[email] = true
-      end
-    end
-  end
-
-  -- if no email addresses were collected abort
-  if ( not(emails) ) then return end
-
-  local results = {}
-  for email, _ in pairs(emails) do
-    table.insert(results, email)
-  end
-
-  results.name = crawler:getLimitations()
-
-  return stdnse.format_output(true, results)
-end
diff --git a/scripts/script.db b/scripts/script.db
index 957476590..1d9aa111a 100644
--- a/scripts/script.db
+++ b/scripts/script.db
@@ -163,7 +163,6 @@ Entry { filename = "http-dombased-xss.nse", categories = { "exploit", "intrusive
 Entry { filename = "http-domino-enum-passwords.nse", categories = { "auth", "intrusive", } }
 Entry { filename = "http-drupal-enum.nse", categories = { "discovery", "intrusive", } }
 Entry { filename = "http-drupal-enum-users.nse", categories = { "discovery", "intrusive", } }
-Entry { filename = "http-email-harvest.nse", categories = { "discovery", "safe", } }
 Entry { filename = "http-enum.nse", categories = { "discovery", "intrusive", "vuln", } }
 Entry { filename = "http-errors.nse", categories = { "discovery", "intrusive", } }
 Entry { filename = "http-exif-spider.nse", categories = { "intrusive", } }
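
Note: per the commit message, the removed script's behaviour is reproduced by
running the rewritten http-grep with no patterns, in which case it falls back
to its built-in e-mail matching. A minimal sketch of the equivalent
invocation, assuming http-grep's stock defaults (no script arguments given):

  nmap -p80 --script http-grep <target>

For reference, here is a small standalone Lua sketch of the matching the
deleted action() performed on each response body. The pattern is copied
verbatim from the deleted file; the sample body string and running it outside
of NSE are invented for illustration:

  -- Extract e-mail addresses the way the deleted script did: gmatch over the
  -- body with EMAIL_PATTERN, keying a table by address so duplicates collapse.
  local EMAIL_PATTERN = "[A-Za-z0-9%.%%%+%-]+@[A-Za-z0-9%.%%%+%-]+%.%w%w%w?%w?"
  local body = "contact root@example.com or postmaster@example.org"
  local emails = {}
  for email in body:gmatch(EMAIL_PATTERN) do
    emails[email] = true
  end
  for email in pairs(emails) do
    print(email)  -- prints each unique address found in the body
  end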