Mirror of https://github.com/carlospolop/privilege-escalation-awesome-scripts-suite.git (synced 2026-02-11 15:16:39 +00:00)

Compare commits: 15 commits, 20260121-4 ... test/chack
| Author | SHA1 | Date |
|---|---|---|
|  | 10e1786e6d |  |
|  | e60c50100e |  |
|  | 8e917c239f |  |
|  | 143a20f17e |  |
|  | de542f05a4 |  |
|  | a10675d58f |  |
|  | 5c110bd4f8 |  |
|  | c1bf38a8ab |  |
|  | 04c0b8aab3 |  |
|  | a6c0491438 |  |
|  | fce28d2b81 |  |
|  | fcc78b919a |  |
|  | 29d350fa79 |  |
|  | 1473fedcbf |  |
|  | f8f4250b81 |  |
@@ -1,4 +1,4 @@
name: Codex PR Triage
name: Chack-Agent PR Triage

on:
workflow_run:
@@ -6,7 +6,7 @@ on:
types: [completed]

jobs:
codex_triage:
chack_agent_triage:
if: ${{ github.event.workflow_run.conclusion == 'success' }}
runs-on: ubuntu-latest
permissions:
@@ -28,6 +28,7 @@ jobs:
- name: Resolve PR context
id: gate
env:
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ github.token }}
run: |
pr_number="${{ github.event.workflow_run.pull_requests[0].number }}"
@@ -79,15 +80,24 @@ jobs:
${{ steps.gate.outputs.base_ref }} \
+refs/pull/${{ steps.gate.outputs.pr_number }}/head

- name: Run Codex
id: run_codex
- name: Run Chack Agent
id: run_chack
if: ${{ steps.gate.outputs.should_run == 'true' }}
uses: openai/codex-action@v1
uses: carlospolop/chack-agent@master
with:
openai-api-key: ${{ secrets.OPENAI_API_KEY }}
output-schema-file: .github/codex/pr-merge-schema.json
model: gpt-5.2-codex
prompt: |
provider: openrouter
model_primary: BEST_QUALITY
main_action: peass-ng
sub_action: Chack-Agent PR Triage
system_prompt: |
You are Chack Agent, an elite PR reviewer for PEASS-ng.
Be conservative: merge only if changes are simple, safe, and valuable according to the user's given guidelines.
If in doubt, comment with clear questions or concerns.
Remember that you are an autonomous agent; use the exec tool to run the needed commands to list, read, analyze, modify, test...
tools_config_json: "{\"exec_enabled\": true}"
session_config_json: "{\"long_term_memory_enabled\": false}"
output_schema_file: .github/chack-agent/pr-merge-schema.json
user_prompt: |
You are reviewing PR #${{ steps.gate.outputs.pr_number }} for ${{ github.repository }}.

Decide whether to merge or comment. Merge only if all of the following are true:
@@ -107,21 +117,23 @@ jobs:
Review ONLY the changes introduced by the PR:
git log --oneline ${{ steps.gate.outputs.base_sha }}...${{ steps.gate.outputs.head_sha }}

Output JSON only, following the provided schema.
Output JSON only, following the provided schema:
.github/chack-agent/pr-merge-schema.json
openrouter_api_key: ${{ secrets.OPENROUTER_API_KEY }}

- name: Parse Codex decision
- name: Parse Chack Agent decision
id: parse
if: ${{ steps.gate.outputs.should_run == 'true' }}
env:
CODEX_MESSAGE: ${{ steps.run_codex.outputs.final-message }}
CHACK_MESSAGE: ${{ steps.run_chack.outputs.final-message }}
run: |
python3 - <<'PY'
import json
import os

data = json.loads(os.environ.get('CODEX_MESSAGE', '') or '{}')
data = json.loads(os.environ.get('CHACK_MESSAGE', '') or '{}')
decision = data.get('decision', 'comment')
message = data.get('message', '').strip() or 'Codex did not provide details.'
message = data.get('message', '').strip() or 'Chack Agent did not provide details.'
with open(os.environ['GITHUB_OUTPUT'], 'a') as handle:
handle.write(f"decision={decision}\n")
handle.write("message<<EOF\n")
@@ -131,31 +143,31 @@ jobs:

merge_or_comment:
runs-on: ubuntu-latest
needs: codex_triage
if: ${{ github.event.workflow_run.conclusion == 'success' && needs.codex_triage.outputs.should_run == 'true' && needs.codex_triage.outputs.decision != '' }}
needs: chack_agent_triage
if: ${{ github.event.workflow_run.conclusion == 'success' && needs.chack_agent_triage.outputs.should_run == 'true' && needs.chack_agent_triage.outputs.decision != '' }}
permissions:
contents: write
pull-requests: write
steps:
- name: Merge PR when approved
if: ${{ needs.codex_triage.outputs.decision == 'merge' }}
if: ${{ needs.chack_agent_triage.outputs.decision == 'merge' }}
env:
GH_TOKEN: ${{ github.token }}
PR_NUMBER: ${{ needs.codex_triage.outputs.pr_number }}
PR_NUMBER: ${{ needs.chack_agent_triage.outputs.pr_number }}
run: |
gh api \
-X PUT \
-H "Accept: application/vnd.github+json" \
/repos/${{ github.repository }}/pulls/${PR_NUMBER}/merge \
-f merge_method=squash \
-f commit_title="Auto-merge PR #${PR_NUMBER} (Codex)"
-f commit_title="Auto-merge PR #${PR_NUMBER} (Chack Agent)"

- name: Comment with doubts
if: ${{ needs.codex_triage.outputs.decision == 'comment' }}
if: ${{ needs.chack_agent_triage.outputs.decision == 'comment' }}
uses: actions/github-script@v7
env:
PR_NUMBER: ${{ needs.codex_triage.outputs.pr_number }}
CODEX_MESSAGE: ${{ needs.codex_triage.outputs.message }}
PR_NUMBER: ${{ needs.chack_agent_triage.outputs.pr_number }}
CHACK_MESSAGE: ${{ needs.chack_agent_triage.outputs.message }}
with:
github-token: ${{ github.token }}
script: |
@@ -163,5 +175,5 @@ jobs:
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: Number(process.env.PR_NUMBER),
body: process.env.CODEX_MESSAGE,
body: process.env.CHACK_MESSAGE,
});
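For orientation, the parse step above only reads two fields from the agent's final message, and it records the multi-line message using GitHub Actions' heredoc-style output syntax. A rough sketch of a decision payload and the output it produces (the payload shape is an assumption; the actual .github/chack-agent/pr-merge-schema.json is not shown in this diff):

    # Hypothetical final-message JSON; only 'decision' and 'message' are read by the parse step
    CHACK_MESSAGE='{"decision": "merge", "message": "Docs-only change and CI is green."}'
    # Equivalent of what the Python heredoc appends to $GITHUB_OUTPUT
    {
      echo "decision=merge"
      echo "message<<EOF"
      echo "Docs-only change and CI is green."
      echo "EOF"
    } >> "$GITHUB_OUTPUT"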
165  .github/workflows/ci-master-failure-chack-agent-pr.yml  (vendored, Normal file)
@@ -0,0 +1,165 @@
name: CI-master Failure Chack-Agent PR

on:
workflow_run:
workflows: ["CI-master_test"]
types: [completed]

jobs:
chack_agent_fix_master_failure:
if: >
${{ github.event.workflow_run.conclusion == 'failure' &&
github.event.workflow_run.head_branch == 'master' &&
!startsWith(github.event.workflow_run.head_commit.message, 'Fix CI-master failures for run #') }}
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
issues: write
actions: read
env:
TARGET_BRANCH: master
FIX_BRANCH: chack-agent/ci-master-fix-${{ github.event.workflow_run.id }}
steps:
- name: Checkout failing commit
uses: actions/checkout@v5
with:
ref: ${{ github.event.workflow_run.head_sha }}
fetch-depth: 0
persist-credentials: true
token: ${{ secrets.CHACK_AGENT_FIXER_TOKEN || github.token }}

- name: Configure git author
run: |
git config user.name "chack-agent"
git config user.email "chack-agent@users.noreply.github.com"

- name: Create fix branch
run: git checkout -b "$FIX_BRANCH"

- name: Fetch failure summary and failed-step logs
env:
GH_TOKEN: ${{ github.token }}
RUN_ID: ${{ github.event.workflow_run.id }}
run: |
failed_logs_file="$(pwd)/chack_failed_steps_logs.txt"
if gh run view "$RUN_ID" --repo "${{ github.repository }}" --log-failed > "$failed_logs_file"; then
if [ ! -s "$failed_logs_file" ]; then
echo "No failed step logs were returned by gh run view --log-failed." > "$failed_logs_file"
fi
else
echo "Failed to download failed step logs with gh run view --log-failed." > "$failed_logs_file"
fi
echo "FAILED_LOGS_PATH=$failed_logs_file" >> "$GITHUB_ENV"

gh api -H "Accept: application/vnd.github+json" \
/repos/${{ github.repository }}/actions/runs/$RUN_ID/jobs \
--paginate > /tmp/jobs.json
python3 - <<'PY'
import json

data = json.load(open('/tmp/jobs.json'))
lines = []
for job in data.get('jobs', []):
if job.get('conclusion') == 'failure':
lines.append(f"Job: {job.get('name')} (id {job.get('id')})")
lines.append(f"URL: {job.get('html_url')}")
for step in job.get('steps', []):
if step.get('conclusion') == 'failure':
lines.append(f"  Step: {step.get('name')}")
lines.append("")

summary = "\n".join(lines).strip() or "No failing job details found."
with open('chack_failure_summary.txt', 'w') as handle:
handle.write(summary)
PY

- name: Create Chack Agent prompt
env:
RUN_URL: ${{ github.event.workflow_run.html_url }}
HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
run: |
{
echo "You are fixing a failing CI-master_test run in ${{ github.repository }}."
echo "The failing workflow run is: ${RUN_URL}"
echo "The failing commit SHA is: ${HEAD_SHA}"
echo "The target branch for the final PR is: ${TARGET_BRANCH}"
echo ""
echo "Failure summary:"
cat chack_failure_summary.txt
echo ""
echo "Failed-step logs file absolute path (local runner): ${FAILED_LOGS_PATH}"
echo "Read that file to inspect the exact failing logs."
echo ""
echo "Please identify the cause, apply an easy, simple and minimal fix, and update files accordingly."
echo "Run any fast checks you can locally (no network)."
echo "Leave the repo in a state ready to commit; changes will be committed and pushed automatically."
} > chack_prompt.txt

- name: Run Chack Agent
id: run_chack
uses: carlospolop/chack-agent@master
with:
provider: openrouter
model_primary: BEST_QUALITY
main_action: peass-ng
sub_action: CI-master Failure Chack-Agent PR
system_prompt: |
Diagnose the failing gh actions workflow, propose the minimal and effective safe fix, and implement it.
Run only fast, local checks (no network). Leave the repo ready to commit.
prompt_file: chack_prompt.txt
tools_config_json: "{\"exec_enabled\": true}"
session_config_json: "{\"long_term_memory_enabled\": false}"
openrouter_api_key: ${{ secrets.OPENROUTER_API_KEY }}

- name: Commit and push fix branch if changed
id: push_fix
run: |
if git diff --quiet; then
echo "No changes to commit."
echo "pushed=false" >> "$GITHUB_OUTPUT"
exit 0
fi

rm -f chack_failure_summary.txt chack_prompt.txt chack_failed_steps_logs.txt
git add -A
git reset -- chack_failure_summary.txt chack_prompt.txt chack_failed_steps_logs.txt
git commit -m "Fix CI-master failures for run #${{ github.event.workflow_run.id }}"
git push origin HEAD:"$FIX_BRANCH"
echo "pushed=true" >> "$GITHUB_OUTPUT"

- name: Create PR to master
if: ${{ steps.push_fix.outputs.pushed == 'true' }}
id: create_pr
env:
GH_TOKEN: ${{ secrets.CHACK_AGENT_FIXER_TOKEN || github.token }}
RUN_URL: ${{ github.event.workflow_run.html_url }}
run: |
pr_url=$(gh pr create \
--title "Fix CI-master_test failure (run #${{ github.event.workflow_run.id }})" \
--body "Automated Chack Agent fix for failing CI-master_test run: ${RUN_URL}" \
--base "$TARGET_BRANCH" \
--head "$FIX_BRANCH")
echo "url=$pr_url" >> "$GITHUB_OUTPUT"

- name: Comment on created PR with Chack Agent result
if: ${{ steps.push_fix.outputs.pushed == 'true' && steps.run_chack.outputs.final-message != '' }}
uses: actions/github-script@v7
env:
PR_URL: ${{ steps.create_pr.outputs.url }}
CHACK_MESSAGE: ${{ steps.run_chack.outputs.final-message }}
with:
github-token: ${{ github.token }}
script: |
const prUrl = process.env.PR_URL;
const match = prUrl.match(/\/pull\/(\d+)$/);
if (!match) {
core.info(`Could not parse PR number from URL: ${prUrl}`);
return;
}
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: Number(match[1]),
body: process.env.CHACK_MESSAGE,
});
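Note the interaction between the job's `if:` guard and the commit message used by the fix step: failing runs whose head commit message begins with "Fix CI-master failures for run #" are skipped, and that is exactly the prefix the automated fix commit uses, which is meant to keep the job from re-triggering on its own fixes. A minimal sketch of that guard, with a hypothetical commit message:

    head_commit_msg='Fix CI-master failures for run #12345678'   # hypothetical value
    case "$head_commit_msg" in
      "Fix CI-master failures for run #"*) echo "skip: failure came from an automated fix commit" ;;
      *) echo "eligible: run chack_agent_fix_master_failure" ;;
    esac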
@@ -1,4 +1,4 @@
name: PR Failure Codex Dispatch
name: PR Failure Chack-Agent Dispatch

on:
workflow_run:
@@ -41,8 +41,8 @@ jobs:
pr_labels=$(gh api -H "Accept: application/vnd.github+json" \
/repos/${{ github.repository }}/issues/${PR_NUMBER} \
--jq '.labels[].name')
if echo "$pr_labels" | grep -q "^codex-fix-attempted$"; then
echo "codex fix already attempted for PR #${PR_NUMBER}; skipping."
if echo "$pr_labels" | grep -q "^chack-agent-fix-attempted$"; then
echo "chack-agent fix already attempted for PR #${PR_NUMBER}; skipping."
should_run=false
else
should_run=true
@@ -55,7 +55,7 @@ jobs:
echo "should_run=${should_run}"
} >> "$GITHUB_OUTPUT"

codex_on_failure:
chack_agent_on_failure:
needs: resolve_pr_context
if: ${{ needs.resolve_pr_context.outputs.author == 'carlospolop' && needs.resolve_pr_context.outputs.should_run == 'true' }}
runs-on: ubuntu-latest
@@ -75,7 +75,7 @@ jobs:
github-token: ${{ github.token }}
script: |
const prNumber = Number(process.env.PR_NUMBER);
const body = `PR #${prNumber} had a failing workflow "${process.env.WORKFLOW_NAME}".\n\nRun: ${process.env.RUN_URL}\n\nLaunching Codex to attempt a fix.`;
const body = `PR #${prNumber} had a failing workflow "${process.env.WORKFLOW_NAME}".\n\nRun: ${process.env.RUN_URL}\n\nLaunching Chack Agent to attempt a fix.`;
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
@@ -90,7 +90,7 @@ jobs:
run: |
gh api -X POST -H "Accept: application/vnd.github+json" \
/repos/${{ github.repository }}/issues/${PR_NUMBER}/labels \
-f labels='["codex-fix-attempted"]'
-f labels[]=chack-agent-fix-attempted

- name: Checkout PR head
uses: actions/checkout@v5
@@ -99,12 +99,12 @@ jobs:
ref: ${{ github.event.workflow_run.head_sha }}
fetch-depth: 0
persist-credentials: true
token: ${{ secrets.CODEX_FIXER_TOKEN }}
token: ${{ secrets.CHACK_AGENT_FIXER_TOKEN || github.token }}

- name: Configure git author
run: |
git config user.name "codex-action"
git config user.email "codex-action@users.noreply.github.com"
git config user.name "chack-agent"
git config user.email "chack-agent@users.noreply.github.com"

- name: Fetch failure summary
env:
@@ -129,11 +129,11 @@ jobs:
lines.append("")

summary = "\n".join(lines).strip() or "No failing job details found."
with open('codex_failure_summary.txt', 'w') as handle:
with open('chack_failure_summary.txt', 'w') as handle:
handle.write(summary)
PY

- name: Create Codex prompt
- name: Create Chack Agent prompt
env:
PR_NUMBER: ${{ needs.resolve_pr_context.outputs.number }}
RUN_URL: ${{ github.event.workflow_run.html_url }}
@@ -145,21 +145,29 @@ jobs:
echo "The PR branch is: ${HEAD_BRANCH}"
echo ""
echo "Failure summary:"
cat codex_failure_summary.txt
cat chack_failure_summary.txt
echo ""
echo "Please identify the cause, apply an easy, simple and minimal fix, and update files accordingly."
echo "Run any fast checks you can locally (no network)."
echo "Leave the repo in a state ready to commit; when you finish, it will be committed and pushed automatically."
} > codex_prompt.txt
} > chack_prompt.txt

- name: Run Codex
id: run_codex
uses: openai/codex-action@v1
- name: Run Chack Agent
id: run_chack
uses: carlospolop/chack-agent@master
with:
openai-api-key: ${{ secrets.OPENAI_API_KEY }}
prompt-file: codex_prompt.txt
sandbox: workspace-write
model: gpt-5.2-codex
provider: openrouter
model_primary: BEST_QUALITY
main_action: peass-ng
sub_action: PR Failure Chack-Agent Dispatch
system_prompt: |
You are Chack Agent, an elite CI-fix engineer.
Diagnose the failing workflow, propose the minimal safe fix, and implement it.
Run only fast, local checks (no network). Leave the repo ready to commit.
prompt_file: chack_prompt.txt
tools_config_json: "{\"exec_enabled\": true}"
session_config_json: "{\"long_term_memory_enabled\": false}"
openrouter_api_key: ${{ secrets.OPENROUTER_API_KEY }}

- name: Commit and push if changed
env:
@@ -170,18 +178,18 @@ jobs:
echo "No changes to commit."
exit 0
fi
rm -f codex_failure_summary.txt codex_prompt.txt
rm -f chack_failure_summary.txt chack_prompt.txt
git add -A
git reset -- codex_failure_summary.txt codex_prompt.txt
git reset -- chack_failure_summary.txt chack_prompt.txt
git commit -m "Fix CI failures for PR #${PR_NUMBER}"
git push origin HEAD:${TARGET_BRANCH}

- name: Comment with Codex result
if: ${{ steps.run_codex.outputs.final-message != '' }}
- name: Comment with Chack Agent result
if: ${{ steps.run_chack.outputs.final-message != '' }}
uses: actions/github-script@v7
env:
PR_NUMBER: ${{ needs.resolve_pr_context.outputs.number }}
CODEX_MESSAGE: ${{ steps.run_codex.outputs.final-message }}
CHACK_MESSAGE: ${{ steps.run_chack.outputs.final-message }}
with:
github-token: ${{ github.token }}
script: |
@@ -189,5 +197,5 @@ jobs:
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: Number(process.env.PR_NUMBER),
body: process.env.CODEX_MESSAGE,
body: process.env.CHACK_MESSAGE,
});
@@ -53,3 +53,4 @@ if __name__ == "__main__":
exit(1)

main(all_modules, all_no_fat_modules, no_network_scanning, small, include_modules, exclude_modules, output)
@@ -1,39 +0,0 @@
# Title: System Information - Linux Exploit Suggester
# ID: SY_Linux_exploit_suggester
# Author: Carlos Polop
# Last Update: 07-03-2024
# Description: Execute Linux Exploit Suggester to identify potential kernel exploits:
# - Automated kernel vulnerability detection
# - Common vulnerable scenarios:
# * Known kernel vulnerabilities
# * Unpatched kernel versions
# * Missing security patches
# - Exploitation methods:
# * Kernel exploit execution: Use suggested exploits
# * Common attack vectors:
# - Kernel memory corruption
# - Race conditions
# - Use-after-free
# - Integer overflow
# * Exploit techniques:
# - Kernel memory manipulation
# - Privilege escalation
# - Root access acquisition
# - System compromise
# License: GNU GPL
# Version: 1.0
# Functions Used: print_2title, print_info
# Global Variables: $MACPEAS
# Initial Functions:
# Generated Global Variables: $les_b64
# Fat linpeas: 0
# Small linpeas: 1


if [ "$(command -v bash 2>/dev/null || echo -n '')" ] && ! [ "$MACPEAS" ]; then
print_2title "Executing Linux Exploit Suggester"
print_info "https://github.com/mzet-/linux-exploit-suggester"
les_b64="peass{https://raw.githubusercontent.com/mzet-/linux-exploit-suggester/master/linux-exploit-suggester.sh}"
echo $les_b64 | base64 -d | bash | sed "s,$(printf '\033')\\[[0-9;]*[a-zA-Z],,g" | grep -i "\[CVE" -A 10 | grep -Ev "^\-\-$" | sed -${E} "s/\[(CVE-[0-9]+-[0-9]+,?)+\].*/${SED_RED}/g"
echo ""
fi
@@ -1,41 +0,0 @@
# Title: System Information - Linux Exploit Suggester 2
# ID: SY_Linux_exploit_suggester_2
# Author: Carlos Polop
# Last Update: 07-03-2024
# Description: Execute Linux Exploit Suggester 2 (Perl version) to identify potential kernel exploits:
# - Alternative kernel vulnerability detection
# - Perl-based exploit suggestions
# - Common vulnerable scenarios:
# * Known kernel vulnerabilities
# * Unpatched kernel versions
# * Missing security patches
# * Alternative exploit paths
# - Exploitation methods:
# * Kernel exploit execution: Use suggested exploits
# * Common attack vectors:
# - Kernel memory corruption
# - Race conditions
# - Use-after-free
# - Integer overflow
# * Exploit techniques:
# - Kernel memory manipulation
# - Privilege escalation
# - Root access acquisition
# - System compromise
# License: GNU GPL
# Version: 1.0
# Functions Used: print_2title, print_info
# Global Variables:
# Initial Functions:
# Generated Global Variables: $les2_b64
# Fat linpeas: 1
# Small linpeas: 0


if [ "$(command -v perl 2>/dev/null || echo -n '')" ] && ! [ "$MACPEAS" ]; then
print_2title "Executing Linux Exploit Suggester 2"
print_info "https://github.com/jondonas/linux-exploit-suggester-2"
les2_b64="peass{https://raw.githubusercontent.com/jondonas/linux-exploit-suggester-2/master/linux-exploit-suggester-2.pl}"
echo $les2_b64 | base64 -d | perl 2>/dev/null | sed "s,$(printf '\033')\\[[0-9;]*[a-zA-Z],,g" | grep -iE "CVE" -B 1 -A 10 | grep -Ev "^\-\-$" | sed -${E} "s,CVE-[0-9]+-[0-9]+,${SED_RED},g"
echo ""
fi
@@ -30,11 +30,33 @@
# Functions Used: echo_not_found, print_2title, print_list, warn_exec
# Global Variables:
# Initial Functions:
# Generated Global Variables: $ASLR, $hypervisorflag, $detectedvirt, $unpriv_userns_clone, $perf_event_paranoid, $mmap_min_addr, $ptrace_scope, $dmesg_restrict, $kptr_restrict, $unpriv_bpf_disabled, $protected_symlinks, $protected_hardlinks
# Generated Global Variables: $ASLR, $hypervisorflag, $detectedvirt, $unpriv_userns_clone, $perf_event_paranoid, $mmap_min_addr, $ptrace_scope, $dmesg_restrict, $kptr_restrict, $unpriv_bpf_disabled, $protected_symlinks, $protected_hardlinks, $label, $sysctl_path, $sysctl_var, $zero_color, $nonzero_color, $sysctl_value
# Fat linpeas: 0
# Small linpeas: 0


print_sysctl_eq_zero() {
local label="$1"
local sysctl_path="$2"
local sysctl_var="$3"
local zero_color="$4"
local nonzero_color="$5"
local sysctl_value

print_list "$label" "$NC"
sysctl_value=$(cat "$sysctl_path" 2>/dev/null)
eval "$sysctl_var=\$sysctl_value"
if [ -z "$sysctl_value" ]; then
echo_not_found "$sysctl_path"
else
if [ "$sysctl_value" -eq 0 ]; then
echo "0" | sed -${E} "s,0,${zero_color},"
else
echo "$sysctl_value" | sed -${E} "s,.*,${nonzero_color},g"
fi
fi
}

#-- SY) AppArmor
print_2title "Protections"
print_list "AppArmor enabled? .............. "$NC
@@ -81,67 +103,25 @@ print_list "User namespace? ................ "$NC
if [ "$(cat /proc/self/uid_map 2>/dev/null)" ]; then echo "enabled" | sed "s,enabled,${SED_GREEN},"; else echo "disabled" | sed "s,disabled,${SED_RED},"; fi

#-- SY) Unprivileged user namespaces
print_list "unpriv_userns_clone? ........... "$NC
unpriv_userns_clone=$(cat /proc/sys/kernel/unprivileged_userns_clone 2>/dev/null)
if [ -z "$unpriv_userns_clone" ]; then
echo_not_found "/proc/sys/kernel/unprivileged_userns_clone"
else
if [ "$unpriv_userns_clone" -eq 0 ]; then echo "0" | sed -${E} "s,0,${SED_GREEN},"; else echo "$unpriv_userns_clone" | sed -${E} "s,.*,${SED_RED},g"; fi
fi
print_sysctl_eq_zero "unpriv_userns_clone? ........... " "/proc/sys/kernel/unprivileged_userns_clone" "unpriv_userns_clone" "$SED_GREEN" "$SED_RED"

#-- SY) Unprivileged eBPF
print_list "unpriv_bpf_disabled? ........... "$NC
unpriv_bpf_disabled=$(cat /proc/sys/kernel/unprivileged_bpf_disabled 2>/dev/null)
if [ -z "$unpriv_bpf_disabled" ]; then
echo_not_found "/proc/sys/kernel/unprivileged_bpf_disabled"
else
if [ "$unpriv_bpf_disabled" -eq 0 ]; then echo "0" | sed -${E} "s,0,${SED_RED},"; else echo "$unpriv_bpf_disabled" | sed -${E} "s,.*,${SED_GREEN},g"; fi
fi
print_sysctl_eq_zero "unpriv_bpf_disabled? ........... " "/proc/sys/kernel/unprivileged_bpf_disabled" "unpriv_bpf_disabled" "$SED_RED" "$SED_GREEN"

#-- SY) cgroup2
print_list "Cgroup2 enabled? ............... "$NC
([ "$(grep cgroup2 /proc/filesystems 2>/dev/null)" ] && echo "enabled" || echo "disabled") | sed "s,disabled,${SED_RED}," | sed "s,enabled,${SED_GREEN},"

#-- SY) Kernel hardening sysctls
print_list "kptr_restrict? ................. "$NC
kptr_restrict=$(cat /proc/sys/kernel/kptr_restrict 2>/dev/null)
if [ -z "$kptr_restrict" ]; then
echo_not_found "/proc/sys/kernel/kptr_restrict"
else
if [ "$kptr_restrict" -eq 0 ]; then echo "0" | sed -${E} "s,0,${SED_RED},"; else echo "$kptr_restrict" | sed -${E} "s,.*,${SED_GREEN},g"; fi
fi
print_sysctl_eq_zero "kptr_restrict? ................. " "/proc/sys/kernel/kptr_restrict" "kptr_restrict" "$SED_RED" "$SED_GREEN"

print_list "dmesg_restrict? ................ "$NC
dmesg_restrict=$(cat /proc/sys/kernel/dmesg_restrict 2>/dev/null)
if [ -z "$dmesg_restrict" ]; then
echo_not_found "/proc/sys/kernel/dmesg_restrict"
else
if [ "$dmesg_restrict" -eq 0 ]; then echo "0" | sed -${E} "s,0,${SED_RED},"; else echo "$dmesg_restrict" | sed -${E} "s,.*,${SED_GREEN},g"; fi
fi
print_sysctl_eq_zero "dmesg_restrict? ................ " "/proc/sys/kernel/dmesg_restrict" "dmesg_restrict" "$SED_RED" "$SED_GREEN"

print_list "ptrace_scope? .................. "$NC
ptrace_scope=$(cat /proc/sys/kernel/yama/ptrace_scope 2>/dev/null)
if [ -z "$ptrace_scope" ]; then
echo_not_found "/proc/sys/kernel/yama/ptrace_scope"
else
if [ "$ptrace_scope" -eq 0 ]; then echo "0" | sed -${E} "s,0,${SED_RED},"; else echo "$ptrace_scope" | sed -${E} "s,.*,${SED_GREEN},g"; fi
fi
print_sysctl_eq_zero "ptrace_scope? .................. " "/proc/sys/kernel/yama/ptrace_scope" "ptrace_scope" "$SED_RED" "$SED_GREEN"

print_list "protected_symlinks? ............ "$NC
protected_symlinks=$(cat /proc/sys/fs/protected_symlinks 2>/dev/null)
if [ -z "$protected_symlinks" ]; then
echo_not_found "/proc/sys/fs/protected_symlinks"
else
if [ "$protected_symlinks" -eq 0 ]; then echo "0" | sed -${E} "s,0,${SED_RED},"; else echo "$protected_symlinks" | sed -${E} "s,.*,${SED_GREEN},g"; fi
fi
print_sysctl_eq_zero "protected_symlinks? ............ " "/proc/sys/fs/protected_symlinks" "protected_symlinks" "$SED_RED" "$SED_GREEN"

print_list "protected_hardlinks? ........... "$NC
protected_hardlinks=$(cat /proc/sys/fs/protected_hardlinks 2>/dev/null)
if [ -z "$protected_hardlinks" ]; then
echo_not_found "/proc/sys/fs/protected_hardlinks"
else
if [ "$protected_hardlinks" -eq 0 ]; then echo "0" | sed -${E} "s,0,${SED_RED},"; else echo "$protected_hardlinks" | sed -${E} "s,.*,${SED_GREEN},g"; fi
fi
print_sysctl_eq_zero "protected_hardlinks? ........... " "/proc/sys/fs/protected_hardlinks" "protected_hardlinks" "$SED_RED" "$SED_GREEN"

print_list "perf_event_paranoid? ........... "$NC
perf_event_paranoid=$(cat /proc/sys/kernel/perf_event_paranoid 2>/dev/null)
@@ -151,13 +131,7 @@ else
if [ "$perf_event_paranoid" -le 1 ]; then echo "$perf_event_paranoid" | sed -${E} "s,.*,${SED_RED},g"; else echo "$perf_event_paranoid" | sed -${E} "s,.*,${SED_GREEN},g"; fi
fi

print_list "mmap_min_addr? ................. "$NC
mmap_min_addr=$(cat /proc/sys/vm/mmap_min_addr 2>/dev/null)
if [ -z "$mmap_min_addr" ]; then
echo_not_found "/proc/sys/vm/mmap_min_addr"
else
if [ "$mmap_min_addr" -eq 0 ]; then echo "0" | sed -${E} "s,0,${SED_RED},"; else echo "$mmap_min_addr" | sed -${E} "s,.*,${SED_GREEN},g"; fi
fi
print_sysctl_eq_zero "mmap_min_addr? ................. " "/proc/sys/vm/mmap_min_addr" "mmap_min_addr" "$SED_RED" "$SED_GREEN"

print_list "lockdown mode? ................. "$NC
if [ -f "/sys/kernel/security/lockdown" ]; then
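Besides printing the value with the requested colours, print_sysctl_eq_zero also assigns the raw sysctl value to the variable named by its third argument (via eval), so later checks can still reference it. A minimal usage sketch, assuming the linPEAS core has already defined $E and the SED_* colour variables:

    print_sysctl_eq_zero "kptr_restrict? ................. " "/proc/sys/kernel/kptr_restrict" "kptr_restrict" "$SED_RED" "$SED_GREEN"
    # The eval inside the helper populated $kptr_restrict with the raw value
    [ -n "$kptr_restrict" ] && echo "kptr_restrict value: $kptr_restrict"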
@@ -1,20 +0,0 @@
# Title: Container - Am I Containered
# ID: CT_Am_I_contained
# Author: Carlos Polop
# Last Update: 22-08-2023
# Description: Am I Containered tool
# License: GNU GPL
# Version: 1.0
# Functions Used: print_2title, execBin
# Global Variables:
# Initial Functions:
# Generated Global Variables: $FAT_LINPEAS_AMICONTAINED
# Fat linpeas: 1
# Small linpeas: 0


if [ "$$FAT_LINPEAS_AMICONTAINED" ]; then
print_2title "Am I Containered?"
FAT_LINPEAS_AMICONTAINED="peass{https://github.com/genuinetools/amicontained/releases/latest/download/amicontained-linux-amd64}"
execBin "AmIContainered" "https://github.com/genuinetools/amicontained" "$FAT_LINPEAS_AMICONTAINED"
fi
@@ -17,7 +17,7 @@
# Functions Used: print_2title, print_list, echo_not_found
# Global Variables: $SEARCH_IN_FOLDER, $Wfolders, $SED_RED, $SED_RED_YELLOW, $NC
# Initial Functions:
# Generated Global Variables: $WRITABLESYSTEMDPATH, $line, $service, $file, $version, $user, $caps, $path, $path_line, $service_file, $exec_line, $cmd
# Generated Global Variables: $WRITABLESYSTEMDPATH, $line, $service, $file, $version, $user, $caps, $path, $path_line, $service_file, $exec_line, $exec_value, $cmd, $cmd_path
# Fat linpeas: 0
# Small linpeas: 1

@@ -116,18 +116,20 @@ if ! [ "$SEARCH_IN_FOLDER" ]; then
# Check ExecStart paths
grep -E "ExecStart|ExecStartPre|ExecStartPost" "$service_file" 2>/dev/null |
while read -r exec_line; do
# Extract the first word after ExecStart* as the command
cmd=$(echo "$exec_line" | awk '{print $2}' | tr -d '"')
# Extract the rest as arguments
args=$(echo "$exec_line" | awk '{$1=$2=""; print $0}' | tr -d '"')
# Extract command from the right side of Exec*=, not from argv
exec_value="${exec_line#*=}"
exec_value=$(echo "$exec_value" | sed 's/^[[:space:]]*//')
cmd=$(echo "$exec_value" | awk '{print $1}' | tr -d '"')
# Strip systemd command prefixes (-, @, :, +, !) before path checks
cmd_path=$(echo "$cmd" | sed -E 's/^[-@:+!]+//')

# Only check the command path, not arguments
if [ -n "$cmd" ] && [ -w "$cmd" ]; then
echo "$service: $cmd (from $exec_line)" | sed -${E} "s,.*,${SED_RED},g"
if [ -n "$cmd_path" ] && [ -w "$cmd_path" ]; then
echo "$service: $cmd_path (from $exec_line)" | sed -${E} "s,.*,${SED_RED},g"
fi
# Check for relative paths only in the command, not arguments
if [ -n "$cmd" ] && [ "${cmd#/}" = "$cmd" ] && ! echo "$cmd" | grep -qE '^-|^--'; then
echo "$service: Uses relative path '$cmd' (from $exec_line)" | sed -${E} "s,.*,${SED_RED},g"
if [ -n "$cmd_path" ] && [ "${cmd_path#/}" = "$cmd_path" ] && [ "${cmd_path#\$}" = "$cmd_path" ]; then
echo "$service: Uses relative path '$cmd_path' (from $exec_line)" | sed -${E} "s,.*,${SED_RED},g"
fi
done
fi
@@ -153,4 +155,4 @@ if ! [ "$SEARCH_IN_FOLDER" ]; then
fi

echo ""
fi
fi
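The reworked parsing matters for systemd's special executable prefixes: the command is now taken from the right-hand side of Exec*= and stripped of the -, @, :, + and ! prefix characters before the writability and relative-path checks. A quick sketch with a hypothetical unit entry:

    exec_line='ExecStart=-/usr/local/bin/backup.sh --daily'    # hypothetical ExecStart line
    exec_value="${exec_line#*=}"                                # -> "-/usr/local/bin/backup.sh --daily"
    cmd=$(echo "$exec_value" | awk '{print $1}' | tr -d '"')    # -> "-/usr/local/bin/backup.sh"
    cmd_path=$(echo "$cmd" | sed -E 's/^[-@:+!]+//')            # -> "/usr/local/bin/backup.sh", usable in [ -w ] checks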
@@ -1,30 +0,0 @@
# Title: Software Information - Checking leaks in git repositories
# ID: SI_Leaks_git_repo
# Author: Carlos Polop
# Last Update: 22-08-2023
# Description: Checking leaks in git repositories
# License: GNU GPL
# Version: 1.0
# Functions Used: execBin, print_2title
# Global Variables: $MACPEAS, $TIMEOUT
# Initial Functions:
# Generated Global Variables: $git_dirname, $FAT_LINPEAS_GITLEAKS
# Fat linpeas: 1
# Small linpeas: 0


if ! [ "$FAST" ] && ! [ "$SUPERFAST" ] && [ "$TIMEOUT" ]; then
print_2title "Checking leaks in git repositories"
printf "%s\n" "$PSTORAGE_GITHUB" | while read f; do
if echo "$f" | grep -Eq ".git$"; then
git_dirname=$(dirname "$f")
if [ "$MACPEAS" ]; then
FAT_LINPEAS_GITLEAKS="peass{https://github.com/gitleaks/gitleaks/releases/download/v8.17.0/gitleaks_8.17.0_darwin_arm64.tar.gz}"
else
FAT_LINPEAS_GITLEAKS="peass{https://github.com/gitleaks/gitleaks/releases/download/v8.17.0/gitleaks_8.17.0_linux_x64.tar.gz}"
fi
execBin "GitLeaks (checking $git_dirname)" "https://github.com/zricethezav/gitleaks" "$FAT_LINPEAS_GITLEAKS" "detect -s '$git_dirname' -v | grep -E 'Description|Match|Secret|Message|Date'"
fi
done
echo ""
fi
@@ -8,6 +8,7 @@ from .yamlGlobals import (
class LinpeasModule:
def __init__(self, path):
self.path = path
real_path = os.path.realpath(path)
with open(path, 'r') as file:
self.module_text = file.read()

@@ -29,7 +30,7 @@ class LinpeasModule:
self.section_info = {}
if not (self.is_base or self.is_function or self.is_variable):
for module in LINPEAS_PARTS["modules"]:
if module["folder_path"] in path:
if os.path.realpath(module["folder_path"]) in real_path:
self.section_info = module
self.is_check = True
break
60  linPEAS/tests/test_modules_metadata.py  (Normal file)
|
||||
import re
|
||||
import sys
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
class LinpeasModulesMetadataTests(unittest.TestCase):
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
cls.repo_root = Path(__file__).resolve().parents[2]
|
||||
cls.linpeas_dir = cls.repo_root / "linPEAS"
|
||||
cls.parts_dir = cls.linpeas_dir / "builder" / "linpeas_parts"
|
||||
|
||||
# Ensure `import builder.*` works when tests are run from repo root.
|
||||
sys.path.insert(0, str(cls.linpeas_dir))
|
||||
|
||||
from builder.src.linpeasModule import LinpeasModule # pylint: disable=import-error
|
||||
|
||||
cls.LinpeasModule = LinpeasModule
|
||||
|
||||
def _iter_module_files(self):
|
||||
return sorted(self.parts_dir.rglob("*.sh"))
|
||||
|
||||
def test_all_modules_parse(self):
|
||||
module_files = self._iter_module_files()
|
||||
self.assertGreater(len(module_files), 0, "No linPEAS module files were found.")
|
||||
|
||||
# Parsing a module validates its metadata and dependencies.
|
||||
for path in module_files:
|
||||
_ = self.LinpeasModule(str(path))
|
||||
|
||||
def test_check_module_id_matches_filename(self):
|
||||
for path in self._iter_module_files():
|
||||
module = self.LinpeasModule(str(path))
|
||||
if not getattr(module, "is_check", False):
|
||||
continue
|
||||
|
||||
# For checks, the filename (without numeric prefix) must match the module ID
|
||||
# (either full ID or stripping section prefix like `SI_`).
|
||||
file_base = re.sub(r"^[0-9]+_", "", path.stem)
|
||||
module_id = getattr(module, "id", "")
|
||||
module_id_tail = module_id[3:] if len(module_id) >= 3 else ""
|
||||
self.assertIn(
|
||||
file_base,
|
||||
{module_id, module_id_tail},
|
||||
f"Module ID mismatch in {path}: id={module_id} expected suffix={file_base}",
|
||||
)
|
||||
|
||||
def test_module_ids_are_unique(self):
|
||||
ids = []
|
||||
for path in self._iter_module_files():
|
||||
module = self.LinpeasModule(str(path))
|
||||
ids.append(getattr(module, "id", ""))
|
||||
|
||||
duplicates = {x for x in ids if x and ids.count(x) > 1}
|
||||
self.assertEqual(set(), duplicates, f"Duplicate module IDs found: {sorted(duplicates)}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
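Because the test class resolves the repository root from its own location and adjusts sys.path itself, the suite can be run straight from a checkout; a likely invocation (an assumption, since the CI wiring for these tests is not part of this diff):

    python3 linPEAS/tests/test_modules_metadata.py
    # or via unittest discovery from the repository root
    python3 -m unittest discover -s linPEAS/tests -p "test_*.py"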
@@ -29,6 +29,7 @@ namespace winPEAS.Tests
Assert.IsFalse(InvokeIsNetworkTypeValid("-network="));
Assert.IsFalse(InvokeIsNetworkTypeValid("-network=10.10.10.999"));
Assert.IsFalse(InvokeIsNetworkTypeValid("-network=10.10.10.10/64"));
Assert.IsFalse(InvokeIsNetworkTypeValid("-network=999.999.999.999/24"));
Assert.IsFalse(InvokeIsNetworkTypeValid("-network=not-an-ip"));
}
}
37  winPEAS/winPEASexe/Tests/ChecksArgumentEdgeCasesTests.cs  (Normal file)
|
||||
using System;
|
||||
using Microsoft.VisualStudio.TestTools.UnitTesting;
|
||||
|
||||
namespace winPEAS.Tests
|
||||
{
|
||||
[TestClass]
|
||||
public class ChecksArgumentEdgeCasesTests
|
||||
{
|
||||
[TestMethod]
|
||||
public void ShouldNotThrowOnEmptyLogFileArg()
|
||||
{
|
||||
// Should return early with a user-friendly error, not crash.
|
||||
Program.Main(new[] { "log=" });
|
||||
}
|
||||
|
||||
[TestMethod]
|
||||
public void ShouldNotThrowOnPortsWithoutNetwork()
|
||||
{
|
||||
// Should warn and return early because -network was not provided.
|
||||
Program.Main(new[] { "-ports=80,443" });
|
||||
}
|
||||
|
||||
[TestMethod]
|
||||
public void ShouldNotThrowOnInvalidNetworkArgument()
|
||||
{
|
||||
// Should warn and return early because the IP is invalid.
|
||||
Program.Main(new[] { "-network=10.10.10.999" });
|
||||
}
|
||||
|
||||
[TestMethod]
|
||||
public void ShouldNotThrowOnEmptyNetworkArgument()
|
||||
{
|
||||
// Should warn and return early because the value is empty.
|
||||
Program.Main(new[] { "-network=" });
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -356,7 +356,7 @@ namespace winPEAS.Checks
{
var rangeParts = networkType.Split('/');

if (rangeParts.Length == 2 && int.TryParse(rangeParts[1], out int res) && res <= 32 && res >= 0)
if (rangeParts.Length == 2 && IPAddress.TryParse(rangeParts[0], out _) && int.TryParse(rangeParts[1], out int res) && res <= 32 && res >= 0)
{
return true;
}
@@ -524,7 +524,7 @@ namespace winPEAS.Checks
{
Beaprint.MainPrint("Looking for documents --limit 100--");
List<string> docFiles = InterestingFiles.InterestingFiles.ListUsersDocs();
Beaprint.ListPrint(docFiles.GetRange(0, docFiles.Count <= 100 ? docFiles.Count : 100));
Beaprint.ListPrint(MyUtils.GetLimitedRange(docFiles, 100));
}
catch (Exception ex)
{
@@ -546,7 +546,7 @@ namespace winPEAS.Checks

if (recFiles.Count != 0)
{
foreach (Dictionary<string, string> recF in recFiles.GetRange(0, recFiles.Count <= 70 ? recFiles.Count : 70))
foreach (Dictionary<string, string> recF in MyUtils.GetLimitedRange(recFiles, 70))
{
Beaprint.AnsiPrint(" " + recF["Target"] + "(" + recF["Accessed"] + ")", colorF);
}
@@ -348,8 +348,7 @@ namespace winPEAS.Checks
Beaprint.MainPrint("DNS cached --limit 70--");
Beaprint.GrayPrint(string.Format(" {0,-38}{1,-38}{2}", "Entry", "Name", "Data"));
List<Dictionary<string, string>> DNScache = NetworkInfoHelper.GetDNSCache();
foreach (Dictionary<string, string> entry in DNScache.GetRange(0,
DNScache.Count <= 70 ? DNScache.Count : 70))
foreach (Dictionary<string, string> entry in MyUtils.GetLimitedRange(DNScache, 70))
{
Console.WriteLine($" {entry["Entry"],-38}{entry["Name"],-38}{entry["Data"]}");
}
@@ -21,6 +21,11 @@ namespace winPEAS.Helpers
""); //To get the default object you need to use an empty string
}

public static List<T> GetLimitedRange<T>(List<T> items, int limit)
{
return items.GetRange(0, Math.Min(items.Count, limit));
}

////////////////////////////////////
/////// MISC - Files & Paths ///////
////////////////////////////////////
@@ -1677,7 +1677,7 @@ if ($TimeStamp) { TimeElapsed }
Write-Host -ForegroundColor Blue "=========|| WHOAMI INFO"
Write-Host ""
if ($TimeStamp) { TimeElapsed }
Write-Host -ForegroundColor Blue "=========|| Check Token access here: https://book.hacktricks.wiki/en/windows-hardening/windows-local-privilege-escalation/privilege-escalation-abusing-tokens.html#abusing-tokens" -ForegroundColor yellow
Write-Host -ForegroundColor Blue "=========|| Check Token access here: https://book.hacktricks.wiki/en/windows-hardening/windows-local-privilege-escalation/privilege-escalation-abusing-tokens.html#abusing-tokens"
Write-Host -ForegroundColor Blue "=========|| Check if you are inside the Administrators group or if you have enabled any token that can be use to escalate privileges like SeImpersonatePrivilege, SeAssignPrimaryPrivilege, SeTcbPrivilege, SeBackupPrivilege, SeRestorePrivilege, SeCreateTokenPrivilege, SeLoadDriverPrivilege, SeTakeOwnershipPrivilege, SeDebugPrivilege"
Write-Host "https://book.hacktricks.wiki/en/windows-hardening/windows-local-privilege-escalation/index.html#users--groups" -ForegroundColor Yellow
Start-Process whoami.exe -ArgumentList "/all" -Wait -NoNewWindow