mirror of https://github.com/carlospolop/privilege-escalation-awesome-scripts-suite.git
synced 2025-12-10 10:49:02 +00:00

Compare commits: aicoder...20230724-3 (13 commits)
| SHA1 |
|---|
| 667bb5220d |
| 44a3cce5c7 |
| 965ca0868a |
| 1279434ba6 |
| d60fed0f20 |
| 0a1a0d1e56 |
| 2bc6c94608 |
| 509e164d6f |
| e7bfabe082 |
| 7c7b17a7cc |
| 2cb6af3f27 |
| 0d75c0085a |
| bc064ddb88 |
.github/workflows/AIPRChecker.yml (new file, vendored, 13 lines, @@ -0,0 +1,13 @@):

```yaml
name: AIPRChecker - Check for security issues and code smells
on: [pull_request_target]

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - name: Run AIPRChecker
        uses: AI-Gents/AIPRChecker@main
        with:
          api-key: ${{ secrets.OPENAI_API_KEY }}
          model: 'gpt-4'
          github-token: ${{ secrets.GITHUB_TOKEN }}
```
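Because the workflow triggers on `pull_request_target`, it runs in the context of the base repository and therefore has access to `OPENAI_API_KEY` and `GITHUB_TOKEN`. The AIPRChecker action's internals are not part of this diff; purely as an illustration, a checker wired in at this step could post its findings back on the pull request with PyGithub (the same library AICoder.py below uses). Everything here beyond the standard Actions environment variables is an assumption:

```python
# Illustration only: how a PR checker running under pull_request_target could
# report results using the provided github-token. AIPRChecker's real logic is
# not shown in this diff; the findings text and helper name are assumptions.
import json
import os

from github import Github  # PyGithub, also imported by AICoder.py below


def report_findings(findings: str) -> None:
    gh = Github(os.environ["GITHUB_TOKEN"])
    repo = gh.get_repo(os.environ["GITHUB_REPOSITORY"])
    # The workflow event payload file contains the pull request number.
    with open(os.environ["GITHUB_EVENT_PATH"]) as f:
        event = json.load(f)
    pr = repo.get_pull(event["pull_request"]["number"])
    pr.create_issue_comment(findings)


if __name__ == "__main__":
    report_findings("AIPRChecker (example run): no findings.")
```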
AICoder.py (deleted, 208 lines, @@ -1,208 +0,0 @@):

```python
import argparse
import os
import sys
import string
import random
from typing import List
import openai
import json
import subprocess
import tiktoken
import requests
from github import Github

#########################
#### OPENAI FUNCTIONS ###
#########################

def reportTokens(prompt, model="gpt-4"):
    encoding = tiktoken.encoding_for_model(model)
    print("\033[37m" + str(len(encoding.encode(prompt))) + " tokens\033[0m" + " in prompt: " + "\033[92m" + prompt[:50] + "\033[0m" + ("..." if len(prompt) > 50 else ""))

def write_file(file_path: str, content: str):
    """Write content to a file creating the needed directories first"""
    os.makedirs(os.path.dirname(file_path), exist_ok=True)

    with open(file_path, "w") as file:
        file.write(content)

def delete_file(file_path: str):
    """Delete a file if it exists"""

    if os.path.isfile(file_path):
        os.remove(file_path)

openai_available_functions = {
    "write_file": write_file, "delete_file": delete_file
}

openai_functions = [
    {
        "name": "write_file",
        "description": "Write a file giving the path and the content",
        "parameters": {
            "type": "object",
            "properties": {
                "file_path": {
                    "type": "string",
                    "description": "Path to the file to write",
                },
                "content": {
                    "type": "string",
                    "description": "Content to write in the file",
                },
            },
            "required": ["file_path", "content"],
        },
    },
    {
        "name": "delete_file",
        "description": "Delete a file",
        "parameters": {
            "type": "object",
            "properties": {
                "file_path": {
                    "type": "string",
                    "description": "Path to the file to write",
                }
            },
            "required": ["file_path"],
        },
    }
]


#########################
#### GIT FUNCTIONS ######
#########################


def create_pull_request(branch_name, commit_message, github_token):
    github = Github(github_token)
    repo = github.get_repo(os.environ["GITHUB_REPOSITORY"])

    # Create a new branch
    base_branch = repo.get_branch(repo.default_branch)
    repo.create_git_ref(ref=f"refs/heads/{branch_name}", sha=base_branch.commit.sha)

    # Commit changes to the new branch
    subprocess.run(["git", "checkout", branch_name])
    subprocess.run(["git", "add", "."])
    subprocess.run(["git", "commit", "-m", commit_message])
    subprocess.run(["git", "push", "origin", branch_name])

    # Create a pull request
    pr = repo.create_pull(
        title=commit_message,
        body="Generated by OpenAI Github Action",
        head=branch_name,
        base=repo.default_branch
    )

    return pr.html_url


#########################
#### FILE PROCESSING ####
#########################


def process_file(prompt: str, api_key: str, file_path: str, model: str="gpt-4") -> str:
    with open(file_path, "r") as file:
        file_content = file.read()

    messages = [
        {"role": "system", "content": f"You are a developer and your goal is to generate code. The user will ask you to improve and modify some code. Your response must be a valid JSON with the path of each file to write as keys and the content of the files as values. Several files can be written at the same time."},
        {"role": "user", "content": prompt},
        {"role": "user", "content": f"This is the code from the file '{file_path}':\n\n{file_content}"}
    ]
    openai.api_key = api_key

    reportTokens(f"This is the code from the file '{file_path}':\n\n{file_content}")

    response = openai.ChatCompletion.create(
        model=model,
        messages=messages,
        temperature=0
    )
    response_message = response["choices"][0]["message"]

    # Step 2: check if GPT wanted to call a function
    if response_message.get("function_call"):

        function_name = response_message["function_call"]["name"]
        fuction_to_call = openai_available_functions[function_name]
        function_args = json.loads(response_message["function_call"]["arguments"])
        fuction_to_call(**function_args)


def process_folder(prompt: str, api_key: str, folder_path: str, model: str="gpt-4") -> List[str]:
    responses = []
    for root, _, files in os.walk(folder_path):
        for file in files:
            file_path = os.path.join(root, file)
            response = process_file(prompt, api_key, file_path, model)
            responses.append(response)


#########################
#### MAIN FUNCTION ######
#########################


def get_random_string(length):
    # With combination of lower and upper case
    letters = string.ascii_letters
    result_str = ''.join(random.choice(letters) for i in range(length))
    return result_str

def main(prompt: str, api_key: str, file_path: str, github_token: str, model: str="gpt-4"):
    if os.path.isfile(file_path):
        process_file(prompt, api_key, file_path, model)
    elif os.path.isdir(file_path):
        process_folder(prompt, api_key, file_path, model)
    else:
        print("Error: Invalid file path.")
        sys.exit(1)

    try:
        create_pull_request(get_random_string(5), f"Modified {file_path}", github_token)
    except Exception as e:
        print(f"Error: Failed to create pull request. {e}")
        sys.exit(1)

if __name__ == "__main__":
    # Setup the argument parser
    parser = argparse.ArgumentParser()

    # Add arguments for prompt, api_key, file_path and github_token
    parser.add_argument('--prompt', default=None, type=str, help='Input prompt')
    parser.add_argument('--api-key', default=None, type=str, help='Input API key')
    parser.add_argument('--path', default=None, type=str, help='Input file/folder path')
    parser.add_argument('--github-token', default=None, type=str, help='Github token')
    parser.add_argument('--model', default="gpt-4", type=str, help='Model to use')

    # Parse the arguments
    args = parser.parse_args()
    prompt = os.environ.get("INPUT_PROMPT", args.prompt)
    api_key = os.environ.get("INPUT_API_KEY", args.api_key)
    file_path = os.environ.get("INPUT_FILE_PATH", args.path)
    github_token = os.environ.get("GITHUB_TOKEN", args.github_token)
    model = os.environ.get("INPUT_MODEL", args.model)

    if not prompt or not api_key or not file_path:
        print("Error: Missing required inputs.")
        sys.exit(1)

    #if not github_token:
    #    print("Error: Missing github token.")
    #    sys.exit(1)

    if os.path.exists(prompt):
        with open(prompt, "r") as file:
            prompt = file.read()

    if prompt.startswith("http"):
        prompt = requests.get(prompt).text

    main(prompt, api_key, file_path, github_token, model)
```
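Although the file is removed in this compare, its argparse/environment fallbacks show how it was driven: either with --prompt/--api-key/--path flags or with the INPUT_* variables a GitHub Action would set. A minimal invocation sketch; the prompt text, target path and API key below are placeholders, not values from the repository:

```python
# Minimal sketch of driving AICoder.py through its INPUT_* environment variables.
# The prompt, target path and API key are placeholders for illustration only.
import os
import subprocess

env = dict(
    os.environ,
    INPUT_PROMPT="Add docstrings to every function",  # may also be a file path or URL, per the script
    INPUT_API_KEY="sk-...",                            # your OpenAI API key
    INPUT_FILE_PATH="some/file_or_folder",             # file or folder to process
    INPUT_MODEL="gpt-4",
    # GITHUB_TOKEN and GITHUB_REPOSITORY would also be needed for the PR step.
)
subprocess.run(["python", "AICoder.py"], env=env, check=True)
```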
File diff suppressed because one or more lines are too long
```diff
@@ -282,7 +282,7 @@ fi
 #If token secrets mounted
 if [ "$(mount | sed -n '/secret/ s/^tmpfs on \(.*default.*\) type tmpfs.*$/\1\/namespace/p')" ]; then
   print_2title "Listing mounted tokens"
-  print_info "https://book.hacktricks.xyz/cloud-security/pentesting-kubernetes/attacking-kubernetes-from-inside-a-pod"
+  print_info "https://cloud.hacktricks.xyz/pentesting-cloud/kubernetes-security/attacking-kubernetes-from-inside-a-pod"
   ALREADY="IinItialVaaluE"
   for i in $(mount | sed -n '/secret/ s/^tmpfs on \(.*default.*\) type tmpfs.*$/\1\/namespace/p'); do
     TOKEN=$(cat $(echo $i | sed 's/.namespace$/\/token/'))
```
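The mount|sed pipeline in this hunk is dense; as a rough illustration only (not code from linPEAS), the same selection logic in Python picks tmpfs "secret" mounts whose path contains "default" and derives the namespace/token file paths that the loop then reads:

```python
# Illustration of what the mount | sed pipeline selects (not part of linPEAS):
# tmpfs "secret" mounts whose path contains "default" (default-token volumes),
# from which the namespace and token files are then read.
import re
import subprocess

mounts = subprocess.run(["mount"], capture_output=True, text=True).stdout
for line in mounts.splitlines():
    if "secret" not in line:
        continue
    m = re.match(r"^tmpfs on (.*default.*) type tmpfs", line)
    if m:
        secret_dir = m.group(1)
        print(secret_dir + "/namespace", secret_dir + "/token")
```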
```diff
@@ -364,7 +364,7 @@ if [ "$inContainer" ]; then
   echo ""

   print_2title "Kubernetes Information"
-  print_info "https://book.hacktricks.xyz/cloud-security/pentesting-kubernetes/attacking-kubernetes-from-inside-a-pod"
+  print_info "https://cloud.hacktricks.xyz/pentesting-cloud/kubernetes-security/attacking-kubernetes-from-inside-a-pod"


   print_3title "Kubernetes service account folder"
@@ -376,7 +376,7 @@ if [ "$inContainer" ]; then
   echo ""

   print_3title "Current sa user k8s permissions"
-  print_info "https://book.hacktricks.xyz/cloud-security/pentesting-kubernetes/hardening-roles-clusterroles"
+  print_info "https://cloud.hacktricks.xyz/pentesting-cloud/kubernetes-security/abusing-roles-clusterroles-in-kubernetes"
   kubectl auth can-i --list 2>/dev/null || curl -s -k -d "$(echo \"eyJraW5kIjoiU2VsZlN1YmplY3RSdWxlc1JldmlldyIsImFwaVZlcnNpb24iOiJhdXRob3JpemF0aW9uLms4cy5pby92MSIsIm1ldGFkYXRhIjp7ImNyZWF0aW9uVGltZXN0YW1wIjpudWxsfSwic3BlYyI6eyJuYW1lc3BhY2UiOiJlZXZlZSJ9LCJzdGF0dXMiOnsicmVzb3VyY2VSdWxlcyI6bnVsbCwibm9uUmVzb3VyY2VSdWxlcyI6bnVsbCwiaW5jb21wbGV0ZSI6ZmFsc2V9fQo=\"|base64 -d)" \
     "https://${KUBERNETES_SERVICE_HOST}:${KUBERNETES_SERVICE_PORT_HTTPS}/apis/authorization.k8s.io/v1/selfsubjectrulesreviews" \
     -X 'POST' -H 'Content-Type: application/json' \
```
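The base64 blob in the kubectl fallback is simply the request body sent to the API server. A quick way to inspect it (illustration only, not part of linPEAS); it should decode to a SelfSubjectRulesReview spec:

```python
# Decode the payload POSTed to /apis/authorization.k8s.io/v1/selfsubjectrulesreviews
# (illustration only). It is the JSON body of a SelfSubjectRulesReview request.
import base64
import json

blob = "eyJraW5kIjoiU2VsZlN1YmplY3RSdWxlc1JldmlldyIsImFwaVZlcnNpb24iOiJhdXRob3JpemF0aW9uLms4cy5pby92MSIsIm1ldGFkYXRhIjp7ImNyZWF0aW9uVGltZXN0YW1wIjpudWxsfSwic3BlYyI6eyJuYW1lc3BhY2UiOiJlZXZlZSJ9LCJzdGF0dXMiOnsicmVzb3VyY2VSdWxlcyI6bnVsbCwibm9uUmVzb3VyY2VSdWxlcyI6bnVsbCwiaW5jb21wbGV0ZSI6ZmFsc2V9fQo="
print(json.dumps(json.loads(base64.b64decode(blob)), indent=2))
```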
```diff
@@ -153,7 +153,7 @@ if [ "$is_gcp" = "Yes" ]; then

   if [ "$gcp_req" ]; then
     print_2title "Google CLoud Platform Enumeration"
-    print_info "https://book.hacktricks.xyz/cloud-security/gcp-security"
+    print_info "https://cloud.hacktricks.xyz/pentesting-cloud/gcp-security"

     ## GC Project Info
     p_id=$(eval $gcp_req 'http://metadata.google.internal/computeMetadata/v1/project/project-id')
```
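For reference, $gcp_req wraps a metadata-server request; as an illustration only (not linPEAS code), the same project-id lookup in Python needs the Metadata-Flavor header that Google's metadata endpoint requires:

```python
# Illustration of the metadata lookup behind $gcp_req (not part of linPEAS).
# The GCE metadata server only answers requests that carry this header.
import requests

resp = requests.get(
    "http://metadata.google.internal/computeMetadata/v1/project/project-id",
    headers={"Metadata-Flavor": "Google"},
    timeout=2,
)
print(resp.text)
```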
```diff
@@ -454,7 +454,7 @@ else
   sh_usrs=$(cat /etc/passwd 2>/dev/null | grep -v "^root:" | grep -i "sh$" | cut -d ":" -f 1 | tr '\n' '|' | sed 's/|bin|/|bin[\\\s:]|^bin$|/' | sed 's/|sys|/|sys[\\\s:]|^sys$|/' | sed 's/|daemon|/|daemon[\\\s:]|^daemon$|/')"ImPoSSssSiBlEee" #Modified bin, sys and daemon so they are not colored everywhere
   nosh_usrs=$(cat /etc/passwd 2>/dev/null | grep -i -v "sh$" | sort | cut -d ":" -f 1 | tr '\n' '|' | sed 's/|bin|/|bin[\\\s:]|^bin$|/')"ImPoSSssSiBlEee"
 fi
-knw_usrs='_amavisd|_analyticsd|_appinstalld|_appleevents|_applepay|_appowner|_appserver|_appstore|_ard|_assetcache|_astris|_atsserver|_avbdeviced|_calendar|_captiveagent|_ces|_clamav|_cmiodalassistants|_coreaudiod|_coremediaiod|_coreml|_ctkd|_cvmsroot|_cvs|_cyrus|_datadetectors|_demod|_devdocs|_devicemgr|_diskimagesiod|_displaypolicyd|_distnote|_dovecot|_dovenull|_dpaudio|_driverkit|_eppc|_findmydevice|_fpsd|_ftp|_fud|_gamecontrollerd|_geod|_hidd|_iconservices|_installassistant|_installcoordinationd|_installer|_jabber|_kadmin_admin|_kadmin_changepw|_knowledgegraphd|_krb_anonymous|_krb_changepw|_krb_kadmin|_krb_kerberos|_krb_krbtgt|_krbfast|_krbtgt|_launchservicesd|_lda|_locationd|_logd|_lp|_mailman|_mbsetupuser|_mcxalr|_mdnsresponder|_mobileasset|_mysql|_nearbyd|_netbios|_netstatistics|_networkd|_nsurlsessiond|_nsurlstoraged|_oahd|_ondemand|_postfix|_postgres|_qtss|_reportmemoryexception|_rmd|_sandbox|_screensaver|_scsd|_securityagent|_softwareupdate|_spotlight|_sshd|_svn|_taskgated|_teamsserver|_timed|_timezone|_tokend|_trustd|_trustevaluationagent|_unknown|_update_sharing|_usbmuxd|_uucp|_warmd|_webauthserver|_windowserver|_www|_wwwproxy|_xserverdocs|daemon\W|^daemon$|message\+|syslog|www|www-data|mail|noboby|Debian\-\+|rtkit|systemd\+'
+knw_usrs='_amavisd|_analyticsd|_appinstalld|_appleevents|_applepay|_appowner|_appserver|_appstore|_ard|_assetcache|_astris|_atsserver|_avbdeviced|_calendar|_captiveagent|_ces|_clamav|_cmiodalassistants|_coreaudiod|_coremediaiod|_coreml|_ctkd|_cvmsroot|_cvs|_cyrus|_datadetectors|_demod|_devdocs|_devicemgr|_diskimagesiod|_displaypolicyd|_distnote|_dovecot|_dovenull|_dpaudio|_driverkit|_eppc|_findmydevice|_fpsd|_ftp|_fud|_gamecontrollerd|_geod|_hidd|_iconservices|_installassistant|_installcoordinationd|_installer|_jabber|_kadmin_admin|_kadmin_changepw|_knowledgegraphd|_krb_anonymous|_krb_changepw|_krb_kadmin|_krb_kerberos|_krb_krbtgt|_krbfast|_krbtgt|_launchservicesd|_lda|_locationd|_logd|_lp|_mailman|_mbsetupuser|_mcxalr|_mdnsresponder|_mobileasset|_mysql|_nearbyd|_netbios|_netstatistics|_networkd|_nsurlsessiond|_nsurlstoraged|_oahd|_ondemand|_postfix|_postgres|_qtss|_reportmemoryexception|_rmd|_sandbox|_screensaver|_scsd|_securityagent|_softwareupdate|_spotlight|_sshd|_svn|_taskgated|_teamsserver|_timed|_timezone|_tokend|_trustd|_trustevaluationagent|_unknown|_update_sharing|_usbmuxd|_uucp|_warmd|_webauthserver|_windowserver|_www|_wwwproxy|_xserverdocs|daemon\W|^daemon$|message\+|syslog|www|www-data|mail|nobody|Debian\-\+|rtkit|systemd\+'
 if ! [ "$USER" ]; then
   USER=$(whoami 2>/dev/null || echo -n "UserUnknown")
 fi
```
```diff
@@ -1141,7 +1141,7 @@ if [ "$SEARCH_IN_FOLDER" ] || echo $CHECKS | grep -q procs_crons_timers_srvcs_so
   #GENERATE THE STORAGES OF THE FOUND FILES
   peass{STORAGES_HERE}

-  ##### POST SERACH VARIABLES #####
+  ##### POST SEARCH VARIABLES #####
   backup_folders_row="$(echo $PSTORAGE_BACKUPS | tr '\n' ' ')"
   printf ${YELLOW}"DONE\n"$NC
   echo ""
```
```diff
@@ -353,7 +353,7 @@ class LinpeasBuilder:

     def __get_gtfobins_lists(self) -> tuple:
         r = requests.get("https://github.com/GTFOBins/GTFOBins.github.io/tree/master/_gtfobins")
-        bins = re.findall(r'/GTFOBins/GTFOBins.github.io/blob/master/_gtfobins/([\w_ \-]+).md', r.text)
+        bins = re.findall(r'_gtfobins/([\w_ \-]+).md', r.text)

         sudoVB = []
         suidVB = []
```
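The change relaxes the pattern so it matches however GitHub currently embeds the file names in the tree page, rather than requiring the full blob path. A standalone sketch of the relaxed scrape, using the same URL and regex as above (the sorting and de-duplication are additions here, not part of the builder):

```python
# Standalone sketch of the relaxed GTFOBins scrape shown above.
# Same URL and regex as the builder; sorting/de-duplication added for display.
import re

import requests

r = requests.get("https://github.com/GTFOBins/GTFOBins.github.io/tree/master/_gtfobins")
bins = sorted(set(re.findall(r'_gtfobins/([\w_ \-]+).md', r.text)))
print(f"{len(bins)} GTFOBins pages found, e.g. {bins[:5]}")
```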
```diff
@@ -12,6 +12,7 @@ styles = getSampleStyleSheet()
 text_colors = { "GREEN": "#00DB00", "RED": "#FF0000", "REDYELLOW": "#FFA500", "BLUE": "#0000FF",
                 "DARKGREY": "#5C5C5C", "YELLOW": "#ebeb21", "MAGENTA": "#FF00FF", "CYAN": "#00FFFF", "LIGHT_GREY": "#A6A6A6"}

+# Required to automatically set Page Numbers
 class PageTemplateWithCount(PageTemplate):
     def __init__(self, id, frames, **kw):
         PageTemplate.__init__(self, id, frames, **kw)
```
```diff
@@ -20,6 +21,7 @@ class PageTemplateWithCount(PageTemplate):
         page_num = canvas.getPageNumber()
         canvas.drawRightString(10.5*cm, 1*cm, str(page_num))

+# Required to automatically set the Table of Contents
 class MyDocTemplate(BaseDocTemplate):
     def __init__(self, filename, **kw):
         self.allowSplitting = 0
```
```diff
@@ -28,15 +30,22 @@ class MyDocTemplate(BaseDocTemplate):
         self.addPageTemplates(template)

     def afterFlowable(self, flowable):
-        if isinstance(flowable, Paragraph):
+        if flowable.__class__.__name__ == "Paragraph":
             text = flowable.getPlainText()
             style = flowable.style.name
-            if style in ["Heading1", "Heading2", "Heading3"]:
-                self.notify("TOCEntry", (int(style[-1])-1, text, self.page))
+            if style == "Heading1":
+                self.notify("TOCEntry", (0, text, self.page))
+            if style == "Heading2":
+                self.notify("TOCEntry", (1, text, self.page))
+            if style == "Heading3":
+                self.notify("TOCEntry", (2, text, self.page))

+
+# Poor take at dynamicly generating styles depending on depth(?)
 def get_level_styles(level):
     global styles
     indent_value = 10 * (level - 1);
+    # Overriding some default stylings
     level_styles = {
         "title": ParagraphStyle(
             **dict(styles[f"Heading{level}"].__dict__,
```
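The afterFlowable hook touched above is the standard reportlab pattern for building a clickable table of contents: every Heading paragraph notifies the TOC with its depth, text and page, and multiBuild runs a second pass to resolve the page numbers. A self-contained sketch of just that pattern; the output file name and sample headings are illustrative and not taken from the PEASS parser:

```python
# Minimal, self-contained sketch of the reportlab TOC pattern used above.
# Output file name and sample headings are illustrative only.
from reportlab.lib.pagesizes import A4
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib.units import cm
from reportlab.platypus import (BaseDocTemplate, Frame, PageBreak, PageTemplate,
                                Paragraph)
from reportlab.platypus.tableofcontents import TableOfContents

styles = getSampleStyleSheet()


class TocDocTemplate(BaseDocTemplate):
    def __init__(self, filename, **kw):
        BaseDocTemplate.__init__(self, filename, pagesize=A4, **kw)
        frame = Frame(2 * cm, 2 * cm, A4[0] - 4 * cm, A4[1] - 4 * cm, id="normal")
        self.addPageTemplates([PageTemplate(id="main", frames=[frame])])

    def afterFlowable(self, flowable):
        # Feed every Heading1-3 paragraph into the TableOfContents.
        if isinstance(flowable, Paragraph) and flowable.style.name in ("Heading1", "Heading2", "Heading3"):
            level = int(flowable.style.name[-1]) - 1
            self.notify("TOCEntry", (level, flowable.getPlainText(), self.page))


doc = TocDocTemplate("toc_demo.pdf")
elements = [Paragraph("Demo Report", styles["Title"]), TableOfContents(), PageBreak(),
            Paragraph("First section", styles["Heading1"]),
            Paragraph("Some body text.", styles["Normal"])]
doc.multiBuild(elements)  # the second pass resolves the TOC page numbers
```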
```diff
@@ -66,6 +75,7 @@ def build_main_section(section, title, level=1):
     has_lines = "lines" in section.keys() and len(section["lines"]) > 1
     has_children = "sections" in section.keys() and len(section["sections"].keys()) > 0

+    # Only display data for Sections with results
     show_section = has_lines or has_children

     elements = []
```
```diff
@@ -73,14 +83,17 @@ def build_main_section(section, title, level=1):
     if show_section:
         elements.append(Paragraph(title, style=styles["title"]))

+    # Print info if any
     if show_section and has_links:
         for info in section["infos"]:
             words = info.split()
+            # Join all lines and encode any links that might be present.
             words = map(lambda word: f'<a href="{word}" color="blue">{word}</a>' if "http" in word else word, words)
             words = " ".join(words)
             elements.append(Paragraph(words, style=styles["info"] ))

-    if has_lines:
+    # Print lines if any
+    if "lines" in section.keys() and len(section["lines"]) > 1:
         colors_by_line = list(map(lambda x: x["colors"], section["lines"]))
         lines = list(map(lambda x: html.escape(x["clean_text"]), section["lines"]))
         for (idx, line) in enumerate(lines):
```
```diff
@@ -96,14 +109,18 @@ def build_main_section(section, title, level=1):
                 elements.append(Spacer(0, 10))
         line = "<br/>".join(lines)

+        # If it's a top level entry remove the line break caused by an empty "clean_text"
         if level == 1: line = line[5:]
         elements.append(Paragraph(line, style=styles["text"]))

+
+    # Print child sections
     if has_children:
         for child_title in section["sections"].keys():
             element_list = build_main_section(section["sections"][child_title], child_title, level + 1)
             elements.extend(element_list)

+    # Add spacing at the end of section. The deeper the level the smaller the spacing.
     if show_section:
         elements.append(Spacer(1, 40 - (10 * level)))

```
```diff
@@ -112,8 +129,10 @@ def build_main_section(section, title, level=1):

 def main():
     with open(JSON_PATH) as file:
+        # Read and parse JSON file
         data = json.loads(file.read())

+    # Default pdf values
     doc = MyDocTemplate(PDF_PATH)
     toc = TableOfContents()
     toc.levelStyles = [
```
```diff
@@ -124,12 +143,14 @@ def main():

     elements = [Paragraph("PEAS Report", style=styles["Title"]), Spacer(0, 30), toc, PageBreak()]

+    # Iterate over all top level sections and build their elements.
     for title in data.keys():
         element_list = build_main_section(data[title], title)
         elements.extend(element_list)

     doc.multiBuild(elements)

+# Start execution
 if __name__ == "__main__":
     try:
         JSON_PATH = sys.argv[1]
```
```diff
@@ -139,11 +160,3 @@ if __name__ == "__main__":
         sys.exit(1)

     main()
-
-# Changes:
-# 1. Removed redundant checks for keys in dictionary.
-# 2. Simplified the condition in afterFlowable method.
-# 3. Removed unnecessary check for lines in build_main_section method.
-# 4. Removed unnecessary check for sections in build_main_section method.
-# 5. Removed unnecessary check for infos in build_main_section method.
-# 6. Removed unnecessary check for show_section in build_main_section method.
```
```diff
@@ -10,6 +10,14 @@ REM Registry scan of other drives besides
 REM /////true or false
 SET long=false

+REM Check if the current path contains spaces
+SET "CurrentFolder=%~dp0"
+IF "!CurrentFolder!" NEQ "!CurrentFolder: =!" (
+    ECHO winPEAS.bat cannot run if the current path contains spaces.
+    ECHO Exiting.
+    EXIT /B 1
+)
+
 :Splash
 ECHO.
 CALL :ColorLine " %E%32m((,.,/((((((((((((((((((((/, */%E%97m"
```
````diff
@@ -14,9 +14,9 @@ The official **maintainer of this script is [RandolphConley](https://github.com/

 Download the **[latest releas from here](https://github.com/carlospolop/PEASS-ng/releases/latest)**.


 ```bash
-powershell "IEX(New-Object Net.WebClient).downloadString('https://raw.githubusercontent.com/carlospolop/PEASS-ng/master/winPEAS/winPEASps1/WinPeas.ps1')"
+powershell "IEX(New-Object Net.WebClient).downloadString('https://raw.githubusercontent.com/carlospolop/PEASS-ng/master/winPEAS/winPEASps1/winPEAS.ps1')"
 ```

 ## Advisory
````