# Append a trailing dot to every line of every file under in/.
find in -type f -print0 | xargs -0 cat | sed 's/$/./' | tee out/output.txt
Contributed by trickest
Wrap each input line into a Shodan ssl.cert.subject.cn wildcard query value.
# Wrap every non-empty line into a Shodan ssl.cert.subject.cn wildcard query.
find in -type f -print0 | xargs -0 cat | awk '$0 != "" { printf "ssl.cert.subject.cn:\"*.%s\"\n", $0 }' | tee out/output.txt
Contributed by trickest
Prepend the supplied CSV header row to input files that lack one.
# Emit the nuclei CSV header first, then the concatenated input files.
# A brace group runs in the current shell instead of a subshell.
{ echo "vulnerability_id,tags,description,authors,severity,type,host,ip,match,vuln_name,extracted_results,timestamp"; find in -type f -exec cat {} +; } > out/output.txt
Contributed by trickest
Print the last column of all files in the in folder.
# Print the last whitespace-separated field of every line.
find in -type f -print0 | xargs -0 cat | awk '{print $NF}' | tee out/output.txt
Contributed by trickest
Extract first column from all files in in directory.
# First field per line; awk's default FS already splits on blank runs.
find in -type f -print0 | xargs -0 cat | awk '{print $1}' | tee out/output.txt
Contributed by trickest
Extract second column from all files in in directory.
# Second field per line; awk's default FS already splits on blank runs.
find in -type f -print0 | xargs -0 cat | awk '{print $2}' | tee out/output.txt
Contributed by trickest
Extract third column from all files in in directory.
# Third field per line; awk's default FS already splits on blank runs.
find in -type f -print0 | xargs -0 cat | awk '{print $3}' | tee out/output.txt
Contributed by trickest
Cat all files in in directory
# Concatenate every depth-2 entry under in/ and mirror it to out/output.txt.
for input_file in in/*/*; do cat "$input_file"; done | tee out/output.txt
Contributed by trickest
Cat file from line number to line number with sed.
# Print lines 1-100 of the concatenated input (numbering runs across files,
# exactly as sed without -s does).
cat in/*/* | awk 'NR <= 100' | tee out/output.txt
Contributed by trickest
Efficiently clone a GitHub repository using the appropriate configuration.
# --- configuration --------------------------------------------------------
USERNAME="YOUR_GITHUB_USERNAME"
EMAIL="YOUR_GITHUB_EMAIL"
REPOSITORY="USER/REPO"
# The token is read from an input node file so it is not hardcoded here.
TOKEN=$(cat /hive/in/http-input-1/output.txt)
BRANCH="main"
# --- git identity and transfer tuning ------------------------------------
git config --global user.email "$EMAIL"
git config --global user.name "$USERNAME"
git config --global pack.windowMemory "50m"
# HTTP/1.1, a large post buffer, and disabled low-speed aborts make large
# clones over slow links more reliable.
git config --global http.version HTTP/1.1
git config --global http.postBuffer 157286400
git config --global http.lowSpeedLimit 0
git config --global http.lowSpeedTime 999999
# Shallow single-branch clone keeps the transfer minimal.
git clone -b "$BRANCH" --depth 1 "https://$USERNAME:$TOKEN@github.com/$REPOSITORY.git"
# ${REPOSITORY#*/} strips the "USER/" prefix; abort if the clone failed so we
# never run the commands below in the wrong directory.
cd "${REPOSITORY#*/}" || exit 1
# process your repository here
ls | tee /hive/out/output.txt
Contributed by trickest
Converts output with | delimiter to ip:port format.
# Rewrite "ip|port|..." records as "ip:port".
find in -type f -print0 | xargs -0 cat | awk -F'|' '{ printf "%s:%s\n", $1, $2 }' | tee out/output.txt
Contributed by trickest
Used to quickly count all lines inside of all files in in folder.
# Total line count across every file under in/.
find in -type f -print0 | xargs -0 cat | wc -l | tee out/output.txt
Contributed by trickest
One-liner for generating wordlists from robots.txt
# Extract Allow/Disallow paths from robots.txt responses and normalize them
# into wordlist entries: strip slashes/asterisks/blank lines, preserving the
# original per-line edit order in a single sed invocation.
find in/ -mindepth 3 -type f -exec cat {} + | grep -E -w "Disallow|Allow: " | awk '{print $2}' | sed -e 's/^\///' -e 's/\/$//' -e '/^[[:space:]]*$/d' -e 's/\*$//' -e 's/^\*//' -e 's/\/$//' -e 's/\*\///g' -e 's/\*//g' | uniq | tee out/output.txt
Contributed by kljunowsky
Delete a leading dot character; useful when parsing subdomain lists that contain invalid entries.
# Drop a leading dot from each line, then sort and deduplicate.
sed 's/^\.//' in/*/* | sort -n | uniq | tee out/output.txt
Contributed by trickest
Delete dot when it is last character, massdns anyone?
# Drop a trailing dot from each line (e.g. massdns-style FQDNs).
sed 's/\.$//' in/*/* | tee out/output.txt
Contributed by trickest
Egrep multiple values and print last in array.
# Keep lines containing any of the words, then print the last field.
cat in/*/* | grep -E -w 'url|robots|linkfinder' | awk '{print $NF}' | tee out/output.txt
Contributed by trickest
Extract urls from list of files with regex.
# Pull every http/https URL out of the input files.
find in -type f -print0 | xargs -0 cat | grep -E -o 'https?://[^ ]+' | tee out/output.txt
Contributed by trickest
Paste raw data and add it to output.
# Write the literal here-doc body (quoted delimiter: no expansion) to the
# output file and to stdout.
tee out/output.txt << "EOF"
ADD_CONTENT_HERE
EOF
Contributed by trickest
Extract ip addresses from files with regex.
# Extract dotted-quad IPv4 addresses (each octet limited to 0-255).
find in -type f -print0 | xargs -0 cat | grep -E -o "(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)" | tee out/output.txt
Contributed by trickest
Find string in meg responses and print urls.
# For every meg response file containing 'root:', print its first line (the
# request URL meg stores there) tagged [VULNERABLE]. grep -l lists matching
# files directly, and the read loop avoids the word-splitting of
# `for f in $(find ...)` on paths with spaces.
find in -mindepth 3 -type f -exec grep -l 'root:' {} + | sort -u | while IFS= read -r f; do echo "$(head -1 "$f") [VULNERABLE]" | grep -E '^http'; done | tee out/output.txt
Contributed by gliga
Utility to generate random passwords based on pwgen.
# Generate random passwords with pwgen: -N 100 asks for 100 passwords,
# the positional 5 is the password length.
pwgen -N 100 5 | tee out/output.txt
Contributed by trickest
Sequence of numbers generator!
# Emit the numbers 1 through 10, one per line.
printf '%s\n' {1..10} | tee out/output.txt
Contributed by trickest
Get asn prefixes by id.
# Query RIPEstat for the prefixes announced by ASN_ID; single-quoting the URL
# replaces the escaped ? and = characters.
curl --silent 'https://stat.ripe.net/data/announced-prefixes/data.json?resource=ASN_ID' | jq -r '.data.prefixes[].prefix' | tee out/output.txt
Contributed by trickest
Gather cloud assets from the Inventory project
FILE_NAME="cloud"
BASE_URL="https://raw.githubusercontent.com/trickest/inventory/main"
# Usage:
#   get_inventory_files FILE_NAME COMPANY
#
# Example:
#   get_inventory_files spider Netflix
#
# Downloads COMPANY/FILE.txt; if that file is missing or empty, falls back to
# the chunked scheme FILE_00.txt, FILE_01.txt, ... until a chunk 404s.
get_inventory_files() {
  company=$2
  file=$1
  # Check the file where wget actually wrote it (out/), not the CWD.
  if ! wget "${BASE_URL}/${company}/${file}.txt" -O "out/${company}_${file}.txt" || [ ! -s "out/${company}_${file}.txt" ]; then
    i=00
    while wget "${BASE_URL}/${company}/${file}_${i}.txt" -O "out/${company}_${file}_${i}.txt"; do
      # 10# forces base-10: without it "08"/"09" are invalid octal literals.
      i=$(printf '%02d' "$((10#$i + 1))")
    done
  fi
}
echo "Downloading the targets index"
wget "${BASE_URL}/targets.json"
jq -r '.targets[].name' targets.json > companies.txt
echo "Downloading $FILE_NAME files"
while IFS= read -r company; do
  echo "$company"
  get_inventory_files "$FILE_NAME" "$company"
done < companies.txt
find out -type f -empty -delete
cat out/* > out/output.txt
Contributed by trickest
Gather hostnames from the Inventory project
FILE_NAME="hostnames"
BASE_URL="https://raw.githubusercontent.com/trickest/inventory/main"
# Usage:
#   get_inventory_files FILE_NAME COMPANY
#
# Example:
#   get_inventory_files spider Netflix
#
# Downloads COMPANY/FILE.txt; if that file is missing or empty, falls back to
# the chunked scheme FILE_00.txt, FILE_01.txt, ... until a chunk 404s.
get_inventory_files() {
  company=$2
  file=$1
  # Check the file where wget actually wrote it (out/), not the CWD.
  if ! wget "${BASE_URL}/${company}/${file}.txt" -O "out/${company}_${file}.txt" || [ ! -s "out/${company}_${file}.txt" ]; then
    i=00
    while wget "${BASE_URL}/${company}/${file}_${i}.txt" -O "out/${company}_${file}_${i}.txt"; do
      # 10# forces base-10: without it "08"/"09" are invalid octal literals.
      i=$(printf '%02d' "$((10#$i + 1))")
    done
  fi
}
echo "Downloading the targets index"
wget "${BASE_URL}/targets.json"
jq -r '.targets[].name' targets.json > companies.txt
echo "Downloading $FILE_NAME files"
while IFS= read -r company; do
  echo "$company"
  get_inventory_files "$FILE_NAME" "$company"
done < companies.txt
find out -type f -empty -delete
cat out/* > out/output.txt
Contributed by trickest
Gather servers from the Inventory project
FILE_NAME="servers"
BASE_URL="https://raw.githubusercontent.com/trickest/inventory/main"
# Usage:
#   get_inventory_files FILE_NAME COMPANY
#
# Example:
#   get_inventory_files spider Netflix
#
# Downloads COMPANY/FILE.txt; if that file is missing or empty, falls back to
# the chunked scheme FILE_00.txt, FILE_01.txt, ... until a chunk 404s.
get_inventory_files() {
  company=$2
  file=$1
  # Check the file where wget actually wrote it (out/), not the CWD.
  if ! wget "${BASE_URL}/${company}/${file}.txt" -O "out/${company}_${file}.txt" || [ ! -s "out/${company}_${file}.txt" ]; then
    i=00
    while wget "${BASE_URL}/${company}/${file}_${i}.txt" -O "out/${company}_${file}_${i}.txt"; do
      # 10# forces base-10: without it "08"/"09" are invalid octal literals.
      i=$(printf '%02d' "$((10#$i + 1))")
    done
  fi
}
echo "Downloading the targets index"
wget "${BASE_URL}/targets.json"
jq -r '.targets[].name' targets.json > companies.txt
echo "Downloading $FILE_NAME files"
while IFS= read -r company; do
  echo "$company"
  get_inventory_files "$FILE_NAME" "$company"
done < companies.txt
find out -type f -empty -delete
cat out/* > out/output.txt
Contributed by trickest
Gather web spider results from the Inventory project
FILE_NAME="spider"
BASE_URL="https://raw.githubusercontent.com/trickest/inventory/main"
# Usage:
#   get_inventory_files FILE_NAME COMPANY
#
# Example:
#   get_inventory_files spider Netflix
#
# Downloads COMPANY/FILE.txt; if that file is missing or empty, falls back to
# the chunked scheme FILE_00.txt, FILE_01.txt, ... until a chunk 404s.
get_inventory_files() {
  company=$2
  file=$1
  # Check the file where wget actually wrote it (out/), not the CWD.
  if ! wget "${BASE_URL}/${company}/${file}.txt" -O "out/${company}_${file}.txt" || [ ! -s "out/${company}_${file}.txt" ]; then
    i=00
    while wget "${BASE_URL}/${company}/${file}_${i}.txt" -O "out/${company}_${file}_${i}.txt"; do
      # 10# forces base-10: without it "08"/"09" are invalid octal literals.
      i=$(printf '%02d' "$((10#$i + 1))")
    done
  fi
}
echo "Downloading the targets index"
wget "${BASE_URL}/targets.json"
jq -r '.targets[].name' targets.json > companies.txt
echo "Downloading $FILE_NAME files"
while IFS= read -r company; do
  echo "$company"
  get_inventory_files "$FILE_NAME" "$company"
done < companies.txt
find out -type f -empty -delete
cat out/* > out/output.txt
Contributed by trickest
Gather URLs from the Inventory project
FILE_NAME="urls"
BASE_URL="https://raw.githubusercontent.com/trickest/inventory/main"
# Usage:
#   get_inventory_files FILE_NAME COMPANY
#
# Example:
#   get_inventory_files spider Netflix
#
# Downloads COMPANY/FILE.txt; if that file is missing or empty, falls back to
# the chunked scheme FILE_00.txt, FILE_01.txt, ... until a chunk 404s.
get_inventory_files() {
  company=$2
  file=$1
  # Check the file where wget actually wrote it (out/), not the CWD.
  if ! wget "${BASE_URL}/${company}/${file}.txt" -O "out/${company}_${file}.txt" || [ ! -s "out/${company}_${file}.txt" ]; then
    i=00
    while wget "${BASE_URL}/${company}/${file}_${i}.txt" -O "out/${company}_${file}_${i}.txt"; do
      # 10# forces base-10: without it "08"/"09" are invalid octal literals.
      i=$(printf '%02d' "$((10#$i + 1))")
    done
  fi
}
echo "Downloading the targets index"
wget "${BASE_URL}/targets.json"
jq -r '.targets[].name' targets.json > companies.txt
echo "Downloading $FILE_NAME files"
while IFS= read -r company; do
  echo "$company"
  get_inventory_files "$FILE_NAME" "$company"
done < companies.txt
find out -type f -empty -delete
cat out/* > out/output.txt
Contributed by trickest
Get all js links from list of urls
# Extract every .js URL from the input files (regex kept byte-identical).
find in -type f -print0 | xargs -0 cat | grep -Eo "https?://\S+?\.js" | tee out/output.txt
Contributed by trickest
Extract gunzip file and cat json files to out file.
# Decompress the archives in place, then concatenate every .json file;
# -exec ... + batches files into one cat invocation.
gunzip in/*/*.gz && find in -name '*.json' -exec cat {} + | tee out/output.txt
Contributed by trickest
Parse httpx JSON output to line by line file
# Flatten httpx JSON-lines output into one space-separated line per result.
# try(...) substitutes empty output instead of erroring when a field is
# missing, and every field except url is wrapped in [] via array construction.
# NOTE(review): ."a" is iterated and stringified -- presumably the DNS A-record
# list from httpx's JSON output; confirm against the httpx version in use.
find in -type f -exec cat {} + | jq -r '"\(try(.url)) \([try(."title")]) \([try(."status_code")]) \([try(."content_length")]) \([try(."content_type")]) \([try(."host")]) \([try(."final_url")]) \([try(."webserver")]) \([try(."technologies")]) \([try(."a"|.[] | tostring)])"' | tee out/output.txt
Contributed by trickest
JQ for parsing json results.
# Flatten each object's .results array into "url status length redirect"
# lines; jq reads the files directly instead of a cat pipe.
jq -r '.results | .[]| "\(.url) \(.status) \(.length) \(.redirectlocation) "' in/*/* | tee out/output.txt
Contributed by trickest
Parse masscan's output into IP:Port pairs (e.g. 127.0.0.1:80)
# masscan "Host:" lines -> ip:port; grep -h over the files replaces the
# cat|grep pipe, and the [ /] field separator picks out IP ($3) and port ($5).
find in -type f -exec grep -h 'Host' {} + | awk -F'[ /]' '{print $3":"$5}' | tee out/output.txt
Contributed by trickest
Move specific files from in to out.
# Move every *-takeover* file from the input folders into out/.
for takeover_file in in/*/*-takeover*; do mv "$takeover_file" out/; done
Contributed by trickest
Parse nuclei JSON output to create valid csv
# Map each nuclei JSON finding onto a fixed 12-column object, then emit the
# values as one CSV row per finding; extracted_results is tostring-ed so an
# array result fits into a single CSV cell.
find in -type f -exec cat {} + | jq '. | {vulnerability_id: .templateID, tags: .info.tags, description: .info.description, authors: .info.author, severity: .info.severity, type: .type, host: .host, ip: .ip, match: .matched, vuln_name: .matcher_name, extracted_results: .extracted_results|tostring, timestamp: .timestamp}' | jq -r 'to_entries|map(.value)|@csv' | tee out/output.txt
Contributed by trickest
Print file content in lowercase.
# Lowercase every line; awk reads the files directly instead of a cat pipe.
awk '{print tolower($0)}' in/*/* | tee out/output.txt
Contributed by trickest
Recursively cat all files in a folder.
# Concatenate every regular file anywhere under in/.
find in -type f -print0 | xargs -0 cat | tee out/output.txt
Contributed by trickest
Cat all files with custom extension
# Concatenate every *.txt file; -exec ... + batches files per cat invocation.
find in -name '*.txt' -exec cat {} + | tee out/output.txt
Contributed by trickest
Remove whitespaces to files when appending values at the end of lines.
# Delete every space and tab character from the input.
find in -type f -print0 | xargs -0 cat | tr -d '[:blank:]' | tee out/output.txt
Contributed by trickest
Replaces character with new lines, sorts and deduplicates.
# Split on dots, one token per line, then sort and drop adjacent duplicates.
find in -type f -print0 | xargs -0 cat | tr '.' '\n' | sort -n | uniq | tee out/output.txt
Contributed by trickest
Replacing dot in strings with dashes
# Replace every dot with a dash; the sed program is quoted so the shell can
# never glob-expand it.
sed 's/\./-/g' in/*/* | tee out/output.txt
Contributed by trickest
Copy all files from in folders to out folder recursively.
# -r recurse, -t preserve modification times, -v verbose. No trailing slash
# on "in", so rsync copies the directory itself, producing out/in/...
rsync -rtv in out
Contributed by trickest
Parse RustScan's output into IP:Port pairs (e.g. 127.0.0.1:80)
# Merge all RustScan output, then expand each "ip -> [p1,p2,...]" line into
# one ip:port pair per port.
find in -type f -exec cat {} + > merged.txt
while IFS= read -r line; do
  # Field 1 is the IP address, field 3 the bracketed port list. Quoting
  # "$line" and read -r keep globs and backslashes intact.
  ip=$(echo "$line" | awk '{print $1}')
  ports=$(echo "$line" | awk '{print $3}' | tr -d '[]')
  # Split the comma-separated ports into an array
  IFS=',' read -ra port_arr <<< "$ports"
  # Emit the IP address paired with each port
  for port in "${port_arr[@]}"; do
    echo "${ip}:${port}" >> out/output.txt
  done
done < merged.txt
Contributed by trickest
Reverse-lookup a list of domains on SecurityTrails to retrieve hostnames
API_KEY='ADD_SECURITYTRAILS_API_KEY'
IP_ADDRESSES_INPUT_NODE='ADD_INPUT_NODE_ID'
# IP_ADDRESSES_INPUT_NODE='http-input-1'
# For each input IP, ask SecurityTrails for every domain resolving to it.
# read -r preserves backslashes; "$ip" is quoted inside the JSON payload so
# whitespace in a malformed input line cannot break the request body.
while IFS= read -r ip; do
  echo "$ip"
  curl --request POST \
    --url 'https://api.securitytrails.com/v1/domains/list?include_ips=true' \
    --header "APIKEY: $API_KEY" \
    --header 'Content-Type: application/json' \
    --data '{"filter":{"ipv4":"'"$ip"'"}}' | jq -r '.records[].hostname' | tee -a out/output.txt
done < "in/$IP_ADDRESSES_INPUT_NODE/output.txt"
Contributed by trickest
Add string at the beginning of each line using sed.
# Prefix every line with https://; the | delimiter avoids escaping slashes.
find in -type f -print0 | xargs -0 cat | sed 's|^|https://|' | tee out/output.txt
Contributed by trickest
Add string at the end of each line using sed.
# Append /FUZZ to every line; the | delimiter avoids escaping the slash.
find in -type f -print0 | xargs -0 cat | sed 's|$|/FUZZ|' | tee out/output.txt
Contributed by trickest
Replace "foo" "bar" with words you want to replace in in folder.
# Replace every "foo" with "bar" across the input files.
cat in/* | sed 's/foo/bar/g' | tee out/output.txt
Contributed by trickest
Sort all data and delete duplicates in files in in directory.
# Numeric sort followed by adjacent-duplicate removal.
find in -type f -print0 | xargs -0 cat | sort -n | uniq | tee out/output.txt
Contributed by trickest
Sort all data in files in in directory.
# Lexicographic sort of every line from every input file.
find in -type f -print0 | xargs -0 cat | sort | tee out/output.txt
Contributed by trickest
Ungrep multiple strings.
# Drop lines containing any of the words, then print the last field.
cat in/*/* | grep -E -v -w 'url|robots|linkfinder' | awk '{print $NF}' | tee out/output.txt
Contributed by trickest
Unzip files in in folder to out folder
# unzip treats extra arguments as member patterns, so a glob that matches
# more than one archive would extract the wrong thing; loop over each zip.
for archive in in/*/*.zip; do unzip "$archive" -d out; done
Contributed by trickest
Wget list of urls and output to out directory
# wget's -i takes a single list file; if the glob matched several files the
# extras would be parsed as URLs. Concatenate all lists and feed them on stdin.
find in -type f -exec cat {} + | wget -i - --directory-prefix out
Contributed by trickest
Get registrant organization using whois for domain list file input.
# Look up the registrant Organization for each input domain. A read loop
# replaces `for domain in $(...)`, which word-splits and glob-expands, and
# every expansion is quoted.
find in -type f -exec cat {} + | while IFS= read -r domain; do
  organization=$(whois "$domain" | grep 'Organization: ' | head -1 | awk -F ': ' '{print $NF}')
  echo "$domain: $organization"
done | tee out/output.txt
Contributed by trickest
Get all wildcard domains from Burp scope json file.
# Pull every in-scope host from a Burp Suite scope export, keep only wildcard
# entries, then strip backslashes, anchors/escapes and leading dots before
# sorting and deduplicating.
# NOTE(review): the 's/\"^.\*//g' and 's/$\"//g' patterns appear intended to
# remove quoted ^.* prefixes and trailing "$ anchors from Burp's regex-style
# scope strings -- verify against a real scope export before changing them.
find in -type f -exec cat {} + | jq '.target.scope.include[] | .host' | grep "*" | sed 's/\\//g' | sed 's/\"^.\*//g' | sed 's/$\"//g' | sed 's/^\.//' | sort -n | uniq | tee out/output.txt
Contributed by trickest
Zip all files and move to out directory
# Archive the whole in/ tree, writing the zip straight into out/ instead of
# creating it in the CWD and moving it afterwards.
zip -r out/output.zip in
Contributed by trickest