BashKat Web Scraping Utility Script
BashKat is pretty straightforward and really easy to use.
I made sure to add some “cute” to it with the emojis.
This bot will scrape a single URL from user input or multiple URLs from a file (example: urls.txt) using wget, and it’s super fun when run through Proxychains.
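For the File option, the script reads one URL per line, so a urls.txt could look something like this (the example.com addresses are just placeholders):

https://example.com/
https://example.org/
https://example.net/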
#!/usr/bin/env bash
# BashKat Version 1.0.2
# K0NxT3D
# Variables
BotOptions="Url File Quit"
# Welcome Banner
clear
printf "✨ BashKat 1.0 ✨\nScrape Single URL/IP or Multiple From File.\n\n" && sleep 1
# Bot Options Menu
select option in $BotOptions; do
# Single URL Scrape
if [ "$option" = "Url" ];
then
printf "URL To Scrape: "
read -r scrapeurl
mkdir -p data/
# --domains expects a bare hostname, so strip the scheme and any path from the URL
domain="${scrapeurl#*://}"; domain="${domain%%/*}"
wget -P data/ \
-4 \
-w 0 \
-t 3 \
-e robots=off \
--header="Accept: text/html" \
--user-agent="BashKat/1.0 (BashKat 1.0 Web Scraper Utility +http://www.bashkat.bot/)" \
--referer="http://www.bashkat.bot" \
--random-wait \
--recursive \
--no-clobber \
--page-requisites \
--convert-links \
--restrict-file-names=windows \
--domains "$domain" \
--no-parent \
"$scrapeurl"
printf "🏁Scrape Complete.\nHit Enter To Continue.👍"
read -r anykey
# Restart the menu
exec "$0"
elif [ "$option" = "File" ];
then
printf "Path To File: "
read -r filepath
while IFS= read -r scrapeurl
do
mkdir -p data/
# --domains expects a bare hostname, so strip the scheme and any path from the URL
domain="${scrapeurl#*://}"; domain="${domain%%/*}"
wget -P data/ \
-4 \
-w 0 \
-t 3 \
-e robots=off \
--header="Accept: text/html" \
--user-agent="BashKat/1.0 (BashKat 1.0 Web Scraper Utility +http://www.bashkat.bot/)" \
--referer="http://www.bashkat.bot" \
--random-wait \
--recursive \
--no-clobber \
--page-requisites \
--convert-links \
--restrict-file-names=windows \
--domains "$domain" \
--no-parent \
"$scrapeurl"
done < "$filepath"
printf "🏁Scrape Complete.\nHit Enter To Continue.👍"
read -r anykey
# Restart the menu
exec "$0"
elif [ "$option" = "Quit" ];
then
printf "Quitting🏳"
sleep 1
clear
exit
# ERRORS
else
clear
printf "❌"
sleep 1
exec "$0"
fi
exit
done
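To push all of BashKat's traffic through Proxychains, save the script (bashkat.sh is just an assumed filename here), make it executable, and launch it under the proxychains wrapper. Assuming proxychains (or proxychains4, depending on your distro) is installed and already configured, something like this should do it:

chmod +x bashkat.sh
proxychains ./bashkat.sh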
"Yesterday is history, tomorrow is a mystery, today is a gift of God, which is why we call it the present."
Bil Keane