#!/bin/bash
# A utility to download all wallpapers from hdqwalls.com given the exact URL
# of a listing or search page, e.g.:
#   https://hdqwalls.com/2880x1800/anime-girl-wallpapers/page/2
#   https://hdqwalls.com/search?q=Genshin+Impact&page=2
# Requires: curl, pup (HTML parsing CLI)

FILE="/tmp/wallpaperhtml"

# Download every wallpaper linked from a listing/search page.
download() {
    curl -s "$1" > "$FILE"

    # Extract thumbnail URLs and rewrite them into full-resolution download URLs.
    IMAGES=$(pup 'img.thumbnail' < "$FILE" \
        | grep -o '"http[^"]\+"' \
        | sed 's/\/wallpapers\/thumb/\/download/g' \
        | sed 's/\.jpg"$/-2560x1600.jpg/g' \
        | sed 's/"//g')

    COUNTER=0
    for IMG in $IMAGES; do
        COUNTER=$((COUNTER + 1))
        NAME="$(basename "$IMG")"
        echo "[ Download ] ($COUNTER) $NAME"
        curl -s "$IMG" > "$NAME"
        sleep 2   # be polite to the server
    done
    echo -e "\nFinished!"
}

if [[ -z "$1" ]]; then
    echo "[ Error ] Please provide a URL"
    exit 1
fi

if [[ "$1" = "s" ]]; then
    # Single-wallpaper mode: "s" followed by the URL of one wallpaper page.
    if [[ -z "$2" ]]; then
        echo "[ Error ] Please provide a URL after 's'"
        exit 1
    fi
    IMAGE=$(curl -s "$2" \
        | pup 'img.d_img_holder' \
        | grep -o '"http[^"]\+"' \
        | sed 's/\/wallpapers\/bthumb/\/download/g' \
        | sed 's/\.jpg"$/-2560x1600.jpg/g' \
        | sed 's/"//g')
    NAME="$(date +%s)"
    curl -s "$IMAGE" > "$NAME.jpg"
else
    download "$1"
fi
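
# Example invocations (a sketch: the filename hdqwalls-dl.sh and the
# single-wallpaper URL below are assumptions, not part of the site layout
# shown above):
#
#   ./hdqwalls-dl.sh "https://hdqwalls.com/search?q=Genshin+Impact&page=2"   # fetch every wallpaper on a search/listing page
#   ./hdqwalls-dl.sh s "https://hdqwalls.com/some-wallpaper-page"            # fetch a single wallpaper page ("s" mode)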