Update rid

Changed wget to curl
Added useragent
mirror-maker 2023-12-09 00:23:49 -05:00
parent f1b7bb1e07
commit 42dd8a93c8
1 changed file with 68 additions and 64 deletions
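
In short, the commit moves the listing fetch from an anonymous wget call to curl with an explicit User-Agent header; Reddit's JSON endpoints tend to throttle or reject clients that present a generic user agent, so identifying one makes the fetch more reliable. A minimal before/after sketch of the fetch step, simplified from the diff below, with r/pics standing in as a hypothetical subreddit:

    # before: wget with its default user agent (commonly rate-limited by Reddit)
    content=$(wget -T 60 -q -O - "https://www.reddit.com/r/pics/hot/.json?raw_json=1")
    # after: curl sending the script's custom User-Agent header
    content=$(curl -H "User-Agent: Love by u/gadelat" "https://www.reddit.com/r/pics/hot/.json?raw_json=1")

One side effect visible in the diff: the new curl call no longer passes a timeout, whereas the old wget call used -T $timeout.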

rid (132 changed lines)

@@ -1,64 +1,68 @@
 #!/bin/bash
-#
+#https://github.com/dbeley/rid/blob/master/rid
 # rid - Reddit Image Downloader - Simple bash script to download images from a subreddit.
+useragent="Love by u/gadelat"
 timeout=60
 
 usage() { printf "%s" "\
 Usage: rid SUBREDDIT [hot|new|rising|top|controversial] [number] [all|year|month|week|day]
 Examples: rid starterpacks new 10
           rid funny top 50 month
 "; exit 0;
 }
 
 subreddit=$1
 sort=$2
 number=$3
 top_time=$4
 
 if [ "$1" == "-h" ] || [ -z $subreddit ]; then
     usage
 fi
 
 if [ -z $sort ]; then
     sort="hot"
 fi
 
 if [ -z $top_time ]; then
     top_time=""
 fi
 
 if [ -z $number ]; then
     number=200
 fi
 
 url="https://www.reddit.com/r/$subreddit/$sort/.json?raw_json=1&t=$top_time"
-content=$(wget -T $timeout -q -O - $url)
+content=`curl -H "User-Agent: $useragent" $url`
 mkdir -p $subreddit
 i=1
 while : ; do
     urls=$(echo -n "$content" | jq -r '.data.children[]|select(.data.post_hint|test("image")?) | .data.preview.images[0].source.url')
     names=$(echo -n "$content" | jq -r '.data.children[]|select(.data.post_hint|test("image")?) | .data.title')
     ids=$(echo -n "$content" | jq -r '.data.children[]|select(.data.post_hint|test("image")?) | .data.id')
     a=1
     wait # prevent spawning too many processes
     for url in $urls; do
         name=$(echo -n "$names" | sed -n "$a"p)
         id=$(echo -n "$ids" | sed -n "$a"p)
         ext=$(echo -n "${url##*.}" | cut -d '?' -f 1)
         newname="$subreddit"_"$sort""$top_time"_"$(printf "%04d" $i)"_"$name"_$id.$ext
         printf "$i/$number : $newname\n"
         wget -T $timeout --no-check-certificate -nv -nc -P down -O "$subreddit/$newname" $url &>/dev/null &
         ((a=a+1))
         ((i=i+1))
         if [ $i -gt $number ] ; then
             exit 0
         fi
     done
     after=$(echo -n "$content"| jq -r '.data.after//empty')
     if [ -z $after ]; then
         break
     fi
     url="https://www.reddit.com/r/$subreddit/$sort/.json?count=200&after=$after&raw_json=1&t=$top_time"
-    content=`wget -T $timeout --no-check-certificate -q -O - $url`
+    content=`curl -H "User-Agent: $useragent" $url`
 done