Reddit-Image-Downloader-RID/download-subreddit-images.sh

#!/bin/bash
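# Usage: ./download-subreddit-images.sh <subreddit>
# Downloads image posts from the given subreddit into ./<subreddit>/ (requires wget and jq).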
#cfg
useragent="Love by u/gadelat"
timeout=60
subreddit=$1
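# Assumes a missing argument should abort rather than request /r//.json
if [ -z "$subreddit" ]; then
    echo "Usage: $0 <subreddit>" >&2
    exit 1
fi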
url="https://www.reddit.com/r/$subreddit/.json?raw_json=1"
content=$(wget -T "$timeout" -U "$useragent" -q -O - "$url")
mkdir -p "$subreddit"
while : ; do
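    # One pass per page of the listing: collect the source URL, title and id
    # of every image post, download them, then follow .data.after to the next page.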
    urls=$(echo -n "$content" | jq -r '.data.children[]|select(.data.post_hint|test("image")) | .data.preview.images[0].source.url')
    names=$(echo -n "$content" | jq -r '.data.children[]|select(.data.post_hint|test("image")) | .data.title')
    ids=$(echo -n "$content" | jq -r '.data.children[]|select(.data.post_hint|test("image")) | .data.id')
    a=1
    wait # wait for the previous page's downloads before spawning more processes
    for url in $urls; do
        name=$(echo -n "$names" | sed -n "${a}p")
        id=$(echo -n "$ids" | sed -n "${a}p")
        ext=$(echo -n "${url##*.}" | cut -d '?' -f 1)
        newname="${name}_${subreddit}_${id}.${ext}"
        echo "$name"
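        # Download in the background so the images on this page are fetched in parallel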
        wget -T "$timeout" -U "$useragent" --no-check-certificate -nv -nc -P down -O "$subreddit/$newname" "$url" &>/dev/null &
        a=$((a+1))
    done
    after=$(echo -n "$content" | jq -r '.data.after')
    # jq prints the literal string "null" when there is no next page
    if [ -z "$after" ] || [ "$after" = "null" ]; then
        break
    fi
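    # Request the next page of the listing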
url="https://www.reddit.com/r/$subreddit/.json?count=200&after=$after&raw_json=1"
    content=$(wget -T "$timeout" -U "$useragent" --no-check-certificate -q -O - "$url")
    #echo -e "$urls"
done
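# Example: ./download-subreddit-images.sh wallpapers   # images end up in ./wallpapers/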