Reddit-Image-Downloader-RID/download-subreddit-images.sh

#!/bin/bash
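# Usage: ./download-subreddit-images.sh <subreddit> [sort] [top_time]
# Downloads image posts from the given subreddit into a ./<subreddit>/ directory.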
#cfg
useragent="Love by u/gadelat"
timeout=60
subreddit=$1
sort=$2
top_time=$3
if [ -z "$sort" ]; then
sort="hot"
fi
if [ -z "$top_time" ]; then
top_time=""
fi
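# First page of the subreddit listing as JSON; raw_json=1 keeps URLs unescaped,
# and t= only affects time-filtered sorts such as "top".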
url="https://www.reddit.com/r/$subreddit/$sort/.json?raw_json=1&t=$top_time"
content=$(wget -T "$timeout" -U "$useragent" -q -O - "$url")
mkdir -p "$subreddit"
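# Page through the listing until reddit stops returning an "after" cursor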
while : ; do
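# Keep only posts whose post_hint mentions "image" and pull out the preview URL, title and id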
urls=$(echo -n "$content"| jq -r '.data.children[]|select(.data.post_hint|test("image")?) | .data.preview.images[0].source.url')
names=$(echo -n "$content"| jq -r '.data.children[]|select(.data.post_hint|test("image")?) | .data.title')
ids=$(echo -n "$content"| jq -r '.data.children[]|select(.data.post_hint|test("image")?) | .data.id')
a=1
wait # prevent spawning too many processes
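# $urls, $names and $ids are parallel lists; sed -n "$a"p picks the title/id line matching each URL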
for url in $urls; do
name=$(echo -n "$names"|sed -n "$a"p)
id=$(echo -n "$ids"|sed -n "$a"p)
ext=$(echo -n "${url##*.}"|cut -d '?' -f 1)
newname="$name"_"$subreddit"_$id.$ext
echo "$name"
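# Fetch each image in the background; -nc avoids re-downloading files that already exist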
wget -T "$timeout" -U "$useragent" --no-check-certificate -nv -nc -O "$subreddit/$newname" "$url" &>/dev/null &
a=$(($a+1))
done
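# Pagination cursor for the next page; jq's //empty yields an empty string when it is null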
after=$(echo -n "$content"| jq -r '.data.after//empty')
if [ -z "$after" ]; then
break
fi
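# Request the next page of results using the cursor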
url="https://www.reddit.com/r/$subreddit/$sort/.json?count=200&after=$after&raw_json=1&t=$top_time"
content=$(wget -T "$timeout" -U "$useragent" --no-check-certificate -q -O - "$url")
done