#!/bin/bash
#
# rid - Reddit Image Downloader - Simple bash script to download images from a subreddit.
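#
# Requires wget and jq.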
timeout=60 # network timeout in seconds, passed to every wget call
usage() { printf "%s" "\
Usage: rid SUBREDDIT [hot|new|rising|top|controversial] [number] [all|year|month|week|day]
Examples: rid starterpacks new 10
          rid funny top 50 month
"; exit 0;
}
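# Positional arguments; only SUBREDDIT is required.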
subreddit=$1
sort=$2
number=$3
top_time=$4
if [ "$1" == "-h" ] || [ -z $subreddit ]; then
    usage
fi
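# Fall back to defaults for any argument that was omitted.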
if [ -z "$sort" ]; then
sort="hot"
fi
if [ -z "$top_time" ]; then
top_time=""
fi
if [ -z "$number" ]; then
    number=200
fi
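# Fetch the first page of the subreddit listing as JSON. raw_json=1 keeps URLs
# unescaped; t= is the time window, which Reddit only applies to top/controversial.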
url="https://www.reddit.com/r/$subreddit/$sort/.json?raw_json=1&t=$top_time"
content=$(wget -T "$timeout" -q -O - "$url")
mkdir -p "$subreddit"
i=1 # running count of downloaded images across all pages
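# Page through the listing until Reddit stops returning an "after" cursor
# or $number images have been queued.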
while : ; do
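    # Extract three parallel lists (one line per image post): image URL, title, post id.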
    urls=$(echo -n "$content" | jq -r '.data.children[]|select(.data.post_hint|test("image")?) | .data.preview.images[0].source.url')
    names=$(echo -n "$content" | jq -r '.data.children[]|select(.data.post_hint|test("image")?) | .data.title')
    ids=$(echo -n "$content" | jq -r '.data.children[]|select(.data.post_hint|test("image")?) | .data.id')
    a=1 # line index into $names and $ids, kept in step with $urls
    wait # prevent spawning too many processes
    for url in $urls; do
        name=$(echo -n "$names" | sed -n "$a"p)
        id=$(echo -n "$ids" | sed -n "$a"p)
        ext=$(echo -n "${url##*.}" | cut -d '?' -f 1) # file extension, with any query string stripped
        newname="$subreddit"_"$sort""$top_time"_"$(printf "%04d" "$i")"_"$name"_$id.$ext
printf "$i/$number : $newname\n"
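        # Queue the download in the background; -nc skips targets that already exist.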
        wget -T "$timeout" --no-check-certificate -nv -nc -O "$subreddit/$newname" "$url" &>/dev/null &
        ((a=a+1))
        ((i=i+1))
        if [ "$i" -gt "$number" ]; then
            exit 0
        fi
    done
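    # "after" is Reddit's pagination cursor; empty means the listing is exhausted.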
    after=$(echo -n "$content" | jq -r '.data.after//empty')
    if [ -z "$after" ]; then
        break
    fi
url="https://www.reddit.com/r/$subreddit/$sort/.json?count=200&after=$after&raw_json=1&t=$top_time"
    content=$(wget -T "$timeout" --no-check-certificate -q -O - "$url")
done