Reddit-Image-Downloader-RID/download-subreddit-images.sh

#!/bin/bash
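
# usage: ./download-subreddit-images.sh <subreddit>
# e.g.:  ./download-subreddit-images.sh wallpapers   ("wallpapers" is just an example name)
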
# config
useragent="Love by u/gadelat"
subreddit=$1
url="https://www.reddit.com/r/$subreddit/.json?raw_json=1"
# fetch the first page of the subreddit listing as JSON
content=$(wget -U "$useragent" -q -O - "$url")
# images are saved into a directory named after the subreddit
mkdir -p "$subreddit"
# page through the listing until reddit stops returning an "after" cursor
while : ; do
  # pull the source image URLs out of the JSON with grep/cut
  urls=$(echo -e "$content" | grep -Po '"source": {"url":.*?[^\\]",' | cut -f 6 -d '"')
  # matching lists of post titles and ids, in the same order as the URLs
  names=$(echo -e "$content" | grep -Po '"title":.*?[^\\]",' | cut -f 4 -d '"')
  ids=$(echo -e "$content" | grep -Po '"id":.*?[^\\]",' | cut -f 4 -d '"')
  a=1
  for url in $(echo -e "$urls"); do
    # only download direct links to gif or jpg files
    if [ -n "$(echo "$url" | grep -E '\.(gif|jpg)')" ]; then
      # look up the title and id belonging to this URL by line number
      name=$(echo -e "$names" | sed -n "$a"p)
      id=$(echo -e "$ids" | sed -n "$a"p)
      echo "$name"
      # filename: <title>_<subreddit>_<id>.<original extension>
      newname="$name"_"$subreddit"_$id.${url##*.}
      # -nc skips files that already exist; -O places the file in the subreddit directory
      wget -U "$useragent" --no-check-certificate -nv -nc -O "$subreddit/$newname" "$url"
    fi
    a=$((a+1))
  done
  # "after" is reddit's pagination cursor; empty means this was the last page
  after=$(echo -e "$content" | grep -Po '"after":.*?[^\\]",' | cut -f 4 -d '"' | tail -n 1)
  if [ -z "$after" ]; then
    break
  fi
  url="https://www.reddit.com/r/$subreddit/.json?count=200&after=$after&raw_json=1"
  # fetch the next page before looping again
  content=$(wget -U "$useragent" --no-check-certificate -q -O - "$url")
done