Added sort and time range option

Authored by makeworld on 2020-01-19 21:29:19 -05:00; committed by Gabriel Ostrolucký
parent deddc564d7
commit 0046879d08
No known key found for this signature in database
GPG Key ID: 0B618B95BA22CEEF
2 changed files with 20 additions and 6 deletions


@@ -7,7 +7,7 @@ Tired of all of those reddit downloaders which want you to install tons of depen
 - Linux/MacOS/Windows
 - Parallel download
 
-This script just downloads all directly linked images in subreddit. For more complex usage, use other reddit image downloader.
+This script just downloads all directly linked images in subreddit. It can also download with a specific sort. For more complex usage, use other reddit image downloader.
 
 Requirements
 ============
@@ -17,6 +17,10 @@ Requirements
 Usage
 =====
 
-`./download-subreddit-images.sh <subreddit_name>`
+```
+./download-subreddit-images.sh <subreddit_name>
+./download-subreddit-images.sh <subreddit_name> <hot|new|rising|top>
+./download-subreddit-images.sh <subreddit_name> top <all|year|month|week|day>
+```
 
 Script downloads images to `<subreddit_name>` folder in current directory. If you want to change that, you need to edit destination in rdit.sh for now.
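
With the extended usage added above, invocations like the following should work; the subreddit name is only an illustrative placeholder:

```
./download-subreddit-images.sh wallpapers            # hot posts (default sort)
./download-subreddit-images.sh wallpapers new        # newest posts
./download-subreddit-images.sh wallpapers top week   # top posts of the past week
```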


@@ -5,7 +5,18 @@ useragent="Love by u/gadelat"
 timeout=60
 subreddit=$1
-url="https://www.reddit.com/r/$subreddit/.json?raw_json=1"
+sort=$2
+top_time=$3
+
+if [ -z $sort ]; then
+    sort="hot"
+fi
+
+if [ -z $top_time ];then
+    top_time=""
+fi
+
+url="https://www.reddit.com/r/$subreddit/$sort/.json?raw_json=1&t=$top_time"
 content=`wget -T $timeout -U "$useragent" -q -O - $url`
 mkdir -p $subreddit
 
 while : ; do
@@ -23,11 +34,10 @@ while : ; do
         wget -T $timeout -U "$useragent" --no-check-certificate -nv -nc -P down -O "$subreddit/$newname" $url &>/dev/null &
         a=$(($a+1))
     done
-    after=$(echo -n "$content"| jq -r '.data.after')
+    after=$(echo -n "$content"| jq -r '.data.after//empty')
    if [ -z $after ]; then
        break
    fi
-    url="https://www.reddit.com/r/$subreddit/.json?count=200&after=$after&raw_json=1"
+    url="https://www.reddit.com/r/$subreddit/$sort/.json?count=200&after=$after&raw_json=1&t=$top_time"
    content=`wget -T $timeout -U "$useragent" --no-check-certificate -q -O - $url`
-    #echo -e "$urls"
 done
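
The first hunk above defaults $sort to "hot" and leaves $top_time empty when the arguments are omitted. A minimal sketch of an equivalent, more compact way to express that with shell parameter expansion (not what the commit does, just an illustration):

```
subreddit=$1
sort=${2:-hot}     # fall back to "hot" when no sort argument is given
top_time=${3:-}    # empty by default, so t= in the URL carries no value
url="https://www.reddit.com/r/$subreddit/$sort/.json?raw_json=1&t=$top_time"
```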
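
The second hunk switches the jq filter to '.data.after//empty' so that a null "after" field produces no output instead of the literal string "null", which lets the [ -z $after ] check end the pagination loop. A small standalone demonstration of that operator, assuming jq is installed:

```
echo '{"data":{"after":"t3_abc123"}}' | jq -r '.data.after//empty'   # prints: t3_abc123
echo '{"data":{"after":null}}' | jq -r '.data.after//empty'          # prints nothing, so -z is true
```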