From 0046879d087554a3a73d4cb29b72e8a845a7df26 Mon Sep 17 00:00:00 2001 From: makeworld <25111343+makeworld-the-better-one@users.noreply.github.com> Date: Sun, 19 Jan 2020 21:29:19 -0500 Subject: [PATCH] Added sort and time range option --- README.md | 8 ++++++-- download-subreddit-images.sh | 18 ++++++++++++++---- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 025504b..8b6a37e 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ Tired of all of those reddit downloaders which want you to install tons of depen - Linux/MacOS/Windows - Parallel download -This script just downloads all directly linked images in subreddit. For more complex usage, use other reddit image downloader. +This script just downloads all directly linked images in a subreddit. It can also download with a specific sort. For more complex usage, use another reddit image downloader. Requirements ============ @@ -17,6 +17,10 @@ Requirements Usage ===== -`./download-subreddit-images.sh <subreddit>` +``` +./download-subreddit-images.sh <subreddit> +./download-subreddit-images.sh <subreddit> <sort> +./download-subreddit-images.sh <subreddit> top <top_time> +``` Script downloads images to `<subreddit>` folder in current directory. If you want to change that, you need to edit destination in rdit.sh for now. 
diff --git a/download-subreddit-images.sh b/download-subreddit-images.sh index f1314c5..303afd4 100644 --- a/download-subreddit-images.sh +++ b/download-subreddit-images.sh @@ -5,7 +5,18 @@ useragent="Love by u/gadelat" timeout=60 subreddit=$1 -url="https://www.reddit.com/r/$subreddit/.json?raw_json=1" +sort=$2 +top_time=$3 + +if [ -z $sort ]; then + sort="hot" +fi + +if [ -z $top_time ];then + top_time="" +fi + +url="https://www.reddit.com/r/$subreddit/$sort/.json?raw_json=1&t=$top_time" content=`wget -T $timeout -U "$useragent" -q -O - $url` mkdir -p $subreddit while : ; do @@ -23,11 +34,10 @@ while : ; do wget -T $timeout -U "$useragent" --no-check-certificate -nv -nc -P down -O "$subreddit/$newname" $url &>/dev/null & a=$(($a+1)) done - after=$(echo -n "$content"| jq -r '.data.after') + after=$(echo -n "$content"| jq -r '.data.after//empty') if [ -z $after ]; then break fi - url="https://www.reddit.com/r/$subreddit/.json?count=200&after=$after&raw_json=1" + url="https://www.reddit.com/r/$subreddit/$sort/.json?count=200&after=$after&raw_json=1&t=$top_time" content=`wget -T $timeout -U "$useragent" --no-check-certificate -q -O - $url` - #echo -e "$urls" done