diff --git a/rid b/rid
index e40a726..9223bd3 100755
--- a/rid
+++ b/rid
@@ -1,64 +1,73 @@
-#!/bin/bash
-#
-# rid - Reddit Image Downloader - Simple bash script to download images from a subreddit.
-
-timeout=60
-
-usage() { printf "%s" "\
-Usage:      rid SUBREDDIT [hot|new|rising|top|controversial] [number] [all|year|month|week|day]
-Examples:   rid starterpacks new 10
-            rid funny top 50 month
-"; exit 0;
-}
-
-subreddit=$1
-sort=$2
-number=$3
-top_time=$4
-
-if [ "$1" == "-h" ] || [ -z $subreddit ]; then
-    usage
-fi
-
-if [ -z $sort ]; then
-    sort="hot"
-fi
-
-if [ -z $top_time ]; then
-    top_time=""
-fi
-
-if [ -z $number ]; then
-	number=200
-fi
-
-url="https://www.reddit.com/r/$subreddit/$sort/.json?raw_json=1&t=$top_time"
-content=$(wget -T $timeout -q -O - $url)
-mkdir -p $subreddit
-i=1
-while : ; do
-    urls=$(echo -n "$content" | jq -r '.data.children[]|select(.data.post_hint|test("image")?) | .data.preview.images[0].source.url')
-    names=$(echo -n "$content" | jq -r '.data.children[]|select(.data.post_hint|test("image")?) | .data.title')
-    ids=$(echo -n "$content" | jq -r '.data.children[]|select(.data.post_hint|test("image")?) | .data.id')
-    a=1
-    wait # prevent spawning too many processes
-    for url in $urls; do
-        name=$(echo -n "$names" | sed -n "$a"p)
-        id=$(echo -n "$ids" | sed -n "$a"p)
-        ext=$(echo -n "${url##*.}" | cut -d '?' -f 1)
-        newname="$subreddit"_"$sort""$top_time"_"$(printf "%04d" $i)"_"$name"_$id.$ext
-        printf "$i/$number : $newname\n"
-        wget -T $timeout --no-check-certificate -nv -nc -P down -O "$subreddit/$newname" $url &>/dev/null &
-    	((a=a+1))
-    	((i=i+1))
-		if [ $i -gt $number ] ; then
-			exit 0
-		fi
-    done
-    after=$(echo -n "$content"| jq -r '.data.after//empty')
-    if [ -z $after ]; then
-        break
-    fi
-    url="https://www.reddit.com/r/$subreddit/$sort/.json?count=200&after=$after&raw_json=1&t=$top_time"
-    content=`wget -T $timeout --no-check-certificate -q -O - $url`
-done
+#!/bin/bash
+# https://github.com/dbeley/rid/blob/master/rid
+# rid - Reddit Image Downloader - Simple bash script to download images from a subreddit.
+
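+# Reddit tends to throttle or reject requests that use a generic User-Agent, so identify the script with a custom one.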
+useragent="Love by u/gadelat"
+timeout=60
+
+usage() { printf "%s" "\
+Usage:      rid SUBREDDIT [hot|new|rising|top|controversial] [number] [all|year|month|week|day]
+Examples:   rid starterpacks new 10
+            rid funny top 50 month
+"; exit 0;
+}
+
+subreddit=$1
+sort=$2
+number=$3
+top_time=$4
+
+if [ "$1" == "-h" ] || [ -z $subreddit ]; then
+    usage
+fi
+
+if [ -z $sort ]; then
+    sort="hot"
+fi
+
+if [ -z $top_time ]; then
+    top_time=""
+fi
+
+if [ -z $number ]; then
+	number=200
+fi
+
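+# Reddit serves any listing as JSON; raw_json=1 returns URLs unescaped (e.g. & instead of &amp;).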
+url="https://www.reddit.com/r/$subreddit/$sort/.json?raw_json=1&t=$top_time"
+content=$(curl -s --max-time "$timeout" -H "User-Agent: $useragent" "$url")
+mkdir -p "$subreddit"
+i=1
+while : ; do
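+    # Extract the source URL, title and id of every image post on this page.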
+    urls=$(echo -n "$content" | jq -r '.data.children[]|select(.data.post_hint|test("image")?) | .data.preview.images[0].source.url')
+    names=$(echo -n "$content" | jq -r '.data.children[]|select(.data.post_hint|test("image")?) | .data.title')
+    ids=$(echo -n "$content" | jq -r '.data.children[]|select(.data.post_hint|test("image")?) | .data.id')
+    a=1
+    wait # prevent spawning too many processes
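+    # Filenames follow subreddit_sort[top_time]_NNNN_title_id.ext so they sort in listing order.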
+    for url in $urls; do
+        name=$(echo -n "$names" | sed -n "$a"p)
+        name=${name//\//_}  # titles may contain "/", which would break the output path
+        id=$(echo -n "$ids" | sed -n "$a"p)
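+        # Extension: everything after the last dot, with any query string stripped.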
+        ext=$(echo -n "${url##*.}" | cut -d '?' -f 1)
+        newname="${subreddit}_${sort}${top_time}_$(printf "%04d" "$i")_${name}_${id}.${ext}"
+        printf '%s/%s : %s\n' "$i" "$number" "$newname"
+        # Download in the background; the wait at the top of the loop keeps each page's jobs bounded.
+        wget -T "$timeout" --no-check-certificate -nv -nc -O "$subreddit/$newname" "$url" &>/dev/null &
+        ((a++))
+        ((i++))
+        if [ "$i" -gt "$number" ]; then
+            exit 0
+        fi
+    done
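+    # "after" is Reddit's pagination cursor; an empty value means the last page was reached.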
+    after=$(echo -n "$content" | jq -r '.data.after//empty')
+    if [ -z "$after" ]; then
+        break
+    fi
+    url="https://www.reddit.com/r/$subreddit/$sort/.json?count=200&after=$after&raw_json=1&t=$top_time"
+    content=$(curl -s --max-time "$timeout" -H "User-Agent: $useragent" "$url")
+done