Add download interval option

This commit is contained in:
Xerbo 2020-09-23 14:32:02 +01:00
parent 9c8548076f
commit 0f0fe3e6a7
2 changed files with 7 additions and 1 deletions

View file

@@ -17,7 +17,7 @@ furaffinity-dl has only been tested only on Linux, however it should also work o
## Usage
```
usage: furaffinity-dl.py [-h] [--output OUTPUT] [--cookies COOKIES] [--ua UA] [--start START] [--dont-redownload] [--interval INTERVAL] [category] [username]

Downloads the entire gallery/scraps/favorites of a furaffinity user
@@ -36,6 +36,8 @@ optional arguments:
                        page number to start from
  --dont-redownload, -d
                        Don't redownload files that have already been downloaded
--interval INTERVAL, -i INTERVAL
delay between downloading pages
Examples:
 python3 furaffinity-dl.py gallery koul

View file

@@ -8,6 +8,7 @@ import requests
import http.cookiejar as cookielib
import re
import os
from time import sleep
'''
Please refer to LICENSE for licensing conditions.
@@ -30,6 +31,8 @@ parser.add_argument('--cookies', '-c', dest='cookies', type=str, default='', hel
parser.add_argument('--ua', '-u', dest='ua', type=str, default='Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:68.7) Gecko/20100101 Firefox/68.7', help="Your browser's useragent, may be required, depending on your luck")
parser.add_argument('--start', '-s', dest='start', type=str, default=1, help="page number to start from")
parser.add_argument('--dont-redownload', '-d', const='dont_redownload', action='store_const', help="Don't redownload files that have already been downloaded")
parser.add_argument('--interval', '-i', dest='interval', type=float, default=0, help="delay between downloading pages")
args = parser.parse_args()

if args.username is None:
@@ -182,6 +185,7 @@ while True:
    # Download all images on the page
    for img in s.findAll('figure'):
        download(img.find('a').attrs.get('href'))
sleep(args.interval)
    # Favorites galleries use a weird timestamp system, so grab the next "page" from the Next button
    if args.category == 'favorites':