diff --git a/README.md b/README.md
index adbf10d..8c70f93 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,28 @@
-# furaffinity-dl
-FurAffinity Downloader
+# FurAffinity Downloader
+**furaffinity-dl** is a BASH script for batch downloading of galleries, scraps and favorites from furaffinity.net users.
+I've written it for preservation of culture, to counter the people nuking their galleries every once in a while.
+
+Supports all known submission types: images, texts and audio. Right now it downloads only the files themselves.
+I'd like to eventually expand it to download the description pages as well and arrange them in chronological order. Patches are welcome!
+
+## Requirements
+
+Coreutils, bash and wget are the only requirements.
+
+furaffinity-dl was tested only on Linux. It should also work on Mac and BSDs.
+Windows users can probably get it to work via Cygwin, but running a virtual machine with Linux might be simpler.
+
+## Usage
+ `furaffinity-dl section/username`
+
+All files from the given section and user will be downloaded to the current directory.
+### Examples
+ `furaffinity-dl gallery/ymxa`
+ `furaffinity-dl scraps/---`
+ `furaffinity-dl favorites/kivuli`
+
+## TODO
+ * Support cookies - needs UI and documentation, can already be achieved by
+   adding "--load-cookies *file*" to the wget line in the beginning of the script
+ * Download author's description of the artwork, and ideally the entire description page along with user comments
+ * Sort the downloaded stuff in chronological order
diff --git a/furaffinity-dl b/furaffinity-dl
new file mode 100755
index 0000000..81b0bed
--- /dev/null
+++ b/furaffinity-dl
@@ -0,0 +1,51 @@
+#!/bin/bash
+set -e
+
+# wget wrapper with custom user agent
+# cookies also should be added here
+fwget() {
+	wget --user-agent="Mozilla/5.0 furaffinity-dl" "$@"
+}
+
+if [ "$1" = "" ] || [ "$1" = "-h" ] || [ "$1" = "--help" ]; then
+	echo "Usage: $0 SECTION/USER
+Downloads the entire gallery/scraps/favorites of furaffinity.net user.
+
+Examples:
+ $0 gallery/ymxa
+ $0 scraps/---
+ $0 favorites/kivuli"
+	exit 1
+fi
+
+tempfile="$(mktemp)"
+trap 'rm -f -- "$tempfile"' EXIT
+
+base_url=https://www.furaffinity.net/"$1"
+
+url="$base_url"
+page_counter=1
+
+while [ -n "$url" ]; do
+	fwget -O "$tempfile" "$url"
+	grep -q -i "there are no submissions to list" "$tempfile" && break
+
+	# collect the relative links to the individual submission pages
+	# NOTE(review): pattern assumes listing markup like <a href="/view/NNN/"> - confirm against live pages
+	artwork_pages=$(grep '<a href="/view/' "$tempfile" | cut -d '"' -f 2)
+
+	for page in $artwork_pages; do
+		# fetch the submission page and pull the direct file URL out of its Download link
+		fwget -O "$tempfile" 'https://www.furaffinity.net'"$page"
+		image_url='https:'"$(grep -m 1 'Download' "$tempfile" | cut -d '"' -f 2)"
+
+		# TODO: Get the submission title out of the page
+		# this trick may come in handy for avoiding slashes in filenames:
+		# | tr '/' '∕'
+
+		# TODO: prepend a fancy title, date or something
+		wget "$image_url"
+	done
+
+	page_counter=$((page_counter + 1))
+	url="$base_url"/"$page_counter"
+done