- easy exporting of cookies to file with "--login"

- readme updated
Kentai Radiquum 2022-06-19 21:55:40 +05:00
parent 872951fa44
commit 3b9c667d52
GPG key ID: CB1FC16C710DB347
3 changed files with 76 additions and 19 deletions

README.md

@@ -12,7 +12,7 @@ Supports all known submission types: images, text, flash and audio.
`python 3`
`pip install -r requirements.txt`
`pip3 install -r requirements.txt`
**The script currently only works with the "Modern" theme**
@@ -27,6 +27,7 @@ When downloading a folder make sure to put everything after **/folder/**, for ex
usage: furaffinity-dl.py [-h] [--submissions] [--folder FOLDER [FOLDER ...]] [--cookies COOKIES [COOKIES ...]]
[--user-agent USER_AGENT [USER_AGENT ...]] [--start START [START ...]] [--stop STOP [STOP ...]] [--dont-redownload]
[--interval INTERVAL [INTERVAL ...]] [--rating] [--filter] [--metadata] [--download DOWNLOAD] [--json-description]
[--login]
[username] [category]
Downloads the entire gallery/scraps/folder/favorites of a furaffinity user, or your submissions notifications
@@ -57,6 +58,7 @@ options:
--metadata, -m enable downloading of metadata
--download DOWNLOAD download a specific submission /view/12345678/
--json-description download description as a JSON list
--login extract furaffinity cookies directly from your browser
Examples:
python3 furaffinity-dl.py koul -> will download gallery of user koul
@@ -71,9 +73,9 @@ DISCLAIMER: It is your own responsibility to check whether batch downloading is
```
You can also log in to download restricted content. To do that, log in to FurAffinity in your web browser, export cookies to a file from your web browser in Netscape format (there are extensions to do that [for Firefox](https://addons.mozilla.org/en-US/firefox/addon/ganbo/) and [for Chrome based browsers](https://chrome.google.com/webstore/detail/get-cookiestxt/bgaddhkoddajcdgocldbbfleckgcbcid?hl=en)), you can then pass them to the script with the `-c` flag, like this (you may also have to provide your user agent):
You can also log in to download restricted content. To do that, log in to FurAffinity in your web browser, then run `python3 furaffinity-dl.py --login` to export your FurAffinity cookies in Netscape format directly to the file `cookies.txt`, or export them manually with a browser extension ([for Firefox](https://addons.mozilla.org/en-US/firefox/addon/ganbo/) or [for Chrome-based browsers](https://chrome.google.com/webstore/detail/get-cookiestxt/bgaddhkoddajcdgocldbbfleckgcbcid?hl=en)). You can then pass the cookies to the script with the `-c` flag, like this (you may also have to provide your user agent):
`python3 furaffinity-dl.py -c cookies.txt -u 'Mozilla/5.0 ....' gallery letodoesart`
`python3 furaffinity-dl.py letodoesart -c cookies.txt --user-agent 'Mozilla/5.0 ....'`
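The `-c` flag is what consumes that file. As a rough sketch (illustrative names only, not the script's exact code), a Netscape-format `cookies.txt` can be attached to a `requests` session like this:

```python
# Minimal sketch: attach a Netscape-format cookies.txt to a requests session.
# Illustrative only; furaffinity-dl.py wires this up through its -c/--cookies argument.
import http.cookiejar

import requests

jar = http.cookiejar.MozillaCookieJar("cookies.txt")
jar.load()  # raises an error if the file is not valid Netscape format

session = requests.Session()
session.cookies = jar  # later requests are sent as the logged-in user
print(session.get("https://www.furaffinity.net").status_code)
```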
## TODO

furaffinity-dl.py

@@ -54,7 +54,9 @@ parser.add_argument(
    "--user-agent",
    dest="user_agent",
    nargs="+",
    default="Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:101.0) Gecko/20100101 Firefox/101.0",
    default=[
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:101.0) Gecko/20100101 Firefox/101.0"
    ],
    help="Your browser's useragent, may be required, depending on your luck",
)
parser.add_argument(
@@ -108,13 +110,20 @@ parser.add_argument(
    action="store_true",
    help="download description as a JSON list",
)
parser.add_argument(
    "--login",
    action="store_true",
    help="extract furaffinity cookies directly from your browser",
)
args = parser.parse_args()
BASE_URL = "https://www.furaffinity.net"
username = args.username
if args.submissions is False and args.download is None: # check if you are not downloading submissions or a specific post
if (
    args.submissions is False and args.login is False and args.download is None
):  # check if you are not downloading submissions or a specific post
    categories = {
        "gallery": "gallery",
        "scraps": "scraps",
@@ -141,6 +150,8 @@ if args.cookies is not None: # add cookies if present
    session.cookies = cookies
# File downloading
def download_file(url, fname, desc):
    try:
        r = session.get(url, stream=True)
@@ -178,6 +189,7 @@ if args.folder is not None:
if args.submissions is True:
    download_url = f"{BASE_URL}/msg/submissions"
def download(path):
    response = session.get(f"{BASE_URL}{path}")
    s = BeautifulSoup(response.text, "html.parser")
@@ -205,7 +217,9 @@ def download(path):
    image = s.find(class_="download").find("a").attrs.get("href")
    title = s.find(class_="submission-title").find("p").contents[0] + " "
    description = s.find(class_="submission-description").text.strip().replace("\r\n", "\n")
    description = (
        s.find(class_="submission-description").text.strip().replace("\r\n", "\n")
    )
    if args.json_description is True:
        description = []
@@ -238,7 +252,7 @@ def download(path):
    if args.filter is True:
        match = re.search(
            "YCH[a-z $-/:-?{-~!\"^_`\[\]]*OPEN|OPEN[a-z $-/:-?{-~!\"^_`\[\]]*YCH|YCH[a-z $-/:-?{-~!\"^_`\[\]]*CLOSE|CLOSE[a-z $-/:-?{-~!\"^_`\[\]]*YCH|YCH[a-z $-/:-?{-~!\"^_`\[\]]*ABLE|AVAIL[a-z $-/:-?{-~!\"^_`\[\]]*YCH|YCH[a-z $-/:-?{-~!\"^_`\[\]]*CLONE|CLONE[a-z $-/:-?{-~!\"^_`\[\]]*YCH|YCH[a-z $-/:-?{-~!\"^_`\[\]]*LIM|LIM[a-z $-/:-?{-~!\"^_`\[\]]*YCH|COM[a-z $-/:-?{-~!\"^_`\[\]]*OPEN|OPEN[a-z $-/:-?{-~!\"^_`\[\]]*COM|COM[a-z $-/:-?{-~!\"^_`\[\]]*CLOSE|CLOSE[a-z $-/:-?{-~!\"^_`\[\]]*COM|FIX[a-z $-/:-?{-~!\"^_`\[\]]*ICE|REM[insder]*\W|\\bREF|Sale$|auction|multislot|stream|adopt",
            'YCH[a-z $-/:-?{-~!"^_`\\[\\]]*OPEN|OPEN[a-z $-/:-?{-~!"^_`\\[\\]]*YCH|YCH[a-z $-/:-?{-~!"^_`\\[\\]]*CLOSE|CLOSE[a-z $-/:-?{-~!"^_`\\[\\]]*YCH|YCH[a-z $-/:-?{-~!"^_`\\[\\]]*ABLE|AVAIL[a-z $-/:-?{-~!"^_`\\[\\]]*YCH|YCH[a-z $-/:-?{-~!"^_`\\[\\]]*CLONE|CLONE[a-z $-/:-?{-~!"^_`\\[\\]]*YCH|YCH[a-z $-/:-?{-~!"^_`\\[\\]]*LIM|LIM[a-z $-/:-?{-~!"^_`\\[\\]]*YCH|COM[a-z $-/:-?{-~!"^_`\\[\\]]*OPEN|OPEN[a-z $-/:-?{-~!"^_`\\[\\]]*COM|COM[a-z $-/:-?{-~!"^_`\\[\\]]*CLOSE|CLOSE[a-z $-/:-?{-~!"^_`\\[\\]]*COM|FIX[a-z $-/:-?{-~!"^_`\\[\\]]*ICE|REM[insder]*\\W|\\bREF|\\bSale\\W|auction|multislot|stream|adopt',
            title,
            re.IGNORECASE,
        )
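To make the filter above easier to read at a glance, here is an illustration using a hypothetical trimmed subset of the pattern, checked against sample titles the same way (`re.search` with `re.IGNORECASE`); titles that match are the ads/YCH/sale posts the `--filter` option is there to weed out:

```python
# Illustration only: a trimmed, hypothetical subset of the filter pattern above.
import re

SAMPLE_FILTER = r'YCH[a-z $-/:-?{-~!"^_`\[\]]*OPEN|\bREF|adopt'

for title in ["YCH - OPEN (2 slots)", "Reference sheet for Koul", "Sunset doodle"]:
    hit = re.search(SAMPLE_FILTER, title, re.IGNORECASE)
    print(title, "->", "filtered out" if hit else "kept")
```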
@@ -310,6 +324,7 @@ def download(path):
    return True
if args.download is not None:
    output = "furaffinity-dl/downloaded/"
    download(args.download)
@@ -317,6 +332,8 @@ if args.download is not None:
    exit()
# Main function
def main():
    # check if you are logged in
    page_end = args.stop[0]
@@ -429,5 +446,42 @@ def main():
    print(f"{GREEN}Finished downloading{END}")
def login():
    import browser_cookie3

    # load cookies from the local browser(s) and use them for a test request
    CJ = browser_cookie3.load()
    response = session.get(BASE_URL, cookies=CJ)

    # FurAffinity keeps the session in the "a" and "b" cookies
    FA_COOKIES = CJ._cookies[".furaffinity.net"]["/"]
    cookie_a = FA_COOKIES["a"]
    cookie_b = FA_COOKIES["b"]

    s = BeautifulSoup(response.text, "html.parser")
    try:
        account_username = s.find(class_="loggedin_user_avatar").attrs.get("alt")
        print(f"{GREEN}<i> Logged in as: {account_username}{END}")
        # write the two cookies in Netscape format
        # (tab-separated fields: domain, include-subdomains, path, secure, expiry, name, value)
        with open("cookies.txt", "w") as file:
            file.write(
                f"""# Netscape HTTP Cookie File
# http://curl.haxx.se/rfc/cookie_spec.html
# This is a generated file! Do not edit.
.furaffinity.net\tTRUE\t/\tTRUE\t{cookie_a.expires}\ta\t{cookie_a.value}
.furaffinity.net\tTRUE\t/\tTRUE\t{cookie_b.expires}\tb\t{cookie_b.value}
"""
            )
        print(
            f'{GREEN}<i> cookies saved successfully, now you can provide them by using "-c cookies.txt"{END}'
        )
    except AttributeError:
        print(
            f"{RED}<i> Error getting cookies, either you need to log in to furaffinity in your browser, or you can export cookies.txt manually{END}"
        )
    exit()


if __name__ == "__main__":
    if args.login is True:
        login()
    main()
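One note on the extraction above, since it reaches into the private `CookieJar._cookies` mapping: if that internal layout ever changes, a hedged alternative (a sketch, not part of this commit) is to iterate the jar returned by `browser_cookie3.load()`, optionally narrowing it with the library's `domain_name` parameter, and read only the documented `Cookie` attributes:

```python
# Sketch only: locate the FurAffinity "a"/"b" session cookies without touching
# the private _cookies attribute. Assumes browser_cookie3 is installed and you
# are logged in to FurAffinity in a supported browser.
import browser_cookie3

cj = browser_cookie3.load(domain_name="furaffinity.net")
fa = {c.name: c for c in cj if c.domain.endswith("furaffinity.net")}
cookie_a, cookie_b = fa.get("a"), fa.get("b")

if cookie_a is None or cookie_b is None:
    print("log in to FurAffinity in your browser first")
else:
    print("found session cookies a/b, expiring at", cookie_a.expires, "and", cookie_b.expires)
```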

requirements.txt

@@ -1,3 +1,4 @@
beautifulsoup4
requests
tqdm
browser-cookie3