[cyberfile:folder] support subfolders (#8323)
https://github.com/mikf/gallery-dl/issues/8323#issuecomment-3366857916 — add a 'recursive' option
This commit is contained in:
@@ -2231,6 +2231,16 @@ Description
|
||||
(see `getpass() <https://docs.python.org/3/library/getpass.html#getpass.getpass>`__).
|
||||
|
||||
|
||||
extractor.cyberfile.recursive
|
||||
-----------------------------
|
||||
Type
|
||||
``bool``
|
||||
Default
|
||||
``true``
|
||||
Description
|
||||
Recursively download files from subfolders.
|
||||
|
||||
|
||||
extractor.[Danbooru].external
|
||||
-----------------------------
|
||||
Type
|
||||
|
||||
@@ -246,7 +246,8 @@
|
||||
},
|
||||
"cyberfile":
|
||||
{
|
||||
"password": ""
|
||||
"password" : "",
|
||||
"recursive": true
|
||||
},
|
||||
"dankefuerslesen":
|
||||
{
|
||||
|
||||
@@ -56,7 +56,9 @@ class CyberfileFolderExtractor(CyberfileExtractor):
|
||||
url = f"{self.root}/folder/{folder_hash}"
|
||||
folder_num = text.extr(self.request(url).text, "ages('folder', '", "'")
|
||||
|
||||
extract_urls = text.re(r'dtfullurl="([^"]+)').findall
|
||||
extract_folders = text.re(r'sharing-url="([^"]+)').findall
|
||||
extract_files = text.re(r'dtfullurl="([^"]+)').findall
|
||||
recursive = self.config("recursive", True)
|
||||
perpage = 600
|
||||
|
||||
data = {
|
||||
@@ -67,20 +69,27 @@ class CyberfileFolderExtractor(CyberfileExtractor):
|
||||
"filterOrderBy": "",
|
||||
}
|
||||
resp = self.request_api("/account/ajax/load_files", data)
|
||||
html = resp["html"]
|
||||
|
||||
folder = {
|
||||
"_extractor" : CyberfileFileExtractor,
|
||||
"folder_hash": folder_hash,
|
||||
"folder_num" : text.parse_int(folder_num),
|
||||
"folder" : resp["page_title"],
|
||||
}
|
||||
|
||||
while True:
|
||||
urls = extract_urls(resp["html"])
|
||||
for url in urls:
|
||||
yield Message.Queue, url, folder
|
||||
folders = extract_folders(html)
|
||||
if recursive and folders:
|
||||
folder["_extractor"] = CyberfileFolderExtractor
|
||||
for url in folders:
|
||||
yield Message.Queue, url, folder
|
||||
|
||||
if len(urls) < perpage:
|
||||
if files := extract_files(html):
|
||||
folder["_extractor"] = CyberfileFileExtractor
|
||||
for url in files:
|
||||
yield Message.Queue, url, folder
|
||||
|
||||
if len(folders) + len(files) < perpage:
|
||||
return
|
||||
data["pageStart"] += 1
|
||||
resp = self.request_api("/account/ajax/load_files", data)
|
||||
@@ -109,12 +118,12 @@ class CyberfileSharedExtractor(CyberfileExtractor):
|
||||
pos = html.find("<!-- /.navbar-collapse -->") + 26
|
||||
|
||||
data = {"_extractor": CyberfileFolderExtractor}
|
||||
for folder in text.extract_iter(html, 'sharing-url="', '"', pos):
|
||||
yield Message.Queue, folder, data
|
||||
for url in text.extract_iter(html, 'sharing-url="', '"', pos):
|
||||
yield Message.Queue, url, data
|
||||
|
||||
data = {"_extractor": CyberfileFileExtractor}
|
||||
for file in text.extract_iter(html, 'dtfullurl="', '"', pos):
|
||||
yield Message.Queue, file, data
|
||||
for url in text.extract_iter(html, 'dtfullurl="', '"', pos):
|
||||
yield Message.Queue, url, data
|
||||
|
||||
|
||||
class CyberfileFileExtractor(CyberfileExtractor):
|
||||
|
||||
@@ -104,6 +104,19 @@ __tests__ = (
|
||||
"#exception": exception.AuthorizationError,
|
||||
},
|
||||
|
||||
{
|
||||
"#url" : "https://cyberfile.me/folder/8b17bbfdf25fca19aa51176bd246c97c/Helena_Price_Onlyfans",
|
||||
"#class" : cyberfile.CyberfileFolderExtractor,
|
||||
"#results" : (
|
||||
"https://cyberfile.me/folder/c2cfdcfcf1a6e6e57de7bc948804b0fc/PICS",
|
||||
"https://cyberfile.me/folder/bdc7c36e7d4dfdc3fb908a6d3fe1cae5/VIDEO",
|
||||
),
|
||||
|
||||
"folder" : "Helena Price Onlyfans",
|
||||
"folder_hash": "8b17bbfdf25fca19aa51176bd246c97c",
|
||||
"folder_num" : 18322,
|
||||
},
|
||||
|
||||
{
|
||||
"#url" : "https://cyberfile.me/shared/tao35avvfc",
|
||||
"#class" : cyberfile.CyberfileSharedExtractor,
|
||||
|
||||
Reference in New Issue
Block a user