rework the '-g' cmdline option

The number of times the -g option is given now determines up to
which level URLs are resolved.

example:

$ gallery-dl -g http://kissmanga.com/Manga/Dropout
http://kissmanga.com/Manga/Dropout/Ch-000---Oneshot-?id=145847

- when applied to a manga-extractor, specifying the -g option once will
  now print a list of all chapter URLs

$ gallery-dl -gg http://kissmanga.com/Manga/Dropout
http://2.bp.blogspot.com/.../000.png
http://2.bp.blogspot.com/.../001.png
...

- specifying it twice (or even more often) will go a level deeper and
  print the image URLs found in those chapters
This commit is contained in:
Mike Fährmann
2017-02-17 22:18:16 +01:00
parent 9d36acbbc4
commit 3bca866185
2 changed files with 10 additions and 2 deletions

View File

@@ -30,7 +30,7 @@ def build_cmdline_parser():
parser = argparse.ArgumentParser(
description='Download images from various sources')
parser.add_argument(
"-g", "--get-urls", dest="list_urls", action="store_true",
"-g", "--get-urls", dest="list_urls", action="count",
help="print download urls",
)
parser.add_argument(
@@ -141,6 +141,7 @@ def main():
if args.list_urls:
jobtype = job.UrlJob
jobtype.maxdepth = args.list_urls
elif args.list_keywords:
jobtype = job.KeywordJob
else:

View File

@@ -153,13 +153,20 @@ class KeywordJob(Job):
class UrlJob(Job):
"""Print download urls"""
maxdepth = -1
def __init__(self, url, depth=1):
Job.__init__(self, url)
self.depth = depth
if depth == self.maxdepth:
self.handle_queue = print
def handle_url(self, url, _):
print(url)
def handle_queue(self, url):
try:
UrlJob(url).run()
UrlJob(url, self.depth + 1).run()
except exception.NoExtractorError:
pass