Rework fetching URLs from a file

author Maks Snegov  2019-10-22 12:15:31 +03:00
parent 91cddfab7c
commit bdceede4f2


@@ -161,17 +161,28 @@ def process_url(url):
 def main():
     parser = argparse.ArgumentParser(
-        description='Nevernote - download pages locally.')
-    parser.add_argument('urls', metavar='URL', type=str, nargs='+',
+        prog="nevernote.py",
+        description="Nevernote - tool for downloading pages locally."
+    )
+    parser.add_argument("-i", "--infile",
+                        help="File with URLs to download")
+    parser.add_argument('urls', metavar='URL', type=str, nargs='*',
                         help='URL of page to download')
     args = parser.parse_args()
 
+    # Process URLs from the file
+    if args.infile:
+        try:
+            fd = open(args.infile, 'r')
+        except OSError as err:
+            print(err)
+            return 1
+        for url in fd.readlines():
+            process_url(url.strip())
+        fd.close()
+
+    # Process URLs from CLI
     for arg in args.urls:
-        if os.path.isfile(arg):
-            print('Found file %s' % arg)
-            for url in (line.strip() for line in open(arg)):
-                process_url(url)
-        else:
-            process_url(arg)
+        process_url(arg)
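
The interface change behind this rework is that positional URL arguments become optional (nargs switches from '+' to '*'), so the script can be driven entirely by the new -i/--infile option. Below is a minimal sketch that reproduces only the parser setup from the diff and prints what argparse returns for each of the two invocation styles; the file name urls.txt and the example URL are placeholders, not part of the commit.

# Minimal sketch of the reworked argument handling (options copied from the
# diff above; "urls.txt" and the example URL are made-up illustrations).
import argparse

parser = argparse.ArgumentParser(
    prog="nevernote.py",
    description="Nevernote - tool for downloading pages locally."
)
parser.add_argument("-i", "--infile",
                    help="File with URLs to download")
parser.add_argument("urls", metavar="URL", type=str, nargs="*",
                    help="URL of page to download")

# URLs taken from a file:   nevernote.py -i urls.txt
print(parser.parse_args(["-i", "urls.txt"]))
# -> Namespace(infile='urls.txt', urls=[])

# URLs given on the CLI:    nevernote.py https://example.com
print(parser.parse_args(["https://example.com"]))
# -> Namespace(infile=None, urls=['https://example.com'])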