#!/usr/bin/env python3

__package__ = 'archivebox.cli'
__command__ = 'archivebox add'

import sys
import argparse

from typing import List, Optional, IO

from ..main import add
from ..util import docstring
from ..config import OUTPUT_DIR, ONLY_NEW
from ..logging_util import SmartFormatter, accept_stdin, stderr


@docstring(add.__doc__)
def main(args: Optional[List[str]]=None, stdin: Optional[IO]=None, pwd: Optional[str]=None) -> None:
    parser = argparse.ArgumentParser(
        prog=__command__,
        description=add.__doc__,
        add_help=True,
        formatter_class=SmartFormatter,
    )
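    # NOTE: SmartFormatter (imported from ..logging_util above) is assumed here to
    # preserve the explicit '\n' line breaks in the multi-line help strings below
    # when argparse renders `archivebox add --help`.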
    parser.add_argument(
        '--update-all', #'-n',
        action='store_true',
        default=not ONLY_NEW,  # when ONLY_NEW=True we skip updating old links
        help="Also retry previously skipped/failed links when adding new links",
    )
    parser.add_argument(
        '--index-only', #'-o',
        action='store_true',
        help="Add the links to the main index without archiving them",
    )
    parser.add_argument(
        'urls',
        nargs='*',
        type=str,
        default=None,
        help=(
            'URLs or paths to archive e.g.:\n'
            '    https://getpocket.com/users/USERNAME/feed/all\n'
            '    https://example.com/some/rss/feed.xml\n'
            '    https://example.com\n'
            '    ~/Downloads/firefox_bookmarks_export.html\n'
            '    ~/Desktop/sites_list.csv\n'
        )
    )
    parser.add_argument(
        "--depth",
        action="store",
        default=0,
        choices=[0, 1],
        type=int,
        help="Recursively archive all linked pages up to this many hops away",
    )
    parser.add_argument(
        "--overwrite",
        default=False,
        action="store_true",
        help="Re-archive URLs from scratch, overwriting any existing files",
    )
    parser.add_argument(
        "--init", #'-i',
        action='store_true',
        help="Init/upgrade the current data directory before adding",
    )
    parser.add_argument(
        "--extract",
        type=str,
        help="Pass a list of the extractors to use. If an extractor name is not "
             "recognized, it is ignored. This does not take precedence over the configuration",
        default="",
    )
    command = parser.parse_args(args or ())
    urls = command.urls
    stdin_urls = accept_stdin(stdin)
    if (stdin_urls and urls) or (not stdin and not urls):
        stderr(
            '[X] You must pass URLs/paths to add via stdin or CLI arguments.\n',
            color='red',
        )
        raise SystemExit(2)
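    # Either the stdin-supplied URLs or the CLI-argument URLs (never both) are
    # forwarded to main.add() below; mixing the two sources is rejected above.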
    add(
        urls=stdin_urls or urls,
        depth=command.depth,
        update_all=command.update_all,
        index_only=command.index_only,
        overwrite=command.overwrite,
        init=command.init,
        out_dir=pwd or OUTPUT_DIR,
        extractors=command.extract,
    )


if __name__ == '__main__':
    main(args=sys.argv[1:], stdin=sys.stdin)


# TODO: Implement these
#
# parser.add_argument(
#     '--mirror', #'-m',
#     action='store_true',
#     help='Archive an entire site (finding all linked pages below it on the same domain)',
# )
# parser.add_argument(
#     '--crawler', #'-r',
#     choices=('depth_first', 'breadth_first'),
#     help='Controls which crawler to use in order to find outlinks in a given page',
#     default=None,
# )
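# Example invocations (illustrative only, based on the arguments defined above;
# exact behavior depends on the installed ArchiveBox version and config):
#
#   archivebox add 'https://example.com'
#   archivebox add --depth=1 'https://example.com/some/rss/feed.xml'
#   archivebox add --index-only 'https://example.com'
#   echo 'https://example.com' | archivebox add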