#!/usr/bin/env python3

__package__ = 'archivebox.cli'
__command__ = 'archivebox add'
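
# Example usage (illustrative only; the URLs/tags below are hypothetical, the
# flags are the ones defined in main() further down):
#
#   archivebox add 'https://example.com'
#   archivebox add --depth=1 --tag=news 'https://example.com/feed.xml'
#   echo 'https://example.com' | archivebox add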

import sys
import argparse

from typing import IO, TYPE_CHECKING

from django.utils import timezone
from django.db.models import QuerySet

from archivebox import CONSTANTS
from archivebox.config.common import ARCHIVING_CONFIG
from archivebox.config.django import setup_django
from archivebox.config.permissions import USER, HOSTNAME
from archivebox.misc.checks import check_data_folder
from archivebox.parsers import PARSERS
from archivebox.logging_util import SmartFormatter, accept_stdin, stderr

from abid_utils.models import get_or_create_system_user_pk

if TYPE_CHECKING:
    from core.models import Snapshot


# module-level Orchestrator handle (declared global in add() below, but not assigned there yet)
ORCHESTRATOR = None


def add(urls: str | list[str],
        tag: str='',
        depth: int=0,
        update: bool=not ARCHIVING_CONFIG.ONLY_NEW,
        update_all: bool=False,
        index_only: bool=False,
        overwrite: bool=False,
        extractors: str="",
        parser: str="auto",
        persona: str='Default',
        created_by_id: int | None=None) -> QuerySet['Snapshot']:
    """Add a new URL or list of URLs to your archive"""

    global ORCHESTRATOR

    assert depth in (0, 1), 'Depth must be 0 or 1 (depth >1 is not supported yet)'

    # 0. setup abx, django, check_data_folder
    setup_django()
    check_data_folder()

    from seeds.models import Seed
    from crawls.models import Crawl
    from actors.orchestrator import Orchestrator

    created_by_id = created_by_id or get_or_create_system_user_pk()

    # 1. save the provided urls to sources/2024-11-05__23-59-59__cli_add.txt
    sources_file = CONSTANTS.SOURCES_DIR / f'{timezone.now().strftime("%Y-%m-%d__%H-%M-%S")}__cli_add.txt'
    sources_file.write_text(urls if isinstance(urls, str) else '\n'.join(urls))

    # 2. create a new Seed pointing to the sources/2024-11-05__23-59-59__cli_add.txt
    cmd = ' '.join(sys.argv)
    seed = Seed.from_file(sources_file, label=f'{USER}@{HOSTNAME} $ {cmd}', parser=parser, tag=tag, created_by=created_by_id, config={
        'ONLY_NEW': not update,
        'INDEX_ONLY': index_only,
        'OVERWRITE': overwrite,
        'EXTRACTORS': extractors,
        'DEFAULT_PERSONA': persona or 'Default',
    })
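    # (the config dict above records the CLI flags as per-seed config overrides;
    #  presumably these apply to every snapshot created by the resulting crawl)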

    # 3. create a new Crawl pointing to the Seed
    crawl = Crawl.from_seed(seed, max_depth=depth)

    # 4. start the Orchestrator & wait until it completes
    # ... the orchestrator will create the root Snapshot, which creates pending ArchiveResults, which get run by the ArchiveResultActors ...
    # from crawls.actors import CrawlActor
    # from core.actors import SnapshotActor, ArchiveResultActor
    orchestrator = Orchestrator(exit_on_idle=True)
    orchestrator.start()
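    # (exit_on_idle=True is assumed to make start() return once all queued work
    #  is finished, i.e. the "wait until it completes" described above)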

    # 5. return the list of new Snapshots created
    return crawl.snapshot_set.all()
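

# A minimal sketch of calling add() programmatically (hypothetical example; the
# URL and tag are illustrative). add() returns a Django QuerySet of Snapshots:
#
#   snapshots = add(['https://example.com'], tag='docs', depth=0)
#   print(f'{snapshots.count()} snapshot(s) created')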


def main(args: list[str] | None=None, stdin: IO | None=None, pwd: str | None=None) -> None:
    """Add a new URL or list of URLs to your archive"""
    parser = argparse.ArgumentParser(
        prog=__command__,
        description=add.__doc__,
        add_help=True,
        formatter_class=SmartFormatter,
    )
    parser.add_argument(
        '--tag', '-t',
        type=str,
        default='',
        help="Tag the added URLs with the provided tags e.g. --tag=tag1,tag2,tag3",
    )
    parser.add_argument(
        '--update', #'-u',
        action='store_true',
        default=not ARCHIVING_CONFIG.ONLY_NEW,  # when ONLY_NEW=True we skip updating old links
        help="Also retry previously skipped/failed links when adding new links",
    )
    parser.add_argument(
        '--update-all', #'-n',
        action='store_true',
        default=False,
        help="Also update ALL links in the index when finished adding new links",
    )
    parser.add_argument(
        '--index-only', #'-o',
        action='store_true',
        help="Add the links to the main index without archiving them",
    )
    parser.add_argument(
        'urls',
        nargs='*',
        type=str,
        default=None,
        help=(
            'URLs or paths to archive e.g.:\n'
            '    https://getpocket.com/users/USERNAME/feed/all\n'
            '    https://example.com/some/rss/feed.xml\n'
            '    https://example.com\n'
            '    ~/Downloads/firefox_bookmarks_export.html\n'
            '    ~/Desktop/sites_list.csv\n'
        ),
    )
    parser.add_argument(
        "--depth",
        action="store",
        default=0,
        choices=[0, 1],
        type=int,
        help="Recursively archive all linked pages up to this many hops away",
    )
    parser.add_argument(
        "--overwrite",
        default=False,
        action="store_true",
        help="Re-archive URLs from scratch, overwriting any existing files",
    )
    parser.add_argument(
        "--extract", '-e',
        type=str,
        help="Comma-separated list of extractors to use e.g. --extract=title,favicon,wget. "
             "Unrecognized extractor names are ignored. This does not take precedence over the configuration.",
        default="",
    )
    parser.add_argument(
        "--parser",
        type=str,
        help="Parser used to read the input URLs.",
        default="auto",
        choices=["auto", *PARSERS.keys()],
    )
    parser.add_argument(
        "--persona",
        type=str,
        help="Name of the accounts persona to use when archiving.",
        default="Default",
    )

    command = parser.parse_args(args or ())
    urls = command.urls

    stdin_urls = ''
    if not urls:
        stdin_urls = accept_stdin(stdin)

    # error if URLs were passed both via stdin and as arguments, or neither way
    if (stdin_urls and urls) or (not stdin_urls and not urls):
        stderr(
            '[X] You must pass URLs/paths to add via stdin or CLI arguments.\n',
            color='red',
        )
        raise SystemExit(2)

    add(
        urls=stdin_urls or urls,
        depth=command.depth,
        tag=command.tag,
        update=command.update,
        update_all=command.update_all,
        index_only=command.index_only,
        overwrite=command.overwrite,
        extractors=command.extract,
        parser=command.parser,
        persona=command.persona,
    )


if __name__ == '__main__':
    main(args=sys.argv[1:], stdin=sys.stdin)