2019-04-27 21:26:24 +00:00
|
|
|
__package__ = 'archivebox.extractors'
|
|
|
|
|
|
|
|
import os
|
2022-09-12 20:34:02 +00:00
|
|
|
import sys
|
2020-09-15 19:05:48 +00:00
|
|
|
from pathlib import Path
|
2019-04-27 21:26:24 +00:00
|
|
|
|
2020-09-14 18:38:32 +00:00
|
|
|
from typing import Optional, List, Iterable, Union
|
2021-04-10 08:19:30 +00:00
|
|
|
from datetime import datetime, timezone
|
2020-09-08 14:36:06 +00:00
|
|
|
from django.db.models import QuerySet
|
2019-04-27 21:26:24 +00:00
|
|
|
|
2022-09-11 10:19:16 +00:00
|
|
|
from ..core.settings import ERROR_LOG
|
2019-04-27 21:26:24 +00:00
|
|
|
from ..index.schema import Link
|
2020-11-04 20:02:54 +00:00
|
|
|
from ..index.sql import write_link_to_sql_index
|
2019-04-27 21:26:24 +00:00
|
|
|
from ..index import (
|
|
|
|
load_link_details,
|
|
|
|
write_link_details,
|
|
|
|
)
|
|
|
|
from ..util import enforce_types
|
2020-07-22 16:02:13 +00:00
|
|
|
from ..logging_util import (
|
2020-07-13 15:26:30 +00:00
|
|
|
log_archiving_started,
|
|
|
|
log_archiving_paused,
|
|
|
|
log_archiving_finished,
|
2019-04-27 21:26:24 +00:00
|
|
|
log_link_archiving_started,
|
|
|
|
log_link_archiving_finished,
|
|
|
|
log_archive_method_started,
|
|
|
|
log_archive_method_finished,
|
|
|
|
)
|
2020-11-17 23:42:57 +00:00
|
|
|
from ..search import write_search_index
|
2019-04-27 21:26:24 +00:00
|
|
|
|
|
|
|
from .title import should_save_title, save_title
|
|
|
|
from .favicon import should_save_favicon, save_favicon
|
|
|
|
from .wget import should_save_wget, save_wget
|
2020-07-30 18:23:10 +00:00
|
|
|
from .singlefile import should_save_singlefile, save_singlefile
|
2020-08-07 13:05:17 +00:00
|
|
|
from .readability import should_save_readability, save_readability
|
2020-09-22 08:55:14 +00:00
|
|
|
from .mercury import should_save_mercury, save_mercury
|
2019-04-27 21:26:24 +00:00
|
|
|
from .pdf import should_save_pdf, save_pdf
|
|
|
|
from .screenshot import should_save_screenshot, save_screenshot
|
|
|
|
from .dom import should_save_dom, save_dom
|
|
|
|
from .git import should_save_git, save_git
|
|
|
|
from .media import should_save_media, save_media
|
|
|
|
from .archive_org import should_save_archive_dot_org, save_archive_dot_org
|
2020-09-11 14:06:52 +00:00
|
|
|
from .headers import should_save_headers, save_headers
|
2019-04-27 21:26:24 +00:00
|
|
|
|
2020-11-23 18:04:38 +00:00
|
|
|
|
2020-07-31 15:24:58 +00:00
|
|
|
def get_default_archive_methods():
    """Return the default extractors as an ordered list of triples.

    Each entry is ``(name, should_save_fn, save_fn)`` where ``should_save_fn``
    decides whether the extractor needs to run for a given link and ``save_fn``
    performs the actual extraction.

    The ordering is significant: title and readability must come after wget
    and singlefile because they read those extractors' output files.
    """
    default_methods = [
        ('favicon', should_save_favicon, save_favicon),
        ('headers', should_save_headers, save_headers),
        ('singlefile', should_save_singlefile, save_singlefile),
        ('pdf', should_save_pdf, save_pdf),
        ('screenshot', should_save_screenshot, save_screenshot),
        ('dom', should_save_dom, save_dom),
        ('wget', should_save_wget, save_wget),
        # keep title and readability below wget and singlefile, as they depend on them
        ('title', should_save_title, save_title),
        ('readability', should_save_readability, save_readability),
        ('mercury', should_save_mercury, save_mercury),
        ('git', should_save_git, save_git),
        ('media', should_save_media, save_media),
        ('archive_org', should_save_archive_dot_org, save_archive_dot_org),
    ]
    return default_methods
|
2020-07-31 15:24:58 +00:00
|
|
|
|
2020-11-23 18:04:38 +00:00
|
|
|
# Preference order for which extractor's output is used as the page text when
# building the search index: lower rank wins (readability output is preferred,
# falling back to singlefile, then dom, then wget).
ARCHIVE_METHODS_INDEXING_PRECEDENCE = [('readability', 1), ('singlefile', 2), ('dom', 3), ('wget', 4)]
|
|
|
|
|
2020-07-31 15:24:58 +00:00
|
|
|
@enforce_types
def ignore_methods(to_ignore: List[str]) -> List[str]:
    """Return the names of all default archive methods NOT listed in *to_ignore*.

    :param to_ignore: extractor names (e.g. ``['media', 'git']``) to exclude
    :returns: the remaining extractor names, in their default run order
    """
    # Build a set once for O(1) membership tests instead of scanning the
    # to_ignore list for every method.
    ignored = set(to_ignore)
    return [name for name, _, _ in get_default_archive_methods() if name not in ignored]
|
2019-04-27 21:26:24 +00:00
|
|
|
|
|
|
|
@enforce_types
def archive_link(link: Link, overwrite: bool=False, methods: Optional[Iterable[str]]=None, out_dir: Optional[Path]=None) -> Link:
    """download the DOM, PDF, and a screenshot into a folder named after the link's timestamp

    Runs every enabled extractor for the given link, recording each result in
    link.history, the SQL index (ArchiveResult rows), and the search index.

    :param link: the Link to archive
    :param overwrite: re-run extractors even if output already exists
    :param methods: if given, restrict the run to these extractor names
    :param out_dir: destination folder; defaults to link.link_dir
    :returns: the (possibly updated) Link, with refreshed history and title
    """

    # TODO: Remove when the input is changed to be a snapshot. Suboptimal approach.
    from core.models import Snapshot, ArchiveResult
    try:
        # Look up the existing Snapshot row for this URL, if any.
        snapshot = Snapshot.objects.get(url=link.url) # TODO: This will be unnecessary once everything is a snapshot
    except Snapshot.DoesNotExist:
        # First time we've seen this URL: create its SQL index entry.
        snapshot = write_link_to_sql_index(link)

    ARCHIVE_METHODS = get_default_archive_methods()

    # Optionally narrow the run down to an explicit subset of extractors.
    if methods:
        ARCHIVE_METHODS = [
            method for method in ARCHIVE_METHODS
            if method[0] in methods
        ]

    out_dir = out_dir or Path(link.link_dir)
    try:
        # "new" means the snapshot folder doesn't exist yet on disk.
        is_new = not Path(out_dir).exists()
        if is_new:
            os.makedirs(out_dir)

        # Merge in any details already saved on disk for this link.
        link = load_link_details(link, out_dir=out_dir)
        write_link_details(link, out_dir=out_dir, skip_sql_index=False)
        log_link_archiving_started(link, out_dir, is_new)
        link = link.overwrite(updated=datetime.now(timezone.utc))
        stats = {'skipped': 0, 'succeeded': 0, 'failed': 0}
        start_ts = datetime.now(timezone.utc)

        for method_name, should_run, method_function in ARCHIVE_METHODS:
            try:
                # Ensure there is a history list to append this run's result to.
                if method_name not in link.history:
                    link.history[method_name] = []

                # should_run decides per-extractor whether output is missing
                # (or overwrite was requested) and the extractor is enabled.
                if should_run(link, out_dir, overwrite):
                    log_archive_method_started(method_name)

                    result = method_function(link=link, out_dir=out_dir)

                    link.history[method_name].append(result)

                    # result.status is one of 'skipped'/'succeeded'/'failed'
                    # (the keys of stats above).
                    stats[result.status] += 1
                    log_archive_method_finished(result)
                    # Feed any extracted text into the full-text search index.
                    write_search_index(link=link, texts=result.index_texts)
                    # Persist this run as an ArchiveResult row linked to the Snapshot.
                    ArchiveResult.objects.create(snapshot=snapshot, extractor=method_name, cmd=result.cmd, cmd_version=result.cmd_version,
                                                 output=result.output, pwd=result.pwd, start_ts=result.start_ts, end_ts=result.end_ts, status=result.status)


                    # bump the updated time on the main Snapshot here, this is critical
                    # to be able to cache summaries of the ArchiveResults for a given
                    # snapshot without having to load all the results from the DB each time.
                    # (we use {Snapshot.id}-{Snapshot.updated} as the cache key and assume
                    # ArchiveResults are unchanged as long as the updated timestamp is unchanged)
                    snapshot.save()
                else:
                    # print('{black}      X {}{reset}'.format(method_name, **ANSI))
                    stats['skipped'] += 1
            except Exception:
                # A failure in one extractor must not abort the others, so the
                # exception is logged to a file instead of being re-raised.
                # Disabled until https://github.com/ArchiveBox/ArchiveBox/issues/984
                # and https://github.com/ArchiveBox/ArchiveBox/issues/1014
                # are fixed.
                """
                raise Exception('Exception in archive_methods.save_{}(Link(url={}))'.format(
                    method_name,
                    link.url,
                )) from e
                """
                # Instead, use the kludgy workaround from
                # https://github.com/ArchiveBox/ArchiveBox/issues/984#issuecomment-1150541627
                with open(ERROR_LOG, "a", encoding='utf-8') as f:
                    command = ' '.join(sys.argv)
                    ts = datetime.now(timezone.utc).strftime('%Y-%m-%d__%H:%M:%S')
                    f.write(("\n" + 'Exception in archive_methods.save_{}(Link(url={})) command={}; ts={}'.format(
                        method_name,
                        link.url,
                        command,
                        ts
                    ) + "\n"))
                    #f.write(f"\n> {command}; ts={ts} version={config['VERSION']} docker={config['IN_DOCKER']} is_tty={config['IS_TTY']}\n")

        # print('    ', stats)

        # Adopt the freshest extracted title if it's at least as long as the
        # current one (longer titles are assumed to be more complete).
        # Best-effort: any failure (no title history, None output) is ignored.
        try:
            latest_title = link.history['title'][-1].output.strip()
            if latest_title and len(latest_title) >= len(link.title or ''):
                link = link.overwrite(title=latest_title)
        except Exception:
            pass

        # Write the final state (including updated history/title) back to disk and SQL.
        write_link_details(link, out_dir=out_dir, skip_sql_index=False)

        log_link_archiving_finished(link, link.link_dir, is_new, stats, start_ts)

    except KeyboardInterrupt:
        # On Ctrl-C, make a best-effort attempt to persist progress before
        # propagating the interrupt to the caller (archive_links handles it).
        try:
            write_link_details(link, out_dir=link.link_dir)
        except:
            pass
        raise

    except Exception as err:
        print('    ! Failed to archive link: {}: {}'.format(err.__class__.__name__, err))
        raise

    return link
|
2020-07-13 15:26:30 +00:00
|
|
|
|
|
|
|
@enforce_types
def archive_links(all_links: Union[Iterable[Link], QuerySet], overwrite: bool=False, methods: Optional[Iterable[str]]=None, out_dir: Optional[Path]=None) -> List[Link]:
    """Archive a collection of links sequentially, logging overall progress.

    :param all_links: either a Django QuerySet of Snapshots or an iterable of Links
    :param overwrite: re-run extractors even if output already exists
    :param methods: if given, restrict each run to these extractor names
    :param out_dir: unused here; each link is archived into its own link_dir
    :returns: the input collection (or [] if it was empty)
    """

    # A QuerySet yields Snapshot rows that must be converted to Links first.
    # isinstance (not `type(...) is`) so QuerySet subclasses returned by
    # custom managers are handled the same way.
    if isinstance(all_links, QuerySet):
        num_links: int = all_links.count()
        get_link = lambda x: x.as_link()
        all_links = all_links.iterator()
    else:
        num_links: int = len(all_links)
        get_link = lambda x: x

    if num_links == 0:
        return []

    log_archiving_started(num_links)
    idx: int = 0
    # Pre-bind `link` so the KeyboardInterrupt handler below doesn't hit a
    # NameError if the interrupt arrives before the first iteration.
    link = None
    try:
        for link in all_links:
            idx += 1
            to_archive = get_link(link)
            archive_link(to_archive, overwrite=overwrite, methods=methods, out_dir=Path(link.link_dir))
    except KeyboardInterrupt:
        # Record where we stopped so the user can resume from this timestamp.
        log_archiving_paused(num_links, idx, link.timestamp if link is not None else None)
        raise SystemExit(0)
    except BaseException:
        print()
        raise

    log_archiving_finished(num_links)
    return all_links
|