# ArchiveBox/archivebox/logging_util.py
__package__ = 'archivebox'

import re
import os
import sys
import time
import argparse

from multiprocessing import Process
from datetime import datetime
from dataclasses import dataclass
from typing import Optional, List, Dict, Union, IO, TYPE_CHECKING

if TYPE_CHECKING:
    from .index.schema import Link, ArchiveResult

from .util import enforce_types
from .config import (
    ConfigDict,
    PYTHON_ENCODING,
    ANSI,
    IS_TTY,
    TERM_WIDTH,
    OUTPUT_DIR,
    SOURCES_DIR_NAME,
    HTML_INDEX_FILENAME,
    stderr,
)
@dataclass
class RuntimeStats:
"""mutable stats counter for logging archiving timing info to CLI output"""
2019-03-21 05:28:12 +00:00
skipped: int = 0
succeeded: int = 0
failed: int = 0
2019-03-21 05:28:12 +00:00
parse_start_ts: Optional[datetime] = None
parse_end_ts: Optional[datetime] = None
index_start_ts: Optional[datetime] = None
index_end_ts: Optional[datetime] = None
archiving_start_ts: Optional[datetime] = None
archiving_end_ts: Optional[datetime] = None
# globals are bad, mmkay
_LAST_RUN_STATS = RuntimeStats()
2019-03-21 05:28:12 +00:00
2019-05-01 03:13:04 +00:00
class SmartFormatter(argparse.HelpFormatter):
    """Patched formatter that prints newlines in argparse help strings"""

    def _split_lines(self, text, width):
        # honor explicit newlines in the help text; otherwise fall back
        # to argparse's default width-based wrapping
        if '\n' in text:
            return text.splitlines()
        return super()._split_lines(text, width)
def reject_stdin(caller: str, stdin: Optional[IO]=sys.stdin) -> None:
    """Tell the user they passed stdin to a command that doesn't accept it"""

    # nothing to do unless stdin is an actual pipe with real content in it
    if stdin is None or stdin.isatty():
        return

    piped_text = stdin.read().strip()
    if not piped_text:
        return

    stderr(f'[X] The "{caller}" command does not accept stdin.', color='red')
    stderr(f' Run archivebox "{caller} --help" to see usage and examples.')
    stderr()
    raise SystemExit(1)
2019-05-01 03:13:04 +00:00
def accept_stdin(stdin: Optional[IO]=sys.stdin) -> Optional[str]:
    """accept any standard input and return it as a string or None"""

    # no stdin object at all, or an interactive terminal -> no piped input
    if stdin is None or stdin.isatty():
        return None

    text = stdin.read().strip()
    # empty/whitespace-only input is treated the same as no input
    return text or None
class TimedProgress:
    """Show a progress bar and measure elapsed time until .end() is called"""

    def __init__(self, seconds, prefix=''):
        from .config import SHOW_PROGRESS
        self.SHOW_PROGRESS = SHOW_PROGRESS

        if self.SHOW_PROGRESS:
            # render the bar from a child process so it animates while the
            # parent does the actual archiving work
            self.p = Process(target=progress_bar, args=(seconds, prefix))
            self.p.start()

        self.stats = {'start_ts': datetime.now(), 'end_ts': None}

    def end(self):
        """immediately end progress, clear the progressbar line, and save end_ts"""

        self.stats['end_ts'] = datetime.now()

        if self.SHOW_PROGRESS:
            # terminate if we havent already terminated
            self.p.terminate()
            self.p.join()
            self.p.close()

            # clear whole terminal line
            try:
                sys.stdout.write('\r{}{}\r'.format((' ' * TERM_WIDTH()), ANSI['reset']))
            except (IOError, BrokenPipeError):
                # ignore when the parent proc has stopped listening to our stdout
                pass
@enforce_types
def progress_bar(seconds: int, prefix: str='') -> None:
    """show timer in the form of progress bar, with percentage and seconds remaining"""

    # use a unicode block char for the bar when the terminal supports it
    chunk = '█' if PYTHON_ENCODING == 'UTF-8' else '#'
    last_width = TERM_WIDTH()
    # number of progress chunks to show (aka max bar width);
    # clamp to >= 1 so tiny terminals can't cause ZeroDivisionError below
    chunks = max(last_width - len(prefix) - 20, 1)
    try:
        for s in range(seconds * chunks):
            max_width = TERM_WIDTH()
            if max_width < last_width:
                # when the terminal size is shrunk, we have to write a newline
                # otherwise the progress bar will keep wrapping incorrectly
                sys.stdout.write('\r\n')
                sys.stdout.flush()
            chunks = max(max_width - len(prefix) - 20, 1)
            progress = s / chunks / seconds * 100
            bar_width = round(progress/(100/chunks))
            last_width = max_width

            # ████████████████████ 0.9% (1/60sec)
            sys.stdout.write('\r{0}{1}{2}{3} {4}% ({5}/{6}sec)'.format(
                prefix,
                ANSI['green'],
                (chunk * bar_width).ljust(chunks),
                ANSI['reset'],
                round(progress, 1),
                round(s/chunks),
                seconds,
            ))
            sys.stdout.flush()
            time.sleep(1 / chunks)

        # ██████████████████████████████████ 100.0% (60/60sec)
        sys.stdout.write('\r{0}{1}{2}{3} {4}% ({5}/{6}sec)\n'.format(
            prefix,
            ANSI['red'],
            chunk * chunks,
            ANSI['reset'],
            100.0,
            seconds,
            seconds,
        ))
        sys.stdout.flush()
    except (KeyboardInterrupt, BrokenPipeError):
        # user cancelled or parent stopped listening: just move to a new line
        print()
2019-03-21 05:28:12 +00:00
2020-07-13 15:26:30 +00:00
def log_cli_command(subcommand: str, subcommand_args: List[str], stdin: Optional[str], pwd: str):
    """print the version/command header line at the start of every CLI invocation"""
    from .config import VERSION, ANSI

    cmd = ' '.join(('archivebox', subcommand, *subcommand_args))
    # stdin here is the already-read piped input string (or None), not a file
    # object, so check its truthiness rather than calling .isatty() on it
    stdin_hint = ' < /dev/stdin' if stdin else ''
    stderr('{black}[i] [{now}] ArchiveBox v{VERSION}: {cmd}{stdin_hint}{reset}'.format(
        now=datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
        VERSION=VERSION,
        cmd=cmd,
        stdin_hint=stdin_hint,
        **ANSI,
    ))
    stderr('{black} > {pwd}{reset}'.format(pwd=pwd, **ANSI))
    stderr()
2020-07-13 15:26:30 +00:00
2019-03-23 01:38:24 +00:00
### Parsing Stage
2019-03-21 05:28:12 +00:00
2020-07-13 15:26:30 +00:00
def log_importing_started(urls: Union[str, List[str]], depth: int, index_only: bool):
    """print the header for the add/import stage and record its start time"""
    start_ts = datetime.now()
    _LAST_RUN_STATS.parse_start_ts = start_ts
    # urls may arrive as a newline-separated string or an already-split list
    url_count = len(urls) if isinstance(urls, list) else len(urls.split('\n'))
    print('{green}[+] [{}] Adding {} links to index (crawl depth={}){}...{reset}'.format(
        start_ts.strftime('%Y-%m-%d %H:%M:%S'),
        url_count,
        depth,
        ' (index only)' if index_only else '',
        **ANSI,
    ))
2020-07-13 15:26:30 +00:00
def log_source_saved(source_file: str):
    """report where the verbatim copy of the input was written under sources/"""
    filename = source_file.rsplit('/', 1)[-1]
    print(' > Saved verbatim input to {}/{}'.format(SOURCES_DIR_NAME, filename))
def log_parsing_finished(num_parsed: int, parser_name: str):
    """record the end of the parsing stage and report which parser matched"""
    _LAST_RUN_STATS.parse_end_ts = datetime.now()
    print(' > Parsed {} URLs from input ({})'.format(num_parsed, parser_name))
def log_deduping_finished(num_new_links: int):
    """report how many parsed URLs were not already present in the index"""
    print(f' > Found {num_new_links} new URLs not already in index')
2020-06-30 09:55:34 +00:00
2019-03-21 05:28:12 +00:00
2020-07-13 15:26:30 +00:00
def log_crawl_started(new_links):
    """announce the start of a one-hop crawl out from the newly added links"""
    msg = '{lightred}[*] Starting crawl of {} sites 1 hop out from starting point{reset}'
    print(msg.format(len(new_links), **ANSI))
2019-03-23 01:38:24 +00:00
### Indexing Stage
def log_indexing_process_started(num_links: int):
    """record the start of index writing and print the stage header"""
    start_ts = datetime.now()
    _LAST_RUN_STATS.index_start_ts = start_ts
    print()
    print('{black}[*] [{}] Writing {} links to main index...{reset}'.format(
        start_ts.strftime('%Y-%m-%d %H:%M:%S'),
        num_links,
        **ANSI,
    ))
2020-06-30 09:55:34 +00:00
def log_indexing_process_finished():
    """record when writing of the main index finished"""
    _LAST_RUN_STATS.index_end_ts = datetime.now()
2020-06-30 09:55:34 +00:00
def log_indexing_started(out_path: str):
    """print the index file currently being written (no trailing newline,
    so log_indexing_finished can overwrite the line with \\r)"""
    if IS_TTY:
        sys.stdout.write(f' > {out_path}')
2020-06-30 09:55:34 +00:00
def log_indexing_finished(out_path: str):
    """overwrite the in-progress line with the finished index path"""
    sys.stdout.write(f'\r{out_path}\n')
2019-03-21 05:28:12 +00:00
2019-03-23 01:38:24 +00:00
### Archiving Stage
2019-04-27 21:26:24 +00:00
def log_archiving_started(num_links: int, resume: Optional[float]=None):
    """print the archiving-stage header and record its start timestamp"""
    start_ts = datetime.now()
    _LAST_RUN_STATS.archiving_start_ts = start_ts
    ts_str = start_ts.strftime('%Y-%m-%d %H:%M:%S')
    print()
    if resume:
        # resume is the timestamp of the link where the previous run stopped
        print('{green}[▶] [{}] Resuming archive updating for {} pages starting from {}...{reset}'.format(
            ts_str,
            num_links,
            resume,
            **ANSI,
        ))
    else:
        print('{green}[▶] [{}] Collecting content for {} Snapshots in archive...{reset}'.format(
            ts_str,
            num_links,
            **ANSI,
        ))
def log_archiving_paused(num_links: int, idx: int, timestamp: str):
    """print resume instructions when the archiving loop is interrupted mid-run"""
    end_ts = datetime.now()
    _LAST_RUN_STATS.archiving_end_ts = end_ts
    print()
    print('\n{lightyellow}[X] [{now}] Downloading paused on link {timestamp} ({idx}/{total}){reset}'.format(
        **ANSI,
        now=end_ts.strftime('%Y-%m-%d %H:%M:%S'),
        idx=idx+1,
        timestamp=timestamp,
        total=num_links,
    ))
    print()
    print(' {lightred}Hint:{reset} To view your archive index, open:'.format(**ANSI))
    print(f' {OUTPUT_DIR}/{HTML_INDEX_FILENAME}')
    print(' Continue archiving where you left off by running:')
    print(f' archivebox update --resume={timestamp}')
2019-03-21 05:28:12 +00:00
def log_archiving_finished(num_links: int):
    """print the summary footer with outcome counts and elapsed time"""
    end_ts = datetime.now()
    _LAST_RUN_STATS.archiving_end_ts = end_ts
    # archiving_start_ts is always set by log_archiving_started before this runs
    assert _LAST_RUN_STATS.archiving_start_ts is not None
    seconds = end_ts.timestamp() - _LAST_RUN_STATS.archiving_start_ts.timestamp()
    duration = f'{seconds / 60:.2f} min' if seconds > 60 else f'{seconds:.2f} sec'

    print()
    print('{}[√] [{}] Update of {} pages complete ({}){}'.format(
        ANSI['green'],
        end_ts.strftime('%Y-%m-%d %H:%M:%S'),
        num_links,
        duration,
        ANSI['reset'],
    ))
    print(f' - {_LAST_RUN_STATS.skipped} links skipped')
    print(f' - {_LAST_RUN_STATS.succeeded + _LAST_RUN_STATS.failed} links updated')
    print(f' - {_LAST_RUN_STATS.failed} links had errors')
    print()
    print(' {lightred}Hint:{reset} To view your archive index, open:'.format(**ANSI))
    print(f' {OUTPUT_DIR}/{HTML_INDEX_FILENAME}')
    print(' Or run the built-in webserver:')
    print(' archivebox server')
2019-03-23 01:38:24 +00:00
def log_link_archiving_started(link: "Link", link_dir: str, is_new: bool):
    """print the per-link header: status symbol, timestamp, title, url, output dir"""

    # [*] [2019-03-22 13:46:45] "Log Structured Merge Trees - ben stopford"
    # http://www.benstopford.com/2015/02/14/log-structured-merge-trees/
    # > output/archive/1478739709
    symbol_color = ANSI['green' if is_new else 'black']
    print('\n[{symbol_color}{symbol}{reset}] [{symbol_color}{now}{reset}] "{title}"'.format(
        symbol_color=symbol_color,
        symbol='+' if is_new else '',
        now=datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
        title=link.title or link.base_url,
        **ANSI,
    ))
    print(' {blue}{url}{reset}'.format(url=link.url, **ANSI))
    print(' {} {}'.format(
        '>' if is_new else '',
        pretty_path(link_dir),
    ))
def log_link_archiving_finished(link: "Link", link_dir: str, is_new: bool, stats: dict):
    """bump the global run counters based on this link's per-method outcome stats"""
    total = sum(stats.values())

    # a single failed method marks the whole link failed; all-skipped means
    # nothing new was fetched; anything else counts as a success
    if stats['failed'] > 0:
        _LAST_RUN_STATS.failed += 1
    elif stats['skipped'] == total:
        _LAST_RUN_STATS.skipped += 1
    else:
        _LAST_RUN_STATS.succeeded += 1
2019-03-23 01:38:24 +00:00
def log_archive_method_started(method: str):
    """print the name of the extractor method about to run"""
    print(f' > {method}')
def log_archive_method_finished(result: "ArchiveResult"):
    """quote the argument with whitespace in a command so the user can
    copy-paste the outputted string directly to run the cmd
    """

    # Prettify CMD string and make it safe to copy-paste by quoting arguments
    quoted_cmd = ' '.join(
        '"{}"'.format(arg) if ' ' in arg else arg
        for arg in result.cmd
    )

    # only failures produce output; successes are silent here
    if result.status != 'failed':
        return

    # Prettify error output hints string and limit to five lines
    hints = getattr(result.output, 'hints', None) or ()
    if hints:
        if not isinstance(hints, (list, tuple)):
            hints = hints.split('\n')
        hints = [
            ' {}{}{}'.format(ANSI['lightyellow'], line.strip(), ANSI['reset'])
            for line in hints[:5] if line.strip()
        ]

    # Collect and prefix output lines with indentation
    output_lines = [
        '{lightred}Failed:{reset}'.format(**ANSI),
        ' {reset}{} {red}{}{reset}'.format(
            result.output.__class__.__name__.replace('ArchiveError', ''),
            result.output,
            **ANSI,
        ),
        *hints,
        '{}Run to see full output:{}'.format(ANSI['lightred'], ANSI['reset']),
        *([' cd {};'.format(result.pwd)] if result.pwd else []),
        ' {}'.format(quoted_cmd),
    ]
    print('\n'.join(
        ' {}'.format(line)
        for line in output_lines
        if line
    ))
    print()
2019-04-11 12:11:32 +00:00
2019-04-27 21:26:24 +00:00
def log_list_started(filter_patterns: Optional[List[str]], filter_type: str):
    """print which filter patterns are being used to search the index"""
    print('{green}[*] Finding links in the archive index matching these {} patterns:{reset}'.format(
        filter_type,
        **ANSI,
    ))
    patterns = ' '.join(filter_patterns or ())
    print(f' {patterns}')
2019-04-11 12:11:32 +00:00
def log_list_finished(links):
    """print the matching links as a CSV table framed by divider lines"""
    from .index.csv import links_to_csv

    divider = '---------------------------------------------------------------------------------------------------'
    print()
    print(divider)
    print(links_to_csv(links, cols=['timestamp', 'is_archived', 'num_outputs', 'url'], header=True, ljust=16, separator=' | '))
    print(divider)
    print()
def log_removal_started(links: List["Link"], yes: bool, delete: bool):
    """summarize what removal will do and ask for confirmation unless yes=True"""
    print('{lightyellow}[i] Found {} matching URLs to remove.{reset}'.format(len(links), **ANSI))
    if delete:
        file_counts = [link.num_outputs for link in links if os.path.exists(link.link_dir)]
        print(
            f' {len(links)} Links will be de-listed from the main index, and their archived content folders will be deleted from disk.\n'
            f' ({len(file_counts)} data folders with {sum(file_counts)} archived files will be deleted!)'
        )
    else:
        print(
            ' Matching links will be de-listed from the main index, but their archived content folders will remain in place on disk.\n'
            ' (Pass --delete if you also want to permanently delete the data folders)'
        )

    if not yes:
        print()
        print('{lightyellow}[?] Do you want to proceed with removing these {} links?{reset}'.format(len(links), **ANSI))
        # use an explicit comparison instead of `assert` so the confirmation
        # still happens when python runs with -O (which strips asserts)
        try:
            answer = input(' y/[n]: ').lower()
        except (KeyboardInterrupt, EOFError):
            raise SystemExit(0)
        if answer != 'y':
            raise SystemExit(0)
def log_removal_finished(all_links: int, to_keep: int):
    """report how many links were removed and how many remain in the index"""
    if all_links == 0:
        print()
        print('{red}[X] No matching links found.{reset}'.format(**ANSI))
        return

    num_removed = all_links - to_keep
    print()
    print('{red}[√] Removed {} out of {} links from the archive index.{reset}'.format(
        num_removed,
        all_links,
        **ANSI,
    ))
    print(f' Index now contains {to_keep} links.')
2019-05-01 03:13:04 +00:00
def log_shell_welcome_msg():
    """print the banner and usage hints shown when `archivebox shell` starts"""
    from .cli import list_subcommands

    # show the imports the shell session starts with
    print('{green}# ArchiveBox Imports{reset}'.format(**ANSI))
    print('{green}from archivebox.core.models import Snapshot, User{reset}'.format(**ANSI))
    print('{green}from archivebox import *\n {}{reset}'.format("\n ".join(list_subcommands().keys()), **ANSI))
    print()
    print('[i] Welcome to the ArchiveBox Shell!')
    print(' https://github.com/pirate/ArchiveBox/wiki/Usage#Shell-Usage')
    print()
    print(' {lightred}Hint:{reset} Example use:'.format(**ANSI))
    print(' print(Snapshot.objects.filter(is_archived=True).count())')
    print(' Snapshot.objects.get(url="https://example.com").as_json()')
    print(' add("https://example.com/some/new/url")')
### Helpers
@enforce_types
def pretty_path(path: str) -> str:
    """convert paths like .../ArchiveBox/archivebox/../output/abc into output/abc"""
    cwd = os.path.abspath('.')
    # strip the current working directory prefix to get a short relative path
    return path.replace(cwd + '/', './')
@enforce_types
def printable_filesize(num_bytes: Union[int, float]) -> str:
    """format a byte count as a human-readable size string, e.g. '3.5 KB'"""
    for unit in ['Bytes', 'KB', 'MB', 'GB']:
        if -1024.0 < num_bytes < 1024.0:
            return '%3.1f %s' % (num_bytes, unit)
        num_bytes /= 1024.0
    # anything past GB is reported in TB
    return '%3.1f %s' % (num_bytes, 'TB')
@enforce_types
def printable_folders(folders: Dict[str, Optional["Link"]],
                      json: bool=False,
                      csv: Optional[str]=None) -> str:
    """render a folder->link mapping as JSON, CSV, or plain text lines"""
    if json:
        from .index.json import to_json
        return to_json(folders.values(), indent=4, sort_keys=True)

    if csv:
        from .index.csv import links_to_csv
        # csv is a comma-separated list of column names to include
        return links_to_csv(folders.values(), cols=csv.split(','), header=True)

    return '\n'.join(f'{folder} {link}' for folder, link in folders.items())
@enforce_types
def printable_config(config: ConfigDict, prefix: str='') -> str:
    """render a config mapping one KEY=value per line, skipping nested dicts and callables"""
    lines = (
        f'{key}={val}'
        for key, val in config.items()
        if not (isinstance(val, dict) or callable(val))
    )
    return f'\n{prefix}'.join(lines)
@enforce_types
def printable_folder_status(name: str, folder: Dict) -> str:
    """format one status line for a data folder: symbol, name, path, file count, note"""

    # default so num_files is never unbound when the folder is enabled+valid
    # but has no path set (previously raised NameError at num_files.ljust below)
    num_files = '-'
    if folder['enabled']:
        if folder['is_valid']:
            color, symbol, note = 'green', '', 'valid'
        else:
            color, symbol, note, num_files = 'red', 'X', 'invalid', '?'
    else:
        color, symbol, note, num_files = 'lightyellow', '-', 'disabled', '-'

    if folder['path']:
        if os.path.exists(folder['path']):
            num_files = (
                f'{len(os.listdir(folder["path"]))} files'
                if os.path.isdir(folder['path']) else
                printable_filesize(os.path.getsize(folder['path']))
            )
        else:
            num_files = 'missing'

        # quote paths containing spaces so the output is copy-paste safe
        if ' ' in folder['path']:
            folder['path'] = f'"{folder["path"]}"'

    return ' '.join((
        ANSI[color],
        symbol,
        ANSI['reset'],
        name.ljust(22),
        (folder["path"] or '').ljust(76),
        num_files.ljust(14),
        ANSI[color],
        note,
        ANSI['reset'],
    ))
@enforce_types
def printable_dependency_version(name: str, dependency: Dict) -> str:
    """format one status line for a binary dependency: symbol, name, path, version, note"""
    version = None
    if dependency['enabled']:
        if dependency['is_valid']:
            color, symbol, note, version = 'green', '', 'valid', ''
            # pull the first numeric token (e.g. 1.2.3) out of the raw version output
            parsed_version_num = re.search(r'[\d\.]+', dependency['version'])
            if parsed_version_num:
                version = f'v{parsed_version_num[0]}'
        if not version:
            color, symbol, note, version = 'red', 'X', 'invalid', '?'
    else:
        color, symbol, note, version = 'lightyellow', '-', 'disabled', '-'

    # quote paths containing spaces; guard against a missing/None path
    # (the ljust below already tolerates None via `or ''`, so this must too)
    if dependency["path"] and ' ' in dependency["path"]:
        dependency["path"] = f'"{dependency["path"]}"'

    return ' '.join((
        ANSI[color],
        symbol,
        ANSI['reset'],
        name.ljust(22),
        (dependency["path"] or '').ljust(76),
        version.ljust(14),
        ANSI[color],
        note,
        ANSI['reset'],
    ))