2019-04-27 21:26:24 +00:00
|
|
|
__package__ = 'archivebox.extractors'
|
|
|
|
|
2020-09-15 19:05:48 +00:00
|
|
|
from pathlib import Path
|
2019-04-27 21:26:24 +00:00
|
|
|
|
|
|
|
from typing import Optional
|
|
|
|
|
|
|
|
from ..index.schema import Link, ArchiveResult, ArchiveOutput
|
2020-06-26 02:14:40 +00:00
|
|
|
from ..system import chmod_file, run
|
2024-02-21 21:13:06 +00:00
|
|
|
from ..util import (
|
|
|
|
enforce_types,
|
|
|
|
domain,
|
|
|
|
dedupe,
|
|
|
|
)
|
2019-04-27 21:26:24 +00:00
|
|
|
from ..config import (
|
|
|
|
TIMEOUT,
|
|
|
|
SAVE_FAVICON,
|
2023-05-06 01:42:36 +00:00
|
|
|
FAVICON_PROVIDER,
|
2019-04-27 21:26:24 +00:00
|
|
|
CURL_BINARY,
|
2020-10-15 13:49:54 +00:00
|
|
|
CURL_ARGS,
|
2024-02-21 21:13:06 +00:00
|
|
|
CURL_EXTRA_ARGS,
|
2019-04-27 21:26:24 +00:00
|
|
|
CURL_VERSION,
|
|
|
|
CHECK_SSL_VALIDITY,
|
2020-06-26 01:30:29 +00:00
|
|
|
CURL_USER_AGENT,
|
2019-04-27 21:26:24 +00:00
|
|
|
)
|
2020-07-22 16:02:13 +00:00
|
|
|
from ..logging_util import TimedProgress
|
2019-04-27 21:26:24 +00:00
|
|
|
|
|
|
|
|
|
|
|
@enforce_types
def should_save_favicon(link: Link, out_dir: Optional[str]=None, overwrite: Optional[bool]=False) -> bool:
    """Decide whether the favicon extractor should run for this snapshot.

    Returns False when a favicon.ico already exists in the output dir and
    overwrite was not requested; otherwise defers to the SAVE_FAVICON config.

    :param link: the Link whose link_dir is used when out_dir is not given
    :param out_dir: snapshot output directory (str or Path); defaults to link.link_dir
    :param overwrite: when True, ignore any existing favicon.ico and re-download
    """
    # Coerce to Path unconditionally: the annotation allows a plain str,
    # and the `/` join below requires a Path (a str arg would raise TypeError).
    out_dir = Path(out_dir) if out_dir else Path(link.link_dir)

    if not overwrite and (out_dir / 'favicon.ico').exists():
        return False

    return SAVE_FAVICON
|
2021-01-21 21:45:11 +00:00
|
|
|
|
2019-04-27 21:26:24 +00:00
|
|
|
@enforce_types
def save_favicon(link: Link, out_dir: Optional[Path]=None, timeout: int=TIMEOUT) -> ArchiveResult:
    """download site favicon from google's favicon api"""
    out_dir = out_dir or link.link_dir
    output: ArchiveOutput = 'favicon.ico'

    # Build the curl flag list; dedupe() collapses repeated flags such that
    # later entries take precedence (so CURL_EXTRA_ARGS can override CURL_ARGS).
    curl_flags = [
        *CURL_ARGS,
        *CURL_EXTRA_ARGS,
        '--max-time', str(timeout),
        '--output', str(output),
        *(['--user-agent', f'{CURL_USER_AGENT}'] if CURL_USER_AGENT else []),
        *(['--insecure'] if not CHECK_SSL_VALIDITY else []),
    ]
    cmd = [
        CURL_BINARY,
        *dedupe(*curl_flags),
        FAVICON_PROVIDER.format(domain(link.url)),
    ]

    status = 'failed'
    timer = TimedProgress(timeout, prefix='      ')
    try:
        run(cmd, cwd=str(out_dir), timeout=timeout)
        chmod_file(output, cwd=str(out_dir))
        status = 'succeeded'
    except Exception as err:
        # surface the exception itself as the result output for the index
        output = err
    finally:
        timer.end()

    return ArchiveResult(
        cmd=cmd,
        pwd=str(out_dir),
        cmd_version=CURL_VERSION,
        output=output,
        status=status,
        **timer.stats,
    )
|