__package__ = 'archivebox.extractors'

from pathlib import Path
from typing import Optional

from ..index.schema import Link, ArchiveResult, ArchiveOutput
from ..system import atomic_write
from ..util import (
    enforce_types,
    get_headers,
    dedupe,
)
from ..config import (
    TIMEOUT,
    CURL_BINARY,
    CURL_ARGS,
    CURL_EXTRA_ARGS,
    CURL_USER_AGENT,
    CURL_VERSION,
    CHECK_SSL_VALIDITY,
    SAVE_HEADERS,
)
from ..logging_util import TimedProgress


def get_output_path():
    return 'headers.json'


@enforce_types
def should_save_headers(link: Link, out_dir: Optional[str]=None, overwrite: Optional[bool]=False) -> bool:
    # coerce to Path here so the `/` join below works even when a str is passed in
    out_dir = Path(out_dir or link.link_dir)
    if not overwrite and (out_dir / get_output_path()).exists():
        return False

    return SAVE_HEADERS


@enforce_types
def save_headers(link: Link, out_dir: Optional[str]=None, timeout: int=TIMEOUT) -> ArchiveResult:
    """Download site headers"""

    out_dir = Path(out_dir or link.link_dir)
    output_folder = out_dir.absolute()
    output: ArchiveOutput = get_output_path()

    status = 'succeeded'
    timer = TimedProgress(timeout, prefix='      ')
    # later options take precedence: dedupe() below keeps only the last
    # occurrence of each conflicting curl flag
    options = [
        *CURL_ARGS,
        *CURL_EXTRA_ARGS,
        '--head',
        '--max-time', str(timeout),
        *(['--user-agent', '{}'.format(CURL_USER_AGENT)] if CURL_USER_AGENT else []),
        *([] if CHECK_SSL_VALIDITY else ['--insecure']),
    ]
    # cmd is recorded in the ArchiveResult for reference; the actual fetch
    # below is performed by get_headers(), not by running this command
    cmd = [
        CURL_BINARY,
        *dedupe(options),
        link.url,
    ]
    try:
        json_headers = get_headers(link.url, timeout=timeout)
        output_folder.mkdir(exist_ok=True)
        atomic_write(str(output_folder / get_output_path()), json_headers)
    except Exception as err:
        status = 'failed'
        output = err
    finally:
        timer.end()

    return ArchiveResult(
        cmd=cmd,
        pwd=str(out_dir),
        cmd_version=CURL_VERSION,
        output=output,
        status=status,
        **timer.stats,
    )
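

# Hypothetical usage sketch (illustration only, not part of this module):
# given a `link` loaded from the ArchiveBox index, the archiving pipeline
# calls these helpers roughly like so:
#
#     if should_save_headers(link, out_dir=link.link_dir, overwrite=False):
#         result = save_headers(link, timeout=TIMEOUT)
#         print(result.status, result.output)   # 'succeeded', 'headers.json'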