"""
|
2018-12-20 13:13:50 +00:00
|
|
|
In ArchiveBox, a Link represents a single entry that we track in the
|
2017-10-19 00:08:33 +00:00
|
|
|
json index. All links pass through all archiver functions and the latest,
|
2017-10-23 09:58:41 +00:00
|
|
|
most up-to-date canonical output for each is stored in "latest".
|
|
|
|
|
2017-10-19 00:08:33 +00:00
|
|
|
Link {
|
2019-03-22 19:09:39 +00:00
|
|
|
timestamp: str, (how we uniquely id links)
|
|
|
|
url: str,
|
|
|
|
title: str,
|
|
|
|
tags: str,
|
|
|
|
sources: [str],
|
2017-10-19 00:08:33 +00:00
|
|
|
history: {
|
|
|
|
pdf: [
|
2019-03-26 23:21:34 +00:00
|
|
|
{start_ts, end_ts, cmd, pwd, cmd_version, status, output},
|
2017-10-19 00:08:33 +00:00
|
|
|
...
|
|
|
|
],
|
2019-03-22 19:09:39 +00:00
|
|
|
...
|
2017-10-19 00:08:33 +00:00
|
|
|
},
|
|
|
|
}
|
|
|
|
"""

from typing import Iterable, Optional
from collections import OrderedDict

from schema import Link
from util import (
    scheme,
    fuzzy_url,
    merge_links,
    htmldecode,
    hashurl,
)


def validate_links(links: Iterable[Link]) -> Iterable[Link]:
    links = archivable_links(links)    # remove chrome://, about:, mailto: etc.
    links = sorted_links(links)        # deterministically sort the links based on timestamp, url
    links = uniquefied_links(links)    # merge/dedupe duplicate timestamps & urls

    if not links:
        print('[X] No links found :(')
        raise SystemExit(1)

    return links
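
# A minimal usage sketch (hypothetical caller, not part of this module): links
# parsed from an import source would typically be passed through validate_links
# before being written to the index, e.g.:
#
#   for link in validate_links(parsed_links):
#       write_link_to_index(link)    # write_link_to_index is hypothetical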


def archivable_links(links: Iterable[Link]) -> Iterable[Link]:
    """remove chrome://, about:// or other schemed links that can't be archived"""
    return (
        link
        for link in links
        if scheme(link.url) in ('http', 'https', 'ftp')
    )
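
# For example, given entries with the urls 'https://example.com/page' and
# 'chrome://settings', only the first would survive this filter (scheme() is
# assumed to return the lowercased URL scheme, e.g. 'https').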


def uniquefied_links(sorted_links: Iterable[Link]) -> Iterable[Link]:
    """
    merge links that share the same (fuzzy) url and make sure
    all remaining links have unique timestamps
    """

    unique_urls: OrderedDict[str, Link] = OrderedDict()

    for link in sorted_links:
        fuzzy = fuzzy_url(link.url)
        if fuzzy in unique_urls:
            # merge with any other links that share the same url
            link = merge_links(unique_urls[fuzzy], link)
        unique_urls[fuzzy] = link

    unique_timestamps: OrderedDict[str, Link] = OrderedDict()
    for link in unique_urls.values():
        new_link = Link(**{
            **link._asdict(),
            'timestamp': lowest_uniq_timestamp(unique_timestamps, link.timestamp),
        })
        unique_timestamps[new_link.timestamp] = new_link

    return unique_timestamps.values()
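
# For example, if two different urls were both imported with the timestamp
# '1544207744', the first keeps '1544207744' and the second is re-stamped
# '1544207744.0' by lowest_uniq_timestamp below, so both can live in the index.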


def sorted_links(links: Iterable[Link]) -> Iterable[Link]:
    # sort by the integer part of the timestamp, then by url, in descending order
    sort_func = lambda link: (link.timestamp.split('.', 1)[0], link.url)
    return sorted(links, key=sort_func, reverse=True)
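
# For example, entries stamped '1544207745' and '1544207744.5' would come out
# in that order: the '.5' nonce is ignored by the sort key, and (assuming the
# usual same-length unix-epoch timestamps) newer stamps sort first.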


def links_after_timestamp(links: Iterable[Link], resume: Optional[float]=None) -> Iterable[Link]:
    if not resume:
        yield from links
        return

    for link in links:
        try:
            # links are processed newest-first, so resuming means yielding only
            # the entries whose timestamp is at or below the resume value
            if float(link.timestamp) <= resume:
                yield link
        except (ValueError, TypeError):
            print('Resume value and all timestamp values must be valid numbers.')
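
# Hypothetical example: with links stamped '1544207746', '1544207745', and
# '1544207744' (newest first), links_after_timestamp(links, 1544207745.0) would
# yield the '1544207745' and '1544207744' entries and skip the newest one.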


def lowest_uniq_timestamp(used_timestamps: OrderedDict, timestamp: str) -> str:
    """resolve duplicate timestamps by appending a decimal nonce
    e.g. 1234, 1234, 1234 -> 1234, 1234.0, 1234.1
    """

    timestamp = timestamp.split('.')[0]
    nonce = 0

    # first try 152323423 before 152323423.0
    if timestamp not in used_timestamps:
        return timestamp

    new_timestamp = '{}.{}'.format(timestamp, nonce)
    while new_timestamp in used_timestamps:
        nonce += 1
        new_timestamp = '{}.{}'.format(timestamp, nonce)

    return new_timestamp
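
# A quick worked example (hypothetical values):
#
#   used = OrderedDict.fromkeys(['1544207744', '1544207744.0'])
#   lowest_uniq_timestamp(used, '1544207744')   # -> '1544207744.1'
#   lowest_uniq_timestamp(used, '1544207745')   # -> '1544207745'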