mirror of
https://github.com/ArchiveBox/ArchiveBox
synced 2024-11-26 06:00:22 +00:00
exclude links that are in the blacklist
This commit is contained in:
parent
417ee9e302
commit
4d10568477
2 changed files with 18 additions and 3 deletions
|
@ -270,6 +270,7 @@ except:
|
|||
|
||||
# Second-level domains whose URLs should be skipped during archiving.
_BLACKLISTED_DOMAINS = ('youtube', 'facebook', 'amazon', 'reddit')

# Case-insensitive pattern matching any URL under a blacklisted domain.
# The joined pattern is identical to writing the four alternatives out
# by hand: (.*\.youtube\.com)|(.*\.facebook\.com)|...
URL_BLACKLIST = re.compile(
    '|'.join(r'(.*\.%s\.com)' % domain for domain in _BLACKLISTED_DOMAINS),
    re.IGNORECASE,
)
|
|
|
@ -28,12 +28,18 @@ from util import (
|
|||
check_links_structure,
|
||||
)
|
||||
|
||||
from config import (
|
||||
URL_BLACKLIST,
|
||||
)
|
||||
|
||||
def validate_links(links):
|
||||
check_links_structure(links)
|
||||
links = archivable_links(links) # remove chrome://, about:, mailto: etc.
|
||||
links = uniquefied_links(links) # merge/dedupe duplicate timestamps & urls
|
||||
links = sorted_links(links) # deterministically sort the links based on timstamp, url
|
||||
links = exclude_links(links) # exclude links that are in blacklist
|
||||
|
||||
print(links)
|
||||
|
||||
if not links:
|
||||
print('[X] No links found :(')
|
||||
|
@ -43,6 +49,7 @@ def validate_links(links):
|
|||
link['title'] = unescape(link['title'].strip()) if link['title'] else None
|
||||
check_link_structure(link)
|
||||
|
||||
print("FINAL LIST", list(links))
|
||||
return list(links)
|
||||
|
||||
|
||||
|
@ -115,3 +122,10 @@ def lowest_uniq_timestamp(used_timestamps, timestamp):
|
|||
new_timestamp = '{}.{}'.format(timestamp, nonce)
|
||||
|
||||
return new_timestamp
|
||||
|
||||
def exclude_links(links):
    """Return only the links whose URL does not match URL_BLACKLIST."""

    def _is_allowed(link):
        # URL_BLACKLIST is a compiled regex; match() anchors at the
        # start of the URL string.
        return not URL_BLACKLIST.match(link['url'])

    return list(filter(_is_allowed, links))
|
Loading…
Reference in a new issue