__package__ = 'archivebox.search'

from pathlib import Path
from typing import List, Union

from django.db.models import QuerySet
from django.conf import settings

import abx
import archivebox

from archivebox.index.schema import Link
from archivebox.misc.util import enforce_types
from archivebox.misc.logging import stderr
from archivebox.config.common import SEARCH_BACKEND_CONFIG


def log_index_started(url):
    print('[green][*] Indexing url: {} in the search index[/]'.format(url))
    print()


def get_file_result_content(res, extra_path, use_pwd=False):
    if use_pwd:
        fpath = f'{res.pwd}/{res.output}'
    else:
        fpath = f'{res.output}'

    if extra_path:
        fpath = f'{fpath}/{extra_path}'

    with open(fpath, 'r', encoding='utf-8') as file:
        data = file.read()
    if data:
        return [data]
    return []


# This should be abstracted by a plugin interface for extractors
@enforce_types
def get_indexable_content(results: QuerySet):
    if not results:
        return []
    # Only use the first method available
    res = results.first()
    method = res.extractor
    if method not in ('readability', 'singlefile', 'dom', 'wget'):
        return []
    # This should come from a plugin interface

    # TODO: banish this duplication and get these from the extractor file
    if method == 'readability':
        return get_file_result_content(res, 'content.txt', use_pwd=True)
    elif method == 'singlefile':
        return get_file_result_content(res, '', use_pwd=True)
    elif method == 'dom':
        return get_file_result_content(res, '', use_pwd=True)
    elif method == 'wget':
        return get_file_result_content(res, '', use_pwd=True)


def import_backend():
    for backend in abx.as_dict(archivebox.pm.hook.get_SEARCHBACKENDS()).values():
        if backend.name == SEARCH_BACKEND_CONFIG.SEARCH_BACKEND_ENGINE:
            return backend
    raise Exception(f'Could not load {SEARCH_BACKEND_CONFIG.SEARCH_BACKEND_ENGINE} as search backend')

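# Note: the object returned by import_backend() is an opaque plugin. Based purely on
# how it is called elsewhere in this module, a backend is assumed to expose roughly
# this duck-typed interface (illustrative sketch only, names hypothetical):
#
#     class ExampleSearchBackend:
#         name = 'example'                                # matched against SEARCH_BACKEND_ENGINE
#
#         def index(self, snapshot_id: str, texts: List[str]) -> None: ...
#         def search(self, query: str) -> List[str]: ...  # returns matching Snapshot pks
#         def flush(self, snapshot_pks) -> None: ...

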
@enforce_types
def write_search_index(link: Link, texts: Union[List[str], None]=None, out_dir: Path=settings.DATA_DIR, skip_text_index: bool=False) -> None:
    if not SEARCH_BACKEND_CONFIG.USE_INDEXING_BACKEND:
        return

    if not skip_text_index and texts:
        from core.models import Snapshot

        snap = Snapshot.objects.filter(url=link.url).first()
        backend = import_backend()
        if snap:
            try:
                backend.index(snapshot_id=str(snap.pk), texts=texts)
            except Exception as err:
                stderr()
                stderr(
                    f'[X] The search backend threw an exception={err}:',
                    color='red',
                )

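# Example usage (hypothetical values): index the extracted text for one Link's snapshot.
#     write_search_index(link, texts=['full page text', 'readability text'])

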
@enforce_types
def query_search_index(query: str, out_dir: Path=settings.DATA_DIR) -> QuerySet:
    from core.models import Snapshot

    if SEARCH_BACKEND_CONFIG.USE_SEARCHING_BACKEND:
        backend = import_backend()
        try:
            snapshot_pks = backend.search(query)
        except Exception as err:
            stderr()
            stderr(
                f'[X] The search backend threw an exception={err}:',
                color='red',
            )
            raise
        else:
            # TODO: preserve ordering from backend
            qsearch = Snapshot.objects.filter(pk__in=snapshot_pks)
            return qsearch

    return Snapshot.objects.none()

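# Example usage (hypothetical query): returns a QuerySet of matching Snapshots,
# or Snapshot.objects.none() when USE_SEARCHING_BACKEND is disabled.
#     matching_snapshots = query_search_index('some search term')

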
@enforce_types
def flush_search_index(snapshots: QuerySet):
    if not SEARCH_BACKEND_CONFIG.USE_INDEXING_BACKEND or not snapshots:
        return
    backend = import_backend()
    snapshot_pks = (str(pk) for pk in snapshots.values_list('pk', flat=True))
    try:
        backend.flush(snapshot_pks)
    except Exception as err:
        stderr()
        stderr(
            f'[X] The search backend threw an exception={err}:',
            color='red',
        )


@enforce_types
def index_links(links: Union[List[Link], None], out_dir: Path=settings.DATA_DIR):
    if not links:
        return

    from core.models import Snapshot, ArchiveResult

    for link in links:
        snap = Snapshot.objects.filter(url=link.url).first()
        if snap:
            results = ArchiveResult.objects.indexable().filter(snapshot=snap)
            log_index_started(link.url)
            try:
                texts = get_indexable_content(results)
            except Exception as err:
                stderr()
                stderr(
                    f'[X] An exception occurred while reading the indexable content={err}:',
                    color='red',
                )
            else:
                write_search_index(link, texts, out_dir=out_dir)