from typing import List, Union
from pathlib import Path
from importlib import import_module

from django.db.models import QuerySet

from archivebox.index.schema import Link
from archivebox.util import enforce_types
from archivebox.config import setup_django, stderr, OUTPUT_DIR, USE_INDEXING_BACKEND, USE_SEARCHING_BACKEND, SEARCH_BACKEND_ENGINE

from .utils import get_indexable_content, log_index_started
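
# Indexing (writing search data) and searching (querying it) are toggled
# independently by USE_INDEXING_BACKEND and USE_SEARCHING_BACKEND, while
# SEARCH_BACKEND_ENGINE selects which module under search.backends to load.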

def indexing_enabled():
    return USE_INDEXING_BACKEND

def search_backend_enabled():
    return USE_SEARCHING_BACKEND

def get_backend():
    return f'search.backends.{SEARCH_BACKEND_ENGINE}'

def import_backend():
    backend_string = get_backend()
    try:
        backend = import_module(backend_string)
    except Exception as err:
        raise Exception("Could not load '%s' as a backend: %s" % (backend_string, err))
    return backend
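
# Implied backend interface (informal, inferred from how the backend is used
# in this module): a backend module is expected to expose
#   index(snapshot_id: str, texts: List[str])  - store searchable text for one snapshot
#   search(query: str)                         - return an iterable of matching snapshot ids
#   flush(snapshot_ids)                        - remove the given snapshot ids from the index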

@enforce_types
def write_search_index(link: Link, texts: Union[List[str], None]=None, out_dir: Path=OUTPUT_DIR, skip_text_index: bool=False) -> None:
    if not indexing_enabled():
        return

    if not skip_text_index and texts:
        setup_django(out_dir, check_db=True)
        from core.models import Snapshot

        snap = Snapshot.objects.filter(url=link.url).first()
        backend = import_backend()
        if snap:
            try:
                backend.index(snapshot_id=str(snap.id), texts=texts)
            except Exception as err:
                stderr()
                stderr(
                    f'[X] The search backend threw an exception: {err}',
                    color='red',
                )

@enforce_types
def query_search_index(query: str, out_dir: Path=OUTPUT_DIR) -> QuerySet:
    setup_django(out_dir, check_db=True)
    from core.models import Snapshot

    if search_backend_enabled():
        backend = import_backend()
        try:
            snapshot_ids = backend.search(query)
        except Exception as err:
            stderr()
            stderr(
                f'[X] The search backend threw an exception: {err}',
                color='red',
            )
            raise
        else:
            # TODO: preserve ordering from backend
            qsearch = Snapshot.objects.filter(pk__in=snapshot_ids)
            return qsearch

    return Snapshot.objects.none()

@enforce_types
def flush_search_index(snapshots: QuerySet):
    if not indexing_enabled() or not snapshots:
        return
    backend = import_backend()
    snapshot_ids = (str(pk) for pk in snapshots.values_list('pk', flat=True))
    try:
        backend.flush(snapshot_ids)
    except Exception as err:
        stderr()
        stderr(
            f'[X] The search backend threw an exception: {err}',
            color='red',
        )

@enforce_types
def index_links(links: Union[List[Link], None], out_dir: Path=OUTPUT_DIR):
    if not links:
        return

    setup_django(out_dir=out_dir, check_db=True)
    from core.models import Snapshot, ArchiveResult

    for link in links:
        if snap := Snapshot.objects.filter(url=link.url).first():
            results = ArchiveResult.objects.indexable().filter(snapshot=snap)
            texts = get_indexable_content(results)
            log_index_started(link.url)
            write_search_index(link, texts, out_dir=out_dir)
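
# Informal summary: index_links() gathers indexable extractor output (via
# get_indexable_content) for each Link that already has a Snapshot and feeds it
# to write_search_index(); query_search_index() maps a full-text query back to
# a QuerySet of Snapshots, and flush_search_index() removes the given snapshots
# from the backend index.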