Add log print for url indexing

This commit is contained in:
JDC 2020-11-23 17:23:26 -05:00 committed by Nick Sweeting
parent 0acf479b70
commit db9c2edccc
2 changed files with 9 additions and 2 deletions

View file

@ -8,7 +8,7 @@ from archivebox.index.schema import Link
from archivebox.util import enforce_types
from archivebox.config import setup_django,stderr, OUTPUT_DIR, USE_INDEXING_BACKEND, USE_SEARCHING_BACKEND, SEARCH_BACKEND_ENGINE
from .utils import get_indexable_content
from .utils import get_indexable_content, log_index_started
def indexing_enabled():
    """Return whether the search-indexing backend is enabled.

    Simply exposes the USE_INDEXING_BACKEND flag imported from
    archivebox.config; callers use it to decide whether to write
    entries into the search index.
    """
    return USE_INDEXING_BACKEND
@ -98,4 +98,5 @@ def index_links(links: Union[List[Link],None], out_dir: Path=OUTPUT_DIR):
if snap := Snapshot.objects.filter(url=link.url).first():
results = ArchiveResult.objects.indexable().filter(snapshot=snap)
texts = get_indexable_content(results)
log_index_started(link.url)
write_search_index(link, texts, out_dir=out_dir)

View file

@ -1,6 +1,11 @@
from django.db.models import QuerySet
from archivebox.util import enforce_types
from archivebox.config import ANSI
def log_index_started(url):
    """Print a colored notice that *url* is being added to the search index.

    The 'green'/'reset' placeholders are filled from the ANSI color-code
    mapping imported from archivebox.config; the bare positional '{}' takes
    the url. A trailing blank line is printed for visual separation.
    """
    message = '{green}[*] Indexing url: {} in the search index {reset}'.format(url, **ANSI)
    print(message)
    print()
def get_file_result_content(res, extra_path, use_pwd=False):
if use_pwd:
@ -12,7 +17,7 @@ def get_file_result_content(res, extra_path, use_pwd=False):
fpath = f'{fpath}/{extra_path}'
with open(fpath, 'r') as file:
data = file.read().replace('\n', '')
data = file.read()
if data:
return [data]
return []
@ -28,6 +33,7 @@ def get_indexable_content(results: QuerySet):
if method not in ('readability', 'singlefile', 'dom', 'wget'):
return []
# This should come from a plugin interface
if method == 'readability':
return get_file_result_content(res, 'content.txt')
elif method == 'singlefile':