diff --git a/archivebox/search/__init__.py b/archivebox/search/__init__.py
index 360b20ff..6191ede9 100644
--- a/archivebox/search/__init__.py
+++ b/archivebox/search/__init__.py
@@ -6,7 +6,7 @@ from django.db.models import QuerySet

 from archivebox.index.schema import Link
 from archivebox.util import enforce_types
-from archivebox.config import setup_django,stderr, OUTPUT_DIR, USE_INDEXING_BACKEND, USE_SEARCHING_BACKEND, SEARCH_BACKEND_ENGINE
+from archivebox.config import stderr, OUTPUT_DIR, USE_INDEXING_BACKEND, USE_SEARCHING_BACKEND, SEARCH_BACKEND_ENGINE

 from .utils import get_indexable_content, log_index_started

@@ -49,7 +49,6 @@ def write_search_index(link: Link, texts: Union[List[str], None]=None, out_dir:

 @enforce_types
 def query_search_index(query: str, out_dir: Path=OUTPUT_DIR) -> QuerySet:
-    setup_django(out_dir, check_db=True)
     from core.models import Snapshot

     if search_backend_enabled():
diff --git a/archivebox/search/backends/ripgrep.py b/archivebox/search/backends/ripgrep.py
index ff02008d..e2e03c9b 100644
--- a/archivebox/search/backends/ripgrep.py
+++ b/archivebox/search/backends/ripgrep.py
@@ -2,7 +2,7 @@ import re
 from subprocess import run, PIPE, DEVNULL
 from typing import List, Generator

-from archivebox.config import setup_django, ARCHIVE_DIR
+from archivebox.config import ARCHIVE_DIR
 from archivebox.util import enforce_types

 RG_IGNORE_EXTENSIONS = ('css','js','orig','svg')
@@ -30,7 +30,6 @@ def search(text: str) -> List[str]:
     if is_rg_installed.returncode:
         raise Exception("ripgrep binary not found, install ripgrep to use this search backend")

-    setup_django(check_db=True)
     from core.models import Snapshot

     rg_cmd = ['rg', RG_ADD_TYPE, RG_IGNORE_ARGUMENTS, RG_DEFAULT_ARGUMENTS, RG_REGEX_ARGUMENT, text, str(ARCHIVE_DIR)]
diff --git a/tests/test_oneshot.py b/tests/test_oneshot.py
index 4057a6ad..560ac43c 100644
--- a/tests/test_oneshot.py
+++ b/tests/test_oneshot.py
@@ -20,7 +20,6 @@ def test_oneshot_command_saves_page_in_right_folder(tmp_path, disable_extractors
         capture_output=True,
         env=disable_extractors_dict,
     )
-    print(process.stdout)
     items = ' '.join([str(x) for x in tmp_path.iterdir()])
     current_path = ' '.join([str(x) for x in Path.cwd().iterdir()])
     assert "index.json" in items