Mirror of https://github.com/ArchiveBox/ArchiveBox (synced 2024-11-25 13:40:20 +00:00)

relocate LIB_DIR and TMP_DIR inside docker so they don't clash with the same dirs outside docker
commit b36e89d086 (parent f4f1d7893c)
4 changed files with 28 additions and 20 deletions

@@ -18,6 +18,7 @@ PACKAGE_DIR: Path = Path(__file__).resolve().parent.parent # archivebox sourc
 DATA_DIR: Path = Path(os.curdir).resolve() # archivebox user data dir
 ARCHIVE_DIR: Path = DATA_DIR / 'archive' # archivebox snapshot data dir

+IN_DOCKER = os.environ.get('IN_DOCKER', False) in ('1', 'true', 'True', 'yes')

 def _detect_installed_version(PACKAGE_DIR: Path):
     """Autodetect the installed archivebox version by using pip package metadata, pyproject.toml file, or package.json file"""

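The new IN_DOCKER flag is read from the environment, and only a handful of literal strings count as true. A minimal sketch of the same check in isolation (the helper name is made up for illustration):

    import os

    TRUTHY = ('1', 'true', 'True', 'yes')   # the exact values accepted by the line added above

    def is_in_docker(environ=os.environ) -> bool:
        # unset -> .get() returns False, which is not in TRUTHY, so the result is False
        return environ.get('IN_DOCKER', False) in TRUTHY

    # IN_DOCKER=1 or IN_DOCKER=true      -> True
    # unset, IN_DOCKER=0, IN_DOCKER=TRUE -> False (only the four literals above match)
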
@@ -79,8 +80,8 @@ class ConstantsDict(Mapping):
     PERSONAS_DIR: Path = DATA_DIR / PERSONAS_DIR_NAME
     CACHE_DIR: Path = DATA_DIR / CACHE_DIR_NAME
     LOGS_DIR: Path = DATA_DIR / LOGS_DIR_NAME
-    LIB_DIR: Path = DATA_DIR / LIB_DIR_NAME
-    TMP_DIR: Path = DATA_DIR / TMP_DIR_NAME
+    LIB_DIR: Path = (Path('/tmp') if IN_DOCKER else DATA_DIR) / LIB_DIR_NAME
+    TMP_DIR: Path = (Path('/tmp') if IN_DOCKER else DATA_DIR) / TMP_DIR_NAME
     CUSTOM_TEMPLATES_DIR: Path = DATA_DIR / CUSTOM_TEMPLATES_DIR_NAME
     USER_PLUGINS_DIR: Path = DATA_DIR / USER_PLUGINS_DIR_NAME

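This is the core of the commit: inside Docker, LIB_DIR and TMP_DIR are rooted under /tmp instead of the bind-mounted data dir, so container-local libs and temp files no longer collide with the same directories managed outside Docker. A minimal sketch of how the conditional resolves (DATA_DIR and the *_NAME values here are hypothetical, not taken from the diff):

    from pathlib import Path

    IN_DOCKER = True                              # pretend we are inside the container
    DATA_DIR = Path('/data')                      # hypothetical bind-mounted data dir
    LIB_DIR_NAME, TMP_DIR_NAME = 'lib', 'tmp'     # hypothetical names, for illustration only

    LIB_DIR = (Path('/tmp') if IN_DOCKER else DATA_DIR) / LIB_DIR_NAME
    TMP_DIR = (Path('/tmp') if IN_DOCKER else DATA_DIR) / TMP_DIR_NAME

    print(LIB_DIR, TMP_DIR)   # /tmp/lib /tmp/tmp inside Docker; /data/lib /data/tmp outside
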
@@ -756,7 +756,9 @@ def setup_django(out_dir: Path | None=None, check_db=False, config: benedict=CON
     global DJANGO_SET_UP

     if DJANGO_SET_UP:
-        raise Exception('django is already set up!')
+        # raise Exception('django is already set up!')
+        # TODO: figure out why CLI entrypoints with init_pending are running this twice sometimes
+        return

     with Progress(transient=True, expand=True, console=CONSOLE) as INITIAL_STARTUP_PROGRESS:
         INITIAL_STARTUP_PROGRESS_TASK = INITIAL_STARTUP_PROGRESS.add_task("[green]Loading modules...", total=25)

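Instead of raising when Django is already configured, setup_django now returns early, so repeat calls are harmless while the double invocation from CLI entrypoints is still being investigated. A minimal sketch of the same guard pattern (simplified names, not the actual ArchiveBox module):

    _DJANGO_SET_UP = False

    def setup_django_once():
        """Idempotent setup: a second call is a silent no-op instead of an error."""
        global _DJANGO_SET_UP
        if _DJANGO_SET_UP:
            return                      # previously this raised an Exception
        # ... expensive one-time initialization would happen here ...
        _DJANGO_SET_UP = True
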
@@ -1,13 +1,14 @@
 from pathlib import Path

-from archivebox.config import DATA_DIR, CONSTANTS
+from archivebox.config import CONSTANTS

+DATA_DIR = CONSTANTS.DATA_DIR
 LOGS_DIR = CONSTANTS.LOGS_DIR
 TMP_DIR = CONSTANTS.TMP_DIR

 Path.mkdir(TMP_DIR, exist_ok=True)
-CONFIG_FILE = TMP_DIR / "supervisord.conf"
+SUPERVISORD_CONFIG_FILE = TMP_DIR / "supervisord.conf"
 PID_FILE = TMP_DIR / "supervisord.pid"
 SOCK_FILE = TMP_DIR / "supervisord.sock"
 LOG_FILE = TMP_DIR / "supervisord.log"
-WORKER_DIR = TMP_DIR / "workers"
+WORKERS_DIR = TMP_DIR / "workers"

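All supervisord runtime files (conf, pid, sock, log, worker configs) are derived from TMP_DIR, which now lands under /tmp inside Docker, and DATA_DIR is re-exported from CONSTANTS for use in the generated config below. One small aside: Path.mkdir(TMP_DIR, exist_ok=True) is just the unbound form of the usual instance call, and neither form creates missing parents. A quick sketch with a hypothetical path:

    from pathlib import Path

    tmp_dir = Path('/tmp/archivebox-demo')    # hypothetical path, for illustration only

    Path.mkdir(tmp_dir, exist_ok=True)        # unbound form, as used in the settings module
    tmp_dir.mkdir(exist_ok=True)              # equivalent instance-method form

    # Note: without parents=True, only the last path component is created,
    # so the parent directory (/tmp here) must already exist.
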
@@ -12,7 +12,7 @@ from typing import Dict, cast
 from supervisor.xmlrpc import SupervisorTransport
 from xmlrpc.client import ServerProxy

-from .settings import CONFIG_FILE, PID_FILE, SOCK_FILE, LOG_FILE, WORKER_DIR, TMP_DIR, LOGS_DIR
+from .settings import SUPERVISORD_CONFIG_FILE, DATA_DIR, PID_FILE, SOCK_FILE, LOG_FILE, WORKERS_DIR, TMP_DIR, LOGS_DIR

 from typing import Iterator

|
@ -36,38 +36,39 @@ def create_supervisord_config():
|
||||||
[supervisord]
|
[supervisord]
|
||||||
nodaemon = true
|
nodaemon = true
|
||||||
environment = IS_SUPERVISORD_PARENT="true"
|
environment = IS_SUPERVISORD_PARENT="true"
|
||||||
pidfile = %(here)s/{PID_FILE.name}
|
pidfile = {TMP_DIR}/{PID_FILE.name}
|
||||||
logfile = %(here)s/../{LOGS_DIR.name}/{LOG_FILE.name}
|
logfile = {LOGS_DIR}/{LOG_FILE.name}
|
||||||
childlogdir = %(here)s/../{LOGS_DIR.name}
|
childlogdir = {LOGS_DIR}
|
||||||
directory = %(here)s/..
|
directory = {DATA_DIR}
|
||||||
strip_ansi = true
|
strip_ansi = true
|
||||||
nocleanup = true
|
nocleanup = true
|
||||||
|
|
||||||
[unix_http_server]
|
[unix_http_server]
|
||||||
file = %(here)s/{SOCK_FILE.name}
|
file = {TMP_DIR}/{SOCK_FILE.name}
|
||||||
chmod = 0700
|
chmod = 0700
|
||||||
|
|
||||||
[supervisorctl]
|
[supervisorctl]
|
||||||
serverurl = unix://%(here)s/{SOCK_FILE.name}
|
serverurl = unix://{TMP_DIR}/{SOCK_FILE.name}
|
||||||
|
|
||||||
[rpcinterface:supervisor]
|
[rpcinterface:supervisor]
|
||||||
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
|
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
|
||||||
|
|
||||||
[include]
|
[include]
|
||||||
files = %(here)s/{WORKER_DIR.name}/*.conf
|
files = {WORKERS_DIR}/*.conf
|
||||||
|
|
||||||
"""
|
"""
|
||||||
CONFIG_FILE.write_text(config_content)
|
SUPERVISORD_CONFIG_FILE.write_text(config_content)
|
||||||
|
|
||||||
def create_worker_config(daemon):
|
def create_worker_config(daemon):
|
||||||
Path.mkdir(WORKER_DIR, exist_ok=True)
|
Path.mkdir(WORKERS_DIR, exist_ok=True)
|
||||||
|
|
||||||
name = daemon['name']
|
name = daemon['name']
|
||||||
configfile = WORKER_DIR / f"{name}.conf"
|
configfile = WORKERS_DIR / f"{name}.conf"
|
||||||
|
|
||||||
config_content = f"[program:{name}]\n"
|
config_content = f"[program:{name}]\n"
|
||||||
for key, value in daemon.items():
|
for key, value in daemon.items():
|
||||||
if key == 'name': continue
|
if key == 'name':
|
||||||
|
continue
|
||||||
config_content += f"{key}={value}\n"
|
config_content += f"{key}={value}\n"
|
||||||
config_content += "\n"
|
config_content += "\n"
|
||||||
|
|
||||||
|
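In the generated supervisord config, the %(here)s expansion (which supervisord resolves to the directory containing the config file) is replaced with interpolated absolute paths: the pidfile and socket stay under TMP_DIR (i.e. under /tmp inside Docker), while logs and the working directory still point at the data dir. A rough sketch of how the interpolation renders, using hypothetical path values:

    from pathlib import Path

    # Hypothetical values for illustration; inside Docker they resolve roughly like this.
    TMP_DIR = Path('/tmp/tmp')
    LOGS_DIR = Path('/data/logs')
    DATA_DIR = Path('/data')
    PID_FILE = TMP_DIR / 'supervisord.pid'

    print(f"pidfile = {TMP_DIR}/{PID_FILE.name}")    # pidfile = /tmp/tmp/supervisord.pid
    print(f"logfile = {LOGS_DIR}/supervisord.log")   # logfile = /data/logs/supervisord.log
    print(f"directory = {DATA_DIR}")                 # directory = /data

    # With %(here)s these paths would have been relative to wherever the .conf file lived;
    # now they are fixed absolute locations regardless of the config file's directory.
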
@@ -117,7 +118,7 @@ def start_new_supervisord_process(daemonize=False):

     # Start supervisord
     subprocess.Popen(
-        f"supervisord --configuration={CONFIG_FILE}",
+        f"supervisord --configuration={SUPERVISORD_CONFIG_FILE}",
         stdin=None,
         shell=True,
         start_new_session=daemonize,

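supervisord is launched against the explicit config path rather than relying on its default config lookup. For comparison only, a small sketch of the same launch using an argument list instead of shell=True (not the project's actual call, just an equivalent form that avoids shell quoting of the path):

    import subprocess
    from pathlib import Path

    supervisord_config = Path('/tmp/tmp/supervisord.conf')   # hypothetical path

    proc = subprocess.Popen(
        ["supervisord", f"--configuration={supervisord_config}"],  # no shell needed for a fixed argv
        start_new_session=True,   # detach into its own session when daemonizing
    )
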
|
@ -146,8 +147,11 @@ def get_or_create_supervisord_process(daemonize=False):
|
||||||
if supervisor is None:
|
if supervisor is None:
|
||||||
stop_existing_supervisord_process()
|
stop_existing_supervisord_process()
|
||||||
supervisor = start_new_supervisord_process(daemonize=daemonize)
|
supervisor = start_new_supervisord_process(daemonize=daemonize)
|
||||||
|
time.sleep(0.5)
|
||||||
|
|
||||||
assert supervisor and supervisor.getPID(), "Failed to start supervisord or connect to it!"
|
assert supervisor, "Failed to start supervisord or connect to it!"
|
||||||
|
supervisor.getPID() # make sure it doesn't throw an exception
|
||||||
|
|
||||||
return supervisor
|
return supervisor
|
||||||
|
|
||||||
def start_worker(supervisor, daemon, lazy=False):
|
def start_worker(supervisor, daemon, lazy=False):
|
||||||
|
|
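After spawning supervisord, the code now waits half a second and verifies the XML-RPC connection with an explicit getPID() call instead of burying it inside the assert (a side benefit: the check still runs under python -O, where assert statements are stripped). A minimal sketch of that connect-then-verify pattern over supervisord's unix socket, with a hypothetical socket path:

    import time
    from xmlrpc.client import ServerProxy
    from supervisor.xmlrpc import SupervisorTransport

    SOCK_FILE = "/tmp/tmp/supervisord.sock"   # hypothetical socket path

    def connect_and_verify():
        # the dummy http URL is required by ServerProxy; the unix-socket transport does the real work
        transport = SupervisorTransport(None, None, f"unix://{SOCK_FILE}")
        proxy = ServerProxy("http://localhost", transport=transport)
        supervisor = proxy.supervisor
        time.sleep(0.5)       # give a freshly spawned supervisord a moment to bind its socket
        supervisor.getPID()   # raises if supervisord is not actually reachable over the socket
        return supervisor
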