mirror of https://github.com/ArchiveBox/ArchiveBox
synced 2024-11-22 04:03:06 +00:00

commit d93aa46949 (parent d47d429e9d)
fix django.forms.JSONField does not exist 500 error

4 changed files with 23 additions and 21 deletions
@@ -13,9 +13,9 @@ __package__ = 'archivebox'
 import os
 import sys
 
 from pathlib import Path
 from typing import cast
 
 ASCII_LOGO = """
 █████╗ ██████╗ ██████╗██╗ ██╗██╗██╗ ██╗███████╗ ██████╗ ██████╗ ██╗ ██╗
 ██╔══██╗██╔══██╗██╔════╝██║ ██║██║██║ ██║██╔════╝ ██╔══██╗██╔═══██╗╚██╗██╔╝
@@ -51,14 +51,13 @@ from .vendor import load_vendored_libs  # noqa
 load_vendored_libs()
 # print('DONE LOADING VENDORED LIBRARIES')
 
 
 # Load ABX Plugin Specifications + Default Implementations
 import abx  # noqa
 import abx_spec_archivebox  # noqa
 import abx_spec_config  # noqa
-import abx_spec_pydantic_pkgr  # noqa
 import abx_spec_django  # noqa
 import abx_spec_searchbackend  # noqa
 
 abx.pm.add_hookspecs(abx_spec_config.PLUGIN_SPEC)
 abx.pm.register(abx_spec_config.PLUGIN_SPEC())
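
The add_hookspecs()/register() calls above follow the familiar pluggy-style hookspec/hookimpl pattern (abx.pm appears to expose the same PluginManager-like API). As a rough standalone illustration only, using pluggy directly with made-up ConfigSpec/MyPlugin names and a made-up get_CONFIG hook rather than ArchiveBox's real specs:

import pluggy

hookspec = pluggy.HookspecMarker('abx')
hookimpl = pluggy.HookimplMarker('abx')

class ConfigSpec:                        # hypothetical spec, standing in for abx_spec_config.PLUGIN_SPEC
    @hookspec
    def get_CONFIG(self):
        """Each plugin may return a dict of config defaults."""

class MyPlugin:                          # hypothetical plugin implementation
    @hookimpl
    def get_CONFIG(self):
        return {'MY_SETTING': True}

pm = pluggy.PluginManager('abx')
pm.add_hookspecs(ConfigSpec)             # declare which hooks exist
pm.register(MyPlugin())                  # register one implementation
print(pm.hook.get_CONFIG())              # -> [{'MY_SETTING': True}]
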
@@ -72,30 +71,30 @@ abx.pm.register(abx_spec_django.PLUGIN_SPEC())
 abx.pm.add_hookspecs(abx_spec_searchbackend.PLUGIN_SPEC)
 abx.pm.register(abx_spec_searchbackend.PLUGIN_SPEC())
 
 
 # Cast to ArchiveBoxPluginSpec to enable static type checking of pm.hook.call() methods
 abx.pm = cast(abx.ABXPluginManager[abx_spec_archivebox.ArchiveBoxPluginSpec], abx.pm)
 pm = abx.pm
 
 
-# Load all installed ABX-compatible plugins
+# Load all pip-installed ABX-compatible plugins
 ABX_ECOSYSTEM_PLUGINS = abx.get_pip_installed_plugins(group='abx')
 
-# Load all ArchiveBox-specific plugins
+# Load all built-in ArchiveBox plugins
 ARCHIVEBOX_BUILTIN_PLUGINS = {
     'config': PACKAGE_DIR / 'config',
     'core': PACKAGE_DIR / 'core',
     # 'search': PACKAGE_DIR / 'search',
     # 'core': PACKAGE_DIR / 'core',
 }
 
 # Load all user-defined ArchiveBox plugins
 USER_PLUGINS = abx.find_plugins_in_dir(Path(os.getcwd()) / 'user_plugins')
 
-# Merge all plugins together
+# Import all plugins and register them with ABX Plugin Manager
 ALL_PLUGINS = {**ABX_ECOSYSTEM_PLUGINS, **ARCHIVEBOX_BUILTIN_PLUGINS, **USER_PLUGINS}
 
 
 # Load ArchiveBox plugins
 LOADED_PLUGINS = abx.load_plugins(ALL_PLUGINS)
 
 
 # Setup basic config, constants, paths, and version
 from .config.constants import CONSTANTS  # noqa
 from .config.paths import PACKAGE_DIR, DATA_DIR, ARCHIVE_DIR  # noqa
 from .config.version import VERSION  # noqa
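
A note on the ALL_PLUGINS merge in the hunk above: with {**a, **b, **c} the rightmost dict wins on duplicate keys, so a user plugin that reuses a built-in plugin's name takes precedence. A tiny self-contained illustration with made-up values:

ABX_ECOSYSTEM_PLUGINS = {'config': 'pip-installed abx-plugin-config'}     # made-up values, for illustration
ARCHIVEBOX_BUILTIN_PLUGINS = {'config': 'built-in archivebox/config'}
USER_PLUGINS = {'config': 'user_plugins/config'}

ALL_PLUGINS = {**ABX_ECOSYSTEM_PLUGINS, **ARCHIVEBOX_BUILTIN_PLUGINS, **USER_PLUGINS}
print(ALL_PLUGINS)   # -> {'config': 'user_plugins/config'}  (rightmost dict wins)
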
@@ -45,7 +45,7 @@ def detect_installed_version(PACKAGE_DIR: Path=PACKAGE_DIR):
 @cache
 def get_COMMIT_HASH() -> Optional[str]:
     try:
-        git_dir = PACKAGE_DIR / '../.git'
+        git_dir = PACKAGE_DIR.parent / '.git'
         ref = (git_dir / 'HEAD').read_text().strip().split(' ')[-1]
         commit_hash = git_dir.joinpath(ref).read_text().strip()
         return commit_hash
@@ -53,7 +53,7 @@ def get_COMMIT_HASH() -> Optional[str]:
         pass
 
     try:
-        return list((PACKAGE_DIR / '../.git/refs/heads/').glob('*'))[0].read_text().strip()
+        return list((PACKAGE_DIR.parent / '.git/refs/heads/').glob('*'))[0].read_text().strip()
     except Exception:
         pass
 
@@ -62,8 +62,12 @@ def get_COMMIT_HASH() -> Optional[str]:
 @cache
 def get_BUILD_TIME() -> str:
-    if IN_DOCKER:
-        docker_build_end_time = Path('/VERSION.txt').read_text().rsplit('BUILD_END_TIME=')[-1].split('\n', 1)[0]
-        return docker_build_end_time
+    try:
+        # if we're in the archivebox official docker image, /VERSION.txt will contain the build time
+        docker_build_end_time = Path('/VERSION.txt').read_text().rsplit('BUILD_END_TIME=')[-1].split('\n', 1)[0]
+        return docker_build_end_time
+    except Exception:
+        pass
 
     src_last_modified_unix_timestamp = (PACKAGE_DIR / 'README.md').stat().st_mtime
     return datetime.fromtimestamp(src_last_modified_unix_timestamp).strftime('%Y-%m-%d %H:%M:%S %s')
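
As a side note on the one-liner kept in both versions above: the rsplit/split chain just extracts whatever follows 'BUILD_END_TIME=' up to the next newline. A quick illustration on a made-up VERSION.txt payload (the real file in the official docker image may contain different keys and values):

sample = 'BUILD_START_TIME=2024-11-21 18:01:02\nBUILD_END_TIME=2024-11-21 18:05:59\n'   # made-up payload
build_end_time = sample.rsplit('BUILD_END_TIME=')[-1].split('\n', 1)[0]
print(build_end_time)   # -> 2024-11-21 18:05:59
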
@@ -8,7 +8,7 @@ from django.utils.html import format_html, mark_safe
 from django.core.exceptions import ValidationError
 from django.urls import reverse, resolve
 from django.utils import timezone
-from django.forms import forms
+from django_jsonform.forms.fields import JSONFormField
 
 from huey_monitor.admin import TaskModel
 
@@ -83,7 +83,7 @@ class ArchiveResultInline(admin.TabularInline):
         formset.form.base_fields['cmd_version'].initial = '-'
         formset.form.base_fields['pwd'].initial = str(snapshot.link_dir)
         formset.form.base_fields['created_by'].initial = request.user
-        formset.form.base_fields['cmd'] = forms.JSONField(initial=['-'])
+        formset.form.base_fields['cmd'] = JSONFormField(initial=['-'])
         formset.form.base_fields['output'].initial = 'Manually recorded cmd output...'
 
         if obj is not None:
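
This hunk plus the import swap above are the fix for the 500 error in the commit title: "from django.forms import forms" binds the django.forms.forms submodule, which only holds the Form/BaseForm machinery and has no JSONField attribute, so evaluating forms.JSONField(initial=['-']) raised AttributeError at request time. The commit switches to django-jsonform's form field instead. A minimal sketch of the working pattern in isolation, with a made-up ExampleCmdForm and assuming django-jsonform is installed:

from django import forms                               # the package itself, not the django.forms.forms submodule
from django_jsonform.forms.fields import JSONFormField

class ExampleCmdForm(forms.Form):                      # hypothetical form, for illustration only
    # validates JSON input and renders django-jsonform's editor widget
    cmd = JSONFormField(initial=['-'])

# The broken pattern this commit removes, for comparison:
#   from django.forms import forms      # binds the django.forms.forms submodule
#   forms.JSONField(initial=['-'])      # raises AttributeError: that submodule has no JSONField
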
@@ -12,7 +12,6 @@ from django.views import View
 from django.views.generic.list import ListView
 from django.views.generic import FormView
 from django.db.models import Q
 from django.conf import settings
 from django.contrib import messages
 from django.contrib.auth.mixins import UserPassesTestMixin
 from django.views.decorators.csrf import csrf_exempt