Merge branch 'dev' into plugins-browsertrix

commit 33e82736f9
Author: Nick Sweeting
Date: 2024-04-24 16:29:36 -07:00 (committed via GitHub)
33 changed files with 3943 additions and 595 deletions

.github/dependabot.yml (vendored, new file, +12 lines)

@ -0,0 +1,12 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
version: 2
updates:
- package-ecosystem: "pip" # See documentation for possible values
directory: "/"
target-branch: "dev"
schedule:
interval: "weekly"

.github/workflows/codeql.yml (vendored, new file, +92 lines)

@ -0,0 +1,92 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
push:
branches: [ "dev" ]
pull_request:
branches: [ "dev" ]
schedule:
- cron: '33 17 * * 6'
jobs:
analyze:
name: Analyze (${{ matrix.language }})
# Runner size impacts CodeQL analysis time. To learn more, please see:
# - https://gh.io/recommended-hardware-resources-for-running-codeql
# - https://gh.io/supported-runners-and-hardware-resources
# - https://gh.io/using-larger-runners (GitHub.com only)
# Consider using larger runners or machines with greater resources for possible analysis time improvements.
runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }}
permissions:
# required for all workflows
security-events: write
# required to fetch internal or private CodeQL packs
packages: read
# only required for workflows in private repositories
actions: read
contents: read
strategy:
fail-fast: false
matrix:
include:
- language: python
build-mode: none
# CodeQL supports the following values for the 'language' keyword: 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift'
# Use `c-cpp` to analyze code written in C, C++ or both
# Use 'java-kotlin' to analyze code written in Java, Kotlin or both
# Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
# To learn more about changing the languages that are analyzed or customizing the build mode for your analysis,
# see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning.
# If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
# your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
build-mode: ${{ matrix.build-mode }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# If the analyze step fails for one of the languages you are analyzing with
# "We were unable to automatically build your code", modify the matrix above
# to set the build mode to "manual" for that language. Then modify this step
# to build your code.
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
- if: matrix.build-mode == 'manual'
run: |
echo 'If you are using a "manual" build mode for one or more of the' \
'languages you are analyzing, replace this with the commands to build' \
'your code, for example:'
echo ' make bootstrap'
echo ' make release'
exit 1
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"

.gitignore (vendored, 4 lines changed)

@ -12,6 +12,10 @@ venv/
.docker-venv/
node_modules/
# Ignore dev lockfiles (should always be built fresh)
requirements-dev.txt
pdm.dev.lock
# Packaging artifacts
.pdm-python
.pdm-build

Dockerfile

@ -20,10 +20,24 @@ FROM python:3.11-slim-bookworm
LABEL name="archivebox" \
maintainer="Nick Sweeting <dockerfile@archivebox.io>" \
description="All-in-one personal internet archiving container" \
description="All-in-one self-hosted internet archiving solution" \
homepage="https://github.com/ArchiveBox/ArchiveBox" \
documentation="https://github.com/ArchiveBox/ArchiveBox/wiki/Docker#docker"
documentation="https://github.com/ArchiveBox/ArchiveBox/wiki/Docker" \
org.opencontainers.image.title="ArchiveBox" \
org.opencontainers.image.vendor="ArchiveBox" \
org.opencontainers.image.description="All-in-one self-hosted internet archiving solution" \
org.opencontainers.image.source="https://github.com/ArchiveBox/ArchiveBox" \
com.docker.image.source.entrypoint="Dockerfile" \
# TODO: release ArchiveBox as a Docker Desktop extension (requires these labels):
# https://docs.docker.com/desktop/extensions-sdk/architecture/metadata/
com.docker.desktop.extension.api.version=">= 1.4.7" \
com.docker.desktop.extension.icon="https://archivebox.io/icon.png" \
com.docker.extension.publisher-url="https://archivebox.io" \
com.docker.extension.screenshots='[{"alt": "Screenshot of Admin UI", "url": "https://github.com/ArchiveBox/ArchiveBox/assets/511499/e8e0b6f8-8fdf-4b7f-8124-c10d8699bdb2"}]' \
com.docker.extension.detailed-description='See here for detailed documentation: https://wiki.archivebox.io' \
com.docker.extension.changelog='See here for release notes: https://github.com/ArchiveBox/ArchiveBox/releases' \
com.docker.extension.categories='database,utility-tools'
ARG TARGETPLATFORM
ARG TARGETOS
ARG TARGETARCH

README.md

@ -124,8 +124,8 @@ curl -fsSL 'https://get.archivebox.io' | sh
## Key Features
- [**Free & open source**](https://github.com/ArchiveBox/ArchiveBox/blob/dev/LICENSE), doesn't require signing up online, stores all data locally
- [**Powerful, intuitive command line interface**](https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#CLI-Usage) with [modular optional dependencies](#dependencies)
- [**Free & open source**](https://github.com/ArchiveBox/ArchiveBox/blob/dev/LICENSE), own your own data & maintain your privacy by self-hosting
- [**Powerful command line interface**](https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#CLI-Usage) with [modular dependencies](#dependencies) and [support for Google Drive/NFS/SMB/S3/B2/etc.](https://github.com/ArchiveBox/ArchiveBox/wiki/Setting-Up-Storage)
- [**Comprehensive documentation**](https://github.com/ArchiveBox/ArchiveBox/wiki), [active development](https://github.com/ArchiveBox/ArchiveBox/wiki/Roadmap), and [rich community](https://github.com/ArchiveBox/ArchiveBox/wiki/Web-Archiving-Community)
- [**Extracts a wide variety of content out-of-the-box**](https://github.com/ArchiveBox/ArchiveBox/issues/51): [media (yt-dlp), articles (readability), code (git), etc.](#output-formats)
- [**Supports scheduled/realtime importing**](https://github.com/ArchiveBox/ArchiveBox/wiki/Scheduled-Archiving) from [many types of sources](#input-formats)
@ -669,7 +669,7 @@ docker run -it -v $PWD:/data archivebox/archivebox add --depth=1 'https://exampl
```bash
# archivebox add --help
archivebox add 'https://example.com/some/page'
archivebox add < ~/Downloads/firefox_bookmarks_export.html
archivebox add --parser=generic_rss < ~/Downloads/some_feed.xml
archivebox add --depth=1 'https://news.ycombinator.com#2020-12-12'
echo 'http://example.com' | archivebox add
echo 'any text with <a href="https://example.com">urls</a> in it' | archivebox add
@ -865,6 +865,7 @@ Each snapshot subfolder <code>data/archive/TIMESTAMP/</code> includes a static <
<h4>Learn More</h4>
<ul>
<li><a href="https://github.com/ArchiveBox/ArchiveBox/wiki/Setting-Up-Storage">Wiki: Setting Up Storage (SMB, NFS, S3, B2, Google Drive, etc.)</a></li>
<li><a href="https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#Disk-Layout">Wiki: Usage (Disk Layout)</a></li>
<li><a href="https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#large-archives">Wiki: Usage (Large Archives)</a></li>
<li><a href="https://github.com/ArchiveBox/ArchiveBox/wiki/Security-Overview#output-folder">Wiki: Security Overview (Output Folder)</a></li>


archivebox/api/apps.py (new file, +5 lines)

@ -0,0 +1,5 @@
from django.apps import AppConfig
class APIConfig(AppConfig):
name = 'api'

archivebox/api/archive.py (new file, +184 lines)

@ -0,0 +1,184 @@
# archivebox_api.py
from typing import List, Optional
from enum import Enum
from pydantic import BaseModel
from ninja import Router
from main import (
add,
remove,
update,
list_all,
ONLY_NEW,
) # Assuming these functions are defined in main.py
# Schemas
class StatusChoices(str, Enum):
indexed = 'indexed'
archived = 'archived'
unarchived = 'unarchived'
present = 'present'
valid = 'valid'
invalid = 'invalid'
duplicate = 'duplicate'
orphaned = 'orphaned'
corrupted = 'corrupted'
unrecognized = 'unrecognized'
class AddURLSchema(BaseModel):
urls: List[str]
tag: str = ""
depth: int = 0
update: bool = not ONLY_NEW # Default to the opposite of ONLY_NEW
update_all: bool = False
index_only: bool = False
overwrite: bool = False
init: bool = False
extractors: str = ""
parser: str = "auto"
class RemoveURLSchema(BaseModel):
yes: bool = False
delete: bool = False
before: Optional[float] = None
after: Optional[float] = None
filter_type: str = "exact"
filter_patterns: Optional[List[str]] = None
class UpdateSchema(BaseModel):
resume: Optional[float] = None
only_new: Optional[bool] = None
index_only: Optional[bool] = False
overwrite: Optional[bool] = False
before: Optional[float] = None
after: Optional[float] = None
status: Optional[StatusChoices] = None
filter_type: Optional[str] = 'exact'
filter_patterns: Optional[List[str]] = None
extractors: Optional[str] = ""
class ListAllSchema(BaseModel):
filter_patterns: Optional[List[str]] = None
filter_type: str = 'exact'
status: Optional[StatusChoices] = None
after: Optional[float] = None
before: Optional[float] = None
sort: Optional[str] = None
csv: Optional[str] = None
json: bool = False
html: bool = False
with_headers: bool = False
# API Router
router = Router()
@router.post("/add", response={200: dict})
def api_add(request, payload: AddURLSchema):
try:
result = add(
urls=payload.urls,
tag=payload.tag,
depth=payload.depth,
update=payload.update,
update_all=payload.update_all,
index_only=payload.index_only,
overwrite=payload.overwrite,
init=payload.init,
extractors=payload.extractors,
parser=payload.parser,
)
# Currently the add function returns a list of ALL items in the DB, ideally only return new items
return {
"status": "success",
"message": "URLs added successfully.",
"result": str(result),
}
except Exception as e:
# Handle exceptions raised by the add function or during processing
return {"status": "error", "message": str(e)}
@router.post("/remove", response={200: dict})
def api_remove(request, payload: RemoveURLSchema):
try:
result = remove(
yes=payload.yes,
delete=payload.delete,
before=payload.before,
after=payload.after,
filter_type=payload.filter_type,
filter_patterns=payload.filter_patterns,
)
return {
"status": "success",
"message": "URLs removed successfully.",
"result": result,
}
except Exception as e:
# Handle exceptions raised by the remove function or during processing
return {"status": "error", "message": str(e)}
@router.post("/update", response={200: dict})
def api_update(request, payload: UpdateSchema):
try:
result = update(
resume=payload.resume,
only_new=payload.only_new,
index_only=payload.index_only,
overwrite=payload.overwrite,
before=payload.before,
after=payload.after,
status=payload.status,
filter_type=payload.filter_type,
filter_patterns=payload.filter_patterns,
extractors=payload.extractors,
)
return {
"status": "success",
"message": "Archive updated successfully.",
"result": result,
}
except Exception as e:
# Handle exceptions raised by the update function or during processing
return {"status": "error", "message": str(e)}
@router.post("/list_all", response={200: dict})
def api_list_all(request, payload: ListAllSchema):
try:
result = list_all(
filter_patterns=payload.filter_patterns,
filter_type=payload.filter_type,
status=payload.status,
after=payload.after,
before=payload.before,
sort=payload.sort,
csv=payload.csv,
json=payload.json,
html=payload.html,
with_headers=payload.with_headers,
)
# TODO: This is kind of bad, make the format a choice field
if payload.json:
return {"status": "success", "format": "json", "data": result}
elif payload.html:
return {"status": "success", "format": "html", "data": result}
elif payload.csv:
return {"status": "success", "format": "csv", "data": result}
else:
return {
"status": "success",
"message": "List generated successfully.",
"data": result,
}
except Exception as e:
# Handle exceptions raised by the list_all function or during processing
return {"status": "error", "message": str(e)}

archivebox/api/auth.py (new file, +48 lines)

@ -0,0 +1,48 @@
from django.contrib.auth import authenticate
from ninja import Form, Router, Schema
from ninja.security import HttpBearer
from api.models import Token
router = Router()
class GlobalAuth(HttpBearer):
def authenticate(self, request, token):
try:
return Token.objects.get(token=token).user
except Token.DoesNotExist:
pass
class AuthSchema(Schema):
email: str
password: str
@router.post("/authenticate", auth=None) # overriding global auth
def get_token(request, auth_data: AuthSchema):
user = authenticate(username=auth_data.email, password=auth_data.password)
if user:
# Assuming a user can have multiple tokens and you want to create a new one every time
new_token = Token.objects.create(user=user)
return {"token": new_token.token, "expires": new_token.expiry_as_iso8601}
else:
return {"error": "Invalid credentials"}
class TokenValidationSchema(Schema):
token: str
@router.post("/validate_token", auth=None) # No authentication required for this endpoint
def validate_token(request, token_data: TokenValidationSchema):
try:
# Attempt to authenticate using the provided token
user = GlobalAuth().authenticate(request, token_data.token)
if user:
return {"status": "valid"}
else:
return {"status": "invalid"}
except Token.DoesNotExist:
return {"status": "invalid"}


@ -0,0 +1,28 @@
# Generated by Django 3.1.14 on 2024-04-09 18:52
import api.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Token',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('token', models.CharField(default=api.models.hex_uuid, max_length=32, unique=True)),
('created', models.DateTimeField(auto_now_add=True)),
('expiry', models.DateTimeField(blank=True, null=True)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tokens', to=settings.AUTH_USER_MODEL)),
],
),
]


archivebox/api/models.py (new file, +30 lines)

@ -0,0 +1,30 @@
import uuid
from datetime import timedelta
from django.conf import settings
from django.db import models
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
def hex_uuid():
return uuid.uuid4().hex
class Token(models.Model):
user = models.ForeignKey(
settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="tokens"
)
token = models.CharField(max_length=32, default=hex_uuid, unique=True)
created = models.DateTimeField(auto_now_add=True)
expiry = models.DateTimeField(null=True, blank=True)
@property
def expiry_as_iso8601(self):
"""Returns the expiry date of the token in ISO 8601 format or a date 100 years in the future if none."""
expiry_date = (
self.expiry if self.expiry else timezone.now() + timedelta(days=365 * 100)
)
return expiry_date.isoformat()
def __str__(self):
return self.token

archivebox/api/tests.py (new file, +27 lines)

@ -0,0 +1,27 @@
from django.test import TestCase
from ninja.testing import TestClient
from archivebox.api.archive import router as archive_router
class ArchiveBoxAPITestCase(TestCase):
def setUp(self):
self.client = TestClient(archive_router)
def test_add_endpoint(self):
response = self.client.post("/add", json={"urls": ["http://example.com"], "tag": "test"})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json()["status"], "success")
def test_remove_endpoint(self):
response = self.client.post("/remove", json={"filter_patterns": ["http://example.com"]})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json()["status"], "success")
def test_update_endpoint(self):
response = self.client.post("/update", json={})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json()["status"], "success")
def test_list_all_endpoint(self):
response = self.client.post("/list_all", json={})
self.assertEqual(response.status_code, 200)
self.assertTrue("success" in response.json()["status"])


@ -68,6 +68,7 @@ INSTALLED_APPS = [
'core',
'api',
# Plugins


@ -8,6 +8,18 @@ from django.views.generic.base import RedirectView
from core.views import HomepageView, SnapshotView, PublicIndexView, AddView, HealthCheckView
from ninja import NinjaAPI
from api.auth import GlobalAuth
api = NinjaAPI(auth=GlobalAuth())
api.add_router("/auth/", "api.auth.router")
api.add_router("/archive/", "api.archive.router")
# GLOBAL_CONTEXT doesn't work as-is, disabled for now: https://github.com/ArchiveBox/ArchiveBox/discussions/1306
# from config import VERSION, VERSIONS_AVAILABLE, CAN_UPGRADE
# GLOBAL_CONTEXT = {'VERSION': VERSION, 'VERSIONS_AVAILABLE': VERSIONS_AVAILABLE, 'CAN_UPGRADE': CAN_UPGRADE}
# print('DEBUG', settings.DEBUG)
urlpatterns = [
@ -35,6 +47,8 @@ urlpatterns = [
path('accounts/', include('django.contrib.auth.urls')),
path('admin/', archivebox_admin.urls),
path("api/", api.urls),
path('health/', HealthCheckView.as_view(), name='healthcheck'),
path('error/', lambda _: 1/0),

archivebox/index.sqlite3 (new file, 0 lines)


@ -494,12 +494,12 @@ def log_removal_started(links: List["Link"], yes: bool, delete: bool):
if delete:
file_counts = [link.num_outputs for link in links if Path(link.link_dir).exists()]
print(
f' {len(links)} Links will be de-listed from the main index, and their archived content folders will be deleted from disk.\n'
f' {len(links)} Links will be de-listed from the main index, and their archived content folders will be deleted from disk.\n' +
f' ({len(file_counts)} data folders with {sum(file_counts)} archived files will be deleted!)'
)
else:
print(
' Matching links will be de-listed from the main index, but their archived content folders will remain in place on disk.\n'
' Matching links will be de-listed from the main index, but their archived content folders will remain in place on disk.\n' +
' (Pass --delete if you also want to permanently delete the data folders)'
)
@ -638,17 +638,15 @@ def printable_folder_status(name: str, folder: Dict) -> str:
@enforce_types
def printable_dependency_version(name: str, dependency: Dict) -> str:
version = None
color, symbol, note, version = 'red', 'X', 'invalid', '?'
if dependency['enabled']:
if dependency['is_valid']:
color, symbol, note, version = 'green', '', 'valid', ''
color, symbol, note = 'green', '', 'valid'
parsed_version_num = re.search(r'[\d\.]+', dependency['version'])
if parsed_version_num:
version = f'v{parsed_version_num[0]}'
if not version:
color, symbol, note, version = 'red', 'X', 'invalid', '?'
else:
color, symbol, note, version = 'lightyellow', '-', 'disabled', '-'

archivebox/package-lock.json (generated, new file, +2371 lines)

File diff suppressed because it is too large.


@ -1,6 +1,6 @@
{
"name": "archivebox",
"version": "0.7.3",
"version": "0.8.0",
"description": "ArchiveBox: The self-hosted internet archive",
"author": "Nick Sweeting <archivebox-npm@sweeting.me>",
"repository": "github:ArchiveBox/ArchiveBox",
@ -8,6 +8,6 @@
"dependencies": {
"@postlight/parser": "^2.2.3",
"readability-extractor": "github:ArchiveBox/readability-extractor",
"single-file-cli": "^1.1.46"
"single-file-cli": "^1.1.54"
}
}


@ -57,19 +57,57 @@ short_ts = lambda ts: str(parse_date(ts).timestamp()).split('.')[0]
ts_to_date_str = lambda ts: ts and parse_date(ts).strftime('%Y-%m-%d %H:%M')
ts_to_iso = lambda ts: ts and parse_date(ts).isoformat()
COLOR_REGEX = re.compile(r'\[(?P<arg_1>\d+)(;(?P<arg_2>\d+)(;(?P<arg_3>\d+))?)?m')
# https://mathiasbynens.be/demo/url-regex
URL_REGEX = re.compile(
r'(?=('
r'http[s]?://' # start matching from allowed schemes
r'(?:[a-zA-Z]|[0-9]' # followed by allowed alphanum characters
r'|[-_$@.&+!*\(\),]' # or allowed symbols (keep hyphen first to match literal hyphen)
r'|(?:%[0-9a-fA-F][0-9a-fA-F]))' # or allowed unicode bytes
r'[^\]\[\(\)<>"\'\s]+' # stop parsing at these symbols
r'(?=(' +
r'http[s]?://' + # start matching from allowed schemes
r'(?:[a-zA-Z]|[0-9]' + # followed by allowed alphanum characters
r'|[-_$@.&+!*\(\),]' + # or allowed symbols (keep hyphen first to match literal hyphen)
r'|[^\u0000-\u007F])+' + # or allowed unicode bytes
r'[^\]\[<>"\'\s]+' + # stop parsing at these symbols
r'))',
re.IGNORECASE,
re.IGNORECASE | re.UNICODE,
)
COLOR_REGEX = re.compile(r'\[(?P<arg_1>\d+)(;(?P<arg_2>\d+)(;(?P<arg_3>\d+))?)?m')
def parens_are_matched(string: str, open_char='(', close_char=')'):
"""check that all parentheses in a string are balanced and nested properly"""
count = 0
for c in string:
if c == open_char:
count += 1
elif c == close_char:
count -= 1
if count < 0:
return False
return count == 0
def fix_url_from_markdown(url_str: str) -> str:
"""
cleanup a regex-parsed url that may contain dangling trailing parens from markdown link syntax
helpful to fix URLs parsed from markdown e.g.
input: https://wikipedia.org/en/some_article_(Disambiguation).html?abc=def).somemoretext
result: https://wikipedia.org/en/some_article_(Disambiguation).html?abc=def
"""
trimmed_url = url_str
# cut off one trailing character at a time
# until parens are balanced e.g. /a(b)c).x(y)z -> /a(b)c
while not parens_are_matched(trimmed_url):
trimmed_url = trimmed_url[:-1]
# make sure trimmed url is still valid
if re.findall(URL_REGEX, trimmed_url):
return trimmed_url
return url_str
def find_all_urls(urls_str: str):
for url in re.findall(URL_REGEX, urls_str):
yield fix_url_from_markdown(url)
def is_static_file(url: str):
# TODO: the proper way is with MIME type detection + ext, not only extension
@ -403,3 +441,48 @@ class ExtendedEncoder(pyjson.JSONEncoder):
return pyjson.JSONEncoder.default(self, obj)
### URL PARSING TESTS / ASSERTIONS
# they run at runtime because I like having them inline in this file,
# I like the peace of mind knowing it's enforced at runtime across all OS's (in case the regex engine ever has any weird locale-specific quirks),
# and these assertions are basically instant, so not a big performance cost to do it on startup
assert fix_url_from_markdown('/a(b)c).x(y)z') == '/a(b)c'
assert fix_url_from_markdown('https://wikipedia.org/en/some_article_(Disambiguation).html?abc=def).link(with)_trailingtext') == 'https://wikipedia.org/en/some_article_(Disambiguation).html?abc=def'
URL_REGEX_TESTS = [
('https://example.com', ['https://example.com']),
('http://abc-file234example.com/abc?def=abc&23423=sdfsdf#abc=234&234=a234', ['http://abc-file234example.com/abc?def=abc&23423=sdfsdf#abc=234&234=a234']),
('https://twitter.com/share?url=https://akaao.success-corp.co.jp&text=ア@サ!ト&hashtags=ア%オ,元+ア.ア-オ_イ*シ$ロ abc', ['https://twitter.com/share?url=https://akaao.success-corp.co.jp&text=ア@サ!ト&hashtags=ア%オ,元+ア.ア-オ_イ*シ$ロ', 'https://akaao.success-corp.co.jp&text=ア@サ!ト&hashtags=ア%オ,元+ア.ア-オ_イ*シ$ロ']),
('<a href="https://twitter.com/share#url=https://akaao.success-corp.co.jp&text=ア@サ!ト?hashtags=ア%オ,元+ア&abc=.ア-オ_イ*シ$ロ"> abc', ['https://twitter.com/share#url=https://akaao.success-corp.co.jp&text=ア@サ!ト?hashtags=ア%オ,元+ア&abc=.ア-オ_イ*シ$ロ', 'https://akaao.success-corp.co.jp&text=ア@サ!ト?hashtags=ア%オ,元+ア&abc=.ア-オ_イ*シ$ロ']),
('///a', []),
('http://', []),
('http://../', ['http://../']),
('http://-error-.invalid/', ['http://-error-.invalid/']),
('https://a(b)c+1#2?3&4/', ['https://a(b)c+1#2?3&4/']),
('http://उदाहरण.परीक्षा', ['http://उदाहरण.परीक्षा']),
('http://例子.测试', ['http://例子.测试']),
('http://➡.ws/䨹 htps://abc.1243?234', ['http://➡.ws/䨹']),
('http://⌘.ws">https://exa+mple.com//:abc ', ['http://⌘.ws', 'https://exa+mple.com//:abc']),
('http://مثال.إختبار/abc?def=ت&ب=abc#abc=234', ['http://مثال.إختبار/abc?def=ت&ب=abc#abc=234']),
('http://-.~_!$&()*+,;=:%40:80%2f::::::@example.c\'om', ['http://-.~_!$&()*+,;=:%40:80%2f::::::@example.c']),
('http://us:pa@ex.co:42/http://ex.co:19/a?_d=4#-a=2.3', ['http://us:pa@ex.co:42/http://ex.co:19/a?_d=4#-a=2.3', 'http://ex.co:19/a?_d=4#-a=2.3']),
('http://code.google.com/events/#&product=browser', ['http://code.google.com/events/#&product=browser']),
('http://foo.bar?q=Spaces should be encoded', ['http://foo.bar?q=Spaces']),
('http://foo.com/blah_(wikipedia)#c(i)t[e]-1', ['http://foo.com/blah_(wikipedia)#c(i)t']),
('http://foo.com/(something)?after=parens', ['http://foo.com/(something)?after=parens']),
('http://foo.com/unicode_(✪)_in_parens) abc', ['http://foo.com/unicode_(✪)_in_parens']),
('http://foo.bar/?q=Test%20URL-encoded%20stuff', ['http://foo.bar/?q=Test%20URL-encoded%20stuff']),
('[xyz](http://a.b/?q=(Test)%20U)RL-encoded%20stuff', ['http://a.b/?q=(Test)%20U']),
('[xyz](http://a.b/?q=(Test)%20U)-ab https://abc+123', ['http://a.b/?q=(Test)%20U', 'https://abc+123']),
('[xyz](http://a.b/?q=(Test)%20U) https://a(b)c+12)3', ['http://a.b/?q=(Test)%20U', 'https://a(b)c+12']),
('[xyz](http://a.b/?q=(Test)a\nabchttps://a(b)c+12)3', ['http://a.b/?q=(Test)a', 'https://a(b)c+12']),
('http://foo.bar/?q=Test%20URL-encoded%20stuff', ['http://foo.bar/?q=Test%20URL-encoded%20stuff']),
]
for urls_str, expected_url_matches in URL_REGEX_TESTS:
url_matches = list(find_all_urls(urls_str))
assert url_matches == expected_url_matches, 'FAILED URL_REGEX CHECK!'

archivebox/vendor/requirements.txt (vendored, new file, +6 lines)

@ -0,0 +1,6 @@
# this folder contains vendored versions of these packages
atomicwrites==1.4.0
pocket==0.3.7
django-taggit==1.3.0
base32-crockford==0.3.0


@ -31,6 +31,20 @@ else
echo "[!] Warning: No virtualenv presesnt in $REPO_DIR.venv"
fi
# Build python package lists
# https://pdm-project.org/latest/usage/lockfile/
echo "[+] Generating requirements.txt and pdm.lock from pyproject.toml..."
pdm lock --group=':all' --production --lockfile pdm.lock --strategy="cross_platform"
pdm sync --group=':all' --production --lockfile pdm.lock --clean || pdm sync --group=':all' --production --lockfile pdm.lock --clean
pdm export --group=':all' --production --lockfile pdm.lock --without-hashes -o requirements.txt
pdm lock --group=':all' --dev --lockfile pdm.dev.lock --strategy="cross_platform"
pdm sync --group=':all' --dev --lockfile pdm.dev.lock --clean || pdm sync --group=':all' --dev --lockfile pdm.dev.lock --clean
pdm export --group=':all' --dev --lockfile pdm.dev.lock --without-hashes -o requirements-dev.txt
# cleanup build artifacts
rm -Rf build deb_dist dist archivebox-*.tar.gz


@ -21,6 +21,20 @@ VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")"
SHORT_VERSION="$(echo "$VERSION" | perl -pe 's/(\d+)\.(\d+)\.(\d+)/$1.$2/g')"
REQUIRED_PLATFORMS="${2:-"linux/arm64,linux/amd64,linux/arm/v7"}"
# Build python package lists
# https://pdm-project.org/latest/usage/lockfile/
echo "[+] Generating requirements.txt and pdm.lock from pyproject.toml..."
pdm lock --group=':all' --production --lockfile pdm.lock --strategy="cross_platform"
pdm sync --group=':all' --production --lockfile pdm.lock --clean || pdm sync --group=':all' --production --lockfile pdm.lock --clean
pdm export --group=':all' --production --lockfile pdm.lock --without-hashes -o requirements.txt
pdm lock --group=':all' --dev --lockfile pdm.dev.lock --strategy="cross_platform"
pdm sync --group=':all' --dev --lockfile pdm.dev.lock --clean || pdm sync --group=':all' --dev --lockfile pdm.dev.lock --clean
pdm export --group=':all' --dev --lockfile pdm.dev.lock --without-hashes -o requirements-dev.txt
echo "[+] Building Docker image: tag=$TAG_NAME version=$SHORT_VERSION arch=$REQUIRED_PLATFORMS"
@ -32,4 +46,4 @@ docker build . --no-cache -t archivebox-dev --load
# -t archivebox \
# -t archivebox:$TAG_NAME \
# -t archivebox:$VERSION \
# -t archivebox:$SHORT_VERSION
# -t archivebox:$SHORT_VERSION


@ -71,10 +71,8 @@ docker buildx use xbuilder 2>&1 >/dev/null || create_builder
check_platforms || (recreate_builder && check_platforms) || exit 1
# Build python package lists
echo "[+] Generating requirements.txt and pdm.lock from pyproject.toml..."
pdm lock --group=':all' --strategy="cross_platform" --production
pdm export --group=':all' --production --without-hashes -o requirements.txt
# Make sure pyproject.toml, pdm{.dev}.lock, requirements{-dev}.txt, package{-lock}.json are all up-to-date
bash ./bin/lock_pkgs.sh
echo "[+] Building archivebox:$VERSION docker image..."


@ -20,20 +20,13 @@ else
fi
cd "$REPO_DIR"
echo "[*] Cleaning up build dirs"
cd "$REPO_DIR"
rm -Rf build dist
# Generate pdm.lock, requirements.txt, and package-lock.json
bash ./bin/lock_pkgs.sh
echo "[+] Building sdist, bdist_wheel, and egg_info"
rm -f archivebox/package.json
cp package.json archivebox/package.json
pdm self update
pdm install
rm -Rf build dist
pdm build
pdm export --without-hashes -o ./pip_dist/requirements.txt
cp dist/* ./pip_dist/
echo
echo "[√] Finished. Don't forget to commit the new sdist and wheel files in ./pip_dist/"
echo "[√] Finished. Don't forget to commit the new sdist and wheel files in ./pip_dist/"


@ -167,13 +167,13 @@ fi
# symlink etc crontabs into place
mkdir -p "$DATA_DIR/crontabs"
if ! test -L /var/spool/cron/crontabs; then
# copy files from old location into new data dir location
for file in $(ls /var/spool/cron/crontabs); do
cp /var/spool/cron/crontabs/"$file" "$DATA_DIR/crontabs"
# move files from old location into new data dir location
for existing_file in /var/spool/cron/crontabs/*; do
mv "$existing_file" "$DATA_DIR/crontabs/"
done
# replace old system path with symlink to data dir location
rm -Rf /var/spool/cron/crontabs
ln -s "$DATA_DIR/crontabs" /var/spool/cron/crontabs
ln -sf "$DATA_DIR/crontabs" /var/spool/cron/crontabs
fi
# set DBUS_SYSTEM_BUS_ADDRESS & DBUS_SESSION_BUS_ADDRESS

bin/lock_pkgs.sh (new executable file, +101 lines)

@ -0,0 +1,101 @@
#!/usr/bin/env bash
### Bash Environment Setup
# http://redsymbol.net/articles/unofficial-bash-strict-mode/
# https://www.gnu.org/software/bash/manual/html_node/The-Set-Builtin.html
# set -o xtrace
set -o errexit
set -o errtrace
set -o nounset
set -o pipefail
IFS=$'\n'
REPO_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && cd .. && pwd )"
cd "$REPO_DIR"
py_version="$(grep 'version = ' pyproject.toml | awk '{print $3}' | jq -r)"
js_version="$(jq -r '.version' package.json)"
if [[ "$py_version" != "$js_version" ]]; then
echo "[❌] Version in pyproject.toml ($py_version) does not match version in package.json ($js_version)!"
exit 1
fi
echo "[🔒] Locking all ArchiveBox dependencies (pip, npm)"
echo
echo "pyproject.toml: archivebox $py_version"
echo "package.json: archivebox $js_version"
echo
echo
echo "[*] Cleaning up old lockfiles and build files"
deactivate 2>/dev/null || true
rm -Rf build dist
rm -f pdm.lock
rm -f pdm.dev.lock
rm -f requirements.txt
rm -f requirements-dev.txt
rm -f package-lock.json
rm -f archivebox/package.json
rm -f archivebox/package-lock.json
rm -Rf ./.venv
rm -Rf ./node_modules
rm -Rf ./archivebox/node_modules
echo
echo
echo "[+] Generating dev & prod requirements.txt & pdm.lock from pyproject.toml..."
pip install --upgrade pip setuptools
pdm self update
pdm venv create 3.12
echo
echo "pyproject.toml: archivebox $(grep 'version = ' pyproject.toml | awk '{print $3}' | jq -r)"
echo "$(which python): $(python --version | head -n 1)"
echo "$(which pdm): $(pdm --version | head -n 1)"
pdm info --env
pdm info
echo
# https://pdm-project.org/latest/usage/lockfile/
# prod
pdm lock --group=':all' --production --lockfile pdm.lock --strategy="cross_platform"
pdm sync --group=':all' --production --lockfile pdm.lock --clean
pdm export --group=':all' --production --lockfile pdm.lock --without-hashes -o requirements.txt
cp ./pdm.lock ./pip_dist/
cp ./requirements.txt ./pip_dist/
# dev
pdm lock --group=':all' --dev --lockfile pdm.dev.lock --strategy="cross_platform"
pdm sync --group=':all' --dev --lockfile pdm.dev.lock --clean
pdm export --group=':all' --dev --lockfile pdm.dev.lock --without-hashes -o requirements-dev.txt
cp ./pdm.dev.lock ./pip_dist/
cp ./requirements-dev.txt ./pip_dist/
echo
echo "[+]] Generating package-lock.json from package.json..."
npm install -g npm
echo
echo "package.json: archivebox $(jq -r '.version' package.json)"
echo
echo "$(which node): $(node --version | head -n 1)"
echo "$(which npm): $(npm --version | head -n 1)"
echo
npm install --package-lock-only
cp package.json archivebox/package.json
cp package-lock.json archivebox/package-lock.json
echo
echo "[√] Finished. Don't forget to commit the new lockfiles:"
echo
ls "pyproject.toml" | cat
ls "pdm.lock" | cat
ls "pdm.dev.lock" | cat
ls "requirements.txt" | cat
ls "requirements-dev.txt" | cat
echo
ls "package.json" | cat
ls "package-lock.json" | cat
ls "archivebox/package.json" | cat
ls "archivebox/package-lock.json" | cat

docker-compose.yml

@ -1,14 +1,12 @@
# Usage:
# docker compose run archivebox init --setup
# docker compose up
# echo "https://example.com" | docker compose run archivebox archivebox add
# docker compose run archivebox add --depth=1 https://example.com/some/feed.rss
# docker compose run archivebox config --set MEDIA_MAX_SIZE=750m
# echo 'https://example.com' | docker compose run -T archivebox add
# docker compose run archivebox add --depth=1 'https://news.ycombinator.com'
# docker compose run archivebox config --set SAVE_ARCHIVE_DOT_ORG=False
# docker compose run archivebox help
# Documentation:
# https://github.com/ArchiveBox/ArchiveBox/wiki/Docker#docker-compose
services:
archivebox:
image: archivebox/archivebox:latest
@ -23,11 +21,11 @@ services:
- PUBLIC_INDEX=True # set to False to prevent anonymous users from viewing snapshot list
- PUBLIC_SNAPSHOTS=True # set to False to prevent anonymous users from viewing snapshot content
- PUBLIC_ADD_VIEW=False # set to True to allow anonymous users to submit new URLs to archive
- SEARCH_BACKEND_ENGINE=sonic # uncomment these and sonic container below for better full-text search
- SEARCH_BACKEND_ENGINE=sonic # tells ArchiveBox to use sonic container below for fast full-text search
- SEARCH_BACKEND_HOST_NAME=sonic
- SEARCH_BACKEND_PASSWORD=SomeSecretPassword
# - PUID=911 # set to your host user's UID & GID if you encounter permissions issues
# - PGID=911
# - PGID=911 # UID/GIDs <500 may clash with existing users and are not recommended
# - MEDIA_MAX_SIZE=750m # increase this filesize limit to allow archiving larger audio/video files
# - TIMEOUT=60 # increase this number to 120+ seconds if you see many slow downloads timing out
# - CHECK_SSL_VALIDITY=True # set to False to disable strict SSL checking (allows saving URLs w/ broken certs)
@ -43,51 +41,50 @@ services:
######## Optional Addons: tweak examples below as needed for your specific use case ########
### Enable ability to run regularly scheduled archiving tasks by uncommenting this container
# $ docker compose run archivebox schedule --every=day --depth=1 'https://example.com/some/rss/feed.xml'
# then restart the scheduler container to apply the changes to the schedule
### This optional container runs any scheduled tasks in the background, add new tasks like so:
# $ docker compose run archivebox schedule --add --every=day --depth=1 'https://example.com/some/rss/feed.xml'
# then restart the scheduler container to apply any changes to the scheduled task list:
# $ docker compose restart archivebox_scheduler
archivebox_scheduler:
image: archivebox/archivebox:latest
command: schedule --foreground
command: schedule --foreground --update --every=day
environment:
- TIMEOUT=120 # increase if you see timeouts often during archiving / on slow networks
- ONLY_NEW=True # set to False to retry previously failed URLs when re-adding instead of skipping them
- TIMEOUT=120 # use a higher timeout than the main container to give slow tasks more time when retrying
# - PUID=502 # set to your host user's UID & GID if you encounter permissions issues
# - PGID=20
volumes:
- ./data:/data
# cpus: 2 # uncomment / edit these values to limit container resource consumption
# cpus: 2 # uncomment / edit these values to limit scheduler container resource consumption
# mem_limit: 2048m
# shm_size: 1024m
# restart: always
### Runs the Sonic full-text search backend, config file is auto-downloaded into sonic.cfg:
# After starting, backfill any existing Snapshots into the full-text index:
### This runs the optional Sonic full-text search backend (much faster than default rg backend).
# If Sonic is ever started after not running for a while, update its full-text index by running:
# $ docker-compose run archivebox update --index-only
sonic:
image: valeriansaliou/sonic:latest
build:
# custom build just auto-downloads archivebox's default sonic.cfg as a convenience
# not needed if you have already have /etc/sonic.cfg
# not needed after first run / if you already have ./etc/sonic.cfg present
dockerfile_inline: |
FROM quay.io/curl/curl:latest AS setup
FROM quay.io/curl/curl:latest AS config_downloader
RUN curl -fsSL 'https://raw.githubusercontent.com/ArchiveBox/ArchiveBox/main/etc/sonic.cfg' > /tmp/sonic.cfg
FROM valeriansaliou/sonic:latest
COPY --from=setup /tmp/sonic.cfg /etc/sonic.cfg
COPY --from=config_downloader /tmp/sonic.cfg /etc/sonic.cfg
expose:
- 1491
environment:
- SEARCH_BACKEND_PASSWORD=SomeSecretPassword
volumes:
- ./etc/sonic.cfg:/etc/sonic.cfg
- ./sonic.cfg:/etc/sonic.cfg
- ./data/sonic:/var/lib/sonic/store
### Example: Watch the ArchiveBox browser in realtime as it archives things,
# or remote control it to set up logins and credentials for sites you want to archive.
### This container runs xvfb+noVNC so you can watch the ArchiveBox browser as it archives things,
# or remote control it to set up a chrome profile w/ login credentials for sites you want to archive.
# https://github.com/ArchiveBox/ArchiveBox/wiki/Chromium-Install#setting-up-a-chromium-user-profile
novnc:
@ -97,11 +94,13 @@ services:
- DISPLAY_HEIGHT=1080
- RUN_XTERM=no
ports:
# to view/control ArchiveBox's browser, visit: http://localhost:8080/vnc.html
- "8080:8080"
# to view/control ArchiveBox's browser, visit: http://127.0.0.1:8080/vnc.html
# restricted to access from localhost by default because it has no authentication
- 127.0.0.1:8080:8080
### Example: Put Nginx in front of the ArchiveBox server for SSL termination
### Example: Put Nginx in front of the ArchiveBox server for SSL termination and static file serving.
# You can also use any other ingress provider for SSL like Apache, Caddy, Traefik, Cloudflare Tunnels, etc.
# nginx:
# image: nginx:alpine
@ -119,7 +118,8 @@ services:
# pihole:
# image: pihole/pihole:latest
# ports:
# - 127.0.0.1:8090:80 # uncomment to access the admin HTTP interface on http://localhost:8090
# # access the admin HTTP interface on http://localhost:8090
# - 127.0.0.1:8090:80
# environment:
# - WEBPASSWORD=SET_THIS_TO_SOME_SECRET_PASSWORD_FOR_ADMIN_DASHBOARD
# - DNSMASQ_LISTENING=all
@ -134,8 +134,45 @@ services:
# - ./etc/dnsmasq:/etc/dnsmasq.d
### Example: run all your ArchiveBox traffic through a WireGuard VPN tunnel
### Example: Enable ability to run regularly scheduled archiving tasks by uncommenting this container
# $ docker compose run archivebox schedule --every=day --depth=1 'https://example.com/some/rss/feed.xml'
# then restart the scheduler container to apply the changes to the schedule
# $ docker compose restart archivebox_scheduler
# archivebox_scheduler:
# image: archivebox/archivebox:latest
# command: schedule --foreground
# environment:
# - MEDIA_MAX_SIZE=750m # increase this number to allow archiving larger audio/video files
# # - TIMEOUT=60 # increase if you see timeouts often during archiving / on slow networks
# # - ONLY_NEW=True # set to False to retry previously failed URLs when re-adding instead of skipping them
# # - CHECK_SSL_VALIDITY=True # set to False to allow saving URLs w/ broken SSL certs
# # - SAVE_ARCHIVE_DOT_ORG=True # set to False to disable submitting URLs to Archive.org when archiving
# # - PUID=502 # set to your host user's UID & GID if you encounter permissions issues
# # - PGID=20
# volumes:
# - ./data:/data
# - ./etc/crontabs:/var/spool/cron/crontabs
# # cpus: 2 # uncomment / edit these values to limit container resource consumption
# # mem_limit: 2048m
# # shm_size: 1024m
### Example: Put Nginx in front of the ArchiveBox server for SSL termination
# nginx:
# image: nginx:alpine
# ports:
# - 443:443
# - 80:80
# volumes:
# - ./etc/nginx.conf:/etc/nginx/nginx.conf
# - ./data:/var/www
### Example: run all your ArchiveBox traffic through a WireGuard VPN tunnel to avoid IP blocks.
# You can also use any other VPN that works at the docker IP level, e.g. Tailscale, OpenVPN, etc.
# wireguard:
# image: linuxserver/wireguard:latest
# network_mode: 'service:archivebox'
@ -172,10 +209,30 @@ services:
networks:
# network needed for pihole container to offer :53 dns resolving on fixed ip for archivebox container
# network just used for pihole container to offer :53 dns resolving on fixed ip for archivebox container
dns:
ipam:
driver: default
config:
- subnet: 172.20.0.0/24
# To use remote storage for your ./data/archive (e.g. Amazon S3, Backblaze B2, Google Drive, OneDrive, SFTP, etc.)
# Follow the steps here to set up the Docker RClone Plugin https://rclone.org/docker/
# $ docker plugin install rclone/docker-volume-rclone:amd64 --grant-all-permissions --alias rclone
# $ nano /var/lib/docker-plugins/rclone/config/rclone.conf
# [examplegdrive]
# type = drive
# scope = drive
# drive_id = 1234567...
# root_folder_id = 0Abcd...
# token = {"access_token":...}
# volumes:
# archive:
# driver: rclone
# driver_opts:
# remote: 'examplegdrive:archivebox'
# allow_other: 'true'
# vfs_cache_mode: full
# poll_interval: 0

package-lock.json (generated, 482 lines changed)

@ -1,23 +1,33 @@
{
"name": "archivebox",
"version": "0.7.3",
"version": "0.8.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "archivebox",
"version": "0.7.3",
"version": "0.8.0",
"license": "MIT",
"dependencies": {
"@postlight/parser": "^2.2.3",
"readability-extractor": "github:ArchiveBox/readability-extractor",
"single-file-cli": "^1.1.46"
"single-file-cli": "^1.1.54"
}
},
"node_modules/@asamuzakjp/dom-selector": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/@asamuzakjp/dom-selector/-/dom-selector-2.0.2.tgz",
"integrity": "sha512-x1KXOatwofR6ZAYzXRBL5wrdV0vwNxlTCK9NCuLqAzQYARqGcvFwiJA6A1ERuh+dgeA4Dxm3JBYictIes+SqUQ==",
"dependencies": {
"bidi-js": "^1.0.3",
"css-tree": "^2.3.1",
"is-potential-custom-element-name": "^1.0.1"
}
},
"node_modules/@babel/runtime-corejs2": {
"version": "7.23.7",
"resolved": "https://registry.npmjs.org/@babel/runtime-corejs2/-/runtime-corejs2-7.23.7.tgz",
"integrity": "sha512-JmMk2t1zGDNkvsY2MsLLksocjY+ufGzSk8UlcNcxzfrzAPu4nMx0HRFakzIg2bhcqQq6xBI2nUaW/sHoaYIHdQ==",
"version": "7.24.4",
"resolved": "https://registry.npmjs.org/@babel/runtime-corejs2/-/runtime-corejs2-7.24.4.tgz",
"integrity": "sha512-ZCKqyUKt/Coimg+3Kafu43yNetgYnTXzNbEGAgxc81J5sI0qFNbQ613w7PNny+SmijAmGVroL0GDvx5rG/JI5Q==",
"dependencies": {
"core-js": "^2.6.12",
"regenerator-runtime": "^0.14.0"
@ -168,9 +178,9 @@
}
},
"node_modules/@puppeteer/browsers": {
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/@puppeteer/browsers/-/browsers-1.8.0.tgz",
"integrity": "sha512-TkRHIV6k2D8OlUe8RtG+5jgOF/H98Myx0M6AOafC8DdNVOFiBSFa5cpRDtpm8LXOa9sVwe0+e6Q3FC56X/DZfg==",
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@puppeteer/browsers/-/browsers-2.0.0.tgz",
"integrity": "sha512-3PS82/5+tnpEaUWonjAFFvlf35QHF15xqyGd34GBa5oP5EPVfFXRsbSxIGYf1M+vZlqBZ3oxT1kRg9OYhtt8ng==",
"dependencies": {
"debug": "4.3.4",
"extract-zip": "2.0.1",
@ -184,7 +194,7 @@
"browsers": "lib/cjs/main-cli.js"
},
"engines": {
"node": ">=16.3.0"
"node": ">=18"
}
},
"node_modules/@tootallnate/quickjs-emscripten": {
@ -193,9 +203,9 @@
"integrity": "sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA=="
},
"node_modules/@types/node": {
"version": "20.10.6",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.10.6.tgz",
"integrity": "sha512-Vac8H+NlRNNlAmDfGUP7b5h/KA+AtWIzuXy0E6OyP8f1tCLYAtPvKRRDJjAPqhpCb0t6U2j7/xqAuLEebW2kiw==",
"version": "20.12.7",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.7.tgz",
"integrity": "sha512-wq0cICSkRLVaf3UGLMGItu/PtdY7oaXaI/RVU+xliKVOtRna3PRY57ZDfztpDL0n11vfymMUnXv8QwYCO7L1wg==",
"optional": true,
"dependencies": {
"undici-types": "~5.26.4"
@ -211,9 +221,9 @@
}
},
"node_modules/agent-base": {
"version": "7.1.0",
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.0.tgz",
"integrity": "sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg==",
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz",
"integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==",
"dependencies": {
"debug": "^4.3.4"
},
@ -304,14 +314,15 @@
"integrity": "sha512-NmWvPnx0F1SfrQbYwOi7OeaNGokp9XhzNioJ/CSBs8Qa4vxug81mhJEAVZwxXuBmYB5KDRfMq/F3RR0BIU7sWg=="
},
"node_modules/b4a": {
"version": "1.6.4",
"resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.4.tgz",
"integrity": "sha512-fpWrvyVHEKyeEvbKZTVOeZF3VSKKWtJxFIxX/jaVPf+cLbGUSitjb49pHLqPV2BUNNZ0LcoeEGfE/YCpyDYHIw=="
"version": "1.6.6",
"resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.6.tgz",
"integrity": "sha512-5Tk1HLk6b6ctmjIkAcU/Ujv/1WqiDl0F0JdRCR80VsOcUlHcu7pWeWRlOqQLHfDEsVx9YH/aif5AG4ehoCtTmg=="
},
"node_modules/balanced-match": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
"node_modules/bare-events": {
"version": "2.2.2",
"resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.2.2.tgz",
"integrity": "sha512-h7z00dWdG0PYOQEvChhOSWvOfkIKsdZGkWr083FgN/HyoQuebSew/cgirYqh9SCuy/hRvxc5Vy6Fw8xAmYHLkQ==",
"optional": true
},
"node_modules/base64-js": {
"version": "1.5.1",
@ -333,9 +344,9 @@
]
},
"node_modules/basic-ftp": {
"version": "5.0.4",
"resolved": "https://registry.npmjs.org/basic-ftp/-/basic-ftp-5.0.4.tgz",
"integrity": "sha512-8PzkB0arJFV4jJWSGOYR+OEic6aeKMu/osRhBULN6RY0ykby6LKhbmuQ5ublvaas5BOwboah5D87nrHyuh8PPA==",
"version": "5.0.5",
"resolved": "https://registry.npmjs.org/basic-ftp/-/basic-ftp-5.0.5.tgz",
"integrity": "sha512-4Bcg1P8xhUuqcii/S0Z9wiHIrQVPMermM1any+MX5GeGD7faD3/msQUDGLol9wOcz4/jbg/WJnGqoJF6LiBdtg==",
"engines": {
"node": ">=10.0.0"
}
@ -348,6 +359,14 @@
"tweetnacl": "^0.14.3"
}
},
"node_modules/bidi-js": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/bidi-js/-/bidi-js-1.0.3.tgz",
"integrity": "sha512-RKshQI1R3YQ+n9YJz2QQ147P66ELpa1FQEg20Dk8oW9t2KgLbpDLLp9aGZ7y8WHSshDknG0bknqGw5/tyCs5tw==",
"dependencies": {
"require-from-string": "^2.0.2"
}
},
"node_modules/bluebird": {
"version": "2.11.0",
"resolved": "https://registry.npmjs.org/bluebird/-/bluebird-2.11.0.tgz",
@ -358,15 +377,6 @@
"resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz",
"integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww=="
},
"node_modules/brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"dependencies": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
"node_modules/brotli": {
"version": "1.3.3",
"resolved": "https://registry.npmjs.org/brotli/-/brotli-1.3.3.tgz",
@ -446,12 +456,12 @@
}
},
"node_modules/chromium-bidi": {
"version": "0.4.33",
"resolved": "https://registry.npmjs.org/chromium-bidi/-/chromium-bidi-0.4.33.tgz",
"integrity": "sha512-IxoFM5WGQOIAd95qrSXzJUv4eXIrh+RvU3rwwqIiwYuvfE7U/Llj4fejbsJnjJMUYCuGtVQsY2gv7oGl4aTNSQ==",
"version": "0.5.8",
"resolved": "https://registry.npmjs.org/chromium-bidi/-/chromium-bidi-0.5.8.tgz",
"integrity": "sha512-blqh+1cEQbHBKmok3rVJkBlBxt9beKBgOsxbFgs7UJcoVbbeZ+K7+6liAsjgpc8l1Xd55cQUy14fXZdGSb4zIw==",
"dependencies": {
"mitt": "3.0.1",
"urlpattern-polyfill": "9.0.0"
"urlpattern-polyfill": "10.0.0"
},
"peerDependencies": {
"devtools-protocol": "*"
@ -497,11 +507,6 @@
"node": ">= 0.8"
}
},
"node_modules/concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="
},
"node_modules/core-js": {
"version": "2.6.12",
"resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.12.tgz",
@ -533,6 +538,18 @@
"nth-check": "~1.0.1"
}
},
"node_modules/css-tree": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.3.1.tgz",
"integrity": "sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==",
"dependencies": {
"mdn-data": "2.0.30",
"source-map-js": "^1.0.1"
},
"engines": {
"node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0"
}
},
"node_modules/css-what": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/css-what/-/css-what-2.1.3.tgz",
@ -542,14 +559,14 @@
}
},
"node_modules/cssstyle": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-3.0.0.tgz",
"integrity": "sha512-N4u2ABATi3Qplzf0hWbVCdjenim8F3ojEXpBDF5hBpjzW182MjNGLqfmQ0SkSPeQ+V86ZXgeH8aXj6kayd4jgg==",
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-4.0.1.tgz",
"integrity": "sha512-8ZYiJ3A/3OkDd093CBT/0UKDWry7ak4BdPTFP2+QEP7cmhouyq/Up709ASSj2cK02BbZiMgk7kYjZNS4QP5qrQ==",
"dependencies": {
"rrweb-cssom": "^0.6.0"
},
"engines": {
"node": ">=14"
"node": ">=18"
}
},
"node_modules/dashdash": {
@ -564,9 +581,9 @@
}
},
"node_modules/data-uri-to-buffer": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-6.0.1.tgz",
"integrity": "sha512-MZd3VlchQkp8rdend6vrx7MmVDJzSNTBvghvKjirLkD+WTChA3KUf0jkE68Q4UyctNqI11zZO9/x2Yx+ub5Cvg==",
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-6.0.2.tgz",
"integrity": "sha512-7hvf7/GW8e86rW0ptuwS3OcBGDjIi6SZva7hCyWC0yYry2cOPmLIjXAUHI6DK2HsnwJd9ifmt57i8eV2n4YNpw==",
"engines": {
"node": ">= 14"
}
@ -657,9 +674,9 @@
}
},
"node_modules/devtools-protocol": {
"version": "0.0.1203626",
"resolved": "https://registry.npmjs.org/devtools-protocol/-/devtools-protocol-0.0.1203626.tgz",
"integrity": "sha512-nEzHZteIUZfGCZtTiS1fRpC8UZmsfD1SiyPvaUNvS13dvKf666OAm8YTi0+Ca3n1nLEyu49Cy4+dPWpaHFJk9g=="
"version": "0.0.1232444",
"resolved": "https://registry.npmjs.org/devtools-protocol/-/devtools-protocol-0.0.1232444.tgz",
"integrity": "sha512-pM27vqEfxSxRkTMnF+XCmxSEb6duO5R+t8A9DEEJgy4Wz2RVanje2mmj99B6A3zv2r/qGfYlOvYznUhuokizmg=="
},
"node_modules/difflib": {
"version": "0.2.6",
@ -696,9 +713,9 @@
"integrity": "sha512-3VdM/SXBZX2omc9JF9nOPCtDaYQ67BGp5CoLpIQlO2KCAPETs8TcDHacF26jXadGbvUteZzRTeos2fhID5+ucQ=="
},
"node_modules/dompurify": {
"version": "3.0.7",
"resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.0.7.tgz",
"integrity": "sha512-BViYTZoqP3ak/ULKOc101y+CtHDUvBsVgSxIF1ku0HmK6BRf+C03MC+tArMvOPtVtZp83DDh5puywKDu4sbVjQ=="
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.1.0.tgz",
"integrity": "sha512-yoU4rhgPKCo+p5UrWWWNKiIq+ToGqmVVhk0PmMYBK4kRsR3/qhemNFL8f6CFmBd4gMwm3F4T7HBoydP5uY07fA=="
},
"node_modules/domutils": {
"version": "1.5.1",
@ -726,6 +743,11 @@
"safer-buffer": "^2.1.0"
}
},
"node_modules/ecc-jsbn/node_modules/jsbn": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
"integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg=="
},
"node_modules/ellipsize": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/ellipsize/-/ellipsize-0.1.0.tgz",
@ -750,9 +772,9 @@
"integrity": "sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w=="
},
"node_modules/escalade": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz",
"integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==",
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz",
"integrity": "sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==",
"engines": {
"node": ">=6"
}
@ -890,31 +912,26 @@
}
},
"node_modules/fs-extra": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz",
"integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==",
"version": "11.2.0",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.2.0.tgz",
"integrity": "sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==",
"dependencies": {
"graceful-fs": "^4.2.0",
"jsonfile": "^4.0.0",
"universalify": "^0.1.0"
"jsonfile": "^6.0.1",
"universalify": "^2.0.0"
},
"engines": {
"node": ">=6 <7 || >=8"
"node": ">=14.14"
}
},
"node_modules/fs-extra/node_modules/universalify": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==",
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz",
"integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==",
"engines": {
"node": ">= 4.0.0"
"node": ">= 10.0.0"
}
},
"node_modules/fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
"integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="
},
"node_modules/get-caller-file": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
@ -938,14 +955,14 @@
}
},
"node_modules/get-uri": {
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/get-uri/-/get-uri-6.0.2.tgz",
"integrity": "sha512-5KLucCJobh8vBY1K07EFV4+cPZH3mrV9YeAruUseCQKHB58SGjjT2l9/eA9LD082IiuMjSlFJEcdJ27TXvbZNw==",
"version": "6.0.3",
"resolved": "https://registry.npmjs.org/get-uri/-/get-uri-6.0.3.tgz",
"integrity": "sha512-BzUrJBS9EcUb4cFol8r4W3v1cPsSyajLSthNkz5BxbpDcHN5tIrM10E2eNvfnvBn3DaT3DUgx0OpsBKkaOpanw==",
"dependencies": {
"basic-ftp": "^5.0.2",
"data-uri-to-buffer": "^6.0.0",
"data-uri-to-buffer": "^6.0.2",
"debug": "^4.3.4",
"fs-extra": "^8.1.0"
"fs-extra": "^11.2.0"
},
"engines": {
"node": ">= 14"
@ -959,25 +976,6 @@
"assert-plus": "^1.0.0"
}
},
"node_modules/glob": {
"version": "7.2.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
"dependencies": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.1.1",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
},
"engines": {
"node": "*"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/graceful-fs": {
"version": "4.2.11",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
@ -1034,9 +1032,9 @@
}
},
"node_modules/http-proxy-agent": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.0.tgz",
"integrity": "sha512-+ZT+iBxVUQ1asugqnD6oWoRiS25AkjNfG085dKJGtGxkdwLQrMKU5wJr2bOOFAXzKcTuqq+7fZlTMgG3SRfIYQ==",
"version": "7.0.2",
"resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz",
"integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==",
"dependencies": {
"agent-base": "^7.1.0",
"debug": "^4.3.4"
@ -1059,9 +1057,9 @@
}
},
"node_modules/https-proxy-agent": {
"version": "7.0.2",
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.2.tgz",
"integrity": "sha512-NmLNjm6ucYwtcUmL7JQC1ZQ57LmHP4lT15FQ8D61nak1rO6DH+fz5qNK2Ap5UN4ZapYICE3/0KodcLYSPsPbaA==",
"version": "7.0.4",
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz",
"integrity": "sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==",
"dependencies": {
"agent-base": "^7.0.2",
"debug": "4"
@ -1105,24 +1103,22 @@
"resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz",
"integrity": "sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ=="
},
"node_modules/inflight": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
"integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
"dependencies": {
"once": "^1.3.0",
"wrappy": "1"
}
},
"node_modules/inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
},
"node_modules/ip": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/ip/-/ip-1.1.8.tgz",
"integrity": "sha512-PuExPYUiu6qMBQb4l06ecm6T6ujzhmh+MeJcW9wa89PoAz5pvd4zPgN5WJV104mb6S2T1AwNIAaB70JNrLQWhg=="
"node_modules/ip-address": {
"version": "9.0.5",
"resolved": "https://registry.npmjs.org/ip-address/-/ip-address-9.0.5.tgz",
"integrity": "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==",
"dependencies": {
"jsbn": "1.1.0",
"sprintf-js": "^1.1.3"
},
"engines": {
"node": ">= 12"
}
},
"node_modules/is-fullwidth-code-point": {
"version": "3.0.0",
@ -1153,16 +1149,17 @@
"integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g=="
},
"node_modules/jsbn": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
"integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg=="
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-1.1.0.tgz",
"integrity": "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A=="
},
"node_modules/jsdom": {
"version": "23.0.1",
"resolved": "https://registry.npmjs.org/jsdom/-/jsdom-23.0.1.tgz",
"integrity": "sha512-2i27vgvlUsGEBO9+/kJQRbtqtm+191b5zAZrU/UezVmnC2dlDAFLgDYJvAEi94T4kjsRKkezEtLQTgsNEsW2lQ==",
"version": "23.2.0",
"resolved": "https://registry.npmjs.org/jsdom/-/jsdom-23.2.0.tgz",
"integrity": "sha512-L88oL7D/8ufIES+Zjz7v0aes+oBMh2Xnh3ygWvL0OaICOomKEPKuPnIfBJekiXr+BHbbMjrWn/xqrDQuxFTeyA==",
"dependencies": {
"cssstyle": "^3.0.0",
"@asamuzakjp/dom-selector": "^2.0.1",
"cssstyle": "^4.0.1",
"data-urls": "^5.0.0",
"decimal.js": "^10.4.3",
"form-data": "^4.0.0",
@ -1170,7 +1167,6 @@
"http-proxy-agent": "^7.0.0",
"https-proxy-agent": "^7.0.2",
"is-potential-custom-element-name": "^1.0.1",
"nwsapi": "^2.2.7",
"parse5": "^7.1.2",
"rrweb-cssom": "^0.6.0",
"saxes": "^6.0.0",
@ -1181,7 +1177,7 @@
"whatwg-encoding": "^3.1.1",
"whatwg-mimetype": "^4.0.0",
"whatwg-url": "^14.0.0",
"ws": "^8.14.2",
"ws": "^8.16.0",
"xml-name-validator": "^5.0.0"
},
"engines": {
@ -1235,13 +1231,24 @@
"integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA=="
},
"node_modules/jsonfile": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz",
"integrity": "sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==",
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz",
"integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==",
"dependencies": {
"universalify": "^2.0.0"
},
"optionalDependencies": {
"graceful-fs": "^4.1.6"
}
},
"node_modules/jsonfile/node_modules/universalify": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz",
"integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==",
"engines": {
"node": ">= 10.0.0"
}
},
"node_modules/jsprim": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/jsprim/-/jsprim-2.0.2.tgz",
@ -1375,6 +1382,11 @@
"node": ">=12"
}
},
"node_modules/mdn-data": {
"version": "2.0.30",
"resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz",
"integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA=="
},
"node_modules/mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
@ -1394,17 +1406,6 @@
"node": ">= 0.6"
}
},
"node_modules/minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dependencies": {
"brace-expansion": "^1.1.7"
},
"engines": {
"node": "*"
}
},
"node_modules/mitt": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/mitt/-/mitt-3.0.1.tgz",
@ -1461,9 +1462,9 @@
}
},
"node_modules/nwsapi": {
"version": "2.2.7",
"resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.7.tgz",
"integrity": "sha512-ub5E4+FBPKwAZx0UwIQOjYWGHTEq5sPqHQNRN8Z9e4A7u3Tj1weLJsL59yH9vmvqEtBHaOmT6cYQKIZOxp35FQ=="
"version": "2.2.9",
"resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.9.tgz",
"integrity": "sha512-2f3F0SEEer8bBu0dsNCFF50N0cTThV1nWFYcEYFZttdW0lDAoybv9cQoK7X7/68Z89S7FoRrVjP1LPX4XRf9vg=="
},
"node_modules/oauth-sign": {
"version": "0.9.0",
@ -1500,12 +1501,11 @@
}
},
"node_modules/pac-resolver": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/pac-resolver/-/pac-resolver-7.0.0.tgz",
"integrity": "sha512-Fd9lT9vJbHYRACT8OhCbZBbxr6KRSawSovFpy8nDGshaK99S/EBhVIHp9+crhxrsZOuvLpgL1n23iyPg6Rl2hg==",
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/pac-resolver/-/pac-resolver-7.0.1.tgz",
"integrity": "sha512-5NPgf87AT2STgwa2ntRMr45jTKrYBGkVU36yT0ig/n/GMAa3oPqhZfIQ2kMEimReg0+t9kZViDVZ83qfVUlckg==",
"dependencies": {
"degenerator": "^5.0.0",
"ip": "^1.1.8",
"netmask": "^2.0.2"
},
"engines": {
@ -1539,14 +1539,6 @@
"url": "https://github.com/fb55/entities?sponsor=1"
}
},
"node_modules/path-is-absolute": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
"integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/pend": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz",
@ -1648,39 +1640,19 @@
}
},
"node_modules/puppeteer-core": {
"version": "21.5.2",
"resolved": "https://registry.npmjs.org/puppeteer-core/-/puppeteer-core-21.5.2.tgz",
"integrity": "sha512-v4T0cWnujSKs+iEfmb8ccd7u4/x8oblEyKqplqKnJ582Kw8PewYAWvkH4qUWhitN3O2q9RF7dzkvjyK5HbzjLA==",
"version": "22.0.0",
"resolved": "https://registry.npmjs.org/puppeteer-core/-/puppeteer-core-22.0.0.tgz",
"integrity": "sha512-S3s91rLde0A86PWVeNY82h+P0fdS7CTiNWAicCVH/bIspRP4nS2PnO5j+VTFqCah0ZJizGzpVPAmxVYbLxTc9w==",
"dependencies": {
"@puppeteer/browsers": "1.8.0",
"chromium-bidi": "0.4.33",
"@puppeteer/browsers": "2.0.0",
"chromium-bidi": "0.5.8",
"cross-fetch": "4.0.0",
"debug": "4.3.4",
"devtools-protocol": "0.0.1203626",
"ws": "8.14.2"
"devtools-protocol": "0.0.1232444",
"ws": "8.16.0"
},
"engines": {
"node": ">=16.13.2"
}
},
"node_modules/puppeteer-core/node_modules/ws": {
"version": "8.14.2",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.14.2.tgz",
"integrity": "sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g==",
"engines": {
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": ">=5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
"node": ">=18"
}
},
"node_modules/qs": {
@ -1703,8 +1675,7 @@
},
"node_modules/readability-extractor": {
"version": "0.0.11",
"resolved": "git+ssh://git@github.com/ArchiveBox/readability-extractor.git#2fb4689a65c6433036453dcbee7a268840604eb9",
"license": "MIT",
"resolved": "git+ssh://git@github.com/ArchiveBox/readability-extractor.git#057f2046f9535cfc6df7b8d551aaad32a9e6226c",
"dependencies": {
"@mozilla/readability": "^0.5.0",
"dompurify": "^3.0.6",
@ -1740,25 +1711,19 @@
"node": ">=0.10.0"
}
},
"node_modules/require-from-string": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz",
"integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/requires-port": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
"integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
},
"node_modules/rimraf": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
"integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
"dependencies": {
"glob": "^7.1.3"
},
"bin": {
"rimraf": "bin.js"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/rrweb-cssom": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.6.0.tgz",
@ -1800,9 +1765,9 @@
}
},
"node_modules/selenium-webdriver": {
"version": "4.15.0",
"resolved": "https://registry.npmjs.org/selenium-webdriver/-/selenium-webdriver-4.15.0.tgz",
"integrity": "sha512-BNG1bq+KWiBGHcJ/wULi0eKY0yaDqFIbEmtbsYJmfaEghdCkXBsx1akgOorhNwjBipOr0uwpvNXqT6/nzl+zjg==",
"version": "4.17.0",
"resolved": "https://registry.npmjs.org/selenium-webdriver/-/selenium-webdriver-4.17.0.tgz",
"integrity": "sha512-e2E+2XBlGepzwgFbyQfSwo9Cbj6G5fFfs9MzAS00nC99EewmcS2rwn2MwtgfP7I5p1e7DYv4HQJXtWedsu6DvA==",
"dependencies": {
"jszip": "^3.10.1",
"tmp": "^0.2.1",
@ -1818,16 +1783,16 @@
"integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA=="
},
"node_modules/single-file-cli": {
"version": "1.1.46",
"resolved": "https://registry.npmjs.org/single-file-cli/-/single-file-cli-1.1.46.tgz",
"integrity": "sha512-+vFj0a5Y4ESqpMwH0T6738pg8ZA9KVhhl6OlIOsicamGNU9DnMa+q9dL1S2KnLWHoauKjU0BThhR/YKUleJSxw==",
"version": "1.1.54",
"resolved": "https://registry.npmjs.org/single-file-cli/-/single-file-cli-1.1.54.tgz",
"integrity": "sha512-wnVPg7BklhswwFVrtuFXbmluI4piHxg2dC0xATxYTeXAld6PnRPlnp7ufallRKArjFBZdP2u+ihMkOIp7A38XA==",
"dependencies": {
"file-url": "3.0.0",
"iconv-lite": "0.6.3",
"jsdom": "23.0.0",
"puppeteer-core": "21.5.2",
"selenium-webdriver": "4.15.0",
"single-file-core": "1.3.15",
"jsdom": "24.0.0",
"puppeteer-core": "22.0.0",
"selenium-webdriver": "4.17.0",
"single-file-core": "1.3.24",
"strong-data-uri": "1.0.6",
"yargs": "17.7.2"
},
@ -1847,11 +1812,11 @@
}
},
"node_modules/single-file-cli/node_modules/jsdom": {
"version": "23.0.0",
"resolved": "https://registry.npmjs.org/jsdom/-/jsdom-23.0.0.tgz",
"integrity": "sha512-cbL/UCtohJguhFC7c2/hgW6BeZCNvP7URQGnx9tSJRYKCdnfbfWOrtuLTMfiB2VxKsx5wPHVsh/J0aBy9lIIhQ==",
"version": "24.0.0",
"resolved": "https://registry.npmjs.org/jsdom/-/jsdom-24.0.0.tgz",
"integrity": "sha512-UDS2NayCvmXSXVP6mpTj+73JnNQadZlr9N68189xib2tx5Mls7swlTNao26IoHv46BZJFvXygyRtyXd1feAk1A==",
"dependencies": {
"cssstyle": "^3.0.0",
"cssstyle": "^4.0.1",
"data-urls": "^5.0.0",
"decimal.js": "^10.4.3",
"form-data": "^4.0.0",
@ -1870,14 +1835,14 @@
"whatwg-encoding": "^3.1.1",
"whatwg-mimetype": "^4.0.0",
"whatwg-url": "^14.0.0",
"ws": "^8.14.2",
"ws": "^8.16.0",
"xml-name-validator": "^5.0.0"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"canvas": "^3.0.0"
"canvas": "^2.11.2"
},
"peerDependenciesMeta": {
"canvas": {
@ -1909,9 +1874,9 @@
}
},
"node_modules/single-file-core": {
"version": "1.3.15",
"resolved": "https://registry.npmjs.org/single-file-core/-/single-file-core-1.3.15.tgz",
"integrity": "sha512-/YNpHBwASWNxmSmZXz0xRolmXf0+PGAbwpVrwn6A8tYeuAdezxxde5RYTTQ7V4Zv68+H4JMhE2DwCRV0sVUGNA=="
"version": "1.3.24",
"resolved": "https://registry.npmjs.org/single-file-core/-/single-file-core-1.3.24.tgz",
"integrity": "sha512-1B256mKBbNV8jXAV+hRyEv0aMa7tn0C0Ci+zx7Ya4ZXZB3b9/1MgKsB/fxVwDiL28WJSU0pxzh8ftIYubCNn9w=="
},
"node_modules/smart-buffer": {
"version": "4.2.0",
@ -1923,24 +1888,24 @@
}
},
"node_modules/socks": {
"version": "2.7.1",
"resolved": "https://registry.npmjs.org/socks/-/socks-2.7.1.tgz",
"integrity": "sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ==",
"version": "2.8.3",
"resolved": "https://registry.npmjs.org/socks/-/socks-2.8.3.tgz",
"integrity": "sha512-l5x7VUUWbjVFbafGLxPWkYsHIhEvmF85tbIeFZWc8ZPtoMyybuEhL7Jye/ooC4/d48FgOjSJXgsF/AJPYCW8Zw==",
"dependencies": {
"ip": "^2.0.0",
"ip-address": "^9.0.5",
"smart-buffer": "^4.2.0"
},
"engines": {
"node": ">= 10.13.0",
"node": ">= 10.0.0",
"npm": ">= 3.0.0"
}
},
"node_modules/socks-proxy-agent": {
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-8.0.2.tgz",
"integrity": "sha512-8zuqoLv1aP/66PHF5TqwJ7Czm3Yv32urJQHrVyhD7mmA6d61Zv8cIXQYPTWwmg6qlupnPvs/QKDmfa4P/qct2g==",
"version": "8.0.3",
"resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-8.0.3.tgz",
"integrity": "sha512-VNegTZKhuGq5vSD6XNKlbqWhyt/40CgoEw8XxD6dhnm8Jq9IEa3nIa4HwnM8XOqU0CdB0BwWVXusqiFXfHB3+A==",
"dependencies": {
"agent-base": "^7.0.2",
"agent-base": "^7.1.1",
"debug": "^4.3.4",
"socks": "^2.7.1"
},
@ -1948,11 +1913,6 @@
"node": ">= 14"
}
},
"node_modules/socks/node_modules/ip": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ip/-/ip-2.0.0.tgz",
"integrity": "sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ=="
},
"node_modules/source-map": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
@ -1962,6 +1922,19 @@
"node": ">=0.10.0"
}
},
"node_modules/source-map-js": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz",
"integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/sprintf-js": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz",
"integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA=="
},
"node_modules/sshpk": {
"version": "1.18.0",
"resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.18.0.tgz",
@ -1986,6 +1959,11 @@
"node": ">=0.10.0"
}
},
"node_modules/sshpk/node_modules/jsbn": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
"integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg=="
},
"node_modules/stream-length": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/stream-length/-/stream-length-1.0.2.tgz",
@ -1995,12 +1973,15 @@
}
},
"node_modules/streamx": {
"version": "2.15.6",
"resolved": "https://registry.npmjs.org/streamx/-/streamx-2.15.6.tgz",
"integrity": "sha512-q+vQL4AAz+FdfT137VF69Cc/APqUbxy+MDOImRrMvchJpigHj9GksgDU2LYbO9rx7RX6osWgxJB2WxhYv4SZAw==",
"version": "2.16.1",
"resolved": "https://registry.npmjs.org/streamx/-/streamx-2.16.1.tgz",
"integrity": "sha512-m9QYj6WygWyWa3H1YY69amr4nVgy61xfjys7xO7kviL5rfIEc2naf+ewFiOA+aEJD7y0JO3h2GoiUv4TDwEGzQ==",
"dependencies": {
"fast-fifo": "^1.1.0",
"queue-tick": "^1.0.1"
},
"optionalDependencies": {
"bare-events": "^2.2.0"
}
},
"node_modules/string_decoder": {
@ -2067,9 +2048,9 @@
}
},
"node_modules/tar-stream": {
"version": "3.1.6",
"resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.6.tgz",
"integrity": "sha512-B/UyjYwPpMBv+PaFSWAmtYjwdrlEaZQEhMIBFNC5oEG8lpiW8XjcSdmEaClj28ArfKScKHs2nshz3k2le6crsg==",
"version": "3.1.7",
"resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz",
"integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==",
"dependencies": {
"b4a": "^1.6.4",
"fast-fifo": "^1.2.0",
@ -2082,14 +2063,11 @@
"integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg=="
},
"node_modules/tmp": {
"version": "0.2.1",
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz",
"integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==",
"dependencies": {
"rimraf": "^3.0.0"
},
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.3.tgz",
"integrity": "sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w==",
"engines": {
"node": ">=8.17.0"
"node": ">=14.14"
}
},
"node_modules/tough-cookie": {
@ -2125,9 +2103,9 @@
"integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q=="
},
"node_modules/turndown": {
"version": "7.1.2",
"resolved": "https://registry.npmjs.org/turndown/-/turndown-7.1.2.tgz",
"integrity": "sha512-ntI9R7fcUKjqBP6QU8rBK2Ehyt8LAzt3UBT9JR9tgo6GtuKvyUzpayWmeMKJw1DPdXzktvtIT8m2mVXz+bL/Qg==",
"version": "7.1.3",
"resolved": "https://registry.npmjs.org/turndown/-/turndown-7.1.3.tgz",
"integrity": "sha512-Z3/iJ6IWh8VBiACWQJaA5ulPQE5E1QwvBHj00uGzdQxdRnd8fh1DPqNOJqzQDu6DkOstORrtXzf/9adB+vMtEA==",
"dependencies": {
"domino": "^2.1.6"
}
@ -2178,9 +2156,9 @@
}
},
"node_modules/urlpattern-polyfill": {
"version": "9.0.0",
"resolved": "https://registry.npmjs.org/urlpattern-polyfill/-/urlpattern-polyfill-9.0.0.tgz",
"integrity": "sha512-WHN8KDQblxd32odxeIgo83rdVDE2bvdkb86it7bMhYZwWKJz0+O0RK/eZiHYnM+zgt/U7hAHOlCQGfjjvSkw2g=="
"version": "10.0.0",
"resolved": "https://registry.npmjs.org/urlpattern-polyfill/-/urlpattern-polyfill-10.0.0.tgz",
"integrity": "sha512-H/A06tKD7sS1O1X2SshBVeA5FLycRpjqiBeqGKmBwBDBy28EnRjORxTNe269KSSr5un5qyWi1iL61wLxpd+ZOg=="
},
"node_modules/util-deprecate": {
"version": "1.0.2",

631
pdm.lock

File diff suppressed because it is too large

@ -1 +1 @@
Subproject commit 5323fc773d33ef3f219c35c946f3b353b1251d37
Subproject commit 1380be7e4ef156d85957dfef8c6d154ef9880578

View file

@ -1,45 +1,50 @@
[project]
name = "archivebox"
version = "0.7.3"
version = "0.8.0"
package-dir = "archivebox"
requires-python = ">=3.10,<3.13"
platform = "py3-none-any"
description = "Self-hosted internet archiving solution."
authors = [
{name = "Nick Sweeting", email = "pyproject.toml@archivebox.io"},
]
authors = [{name = "Nick Sweeting", email = "pyproject.toml@archivebox.io"}]
license = {text = "MIT"}
readme = "README.md"
package-dir = "archivebox"
requires-python = ">=3.10,<3.12"
# pdm install
# pdm update --unconstrained
dependencies = [
# pdm update [--unconstrained]
"ipython>5.0.0",
"requests>=2.24.0",
"croniter>=0.3.34",
"dateparser>=1.0.0",
<<<<<<< HEAD
=======
"django-extensions>=3.2.3",
# Base Framework and Language Dependencies
"setuptools>=69.5.1",
"django>=4.2.0,<5.0",
"setuptools>=69.0.3",
"feedparser>=6.0.11",
"ipython>5.0.0",
"mypy-extensions>=0.4.3",
>>>>>>> dev
"python-crontab>=2.5.1",
"django>=3.1.3,<3.2",
"django-extensions>=3.0.3",
"django-ninja>=1.1.0",
"django-solo>=2.0.0",
"django-extensions>=3.2.3",
"mypy-extensions>=1.0.0",
# Python Helper Libraries
"requests>=2.31.0",
"dateparser>=1.0.0",
"feedparser>=6.0.11",
"w3lib>=1.22.0",
"yt-dlp>=2024.3.10",
# don't add playwright because packages without sdists cause trouble on many build systems that refuse to install wheel-only packages
<<<<<<< HEAD
# "playwright>=1.39.0; platform_machine != 'armv7l'",
"mypy-extensions>=0.4.3",
# "django-stubs-ext>=4.2.7",
=======
"playwright>=1.39.0; platform_machine != 'armv7l'",
>>>>>>> dev
# Feature-Specific Dependencies
"python-crontab>=2.5.1", # for: archivebox schedule
"croniter>=0.3.34", # for: archivebox schedule
"ipython>5.0.0", # for: archivebox shell
# Extractor Dependencies
"yt-dlp>=2024.4.9", # for: media
"playwright>=1.43.0; platform_machine != 'armv7l'", # WARNING: playwright doesn't have any sdist, causes trouble on build systems that refuse to install wheel-only packages
# TODO: add more extractors
# - gallery-dl
# - scihubdl
# - See Github issues for more...
]
homepage = "https://github.com/ArchiveBox/ArchiveBox"
repository = "https://github.com/ArchiveBox/ArchiveBox"
documentation = "https://github.com/ArchiveBox/ArchiveBox/wiki"
keywords = ["internet archiving", "web archiving", "digipres", "warc", "preservation", "backups", "archiving", "web", "bookmarks", "puppeteer", "browser", "download"]
classifiers = [
"Development Status :: 4 - Beta",
"Environment :: Console",
@ -72,45 +77,51 @@ classifiers = [
"Topic :: Utilities",
"Typing :: Typed",
]
# dynamic = ["version"] # TODO: programatticaly fetch version from package.json at build time
# pdm lock --group=':all'
# pdm install -G:all
# pdm update --group=':all' --unconstrained
[project.optional-dependencies]
# pdm update [--group=':all'] [--unconstrained]
sonic = [
# echo "deb [signed-by=/usr/share/keyrings/valeriansaliou_sonic.gpg] https://packagecloud.io/valeriansaliou/sonic/debian/ bookworm main" > /etc/apt/sources.list.d/valeriansaliou_sonic.list
# curl -fsSL https://packagecloud.io/valeriansaliou/sonic/gpgkey | gpg --dearmor -o /usr/share/keyrings/valeriansaliou_sonic.gpg
# apt install sonic
"sonic-client>=0.0.5",
"sonic-client>=1.0.0",
]
ldap = [
# apt install libldap2-dev libsasl2-dev python3-ldap
"python-ldap>=3.4.3",
"django-auth-ldap>=4.1.0",
]
# playwright = [
# platform_machine isn't respected by pdm export -o requirements.txt, this breaks arm/v7 (see the marker sketch after this block)
# "playwright>=1.39.0; platform_machine != 'armv7l'",
# ]
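For reference, platform_machine != 'armv7l' is a standard PEP 508 environment marker; the problem noted above is that pdm export -o requirements.txt flattens such markers away, so the exported requirements try to pull wheel-only packages like playwright on arm/v7 hosts too. A minimal sketch of checking how that marker evaluates on the current machine, assuming the third-party packaging library is installed (an illustration, not part of ArchiveBox):

# evaluate a PEP 508 environment marker on the machine running this script
from packaging.markers import Marker

marker = Marker("platform_machine != 'armv7l'")
# True on x86_64/arm64 hosts, False on 32-bit ARMv7 boards, which is why
# wheel-only packages like playwright are skipped there when the marker is respected
print(marker.evaluate())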
# pdm lock --group=':all' --dev
# pdm install -G:all --dev
# pdm update --dev [--unconstrained]
# pdm update --dev --unconstrained
[tool.pdm.dev-dependencies]
dev = [
# building
build = [
"setuptools>=69.5.1",
"pip",
"wheel",
"pdm",
"homebrew-pypi-poet>=0.10.0",
# documentation
"homebrew-pypi-poet>=0.10.0", # for: generating archivebox.rb brewfile list of python packages
]
docs = [
"recommonmark",
"sphinx",
"sphinx-rtd-theme",
# debugging
]
debug = [
"django-debug-toolbar",
"djdt_flamegraph",
"ipdb",
# testing
]
test = [
"pdm[pytest]",
"pytest",
# linting
]
lint = [
"flake8",
"mypy",
"django-stubs[compatible-mypy]>=4.2.7",
@ -118,6 +129,13 @@ dev = [
"pudb>=2024.1",
]
[build-system]
requires = ["pdm-backend"]
build-backend = "pdm.backend"
[project.scripts]
archivebox = "archivebox.cli:main"
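The console-script entry above means the installer generates an archivebox executable that imports archivebox.cli and calls its main(). A minimal sketch of what such an entry point can look like, assuming a main(args=None) signature (the real archivebox.cli module is considerably more involved):

# illustrative entry-point sketch only, not the actual archivebox/cli/__init__.py
import sys

def main(args=None):
    # a real CLI would parse subcommands (add, schedule, shell, ...) and dispatch here
    args = sys.argv[1:] if args is None else args
    print("archivebox invoked with:", args)
    return 0

if __name__ == "__main__":
    sys.exit(main())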
[tool.pyright]
include = ["archivebox"]
exclude = ["data", "data2", "data3", "data4", "data5", "pip_dist", "brew_dist", "dist", "vendor", "migrations", "tests"]
@ -134,7 +152,6 @@ implicit_optional = true
django_settings_module = "core.settings"
strict_settings = false
[tool.pdm.scripts]
lint = "./bin/lint.sh"
test = "./bin/test.sh"
@ -143,13 +160,6 @@ test = "./bin/test.sh"
[tool.pytest.ini_options]
testpaths = [ "tests" ]
[project.scripts]
archivebox = "archivebox.cli:main"
[build-system]
requires = ["pdm-backend"]
build-backend = "pdm.backend"
[project.urls]
Homepage = "https://github.com/ArchiveBox/ArchiveBox"

View file

@ -1,54 +1,64 @@
# This file is @generated by PDM.
# Please do not edit it manually.
asgiref==3.7.2
annotated-types==0.6.0
asgiref==3.8.1
asttokens==2.4.1
brotli==1.1.0; implementation_name == "cpython"
brotlicffi==1.1.0.0; implementation_name != "cpython"
certifi==2023.11.17
certifi==2024.2.2
cffi==1.16.0; implementation_name != "cpython"
charset-normalizer==3.3.2
colorama==0.4.6; sys_platform == "win32"
croniter==2.0.1
croniter==2.0.5
dateparser==1.2.0
decorator==5.1.1
django==3.1.14
django-extensions==3.1.5
django-solo==2.0.0
exceptiongroup==1.2.0; python_version < "3.11"
django==4.2.11
django-auth-ldap==4.8.0
django-extensions==3.2.3
django-ninja==1.1.0
exceptiongroup==1.2.1; python_version < "3.11"
executing==2.0.1
idna==3.6
ipython==8.18.1
feedparser==6.0.11
greenlet==3.0.3; platform_machine != "armv7l"
idna==3.7
ipython==8.23.0
jedi==0.19.1
matplotlib-inline==0.1.6
matplotlib-inline==0.1.7
mutagen==1.47.0
mypy-extensions==1.0.0
parso==0.8.3
pexpect==4.9.0; sys_platform != "win32"
parso==0.8.4
pexpect==4.9.0; sys_platform != "win32" and sys_platform != "emscripten"
playwright==1.43.0; platform_machine != "armv7l"
prompt-toolkit==3.0.43
ptyprocess==0.7.0; sys_platform != "win32"
ptyprocess==0.7.0; sys_platform != "win32" and sys_platform != "emscripten"
pure-eval==0.2.2
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycparser==2.21; implementation_name != "cpython"
pycryptodomex==3.19.1
pyasn1==0.6.0
pyasn1-modules==0.4.0
pycparser==2.22; implementation_name != "cpython"
pycryptodomex==3.20.0
pydantic==2.7.1
pydantic-core==2.18.2
pyee==11.1.0; platform_machine != "armv7l"
pygments==2.17.2
python-crontab==3.0.0
python-dateutil==2.8.2
pytz==2023.3.post1
regex==2023.12.25
python-dateutil==2.9.0.post0
python-ldap==3.4.4
pytz==2024.1
regex==2024.4.16
requests==2.31.0
setuptools==69.0.3
setuptools==69.5.1
sgmllib3k==1.0.0
six==1.16.0
sonic-client==1.0.0
sqlparse==0.4.4
sqlparse==0.5.0
stack-data==0.6.3
traitlets==5.14.1
typing-extensions==4.9.0; python_version < "3.11"
tzdata==2023.4; platform_system == "Windows"
traitlets==5.14.3
typing-extensions==4.11.0
tzdata==2024.1; sys_platform == "win32" or platform_system == "Windows"
tzlocal==5.2
urllib3==2.1.0
urllib3==2.2.1
w3lib==2.1.2
wcwidth==0.2.12
wcwidth==0.2.13
websockets==12.0
yt-dlp==2023.12.30
yt-dlp==2024.4.9