fix sphinx docs build

Nick Sweeting 2024-11-12 22:20:11 -08:00
parent 840f831572
commit 57852fd89e
5 changed files with 662 additions and 577 deletions


@@ -1,132 +1,132 @@
__package__ = 'abx.archivebox'

import importlib
from typing import Dict, Set, Any, TYPE_CHECKING

from benedict import benedict

from django.conf import settings
from django.utils import timezone

import abx
@abx.hookimpl
def get_or_create_snapshot(crawl, url, config):
    pass


@abx.hookimpl
def update_crawl_schedule_next_run_at(crawl_schedule, next_run_at):
    pass


@abx.hookimpl
def create_crawl_copy(crawl_to_copy, schedule):
    pass


@abx.hookimpl
def create_crawl(seed, depth, tags_str, persona, created_by, config, schedule):
    pass
def create_crawl_from_ui_action(urls, extractor, credentials, depth, tags_str, persona, created_by, crawl_config):
    if seed_is_remote(urls, extractor, credentials):
        # user's seed is a remote source that will provide the urls (e.g. RSS feed URL, Pocket API, etc.)
        uri, extractor, credentials = abx.archivebox.effects.check_remote_seed_connection(urls, extractor, credentials, created_by)
    else:
        # user's seed is some raw text they provided to parse for urls, save it to a file then load the file as a Seed
        uri = abx.archivebox.writes.write_raw_urls_to_local_file(urls, extractor, tags_str, created_by)  # file:///data/sources/some_import.txt
    seed = abx.archivebox.writes.get_or_create_seed(uri=uri, extractor=extractor, credentials=credentials, created_by=created_by)
    # abx.archivebox.events.on_seed_created(seed)

    crawl = abx.archivebox.writes.create_crawl(seed=seed, depth=depth, tags_str=tags_str, persona=persona, created_by=created_by, config=crawl_config, schedule=None)
    abx.archivebox.events.on_crawl_created(crawl)


@abx.hookimpl(specname='on_crawl_schedule_tick')
def create_crawl_from_crawlschedule_if_due(crawl_schedule):
    # make sure it's not too early to run this scheduled import (makes this function idempotent / safe to call multiple times / every second)
    if timezone.now() < crawl_schedule.next_run_at:
        # it's not time to run it yet, wait for the next tick
        return
    else:
        # we're going to run it now, bump the next run time so that no one else runs it at the same time as us
        abx.archivebox.writes.update_crawl_schedule_next_run_at(crawl_schedule, next_run_at=crawl_schedule.next_run_at + crawl_schedule.interval)

    crawl_to_copy = None
    try:
        crawl_to_copy = crawl_schedule.crawl_set.first()  # alternatively use .last() to copy most recent crawl instead of very first crawl
    except Crawl.DoesNotExist:
        # there is no template crawl to base the next one off of
        # user must add at least one crawl to a schedule that serves as the template for all future repeated crawls
        return

    new_crawl = abx.archivebox.writes.create_crawl_copy(crawl_to_copy=crawl_to_copy, schedule=crawl_schedule)
    abx.archivebox.events.on_crawl_created(new_crawl)


@abx.hookimpl(specname='on_crawl_post_save')
def create_root_snapshot_from_seed(crawl):
    # create a snapshot for the seed URI which kicks off the crawl
    # only a single extractor will run on it, which will produce outlinks which get added back to the crawl
    root_snapshot, created = abx.archivebox.writes.get_or_create_snapshot(crawl=crawl, url=crawl.seed.uri, config={
        'extractors': (
            abx.archivebox.reads.get_extractors_that_produce_outlinks()
            if crawl.seed.extractor == 'auto' else
            [crawl.seed.extractor]
        ),
        **crawl.seed.config,
    })
    if created:
        abx.archivebox.events.on_snapshot_created(root_snapshot)
        abx.archivebox.writes.update_crawl_stats(started_at=timezone.now())


@abx.hookimpl(specname='on_snapshot_created')
def create_archiveresults_pending_from_snapshot(snapshot, config):
    config = get_scope_config(
        # defaults=settings.CONFIG_FROM_DEFAULTS,
        # collection=settings.CONFIG_FROM_FILE,
        # environment=settings.CONFIG_FROM_ENVIRONMENT,
        persona=snapshot.crawl.persona,
        seed=snapshot.crawl.seed,
        crawl=snapshot.crawl,
        snapshot=snapshot,
        # extra_config=extra_config,
    )
    extractors = abx.archivebox.reads.get_extractors_for_snapshot(snapshot, config)
    for extractor in extractors:
        archiveresult, created = abx.archivebox.writes.get_or_create_archiveresult_pending(
            snapshot=snapshot,
            extractor=extractor,
            status='pending',
        )
        if created:
            abx.archivebox.events.on_archiveresult_created(archiveresult)


@abx.hookimpl(specname='on_archiveresult_updated')
def create_snapshots_pending_from_archiveresult_outlinks(archiveresult):
    config = get_scope_config(...)
    # check if the extractor finished successfully; if not, don't bother checking for outlinks
    if not archiveresult.status == 'succeeded':
        return

    # check if we have already reached the maximum recursion depth
    hops_to_here = abx.archivebox.reads.get_outlink_parents(crawl_pk=archiveresult.snapshot.crawl_id, url=archiveresult.url, config=config)
    if len(hops_to_here) >= archiveresult.snapshot.crawl.max_depth + 1:
        return

    # parse the output to get outlink url_entries
    discovered_urls = abx.archivebox.reads.get_archiveresult_discovered_url_entries(archiveresult, config=config)
    for url_entry in discovered_urls:
        abx.archivebox.writes.create_outlink_record(src=archiveresult.snapshot.url, dst=url_entry.url, via=archiveresult)
        abx.archivebox.writes.create_snapshot(crawl=archiveresult.snapshot.crawl, url_entry=url_entry)

    # abx.archivebox.events.on_crawl_updated(archiveresult.snapshot.crawl)
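
The @abx.hookimpl markers above follow the pluggy plugin pattern: implementations are collected by a plugin manager and invoked by hook name with keyword arguments. A minimal standalone sketch of that wiring, assuming abx wraps pluggy's marker classes (the CrawlSpecs and DummyPlugin names here are hypothetical, not abx's actual internals):

import pluggy

# the project namespace must match between specs and impls ("abx" here)
hookspec = pluggy.HookspecMarker("abx")
hookimpl = pluggy.HookimplMarker("abx")

class CrawlSpecs:
    @hookspec
    def create_crawl(self, seed, depth, tags_str, persona, created_by, config, schedule):
        """Spec mirroring the create_crawl hookimpl above."""

class DummyPlugin:
    @hookimpl
    def create_crawl(self, seed, depth, tags_str, persona, created_by, config, schedule):
        return {'seed': seed, 'depth': depth}   # stand-in for a real Crawl record

pm = pluggy.PluginManager("abx")
pm.add_hookspecs(CrawlSpecs)
pm.register(DummyPlugin())

# hooks are always called with keyword args; one result is collected per registered impl
results = pm.hook.create_crawl(seed='https://example.com', depth=1, tags_str='',
                               persona='Default', created_by=None, config={}, schedule=None)
print(results)  # [{'seed': 'https://example.com', 'depth': 1}]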

docs

@@ -1 +1 @@
-Subproject commit aeb53e2be3ee9c45d75508d6799ceefc16c66f01
+Subproject commit ee3f16f041fddc69e33f83e6574f70b7dd0541d9

pyproject.toml

@@ -155,6 +155,9 @@ dev-dependencies = [
    "recommonmark>=0.7.1",
    "sphinx>=8.1.3",
    "sphinx-rtd-theme>=2.0.0",
+   "myst-parser>=4.0.0",
+   "sphinx-autodoc2>=0.5.0",
+   "linkify-it-py>=2.0.3",
    ### DEBUGGING
    "django-debug-toolbar>=4.4.6",
    "requests-tracker>=0.3.3",

uv.lock

@@ -706,8 +706,10 @@ dev = [
    { name = "djdt-flamegraph" },
    { name = "flake8" },
    { name = "ipdb" },
+   { name = "linkify-it-py" },
    { name = "logfire", extra = ["django"] },
    { name = "mypy" },
+   { name = "myst-parser" },
    { name = "opentelemetry-instrumentation-django" },
    { name = "opentelemetry-instrumentation-sqlite3" },
    { name = "pip" },
@@ -717,6 +719,7 @@ dev = [
    { name = "ruff" },
    { name = "setuptools" },
    { name = "sphinx" },
+   { name = "sphinx-autodoc2" },
    { name = "sphinx-rtd-theme" },
    { name = "uv" },
    { name = "viztracer" },
@@ -814,8 +817,10 @@ dev = [
    { name = "djdt-flamegraph", specifier = ">=0.2.13" },
    { name = "flake8", specifier = ">=7.1.1" },
    { name = "ipdb", specifier = ">=0.13.13" },
+   { name = "linkify-it-py", specifier = ">=2.0.3" },
    { name = "logfire", extras = ["django"], specifier = ">=0.51.0" },
    { name = "mypy", specifier = ">=1.11.2" },
+   { name = "myst-parser", specifier = ">=4.0.0" },
    { name = "opentelemetry-instrumentation-django", specifier = ">=0.47b0" },
    { name = "opentelemetry-instrumentation-sqlite3", specifier = ">=0.47b0" },
    { name = "pip", specifier = ">=24.2" },
@@ -825,6 +830,7 @@ dev = [
    { name = "ruff", specifier = ">=0.6.6" },
    { name = "setuptools", specifier = ">=75.1.0" },
    { name = "sphinx", specifier = ">=8.1.3" },
+   { name = "sphinx-autodoc2", specifier = ">=0.5.0" },
    { name = "sphinx-rtd-theme", specifier = ">=2.0.0" },
    { name = "uv", specifier = ">=0.4.26" },
    { name = "viztracer", specifier = ">=0.17.0" },
@@ -843,6 +849,18 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/39/e3/893e8757be2612e6c266d9bb58ad2e3651524b5b40cf56761e985a28b13e/asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47", size = 23828 },
]
+
+[[package]]
+name = "astroid"
+version = "3.3.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "typing-extensions", marker = "python_full_version < '3.11'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/38/1e/326fb1d3d83a3bb77c9f9be29d31f2901e35acb94b0605c3f2e5085047f9/astroid-3.3.5.tar.gz", hash = "sha256:5cfc40ae9f68311075d27ef68a4841bdc5cc7f6cf86671b49f00607d30188e2d", size = 397229 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/41/30/624365383fa4a40329c0f0bbbc151abc4a64e30dfc110fc8f6e2afcd02bb/astroid-3.3.5-py3-none-any.whl", hash = "sha256:a9d1c946ada25098d790e079ba2a1b112157278f3fb7e718ae6a9252f5835dc8", size = 274586 },
+]

[[package]]
name = "asttokens"
version = "2.4.1"
@@ -1895,6 +1913,18 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/a0/9f/5b5481d716670ed5fbd8d06dfa94b7108272b645da2f2406eb909cb6a450/libcst-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:4d6acb0bdee1e55b44c6215c59755ec4693ac01e74bb1fde04c37358b378835d", size = 2029600 },
]
+
+[[package]]
+name = "linkify-it-py"
+version = "2.0.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "uc-micro-py" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/2a/ae/bb56c6828e4797ba5a4821eec7c43b8bf40f69cda4d4f5f8c8a2810ec96a/linkify-it-py-2.0.3.tar.gz", hash = "sha256:68cda27e162e9215c17d786649d1da0021a451bdc436ef9e0fa0ba5234b9b048", size = 27946 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/04/1e/b832de447dee8b582cac175871d2f6c3d5077cc56d5575cadba1fd1cccfa/linkify_it_py-2.0.3-py3-none-any.whl", hash = "sha256:6bcbc417b0ac14323382aef5c5192c0075bf8a9d6b41820a2b66371eac6b6d79", size = 19820 },
+]

[[package]]
name = "logfire"
version = "2.1.1"
@@ -2025,6 +2055,18 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/27/1a/1f68f9ba0c207934b35b86a8ca3aad8395a3d6dd7921c0686e23853ff5a9/mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e", size = 7350 },
]
+
+[[package]]
+name = "mdit-py-plugins"
+version = "0.4.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "markdown-it-py" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/19/03/a2ecab526543b152300717cf232bb4bb8605b6edb946c845016fa9c9c9fd/mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5", size = 43542 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/a7/f7/7782a043553ee469c1ff49cfa1cdace2d6bf99a1f333cf38676b3ddf30da/mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636", size = 55316 },
+]

[[package]]
name = "mdurl"
version = "0.1.2"
@@ -2086,6 +2128,23 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 },
]
+
+[[package]]
+name = "myst-parser"
+version = "4.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "docutils" },
+    { name = "jinja2" },
+    { name = "markdown-it-py" },
+    { name = "mdit-py-plugins" },
+    { name = "pyyaml" },
+    { name = "sphinx" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/85/55/6d1741a1780e5e65038b74bce6689da15f620261c490c3511eb4c12bac4b/myst_parser-4.0.0.tar.gz", hash = "sha256:851c9dfb44e36e56d15d05e72f02b80da21a9e0d07cba96baf5e2d476bb91531", size = 93858 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/ca/b4/b036f8fdb667587bb37df29dc6644681dd78b7a2a6321a34684b79412b28/myst_parser-4.0.0-py3-none-any.whl", hash = "sha256:b9317997552424448c6096c2558872fdb6f81d3ecb3a40ce84a7518798f3f28d", size = 84563 },
+]

[[package]]
name = "objprint"
version = "0.2.3"
@@ -3067,6 +3126,20 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/26/60/1ddff83a56d33aaf6f10ec8ce84b4c007d9368b21008876fceda7e7381ef/sphinx-8.1.3-py3-none-any.whl", hash = "sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2", size = 3487125 },
]
+
+[[package]]
+name = "sphinx-autodoc2"
+version = "0.5.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "astroid" },
+    { name = "tomli", marker = "python_full_version < '3.11'" },
+    { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/17/5f/5350046d1aa1a56b063ae08b9ad871025335c9d55fe2372896ea48711da9/sphinx_autodoc2-0.5.0.tar.gz", hash = "sha256:7d76044aa81d6af74447080182b6868c7eb066874edc835e8ddf810735b6565a", size = 115077 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/19/e6/48d47961bbdae755ba9c17dfc65d89356312c67668dcb36c87cfadfa1964/sphinx_autodoc2-0.5.0-py3-none-any.whl", hash = "sha256:e867013b1512f9d6d7e6f6799f8b537d6884462acd118ef361f3f619a60b5c9e", size = 43385 },
+]

[[package]]
name = "sphinx-rtd-theme"
version = "3.0.1"
@@ -3303,6 +3376,15 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/97/3f/c4c51c55ff8487f2e6d0e618dba917e3c3ee2caae6cf0fbb59c9b1876f2e/tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8", size = 17859 },
]
+
+[[package]]
+name = "uc-micro-py"
+version = "1.0.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/91/7a/146a99696aee0609e3712f2b44c6274566bc368dfe8375191278045186b8/uc-micro-py-1.0.3.tar.gz", hash = "sha256:d321b92cff673ec58027c04015fcaa8bb1e005478643ff4a500882eaab88c48a", size = 6043 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/37/87/1f677586e8ac487e29672e4b17455758fce261de06a0d086167bb760361a/uc_micro_py-1.0.3-py3-none-any.whl", hash = "sha256:db1dffff340817673d7b466ec86114a9dc0e9d4d9b5ba229d9d60e5c12600cd5", size = 6229 },
+]

[[package]]
name = "ulid-py"
version = "1.1.0"