2017-07-04 10:38:07 +00:00
|
|
|
import os
|
2017-10-18 22:38:17 +00:00
|
|
|
import json
|
|
|
|
|
2017-07-04 10:38:07 +00:00
|
|
|
from datetime import datetime
|
2017-07-05 09:59:09 +00:00
|
|
|
from string import Template
|
2018-04-17 11:00:40 +00:00
|
|
|
from distutils.dir_util import copy_tree
|
2017-07-04 10:38:07 +00:00
|
|
|
|
2017-07-05 21:33:51 +00:00
|
|
|
from config import (
|
2018-06-11 01:12:55 +00:00
|
|
|
TEMPLATES_DIR,
|
|
|
|
OUTPUT_PERMISSIONS,
|
2017-07-05 21:33:51 +00:00
|
|
|
ANSI,
|
2017-10-18 22:38:17 +00:00
|
|
|
GIT_SHA,
|
2018-04-17 13:13:38 +00:00
|
|
|
FOOTER_INFO,
|
2017-07-05 21:33:51 +00:00
|
|
|
)
|
2017-10-23 09:58:41 +00:00
|
|
|
from util import (
|
|
|
|
chmod_file,
|
2018-04-17 13:13:38 +00:00
|
|
|
wget_output_path,
|
2017-10-23 09:58:41 +00:00
|
|
|
derived_link_info,
|
2018-06-11 00:52:15 +00:00
|
|
|
pretty_path,
|
2017-10-23 09:58:41 +00:00
|
|
|
)
|
2017-10-18 22:38:17 +00:00
|
|
|
|
|
|
|
|
|
|
|
### Homepage index for all the links
|
|
|
|
|
|
|
|
def write_links_index(out_dir, links):
    """create index.html file for a given list of links"""

    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    # announce the update with a timestamp, colorized via the ANSI palette
    now_str = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    print('{green}[*] [{ts}] Updating main index files...{reset}'.format(ts=now_str, **ANSI))

    # machine-readable index first, then the human-readable one
    write_json_links_index(out_dir, links)
    print('    > {}/index.json'.format(pretty_path(out_dir)))

    write_html_links_index(out_dir, links)
    print('    > {}/index.html'.format(pretty_path(out_dir)))
|
2019-02-07 06:06:21 +00:00
|
|
|
|
2017-07-04 10:38:07 +00:00
|
|
|
|
2017-10-18 22:38:17 +00:00
|
|
|
def write_json_links_index(out_dir, links):
    """write the json link index to a given path"""

    out_path = os.path.join(out_dir, 'index.json')

    # top-level metadata wraps the raw links list
    index = {
        'info': 'ArchiveBox Index',
        'help': 'https://github.com/pirate/ArchiveBox',
        'version': GIT_SHA,
        'num_links': len(links),
        'updated': str(datetime.now().timestamp()),
        'links': links,
    }

    with open(out_path, 'w', encoding='utf-8') as f:
        # default=str stringifies anything json can't serialize natively
        f.write(json.dumps(index, indent=4, default=str))

    chmod_file(out_path)
|
|
|
|
|
2017-10-23 09:58:41 +00:00
|
|
|
def parse_json_links_index(out_dir):
    """Load the list of links from <out_dir>/index.json.

    Returns the 'links' list stored in the JSON index, or an empty list
    when no index file exists in the directory yet.
    (Previous docstring wrongly claimed this merged the index with a
    given link — it takes no link and performs no merge.)
    """

    index_path = os.path.join(out_dir, 'index.json')
    if os.path.exists(index_path):
        with open(index_path, 'r', encoding='utf-8') as f:
            return json.load(f)['links']

    return []
|
|
|
|
|
2017-10-18 22:38:17 +00:00
|
|
|
def write_html_links_index(out_dir, links):
    """write the html link index to a given path"""

    out_path = os.path.join(out_dir, 'index.html')

    # mirror the shared static assets into the output folder
    copy_tree(os.path.join(TEMPLATES_DIR, 'static'), os.path.join(out_dir, 'static'))

    # keep crawlers away from the generated archive pages
    with open(os.path.join(out_dir, 'robots.txt'), 'w+') as f:
        f.write('User-agent: *\nDisallow: /')

    with open(os.path.join(TEMPLATES_DIR, 'index.html'), 'r', encoding='utf-8') as f:
        page_template = f.read()

    with open(os.path.join(TEMPLATES_DIR, 'index_row.html'), 'r', encoding='utf-8') as f:
        row_template = f.read()

    # render one table row per link, then join them for the page template
    rendered_rows = [
        Template(row_template).substitute(**derived_link_info(link))
        for link in links
    ]
    link_rows = '\n'.join(rendered_rows)

    template_vars = {
        'num_links': len(links),
        'date_updated': datetime.now().strftime('%Y-%m-%d'),
        'time_updated': datetime.now().strftime('%Y-%m-%d %H:%M'),
        'footer_info': FOOTER_INFO,
        'git_sha': GIT_SHA,
        'short_git_sha': GIT_SHA[:8],
        'rows': link_rows,
    }

    with open(out_path, 'w', encoding='utf-8') as f:
        f.write(Template(page_template).substitute(**template_vars))

    chmod_file(out_path)
|
2017-07-05 21:33:51 +00:00
|
|
|
|
2017-10-18 22:38:17 +00:00
|
|
|
|
2017-10-23 09:58:41 +00:00
|
|
|
### Individual link index
|
2017-10-18 22:38:17 +00:00
|
|
|
|
|
|
|
def write_link_index(out_dir, link):
    """Write both index.json and index.html for a single link dir."""
    # NOTE: mutates the passed-in link dict, stamping the write time
    link['updated'] = str(datetime.now().timestamp())
    write_json_link_index(out_dir, link)
    write_html_link_index(out_dir, link)
|
|
|
|
|
|
|
|
def write_json_link_index(out_dir, link):
    """write a json file with some info about the link"""

    json_path = os.path.join(out_dir, 'index.json')

    print('    √ index.json')

    with open(json_path, 'w', encoding='utf-8') as f:
        # default=str stringifies anything json can't serialize natively
        f.write(json.dumps(link, indent=4, default=str))

    chmod_file(json_path)
|
|
|
|
|
2017-10-23 09:58:41 +00:00
|
|
|
def parse_json_link_index(out_dir):
    """load the json link index from a given directory"""

    index_file = os.path.join(out_dir, 'index.json')

    # guard clause: no index yet means an empty link record
    if not os.path.exists(index_file):
        return {}

    with open(index_file, 'r', encoding='utf-8') as f:
        return json.load(f)
|
|
|
|
|
2017-10-18 22:38:17 +00:00
|
|
|
def write_html_link_index(out_dir, link):
    """Render the detail page for one link to <out_dir>/index.html."""
    with open(os.path.join(TEMPLATES_DIR, 'link_index_fancy.html'), 'r', encoding='utf-8') as f:
        page_template = f.read()

    out_path = os.path.join(out_dir, 'index.html')

    print('    √ index.html')

    latest = link['latest']
    # template context: raw link fields, latest archive results, plus
    # a few derived/fallback values for display
    context = {
        **link,
        **latest,
        'type': link['type'] or 'website',
        'tags': link['tags'] or 'untagged',
        'bookmarked': datetime.fromtimestamp(float(link['timestamp'])).strftime('%Y-%m-%d %H:%M'),
        'updated': datetime.fromtimestamp(float(link['updated'])).strftime('%Y-%m-%d %H:%M'),
        'bookmarked_ts': link['timestamp'],
        'updated_ts': link['updated'],
        'archive_org': latest.get('archive_org') or 'https://web.archive.org/save/{}'.format(link['url']),
        'wget': latest.get('wget') or wget_output_path(link),
    }

    with open(out_path, 'w', encoding='utf-8') as f:
        f.write(Template(page_template).substitute(context))

    chmod_file(out_path)
|