tweak console output format

Nick Sweeting 2018-04-17 09:11:27 -04:00
parent 262fa0e1bb
commit 64e6eb5f7b
3 changed files with 41 additions and 35 deletions

archive

@@ -54,18 +54,20 @@ def merge_links(archive_path=HTML_FOLDER, import_path=None):
     all_links = validate_links(existing_links + all_links)
 
     num_new_links = len(all_links) - len(existing_links)
-    if import_path:
-        print('[+] [{}] Adding {} new links from {} to index'.format(
+    if num_new_links:
+        print('[{green}+{reset}] [{}] Adding {} new links from {} to {}/index.json'.format(
             datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
             num_new_links,
             import_path,
+            archive_path,
+            **ANSI,
         ))
     else:
-        print('[*] [{}] Running on existing index with {}{}{} links.'.format(
+        print('[*] [{}] No new links added to {}/index.json{}'.format(
             datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
-            ANSI['green'],
-            len(all_links),
-            ANSI['reset'],
+            archive_path,
+            ' from {}'.format(import_path) if import_path else '',
+            **ANSI,
         ))
 
     return all_links
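
The new messages mix positional {} fields with named {green}/{reset} fields in a single str.format call: positional arguments fill the {}s in order, and **ANSI supplies the color codes by name. A minimal sketch of the pattern, using a trimmed-down stand-in for the project's ANSI dict (the real mapping is defined elsewhere in the codebase, so these values are assumptions):

    # Stand-in for the project's ANSI color dict; escape values are assumptions.
    ANSI = {'green': '\033[01;32m', 'reset': '\033[00;00m'}

    # {} fields are filled by the positional args, {green}/{reset} by **ANSI.
    print('[{green}+{reset}] [{}] Adding {} new links'.format(
        '2018-04-17 09:11:27',  # normally datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        12,                     # normally num_new_links
        **ANSI,
    ))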
@@ -76,14 +78,15 @@ def update_archive(archive_path, links, source=None, resume=None, append=True):
     start_ts = datetime.now().timestamp()
 
     if resume:
-        print('{green}[▶] [{}] Resuming archive update from {}...{reset}'.format(
+        print('{green}[▶] [{}] Resuming archive downloading from {}...{reset}'.format(
             datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
             resume,
             **ANSI,
         ))
     else:
-        print('{green}[▶] [{}] Running full archive update...{reset}'.format(
+        print('{green}[▶] [{}] Updating files for {} links in archive...{reset}'.format(
             datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
+            len(links),
             **ANSI,
         ))
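
Resuming from a timestamp only makes sense if the link list is filtered before downloading starts. A hedged sketch of one way to apply the cutoff; the helper name links_after_timestamp and the link dict shape are illustrative assumptions, not necessarily what this codebase does:

    def links_after_timestamp(links, resume=None):
        # Yield only links whose timestamp sorts after the resume point.
        if not resume:
            yield from links
            return
        for link in links:
            try:
                if float(link['timestamp']) > float(resume):
                    yield link
            except (ValueError, KeyError):
                # Skip malformed entries rather than aborting the whole run.
                continue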
@@ -98,9 +101,10 @@ def update_archive(archive_path, links, source=None, resume=None, append=True):
     else:
         duration = '{0:.2f} sec'.format(seconds, 2)
 
-    print('{}[√] [{}] Archive update complete ({}){}'.format(
+    print('{}[√] [{}] Update of {} links complete ({}){}'.format(
         ANSI['green'],
         datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
+        len(links),
         duration,
         ANSI['reset'],
     ))
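
For context, the duration string printed here is a simple timestamp diff: start_ts is taken with datetime.now().timestamp() at the top of update_archive (visible in the previous hunk), and seconds is the difference at the end. A sketch under that assumption; only the seconds branch is visible in this hunk, so the minutes branch is a guess:

    from datetime import datetime

    start_ts = datetime.now().timestamp()
    # ... archiving work happens here ...
    seconds = datetime.now().timestamp() - start_ts

    if seconds > 60:
        duration = '{0:.2f} min'.format(seconds / 60)  # assumed sibling branch
    else:
        # The stray second argument in the original line ('.format(seconds, 2)')
        # is harmless: str.format silently ignores unused positional args.
        duration = '{0:.2f} sec'.format(seconds)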

archive_methods.py
@@ -111,14 +111,15 @@ def archive_link(link_dir, link, overwrite=False):
     return link
 
 
 def log_link_archive(link_dir, link, update_existing):
-    print('[{symbol_color}{symbol}{reset}] [{timestamp}] "{title}": {blue}{base_url}{reset}'.format(
+    print('[{symbol_color}{symbol}{reset}] [{now}] "{title}"\n    {blue}{url}{reset}'.format(
         symbol='*' if update_existing else '+',
         symbol_color=ANSI['black' if update_existing else 'green'],
+        now=datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
         **link,
         **ANSI,
     ))
-    print('    > {} ({})'.format(link_dir, 'updating' if update_existing else 'creating'))
+    print('    > {}{}'.format(link_dir, '' if update_existing else ' (new)'))
 
     if link['type']:
         print('      i {}'.format(link['type']))
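
The new log line unpacks both the link dict and the ANSI dict into one format call, so {title} and {url} come from the link record while the color fields come from ANSI. A runnable sketch with hypothetical stand-ins for both dicts (extra keys supplied to str.format, like an unused 'type', are simply ignored):

    from datetime import datetime

    # Hypothetical stand-ins for the project's ANSI dict and a parsed link record.
    ANSI = {'green': '\033[01;32m', 'black': '\033[01;30m',
            'blue': '\033[01;34m', 'reset': '\033[00;00m'}
    link = {'title': 'Example Domain', 'url': 'https://example.com/', 'type': None}

    update_existing = False
    print('[{symbol_color}{symbol}{reset}] [{now}] "{title}"\n    {blue}{url}{reset}'.format(
        symbol='*' if update_existing else '+',
        symbol_color=ANSI['black' if update_existing else 'green'],
        now=datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
        **link,
        **ANSI,
    ))
    # Prints (colors omitted):
    # [+] [2018-04-17 09:11:27] "Example Domain"
    #     https://example.com/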
@@ -196,11 +197,12 @@ def fetch_wget(link_dir, link, requisites=FETCH_WGET_REQUISITES, timeout=TIMEOUT
     try:
         result = run(CMD, stdout=PIPE, stderr=PIPE, cwd=link_dir, timeout=timeout + 1)  # index.html
         end()
-        output = html_appended_url(link)
-        if result.returncode > 0:
+        output = wget_output_path(link, look_in=domain_dir)
+        if result.returncode > 0 and result.returncode != 8:
             print('        got wget response code {}:'.format(result.returncode))
             print('\n'.join('          ' + line for line in (result.stderr or result.stdout).decode().rsplit('\n', 10)[-10:] if line.strip()))
-            # raise Exception('Failed to wget download')
+        if result.returncode == 4:
+            raise Exception('Failed to wget download')
     except Exception as e:
         end()
         print('        Run to see full output:', 'cd {}; {}'.format(link_dir, ' '.join(CMD)))
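
This matches wget's documented exit statuses: 8 means the server issued an error response (e.g. a 404 on one of many page requisites), which usually still leaves a usable snapshot, while 4 means a network failure and nothing useful was fetched. A sketch of that policy in isolation; the function name here is illustrative, not from the codebase:

    from subprocess import CompletedProcess

    def check_wget_result(result: CompletedProcess):
        # Tolerate exit code 8 (server errors on some requests): the main page
        # was usually still saved. Print the tail of wget's output for any other
        # nonzero code, and only treat 4 (network failure) as fatal.
        if result.returncode > 0 and result.returncode != 8:
            print('        got wget response code {}:'.format(result.returncode))
            tail = (result.stderr or result.stdout).decode().rsplit('\n', 10)[-10:]
            print('\n'.join('          ' + line for line in tail if line.strip()))
        if result.returncode == 4:
            raise Exception('Failed to wget download')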

index.py
@@ -33,11 +33,11 @@ def write_links_index(out_dir, links):
     write_json_links_index(out_dir, links)
     write_html_links_index(out_dir, links)
 
-    print('[√] [{}] Archive Main Index now up-to-date: {}/index.html'.format(
+    print('{green}[√] [{}] Updated main index files:{reset}'.format(
         datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
-        out_dir,
-    ))
+        **ANSI))
+    print('    > {}/index.json'.format(out_dir))
+    print('    > {}/index.html'.format(out_dir))
 
 def write_json_links_index(out_dir, links):
     """write the json link index to a given path"""