Merge pull request #390 from cdvv7788/linter-fixes

refactor: Organize code to remove flake8 issues
Authored by Nick Sweeting on 2020-07-24 14:32:00 -04:00, committed by GitHub
commit e4b5b2887f
24 changed files with 43 additions and 39 deletions
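
The changes below are a flake8 cleanup pass: mostly unused imports (F401) and unused local variables (F841), plus a few one-off fixes, a raw-string prefix for a regex literal (W605), an f-string with no placeholders (F541), and a noqa comment for a config name that flake8 cannot see being defined (F821). Something like the following reproduces the warnings locally; the exact flags and path are illustrative, the project may keep its lint settings in setup.cfg or .flake8 instead:

    # Run flake8 over the package and summarize the findings by error code.
    # --count and --statistics are standard flake8 flags.
    import subprocess

    subprocess.run(['flake8', 'archivebox/', '--count', '--statistics'], check=False)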

@@ -8,7 +8,8 @@ import argparse
 from typing import List, Optional, IO
-from ..main import add, docstring
+from ..main import add
+from ..util import docstring
 from ..config import OUTPUT_DIR, ONLY_NEW
 from ..logging_util import SmartFormatter, accept_stdin, stderr
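
The same one-line split repeats in every CLI entrypoint below: docstring used to be re-exported through ..main (where flake8 then flagged it as an unused import) and is now pulled straight from ..util, where it lives. The decorator copies the underlying command's docstring onto the CLI wrapper so the --help text has a single source of truth. A minimal sketch of the idea (not necessarily the project's exact implementation):

    from typing import Callable, Optional

    def docstring(text: Optional[str]):
        """Attach the given text as the decorated function's __doc__."""
        def decorator(func: Callable) -> Callable:
            if text:
                func.__doc__ = text
            return func
        return decorator

    def add(urls: str) -> None:
        """Add a new URL or list of URLs to your archive"""

    @docstring(add.__doc__)
    def main(args=None) -> None:   # the CLI shim inherits add()'s docstring
        ...

    assert main.__doc__ == add.__doc__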

@@ -8,7 +8,8 @@ import argparse
 from typing import Optional, List, IO
-from ..main import config, docstring
+from ..main import config
+from ..util import docstring
 from ..config import OUTPUT_DIR
 from ..logging_util import SmartFormatter, accept_stdin

@@ -8,7 +8,8 @@ import argparse
 from typing import Optional, List, IO
-from ..main import help, docstring
+from ..main import help
+from ..util import docstring
 from ..config import OUTPUT_DIR
 from ..logging_util import SmartFormatter, reject_stdin

@@ -8,7 +8,8 @@ import argparse
 from typing import Optional, List, IO
-from ..main import init, docstring
+from ..main import init
+from ..util import docstring
 from ..config import OUTPUT_DIR
 from ..logging_util import SmartFormatter, reject_stdin

@@ -8,7 +8,8 @@ import argparse
 from typing import Optional, List, IO
-from ..main import list_all, docstring
+from ..main import list_all
+from ..util import docstring
 from ..config import OUTPUT_DIR
 from ..index import (
     get_indexed_folders,

@@ -7,7 +7,8 @@ import sys
 from typing import Optional, List, IO
-from ..main import manage, docstring
+from ..main import manage
+from ..util import docstring
 from ..config import OUTPUT_DIR

@@ -8,7 +8,8 @@ import argparse
 from typing import Optional, List, IO
-from ..main import remove, docstring
+from ..main import remove
+from ..util import docstring
 from ..config import OUTPUT_DIR
 from ..logging_util import SmartFormatter, accept_stdin

@@ -8,7 +8,8 @@ import argparse
 from typing import Optional, List, IO
-from ..main import schedule, docstring
+from ..main import schedule
+from ..util import docstring
 from ..config import OUTPUT_DIR
 from ..logging_util import SmartFormatter, reject_stdin

@@ -8,7 +8,8 @@ import argparse
 from typing import Optional, List, IO
-from ..main import server, docstring
+from ..main import server
+from ..util import docstring
 from ..config import OUTPUT_DIR
 from ..logging_util import SmartFormatter, reject_stdin

@@ -8,7 +8,8 @@ import argparse
 from typing import Optional, List, IO
-from ..main import shell, docstring
+from ..main import shell
+from ..util import docstring
 from ..config import OUTPUT_DIR
 from ..logging_util import SmartFormatter, reject_stdin

@@ -8,7 +8,8 @@ import argparse
 from typing import Optional, List, IO
-from ..main import status, docstring
+from ..main import status
+from ..util import docstring
 from ..config import OUTPUT_DIR
 from ..logging_util import SmartFormatter, reject_stdin

@@ -8,7 +8,8 @@ import argparse
 from typing import List, Optional, IO
-from ..main import update, docstring
+from ..main import update
+from ..util import docstring
 from ..config import OUTPUT_DIR
 from ..index import (
     get_indexed_folders,

@@ -8,7 +8,8 @@ import argparse
 from typing import Optional, List, IO
-from ..main import version, docstring
+from ..main import version
+from ..util import docstring
 from ..config import OUTPUT_DIR
 from ..logging_util import SmartFormatter, reject_stdin

@@ -198,7 +198,7 @@ class TestRemove(unittest.TestCase):
     def test_remove_regex(self):
         with output_hidden():
-            archivebox_remove.main(['--yes', '--delete', '--filter-type=regex', 'http(s)?:\/\/(.+\.)?(example\d\.com)'])
+            archivebox_remove.main(['--yes', '--delete', '--filter-type=regex', r'http(s)?:\/\/(.+\.)?(example\d\.com)'])
         all_links = load_main_index(out_dir=OUTPUT_DIR)
         assert len(all_links) == 4
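
The only change here is the r prefix: inside a normal string literal, sequences like \d and \/ are not valid escapes, which flake8 reports as W605 (and newer Python versions warn about at compile time). A raw string hands the backslashes to the regex engine untouched. In isolation (standalone example, not project code):

    import re

    # '\d' in a plain string is an invalid escape sequence (W605); the raw
    # string below is exactly what the regex engine should receive.
    pattern = r'http(s)?:\/\/(.+\.)?(example\d\.com)'
    assert re.match(pattern, 'https://www.example1.com') is not None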

@@ -868,4 +868,4 @@ def setup_django(out_dir: str=None, check_db=False, config: ConfigDict=CONFIG) -
     except KeyboardInterrupt:
         raise SystemExit(2)
-os.umask(0o777 - int(OUTPUT_PERMISSIONS, base=8))
+os.umask(0o777 - int(OUTPUT_PERMISSIONS, base=8))  # noqa: F821
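
OUTPUT_PERMISSIONS is only defined dynamically at runtime, when the loaded config is injected into this module's globals, so flake8's static pass reports it as an undefined name (F821); the noqa comment silences that single false positive rather than restructuring the config loading. For reference, the expression converts a permission string into a umask (the value below is just an example):

    import os

    OUTPUT_PERMISSIONS = '755'                        # example config value
    mask = 0o777 - int(OUTPUT_PERMISSIONS, base=8)    # 0o777 - 0o755 == 0o022
    os.umask(mask)   # files/dirs created from now on lose the masked bits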

@@ -8,7 +8,6 @@ from django.utils.crypto import get_random_string
 from ..config import (
     OUTPUT_DIR,
     SECRET_KEY,
-    DEBUG,
     ALLOWED_HOSTS,
     PYTHON_DIR,
     ACTIVE_THEME,

@@ -1,3 +1,3 @@
-from django.test import TestCase
+#from django.test import TestCase
 # Create your tests here.

@@ -4,7 +4,6 @@ from django.shortcuts import render, redirect
 from django.http import HttpResponse
 from django.views import View, static
-from django.conf import settings
 from core.models import Snapshot
@@ -75,7 +74,7 @@ class AddLinks(View):
         }
         add_stdout = StringIO()
         with redirect_stdout(add_stdout):
-            extracted_links = add(**input_kwargs)
+            add(**input_kwargs)
         print(add_stdout.getvalue())
         context = {
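
The change in AddLinks is an F841 fix: extracted_links was assigned but never read, because the view only wants the console output that add() produces, which it captures through redirect_stdout and prints back out. The capture pattern on its own (generic stand-in, not the view's full code):

    from contextlib import redirect_stdout
    from io import StringIO

    def noisy_task() -> None:
        # stand-in for archivebox.main.add(), which reports progress on stdout
        print('[+] Adding 1 link to the index...')

    buffer = StringIO()
    with redirect_stdout(buffer):
        noisy_task()           # return value deliberately ignored, as in the view
    captured = buffer.getvalue()
    print(captured, end='')    # forward the captured text to the real stdout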

@@ -2,5 +2,4 @@ from archivebox.logging_util import log_shell_welcome_msg
 if __name__ == '__main__':
-    from main import *
     log_shell_welcome_msg()
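
Dropping 'from main import *' addresses flake8's F403 (star imports make it impossible to detect undefined names, and every name used afterwards becomes an F405 "may be undefined"). The one function this startup script calls is already imported explicitly on its first line, so nothing else was needed from the wildcard. Explicit imports keep each name traceable for the linter and the reader; a sketch of the explicit style, built only from the lines shown above:

    # With a single explicit import, a mistyped name is a hard F821/NameError
    # instead of an unverifiable "may be undefined from star imports" warning.
    from archivebox.logging_util import log_shell_welcome_msg

    if __name__ == '__main__':
        log_shell_welcome_msg()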

@@ -294,7 +294,6 @@ def parse_links_from_source(source_path: str) -> Tuple[List[Link], List[Link]]:
 def dedupe_links(existing_links: List[Link],
                  new_links: List[Link]) -> Tuple[List[Link], List[Link]]:
-    from ..parsers import parse_links
     # merge existing links in out_dir and new links
     all_links = validate_links(existing_links + new_links)
     all_link_urls = {link.url for link in existing_links}
@@ -537,7 +536,7 @@ def get_unrecognized_folders(links, out_dir: str=OUTPUT_DIR) -> Dict[str, Option
                 link_guessed = parse_json_link_details(entry.path, guess=True)
                 write_json_link_details(link_guessed, out_dir=entry.path)
                 link = parse_json_link_details(entry.path)
-            except Exception as e:
+            except Exception:
                 pass
             if index_exists and link is None:
@@ -565,7 +564,7 @@ def is_valid(link: Link) -> bool:
     try:
         parsed_link = parse_json_link_details(link.link_dir, guess=True)
         return link.url == parsed_link.url
-    except Exception as e:
+    except Exception:
         pass
     return False
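
Both hunks above are the same F841 fix: the exception object was bound to e but never used, so the 'as e' clause is dropped; the behaviour (swallow the error and carry on) is unchanged. The pattern in isolation (generic example, not the index code):

    import json

    def load_details(path: str) -> dict:
        """Return parsed JSON details, or an empty dict if unreadable or corrupt."""
        try:
            with open(path) as f:
                return json.load(f)
        except Exception:      # no "as e": the exception object itself is never needed
            return {}

    assert load_details('/nonexistent/index.json') == {}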

@@ -5,8 +5,6 @@ import os
 import sys
 import time
 import argparse
-import logging
-import signal
 from multiprocessing import Process
 from datetime import datetime
@@ -263,9 +261,9 @@ def log_archiving_finished(num_links: int):
     assert _LAST_RUN_STATS.archiving_start_ts is not None
     seconds = end_ts.timestamp() - _LAST_RUN_STATS.archiving_start_ts.timestamp()
     if seconds > 60:
-        duration = '{0:.2f} min'.format(seconds / 60, 2)
+        duration = '{0:.2f} min'.format(seconds / 60)
     else:
-        duration = '{0:.2f} sec'.format(seconds, 2)
+        duration = '{0:.2f} sec'.format(seconds)
     print()
     print('{}[√] [{}] Update of {} pages complete ({}){}'.format(
@@ -385,8 +383,8 @@ def log_removal_started(links: List["Link"], yes: bool, delete: bool):
         )
     else:
         print(
-            f' Matching links will be de-listed from the main index, but their archived content folders will remain in place on disk.\n'
-            f' (Pass --delete if you also want to permanently delete the data folders)'
+            ' Matching links will be de-listed from the main index, but their archived content folders will remain in place on disk.\n'
+            ' (Pass --delete if you also want to permanently delete the data folders)'
         )
     if not yes:
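
Two small fixes in this file beyond the import cleanup: str.format silently ignores surplus positional arguments, so the stray 2 passed alongside seconds / 60 (and alongside seconds) was dead weight; and an f-string with no placeholders is just a plain string literal (flake8 F541), so the f prefix comes off the removal warning text. Both in miniature:

    seconds = 150.0

    # Extra positional arguments to str.format are accepted and ignored:
    assert '{0:.2f} min'.format(seconds / 60, 2) == '2.50 min'
    assert '{0:.2f} min'.format(seconds / 60) == '2.50 min'

    # An "f-string" without any {placeholders} is an ordinary string (F541):
    msg = f'(Pass --delete if you also want to permanently delete the data folders)'
    assert msg == '(Pass --delete if you also want to permanently delete the data folders)'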

@@ -20,10 +20,9 @@ from .parsers import (
     save_file_as_source,
 )
 from .index.schema import Link
-from .util import enforce_types, docstring # type: ignore
+from .util import enforce_types # type: ignore
 from .system import get_dir_size, dedupe_cron_jobs, CRON_COMMENT
 from .index import (
-    links_after_timestamp,
     load_main_index,
     parse_links_from_source,
     dedupe_links,
@@ -291,7 +290,6 @@ def init(force: bool=False, out_dir: str=OUTPUT_DIR) -> None:
     print('\n{green}[+] Building main SQL index and running migrations...{reset}'.format(**ANSI))
     setup_django(out_dir, check_db=False)
-    from django.conf import settings
     DATABASE_FILE = os.path.join(out_dir, SQL_INDEX_FILENAME)
     print(f'{DATABASE_FILE}')
     print()
@@ -469,7 +467,8 @@ def status(out_dir: str=OUTPUT_DIR) -> None:
     users = get_admins().values_list('username', flat=True)
     print(f' UI users {len(users)}: {", ".join(users)}')
     last_login = User.objects.order_by('last_login').last()
-    print(f' Last UI login: {last_login.username} @ {str(last_login.last_login)[:16]}')
+    if last_login:
+        print(f' Last UI login: {last_login.username} @ {str(last_login.last_login)[:16]}')
     last_updated = Snapshot.objects.order_by('updated').last()
     print(f' Last changes: {str(last_updated.updated)[:16]}')
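
The status() change is a behaviour fix rather than a lint silence: QuerySet.last() returns None when the table is empty, so before any user accounts exist the old line raised AttributeError on last_login.username. The guard prints the line only when there is a user to report. The failure mode in miniature (plain objects standing in for the Django model):

    from typing import Optional

    class FakeUser:
        def __init__(self, username: str, last_login: str):
            self.username = username
            self.last_login = last_login

    def last_login_line(user: Optional[FakeUser]) -> str:
        # Mirrors the fixed logic: .last() on an empty queryset yields None,
        # so check before touching .username or .last_login.
        if user:
            return f'Last UI login: {user.username} @ {str(user.last_login)[:16]}'
        return 'Last UI login: (no logins yet)'

    assert last_login_line(None) == 'Last UI login: (no logins yet)'
    assert last_login_line(FakeUser('admin', '2020-07-24 14:32:00')) == 'Last UI login: admin @ 2020-07-24 14:32'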

@@ -19,17 +19,15 @@ from ..config import (
     OUTPUT_DIR,
     SOURCES_DIR_NAME,
     TIMEOUT,
-    check_data_folder,
 )
 from ..util import (
     basename,
-    domain,
     download_url,
     enforce_types,
     URL_REGEX,
 )
 from ..index.schema import Link
-from ..logging_util import pretty_path, TimedProgress, log_source_saved
+from ..logging_util import TimedProgress, log_source_saved
 from .pocket_html import parse_pocket_html_export
 from .pinboard_rss import parse_pinboard_rss_export
 from .shaarli_rss import parse_shaarli_rss_export

@@ -226,11 +226,11 @@ def ansi_to_html(text):
        argsdict = match.groupdict()
        if argsdict['arg_3'] is None:
            if argsdict['arg_2'] is None:
-                bold, color = 0, argsdict['arg_1']
+                _, color = 0, argsdict['arg_1']
            else:
-                bold, color = argsdict['arg_1'], argsdict['arg_2']
+                _, color = argsdict['arg_1'], argsdict['arg_2']
        else:
-            bold, color = argsdict['arg_3'], argsdict['arg_2']
+            _, color = argsdict['arg_3'], argsdict['arg_2']
        return TEMPLATE.format(COLOR_DICT[color][0])
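
The last change renames bold to _: the value is unpacked alongside color but never read (only the color code feeds the HTML template), and _ is the conventional name for a binding that is intentionally discarded. The convention in isolation:

    argsdict = {'arg_1': '1', 'arg_2': '32', 'arg_3': None}

    # "_" marks a value that has to be unpacked but is deliberately unused:
    _, color = argsdict['arg_1'], argsdict['arg_2']
    assert color == '32'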