Mirror of https://github.com/ArchiveBox/ArchiveBox, synced 2024-11-21 19:53:06 +00:00
refactor: Replace --index with --with-headers in the list command to make it more explicit, and change it so it affects the CSV output too.
parent 2aa8d69b72
commit fa622d3e14

4 changed files with 32 additions and 17 deletions
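For orientation before the diff: after this change, header/summary output is opt-in and behaves the same way for all three structured formats. A sketch of the resulting usage, mirroring the updated tests further down (actual output depends on what is in the local archive):

    archivebox list --json --with-headers     # JSON wrapped in the main index structure
    archivebox list --html --with-headers     # full HTML index page, including the <footer>
    archivebox list --csv url --with-headers  # CSV with a "url" header row
    archivebox list --csv url                 # CSV rows only, no header row (the new default)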
@@ -52,9 +52,9 @@ def main(args: Optional[List[str]]=None, stdin: Optional[IO]=None, pwd: Optional
         help="Print the output in HTML format"
     )
     parser.add_argument(
-        '--index',
+        '--with-headers',
         action='store_true',
-        help='Include the index additional structures'
+        help='Include the headers in the output document'
     )
     parser.add_argument(
         '--sort', #'-s',

@@ -112,9 +112,9 @@ def main(args: Optional[List[str]]=None, stdin: Optional[IO]=None, pwd: Optional
     command = parser.parse_args(args or ())
     filter_patterns_str = accept_stdin(stdin)
 
-    if command.index and not (command.json or command.html):
+    if command.with_headers and not (command.json or command.html or command.csv):
         stderr(
-            '[X] --index can only be used with --json or --html options.\n',
+            '[X] --with-headers can only be used with --json, --html or --csv options.\n',
             color='red',
         )
         raise SystemExit(2)

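Concretely, the new guard means a bare invocation fails fast. A minimal sketch of checking that behavior from Python (assumptions: ArchiveBox is installed on PATH, and the SystemExit(2) above surfaces as the process exit status):

    import subprocess

    # Hypothetical check: --with-headers without --json/--html/--csv should exit with code 2
    # and print the error message introduced above to stderr.
    result = subprocess.run(["archivebox", "list", "--with-headers"], capture_output=True)
    assert result.returncode == 2
    assert b"--with-headers can only be used with --json, --html or --csv options." in result.stderr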
@@ -130,7 +130,7 @@ def main(args: Optional[List[str]]=None, stdin: Optional[IO]=None, pwd: Optional
         csv=command.csv,
         json=command.json,
         html=command.html,
-        index=command.index,
+        with_headers=command.with_headers,
         out_dir=pwd or OUTPUT_DIR,
     )
     raise SystemExit(not matching_folders)

@@ -463,11 +463,11 @@ def printable_folders(folders: Dict[str, Optional["Link"]],
                       json: bool=False,
                       html: bool=False,
                       csv: Optional[str]=None,
-                      index: bool=False) -> str:
+                      with_headers: bool=False) -> str:
     links = folders.values()
     if json:
         from .index.json import to_json
-        if index:
+        if with_headers:
             output = {
                 **MAIN_INDEX_HEADER,
                 'num_links': len(links),

@@ -480,7 +480,7 @@ def printable_folders(folders: Dict[str, Optional["Link"]],
         return to_json(output, indent=4, sort_keys=True)
     elif html:
         from .index.html import main_index_template
-        if index:
+        if with_headers:
             output = main_index_template(links, True)
         else:
             from .index.html import MINIMAL_INDEX_TEMPLATE

@@ -488,7 +488,7 @@ def printable_folders(folders: Dict[str, Optional["Link"]],
         return output
     elif csv:
         from .index.csv import links_to_csv
-        return links_to_csv(folders.values(), cols=csv.split(','), header=True)
+        return links_to_csv(folders.values(), cols=csv.split(','), header=with_headers)
 
     return '\n'.join(
         f'{folder} {link and link.url} "{link and link.title}"'

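The CSV branch is the behavioral change called out in the commit message: the header row used to be emitted unconditionally (header=True) and is now tied to the flag. A minimal stand-in sketch of what the header= argument toggles, not ArchiveBox's actual links_to_csv implementation:

    # Hypothetical sketch: emit the column-name row only when header=True.
    def links_to_csv_sketch(links, cols, header=False):
        rows = [','.join(str(getattr(link, col, '')) for col in cols) for link in links]
        if header:
            rows.insert(0, ','.join(cols))  # header row only when requested
        return '\n'.join(rows)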
@@ -709,7 +709,7 @@ def list_all(filter_patterns_str: Optional[str]=None,
              csv: Optional[str]=None,
              json: bool=False,
              html: bool=False,
-             index: bool=False,
+             with_headers: bool=False,
              out_dir: str=OUTPUT_DIR) -> Iterable[Link]:
     """List, filter, and export information about archive entries"""
 

@@ -742,7 +742,7 @@ def list_all(filter_patterns_str: Optional[str]=None,
         out_dir=out_dir,
     )
 
-    print(printable_folders(folders, json=json, csv=csv, html=html, index=index))
+    print(printable_folders(folders, json=json, csv=csv, html=html, with_headers=with_headers))
     return folders
 
 

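The renamed keyword also propagates to callers of the Python API. A sketch, assuming the remaining list_all parameters keep their defaults and that the function is importable as shown (the import path is an assumption, not part of this diff):

    from archivebox.main import list_all

    # Mirrors `archivebox list --csv url --with-headers`: prints the CSV (with header row)
    # and returns the matching folders mapping.
    folders = list_all(csv='url', with_headers=True)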
@@ -10,10 +10,10 @@ def test_list_json(process, disable_extractors_dict):
     assert output_json[0]["url"] == "http://127.0.0.1:8080/static/example.com.html"
 
 
-def test_list_json_index(process, disable_extractors_dict):
+def test_list_json_headers(process, disable_extractors_dict):
     subprocess.run(["archivebox", "add", "http://127.0.0.1:8080/static/example.com.html", "--depth=0"],
                    capture_output=True, env=disable_extractors_dict)
-    list_process = subprocess.run(["archivebox", "list", "--json", "--index"], capture_output=True)
+    list_process = subprocess.run(["archivebox", "list", "--json", "--with-headers"], capture_output=True)
     output_json = json.loads(list_process.stdout.decode("utf-8"))
     assert output_json["links"][0]["url"] == "http://127.0.0.1:8080/static/example.com.html"
 

@@ -25,17 +25,32 @@ def test_list_html(process, disable_extractors_dict):
     assert "<footer>" not in output_html
     assert "http://127.0.0.1:8080/static/example.com.html" in output_html
 
-def test_list_html_index(process, disable_extractors_dict):
+def test_list_html_headers(process, disable_extractors_dict):
     subprocess.run(["archivebox", "add", "http://127.0.0.1:8080/static/example.com.html", "--depth=0"],
                    capture_output=True, env=disable_extractors_dict)
-    list_process = subprocess.run(["archivebox", "list", "--html", "--index"], capture_output=True)
+    list_process = subprocess.run(["archivebox", "list", "--html", "--with-headers"], capture_output=True)
     output_html = list_process.stdout.decode("utf-8")
     assert "<footer>" in output_html
     assert "http://127.0.0.1:8080/static/example.com.html" in output_html
 
+def test_list_csv(process, disable_extractors_dict):
+    subprocess.run(["archivebox", "add", "http://127.0.0.1:8080/static/example.com.html", "--depth=0"],
+                   capture_output=True, env=disable_extractors_dict)
+    list_process = subprocess.run(["archivebox", "list", "--csv", "url"], capture_output=True)
+    output_csv = list_process.stdout.decode("utf-8")
+    assert "http://127.0.0.1:8080/static/example.com.html" in output_csv
+
+def test_list_csv_headers(process, disable_extractors_dict):
+    subprocess.run(["archivebox", "add", "http://127.0.0.1:8080/static/example.com.html", "--depth=0"],
+                   capture_output=True, env=disable_extractors_dict)
+    list_process = subprocess.run(["archivebox", "list", "--csv", "url", "--with-headers"], capture_output=True)
+    output_csv = list_process.stdout.decode("utf-8")
+    assert "http://127.0.0.1:8080/static/example.com.html" in output_csv
+    assert "url" in output_csv
+
 def test_list_index_with_wrong_flags(process):
-    list_process = subprocess.run(["archivebox", "list", "--index"], capture_output=True)
-    assert "--index can only be used with --json or --html options." in list_process.stderr.decode("utf-8")
+    list_process = subprocess.run(["archivebox", "list", "--with-headers"], capture_output=True)
+    assert "--with-headers can only be used with --json, --html or --csv options." in list_process.stderr.decode("utf-8")
 
 def test_link_sort_by_url(process, disable_extractors_dict):
     subprocess.run(["archivebox", "add", "http://127.0.0.1:8080/static/iana.org.html", "--depth=0"],