mirror of https://github.com/ArchiveBox/ArchiveBox
synced 2024-11-10 14:44:18 +00:00

add git downloading

commit 827e15b31a (parent 5b6c768a47)
4 changed files with 48 additions and 10 deletions
@@ -17,6 +17,7 @@ from config import (
     FETCH_PDF,
     FETCH_SCREENSHOT,
     FETCH_DOM,
+    FETCH_GIT,
     RESOLUTION,
     CHECK_SSL_VALIDITY,
     SUBMIT_ARCHIVE_DOT_ORG,
@@ -108,6 +109,9 @@ def archive_link(link_dir, link, overwrite=True):
     # if FETCH_VIDEO:
     #     link = fetch_video(link_dir, link, overwrite=overwrite)
 
+    if FETCH_GIT:
+        link = fetch_git(link_dir, link, overwrite=overwrite)
+
     if FETCH_FAVICON:
         link = fetch_favicon(link_dir, link, overwrite=overwrite)
 
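Note: each fetch_* call in archive_link follows the same shape: the function is wrapped in @attach_result_to_link and the (possibly updated) link dict is passed along to the next step. The decorator's body is not part of this diff, so the following is only a rough sketch of the pattern, with guessed internals:

    from functools import wraps

    def attach_result_to_link(method):
        """Hypothetical sketch: record each archive method's result on the link dict."""
        def decorator(fetch_func):
            @wraps(fetch_func)
            def wrapper(link_dir, link, **kwargs):
                result = fetch_func(link_dir, link, **kwargs)
                # store the result (None if the method skipped itself) under the method name
                link.setdefault('history', {})[method] = result
                return link
            return wrapper
        return decorator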
@@ -496,6 +500,40 @@ def fetch_favicon(link_dir, link, timeout=TIMEOUT):
     # else:
     #     print('    √ Skipping video download')
 
+@attach_result_to_link('git')
+def fetch_git(link_dir, link, timeout=TIMEOUT):
+    """download full site using git"""
+
+    if not (link['domain'] == 'github.com'
+            or link['url'].endswith('.git')
+            or link['type'] == 'git'):
+        return
+
+    if os.path.exists(os.path.join(link_dir, 'git')):
+        return {'output': 'git', 'status': 'skipped'}
+
+    CMD = ['git', 'clone', '--recursive', link['url'], 'git']
+    output = 'git'
+
+    end = progress(timeout, prefix='      ')
+    try:
+        result = run(CMD, stdout=PIPE, stderr=PIPE, cwd=link_dir, timeout=timeout + 1)  # git/<reponame>
+        end()
+
+        if result.returncode > 0:
+            print('        got git response code {}:'.format(result.returncode))
+            raise Exception('Failed git download')
+    except Exception as e:
+        end()
+        print('        Run to see full output:', 'cd {}; {}'.format(link_dir, ' '.join(CMD)))
+        print('        {}Failed: {} {}{}'.format(ANSI['red'], e.__class__.__name__, e, ANSI['reset']))
+        output = e
+
+    return {
+        'cmd': CMD,
+        'output': output,
+    }
+
 def chrome_headless(binary=CHROME_BINARY, user_data_dir=CHROME_USER_DATA_DIR):
     args = [binary, '--headless']  # '--disable-gpu'
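Note: the clone invocation above can be sanity-checked in isolation with something like the snippet below; the output directory and repo URL are placeholders, not values from the commit:

    import os
    from subprocess import run, PIPE

    link_dir = '/tmp/archive-test'                    # placeholder output folder
    url = 'https://github.com/ArchiveBox/ArchiveBox'  # any clonable repo URL
    os.makedirs(link_dir, exist_ok=True)
    # same call shape as fetch_git: clone recursively into a 'git' subfolder of link_dir
    result = run(['git', 'clone', '--recursive', url, 'git'],
                 stdout=PIPE, stderr=PIPE, cwd=link_dir, timeout=61)
    print('returncode:', result.returncode)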
@@ -21,6 +21,7 @@ FETCH_VIDEO = os.getenv('FETCH_VIDEO', 'False'
 FETCH_PDF = os.getenv('FETCH_PDF', 'True').lower() == 'true'
 FETCH_SCREENSHOT = os.getenv('FETCH_SCREENSHOT', 'True').lower() == 'true'
 FETCH_DOM = os.getenv('FETCH_DOM', 'True').lower() == 'true'
+FETCH_GIT = os.getenv('FETCH_GIT', 'True').lower() == 'true'
 FETCH_FAVICON = os.getenv('FETCH_FAVICON', 'True').lower() == 'true'
 SUBMIT_ARCHIVE_DOT_ORG = os.getenv('SUBMIT_ARCHIVE_DOT_ORG', 'True').lower() == 'true'
 RESOLUTION = os.getenv('RESOLUTION', '1440,1200')
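Note: like the flags around it, the new FETCH_GIT toggle only counts as enabled when the variable lower-cases to exactly 'true', so any other value disables the git step:

    import os

    os.environ['FETCH_GIT'] = 'False'   # any value other than 'true' (case-insensitive) disables git cloning
    FETCH_GIT = os.getenv('FETCH_GIT', 'True').lower() == 'true'
    assert FETCH_GIT is False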
@@ -81,9 +81,9 @@ def parse_pocket_export(html_file):
             'url': fixed_url,
             'domain': domain(fixed_url),
             'base_url': base_url(fixed_url),
-            'timestamp': str(time.timestamp()),
+            'timestamp': str(datetime.now().timestamp()),
             'tags': match.group(3),
-            'title': match.group(4).replace(' — Readability', '').replace('http://www.readability.com/read?url=', '') or base_url(fixed_url),
+            'title': match.group(4).replace(' — Readability', '').replace('http://www.readability.com/read?url=', '') or fetch_page_title(url),
             'sources': [html_file.name],
         }
         info['type'] = get_link_type(info)
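Note: the timestamp change repeated in this and the following parser hunks replaces the parsed `time` value (apparently the export's own saved-at time) with the wall-clock time at import, producing fractional-second strings:

    from datetime import datetime

    # every link parsed in the same run now gets a fresh, fractional-second timestamp string
    print(str(datetime.now().timestamp()))   # e.g. '1510688985.448929'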
@@ -157,7 +157,7 @@ def parse_rss_export(rss_file):
             'url': url,
             'domain': domain(url),
             'base_url': base_url(url),
-            'timestamp': str(time.timestamp()),
+            'timestamp': str(datetime.now().timestamp()),
             'tags': '',
             'title': title or fetch_page_title(url),
             'sources': [rss_file.name],
@@ -184,7 +184,7 @@ def parse_bookmarks_export(html_file):
             'url': url,
             'domain': domain(url),
             'base_url': base_url(url),
-            'timestamp': str(time.timestamp()),
+            'timestamp': str(datetime.now().timestamp()),
             'tags': "",
             'title': match.group(3).strip() or fetch_page_title(url),
             'sources': [html_file.name],
@@ -217,7 +217,7 @@ def parse_pinboard_rss_feed(rss_file):
             'url': url,
             'domain': domain(url),
             'base_url': base_url(url),
-            'timestamp': str(time.timestamp()),
+            'timestamp': str(datetime.now().timestamp()),
             'tags': tags,
             'title': title or fetch_page_title(url),
             'sources': [rss_file.name],
@@ -242,7 +242,7 @@ def parse_medium_rss_feed(rss_file):
             'url': url,
             'domain': domain(url),
             'base_url': base_url(url),
-            'timestamp': str(time.timestamp()),
+            'timestamp': str(datetime.now().timestamp()),
             'tags': '',
             'title': title or fetch_page_title(url),
             'sources': [rss_file.name],
@@ -261,13 +261,11 @@ def parse_plain_text(text_file):
         urls = re.findall(URL_REGEX, line)
 
         for url in urls:
-            timestamp = str(datetime.now().timestamp())
-
             info = {
                 'url': url,
                 'domain': domain(url),
                 'base_url': base_url(url),
-                'timestamp': timestamp,
+                'timestamp': str(datetime.now().timestamp()),
                 'tags': '',
                 'title': fetch_page_title(url),
                 'sources': [text_file.name],
@@ -186,7 +186,8 @@
             <div class="col-lg-4 alert well">
                 Download:
                 <a href="index.json" title="JSON summary of archived link.">JSON</a> |
-                <a href="." title="Webserver-provided index of files directory.">Files</a>
+                <a href="." title="Webserver-provided index of files directory.">Files</a> |
+                <a href="git/" title="Any git repos at the url">Git Code</a>
             </div>
             <hr/>
             <div class="col-lg-2">