Mirror of https://github.com/pirate/ArchiveBox.git, synced 2025-08-27 00:05:27 +02:00
Exclude links that are in the blacklist
@@ -270,6 +270,7 @@ except:

URL_BLACKLIST = re.compile(
    r'(.*\.youtube\.com)|'
    r'(.*\.facebook\.com)|'
    r'(.*\.amazon\.com)|'
    r'(.*\.reddit\.com)|'
    re.IGNORECASE,
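Aside (not part of the commit): re.match() only anchors at the start of the string, so the leading .* in each alternative is what lets this pattern hit full URLs rather than bare hostnames. A minimal illustrative check, with made-up URLs:

import re

# Same pattern as the URL_BLACKLIST above; the URLs below are hypothetical examples.
pattern = re.compile(
    r'(.*\.youtube\.com)|'
    r'(.*\.facebook\.com)|'
    r'(.*\.amazon\.com)|'
    r'(.*\.reddit\.com)',
    re.IGNORECASE,
)

print(bool(pattern.match('https://www.youtube.com/watch?v=abc')))  # True: '.*' absorbs 'https://www'
print(bool(pattern.match('https://example.com/page')))             # False: no blacklisted domain matched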
@@ -28,12 +28,18 @@ from util import (
    check_links_structure,
)

from config import (
    URL_BLACKLIST,
)

def validate_links(links):
    check_links_structure(links)
    links = archivable_links(links)   # remove chrome://, about:, mailto: etc.
    links = uniquefied_links(links)   # merge/dedupe duplicate timestamps & urls
    links = sorted_links(links)       # deterministically sort the links based on timestamp, url
    links = exclude_links(links)      # exclude links that are in the blacklist

    print(links)

    if not links:
        print('[X] No links found :(')
@@ -43,6 +49,7 @@ def validate_links(links):
        link['title'] = unescape(link['title'].strip()) if link['title'] else None
        check_link_structure(link)

    print("FINAL LIST", list(links))
    return list(links)
@@ -115,3 +122,10 @@ def lowest_uniq_timestamp(used_timestamps, timestamp):
        new_timestamp = '{}.{}'.format(timestamp, nonce)

    return new_timestamp

def exclude_links(links):
    """Exclude links that are in the blacklist."""

    links = [link for link in links if not URL_BLACKLIST.match(link['url'])]

    return links
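For context, a minimal sketch of how the new filter behaves end to end, assuming links are dicts with a 'url' key as elsewhere in ArchiveBox (the sample data is invented for illustration):

import re

# Same blacklist pattern as in the diff above.
URL_BLACKLIST = re.compile(
    r'(.*\.youtube\.com)|'
    r'(.*\.facebook\.com)|'
    r'(.*\.amazon\.com)|'
    r'(.*\.reddit\.com)',
    re.IGNORECASE,
)

def exclude_links(links):
    """Drop links whose URL matches the blacklist, keep everything else."""
    return [link for link in links if not URL_BLACKLIST.match(link['url'])]

# Hypothetical input: one blacklisted link, one regular link.
links = [
    {'url': 'https://www.reddit.com/r/python/', 'title': 'A thread'},
    {'url': 'https://example.com/article', 'title': 'An article'},
]

print([link['url'] for link in exclude_links(links)])
# -> ['https://example.com/article']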