
Merge pull request #200 from mlazana/master

Exclude blacklisted URLs
Nick Sweeting 6 years ago
commit 4fd04b8caa
2 changed files with 18 additions and 9 deletions
  1. archivebox/config.py (+4 -0)
  2. archivebox/links.py (+14 -9)

+ 4 - 0
archivebox/config.py

@@ -47,6 +47,8 @@ WGET_BINARY =            os.getenv('WGET_BINARY',            'wget')
 YOUTUBEDL_BINARY =       os.getenv('YOUTUBEDL_BINARY',       'youtube-dl')
 CHROME_BINARY =          os.getenv('CHROME_BINARY',          None)
 
+URL_BLACKLIST =          os.getenv('URL_BLACKLIST',          None)
+
 try:
     OUTPUT_DIR = os.path.abspath(os.getenv('OUTPUT_DIR'))
 except Exception:
@@ -74,6 +76,8 @@ USE_CHROME = FETCH_PDF or FETCH_SCREENSHOT or FETCH_DOM
 USE_WGET = FETCH_WGET or FETCH_WGET_REQUISITES or FETCH_WARC
 WGET_AUTO_COMPRESSION = USE_WGET and WGET_BINARY and (not run([WGET_BINARY, "--compression=auto", "--help"], stdout=DEVNULL).returncode)
 
+URL_BLACKLIST = URL_BLACKLIST and re.compile(URL_BLACKLIST, re.IGNORECASE)
+
 ########################### Environment & Dependencies #########################
 
 try:
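
For context (not part of the commit), a minimal sketch of how the new option behaves: URL_BLACKLIST is read from the environment as a plain regular-expression string and, when set, compiled case-insensitively; links.py then calls .match() on each URL, so the pattern is matched from the start of the URL. The pattern and URLs below are hypothetical examples.

    import os
    import re

    # Hypothetical blacklist pattern; in ArchiveBox this comes from the URL_BLACKLIST env var.
    os.environ['URL_BLACKLIST'] = r'^http(s)?://(.+\.)?example\.com'

    URL_BLACKLIST = os.getenv('URL_BLACKLIST', None)
    URL_BLACKLIST = URL_BLACKLIST and re.compile(URL_BLACKLIST, re.IGNORECASE)

    print(bool(URL_BLACKLIST.match('https://example.com/page')))      # True  -> excluded
    print(bool(URL_BLACKLIST.match('https://www.EXAMPLE.com/page')))  # True  -> case-insensitive
    print(bool(URL_BLACKLIST.match('https://archive.org/about')))     # False -> kept
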

+ 14 - 9
archivebox/links.py

@@ -28,13 +28,16 @@ from util import (
     check_links_structure,
 )
 
+from config import (
+    URL_BLACKLIST,
+)
 
 def validate_links(links):
     check_links_structure(links)
-    links = archivable_links(links)  # remove chrome://, about:, mailto: etc.
-    links = uniquefied_links(links)  # merge/dedupe duplicate timestamps & urls
-    links = sorted_links(links)      # deterministically sort the links based on timstamp, url
-
+    links = archivable_links(links)     # remove chrome://, about:, mailto: etc.
+    links = uniquefied_links(links)     # merge/dedupe duplicate timestamps & urls
+    links = sorted_links(links)         # deterministically sort the links based on timestamp, url
+    
     if not links:
         print('[X] No links found :(')
         raise SystemExit(1)
@@ -48,11 +51,11 @@ def validate_links(links):
 
 def archivable_links(links):
     """remove chrome://, about:// or other schemed links that cant be archived"""
-    return (
-        link
-        for link in links
-        if any(link['url'].lower().startswith(s) for s in ('http://', 'https://', 'ftp://'))
-    )
+    for link in links:
+        scheme_is_valid = scheme(link['url']) in ('http', 'https', 'ftp')
+        not_blacklisted = (not URL_BLACKLIST.match(link['url'])) if URL_BLACKLIST else True
+        if scheme_is_valid and not_blacklisted:
+            yield link
 
 
 def uniquefied_links(sorted_links):
@@ -115,3 +118,5 @@ def lowest_uniq_timestamp(used_timestamps, timestamp):
         new_timestamp = '{}.{}'.format(timestamp, nonce)
 
     return new_timestamp
+    
+
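
And a rough, self-contained sketch of the new filtering in archivable_links() (assumptions: scheme() is the helper from util and behaves like urlparse(url).scheme, only the 'url' key of each link dict matters here, and the blacklist pattern is made up):

    import re
    from urllib.parse import urlparse

    # Hypothetical blacklist pattern standing in for the compiled URL_BLACKLIST from config.py.
    URL_BLACKLIST = re.compile(r'^http(s)?://(.+\.)?tracker\.example', re.IGNORECASE)

    def scheme(url):
        # stand-in for the scheme helper assumed to come from util
        return urlparse(url).scheme

    def archivable_links(links):
        """remove chrome://, about:// or other schemed links that cant be archived"""
        for link in links:
            scheme_is_valid = scheme(link['url']) in ('http', 'https', 'ftp')
            not_blacklisted = (not URL_BLACKLIST.match(link['url'])) if URL_BLACKLIST else True
            if scheme_is_valid and not_blacklisted:
                yield link

    links = [
        {'url': 'https://example.com/article'},         # kept
        {'url': 'chrome://settings'},                   # dropped: scheme is not archivable
        {'url': 'https://tracker.example/pixel.gif'},   # dropped: matches the blacklist
    ]
    print([link['url'] for link in archivable_links(links)])
    # ['https://example.com/article']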