@@ -45,8 +45,8 @@ MAIN_INDEX_HEADER = {
 def parse_json_main_index(out_dir: Path=OUTPUT_DIR) -> Iterator[Link]:
     """parse an archive index json file and return the list of links"""
 
-    index_path = os.path.join(out_dir, JSON_INDEX_FILENAME)
-    if os.path.exists(index_path):
+    index_path = Path(out_dir) / JSON_INDEX_FILENAME
+    if index_path.exists():
         with open(index_path, 'r', encoding='utf-8') as f:
             links = pyjson.load(f)['links']
             for link_json in links:
@@ -86,7 +86,7 @@ def write_json_main_index(links: List[Link], out_dir: Path=OUTPUT_DIR) -> None:
         'last_run_cmd': sys.argv,
         'links': links,
     }
-    atomic_write(os.path.join(out_dir, JSON_INDEX_FILENAME), main_index_json)
+    atomic_write(str(Path(out_dir) / JSON_INDEX_FILENAME), main_index_json)
 
 
 ### Link Details Index
@@ -96,15 +96,15 @@ def write_json_link_details(link: Link, out_dir: Optional[str]=None) -> None:
     """write a json file with some info about the link"""
 
     out_dir = out_dir or link.link_dir
-    path = os.path.join(out_dir, JSON_INDEX_FILENAME)
-    atomic_write(path, link._asdict(extended=True))
+    path = Path(out_dir) / JSON_INDEX_FILENAME
+    atomic_write(str(path), link._asdict(extended=True))
 
 
 @enforce_types
 def parse_json_link_details(out_dir: Union[Path, str], guess: Optional[bool]=False) -> Optional[Link]:
     """load the json link index from a given directory"""
-    existing_index = os.path.join(out_dir, JSON_INDEX_FILENAME)
-    if os.path.exists(existing_index):
+    existing_index = Path(out_dir) / JSON_INDEX_FILENAME
+    if existing_index.exists():
         with open(existing_index, 'r', encoding='utf-8') as f:
             try:
                 link_json = pyjson.load(f)
@@ -118,9 +118,9 @@ def parse_json_link_details(out_dir: Union[Path, str], guess: Optional[bool]=Fal
 def parse_json_links_details(out_dir: Union[Path, str]) -> Iterator[Link]:
     """read through all the archive data folders and return the parsed links"""
 
-    for entry in os.scandir(os.path.join(out_dir, ARCHIVE_DIR_NAME)):
+    for entry in os.scandir(Path(out_dir) / ARCHIVE_DIR_NAME):
         if entry.is_dir(follow_symlinks=True):
-            if os.path.exists(os.path.join(entry.path, 'index.json')):
+            if (Path(entry.path) / 'index.json').exists():
                 try:
                     link = parse_json_link_details(entry.path)
                 except KeyError:
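
For reference, a minimal sketch (not part of the patch) of the os.path-to-pathlib equivalence the hunks above rely on. The values below are stand-ins, and the note about atomic_write is an inference from the str() wrapping in the diff, not confirmed documentation.

# Illustrative sketch only: how the pathlib form mirrors the os.path form.
import os
from pathlib import Path

out_dir = 'archive'                 # stand-in for OUTPUT_DIR / link.link_dir
JSON_INDEX_FILENAME = 'index.json'  # same constant name as in the module

old_path = os.path.join(out_dir, JSON_INDEX_FILENAME)   # plain string join
new_path = Path(out_dir) / JSON_INDEX_FILENAME          # Path object join

assert str(new_path) == old_path
assert new_path.exists() == os.path.exists(old_path)

# The patch still passes a str to atomic_write (str(Path(...))), presumably
# because that helper expects a plain string path at this point.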