44 lines
1.3 KiB
Python
44 lines
1.3 KiB
Python
import time, subprocess
|
|
from os import walk, path
|
|
import urllib.parse
|
|
|
|
def the_line_after_metadata(lines: list) -> int:
    """Return the index of the first blank (whitespace-only) line in *lines*.

    The metadata header of an article file runs up to the first blank line,
    so this index is the line *after* the header ends.  If there is no blank
    line, ``len(lines)`` is returned (the whole input is header).

    :param lines: list of raw lines, e.g. from ``file.readlines()``
    :return: index of the first blank line, or ``len(lines)``
    """
    for i, line in enumerate(lines):
        if not line.strip():
            return i
    return len(lines)
|
|
|
|
def parse_metadata(filepath: str) -> dict:
    """Parse the colon-delimited metadata header of an article file.

    The header is every line up to the first blank line, each of the form
    ``Key: value``.  Recognized keys get special treatment:

    - ``Date``  — value is a Unix timestamp; stored as a ``time.struct_time``.
    - ``Tags``  — comma-separated list; stored as a list of stripped strings.
    - anything else — stored verbatim (skipped when the value is empty).

    A ``Last Edit`` entry is always added from the file's modification time.

    :param filepath: path of the article file (e.g. ``.../page.mmd``)
    :return: dict of parsed metadata
    :raises OSError: if the file cannot be opened or stat'ed
    :raises ValueError: if a ``Date`` value is not an integer
    """
    result = {}
    with open(filepath, "r") as f:
        content = f.readlines()
    header_end = the_line_after_metadata(content)

    for line in content[:header_end]:
        # partition() handles colon-less lines cleanly: val is then empty,
        # so the malformed line is skipped instead of stored as garbage
        # (the old find()-based slicing mangled such lines).
        key, _, val = line.partition(":")
        key, val = key.strip(), val.strip()
        if key == "Date":
            result["Date"] = time.gmtime(int(val))
        elif key == "Tags":
            result["Tags"] = [v.strip() for v in val.split(",")]
        elif val:
            result[key] = val

    # os.path.getmtime replaces shelling out to GNU-only `stat -c %Y`:
    # portable, and no shell-injection risk from the filepath.
    result["Last Edit"] = time.gmtime(int(path.getmtime(filepath)))

    return result
|
|
|
|
def parse_article_directory(directory: str) -> dict:
    """Build an article index from the immediate subdirectories of *directory*.

    Each subdirectory is one article: its ``page.mmd`` metadata is parsed and
    stored under the URL-quoted directory name.

    :param directory: root directory containing one subdirectory per article
    :return: mapping of URL-quoted article name -> ``{"metadata": {...}}``
    :raises OSError: if a ``page.mmd`` is missing or unreadable
    """
    articles = {}
    for root, dirs, _ in walk(directory):
        for d in dirs:
            # path.join instead of manual '/' concatenation.
            metadata = parse_metadata(path.abspath(path.join(root, d, "page.mmd")))
            # Quote the name so it is safe to use directly in a URL.
            article = urllib.parse.quote(d)
            articles[article] = {
                "metadata": metadata
            }
        # Only the top level of the walk is wanted: articles do not nest.
        break
    return articles
|
|
|
|
def sort_titles_by_date(articles: dict) -> list:
    """Return article names sorted newest-first by their ``Date`` metadata.

    Articles without a ``Date`` entry default to the Unix epoch and therefore
    sort last.

    :param articles: mapping as produced by ``parse_article_directory``
    :return: list of article names, most recent first
    """
    epoch = time.gmtime(0)  # hoisted: same default for every key lookup
    return sorted(
        articles.keys(),
        key=lambda a: articles[a]["metadata"].get("Date", epoch),
        reverse=True,
    )
|