```python
import os
import shutil
from pathlib import Path

from bs4 import BeautifulSoup


def update_links_in_html(html_path: Path):
    """Apply every REPLACE_RULES substitution to the href of each <a> tag in the file."""
    with html_path.open("r+", encoding="utf-8") as file:
        content = file.read()
        soup = BeautifulSoup(content, "html.parser")
        for a_tag in soup.find_all("a"):
            href = a_tag.get("href")
            if href:
                for old, new in REPLACE_RULES.items():
                    if old in href:
                        href = href.replace(old, new)
                a_tag["href"] = href
        # Overwrite the file in place with the rewritten markup.
        file.seek(0)
        file.write(str(soup))
        file.truncate()


if __name__ == "__main__":
    # Promote the archives index to the site root, drop the archives folder,
    # then fix up the links in every remaining HTML page (except 404.html).
    shutil.move(ARCHIVES_DIR / "index.html", SITE_ROOT / "index.html")
    shutil.rmtree(ARCHIVES_DIR)
    for root, _, files in os.walk(SITE_ROOT):
        for filename in files:
            if filename.endswith(".html") and filename != "404.html":
                update_links_in_html(Path(root) / filename)
```
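The script assumes three module-level names defined elsewhere: `SITE_ROOT`, `ARCHIVES_DIR`, and `REPLACE_RULES`. A minimal sketch of what they might look like — the paths and rules here are placeholders, not values from the original script; substitute your own site layout:

```python
from pathlib import Path

# Hypothetical values -- adjust to your own repo layout and URL rewrites.
SITE_ROOT = Path(r"d:\repo\blog")        # root of the generated static site
ARCHIVES_DIR = SITE_ROOT / "archives"    # folder whose index becomes the new homepage
REPLACE_RULES = {
    # old substring in href -> replacement
    "/archives/": "/",
}
```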
bs.bat
```bat
:: Serve the blog locally on port 54321 and open it in the browser
cd /d d:\repo\blog-source
start http://localhost:54321/
call npx hexo s -p 54321
```
bg.bat
```bat
:: Remove the old files
cd /d d:\repo\blog
git rm -r *

:: Render the new files
cd ..\blog-source
call npx hexo g

:: Transfer the new files
xcopy public ..\blog /s /e
call npx hexo clean

:: Post-process the files
cd /d d:\scripts
call D:/ProgramData/anaconda3/Scripts/activate
call python bm.py
```
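Note that bg.bat only stages the deletions with `git rm` and never commits: after `bm.py` (presumably the link-rewriting script shown at the top of this section) finishes, the changes in `d:\repo\blog` still have to be committed and pushed as a separate step.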