refactor: consolidate load_cache into build.load_stars

load_cache was a duplicate of logic now living in build.load_stars.
Switch the call site to the shared helper and remove the redundant
local function and its tests.

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
Vinta Chen
2026-03-18 17:28:53 +08:00
parent 143abbf201
commit af3baab2ed
2 changed files with 2 additions and 35 deletions

View File

@@ -10,7 +10,7 @@ from pathlib import Path
 import httpx
-from build import extract_github_repo
+from build import extract_github_repo, load_stars
 CACHE_MAX_AGE_DAYS = 7
 DATA_DIR = Path(__file__).parent / "data"
@@ -30,17 +30,6 @@ def extract_github_repos(text: str) -> set[str]:
     return repos
-def load_cache() -> dict:
-    """Load the star cache from disk. Returns empty dict if missing or corrupt."""
-    if CACHE_FILE.exists():
-        try:
-            return json.loads(CACHE_FILE.read_text(encoding="utf-8"))
-        except json.JSONDecodeError:
-            print(f"Warning: corrupt cache at {CACHE_FILE}, starting fresh.", file=sys.stderr)
-            return {}
-    return {}
 def save_cache(cache: dict) -> None:
     """Write the star cache to disk, creating data/ dir if needed."""
     DATA_DIR.mkdir(parents=True, exist_ok=True)
@@ -114,7 +103,7 @@ def main() -> None:
     current_repos = extract_github_repos(readme_text)
     print(f"Found {len(current_repos)} GitHub repos in README.md")
-    cache = load_cache()
+    cache = load_stars(CACHE_FILE)
     now = datetime.now(timezone.utc)
     # Prune entries not in current README

View File

@@ -8,7 +8,6 @@ sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
 from fetch_github_stars import (
     build_graphql_query,
     extract_github_repos,
-    load_cache,
     parse_graphql_response,
     save_cache,
 )
@@ -65,27 +64,6 @@ class TestExtractGithubRepos:
         assert result == {"org/repo"}
-class TestLoadCache:
-    def test_returns_empty_when_missing(self, tmp_path, monkeypatch):
-        monkeypatch.setattr("fetch_github_stars.CACHE_FILE", tmp_path / "nonexistent.json")
-        result = load_cache()
-        assert result == {}
-    def test_loads_valid_cache(self, tmp_path, monkeypatch):
-        cache_file = tmp_path / "stars.json"
-        cache_file.write_text('{"a/b": {"stars": 1}}', encoding="utf-8")
-        monkeypatch.setattr("fetch_github_stars.CACHE_FILE", cache_file)
-        result = load_cache()
-        assert result == {"a/b": {"stars": 1}}
-    def test_returns_empty_on_corrupt_json(self, tmp_path, monkeypatch):
-        cache_file = tmp_path / "stars.json"
-        cache_file.write_text("not json", encoding="utf-8")
-        monkeypatch.setattr("fetch_github_stars.CACHE_FILE", cache_file)
-        result = load_cache()
-        assert result == {}
 class TestSaveCache:
     def test_creates_directory_and_writes_json(self, tmp_path, monkeypatch):
         data_dir = tmp_path / "data"