From f19873a6a97a59c44aa1ba916c20e8f84e9a06ef Mon Sep 17 00:00:00 2001
From: beckermr
Date: Sun, 15 Mar 2026 05:22:24 -0500
Subject: [PATCH] fix: ensure deploy does not write data

---
 conda_forge_tick/deploy.py             |  11 +--
 conda_forge_tick/lazy_json_backends.py |  21 +++++
 tests/test_lazy_json_backends.py       | 103 +++++++++++++++++++++++++
 3 files changed, 130 insertions(+), 5 deletions(-)

diff --git a/conda_forge_tick/deploy.py b/conda_forge_tick/deploy.py
index 250ea6b49..962824540 100644
--- a/conda_forge_tick/deploy.py
+++ b/conda_forge_tick/deploy.py
@@ -14,6 +14,7 @@
 from .lazy_json_backends import (
     CF_TICK_GRAPH_DATA_HASHMAPS,
     get_lazy_json_backends,
+    lazy_json_override_backends,
 )
 from .os_utils import clean_disk_space
 from .settings import settings
@@ -304,11 +305,11 @@ def deploy(
         return
 
     # make sure the graph can load, if not it will error
-    gx = load_existing_graph()
-    # TODO: be more selective about which json to check
-    for node, attrs in gx.nodes.items():
-        with attrs["payload"]:
-            pass
+    with lazy_json_override_backends(["file-read-only"], use_file_cache=False):
+        gx = load_existing_graph()
+        for node, attrs in gx.nodes.items():
+            with attrs["payload"]:
+                pass
 
     with fold_log_lines("cleaning up disk space for deploy"):
         clean_disk_space()
diff --git a/conda_forge_tick/lazy_json_backends.py b/conda_forge_tick/lazy_json_backends.py
index 629067b6e..59f334f2b 100644
--- a/conda_forge_tick/lazy_json_backends.py
+++ b/conda_forge_tick/lazy_json_backends.py
@@ -200,6 +200,26 @@ def hget(self, name: str, key: str) -> str:
         return data_str
 
 
+class ReadOnlyFileLazyJsonBackend(FileLazyJsonBackend):
+    _write_warned = False
+
+    @classmethod
+    def _ignore_write(cls) -> None:
+        if cls._write_warned:
+            return
+        logger.info("Note: Write operations to the read-only file backend are ignored.")
+        cls._write_warned = True
+
+    def hset(self, name: str, key: str, value: str) -> None:
+        self._ignore_write()
+
+    def hmset(self, name: str, mapping: Mapping[str, str]) -> None:
+        self._ignore_write()
+
+    def hdel(self, name: str, keys: Iterable[str]) -> None:
+        self._ignore_write()
+
+
 class GithubLazyJsonBackend(LazyJsonBackend):
     """
     Read-only backend that makes live requests to https://raw.githubusercontent.com
@@ -709,6 +729,7 @@ def hget(self, name, key):
 
 LAZY_JSON_BACKENDS: dict[str, type[LazyJsonBackend]] = {
     "file": FileLazyJsonBackend,
+    "file-read-only": ReadOnlyFileLazyJsonBackend,
     "mongodb": MongoDBLazyJsonBackend,
     "github": GithubLazyJsonBackend,
     "github_api": GithubAPILazyJsonBackend,
diff --git a/tests/test_lazy_json_backends.py b/tests/test_lazy_json_backends.py
index 07538c8c3..c43feb9c8 100644
--- a/tests/test_lazy_json_backends.py
+++ b/tests/test_lazy_json_backends.py
@@ -1047,3 +1047,106 @@ def test_lazy_json_backends_contexts_double_write():
     # now the file exists at the end of the context block
     assert os.path.exists(lzj.sharded_path)
     assert lzj.data == {"hi": "world"}
+
+
+def test_lazy_json_file_read_only_backend(tmpdir):
+    with pushd(tmpdir):
+        old_backend = conda_forge_tick.lazy_json_backends.CF_TICK_GRAPH_DATA_BACKENDS
+        old_cache = (
+            conda_forge_tick.lazy_json_backends.CF_TICK_GRAPH_DATA_USE_FILE_CACHE
+        )
+        try:
+            conda_forge_tick.lazy_json_backends.CF_TICK_GRAPH_DATA_BACKENDS = (
+                "file-read-only",
+            )
+            conda_forge_tick.lazy_json_backends.CF_TICK_GRAPH_DATA_PRIMARY_BACKEND = (
+                "file-read-only"
+            )
+            conda_forge_tick.lazy_json_backends.CF_TICK_GRAPH_DATA_USE_FILE_CACHE = (
+                False
+            )
+
+            f = "hi.json"
+            sharded_path = get_sharded_path(f)
+            assert not os.path.exists(f)
+            lj = LazyJson(f)
+
+            assert not os.path.exists(sharded_path)
+            assert not os.path.exists(lj.file_name)
+
+            with pytest.raises(AssertionError):
+                lj.update({"hi": "globe"})
+            assert not os.path.exists(sharded_path)
+            assert not os.path.exists(lj.file_name)
+
+            p = pickle.dumps(lj)
+            lj2 = pickle.loads(p)
+            assert not getattr(lj2, "_data", None)
+            assert not os.path.exists(sharded_path)
+            assert not os.path.exists(lj.file_name)
+
+            with lj as attrs:
+                attrs["hi"] = "world"
+            assert lj == {}
+            assert not os.path.exists(sharded_path)
+            assert not os.path.exists(lj.file_name)
+
+            with open(sharded_path, "w") as ff:
+                assert ff.write(dumps({"hi": "world"}))
+            lj = LazyJson(f)
+            assert lj == {"hi": "world"}
+
+            with lj as attrs:
+                attrs.update({"hi": "globe"})
+                attrs.setdefault("lst", []).append("universe")
+            assert lj == {"hi": "world"}
+            with open(sharded_path) as ff:
+                assert ff.read() == dumps({"hi": "world"})
+
+            with lj as attrs:
+                attrs.setdefault("lst", []).append("universe")
+                with lj as attrs_again:
+                    attrs_again.setdefault("lst", []).append("universe")
+                    attrs.setdefault("lst", []).append("universe")
+            assert lj == {"hi": "world"}
+            with open(sharded_path) as ff:
+                assert ff.read() == dumps({"hi": "world"})
+
+            with lj as attrs:
+                with lj as attrs_again:
+                    attrs_again.setdefault("lst2", []).append("universe")
+                    attrs.setdefault("lst2", []).append("universe")
+            assert lj == {"hi": "world"}
+            with open(sharded_path) as ff:
+                assert ff.read() == dumps({"hi": "world"})
+
+            with lj as attrs:
+                del attrs["hi"]
+            assert lj == {"hi": "world"}
+            with open(sharded_path) as ff:
+                assert ff.read() == dumps({"hi": "world"})
+
+            with lj as attrs:
+                attrs.pop("hi")
+            assert lj == {"hi": "world"}
+            with open(sharded_path) as ff:
+                assert ff.read() == dumps({"hi": "world"})
+
+            assert len(lj) == 1
+            assert {k for k in lj} == {"hi"}
+
+            with lj as attrs:
+                attrs.clear()
+            assert lj == {"hi": "world"}
+            with open(sharded_path) as ff:
+                assert ff.read() == dumps({"hi": "world"})
+        finally:
+            conda_forge_tick.lazy_json_backends.CF_TICK_GRAPH_DATA_BACKENDS = (
+                old_backend
+            )
+            conda_forge_tick.lazy_json_backends.CF_TICK_GRAPH_DATA_PRIMARY_BACKEND = (
+                old_backend[0]
+            )
+            conda_forge_tick.lazy_json_backends.CF_TICK_GRAPH_DATA_USE_FILE_CACHE = (
+                old_cache
+            )