Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 6 additions & 5 deletions conda_forge_tick/deploy.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
from .lazy_json_backends import (
CF_TICK_GRAPH_DATA_HASHMAPS,
get_lazy_json_backends,
lazy_json_override_backends,
)
from .os_utils import clean_disk_space
from .settings import settings
Expand Down Expand Up @@ -304,11 +305,11 @@ def deploy(
return

# make sure the graph can load, if not it will error
gx = load_existing_graph()
# TODO: be more selective about which json to check
for node, attrs in gx.nodes.items():
with attrs["payload"]:
pass
with lazy_json_override_backends(["file-read-only"], use_file_cache=False):
gx = load_existing_graph()
for node, attrs in gx.nodes.items():
with attrs["payload"]:
pass

with fold_log_lines("cleaning up disk space for deploy"):
clean_disk_space()
Expand Down
21 changes: 21 additions & 0 deletions conda_forge_tick/lazy_json_backends.py
Original file line number Diff line number Diff line change
Expand Up @@ -200,6 +200,26 @@ def hget(self, name: str, key: str) -> str:
return data_str


class ReadOnlyFileLazyJsonBackend(FileLazyJsonBackend):
    """File-backed lazy-JSON backend that silently discards all writes.

    Read operations are inherited unchanged from ``FileLazyJsonBackend``;
    the mutating operations ``hset``, ``hmset`` and ``hdel`` become no-ops.
    The first discarded write logs a single informational note so the
    behavior is not completely invisible.
    """

    # Class-level flag: becomes True once the ignored-write note has been
    # logged, so the message appears at most once per process.
    _write_warned = False

    @classmethod
    def _ignore_write(cls) -> None:
        """Emit the ignored-write notice on first use only."""
        if not cls._write_warned:
            logger.info("Note: Write operations to the read-only file backend are ignored.")
            cls._write_warned = True

    def hset(self, name: str, key: str, value: str) -> None:
        # Drop the single-key write on the floor.
        self._ignore_write()

    def hmset(self, name: str, mapping: Mapping[str, str]) -> None:
        # Drop the bulk write on the floor.
        self._ignore_write()

    def hdel(self, name: str, keys: Iterable[str]) -> None:
        # Drop the deletion on the floor.
        self._ignore_write()


class GithubLazyJsonBackend(LazyJsonBackend):
"""
Read-only backend that makes live requests to https://raw.githubusercontent.com
Expand Down Expand Up @@ -709,6 +729,7 @@ def hget(self, name, key):

LAZY_JSON_BACKENDS: dict[str, type[LazyJsonBackend]] = {
"file": FileLazyJsonBackend,
"file-read-only": ReadOnlyFileLazyJsonBackend,
"mongodb": MongoDBLazyJsonBackend,
"github": GithubLazyJsonBackend,
"github_api": GithubAPILazyJsonBackend,
Expand Down
103 changes: 103 additions & 0 deletions tests/test_lazy_json_backends.py
Original file line number Diff line number Diff line change
Expand Up @@ -1047,3 +1047,106 @@ def test_lazy_json_backends_contexts_double_write():
# now the file exists at the end of the context block
assert os.path.exists(lzj.sharded_path)
assert lzj.data == {"hi": "world"}


def test_lazy_json_file_read_only_backend(tmpdir):
    """Exercise the ``file-read-only`` backend through ``LazyJson``.

    Verifies that with the read-only backend active: no files are created,
    direct ``update`` calls raise, pickling round-trips without data,
    context-manager mutations (including nested contexts, ``del``, ``pop``
    and ``clear``) never change the in-memory value or the on-disk file,
    and reads of a pre-existing sharded file work normally.
    """
    with pushd(tmpdir):
        # Save the module-level backend configuration so it can be restored
        # in the finally block below — these are process-wide globals.
        old_backend = conda_forge_tick.lazy_json_backends.CF_TICK_GRAPH_DATA_BACKENDS
        old_cache = (
            conda_forge_tick.lazy_json_backends.CF_TICK_GRAPH_DATA_USE_FILE_CACHE
        )
        try:
            # Force the read-only file backend as the sole (and primary)
            # backend and disable the file cache so every access goes
            # through the backend under test.
            conda_forge_tick.lazy_json_backends.CF_TICK_GRAPH_DATA_BACKENDS = (
                "file-read-only",
            )
            conda_forge_tick.lazy_json_backends.CF_TICK_GRAPH_DATA_PRIMARY_BACKEND = (
                "file-read-only"
            )
            conda_forge_tick.lazy_json_backends.CF_TICK_GRAPH_DATA_USE_FILE_CACHE = (
                False
            )

            f = "hi.json"
            sharded_path = get_sharded_path(f)
            assert not os.path.exists(f)
            lj = LazyJson(f)

            # Merely constructing a LazyJson must not create any file.
            assert not os.path.exists(sharded_path)
            assert not os.path.exists(lj.file_name)

            # Mutation outside a context manager is disallowed regardless
            # of backend, and still must not touch disk.
            with pytest.raises(AssertionError):
                lj.update({"hi": "globe"})
            assert not os.path.exists(sharded_path)
            assert not os.path.exists(lj.file_name)

            # Pickle round-trip: the unpickled copy carries no cached data
            # and the round-trip itself writes nothing to disk.
            p = pickle.dumps(lj)
            lj2 = pickle.loads(p)
            assert not getattr(lj2, "_data", None)
            assert not os.path.exists(sharded_path)
            assert not os.path.exists(lj.file_name)

            # A context-manager write is silently dropped: the object stays
            # empty and no file appears.
            with lj as attrs:
                attrs["hi"] = "world"
            assert lj == {}
            assert not os.path.exists(sharded_path)
            assert not os.path.exists(lj.file_name)

            # Seed the sharded file directly on disk; reads must still work
            # through the read-only backend.
            with open(sharded_path, "w") as ff:
                assert ff.write(dumps({"hi": "world"}))
            lj = LazyJson(f)
            assert lj == {"hi": "world"}

            # In-context updates are discarded: in-memory value and on-disk
            # contents both remain the seeded data.
            with lj as attrs:
                attrs.update({"hi": "globe"})
                attrs.setdefault("lst", []).append("universe")
            assert lj == {"hi": "world"}
            with open(sharded_path) as ff:
                assert ff.read() == dumps({"hi": "world"})

            # Nested context managers on the same object also discard all
            # mutations, at every nesting level.
            with lj as attrs:
                attrs.setdefault("lst", []).append("universe")
                with lj as attrs_again:
                    attrs_again.setdefault("lst", []).append("universe")
                    attrs.setdefault("lst", []).append("universe")
            assert lj == {"hi": "world"}
            with open(sharded_path) as ff:
                assert ff.read() == dumps({"hi": "world"})

            # Same check with the mutation introduced in the inner context
            # first, then continued in the outer one.
            with lj as attrs:
                with lj as attrs_again:
                    attrs_again.setdefault("lst2", []).append("universe")
                    attrs.setdefault("lst2", []).append("universe")
            assert lj == {"hi": "world"}
            with open(sharded_path) as ff:
                assert ff.read() == dumps({"hi": "world"})

            # Deletions are discarded too: del ...
            with lj as attrs:
                del attrs["hi"]
            assert lj == {"hi": "world"}
            with open(sharded_path) as ff:
                assert ff.read() == dumps({"hi": "world"})

            # ... and pop().
            with lj as attrs:
                attrs.pop("hi")
            assert lj == {"hi": "world"}
            with open(sharded_path) as ff:
                assert ff.read() == dumps({"hi": "world"})

            # Read-only views (len / iteration) reflect the seeded data.
            assert len(lj) == 1
            assert {k for k in lj} == {"hi"}

            # clear() is likewise a no-op.
            with lj as attrs:
                attrs.clear()
            assert lj == {"hi": "world"}
            with open(sharded_path) as ff:
                assert ff.read() == dumps({"hi": "world"})
        finally:
            # Restore the global backend configuration for other tests.
            # NOTE(review): the primary backend is restored as
            # old_backend[0], i.e. this assumes the primary was the first
            # configured backend — confirm that holds for the suite.
            conda_forge_tick.lazy_json_backends.CF_TICK_GRAPH_DATA_BACKENDS = (
                old_backend
            )
            conda_forge_tick.lazy_json_backends.CF_TICK_GRAPH_DATA_PRIMARY_BACKEND = (
                old_backend[0]
            )
            conda_forge_tick.lazy_json_backends.CF_TICK_GRAPH_DATA_USE_FILE_CACHE = (
                old_cache
            )
Loading