Skip to content

Commit 1fb4e38

Browse files
authored
Merge pull request #295 from fgcz/rel-20250919_1
Release 1.13.34
2 parents 160534c + 4aba1b8 commit 1fb4e38

35 files changed

Lines changed: 1285 additions & 429 deletions

.github/workflows/pr_release_preview.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ jobs:
4040
echo -e "$table" >> $GITHUB_OUTPUT
4141
echo "TABLE_EOF" >> $GITHUB_OUTPUT
4242
- name: Create or update PR comment
43-
uses: actions/github-script@v7
43+
uses: actions/github-script@v8
4444
with:
4545
script: |
4646
const packages = JSON.parse('${{ steps.check-versions.outputs.packages-to-release }}');

.github/workflows/publish_release.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ jobs:
5858
steps:
5959
- uses: actions/checkout@v5
6060
- name: Set up Python
61-
uses: actions/setup-python@v5
61+
uses: actions/setup-python@v6
6262
with:
6363
python-version: '3.11'
6464
- name: Install hatch

.github/workflows/run_unit_tests.yml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ jobs:
1111
runs-on: ubuntu-latest
1212
steps:
1313
- uses: actions/checkout@v5
14-
- uses: actions/setup-python@v5
14+
- uses: actions/setup-python@v6
1515
with:
1616
python-version: 3.11
1717
- name: Install nox
@@ -23,7 +23,7 @@ jobs:
2323
runs-on: ubuntu-latest
2424
steps:
2525
- uses: actions/checkout@v5
26-
- uses: actions/setup-python@v5
26+
- uses: actions/setup-python@v6
2727
with:
2828
python-version: 3.11
2929
- name: Install nox
@@ -49,7 +49,7 @@ jobs:
4949
head-ref: ${{ github.sha }}
5050
- name: Comment on PR with "TODO" changes
5151
if: steps.todo-diff.outputs.has-changes == 'true'
52-
uses: actions/github-script@v7
52+
uses: actions/github-script@v8
5353
with:
5454
script: |
5555
const summary = `${{ steps.todo-diff.outputs.summary }}`;

bfabric/docs/changelog.md

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,18 @@ Versioning currently follows `X.Y.Z` where
1010

1111
## \[Unreleased\]
1212

13+
## \[1.13.34\] - 2025-09-19
14+
15+
### Added
16+
17+
- `bfabric.experimental.cache` which implements re-entrant lookup caching for entities (when retrieved by ID).
18+
- Fields `filename`, `storage_relative_path`, `storage_absolute_path` have been added to `Resource`. These should be used
19+
in the future to ensure path handling is performed consistently.
20+
21+
### Removed
22+
23+
- `bfabric.experimental.entity_lookup_cache` has been removed in favor of the new (experimental) API.
24+
1325
## \[1.13.33\] - 2025-08-26
1426

1527
### Added

bfabric/pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ build-backend = "hatchling.build"
66
name = "bfabric"
77
description = "Python client for the B-Fabric API"
88
readme = "../README.md"
9-
version = "1.13.33"
9+
version = "1.13.34"
1010
license = { text = "GPL-3.0" }
1111
authors = [
1212
{ name = "Christian Panse", email = "cp@fgcz.ethz.ch" },

bfabric/src/bfabric/entities/core/entity.py

Lines changed: 17 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
from loguru import logger
77

88
from bfabric.experimental import MultiQuery
9-
from bfabric.experimental.entity_lookup_cache import EntityLookupCache
9+
from bfabric.experimental.cache.context import get_cache_stack
1010

1111
if TYPE_CHECKING:
1212
from pathlib import Path
@@ -47,41 +47,41 @@ def _client(self) -> Bfabric | None:
4747
@classmethod
4848
def find(cls, id: int, client: Bfabric) -> Self | None:
4949
"""Finds an entity by its ID, if it does not exist `None` is returned."""
50-
cache = EntityLookupCache.instance()
51-
if cache and cache.contains(entity_type=cls, entity_id=id):
52-
return cache.get(entity_type=cls, entity_id=id)
53-
else:
54-
result = client.read(cls.ENDPOINT, obj={"id": int(id)})
55-
entity = cls(result[0], client=client) if len(result) == 1 else None
56-
if cache:
57-
cache.put(entity_type=cls, entity_id=id, entity=entity)
58-
return entity
50+
cache_stack = get_cache_stack()
51+
cache_entry = cache_stack.item_get(entity_type=cls, entity_id=id)
52+
if cache_entry:
53+
return cache_entry
54+
55+
result = client.read(cls.ENDPOINT, obj={"id": int(id)})
56+
entity = cls(result[0], client=client) if len(result) == 1 else None
57+
cache_stack.item_put(entity_type=cls, entity_id=id, entity=entity)
58+
return entity
5959

6060
@classmethod
6161
def find_all(cls, ids: list[int], client: Bfabric) -> dict[int, Self]:
6262
"""Returns a dictionary of entities with the given IDs. The order will generally match the input, however,
6363
if some entities are not found they will be omitted and a warning will be logged.
6464
"""
65-
cache = EntityLookupCache.instance()
65+
cache_stack = get_cache_stack()
6666
ids_requested = cls.__check_ids_list(ids)
6767

6868
# retrieve entities from cache and from B-Fabric as needed
69-
results_cached = cache.get_all(entity_type=cls, entity_ids=ids) if cache else {}
69+
results_cached = cache_stack.item_get_all(entity_type=cls, entity_ids=ids)
7070
results_fresh = cls.__retrieve_entities(
7171
client=client, ids_requested=ids_requested, ids_cached=results_cached.keys()
7272
)
7373

74-
if cache:
75-
for entity_id, entity in results_fresh.items():
76-
cache.put(entity_type=cls, entity_id=entity_id, entity=entity)
77-
74+
cache_stack.item_put_all(entity_type=cls, entities=results_fresh)
7875
return cls.__ensure_results_order(ids_requested, results_cached, results_fresh)
7976

8077
@classmethod
8178
def find_by(cls, obj: dict[str, Any], client: Bfabric, max_results: int | None = 100) -> dict[int, Self]:
8279
"""Returns a dictionary of entities that match the given query."""
8380
result = client.read(cls.ENDPOINT, obj=obj, max_results=max_results)
84-
return {x["id"]: cls(x, client=client) for x in result}
81+
cache_stack = get_cache_stack()
82+
entities = {x["id"]: cls(x, client=client) for x in result}
83+
cache_stack.item_put_all(entity_type=cls, entities=entities)
84+
return entities
8585

8686
def dump_yaml(self, path: Path) -> None:
8787
"""Writes the entity's data dictionary to a YAML file."""

bfabric/src/bfabric/entities/resource.py

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
from __future__ import annotations
22

3+
from pathlib import Path
34
from typing import Any, TYPE_CHECKING
45

56
from bfabric.entities.core.entity import Entity
@@ -21,3 +22,20 @@ def __init__(self, data_dict: dict[str, Any], client: Bfabric | None = None) ->
2122
storage: HasOne[Storage] = HasOne("Storage", bfabric_field="storage")
2223
workunit: HasOne[Workunit] = HasOne("Workunit", bfabric_field="workunit")
2324
sample: HasOne[Sample] = HasOne("Sample", bfabric_field="sample", optional=True)
25+
26+
@property
def storage_relative_path(self) -> Path:
    """Returns the path of the resource relative to the storage root, as a Path object.

    The B-Fabric `relativepath` field may carry a leading slash; it is stripped so the
    result composes cleanly with the storage base path.
    """
    relative = self["relativepath"].lstrip("/")
    return Path(relative)
30+
31+
@property
def storage_absolute_path(self) -> Path:
    """Returns the absolute path of the resource within its storage, as a Path object.

    Composed from the storage's base path and the resource's storage-relative path.
    """
    base = Path(self.storage.base_path)
    return base / self.storage_relative_path
35+
36+
@property
def filename(self) -> str:
    """Returns the filename component of the stored path.

    This is the name under which the file is stored, which is not necessarily
    the same as the resource's `name` field.
    """
    return self["relativepath"].rsplit("/", 1)[-1]

bfabric/src/bfabric/experimental/cache/__init__.py

Whitespace-only changes.
Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,53 @@
1+
from __future__ import annotations
2+
from typing import TYPE_CHECKING
3+
4+
if TYPE_CHECKING:
5+
from bfabric.entities.core.entity import Entity
6+
from bfabric.experimental.cache._entity_memory_cache import EntityMemoryCache
7+
8+
9+
class CacheStack:
    """Implements a stack of entity caches.

    Retrieval is performed by looking first at the most recently added cache.
    Items are put into all caches in the stack (they may each have different sizes and inclusion rules) so that nesting
    contexts has no effect on cache hits.
    """

    def __init__(self) -> None:
        self._stack: list[EntityMemoryCache] = []

    def cache_push(self, cache: EntityMemoryCache) -> None:
        """Adds a cache on top of the stack, making it the first one consulted on lookups."""
        self._stack.append(cache)

    def cache_pop(self) -> None:
        """Removes the most recently pushed cache from the stack."""
        self._stack.pop()

    def item_contains(self, entity_type: type[Entity], entity_id: int) -> bool:
        """Returns whether any cache in the stack contains the entity with the given type and ID."""
        return any(layer.contains(entity_type, entity_id) for layer in reversed(self._stack))

    def item_get(self, entity_type: type[Entity], entity_id: int) -> Entity | None:
        """Returns the cached entity, consulting caches from newest to oldest, or None if absent."""
        for layer in reversed(self._stack):
            hit = layer.get(entity_type, entity_id)
            if hit is not None:
                return hit
        return None

    def item_get_all(self, entity_type: type[Entity], entity_ids: list[int]) -> dict[int, Entity]:
        """Returns the subset of the requested entities found in the stack.

        Newer caches take precedence; IDs already resolved are not looked up in older caches.
        """
        found: dict[int, Entity] = {}
        missing = set(entity_ids)
        for layer in reversed(self._stack):
            if not missing:
                break
            hits = layer.get_all(entity_type, list(missing))
            found.update(hits)
            missing.difference_update(hits)
        return found

    def item_put(self, entity_type: type[Entity], entity_id: int, entity: Entity | None) -> None:
        """Stores the entity in every cache of the stack (each may apply its own inclusion rules)."""
        for layer in reversed(self._stack):
            layer.put(entity_type, entity_id, entity)

    def item_put_all(self, entity_type: type[Entity], entities: dict[int, Entity | None]) -> None:
        """Stores each of the given entities in every cache of the stack."""
        for entity_id, entity in entities.items():
            self.item_put(entity_type, entity_id, entity)
Lines changed: 58 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,58 @@
1+
from __future__ import annotations
2+
3+
from typing import TypeVar, TYPE_CHECKING
4+
5+
from loguru import logger
6+
7+
from bfabric.experimental.cache._fifo_cache import FifoCache
8+
9+
if TYPE_CHECKING:
10+
from bfabric.entities.core.entity import Entity
11+
12+
E = TypeVar("E", bound="Entity")
13+
14+
15+
class EntityMemoryCache:
    """Implements a configurable cache for different entity types in memory.

    Only entity types specified in the config will be cached, and at most the specified number of entities will be
    cached for each type.
    """

    def __init__(self, config: dict[type[Entity], int]) -> None:
        """:param config: maps each cacheable entity type to the maximum number of cached entities for that type."""
        self._config = config
        self._caches = {entity_type: FifoCache(max_size=max_size) for entity_type, max_size in config.items()}

    def contains(self, entity_type: type[Entity], entity_id: int) -> bool:
        """Returns whether the cache contains an entity with the given type and ID."""
        if entity_type not in self._caches:
            return False
        return entity_id in self._caches[entity_type]

    def get(self, entity_type: type[E], entity_id: int) -> E | None:
        """Returns the entity with the given type and ID, if it exists in the cache."""
        cache = self._caches.get(entity_type)
        if cache is None:
            # This entity type is not configured for caching.
            return None
        # Single lookup (the previous implementation queried the underlying FifoCache twice), and an explicit
        # None comparison so a falsy-but-cached entity is still reported as a hit.
        entity = cache.get(entity_id)
        if entity is not None:
            logger.debug(f"Cache hit for entity {entity_type} with ID {entity_id}")
            return entity
        logger.debug(f"Cache miss for entity {entity_type} with ID {entity_id}")
        return None

    def get_all(self, entity_type: type[Entity], entity_ids: list[int]) -> dict[int, Entity]:
        """Returns a dictionary of entities with the given type and IDs,
        containing only the entities that exist in the cache.
        """
        results = {entity_id: self.get(entity_type, entity_id) for entity_id in entity_ids}
        return {entity_id: result for entity_id, result in results.items() if result is not None}

    def put(self, entity_type: type[Entity], entity_id: int, entity: Entity | None) -> None:
        """Puts an entity with the given type and ID into the cache.

        Entity types not present in the config are silently ignored. `entity` may be None to record
        a negative lookup result.
        """
        if entity_type not in self._caches:
            return

        logger.debug(f"Caching entity {entity_type} with ID {entity_id}")
        self._caches[entity_type].put(entity_id, entity)

0 commit comments

Comments
 (0)