Changes from all commits (32 commits)
eff38e6  Add FrozenDataContainer class (psomhorst, Dec 1, 2025)
3a1532d  Add function to freeze a numpy array (psomhorst, Dec 1, 2025)
70d2b90  Make EITData objects frozen (psomhorst, Dec 1, 2025)
bc6196e  Update MDN to use frozen EITData object (psomhorst, Dec 1, 2025)
1b5a884  Update ROI to use frozen EITData (psomhorst, Dec 1, 2025)
a3105f1  Add test to see whether frozen objects work as intended (psomhorst, Dec 1, 2025)
fe5af4c  Update pixel breath tests to work with frozen EITData (psomhorst, Dec 1, 2025)
d597d74  Remove nframes and allow other EITData attributes to be None (psomhorst, Dec 1, 2025)
7482598  Migrate all code to use EITData.values instead of pixel_impedance (psomhorst, Dec 1, 2025)
7c3380b  Remove derived_from and parameters from data containers (psomhorst, Dec 1, 2025)
f116c37  Fix import (psomhorst, Dec 1, 2025)
414f07e  Remove lock and unlock methods (psomhorst, Dec 1, 2025)
23c1f46  Make ContinuousData frozen (psomhorst, Dec 1, 2025)
405a857  Update ContinuousData attributes (psomhorst, Dec 1, 2025)
6544321  Add tests for frozen ContinuousData (psomhorst, Dec 1, 2025)
037c71b  Change EITData.path to be a tuple of Paths (psomhorst, Dec 2, 2025)
4117811  Add option to use memoryview for freezing arrays (psomhorst, Dec 16, 2025)
c665170  Simplify logic (psomhorst, Dec 16, 2025)
3cea05c  Add NamedTupleArray including tests (psomhorst, Dec 18, 2025)
c96d549  Freeze Event dataclass (psomhorst, Dec 18, 2025)
5cddfc6  Add target version for ruff (psomhorst, Dec 18, 2025)
97591e0  Set IntervalData intervals to NamedTupleArray (psomhorst, Dec 18, 2025)
903b040  Update NamedTupleArray (psomhorst, Feb 5, 2026)
2afd20a  Make DataContainers frozen (psomhorst, Feb 9, 2026)
51e3cf3  Remove NamedTupleArray and replace with StructuredArray (psomhorst, Feb 9, 2026)
59bcfc0  Make Interval dataclass with post_init validation method (psomhorst, Feb 9, 2026)
19006ae  Update IntervalData for StructuredArray (psomhorst, Feb 9, 2026)
3a32df9  Freeze Breath (psomhorst, Feb 9, 2026)
368fa69  Convert Timpel loading to StructuredArray (psomhorst, Feb 9, 2026)
351c02c  Convert BreathDetection to StructuredArray (psomhorst, Feb 9, 2026)
3e33a9a  Update tests (psomhorst, Feb 9, 2026)
010372a  Update TIV for StructuredArray (psomhorst, Feb 9, 2026)
eitprocessing/datahandling/__init__.py (13 additions, 1 deletion)
@@ -1,3 +1,4 @@
+import dataclasses
 from copy import deepcopy
 from dataclasses import dataclass
 
@@ -6,7 +7,7 @@
 from eitprocessing.datahandling.mixins.equality import Equivalence
 
 
-@dataclass(eq=False)
+@dataclass(eq=False, frozen=True)
 class DataContainer(Equivalence):
     """Base class for data container classes."""
 
@@ -16,3 +17,14 @@ def __bool__(self):
     def deepcopy(self) -> Self:
         """Return a deep copy of the object."""
        return deepcopy(self)
+
+    def update(self: Self, **kwargs: object) -> Self:
+        """Return a copy of the object with specified fields replaced.
+
+        Args:
+            **kwargs: Fields to replace.
+
+        Returns:
+            A new instance of the object with the specified fields replaced.
+        """
+        return dataclasses.replace(self, **kwargs)
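
Note: with DataContainer frozen, update() becomes the supported way to derive a modified copy. A minimal sketch of the semantics, using an illustrative Sample class rather than a real container:

```python
import dataclasses
from dataclasses import dataclass


@dataclass(eq=False, frozen=True)
class Sample:  # stand-in for a DataContainer subclass; illustrative only
    label: str
    unit: str = "a.u."


original = Sample(label="raw")
updated = dataclasses.replace(original, unit="kPa")  # what update(**kwargs) does internally
assert updated.unit == "kPa"
assert original.unit == "a.u."  # the original instance is untouched
```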
eitprocessing/datahandling/breath.py (8 additions, 3 deletions)
@@ -1,8 +1,13 @@
-from collections.abc import Iterator
+from __future__ import annotations
+
 from dataclasses import dataclass
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Iterator
 
 
-@dataclass
+@dataclass(frozen=True)
 class Breath:
     """Represents a breath with a start, middle and end time."""
 
@@ -14,7 +19,7 @@ def __post_init__(self):
         if self.start_time >= self.middle_time or self.middle_time >= self.end_time:
             msg = (
                 "Start, middle and end should be consecutive, not "
-                "{self.start_time:.2f}, {self.middle_time:.2f} and {self.end_time:.2f}"
+                f"{self.start_time:.2f}, {self.middle_time:.2f} and {self.end_time:.2f}"
             )
             raise ValueError(msg)
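
Note: freezing Breath changes the failure modes: attribute assignment now raises, and the (now-fixed) f-string validation message fires from __post_init__. Illustrative usage, with made-up times:

```python
from dataclasses import FrozenInstanceError

from eitprocessing.datahandling.breath import Breath

breath = Breath(start_time=0.0, middle_time=1.2, end_time=2.5)

try:
    breath.end_time = 3.0  # frozen=True rejects attribute assignment
except FrozenInstanceError:
    pass

try:
    Breath(start_time=2.0, middle_time=1.0, end_time=3.0)  # out of order
except ValueError:
    pass  # "Start, middle and end should be consecutive, not 2.00, 1.00 and 3.00"
```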
eitprocessing/datahandling/continuousdata.py (17 additions, 131 deletions)
@@ -1,23 +1,24 @@
 from __future__ import annotations
 
 import warnings
-from dataclasses import dataclass, field
+from dataclasses import KW_ONLY, dataclass, field
 from typing import TYPE_CHECKING, TypeVar
 
 import numpy as np
 
 from eitprocessing.datahandling import DataContainer
 from eitprocessing.datahandling.mixins.slicing import SelectByTime
+from eitprocessing.utils.frozen_array import freeze_array
 
 if TYPE_CHECKING:
     from collections.abc import Callable
 
-    from typing_extensions import Any, Self
+    from typing_extensions import Self
 
 T = TypeVar("T", bound="ContinuousData")
 
 
-@dataclass(eq=False)
+@dataclass(eq=False, frozen=True)
 class ContinuousData(DataContainer, SelectByTime):
     """Container for data with a continuous time axis.
 
@@ -32,27 +33,20 @@ class ContinuousData(DataContainer, SelectByTime):
         unit: Unit of the data, if applicable.
         category: Category the data falls into, e.g. 'airway pressure'.
         description: Human readable extended description of the data.
-        parameters: Parameters used to derive this data.
-        derived_from: Traceback of intermediates from which the current data was derived.
         values: Data points.
     """
 
-    label: str = field(compare=False)
-    name: str = field(compare=False, repr=False)
-    unit: str = field(metadata={"check_equivalence": True}, repr=False)
-    category: str = field(metadata={"check_equivalence": True}, repr=False)
-    description: str = field(default="", compare=False, repr=False)
-    parameters: dict[str, Any] = field(default_factory=dict, repr=False, metadata={"check_equivalence": True})
-    derived_from: Any | list[Any] = field(default_factory=list, repr=False, compare=False)
-    time: np.ndarray = field(kw_only=True, repr=False)
-    values: np.ndarray = field(kw_only=True, repr=False)
+    time: np.ndarray = field(repr=False)
+    values: np.ndarray = field(repr=False)
+    _: KW_ONLY
+    label: str | None = field(compare=False, default=None)
+    name: str | None = field(compare=False, repr=False, default=None)
+    description: str | None = field(compare=False, repr=False, default=None)
+    unit: str | None = field(metadata={"check_equivalence": True}, repr=False, default=None)
+    category: str | None = field(metadata={"check_equivalence": True}, repr=False, default=None)
     sample_frequency: float | None = field(kw_only=True, repr=False, metadata={"check_equivalence": True}, default=None)
 
     def __post_init__(self) -> None:
-        if self.loaded:
-            self.lock()
-            self.lock("time")
-
         if self.sample_frequency is None:
             msg = (
                 "`sample_frequency` is set to `None`. This will not be supported in future versions. "
@@ -64,48 +58,8 @@ def __post_init__(self) -> None:
             msg = f"The number of time points ({lt}) does not match the number of values ({lv})."
             raise ValueError(msg)
 
-    def __setattr__(self, attr: str, value: Any):  # noqa: ANN401
-        try:
-            old_value = getattr(self, attr)
-        except AttributeError:
-            pass
-        else:
-            if isinstance(old_value, np.ndarray) and old_value.flags["WRITEABLE"] is False:
-                msg = f"Attribute '{attr}' is locked and can't be overwritten."
-                raise AttributeError(msg)
-        super().__setattr__(attr, value)
-
-    def copy(
-        self,
-        label: str,
-        *,
-        name: str | None = None,
-        unit: str | None = None,
-        description: str | None = None,
-        parameters: dict | None = None,
-    ) -> Self:
-        """Create a copy.
-
-        Whenever data is altered, it should probably be copied first. The alterations should then be made in the copy.
-        """
-        obj = self.__class__(
-            label=label,
-            name=name or label,
-            unit=unit or self.unit,
-            description=description or f"Derived from {self.name}",
-            parameters=self.parameters | (parameters or {}),
-            derived_from=[*self.derived_from, self],
-            category=self.category,
-            # copying data can become inefficient with large datasets if the
-            # data is not directly edited afer copying but overridden instead;
-            # consider creating a view and locking it, requiring the user to
-            # make a copy if they want to edit the data directly
-            time=np.copy(self.time),
-            values=np.copy(self.values),
-            sample_frequency=self.sample_frequency,
-        )
-        obj.unlock()
-        return obj
+        object.__setattr__(self, "time", freeze_array(self.time))
+        object.__setattr__(self, "values", freeze_array(self.values))
 
     def __add__(self: Self, other: Self) -> Self:
         return self.concatenate(other)
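
Note: the `_: KW_ONLY` pseudo-field in the hunk above is the stdlib mechanism (Python 3.10+) that makes every field declared after it keyword-only, while `time` and `values` stay positional. A standalone sketch with an illustrative class:

```python
from dataclasses import KW_ONLY, dataclass


@dataclass(frozen=True)
class Point:  # illustrative only, not part of this PR
    x: float  # may be passed positionally
    _: KW_ONLY  # pseudo-field: everything below is keyword-only
    label: str | None = None


Point(1.0, label="origin")  # OK
# Point(1.0, "origin")  # TypeError: takes 2 positional arguments but 3 were given
```

The freeze_array() implementation from eitprocessing.utils.frozen_array is not part of this diff. Going by the commit messages, a plausible minimal sketch is a function returning a read-only view, so in-place edits raise while the caller's original array is unaffected:

```python
import numpy as np


def freeze_array(array: np.ndarray) -> np.ndarray:
    """Plausible sketch only: return a read-only view of the input array."""
    view = array.view()
    view.flags.writeable = False  # in-place edits through the view now raise
    return view


frozen = freeze_array(np.arange(3.0))
try:
    frozen[0] = 42.0
except ValueError:
    pass  # "assignment destination is read-only"
```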
@@ -128,7 +82,6 @@ def concatenate(self: Self, other: Self, newlabel: str | None = None) -> Self:
             category=self.category,
             time=np.concatenate((self.time, other.time)),
             values=np.concatenate((self.values, other.values)),
-            derived_from=[*self.derived_from, *other.derived_from, self, other],
             sample_frequency=self.sample_frequency,
         )
 
@@ -173,61 +126,6 @@ def convert_data(x, add=None, subtract=None, multiply=None, divide=None):
         copy.values = function(copy.values, **func_args)
         return copy
 
-    def lock(self, *attr: str) -> None:
-        """Lock attributes, essentially rendering them read-only.
-
-        Locked attributes cannot be overwritten. Attributes can be unlocked using `unlock()`.
-
-        Args:
-            *attr: any number of attributes can be passed here, all of which will be locked. Defaults to "values".
-
-        Examples:
-            >>> # lock the `values` attribute of `data`
-            >>> data.lock()
-            >>> data.values = [1, 2, 3]  # will result in an AttributeError
-            >>> data.values[0] = 1  # will result in a RuntimeError
-        """
-        if not attr:
-            # default values are not allowed when using *attr, so set a default here if none is supplied
-            attr = ("values",)
-        for attr_ in attr:
-            getattr(self, attr_).flags["WRITEABLE"] = False
-
-    def unlock(self, *attr: str) -> None:
-        """Unlock attributes, rendering them editable.
-
-        Locked attributes cannot be overwritten, but can be unlocked with this function to make them editable.
-
-        Args:
-            *attr: any number of attributes can be passed here, all of which will be unlocked. Defaults to "values".
-
-        Examples:
-            >>> # lock the `values` attribute of `data`
-            >>> data.lock()
-            >>> data.values = [1, 2, 3]  # will result in an AttributeError
-            >>> data.values[0] = 1  # will result in a RuntimeError
-            >>> data.unlock()
-            >>> data.values = [1, 2, 3]
-            >>> print(data.values)
-            [1,2,3]
-            >>> data.values[0] = 1  # will result in a RuntimeError
-            >>> print(data.values)
-            1
-        """
-        if not attr:
-            # default values are not allowed when using *attr, so set a default here if none is supplied
-            attr = ("values",)
-        for attr_ in attr:
-            getattr(self, attr_).flags["WRITEABLE"] = True
-
-    @property
-    def locked(self) -> bool:
-        """Return whether the values attribute is locked.
-
-        See lock().
-        """
-        return not self.values.flags["WRITEABLE"]
 
     @property
     def loaded(self) -> bool:
         """Return whether the data was loaded from disk, or derived from elsewhere."""
@@ -243,19 +141,7 @@ def _sliced_copy(
         newlabel: str,  # noqa: ARG002
     ) -> Self:
         # TODO: check correct implementation
-        cls = self.__class__
-        time = np.copy(self.time[start_index:end_index])
-        values = np.copy(self.values[start_index:end_index])
-        description = f"Slice ({start_index}-{end_index}) of <{self.description}>"
+        time = self.time[start_index:end_index]
+        values = self.values[start_index:end_index]
 
-        return cls(
-            label=self.label,  # TODO: newlabel gives errors
-            name=self.name,
-            unit=self.unit,
-            category=self.category,
-            description=description,
-            derived_from=[*self.derived_from, self],
-            time=time,
-            values=values,
-            sample_frequency=self.sample_frequency,
-        )
+        return self.update(time=time, values=values)
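
Note: dropping the np.copy() calls is consistent with the arrays now being frozen: a basic slice of a read-only NumPy array is a view that inherits the read-only flag, so the slice shares memory safely. A small demonstration:

```python
import numpy as np

values = np.arange(10.0)
values.flags.writeable = False  # mimic a frozen ContinuousData.values

window = values[2:5]  # basic slicing returns a view; no data is copied
assert window.base is values  # shares memory with the parent array
assert not window.flags.writeable  # the view is read-only as well
```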
eitprocessing/datahandling/datacollection.py (0 additions, 12 deletions)
@@ -93,18 +93,6 @@ def _check_item(
         )
         raise KeyError(msg)
 
-    def get_loaded_data(self) -> dict[str, V]:
-        """Return all data that was directly loaded from disk."""
-        return {k: v for k, v in self.items() if v.loaded}
-
-    def get_data_derived_from(self, obj: V) -> dict[str, V]:
-        """Return all data that was derived from a specific source."""
-        return {k: v for k, v in self.items() if any(obj is item for item in v.derived_from)}
-
-    def get_derived_data(self) -> dict[str, V]:
-        """Return all data that was derived from any source."""
-        return {k: v for k, v in self.items() if v.derived_from}
-
     def concatenate(self: Self, other: Self) -> Self:
         """Concatenate this collection with an equivalent collection.
 
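
Note: code that relied on get_loaded_data() can reproduce it with a plain comprehension over the mapping interface; the stub class below merely stands in for real data containers. The derived_from-based helpers have no direct replacement, since that attribute was removed in this PR.

```python
class _Stub:  # stand-in for a data container; illustrative only
    def __init__(self, loaded: bool) -> None:
        self.loaded = loaded


collection = {"pressure": _Stub(loaded=True), "tidal_volume": _Stub(loaded=False)}
loaded_only = {key: value for key, value in collection.items() if value.loaded}
assert list(loaded_only) == ["pressure"]
```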