Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
0902229
Summary: Variable Registry Implementation
FBumann Jan 15, 2026
6fa5a24
Summary: Fine-Grained Variable Categories
FBumann Jan 15, 2026
47969a7
Add IO for variable categories
FBumann Jan 15, 2026
f7e0669
The refactoring is complete. Here's what was accomplished:
FBumann Jan 15, 2026
77ea9db
Here's what we accomplished:
FBumann Jan 15, 2026
b146d3e
Completed:
FBumann Jan 15, 2026
ed221cb
All simplifications complete! Here's a summary of what we cleaned up:
FBumann Jan 15, 2026
64ce6ce
Removed the unnecessary lookup and use segment_indices directly
FBumann Jan 15, 2026
ff43e32
The IO roundtrip fix is working correctly. Here's a summary of what w…
FBumann Jan 15, 2026
3bdcd1d
Updated condition in transform_accessor.py:2063-2066:
FBumann Jan 15, 2026
94e0a1d
Summary of Fixes
FBumann Jan 15, 2026
3cddeb8
Added test_segmented_total_effects_match_solution to TestSegmentatio…
FBumann Jan 15, 2026
325cbcf
Merge branch 'feature/tsam-v3+rework' into fix/segmentation-expansion…
FBumann Jan 15, 2026
dd671f2
Added all remaining tsam.aggregate() parameters and missing type hints
FBumann Jan 15, 2026
b9e1608
Merge branch 'fix/segmentation-expansion' into fix/segmentation-expan…
FBumann Jan 15, 2026
fe3ae38
Updated expression_tracking_variable
FBumann Jan 15, 2026
9df0a85
Added to flow_system.py
FBumann Jan 15, 2026
cf59335
Ensure backwards compatibility
FBumann Jan 15, 2026
7acec0c
Summary of Changes
FBumann Jan 15, 2026
d3ce064
perf: Keep data in minimal form (no pre-broadcasting) (#575)
FBumann Jan 16, 2026
872bbbd
Merge branch 'main' into feature/tsam-v3+rework
FBumann Jan 16, 2026
c150fd5
Merge branch 'feature/tsam-v3+rework' into fix/segmentation-expansion…
FBumann Jan 16, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
511 changes: 225 additions & 286 deletions flixopt/clustering/base.py

Large diffs are not rendered by default.

73 changes: 54 additions & 19 deletions flixopt/components.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,8 @@
from .elements import Component, ComponentModel, Flow
from .features import InvestmentModel, PiecewiseModel
from .interface import InvestParameters, PiecewiseConversion, StatusParameters
from .modeling import BoundingPatterns
from .structure import FlowSystemModel, register_class_for_io
from .modeling import BoundingPatterns, _scalar_safe_isel, _scalar_safe_isel_drop, _scalar_safe_reduce
from .structure import FlowSystemModel, VariableCategory, register_class_for_io

if TYPE_CHECKING:
import linopy
Expand Down Expand Up @@ -570,8 +570,12 @@ def _plausibility_checks(self) -> None:
# Initial charge state should not constrain investment decision
# If initial > (min_cap * rel_max), investment is forced to increase capacity
# If initial < (max_cap * rel_min), investment is forced to decrease capacity
min_initial_at_max_capacity = maximum_capacity * self.relative_minimum_charge_state.isel(time=0)
max_initial_at_min_capacity = minimum_capacity * self.relative_maximum_charge_state.isel(time=0)
min_initial_at_max_capacity = maximum_capacity * _scalar_safe_isel(
self.relative_minimum_charge_state, {'time': 0}
)
max_initial_at_min_capacity = minimum_capacity * _scalar_safe_isel(
self.relative_maximum_charge_state, {'time': 0}
)

# Only perform numeric comparisons if using a numeric initial_charge_state
if not initial_equals_final and self.initial_charge_state is not None:
Expand Down Expand Up @@ -940,8 +944,13 @@ def _create_storage_variables(self):
upper=ub,
coords=self._model.get_coords(extra_timestep=True),
short_name='charge_state',
category=VariableCategory.CHARGE_STATE,
)
self.add_variables(
coords=self._model.get_coords(),
short_name='netto_discharge',
category=VariableCategory.NETTO_DISCHARGE,
)
self.add_variables(coords=self._model.get_coords(), short_name='netto_discharge')

def _add_netto_discharge_constraint(self):
"""Add constraint: netto_discharge = discharging - charging."""
Expand Down Expand Up @@ -972,6 +981,7 @@ def _add_investment_model(self):
label_of_element=self.label_of_element,
label_of_model=self.label_of_element,
parameters=self.element.capacity_in_flow_hours,
size_category=VariableCategory.STORAGE_SIZE,
),
short_name='investment',
)
Expand Down Expand Up @@ -1100,24 +1110,47 @@ def _relative_charge_state_bounds(self) -> tuple[xr.DataArray, xr.DataArray]:
Returns:
Tuple of (minimum_bounds, maximum_bounds) DataArrays extending to final timestep
"""
final_coords = {'time': [self._model.flow_system.timesteps_extra[-1]]}
timesteps_extra = self._model.flow_system.timesteps_extra

# Get the original bounds (may be scalar or have time dim)
rel_min = self.element.relative_minimum_charge_state
rel_max = self.element.relative_maximum_charge_state

# Get final minimum charge state
if self.element.relative_minimum_final_charge_state is None:
min_final = self.element.relative_minimum_charge_state.isel(time=-1, drop=True)
min_final_value = _scalar_safe_isel_drop(rel_min, 'time', -1)
else:
min_final = self.element.relative_minimum_final_charge_state
min_final = min_final.expand_dims('time').assign_coords(time=final_coords['time'])
min_final_value = self.element.relative_minimum_final_charge_state

# Get final maximum charge state
if self.element.relative_maximum_final_charge_state is None:
max_final = self.element.relative_maximum_charge_state.isel(time=-1, drop=True)
max_final_value = _scalar_safe_isel_drop(rel_max, 'time', -1)
else:
max_final_value = self.element.relative_maximum_final_charge_state

# Build bounds arrays for timesteps_extra (includes final timestep)
# Handle case where original data may be scalar (no time dim)
if 'time' in rel_min.dims:
# Original has time dim - concat with final value
min_final_da = (
min_final_value.expand_dims('time') if 'time' not in min_final_value.dims else min_final_value
)
min_final_da = min_final_da.assign_coords(time=[timesteps_extra[-1]])
min_bounds = xr.concat([rel_min, min_final_da], dim='time')
else:
# Original is scalar - broadcast to full time range (constant value)
min_bounds = rel_min.expand_dims(time=timesteps_extra)

if 'time' in rel_max.dims:
# Original has time dim - concat with final value
max_final_da = (
max_final_value.expand_dims('time') if 'time' not in max_final_value.dims else max_final_value
)
max_final_da = max_final_da.assign_coords(time=[timesteps_extra[-1]])
max_bounds = xr.concat([rel_max, max_final_da], dim='time')
else:
max_final = self.element.relative_maximum_final_charge_state
max_final = max_final.expand_dims('time').assign_coords(time=final_coords['time'])
# Concatenate with original bounds
min_bounds = xr.concat([self.element.relative_minimum_charge_state, min_final], dim='time')
max_bounds = xr.concat([self.element.relative_maximum_charge_state, max_final], dim='time')
# Original is scalar - broadcast to full time range (constant value)
max_bounds = rel_max.expand_dims(time=timesteps_extra)

return min_bounds, max_bounds

Expand Down Expand Up @@ -1286,6 +1319,7 @@ def _add_investment_model(self):
label_of_element=self.label_of_element,
label_of_model=self.label_of_element,
parameters=self.element.capacity_in_flow_hours,
size_category=VariableCategory.STORAGE_SIZE,
),
short_name='investment',
)
Expand Down Expand Up @@ -1342,6 +1376,7 @@ def _add_intercluster_linking(self) -> None:
coords=boundary_coords,
dims=boundary_dims,
short_name='SOC_boundary',
category=VariableCategory.SOC_BOUNDARY,
)

# 3. Link SOC_boundary to investment size
Expand Down Expand Up @@ -1472,8 +1507,8 @@ def _add_linking_constraints(
# relative_loss_per_hour is per-hour, so we need total hours per cluster
# Use sum over time to handle both regular and segmented systems
# Keep as DataArray to respect per-period/scenario values
rel_loss = self.element.relative_loss_per_hour.mean('time')
hours_per_cluster = self._model.timestep_duration.sum('time')
rel_loss = _scalar_safe_reduce(self.element.relative_loss_per_hour, 'time', 'mean')
hours_per_cluster = _scalar_safe_reduce(self._model.timestep_duration, 'time', 'mean')
decay_n = (1 - rel_loss) ** hours_per_cluster

lhs = soc_after - soc_before * decay_n - delta_soc_ordered
Expand Down Expand Up @@ -1517,8 +1552,8 @@ def _add_combined_bound_constraints(
# Get self-discharge rate for decay calculation
# relative_loss_per_hour is per-hour, so we need to convert offsets to hours
# Keep as DataArray to respect per-period/scenario values
rel_loss = self.element.relative_loss_per_hour.mean('time')
mean_timestep_duration = self._model.timestep_duration.mean('time')
rel_loss = _scalar_safe_reduce(self.element.relative_loss_per_hour, 'time', 'mean')
mean_timestep_duration = _scalar_safe_reduce(self._model.timestep_duration, 'time', 'mean')

# Use actual time dimension size (may be smaller than timesteps_per_cluster for segmented systems)
actual_time_size = charge_state.sizes['time']
Expand Down
54 changes: 52 additions & 2 deletions flixopt/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -366,6 +366,55 @@ def _broadcast_dataarray_to_target_specification(
broadcasted = source_data.broadcast_like(target_template)
return broadcasted.transpose(*target_dims)

@staticmethod
def _validate_dataarray_dims(
    data: xr.DataArray, target_coords: dict[str, pd.Index], target_dims: tuple[str, ...]
) -> xr.DataArray:
    """
    Check that a DataArray's dimensions are compatible with the target spec,
    without broadcasting it.

    The data stays in compact form; expansion to the full dimension set
    happens later at the linopy interface (FlowSystemModel.add_variables).
    Dimensions the data does have are reordered to follow ``target_dims``.

    Args:
        data: DataArray to check.
        target_coords: Mapping of dimension name to its coordinate index.
        target_dims: Canonical ordering of the target dimensions.

    Returns:
        The validated DataArray, transposed into canonical dimension order.

    Raises:
        ConversionError: If the data carries a dimension not present in
            ``target_dims``, or an overlapping dimension's coordinate
            values disagree with ``target_coords``.
    """
    # Reject any dimension the target specification does not know about.
    extra_dims = set(data.dims) - set(target_dims)
    if extra_dims:
        raise ConversionError(f'Data has dimensions {extra_dims} not in target dimensions {target_dims}')

    # For dimensions present on both sides, the coordinate values must agree exactly.
    for dim in data.dims:
        if dim not in data.coords or dim not in target_coords:
            continue
        if not np.array_equal(data.coords[dim].values, target_coords[dim].values):
            raise ConversionError(
                f'Coordinate mismatch for dimension "{dim}". Data and target coordinates have different values.'
            )

    # Reorder the data's own dimensions to match the canonical target order.
    if data.dims:
        ordered = tuple(d for d in target_dims if d in data.dims)
        if data.dims != ordered:
            data = data.transpose(*ordered)

    return data

@classmethod
def to_dataarray(
cls,
Expand Down Expand Up @@ -480,8 +529,9 @@ def to_dataarray(
f'Unsupported data type: {type(data).__name__}. Supported types: {", ".join(supported_types)}'
)

# Broadcast intermediate result to target specification
return cls._broadcast_dataarray_to_target_specification(intermediate, validated_coords, target_dims)
# Validate dims are compatible (no broadcasting - data stays compact)
# Broadcasting happens at FlowSystemModel.add_variables() via _ensure_coords
return cls._validate_dataarray_dims(intermediate, validated_coords, target_dims)

@staticmethod
def _validate_and_prepare_target_coordinates(
Expand Down
12 changes: 11 additions & 1 deletion flixopt/effects.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,15 @@

from .core import PlausibilityError
from .features import ShareAllocationModel
from .structure import Element, ElementContainer, ElementModel, FlowSystemModel, Submodel, register_class_for_io
from .structure import (
Element,
ElementContainer,
ElementModel,
FlowSystemModel,
Submodel,
VariableCategory,
register_class_for_io,
)

if TYPE_CHECKING:
from collections.abc import Iterator
Expand Down Expand Up @@ -377,6 +385,7 @@ def _do_modeling(self):
upper=self.element.maximum_total if self.element.maximum_total is not None else np.inf,
coords=self._model.get_coords(['period', 'scenario']),
name=self.label_full,
category=VariableCategory.TOTAL,
)

self.add_constraints(
Expand All @@ -394,6 +403,7 @@ def _do_modeling(self):
upper=self.element.maximum_over_periods if self.element.maximum_over_periods is not None else np.inf,
coords=self._model.get_coords(['scenario']),
short_name='total_over_periods',
category=VariableCategory.TOTAL_OVER_PERIODS,
)

self.add_constraints(self.total_over_periods == weighted_total, short_name='total_over_periods')
Expand Down
29 changes: 25 additions & 4 deletions flixopt/elements.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
Element,
ElementModel,
FlowSystemModel,
VariableCategory,
register_class_for_io,
)

Expand Down Expand Up @@ -672,6 +673,7 @@ def _do_modeling(self):
upper=self.absolute_flow_rate_bounds[1],
coords=self._model.get_coords(),
short_name='flow_rate',
category=VariableCategory.FLOW_RATE,
)

self._constraint_flow_rate()
Expand All @@ -687,6 +689,7 @@ def _do_modeling(self):
),
coords=['period', 'scenario'],
short_name='total_flow_hours',
category=VariableCategory.TOTAL,
)

# Weighted sum over all periods constraint
Expand Down Expand Up @@ -717,6 +720,7 @@ def _do_modeling(self):
),
coords=['scenario'],
short_name='flow_hours_over_periods',
category=VariableCategory.TOTAL_OVER_PERIODS,
)

# Load factor constraints
Expand All @@ -726,7 +730,12 @@ def _do_modeling(self):
self._create_shares()

def _create_status_model(self):
status = self.add_variables(binary=True, short_name='status', coords=self._model.get_coords())
status = self.add_variables(
binary=True,
short_name='status',
coords=self._model.get_coords(),
category=VariableCategory.STATUS,
)
self.add_submodels(
StatusModel(
model=self._model,
Expand All @@ -746,6 +755,7 @@ def _create_investment_model(self):
label_of_element=self.label_of_element,
parameters=self.element.size,
label_of_model=self.label_of_element,
size_category=VariableCategory.FLOW_SIZE,
),
'investment',
)
Expand Down Expand Up @@ -957,11 +967,17 @@ def _do_modeling(self):
imbalance_penalty = self.element.imbalance_penalty_per_flow_hour * self._model.timestep_duration

self.virtual_supply = self.add_variables(
lower=0, coords=self._model.get_coords(), short_name='virtual_supply'
lower=0,
coords=self._model.get_coords(),
short_name='virtual_supply',
category=VariableCategory.VIRTUAL_FLOW,
)

self.virtual_demand = self.add_variables(
lower=0, coords=self._model.get_coords(), short_name='virtual_demand'
lower=0,
coords=self._model.get_coords(),
short_name='virtual_demand',
category=VariableCategory.VIRTUAL_FLOW,
)

# Σ(inflows) + virtual_supply = Σ(outflows) + virtual_demand
Expand Down Expand Up @@ -1028,7 +1044,12 @@ def _do_modeling(self):

# Create component status variable and StatusModel if needed
if self.element.status_parameters:
status = self.add_variables(binary=True, short_name='status', coords=self._model.get_coords())
status = self.add_variables(
binary=True,
short_name='status',
coords=self._model.get_coords(),
category=VariableCategory.STATUS,
)
if len(all_flows) == 1:
self.add_constraints(status == all_flows[0].submodel.status.status, short_name='status')
else:
Expand Down
Loading