
Commit

test corrected to use dataframe for TimeScenarioSeriesData
vargastat committed Apr 17, 2024
1 parent 09a7b10 commit 40bf223
Showing 2 changed files with 7 additions and 10 deletions.
8 changes: 2 additions & 6 deletions tests/functional/test_xpansion.py
@@ -10,6 +10,7 @@
 #
 # This file is part of the Antares project.
 
+import pandas as pd
 import pytest
 
 from andromede.expression.expression import literal, param, port_field, var
@@ -47,7 +48,6 @@
     Network,
     Node,
     PortRef,
-    TimeScenarioIndex,
     TimeScenarioSeriesData,
     create_component,
 )
@@ -347,11 +347,7 @@ def test_generation_xpansion_two_time_steps_two_scenarios(
     horizon = 2
     time_block = TimeBlock(1, list(range(horizon)))
 
-    data = {}
-    data[TimeScenarioIndex(0, 0)] = 300
-    data[TimeScenarioIndex(1, 0)] = 500
-    data[TimeScenarioIndex(0, 1)] = 200
-    data[TimeScenarioIndex(1, 1)] = 400
+    data = pd.DataFrame([[300, 200], [500, 400]], index=[0, 1], columns=[0, 1])
 
     demand_data = TimeScenarioSeriesData(time_scenario_series=data)

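A quick illustration (not part of the commit) of how the old TimeScenarioIndex entries map onto the new DataFrame, assuming rows index time steps and columns index scenarios, as in the test data above:

    import pandas as pd

    # Assumed layout: data[TimeScenarioIndex(t, s)] corresponds to df.loc[t, s],
    # i.e. rows are time steps and columns are scenarios.
    df = pd.DataFrame([[300, 200], [500, 400]], index=[0, 1], columns=[0, 1])
    assert df.loc[0, 0] == 300  # was data[TimeScenarioIndex(0, 0)]
    assert df.loc[1, 0] == 500  # was data[TimeScenarioIndex(1, 0)]
    assert df.loc[0, 1] == 200  # was data[TimeScenarioIndex(0, 1)]
    assert df.loc[1, 1] == 400  # was data[TimeScenarioIndex(1, 1)]
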
9 changes: 5 additions & 4 deletions tests/unittests/test_utils.py
@@ -10,6 +10,7 @@
 #
 # This file is part of the Antares project.
 
+import pandas as pd
 import pytest
 
 from andromede.study import TimeScenarioIndex, TimeScenarioSeriesData
@@ -33,8 +34,8 @@ def value_factory() -> str:
 
 
 def generate_data(value: float, horizon: int, scenarios: int) -> TimeScenarioSeriesData:
-    data = {}
-    for absolute_timestep in range(horizon):
-        for scenario in range(scenarios):
-            data[TimeScenarioIndex(absolute_timestep, scenario)] = value
+    data = pd.DataFrame(index=range(horizon), columns=range(scenarios))
+
+    data.fillna(value, inplace=True)
+
     return TimeScenarioSeriesData(time_scenario_series=data)
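A side note on the new generate_data (a sketch of mine, not from the commit): a DataFrame created with only index and columns starts as all-NaN with object dtype, and fillna preserves that dtype; passing the scalar straight to the constructor builds a float frame in one step. Either form yields the same values:

    import pandas as pd

    horizon, scenarios, value = 3, 2, 100.0

    # As in the commit: build an empty (all-NaN) frame, then fill it in place.
    via_fillna = pd.DataFrame(index=range(horizon), columns=range(scenarios))
    via_fillna.fillna(value, inplace=True)

    # Alternative sketch: pandas broadcasts the scalar to every cell (float64).
    direct = pd.DataFrame(value, index=range(horizon), columns=range(scenarios))

    # Same values; only the dtype differs (object vs. float64).
    assert via_fillna.astype(float).equals(direct)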
