Skip to content

Commit

Permalink
Performance test: check large nb of port connections
Browse files Browse the repository at this point in the history
  • Loading branch information
ianmnz committed May 16, 2024
1 parent acc77e8 commit ead9b99
Show file tree
Hide file tree
Showing 3 changed files with 67 additions and 12 deletions.
57 changes: 53 additions & 4 deletions tests/functional/test_performance.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
PortRef,
create_component,
)
from tests.unittests.test_utils import generate_const_data
from tests.unittests.test_utils import generate_scalar_matrix_data


def test_large_sum_inside_model_with_loop() -> None:
Expand Down Expand Up @@ -112,7 +112,7 @@ def test_large_sum_outside_model_with_loop() -> None:
def test_large_sum_inside_model_with_sum_operator() -> None:
"""
Test performance when the problem involves an expression with a high number of terms.
Here the objective function is the sum over nb_terms terms withe the sum() operator inside the model
Here the objective function is the sum over nb_terms terms with the sum() operator inside the model
"""
nb_terms = 10_000

Expand Down Expand Up @@ -154,6 +154,51 @@ def test_large_sum_inside_model_with_sum_operator() -> None:
assert problem.solver.Objective().Value() == 3 * nb_terms


def test_large_sum_of_port_connections() -> None:
    """
    Performance test for a network in which many generators are all connected
    to a single balance node through ports.

    Locally this passes with 470 generators but fails at 471 because of the
    recursion depth limit, and the Jenkins environment may tolerate even fewer.
    """
    nb_generators = 100

    time_block = TimeBlock(0, [0])
    scenarios = 1

    # Demand side: one consumer asking for exactly one unit per generator.
    database = DataBase()
    database.add_data("D", "demand", ConstantData(nb_generators))

    node = Node(model=NODE_BALANCE_MODEL, id="N")
    demand = create_component(model=DEMAND_MODEL, id="D")

    network = Network("test")
    network.add_node(node)
    network.add_component(demand)
    network.connect(PortRef(demand, "balance_port"), PortRef(node, "balance_port"))

    # Supply side: build, register and wire each generator in a single pass.
    for idx in range(nb_generators):
        database.add_data(f"G_{idx}", "p_max", ConstantData(1))
        database.add_data(f"G_{idx}", "cost", ConstantData(5))

        generator = create_component(model=GENERATOR_MODEL, id=f"G_{idx}")
        network.add_component(generator)
        network.connect(
            PortRef(generator, "balance_port"), PortRef(node, "balance_port")
        )

    problem = build_problem(network, database, time_block, scenarios)
    status = problem.solver.Solve()

    assert status == problem.solver.OPTIMAL
    # Each unit of demand is served at marginal cost 5.
    assert problem.solver.Objective().Value() == 5 * nb_generators


def test_basic_balance_on_whole_year() -> None:
"""
Balance on one node with one fixed demand and one generation, on 8760 timestep.
Expand All @@ -164,7 +209,9 @@ def test_basic_balance_on_whole_year() -> None:
time_block = TimeBlock(1, list(range(horizon)))

database = DataBase()
database.add_data("D", "demand", generate_const_data(100, horizon, scenarios))
database.add_data(
"D", "demand", generate_scalar_matrix_data(100, horizon, scenarios)
)

database.add_data("G", "p_max", ConstantData(100))
database.add_data("G", "cost", ConstantData(30))
Expand Down Expand Up @@ -198,7 +245,9 @@ def test_basic_balance_on_whole_year_with_large_sum() -> None:
time_block = TimeBlock(1, list(range(horizon)))

database = DataBase()
database.add_data("D", "demand", generate_const_data(100, horizon, scenarios))
database.add_data(
"D", "demand", generate_scalar_matrix_data(100, horizon, scenarios)
)

database.add_data("G", "p_max", ConstantData(100))
database.add_data("G", "cost", ConstantData(30))
Expand Down
16 changes: 11 additions & 5 deletions tests/functional/test_stochastic.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
PortRef,
create_component,
)
from tests.unittests.test_utils import generate_const_data
from tests.unittests.test_utils import generate_scalar_matrix_data


@pytest.fixture
Expand All @@ -46,13 +46,19 @@ def scenarios() -> int:
def database(horizon: int, scenarios: int) -> DataBase:
database = DataBase()

database.add_data("D", "demand", generate_const_data(500, horizon, scenarios))
database.add_data(
"D", "demand", generate_scalar_matrix_data(500, horizon, scenarios)
)

database.add_data("BASE", "nb_failures", generate_const_data(1, horizon, scenarios))
database.add_data(
"SEMIBASE", "nb_failures", generate_const_data(1, horizon, scenarios)
"BASE", "nb_failures", generate_scalar_matrix_data(1, horizon, scenarios)
)
database.add_data(
"SEMIBASE", "nb_failures", generate_scalar_matrix_data(1, horizon, scenarios)
)
database.add_data(
"PEAK", "nb_failures", generate_scalar_matrix_data(1, horizon, scenarios)
)
database.add_data("PEAK", "nb_failures", generate_const_data(1, horizon, scenarios))

database.add_data("BASE", "p_max", ConstantData(250))
database.add_data("BASE", "p_min", ConstantData(100))
Expand Down
6 changes: 3 additions & 3 deletions tests/unittests/test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ def value_factory() -> str:
assert get_or_add(d, "key2", value_factory) == "value2"


def generate_const_data(
def generate_scalar_matrix_data(
value: float, horizon: int, scenarios: int
) -> TimeScenarioSeriesData:
data = {}
Expand Down Expand Up @@ -68,12 +68,12 @@ def generate_random_data(
) -> TimeScenarioSeriesData:
X = truncnorm((lower - mean) / std, (upper - mean) / std, loc=mean, scale=std)

sample = X.rvs(horizon * scenarios, random_state=seed)
sample = X.rvs(size=(horizon, scenarios), random_state=seed)

data = {}
for absolute_timestep in range(horizon):
for scenario in range(scenarios):
data[TimeScenarioIndex(absolute_timestep, scenario)] = sample[
scenario + absolute_timestep * scenarios
absolute_timestep, scenario
]
return TimeScenarioSeriesData(time_scenario_series=data)

0 comments on commit ead9b99

Please sign in to comment.