
Commit
* Add temporary random assignment of the second coupling variable, #42 (a minimal sketch of the change follows the diff below).
MBaranskiEBC committed Jan 4, 2019
1 parent 404c01f commit d3f181f
Showing 1 changed file with 8 additions and 7 deletions.
15 changes: 8 additions & 7 deletions pyDMPC/ControlFramework/Objective_Function.py
@@ -9,6 +9,7 @@
from sklearn.neural_network import MLPClassifier, MLPRegressor
from sklearn.preprocessing import StandardScaler
import time
import random

'''Global variables used for simulation handling'''
# Variable indicating if the subsystem model is compiled
@@ -180,7 +181,7 @@ def Obj(values_DVs, BC, s):
output_list = []
MLPModel = load("C:\\TEMP\\Dymola\\" + s._name + ".joblib")
scaler = load("C:\\TEMP\\Dymola\\" + s._name + "_scaler.joblib")

for t in range(60):
T_met_prev_1.append(s._initial_values[0])
T_met_prev_2.append(s._initial_values[1])
@@ -189,19 +190,19 @@ def Obj(values_DVs, BC, s):
command.append(values_DVs[0])
T_cur.append(BC[0])
T_prev.append(BC[0])


x_test = np.stack((command,T_cur,T_prev,T_met_prev_1,T_met_prev_2,T_met_prev_3,T_met_prev_4),axis=1)

scaled_instances = scaler.transform(x_test)
traj = MLPModel.predict(scaled_instances)
traj += 273*np.ones(len(traj))
if s._output_vars is not None:
output_traj = [traj, 0.3*np.ones(60)]
output_traj = [traj, (0.3+random.uniform(0.0,0.01))*np.ones(60)]

output_list.append(traj[-1])
output_list.append(0.3)
output_list.append(0.3+random.uniform(0.0,0.01))

print(values_DVs[0])
print(BC[0])
print(traj)
@@ -254,7 +255,7 @@ def Obj(values_DVs, BC, s):
print("cost_total: " + str(cost_total))
print("output: " + str(tout))
#time.sleep(2)

else:
for l,tout in enumerate(output_traj[0]):
if l > 100 or s._model_type == "MLP":
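The change replaces the constant second coupling output (0.3) with 0.3 plus a uniform random offset drawn from [0.0, 0.01], applied both to the 60-step coupling trajectory and to the scalar value appended to output_list. A minimal standalone sketch of that perturbation follows; the names NOMINAL, NOISE_MAX, coupling_traj and coupling_scalar are illustrative, not taken from the repository, while the numeric values come from the diff above.

import random
import numpy as np

# Nominal second coupling value and the maximum size of the temporary
# random offset, both taken from the diff above.
NOMINAL = 0.3
NOISE_MAX = 0.01

# As committed, the offset is drawn independently for the 60-step
# trajectory and for the scalar entry appended to output_list, so the
# two values can differ by up to NOISE_MAX.
coupling_traj = (NOMINAL + random.uniform(0.0, NOISE_MAX)) * np.ones(60)
coupling_scalar = NOMINAL + random.uniform(0.0, NOISE_MAX)

The offset is small relative to the nominal value, so the second coupling variable stays near 0.3 while no longer being exactly constant; the commit message marks the random assignment as temporary, so it is presumably a placeholder until a proper value for this coupling variable is computed.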
