Skip to content

Commit

Permalink
Merge pull request #94 from US-JOET/polish_and_finalize
Browse files Browse the repository at this point in the history
🔧 configure limits in a way that is consistent and makes sense
  • Loading branch information
shankari authored Nov 19, 2024
2 parents 760bdf8 + 2075ed2 commit 74e1a72
Show file tree
Hide file tree
Showing 8 changed files with 153 additions and 12 deletions.
2 changes: 1 addition & 1 deletion .env
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
TAG=0.0.21
TAG=0.0.22

EVEREST_MANAGER_CPUS='1.0'
EVEREST_MANAGER_MEMORY='1024mb'
1 change: 1 addition & 0 deletions manager/config-sil-ocpp201-pnc.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ active_modules:
ac_hlc_enabled: true
ac_hlc_use_5percent: false
ac_enforce_hlc: false
max_current_import_A: 16
connections:
bsp:
- module_id: yeti_driver_1
Expand Down
Binary file not shown.
2 changes: 2 additions & 0 deletions manager/demo-patch-scripts/apply-runtime-patches.sh
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@ echo "Applying enabled_payment_method_in_python.patch"
cd /ext && patch -p0 -i /tmp/demo-patches/enable_payment_method_in_python.patch
echo "Applying support_payment_in_jsevmanager.patch"
cd /ext/dist/libexec/everest && patch -p1 -i /tmp/demo-patches/support_payment_in_jsevmanager.patch
echo "Applying hw_cap_down_to_16A.patch"
cd / && patch -p0 -i /tmp/demo-patches/hw_cap_down_to_16A.patch

cp /tmp/demo-patches/power_curve.py \
/ext/dist/libexec/everest/3rd_party/josev/iso15118/evcc/states/
Expand Down
22 changes: 22 additions & 0 deletions manager/demo-patches/hw_cap_down_to_16A.patch
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
--- /ext/dist/libexec/everest/modules/JsYetiSimulator/index.js
+++ /ext/dist/libexec/everest/modules/JsYetiSimulator/index.js
@@ -1434,7 +1434,7 @@ boot_module(async ({
clearData(mod);

mod.provides.board_support.publish.capabilities({
- max_current_A_import: 32.0,
+ max_current_A_import: 16.0,
min_current_A_import: 6.0,
max_phase_count_import: 3,
min_phase_count_import: 1,
--- /ext/dist/share/everest/modules/OCPP201/component_config/standardized/SmartChargingCtrlr.json
+++ /ext/dist/share/everest/modules/OCPP201/component_config/standardized/SmartChargingCtrlr.json
@@ -191,7 +191,7 @@
{
"type": "Actual",
"mutability": "ReadWrite",
- "value": 48
+ "value": 16
}
],
"type": "integer",
56 changes: 56 additions & 0 deletions manager/demo-patches/limit_logging.patch
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
diff --git a/modules/EnergyNode/energy_grid/energyImpl.cpp b/modules/EnergyNode/energy_grid/energyImpl.cpp
index f7afcc99..fc88f8c2 100644
--- a/modules/EnergyNode/energy_grid/energyImpl.cpp
+++ b/modules/EnergyNode/energy_grid/energyImpl.cpp
@@ -64,6 +64,7 @@ void energyImpl::init() {

types::energy::ScheduleReqEntry energyImpl::get_local_schedule() {
// local schedule of this module
+ EVLOG_debug << "Getting local schedule with fuse limit: " << mod->config.fuse_limit_A;
types::energy::ScheduleReqEntry local_schedule;
auto tp = date::utc_clock::now();

@@ -79,6 +80,7 @@ types::energy::ScheduleReqEntry energyImpl::get_local_schedule() {

void energyImpl::set_external_limits(types::energy::ExternalLimits& l) {
std::scoped_lock lock(energy_mutex);
+ EVLOG_debug << "Setting external limits with fuse limit: " << mod->config.fuse_limit_A;

if (l.schedule_import.has_value()) {
energy_flow_request.schedule_import = l.schedule_import;
diff --git a/modules/EvseManager/energy_grid/energyImpl.cpp b/modules/EvseManager/energy_grid/energyImpl.cpp
index af290e86..2260e819 100644
--- a/modules/EvseManager/energy_grid/energyImpl.cpp
+++ b/modules/EvseManager/energy_grid/energyImpl.cpp
@@ -54,6 +54,7 @@ void energyImpl::clear_import_request_schedule() {
const auto tp =
Everest::Date::to_rfc3339(date::floor<std::chrono::hours>(tpnow) + date::get_leap_second_info(tpnow).elapsed);

+ EVLOG_info << "Clearing import request schedule by setting max current from hw_caps = " << hw_caps.max_current_A_import;
entry_import.timestamp = tp;
entry_import.limits_to_root.ac_max_phase_count = hw_caps.max_phase_count_import;
entry_import.limits_to_root.ac_min_phase_count = hw_caps.min_phase_count_import;
@@ -79,6 +80,7 @@ void energyImpl::clear_export_request_schedule() {
const auto tp =
Everest::Date::to_rfc3339(date::floor<std::chrono::hours>(tpnow) + date::get_leap_second_info(tpnow).elapsed);

+ EVLOG_info << "Clearing export request schedule by setting max current from hw_caps = " << hw_caps.max_current_A_export;
entry_export.timestamp = tp;
entry_export.limits_to_root.ac_max_phase_count = hw_caps.max_phase_count_export;
entry_export.limits_to_root.ac_min_phase_count = hw_caps.min_phase_count_export;
@@ -353,6 +355,7 @@ void energyImpl::handle_enforce_limits(types::energy::EnforcedLimits& value) {
if (value.limits_root_side.has_value()) {
// set enforced AC current limit
if (value.limits_root_side.value().ac_max_current_A.has_value()) {
+ EVLOG_info << "Handle enforce limits with ac_max_current_A = " << value.limits_root_side.value().ac_max_current_A.value();
limit = value.limits_root_side.value().ac_max_current_A.value();
}

@@ -383,6 +386,7 @@ void energyImpl::handle_enforce_limits(types::energy::EnforcedLimits& value) {
mod->mqtt.publish(fmt::format("everest_external/nodered/{}/state/max_watt", mod->config.connector_id),
value.limits_root_side.value().total_power_W.value());

+ EVLOG_info << "Handle enforce limits with total_power_W = " << value.limits_root_side.value().total_power_W.value();
float a = value.limits_root_side.value().total_power_W.value() / mod->config.ac_nominal_voltage /
mod->ac_nr_phases_active;
if (a < limit) {
2 changes: 1 addition & 1 deletion manager/disable_iso_tls.patch
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
--- /tmp/config-sil-ocpp201-pnc.yaml
--- config/config-sil-ocpp201-pnc.yaml
+++ config/config-sil-ocpp201-pnc.yaml
@@ -14,7 +14,7 @@
config_module:
Expand Down
80 changes: 70 additions & 10 deletions nodered/config/config-sil-iso15118-ac-flow.json
Original file line number Diff line number Diff line change
Expand Up @@ -2082,7 +2082,7 @@
"topic": "everest_external/nodered/#/cmd/set_max_current",
"topicType": "str",
"min": "6",
"max": "32",
"max": "16",
"step": "0.1",
"x": 450,
"y": 700,
Expand Down Expand Up @@ -3672,7 +3672,7 @@
"wires": [
[
"1ef1dcc581d80607",
"45fbfa036a572ce9"
"61e3f2339b3e0da0"
]
]
},
Expand All @@ -3699,7 +3699,7 @@
"wires": [
[
"15a95c5d894549e7",
"9b997430586ae6b5"
"31958f916cb20b65"
]
]
},
Expand Down Expand Up @@ -3890,12 +3890,11 @@
"topic": "",
"payload": "",
"payloadType": "date",
"x": 290,
"x": 190,
"y": 200,
"wires": [
[
"3d44bdce67fb80de",
"b041a6efa19167c7"
"c050da8732b0983f"
]
]
},
Expand Down Expand Up @@ -4389,7 +4388,7 @@
"type": "function",
"z": "9fe0155e5b3d31b1",
"name": "convert_to_recurring_schedule",
"func": "const midas_schedule = flow.get(\"midas_schedule\");\n\n// Assumptions:\n// time ranges start and end at hour boundaries\n// there are no gaps (not sure how to handle gaps, we would probably need to have separate schedules)\n// since the duration is only in the schedule\n// logic for that seems too complicated right now\n// start date is hardcoded (can make it be dynamic if we assume that the first entry\n// has the earliest start date)\n\nconst schedulePeriods = midas_schedule.ValueInformation.map((entry) => {\n return {\n // we get the hour by splitting the time and taking the\n // first entry, then we multiply by 3600 to get it in seconds\n startPeriod: entry.TimeStart.split(\":\")[0] * 3600,\n limit: entry.value,\n numberPhases: 3\n }\n});\nnode.warn(schedulePeriods);\n\n// based on section 3.6 of the OCPP spec\n\nconst final_schedule = {\n \"id\": 100, \n \"chargingProfileKind\": \"Recurring\",\n \"chargingProfilePurpose\": \"TxDefaultProfile\",\n \"recurrencyKind\": \"Daily\",\n \"stackLevel\": 0,\n \"chargingSchedule\": [\n {\n \"id\": 0,\n \"chargingRateUnit\": \"A\",\n \"chargingSchedulePeriod\": schedulePeriods,\n \"duration\": 86400,\n \"minChargingRate\": 0.0,\n \"startSchedule\": new Date(\"2022-01-01T00:00:00Z\"),\n }],\n}\n\n\n/*\nconst final_schedule = {\n id: 100,\n stackLevel: 0,\n chargingProfilePurpose: \"TxDefaultProfile\",\n chargingProfileKind: \"Recurring\",\n recurrencyKind: \"Daily\",\n chargingSchedule: {\n id: 0,\n duration: 86400,\n chargingRateUnit: \"A\",\n startSchedule: new Date(\"2022-01-01T00:00:00Z\").toString(),\n chargingSchedulePeriod: schedulePeriods\n }\n}\n*/\nreturn {payload: final_schedule};",
"func": "const midas_schedule = flow.get(\"midas_schedule\");\n\n// Assumptions:\n// time ranges start and end at hour boundaries\n// there are no gaps (not sure how to handle gaps, we would probably need to have separate schedules)\n// since the duration is only in the schedule\n// logic for that seems too complicated right now\n// start date is hardcoded (can make it be dynamic if we assume that the first entry\n// has the earliest start date)\n\nconst schedulePeriods = midas_schedule.ValueInformation.map((entry) => {\n return {\n // we get the hour by splitting the time and taking the\n // first entry, then we multiply by 3600 to get it in seconds\n startPeriod: entry.TimeStart.split(\":\")[0] * 3600,\n limit: entry.value,\n numberPhases: 3\n }\n});\nnode.warn(schedulePeriods);\n\n// based on section 3.6 of the OCPP spec\n\nconst final_schedule = {\n \"id\": 100, \n \"chargingProfileKind\": \"Recurring\",\n \"chargingProfilePurpose\": \"TxDefaultProfile\",\n \"recurrencyKind\": \"Daily\",\n \"stackLevel\": 0,\n \"chargingSchedule\": [\n {\n \"id\": 0,\n \"chargingRateUnit\": \"A\",\n \"chargingSchedulePeriod\": schedulePeriods,\n \"duration\": 86400,\n \"minChargingRate\": 0.0,\n \"startSchedule\": new Date(\"2022-01-01T08:00:00Z\"),\n }],\n}\n\n\n/*\nconst final_schedule = {\n id: 100,\n stackLevel: 0,\n chargingProfilePurpose: \"TxDefaultProfile\",\n chargingProfileKind: \"Recurring\",\n recurrencyKind: \"Daily\",\n chargingSchedule: {\n id: 0,\n duration: 86400,\n chargingRateUnit: \"A\",\n startSchedule: new Date(\"2022-01-01T00:00:00Z\").toString(),\n chargingSchedulePeriod: schedulePeriods\n }\n}\n*/\nreturn {payload: final_schedule};",
"outputs": 1,
"timeout": 0,
"noerr": 0,
Expand All @@ -4410,7 +4409,7 @@
"type": "function",
"z": "9fe0155e5b3d31b1",
"name": "massage_and_rescale",
"func": "// The MIDAS data currently has tariffs, which we don't support\n// we want to actually get curtailment data,\n// but don't have time to coordinate with CARB\n// so let's massage the tariff data to look like a curtailment.\n// Steps:\n// find the range of tariff changes\n// assume that range of current changes is from 0-48 (45)\n// Determine the scaling factor\n// determine the increment over the minimum tariff\n// apply the scaling factor\n// more tariff = lower usage, so subtract from the max current\n\nnode.warn(msg.payload);\nconst original_value_objs = msg.payload.ValueInformation;\nconst original_values = original_value_objs.map((entry) => {\n return entry.value;\n})\nconst min_original = Math.min(...original_values);\nconst max_original = Math.max(...original_values);\nconst scaling_factor = 48 / (max_original - min_original);\nnode.warn(\"min_original = \"+min_original+\" max original \"+max_original+\" scaling_factor\" + scaling_factor);\nconst new_value_objs = original_value_objs.map((entry) => {\n entry.value = 48 - ((entry.value - min_original) * scaling_factor);\n entry.Unit = 'A';\n return entry;\n});\nmsg.payload.ValueInformation = new_value_objs;\nreturn msg;",
"func": "// The MIDAS data currently has tariffs, which we don't support\n// we want to actually get curtailment data,\n// but don't have time to coordinate with CARB\n// so let's massage the tariff data to look like a curtailment.\n// Steps:\n// find the range of tariff changes\n// assume that range of current changes is from 0-16 (45)\n// Determine the scaling factor\n// determine the increment over the minimum tariff\n// apply the scaling factor\n// more tariff = lower usage, so subtract from the max current\n\nnode.warn(msg.payload);\nconst original_value_objs = msg.payload.ValueInformation;\nconst original_values = original_value_objs.map((entry) => {\n return entry.value;\n})\nconst min_original = Math.min(...original_values);\nconst max_original = Math.max(...original_values);\nconst scaling_factor = 16 / (max_original - min_original);\nnode.warn(\"min_original = \"+min_original+\" max original \"+max_original+\" scaling_factor\" + scaling_factor);\nconst new_value_objs = original_value_objs.map((entry) => {\n entry.value = 16 - ((entry.value - min_original) * scaling_factor);\n entry.Unit = 'A';\n return entry;\n});\nmsg.payload.ValueInformation = new_value_objs;\nreturn msg;",
"outputs": 1,
"timeout": 0,
"noerr": 0,
Expand Down Expand Up @@ -4464,7 +4463,7 @@
"xformat": "HH:mm:ss",
"interpolate": "linear",
"nodata": "",
"dot": false,
"dot": true,
"ymin": "",
"ymax": "",
"removeOlder": "3",
Expand Down Expand Up @@ -4591,5 +4590,66 @@
"84cf15f37335a643"
]
]
},
{
"id": "c050da8732b0983f",
"type": "function",
"z": "9fe0155e5b3d31b1",
"name": "convert_to_local_time",
"func": "node.warn(\"Converting \"+new Date(msg.payload)+ \" to \"+ new Date(msg.payload - 8*3600*1000))\nmsg.payload = msg.payload - 8 * 3600 * 1000\nreturn msg;",
"outputs": 1,
"timeout": 0,
"noerr": 0,
"initialize": "",
"finalize": "",
"libs": [],
"x": 420,
"y": 200,
"wires": [
[
"3d44bdce67fb80de",
"b041a6efa19167c7"
]
]
},
{
"id": "61e3f2339b3e0da0",
"type": "function",
"z": "9fe0155e5b3d31b1",
"name": "convert_to_utc",
"func": "msg.payload = msg.payload + 8 * 3600 * 1000\nreturn msg;",
"outputs": 1,
"timeout": 0,
"noerr": 0,
"initialize": "",
"finalize": "",
"libs": [],
"x": 220,
"y": 20,
"wires": [
[
"45fbfa036a572ce9"
]
]
},
{
"id": "31958f916cb20b65",
"type": "function",
"z": "9fe0155e5b3d31b1",
"name": "convert_to_utc",
"func": "msg.payload = msg.payload + 8 * 3600 * 1000\nreturn msg;",
"outputs": 1,
"timeout": 0,
"noerr": 0,
"initialize": "",
"finalize": "",
"libs": [],
"x": 480,
"y": 20,
"wires": [
[
"9b997430586ae6b5"
]
]
}
]
]

0 comments on commit 74e1a72

Please sign in to comment.