Skip to content

Commit

Permalink
Merge branch 'issue604_kpisDisaggregated' into gh-pages-custom
Browse files Browse the repository at this point in the history
  • Loading branch information
javiarrobas committed Mar 22, 2024
2 parents 6bdcdd6 + c5fc249 commit 4629df4
Show file tree
Hide file tree
Showing 27 changed files with 747 additions and 258 deletions.
3 changes: 2 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -68,14 +68,15 @@ Example RESTful interaction:

| Interaction | Request |
|-----------------------------------------------------------------------|-----------------------------------------------------------|
| Advance simulation with control input and receive measurements. | POST ``advance`` with optional json data "{<input_name>:<value>}" |
| Advance simulation with control input and receive measurements. | POST ``advance`` with optional arguments ``<input_name_u>:<value>``, and corresponding ``<input_name_activate>:<0 or 1>``, where 1 enables value overwrite and 0 disables (0 is default) |
| Initialize simulation to a start time using a warmup period in seconds. Also resets point data history and KPI calculations. | PUT ``initialize`` with required arguments ``start_time=<value>``, ``warmup_period=<value>``|
| Receive communication step in seconds. | GET ``step`` |
| Set communication step in seconds. | PUT ``step`` with required argument ``step=<value>`` |
| Receive sensor signal point names (y) and metadata. | GET ``measurements`` |
| Receive control signal point names (u) and metadata. | GET ``inputs`` |
| Receive test result data for the given point names between the start and final time in seconds. | PUT ``results`` with required arguments ``point_names=<list of strings>``, ``start_time=<value>``, ``final_time=<value>``|
| Receive test KPIs. | GET ``kpi`` |
| Receive test KPIs disaggregated into contributing components (e.g. each equipment or zone). | GET ``kpi_disaggregated`` |
| Receive test case name. | GET ``name`` |
| Receive boundary condition forecast from current communication step for the given point names for the horizon and at the interval in seconds. | PUT ``forecast`` with required arguments ``point_names=<list of strings>``, ``horizon=<value>``, ``interval=<value>``|
| Receive boundary condition forecast available point names and metadata. | GET ``forecast_points`` |
Expand Down
29 changes: 22 additions & 7 deletions data/get_html_IO.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,9 @@
2. Run BOPTEST test case on localhost:5000
3. Run this script
Outputs:
"inputs.txt": html code documenting the inputs
"measurements.txt": html code documenting the outputs
Output:
"inputs_measurements_forecasts.html" html code documenting inputs, outputs and
forecasts together
"""

# GENERAL PACKAGE IMPORT
Expand Down Expand Up @@ -40,24 +39,40 @@ def run():

# GET TEST INFORMATION
# --------------------
# Create single I/O file
# Inputs available
inputs = requests.get('{0}/inputs'.format(url)).json()['payload']
with open('inputs.txt', 'w') as f:
with open('inputs_measurements_forecasts.html', 'w') as f:
f.write('<h3>Model IO\'s</h3>\n')
f.write('<h4>Inputs</h4>\n')
f.write('The model inputs are:\n')
f.write('<ul>\n')
for i in sorted(inputs.keys()):
if 'activate' not in i:
f.write('<li>\n<code>{0}</code> [{1}] [min={2}, max={3}]: {4}\n</li>\n'.format(i,inputs[i]['Unit'],inputs[i]['Minimum'], inputs[i]['Maximum'], inputs[i]['Description']))
else:
f.write('<li>\n<code>{0}</code> [1] [min=0, max=1]: Activation signal to overwrite input {1} where 1 activates, 0 deactivates (default value)\n</li>\n'.format(i,i.replace('activate','')+'u'))
f.write('</ul>\n')
# Measurements available
measurements = requests.get('{0}/measurements'.format(url)).json()['payload']
with open('measurements.txt', 'w') as f:
with open('inputs_measurements_forecasts.html', 'a') as f:
f.write('<h4>Outputs</h4>\n')
f.write('The model outputs are:\n')
f.write('<ul>\n')
for i in sorted(measurements.keys()):
if 'activate' not in i:
f.write('<li>\n<code>{0}</code> [{1}] [min={2}, max={3}]: {4}\n</li>\n'.format(i,measurements[i]['Unit'],measurements[i]['Minimum'], measurements[i]['Maximum'], measurements[i]['Description']))
f.write('</ul>\n')
# Forecasts available
forecast_points = requests.get('{0}/forecast_points'.format(url)).json()['payload']
with open('forecast_points.txt', 'w') as f:
with open('inputs_measurements_forecasts.html', 'a') as f:
f.write('<h4>Forecasts</h4>\n')
f.write('The model forecasts are:\n')
f.write('<ul>\n')
for i in sorted(forecast_points.keys()):
if 'activate' not in i:
f.write('<li>\n<code>{0}</code> [{1}]: {2}\n</li>\n'.format(i,forecast_points[i]['Unit'],forecast_points[i]['Description']))
f.write('</ul>\n')
# --------------------

if __name__ == "__main__":
Expand Down
69 changes: 50 additions & 19 deletions kpis/kpi_calculator.py
Original file line number Diff line number Diff line change
Expand Up @@ -215,6 +215,40 @@ def get_core_kpis(self, price_scenario='Constant'):

return ckpi

def get_kpis_disaggregated(self, price_scenario='Constant'):
    '''Return the core KPIs of a test case disaggregated and
    with absolute values (not normalized by area or zone)
    to see the contributions of each element to each KPI.

    Parameters
    ----------
    price_scenario : str, optional
        Price scenario for cost kpi calculation.
        'Constant' or 'Dynamic' or 'HighlyDynamic'.
        Default is 'Constant'.

    Returns
    -------
    dkpi : dict
        Dictionary with the core KPIs disaggregated and
        with absolute values.

    '''

    # Refresh the per-element KPI dictionaries; the aggregated
    # return value of the core calculation is not needed here.
    self.get_core_kpis(price_scenario=price_scenario)

    # Collect each per-element dictionary under its KPI short name,
    # keeping the canonical KPI ordering.
    dkpi = OrderedDict(
        (name, getattr(self, name + '_dict'))
        for name in ('tdis', 'idis', 'ener', 'cost',
                     'emis', 'pele', 'pgas', 'pdih'))

    return dkpi

def get_thermal_discomfort(self):
'''The thermal discomfort is the integral of the deviation
of the temperature with respect to the predefined comfort
Expand Down Expand Up @@ -333,11 +367,10 @@ def get_energy(self):
if 'Power' in source:
for signal in self.case.kpi_json[source]:
pow_data = np.array(self._get_data_from_last_index(signal,self.i_last_ener))
self.ener_dict[signal] += \
trapz(pow_data,
self._get_data_from_last_index('time',self.i_last_ener))*2.77778e-7 # Convert to kWh
self.ener_dict_by_source[source+'_'+signal] += \
self.ener_dict[signal]
integral = trapz(pow_data,
self._get_data_from_last_index('time',self.i_last_ener))*2.77778e-7 # Convert to kWh
self.ener_dict[signal] += integral
self.ener_dict_by_source[source+'_'+signal] += integral
self.ener_tot = self.ener_tot + self.ener_dict[signal]/self.case._get_area() # Normalize total by floor area

# Assign to case
Expand Down Expand Up @@ -382,10 +415,10 @@ def get_peak_electricity(self):
df_pow_data_all = pd.concat([df_pow_data_all, df_pow_data], axis=1)
df_pow_data_all.index = pd.TimedeltaIndex(df_pow_data_all.index, unit='s')
df_pow_data_all['total_demand'] = df_pow_data_all.sum(axis=1)
df_pow_data_all = df_pow_data_all.resample('15T').mean()/self.case._get_area()/1000.
df_pow_data_all = df_pow_data_all.resample('15T').mean()/1000.
i = df_pow_data_all['total_demand'].idxmax()
peak = df_pow_data_all.loc[i,'total_demand']
self.pele_tot = peak
self.pele_tot = peak/self.case._get_area()
# Find contributions to peak by each signal
for signal in self.case.kpi_json[source]:
self.pele_dict[signal] = df_pow_data_all.loc[i,signal]
Expand Down Expand Up @@ -429,10 +462,10 @@ def get_peak_gas(self):
df_pow_data_all = pd.concat([df_pow_data_all, df_pow_data], axis=1)
df_pow_data_all.index = pd.TimedeltaIndex(df_pow_data_all.index, unit='s')
df_pow_data_all['total_demand'] = df_pow_data_all.sum(axis=1)
df_pow_data_all = df_pow_data_all.resample('15T').mean()/self.case._get_area()/1000.
df_pow_data_all = df_pow_data_all.resample('15T').mean()/1000.
i = df_pow_data_all['total_demand'].idxmax()
peak = df_pow_data_all.loc[i,'total_demand']
self.pgas_tot = peak
self.pgas_tot = peak/self.case._get_area()
# Find contributions to peak by each signal
for signal in self.case.kpi_json[source]:
self.pgas_dict[signal] = df_pow_data_all.loc[i,signal]
Expand Down Expand Up @@ -476,10 +509,10 @@ def get_peak_district_heating(self):
df_pow_data_all = pd.concat([df_pow_data_all, df_pow_data], axis=1)
df_pow_data_all.index = pd.TimedeltaIndex(df_pow_data_all.index, unit='s')
df_pow_data_all['total_demand'] = df_pow_data_all.sum(axis=1)
df_pow_data_all = df_pow_data_all.resample('15T').mean()/self.case._get_area()/1000.
df_pow_data_all = df_pow_data_all.resample('15T').mean()/1000.
i = df_pow_data_all['total_demand'].idxmax()
peak = df_pow_data_all.loc[i,'total_demand']
self.pdih_tot = peak
self.pdih_tot = peak/self.case._get_area()
# Find contributions to peak by each signal
for signal in self.case.kpi_json[source]:
self.pdih_dict[signal] = df_pow_data_all.loc[i,signal]
Expand Down Expand Up @@ -541,11 +574,10 @@ def get_cost(self, scenario='Constant'):
# Calculate costs
for signal in self.case.kpi_json[source]:
pow_data = np.array(self._get_data_from_last_index(signal,self.i_last_cost))
self.cost_dict[signal] += \
trapz(np.multiply(source_price_data,pow_data),
integral = trapz(np.multiply(source_price_data,pow_data),
self._get_data_from_last_index('time',self.i_last_cost))*factor
self.cost_dict_by_source[source+'_'+signal] += \
self.cost_dict[signal]
self.cost_dict[signal] += integral
self.cost_dict_by_source[source+'_'+signal] += integral
self.cost_tot = self.cost_tot + self.cost_dict[signal]/self.case._get_area() # Normalize total by floor area

# Assign to case
Expand Down Expand Up @@ -585,11 +617,10 @@ def get_emissions(self):
['Emissions'+source])
for signal in self.case.kpi_json[source]:
pow_data = np.array(self._get_data_from_last_index(signal,self.i_last_emis))
self.emis_dict[signal] += \
trapz(np.multiply(source_emissions_data,pow_data),
integral = trapz(np.multiply(source_emissions_data,pow_data),
self._get_data_from_last_index('time',self.i_last_emis))*2.77778e-7 # Convert to kWh
self.emis_dict_by_source[source+'_'+signal] += \
self.emis_dict[signal]
self.emis_dict[signal] += integral
self.emis_dict_by_source[source+'_'+signal] += integral
self.emis_tot = self.emis_tot + self.emis_dict[signal]/self.case._get_area() # Normalize total by floor area

# Update last integration index
Expand Down
4 changes: 3 additions & 1 deletion releasenotes.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,9 @@ Released on xx/xx/xxxx.
- Correct typo in documentation for ``multizone_office_simple_air``, cooling setback temperature changed from 12 to 30. This is for [#605](https://github.com/ibpsa/project1-boptest/issues/605).
- Modify unit tests for test case scenarios to only simulate two days after warmup instead of the whole two-week scenario. This is for [#576](https://github.com/ibpsa/project1-boptest/issues/576).
- Fix unit tests for possible false passes in certain test cases. This is for [#620](https://github.com/ibpsa/project1-boptest/issues/620).

- Add ``activate`` control inputs to all test case documentation and update ``get_html_IO.py`` to print one file with all inputs, outputs, and forecasts. This is for [#555](https://github.com/ibpsa/project1-boptest/issues/555).
- Add storing of scenario result trajectories, kpis, and test information to simulation directory within test case docker container. This is for [#626](https://github.com/ibpsa/project1-boptest/issues/626).
- Implement method to get disaggregated KPIs with absolute values. This enables a more comprehensive analysis of which elements contribute to each KPI. This is for [#604](https://github.com/ibpsa/project1-boptest/issues/604).

## BOPTEST v0.5.0

Expand Down
8 changes: 8 additions & 0 deletions restapi.py
Original file line number Diff line number Diff line change
Expand Up @@ -191,6 +191,13 @@ def get(self):
status, message, payload = case.get_kpis()
return construct(status, message, payload)

class KPI_Disaggregated(Resource):
    '''Interface to test case KPIs disaggregated and with absolute values.'''

    def get(self):
        '''GET request to receive KPIs disaggregated and with absolute values.'''
        # Delegate to the test case and wrap the resulting
        # (status, message, payload) triple in the standard response envelope.
        return construct(*case.get_kpis_disaggregated())

class Forecast(Resource):
'''Interface to test case forecast data.'''
Expand Down Expand Up @@ -267,6 +274,7 @@ def post(self):
api.add_resource(Forecast_Points, '/forecast_points')
api.add_resource(Results, '/results')
api.add_resource(KPI, '/kpi')
api.add_resource(KPI_Disaggregated, '/kpi_disaggregated')
api.add_resource(Forecast, '/forecast')
api.add_resource(Scenario, '/scenario')
api.add_resource(Name, '/name')
Expand Down
127 changes: 108 additions & 19 deletions testcase.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import os
import json
import array as a
import pandas as pd

class TestCase(object):
'''Class that implements the test case.
Expand Down Expand Up @@ -359,6 +360,8 @@ def advance(self, u):
# Check if scenario is over
if self.start_time >= self.end_time:
self.scenario_end = True
# store results
self.store_results()
# Log and return
logging.info(message)
return status, message, payload
Expand Down Expand Up @@ -789,6 +792,50 @@ def get_kpis(self):

return status, message, payload

def get_kpis_disaggregated(self):
    '''Returns KPIs disaggregated and with absolute values.

    Requires standard sensor signals.

    Parameters
    ----------
    None

    Returns
    -------
    status: int
        Indicates whether a request for querying the KPIs has been completed.
        If 200, the KPIs were successfully queried.
        If 500, an internal error occurred.
    message: str
        Includes detailed debugging information.
    payload : dict
        Dictionary containing KPIs disaggregated and with absolute values.
        {<kpi_ele_name>:<kpi_ele_value>}
        Returns None if error during calculation.

    '''

    status = 200
    message = "Queried disaggregated KPIs successfully."
    # Map the scenario keyword onto the price scenario name expected by the
    # KPI calculator.  An unrecognized keyword raises KeyError, which is
    # reported through the 500 path below instead of leaving
    # price_scenario unbound (a confusing NameError with the previous
    # if/elif chain).
    price_scenarios = {'constant': 'Constant',
                       'dynamic': 'Dynamic',
                       'highly_dynamic': 'HighlyDynamic'}
    try:
        # Set correct price scenario for cost
        price_scenario = price_scenarios[self.scenario['electricity_price']]
        # Calculate the disaggregated kpis
        payload = self.cal.get_kpis_disaggregated(price_scenario=price_scenario)
    except Exception:
        payload = None
        status = 500
        message = "Failed to query disaggregated KPIs: {}".format(traceback.format_exc())
        logging.error(message)
    logging.info(message)

    return status, message, payload

def get_forecast_points(self):
'''Returns a dictionary of available forecast points and their meta-data.
Expand Down Expand Up @@ -1142,27 +1189,17 @@ def post_results_to_dashboard(self, api_key, tags, unit_test=False):
dash_server = os.environ['BOPTEST_DASHBOARD_SERVER']
# Create payload
uid = str(uuid.uuid4())
payload = {
"results": [
{
"uid": uid,
"dateRun": str(datetime.now(tz=pytz.UTC)),
"boptestVersion": self.version,
"isShared": True,
"controlStep": str(self.get_step()[2]),
"account": {
test_results = self._get_test_results()
api_parameters = {
"uid": uid,
"isShared": True,
"account": {
"apiKey": api_key
},
"forecastParameters":{},
"tags": tags,
"kpis": self.get_kpis()[2],
"scenario": self.add_forecast_uncertainty(self.keys_to_camel_case(self.get_scenario()[2])),
"buildingType": {
"uid": self.get_name()[2]['name']
}
}
]
},
"tags": tags,
}
test_results.update(api_parameters)
payload = {"results":[test_results]}
dash_url = "%s/api/results" % dash_server
# Post to dashboard
if not unit_test:
Expand Down Expand Up @@ -1327,6 +1364,58 @@ def _get_full_current_state(self):

return z

def _get_test_results(self):
    '''Collect test results and information into a dictionary.

    Returns
    -------
    results: dict
        Dictionary of test specific results and information.

    '''

    # Scenario keys are camel-cased and forecast uncertainty fields are
    # appended to match the dashboard result schema.
    scenario = self.add_forecast_uncertainty(self.keys_to_camel_case(self.get_scenario()[2]))
    results = {}
    results["dateRun"] = str(datetime.now(tz=pytz.UTC))
    results["boptestVersion"] = self.version
    results["controlStep"] = str(self.get_step()[2])
    results["forecastParameters"] = {}
    results["kpis"] = self.get_kpis()[2]
    results["scenario"] = scenario
    results["buildingType"] = {"uid": self.get_name()[2]['name']}

    return results

def store_results(self):
    '''Stores results from scenario in working directory as json and csv.

    When run with Service, the result will be packed in the result tarball
    and be retrievable with the test_id.

    Returns
    -------
    None

    '''

    base_name = "results"
    # Summary of test information and KPIs, stored as json
    summary = self._get_test_results()
    with open(base_name + ".json", "w") as outfile:
        json.dump(summary, outfile)
    # Use output metadata (not the input list alone) so duplicate
    # input points are removed from the export list.
    point_names = self.input_names + list(self.outputs_metadata.keys())
    # Full result trajectories over the scenario period
    trajectories = self.get_results(point_names, self.initial_time, self.end_time)[2]
    # Store trajectories as csv with time as the index
    frame = pd.DataFrame.from_dict(trajectories)
    frame.index = frame['time']
    frame.to_csv(base_name + ".csv")

def to_camel_case(self, snake_str):
components = snake_str.split('_')
# We capitalize the first letter of each component except the first one
Expand Down
Loading

0 comments on commit 4629df4

Please sign in to comment.