import numpy as np
import pandas as pd
import xarray as xr
import os
import time

## ===== GLOBAL CONSTANTS ====== ##

# Maps building-class codes (GKLAS, Swiss building register) to service-sector
# demand-profile names. NOTE(review): 'MEAN' presumably denotes a generic /
# average service profile for classes without a dedicated one — confirm.
SECTOR_MAP = {
    1211: 'HOTELS',
    1212: 'HOTELS',
    1220: 'COMMERCIAL OFFICES',
    1230: 'COMMERCIAL OFFICES',
    1231: 'RESTAURANTS',
    1261: 'MEAN',
    1262: 'MEAN',
    1263: 'SCHOOLS',
    1264: 'HOSPITALS',
    1265: 'MEAN',
}

## ====== ANNUAL SUM ======== ##

def get_yearly_sum(ds, var=None):
    """Sum typical-day hourly values of ``ds`` over a 365-day year.

    Each timestamp in ``ds`` is treated as one hour of a typical weekday or
    weekend day of its month: every hourly value is weighted by the number of
    days in that month, summed over the ``timestamp`` dimension, and the
    weekday / weekend totals are then combined with 5/7 and 2/7 weights.

    Parameters
    ----------
    ds : xarray.Dataset or xarray.DataArray
        Must expose a datetime-like ``timestamp`` coordinate and a
        ``daytype`` dimension with labels ``'weekday'`` and ``'weekend'``.
    var : str, optional
        If given, only this data variable of ``ds`` is summed; otherwise the
        whole object is summed.

    Returns
    -------
    xarray object with the ``timestamp`` and ``daytype`` dimensions reduced.
    """
    # Month lengths for a non-leap (365-day) year.
    days_of_each_month = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    days_of_month = pd.DataFrame(
        {'month': range(1, 13), 'n_days': days_of_each_month}
    )

    # Attach the month length to every timestamp of ds, as an xarray object
    # aligned on the 'timestamp' dimension.
    timestamps = ds.timestamp.to_dataframe()
    timestamps['month'] = timestamps.timestamp.dt.month
    days_of_month = (
        timestamps.merge(days_of_month, on='month', how='left')
        .drop(columns='month')
        .set_index('timestamp')
        .to_xarray()
    )

    # Weight each hourly value by the number of days of its month, then sum
    # over the year.
    if var is not None:
        annual_sum = (ds[var] * days_of_month.n_days).sum(dim='timestamp')
    else:
        annual_sum = (ds * days_of_month.n_days).sum(dim='timestamp')

    # Combine weekday / weekend totals with their share of the week.
    week_weighted = (annual_sum.sel(daytype='weekday') * 5 / 7
                     + annual_sum.sel(daytype='weekend') * 2 / 7)
    return week_weighted

## ======= DEMAND MAPPING FUNCTIONS ====== ##

def get_cluster_demand_RES(BLD_df, demand,
                           grouping_col=None,
                           uniqueID_col='EGID',
                           dwell_nb='N_DWELL',
                           SFH_demand='maison',
                           MFH_demand='appart',
                           communal_demand='communal_app',
                           add_communal_demand=True):
    """Map per-dwelling residential demand profiles onto a building stock.

    Single-family houses (exactly one dwelling) each get the SFH profile;
    multi-family houses (more than one dwelling) get the per-apartment
    profile once per dwelling, optionally plus a per-dwelling communal
    profile. Buildings whose dwelling count is 0 or missing are ignored.

    Parameters
    ----------
    BLD_df : pandas.DataFrame
        Building register; must contain column ``dwell_nb`` and, when
        grouping, ``uniqueID_col`` and ``grouping_col``.
    demand : mapping of profile name -> demand profile
        Indexed by the ``SFH_demand``, ``MFH_demand`` and ``communal_demand``
        keys.
    grouping_col : str, optional
        Column to aggregate buildings by (e.g. a cluster id). When None the
        whole stock is treated as a single cluster and scalar counts are used.
    uniqueID_col : str
        Unique building identifier, used only to count SFH per group.
    dwell_nb : str
        Column holding the number of dwellings per building.
    SFH_demand, MFH_demand, communal_demand : str
        Keys into ``demand`` for the respective profiles.
    add_communal_demand : bool
        Whether to add the communal profile once per MFH dwelling.

    Returns
    -------
    list
        ``[cluster_demand, building_counts]`` where ``building_counts``
        holds ``N_SFH`` (number of single-family buildings) and ``N_DWELL``
        (number of multi-family dwellings), per cluster when grouped.
    """
    SFH = BLD_df[BLD_df[dwell_nb] == 1]   # single-family houses
    MFH = BLD_df[BLD_df[dwell_nb] > 1]    # multi-family houses

    if grouping_col is None:
        # Whole stock as one cluster: plain scalar counts suffice.
        building_counts = {
            'N_SFH': len(SFH),
            'N_DWELL': MFH[dwell_nb].sum(),
        }
    else:
        # Per-cluster counts; fillna(0) covers clusters that contain only
        # one of the two building types.
        building_counts = xr.Dataset({
            'N_SFH': SFH.groupby(grouping_col)[uniqueID_col].count(),
            'N_DWELL': MFH.groupby(grouping_col)[dwell_nb].sum(),
        }).fillna(0)

    cluster_demand = (building_counts['N_SFH'] * demand[SFH_demand]
                      + building_counts['N_DWELL'] * demand[MFH_demand])

    if add_communal_demand:
        # Communal demand (presumably shared areas) scales with the number
        # of MFH dwellings — TODO confirm against the profile definition.
        cluster_demand = cluster_demand + (
            building_counts['N_DWELL'] * demand[communal_demand]
        )

    return [cluster_demand, building_counts]

def get_cluster_demand_SER(BLD_df, demand_norm,
                           demand_annual_TWh=15.6,  # (from Swiss electricity statistics 2018)
                           grouping_col=None,
                           service_area='SER_AREA',
                           sector_map=SECTOR_MAP,
                           map_col='GKLAS'):
    """Distribute an annual service-sector demand onto a building stock.

    The annual total (in TWh) is split across buildings proportionally to
    their service floor area, converted to an average daily demand in Wh,
    and shaped in time with the normalized per-sector profiles
    ``demand_norm``.

    Assumes that ``demand_norm`` sums to 1 for each month and
    weekend/weekday — i.e. weekends, weekdays & months are weighted equally.

    Parameters
    ----------
    BLD_df : pandas.DataFrame
        Building register; must contain ``service_area``, ``map_col`` and,
        when grouping, ``grouping_col``.
    demand_norm : xarray object
        Normalized hourly profiles, one variable per service type.
    demand_annual_TWh : float
        Annual sector total to distribute, in TWh.
    grouping_col : str, optional
        Column to aggregate buildings by; None treats the stock as one
        cluster.
    service_area : str
        Column with the service floor area per building.
    sector_map : dict
        Building-class code -> service-type name (default ``SECTOR_MAP``).
    map_col : str
        Column holding the building-class code.

    Returns
    -------
    tuple
        ``(hourly_demand_all_services, hourly_demand_per_service)``.
    """
    # Copy only the needed columns; without a grouping column a constant
    # 'dummy' key stands in so the groupby machinery below is identical.
    if grouping_col is None:
        df = BLD_df[[service_area, map_col]].copy()
        df['dummy'] = 1
        grouping_vars = ['service_type', 'dummy']
    else:
        df = BLD_df[[service_area, map_col, grouping_col]].copy()
        grouping_vars = ['service_type', grouping_col]

    # Average daily demand per building, proportional to its share of the
    # total service area. The 1e12 factor converts TWh -> Wh (the original
    # "use GW" comment was wrong).
    df['area_weight'] = df[service_area] / df[service_area].sum()
    df['daily_demand'] = df['area_weight'] * demand_annual_TWh * 1e12 / 365

    # Map building-class codes to service types.
    df['service_type'] = df[map_col].map(sector_map)

    # Aggregate daily demand per service type (and cluster, if any); the
    # unstack turns service types into one data variable each.
    daily_demand_cluster = (
        df.groupby(grouping_vars).daily_demand.sum()
        .unstack(level=0)
        .to_xarray()
        .fillna(0)
    )

    # Remove the placeholder dimension again.
    if grouping_col is None:
        daily_demand_cluster = daily_demand_cluster.drop('dummy').squeeze()

    # Shape the daily totals in time with the normalized profiles, then sum
    # over all service types for the aggregate series.
    hourly_demand_per_service = daily_demand_cluster * demand_norm
    hourly_demand_all_services = (
        hourly_demand_per_service.to_dataframe().sum(axis=1).to_xarray()
    )

    # return the hourly demand
    return hourly_demand_all_services, hourly_demand_per_service