@@ -806,8 +806,117 @@ def graph_curtailment_per_tech(tools):
806806 df .plot (ax = ax , kind = 'line' , xlabel = 'Period' , marker = "x" , ** kwargs )
807807
808808 # Set the y-axis to use percent
809- ax .yaxis .set_major_formatter (tools .mplt .ticker .PercentFormatter (1.0 ))
809+ ax .yaxis .set_major_formatter (tools .plt .ticker .PercentFormatter (1.0 ))
810810 # Horizontal line at 100%
811811 # ax.axhline(y=1, linestyle="--", color='b')
812812
813813
@graph(
    "energy_balance_2",
    title="Balance between demand, generation and storage for last period",
    note="Dashed green and red lines are total generation and total demand (incl. transmission losses),"
         " respectively.\n Dotted line is the total state of charge (scaled for readability)."
         "\n We used a 14-day rolling mean to smoothen out values."
)
def graph_energy_balance_2(tools):
    """
    Plot, for the last period only, a 14-day rolling mean of dispatch per
    technology together with total demand (incl. transmission losses), total
    generation and the storage state of charge (rescaled to the plot's
    y-range). Shades the gap between generation and demand green (surplus)
    or red (deficit).

    NOTE(review): relies on project-side `tools` helpers (get_dataframe,
    transform.timestamp/gen_type, get_axes, get_colors) whose exact contracts
    are not visible here — verify column names against those helpers.
    """
    # Get dispatch dataframe
    dispatch = tools.get_dataframe("dispatch.csv", usecols=[
        "timestamp", "gen_tech", "gen_energy_source", "DispatchGen_MW", "scenario_name"
    ]).rename({"DispatchGen_MW": "value"}, axis=1)
    dispatch = tools.transform.gen_type(dispatch)

    # Sum dispatch across all the projects of the same type and timepoint
    dispatch = dispatch.groupby(["timestamp", "gen_type"], as_index=False).sum()
    # Storage dispatch is excluded from the generation lines; storage shows up
    # separately via the state-of-charge (dotted) line below.
    dispatch = dispatch[dispatch["gen_type"] != "Storage"]

    # Get load dataframe
    load = tools.get_dataframe("load_balance.csv", usecols=[
        "timestamp", "zone_demand_mw", "TXPowerNet", "scenario_name"
    ])

    def process_time(df):
        # Keep only rows from the last (max) period and index by datetime.
        # Assumes `tools.transform.timestamp` added "period" and "datetime"
        # columns — TODO confirm against that helper.
        df = df.astype({"period": int})
        df = df[df["period"] == df["period"].max()].drop(columns="period")
        return df.set_index("datetime")

    # Sum load across all the load zones
    load = load.groupby(["timestamp"], as_index=False).sum()

    # Include Tx Losses in demand and flip sign
    # (zone_demand_mw is presumably negative withdrawals — verify upstream)
    load["value"] = (load["zone_demand_mw"] + load["TXPowerNet"]) * -1

    # Rename and convert from wide to long format
    load = load[["timestamp", "value"]]

    # Add the timestamp information and make period string to ensure it doesn't mess up the graphing
    dispatch = process_time(tools.transform.timestamp(dispatch))
    load = process_time(tools.transform.timestamp(load))

    # Convert to TWh (incl. multiply by timepoint duration)
    dispatch["value"] *= dispatch["tp_duration"] / 1e6
    load["value"] *= load["tp_duration"] / 1e6

    # Rolling-window parameters: a centered 14-day window, expressed as a
    # pandas offset alias (e.g. "14D"); `offset` is half the window, used to
    # trim edge rows where the centered window is incomplete.
    days = 14
    freq = str(days) + "D"
    offset = tools.pd.Timedelta(freq) / 2

    def rolling_sum(df):
        # Centered time-based rolling sum of "value", divided by the window
        # length to yield an average *daily* energy figure.
        df = df.rolling(freq, center=True).value.sum().reset_index()
        df["value"] /= days
        # Drop the first/last half-window, where the rolling stat is partial.
        df = df[(df.datetime.min() + offset < df.datetime) & (df.datetime < df.datetime.max() - offset)]
        return df

    # Dispatch is smoothed per technology (rolling over each gen_type group);
    # load is smoothed as a single series.
    dispatch = rolling_sum(dispatch.groupby("gen_type", as_index=False))
    load = rolling_sum(load).set_index("datetime")["value"]

    # Get the state of charge data
    soc = tools.get_dataframe("StateOfCharge.csv", dtype={"STORAGE_GEN_TPS_1": str}) \
        .rename(columns={"STORAGE_GEN_TPS_2": "timepoint", "StateOfCharge": "value"})
    # Sum over all the projects that are in the same scenario with the same timepoint
    soc = soc.groupby(["timepoint"], as_index=False).sum()
    soc["value"] /= 1e6  # Convert to TWh
    # Remember the pre-smoothing maximum so the SoC line can be rescaled
    # to the plot's y-range further down.
    max_soc = soc["value"].max()

    # Group by time: smooth SoC with the same centered window (mean, not sum,
    # since SoC is a level rather than a flow) and trim partial edges.
    soc = process_time(tools.transform.timestamp(soc, use_timepoint=True, key_col="timepoint"))
    soc = soc.rolling(freq, center=True)["value"].mean().reset_index()
    soc = soc[(soc.datetime.min() + offset < soc.datetime) & (soc.datetime < soc.datetime.max() - offset)]
    soc = soc.set_index("datetime")["value"]

    # Drop all-zero rows, pivot to one column per technology, and order the
    # columns by variability (std) so the legend lists steadier techs first.
    dispatch = dispatch[dispatch["value"] != 0]
    dispatch = dispatch.pivot(columns="gen_type", index="datetime", values="value")
    dispatch = dispatch[dispatch.std().sort_values().index].rename_axis("Technology", axis=1)
    total_dispatch = dispatch.sum(axis=1)

    # Upper bound of the plot: the larger of peak generation and peak demand.
    max_val = max(total_dispatch.max(), load.max())

    # Scale soc to the graph
    soc *= max_val / max_soc

    # Plot
    # Get the colors for the lines
    # plot
    ax = tools.get_axes(ylabel="Average Daily Generation (TWh)")
    ax.set_ylim(0, max_val * 1.05)
    dispatch.plot(
        ax=ax,
        color=tools.get_colors()
    )
    soc.plot(ax=ax, color="black", linestyle="dotted")
    load.plot(ax=ax, color="red", linestyle="dashed")
    total_dispatch.plot(ax=ax, color="green", linestyle="dashed")
    # Shade surplus (generation above demand) green and deficit red.
    ax.fill_between(total_dispatch.index, total_dispatch.values, load.values, alpha=0.2, where=load < total_dispatch, facecolor="green")
    ax.fill_between(total_dispatch.index, total_dispatch.values, load.values, alpha=0.2, where=load > total_dispatch, facecolor="red")
911+
912+
@graph(
    "dispatch_map",
    title="Dispatched electricity per load zone"
)
def dispatch_map(tools):
    """Draw a map with one pie chart per load zone showing annual dispatched energy split by technology."""
    # Read the zonal annual summary; the typical-year energy column is the
    # quantity we chart, so expose it under the generic name "value".
    summary = tools.get_dataframe("dispatch_zonal_annual_summary.csv")
    summary = summary.rename({"Energy_GWh_typical_yr": "value"}, axis=1)
    # Tag each row with its technology type.
    summary = tools.transform.gen_type(summary)
    # Collapse individual projects into one total per (technology, zone) pair.
    per_zone = summary.groupby(["gen_type", "gen_load_zone"], as_index=False)["value"].sum()
    tools.maps.graph_pie_chart(per_zone)
0 commit comments