-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathfunctions_GL_paper_V3.py
2915 lines (2423 loc) · 125 KB
/
functions_GL_paper_V3.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
import pandas as pd
import numpy as np
import requests
import pypsa
import pypsatopo
import parameters_GL_paper_V3 as p
import os
import sys
import matplotlib.pyplot as plt
import pickle as pkl
import math
import itertools
import bisect
import dataframe_image as dfi
import seaborn as sns
import re
from scipy.stats import pearsonr
# -------TECHNO-ECONOMIC DATA & ANNUITY
def annuity(n, r):
    """Return the annuity factor for an asset with lifetime ``n`` years and
    discount rate ``r``, e.g. ``annuity(20, 0.05) * 20 = 1.6``.

    For a non-positive discount rate the factor degenerates to straight-line
    depreciation, ``1 / n``.
    """
    if r <= 0:
        return 1 / n
    return r / (1. - (1. + r) ** -n)
def prepare_costs(cost_file, USD_to_EUR, discount_rate, Nyears, lifetime):
    """Compile the techno-economic cost DataFrame used by the model.

    cost_file : csv with a (technology, parameter) multi-index, read from the
        technology catalogue and other sources.
    Returns a DataFrame with one row per technology and an annualized
    ``fixed`` cost column.
    """
    # Nyears = years in the myopic-optimization interval (1 for an annual run)
    raw = pd.read_csv(cost_file, index_col=[0, 1]).sort_index()
    # harmonize units to MW and EUR
    raw.loc[raw.unit.str.contains("/kW"), "value"] *= 1e3
    raw.loc[raw.unit.str.contains("USD"), "value"] *= USD_to_EUR
    # min_count=1 keeps NaNs for missing parameters so fillna can apply defaults
    costs = raw.loc[:, "value"].unstack(level=1).groupby("technology").sum(min_count=1)
    defaults = {"CO2 intensity": 0,
                "FOM": 0,
                "VOM": 0,
                "discount rate": discount_rate,
                "efficiency": 1,
                "fuel": 0,
                "investment": 0,
                "lifetime": lifetime}
    costs = costs.fillna(defaults)
    # annualized investment cost including the fixed O&M share (FOM in %/year)
    costs["fixed"] = [
        (annuity(row["lifetime"], row["discount rate"]) + row["FOM"] / 100) * row["investment"] * Nyears
        for _, row in costs.iterrows()
    ]
    return costs
def cost_add_technology(discount_rate, tech_costs, technology, investment, lifetime, FOM):
    '''Compute the annualized fixed cost for any technology from its inputs
    and add/update the corresponding row in the ``tech_costs`` DataFrame.'''
    fixed = (annuity(lifetime, discount_rate) + FOM / 100) * investment
    # write the fields in the same order as the original implementation
    for field, value in (("fixed", fixed),
                         ("lifetime", lifetime),
                         ("FOM", FOM),
                         ("investment", investment)):
        tech_costs.at[technology, field] = value
    return tech_costs
def add_technology_cost(tech_costs):
    """Add the technologies missing from the original cost file to ``tech_costs``.

    Investments, lifetimes and FOM come from the parameters module; the CO2
    cylinder storage reuses lifetime/FOM of the catalogued CO2 storage tank.
    """
    extra_techs = (
        ('CO2 storage cylinders', p.CO2_cylinders_inv,
         tech_costs.at['CO2 storage tank', 'lifetime'], tech_costs.at['CO2 storage tank', 'FOM']),
        ('CO2_pipeline_gas', p.CO2_pipeline_inv, p.CO2_pipeline_lifetime, p.CO2_pipeline_FOM),
        ('H2_pipeline_gas', p.H2_pipeline_inv, p.H2_pipeline_lifetime, p.H2_pipeline_FOM),
        ('CO2_compressor', p.CO2_comp_inv, p.CO2_comp_lifetime, p.CO2_comp_FOM),
        ('DH heat exchanger', p.DH_HEX_inv, p.DH_HEX_lifetime, p.DH_HEX_FOM),
        ('Slow pyrolysis', p.Pyrolysis_inv, p.Pyrolysis_lifetime, p.Pyrolysis_FOM),
        ('Electrolysis small', p.Electrolysis_small_inv, p.Electrolysis_small_lifetime, p.Electrolysis_small_FOM),
        ('Electrolysis large', p.Electrolysis_large_inv, p.Electrolysis_large_lifetime, p.Electrolysis_large_FOM),
    )
    for technology, investment, lifetime, FOM in extra_techs:
        cost_add_technology(p.discount_rate, tech_costs, technology, investment, lifetime, FOM)
    return tech_costs
# ------ INPUTS PRE-PROCESSING ----
def GL_inputs_to_eff(GL_inputs):
    '''Read the GreenLab energy/material flow table and normalize each plant
    column into multilink efficiencies.

    Convention: negative values = flows CONSUMED by the plant,
    positive values = flows PRODUCED by the plant.
    Efficiencies are normalized with the plant's bus0 flow as reference;
    zero flows become NaN so they are ignored downstream.
    '''
    eff = GL_inputs.drop(columns='Bus Unit').drop(index='bus0')
    for plant in eff.columns:
        # name of the reference carrier (bus0) for this plant ...
        ref_carrier = GL_inputs.loc['bus0', plant]
        # ... and its (negative, i.e. consumed) flow value
        ref_flow = GL_inputs.loc[ref_carrier, plant]
        eff[plant] = eff[plant] / -ref_flow
    eff[eff == 0] = np.nan
    return eff
def balance_bioCH4_MeOH_demand_GL():
    '''Preprocess the GreenLab site input data: derive the bioCH4 "demand"
    and a randomized weekly methanol demand, and save both to the CSV input
    files referenced in the parameters module.

    NOTE(review): uses np.random without a seed, so the methanol delivery
    profile differs between runs — confirm whether that is intended.
    '''
    '''Load GreenLab inputs'''
    GL_inputs = pd.read_excel(p.GL_input_file, sheet_name='Overview_2', index_col=0)
    GL_eff = GL_inputs_to_eff(GL_inputs)
    '''bioCH4 production ('demand')'''
    # constant hourly series: rated bioCH4 output scaled by the biogas-plant
    # full-load-hour factor
    bioCH4_prod = p.ref_df.copy()
    bioCH4_prod = bioCH4_prod.rename(columns={p.ref_col_name: 'bioCH4 demand MWh'})
    bioCH4_prod['bioCH4 demand MWh'] = np.abs(
        GL_inputs.loc["bioCH4", 'SkiveBiogas']) * p.f_FLH_Biogas  # MWh Yearly demand delivered
    bioCH4_prod.to_csv(p.bioCH4_prod_input_file, sep=';')  # MWh/h
    """Methanol demand"""
    # maximum of MeOH (yearly) demand compatible with CO2 produced from the biogas plant
    Methanol_demand_y_max = np.abs(GL_eff.at['Methanol', 'Methanol plant']) * np.abs(
        GL_inputs.at['CO2 pure', 'SkiveBiogas']) * p.f_FLH_Biogas * p.FLH_y  # Max MWh MeOH Yearly delivered
    # Create randomized weekly delivery (hourly time series with one nonzero
    # delivery event per week)
    f_delivery = 24 * 7  # frequency of delivery in (h)
    n_delivery = len(p.hours_in_period) // f_delivery
    # random weights, normalized so the deliveries sum to the yearly maximum
    KK = np.random.uniform(low=0.0, high=1.0, size=n_delivery)
    q_delivery = KK * Methanol_demand_y_max / np.sum(KK)  # quantity for each delivery
    empty_v = np.zeros(len(p.hours_in_period))
    delivery = pd.DataFrame({'a': empty_v})
    Methanol_demand_max = p.ref_df.copy()
    Methanol_demand_max.rename(columns={p.ref_col_name: 'Methanol demand MWh'}, inplace=True)
    for i in range(n_delivery):
        delivery_ind = (i + 1) * f_delivery - 10  # Delivery at 14:00
        delivery.iloc[delivery_ind] = q_delivery[i]
    Methanol_demand_max['Methanol demand MWh'] = delivery['a'].values
    # NOTE(review): original comment said "t/h" but values are MWh — verify units
    Methanol_demand_max.to_csv(p.Methanol_demand_max_input_file, sep=';')  # MWh/h
    return
def load_input_data():
    """Load all saved CSV/Excel inputs and align them to the model time index.

    Returns the tuple of DataFrames consumed by pre_processing_all_inputs, in
    the same order as before.
    """
    GL_inputs = pd.read_excel(p.GL_input_file, sheet_name='Overview_2', index_col=0)
    GL_eff = GL_inputs_to_eff(GL_inputs)

    def _read(path):
        # read a semicolon-separated CSV and re-index it on the model hours
        return pd.read_csv(path, sep=';', index_col=0).set_axis(p.hours_in_period)

    Elspotprices = _read(p.El_price_input_file)            # currency/MWh
    CO2_emiss_El = _read(p.CO2emis_input_file)             # kg/MWh CO2
    bioCH4_prod = _read(p.bioCH4_prod_input_file)          # MWh/h
    CF_wind = _read(p.CF_wind_input_file)                  # capacity factor (-)
    CF_solar = _read(p.CF_solar_input_file)                # capacity factor (-)
    NG_price_year = _read(p.NG_price_year_input_file)      # currency/MWh
    Methanol_demand_max = _read(p.Methanol_demand_max_input_file)  # MWh/h methanol
    NG_demand_DK = _read(p.NG_demand_input_file)           # MWh/h
    El_demand_DK1 = _read(p.El_external_demand_input_file)  # MWh/h
    DH_external_demand = _read(p.DH_external_demand_input_file)  # MWh/h
    return GL_inputs, GL_eff, Elspotprices, CO2_emiss_El, bioCH4_prod, CF_wind, CF_solar, NG_price_year, Methanol_demand_max, NG_demand_DK, El_demand_DK1, DH_external_demand
# ---- DEMANDS for H2, MeOH and El_DK1_GLS
def preprocess_H2_grid_demand(flh_H2, H2_size, NG_demand_DK, flag_one_delivery):
    '''Create the H2 demand from the grid as a monthly (or single yearly)
    delivery time series, save it to CSV and return it.

    flh_H2 : target full load hours of the plant (h/y)
    H2_size : plant size in MW of H2 produced
    NG_demand_DK : hourly NG demand, used as the monthly delivery profile
        when p.h2_demand_flag == 'profile'
    flag_one_delivery : True -> one single delivery at the end of the year;
        False -> monthly deliveries
    '''
    H2_demand_y = p.ref_df.copy()
    col_name = 'H2_demand' + ' MWh/month'
    H2_demand_y.rename(columns={p.ref_col_name: col_name}, inplace=True)
    h2_demand_flag = p.h2_demand_flag
    n = 12  # number of intervals (months)
    H2_y_total = H2_size * flh_H2  # total yearly H2 delivery (MWh)
    for i in range(1, n + 1):
        # Month boundaries as date strings. '%02d' zero-pads months 1-9, which
        # reproduces the original '0%d'/'%d' branches for every i in 1..12.
        st_time = p.start_date[0:5] + '%02d' % i + p.start_date[7:]
        if i == n:
            # the last interval closes at the end of the simulated period
            end_time = p.end_date
        else:
            end_time = p.start_date[0:5] + '%02d' % (i + 1) + p.start_date[7:]
        if h2_demand_flag == 'profile':
            # monthly delivery proportional to the national NG demand share
            H2_val = np.sum(NG_demand_DK.loc[st_time:end_time, :].values) / np.sum(
                NG_demand_DK.values) * H2_y_total
            H2_demand_y.at[end_time, col_name] = H2_val
        else:
            # constant delivery every month
            H2_demand_y.at[end_time, col_name] = H2_y_total / n
    if flag_one_delivery:
        # aggregate all deliveries into the very last time step of the year
        delivery = H2_demand_y.iloc[:, 0].sum()
        H2_demand_y = p.ref_df.copy()
        H2_demand_y.iloc[-1, 0] = delivery
    H2_demand_y.to_csv(p.H2_demand_input_file, sep=';')
    return H2_demand_y
def preprocess_methanol_demand(Methanol_demand_max, f_max_MeOH_y_demand, flag_one_delivery):
    '''Build the methanol demand (load) for the GL network.

    f_max_MeOH_y_demand : fraction (0-1) of the maximum MeOH production
        compatible with the yearly CO2 output of the biogas plant.
    flag_one_delivery : True -> aggregate the whole yearly demand into the
        last time step; False -> keep the weekly profile, scaled.
    '''
    if flag_one_delivery:
        # single delivery: zero everywhere, full scaled demand on the last row
        demand = Methanol_demand_max.copy()
        demand["Methanol demand MWh"] = 0
        demand.iloc[-1, :] = Methanol_demand_max.values.sum() * f_max_MeOH_y_demand
    else:
        # weekly deliveries, scaled by the demand fraction
        demand = Methanol_demand_max * f_max_MeOH_y_demand
    return demand
# ----- EXTERNAL ENERGY MARKETS
def download_energidata(dataset_name, start_date, end_date, sort_val, filter_area):
    """Download a dataset from energidataservice.dk and return its records as
    a flat DataFrame (empty DataFrame if the response has no records)."""
    base_url = 'https://api.energidataservice.dk/dataset/%s?start=%s&end=%s&%s' % (
        dataset_name, start_date, end_date, sort_val)
    # append the price-area filter only when one is given
    URL = base_url if filter_area == '' else base_url + '&' + filter_area
    response = requests.get(url=URL)
    result = response.json()
    records = result.get('records', [])
    return pd.json_normalize(records)
def pre_processing_energy_data():
    """Download and preprocess all external energy input data, saving each
    series to its CSV input file (paths from the parameters module).

    Covers: DK1 el spot prices, DK1 grid CO2 intensity, DK1 el demand,
    NG prices (2019 or 2022 dataset layout), DK NG demand, Skive district
    heating demand (estimated from outdoor temperature), and onshore wind /
    solar capacity factors.
    NOTE: some outputs are unused depending on the network configuration.
    Danish prices are downloaded in DKK and converted to EUR/MWh.
    """
    '''El spot prices DK1 - input DKK/MWh or EUR/MWh'''
    dataset_name = 'Elspotprices'
    sort_val = 'sort=HourDK%20asc'
    filter_area = r'filter={"PriceArea":"DK1"}'
    Elspotprices_data = download_energidata(dataset_name, p.start_date, p.end_date, sort_val, filter_area)
    Elspotprices = Elspotprices_data[['HourDK', 'SpotPrice' + p.currency]].copy()
    Elspotprices.rename(columns={'SpotPrice' + p.currency: 'SpotPrice ' + p.currency}, inplace=True)
    Elspotprices['HourDK'] = pd.to_datetime(Elspotprices['HourDK'], infer_datetime_format=True)
    Elspotprices.set_index('HourDK', inplace=True)
    Elspotprices.index.name = None
    Elspotprices.to_csv(p.El_price_input_file, sep=';')  # currency/MWh
    '''CO2 emission from El Grid DK1'''
    dataset_name = 'DeclarationEmissionHour'
    sort_val = 'sort=HourDK%20asc'
    filter_area = r'filter={"PriceArea":"DK1"}'
    CO2emis_data = download_energidata(dataset_name, p.start_date, p.end_date, sort_val, filter_area)  # g/kWh = kg/MWh
    CO2_emiss_El = CO2emis_data[['HourDK', 'CO2PerkWh']].copy()
    CO2_emiss_El['CO2PerkWh'] = CO2_emiss_El['CO2PerkWh'] / 1000  # t/MWh
    CO2_emiss_El.rename(columns={'CO2PerkWh': 'CO2PerMWh'}, inplace=True)
    CO2_emiss_El['HourDK'] = pd.to_datetime(CO2_emiss_El['HourDK'], infer_datetime_format=True)
    CO2_emiss_El.set_index('HourDK', inplace=True)
    CO2_emiss_El.index.name = None
    CO2_emiss_El.to_csv(p.CO2emis_input_file, sep=';')  # kg/MWh
    '''El Demand DK1'''
    # source https://data.open-power-system-data.org/time_series/
    El_demand_DK1 = pd.read_csv('data/time_series_60min_singleindex_filtered_DK1_2019.csv', index_col=0,
                                usecols=['cet_cest_timestamp', 'DK_1_load_actual_entsoe_transparency'])
    El_demand_DK1.rename(columns={'DK_1_load_actual_entsoe_transparency': 'DK_1_load_actual_entsoe_transparency MWh'},
                         inplace=True)
    El_demand_DK1 = El_demand_DK1.set_axis(p.hours_in_period)
    El_demand_DK1 = El_demand_DK1  # NOTE(review): no-op assignment, kept as-is
    El_demand_DK1.to_csv(p.El_external_demand_input_file, sep=';')  # MWh/h
    # NG prices depending on the year
    ''' NG prices prices in DKK/kWh or EUR/kWH'''
    if p.En_price_year == 2019:
        # due to different structure of Energinet dataset for the year 2019 and 2022
        data_folder = p.NG_price_data_folder  # prices in DKK/kWh or EUR/kWH
        name_files = os.listdir(data_folder)
        NG_price_year = pd.DataFrame()
        NG_price_col_name = 'Neutral gas price ' + 'DKK' + '/kWh'
        NG_price_col_name_new = 'Neutral gas price ' + 'EUR' + '/MWh'
        for name in name_files:
            # print(name)
            df_temp = pd.read_csv(os.path.join(data_folder, name),
                                  skiprows=[0, 1, 2], sep=';', index_col=0,
                                  usecols=['Delivery date', NG_price_col_name])
            df_temp.index = pd.to_datetime(df_temp.index, dayfirst=True).strftime("%Y-%m-%dT%H:%M:%S+000")
            if df_temp[NG_price_col_name].dtype != 'float64':
                # some files use decimal commas -> convert to float
                df_temp[NG_price_col_name] = df_temp[NG_price_col_name].str.replace(',', '.').astype(float)
            NG_price_year = pd.concat([NG_price_year, df_temp])
        NG_price_year = NG_price_year.sort_index()
        NG_price_year[NG_price_col_name] = NG_price_year[NG_price_col_name] * 1000 / p.DKK_Euro  # converts to MwH and Euro
        NG_price_year.rename(columns={NG_price_col_name: NG_price_col_name_new}, inplace=True)
        NG_price_year.index.rename('HourDK', inplace=True)
        NG_price_year.index.name = None
        # Up-sampling to hour resolution
        NG_price_year.index = pd.to_datetime(NG_price_year.index)
        NG_price_year = NG_price_year.asfreq('H', method='ffill')
        # add last 23h (ffill of the last daily value to complete the year)
        last_rows_time = p.hours_in_period[-23:len(p.hours_in_period)]
        last_rows_val = np.ones(len(last_rows_time)) * NG_price_year.iloc[-1, 0]
        last_rows = pd.DataFrame({'Delivery date': last_rows_time, NG_price_col_name_new: last_rows_val})
        last_rows = last_rows.set_index('Delivery date')
        last_rows.index = pd.to_datetime(last_rows.index)
        NG_price_year = pd.concat([NG_price_year, last_rows])
        NG_price_year.to_csv(p.NG_price_year_input_file, sep=';')  # €/MWh
    elif p.En_price_year == 2022:
        # due to different structure of Energinet dataset for the year 2019 and 2022
        dataset_name = 'GasMonthlyNeutralPrice'
        sort_val = 'sort=Month%20ASC'
        filter_area = ''
        NG_price_year = download_energidata(dataset_name, p.start_date, p.end_date, sort_val, filter_area)
        NG_price_col_name = 'Neutral gas price ' + 'EUR' + '/MWh'
        NG_price_year.rename(columns={'MonthlyNeutralGasPriceDKK_kWh': NG_price_col_name}, inplace=True)
        NG_price_year.rename(columns={'Month': 'HourDK'}, inplace=True)
        NG_price_year['HourDK'] = pd.to_datetime(NG_price_year['HourDK'])
        NG_price_year['HourDK'] = pd.to_datetime(NG_price_year['HourDK'].dt.strftime("%Y-%m-%d %H:%M:%S+00:00"))
        NG_price_year.set_index('HourDK', inplace=True)
        NG_price_year[NG_price_col_name] = NG_price_year[NG_price_col_name] * 1000 / p.DKK_Euro  # coversion to €/MWh
        # append one row at the period end so asfreq can forward-fill December
        last_rows3 = pd.DataFrame(
            {'HourDK': p.hours_in_period[-1:len(p.hours_in_period)], NG_price_col_name: NG_price_year.iloc[-1, 0]})
        last_rows3.set_index('HourDK', inplace=True)
        NG_price_year = pd.concat([NG_price_year, last_rows3])
        NG_price_year = NG_price_year.asfreq('H', method='ffill')
        NG_price_year.to_csv(p.NG_price_year_input_file, sep=';')  # €/MWh
    ''' NG Demand (Consumption) DK '''
    # source: https://www.energidataservice.dk/tso-gas/Gasflow
    dataset_name = 'Gasflow'
    sort_val = 'sort=GasDay'
    filter_area = ''
    NG_demand_DK_data = download_energidata(dataset_name, p.start_date, p.end_date, sort_val, filter_area)
    NG_demand_DK = NG_demand_DK_data[['GasDay', 'KWhToDenmark']].copy()
    NG_demand_DK['KWhToDenmark'] = NG_demand_DK['KWhToDenmark'] / -1000  # kWh-> MWh
    NG_demand_DK.rename(columns={'KWhToDenmark': 'NG Demand DK MWh'}, inplace=True)
    NG_demand_DK['GasDay'] = pd.to_datetime(NG_demand_DK['GasDay'], infer_datetime_format=True)
    NG_demand_DK.set_index('GasDay', inplace=True)
    NG_demand_DK = NG_demand_DK.asfreq('H', method='ffill')
    NG_demand_DK.rename_axis(index='HourDK', inplace=True)
    # add last 23h
    last_rows_time2 = p.hours_in_period[-23:len(p.hours_in_period)]
    last_rows_val2 = np.ones(len(last_rows_time2)) * NG_demand_DK.iloc[-1, 0]
    last_rows2 = pd.DataFrame({'HourDK': last_rows_time2, 'NG Demand DK MWh': last_rows_val2})
    last_rows2 = last_rows2.set_index('HourDK')
    last_rows2.index = pd.to_datetime(last_rows2.index)
    NG_demand_DK = pd.concat([NG_demand_DK, last_rows2])
    # daily gas flow spread evenly over the 24 hours of each day
    NG_demand_DK['NG Demand DK MWh'] = NG_demand_DK['NG Demand DK MWh'] / 24
    NG_demand_DK = NG_demand_DK.set_index(p.hours_in_period)
    NG_demand_DK.to_csv(p.NG_demand_input_file, sep=';')  # MWh/h
    '''District heating data'''
    # Download weather data near Skive (Mejrup)
    # https://www.dmi.dk/friedata/observationer/
    data_folder = p.DH_data_folder
    name_files = os.listdir(data_folder)
    DH_Skive = pd.DataFrame()
    for name in name_files:
        df_temp_2 = pd.read_csv(os.path.join(data_folder, name), sep=';', usecols=['DateTime', 'Middeltemperatur'])
        DH_Skive = pd.concat([DH_Skive, df_temp_2])
        # print(name)
    DH_Skive = DH_Skive.drop_duplicates(subset='DateTime', keep='first')
    DH_Skive = DH_Skive.sort_values(by=['DateTime'], ascending=True)
    DH_Skive['DateTime'] = pd.to_datetime(DH_Skive['DateTime'])
    DH_Skive['DateTime'] = pd.to_datetime(DH_Skive['DateTime'].dt.strftime("%Y-%m-%d %H:%M:%S+00:00"))
    hours_in_2019 = pd.date_range('2019-01-01T00:00' + 'Z', '2020-01-01T00:00' + 'Z', freq='H')
    hours_in_2019 = hours_in_2019.drop(hours_in_2019[-1])
    DH_Skive = DH_Skive.set_index("DateTime").reindex(hours_in_2019)
    DH_Skive_Capacity = 59  # MW
    # source: https://ens.dk/sites/ens.dk/files/Statistik/denmarks_heat_supply_2020_eng.pdf
    DH_Tamb_min = -15  # minimum outdoor temp --> maximum Capacity Factor
    DH_Tamb_max = 18  # maximum outdoor temp--> capacity Factor = 0
    # linear capacity factor between the two temperature bounds, clipped at 0
    CF_DH = (DH_Tamb_max - DH_Skive['Middeltemperatur'].values) / (DH_Tamb_max - DH_Tamb_min)
    CF_DH[CF_DH < 0] = 0
    DH_Skive['Capacity Factor DH'] = CF_DH
    # adjust for base load in summer months due to sanitary water
    # assumption: mean heat load in January/July = 6 (from Aarhus data).
    DH_CFmean_Jan = np.mean(DH_Skive.loc['2019-01', 'Capacity Factor DH'])
    DH_CFbase_load = DH_CFmean_Jan / 4
    DH_Skive['Capacity Factor DH'] = DH_Skive['Capacity Factor DH'] + DH_CFbase_load
    DH_Skive['DH demand MWh'] = DH_Skive['Capacity Factor DH'] * DH_Skive_Capacity  # estimated demand for DH in Skive municipality
    DH_Skive.to_csv(p.DH_external_demand_input_file, sep=';')  # MWh/h
    DH_Skive = DH_Skive.set_axis(p.hours_in_period)
    '''Onshore Wind Capacity Factor DK1'''
    # from 2017
    hours_in_2017 = pd.date_range('2017-01-01T00:00Z', '2017-12-31T23:00Z', freq='H')
    df_onshorewind = pd.read_csv('data/onshore_wind_1979-2017.csv', sep=';', index_col=0)
    df_onshorewind.index = pd.to_datetime(df_onshorewind.index)
    CF_wind = pd.DataFrame(df_onshorewind['DNK'][[hour.strftime("%Y-%m-%d %H:%M:%S+00:00") for hour in hours_in_2017]])
    CF_wind.rename(columns={'DNK': 'Capacity Factor Wind Onshore DK1'}, inplace=True)
    CF_wind = CF_wind.set_axis(p.hours_in_period)
    # NOTE(review): original comment said kg/MWh; values are capacity factors (-)
    CF_wind.to_csv(p.CF_wind_input_file, sep=';')
    '''Solar Capacity Factor DK'''
    # add solar PV generator
    df_solar = pd.read_csv('data/pv_optimal.csv', sep=';', index_col=0)
    df_solar.index = pd.to_datetime(df_solar.index)
    CF_solar = pd.DataFrame(df_solar['DNK'][[hour.strftime("%Y-%m-%dT%H:%M:%S+00:00") for hour in hours_in_2017]])
    CF_solar.rename(columns={'DNK': 'Capacity Factor Solar DK'}, inplace=True)
    CF_solar = CF_solar.set_axis(p.hours_in_period)
    # NOTE(review): original comment said kg/MWh; values are capacity factors (-)
    CF_solar.to_csv(p.CF_solar_input_file, sep=';')
    return
def build_electricity_grid_price_w_tariff(Elspotprices):
    """Build the electricity grid purchase price (spot + all tariffs) and the
    grid sell price time series.

    Note: the CO2 tax is added separately.
    Tariff system valid for customers connected to the 60 kV grid via a
    60/10 kV transformer; tariff system in place from 2025.
    Returns (el_grid_price, el_grid_sell_price); sell prices are negative.
    """
    # for tariff reference check the parameter file
    # Grid tariffs depend on hour of the day, day of the week and season:
    # high tariff in summer + weekdays + 06:00 to 24:00
    # high tariff in winter + weekends + 06:00 to 24:00
    # high tariff in winter + weekdays + 21:00 to 24:00
    # peak tariff in winter + weekdays + 06:00 to 21:00
    # low tariff the rest of the time
    summer_start = str(p.En_price_year) + '-04-01T00:00'  # '2019-04-01 00:00:00+00:00' # Monday
    summer_end = str(p.En_price_year) + '-10-01T00:00'  # '2019-10-01 00:00:00+00:00'
    winter_1 = pd.date_range(p.start_date + 'Z', summer_start + 'Z', freq='H')
    winter_1 = winter_1.drop(winter_1[-1])
    winter_2 = pd.date_range(summer_end + 'Z', p.end_date + 'Z', freq='H')
    winter_2 = winter_2.drop(winter_2[-1])
    winter = winter_1.append(winter_2)
    summer = pd.date_range(summer_start + 'Z', summer_end + 'Z', freq='H')
    summer = summer.drop(summer[-1])
    peak_weekday = range(1, 6)  # ISO weekdays Mon(1)..Fri(5)
    peak_hours = range(7, 21 + 1)
    high_hours_weekday_winter = range(22, 24 + 1)
    high_hours_weekend_winter = range(7, 24 + 1)
    high_hours_weekday_summer = range(7, 24 + 1)
    # base price: spot + transmission tariff + system tariff + electricity tax
    el_grid_price = Elspotprices + p.el_transmission_tariff + p.el_system_tariff + p.el_afgift
    el_grid_sell_price = -Elspotprices + p.el_tariff_sell  # NOTE selling prices are negative in the model
    # assign net tariff to each hour.
    # BUG FIX: use isoweekday() (Mon=1..Sun=7) instead of weekday() (Mon=0..Sun=6).
    # With weekday(), `day in [6, 7]` matched only Sundays (7 never occurs),
    # Saturdays were priced as weekdays, and Mondays (0) matched neither branch,
    # silently reusing the previous hour's net_tariff.
    for h in winter:
        day = h.isoweekday()
        hour = h.hour
        if day in [6, 7]:  # weekends (Sat, Sun)
            if hour in high_hours_weekend_winter:
                net_tariff = p.el_net_tariff_high
            else:
                net_tariff = p.el_net_tariff_low
        elif day in peak_weekday:  # weekdays (Mon-Fri)
            if hour in peak_hours:
                net_tariff = p.el_net_tariff_peak
            elif hour in high_hours_weekday_winter:
                net_tariff = p.el_net_tariff_high
            else:
                net_tariff = p.el_net_tariff_low
        el_grid_price.loc[h, :] = el_grid_price.loc[h, :] + net_tariff
    for h in summer:
        day = h.isoweekday()
        hour = h.hour
        if day in [6, 7]:  # weekends
            net_tariff = p.el_net_tariff_low
        elif day in peak_weekday:  # weekdays
            if hour in high_hours_weekday_summer:
                net_tariff = p.el_net_tariff_high
            else:
                net_tariff = p.el_net_tariff_low
        el_grid_price.loc[h, :] = el_grid_price.loc[h, :] + net_tariff
    return el_grid_price, el_grid_sell_price
# ---- Pre-processing for PyPSA network
def n_flags_to_preprocess(n_flags_OK, flh_H2, f_max_MeOH_y_demand):
    """Zero the H2 full-load-hour target and/or the MeOH demand fraction when
    the corresponding plant is disabled in ``n_flags_OK``."""
    flh_H2_OK = flh_H2 if n_flags_OK['electrolyzer'] else 0
    f_max_MeOH_y_demand_OK = f_max_MeOH_y_demand if n_flags_OK['meoh'] else 0
    return flh_H2_OK, f_max_MeOH_y_demand_OK
def pre_processing_all_inputs(flh_H2, f_max_MeOH_y_demand, CO2_cost, el_DK1_sale_el_RFNBO, preprocess_flag):
    """Orchestrate all preprocessing and assemble the inputs dictionary for
    the model.

    flh_H2 : target full load hours for the H2 plant
    f_max_MeOH_y_demand : fraction (0-1) of the maximum yearly MeOH demand
    CO2_cost : CO2 tax level passed through to the market-price functions
    el_DK1_sale_el_RFNBO : ratio of external DK1 demand to the electricity
        needed at GLS for RFNBO production (negative values are clamped to 0)
    preprocess_flag : if True, download/preprocess all data and rewrite the
        CSV input files first; otherwise read the saved files.
    """
    if preprocess_flag:
        pre_processing_energy_data()  # download + preprocessing + save to CSV
        balance_bioCH4_MeOH_demand_GL()  # Read CSV GL + create CSV with bioCH4 and MeOH max demands
    # load the inputs from the CSV files
    GL_inputs, GL_eff, Elspotprices, CO2_emiss_El, bioCH4_prod, CF_wind, CF_solar, NG_price_year, Methanol_demand_max, NG_demand_DK, El_demand_DK1, DH_external_demand = load_input_data()
    ''' create H2 grid demand'''
    # H2_input_demand = preprocess_H2_grid_demand(flh_H2, np.abs(GL_inputs.at['H2', 'GreenHyScale']), NG_demand_DK,
    # p.H2_one_delivery)
    H2_input_demand = preprocess_H2_grid_demand(flh_H2, p.H2_output, NG_demand_DK,
                                                p.H2_one_delivery)
    ''' create Methanol demand'''
    Methanol_input_demand = preprocess_methanol_demand(Methanol_demand_max, f_max_MeOH_y_demand, p.MeOH_one_delivery)
    # Estimate the yearly DK1 electricity demand available as RE sale from GLS,
    # in proportion to the electricity GLS needs for producing RFNBOs.
    # electricity needed to cover the H2 demand via the electrolyzer
    El_d_H2 = np.abs(
        H2_input_demand.values.sum() / GL_eff.at['H2', 'GreenHyScale'])
    # electricity needed for the MeOH demand: H2 feed (incl. compression and
    # electrolysis) plus CO2 compression, per MWh of methanol
    El_d_MeOH = np.abs(Methanol_input_demand.values.sum() * (
            (GL_eff.at['H2', 'Methanol plant'] / GL_eff.at['Methanol', 'Methanol plant']) * (
            p.el_comp_H2 + 1 / GL_eff.at['H2', 'GreenHyScale']) + p.el_comp_CO2 / GL_eff.at[
                'Methanol', 'Methanol plant']))
    El_d_y_guess_GLS = El_d_H2 + El_d_MeOH  # MWh el for H2 and MeOH
    # clamp negative ratios: no external DK1 demand
    if el_DK1_sale_el_RFNBO < 0:
        el_DK1_sale_el_RFNBO = 0
        print('Warning: ElDK1 demand set = 0')
    El_d_y_DK1 = El_d_y_guess_GLS * el_DK1_sale_el_RFNBO
    # distribute the yearly amount over the DK1 hourly demand profile
    El_demand_DK1.iloc[:, 0] = El_demand_DK1.iloc[:, 0] * (
            El_d_y_DK1 / len(p.hours_in_period)) / El_demand_DK1.values.mean()
    inputs_dict = {'GL_inputs': GL_inputs,
                   'GL_eff': GL_eff,
                   'Elspotprices': Elspotprices,
                   'CO2_emiss_El': CO2_emiss_El,
                   'bioCH4_demand': bioCH4_prod,
                   'CF_wind': CF_wind,
                   'CF_solar': CF_solar,
                   'NG_price_year': NG_price_year,
                   'Methanol_input_demand': Methanol_input_demand,
                   'NG_demand_DK': NG_demand_DK,
                   'El_demand_DK1': El_demand_DK1,
                   'DH_external_demand': DH_external_demand,
                   'H2_input_demand': H2_input_demand,
                   'CO2 cost': CO2_cost,
                   'el_DK1_sale_el_RFNBO': el_DK1_sale_el_RFNBO,
                   }
    return inputs_dict
def en_market_prices_w_CO2(inputs_dict, tech_costs):
    """Return market prices of energy commodities adjusted for the CO2 tax
    difference versus the reference year, in the input currency.

    Selling prices are negative by model convention.
    """
    CO2_cost = inputs_dict['CO2 cost']
    CO2_emiss_El = inputs_dict['CO2_emiss_El']
    NG_price_year = inputs_dict['NG_price_year']
    Elspotprices = inputs_dict['Elspotprices']
    GL_eff = inputs_dict['GL_eff']
    el_buy_price, el_sell_price = build_electricity_grid_price_w_tariff(Elspotprices)
    # CO2-tax delta with respect to the reference year, applied on top of the
    # grid tariffs already embedded in el_buy_price
    d_CO2_tax = CO2_cost - p.CO2_cost_ref_year
    mk_el_grid_price = el_buy_price + np.array(CO2_emiss_El) * d_CO2_tax  # currency / MWh
    # NG grid price: tax delta scaled by the gas CO2 intensity
    mk_NG_grid_price = NG_price_year + tech_costs.at['gas', 'CO2 intensity'] * d_CO2_tax  # currency / MWH
    # District heating sale price (negative: revenue)
    DH_price = p.ref_df.copy()
    DH_price.iloc[:, 0] = -p.DH_price
    return {'el_grid_price': np.squeeze(mk_el_grid_price),
            'el_grid_sell_price': np.squeeze(el_sell_price),
            'NG_grid_price': np.squeeze(mk_NG_grid_price),
            'DH_price': np.squeeze(DH_price)
            }
# -----CONSTRAINTS on GRID ELECTRICITY RFNBOs---------------
def p_max_pu_EU_renewable_el(Elspotprices, CO2_emiss_El):
    """Hourly 0/1 availability of grid power for H2 production under EU rules:
    1) spot price below the limit, 2) grid emission intensity below the limit.

    Returns two single-column DataFrames indexed by p.hours_in_period.
    """

    def _availability(series, limit, col):
        # 1 in the hours where the criterion is satisfied, 0 elsewhere
        flags = pd.DataFrame(data=0, index=p.hours_in_period, columns=[col])
        flags.loc[series[series.values <= limit].index, col] = 1
        return flags

    p_max_pu_renew_el_price = _availability(Elspotprices, p.EU_renew_el_price_limit,
                                            'p_max_pu el price')
    p_max_pu_renew_em = _availability(CO2_emiss_El, p.EU_renew_el_emission_limit,
                                      'p_max_pu emiss limit')
    return p_max_pu_renew_el_price, p_max_pu_renew_em
def add_link_El_grid_to_H2(n, inputs_dict, tech_costs):
    """Add the link allowing grid electricity to feed H2 production.

    Availability follows the EU RFNBO legislation (price criterion), which
    effectively limits grid electricity use after 2030 without installation of
    additional renewables.
    """
    en_market_prices = en_market_prices_w_CO2(inputs_dict, tech_costs)
    # hourly availability of grid power for H2 (price criterion only)
    price_avail, _em_avail = p_max_pu_EU_renewable_el(inputs_dict['Elspotprices'],
                                                      inputs_dict['CO2_emiss_El'])
    availability = p.ref_df.copy()
    availability.iloc[:, 0] = price_avail.iloc[:, 0]
    # zero capex: RE peak sold is expected to exceed peak consumption from grid
    n.add('Link',
          'DK1_to_El3',
          bus0="ElDK1 bus",
          bus1="El3 bus",
          efficiency=1,
          p_nom_extendable=True,
          p_max_pu=availability.iloc[:, 0],
          capital_cost=0,
          marginal_cost=en_market_prices['el_grid_price'])
    return n
# ------- BUILD PYPSA NETWORK-------------
def network_dependencies(n_flags):
    """Return a corrected copy of the n_flags dict where every option that is
    missing one of its required dependencies is disabled."""
    n_flags_OK = n_flags.copy()
    # SkiveBiogas, renewables, electrolyzer and symbiosis_net have no
    # dependencies: already copied unchanged.
    # MeOH production requires electrolyzer, renewables, biogas and symbiosis net
    n_flags_OK['meoh'] = all(n_flags[k] for k in
                             ('meoh', 'electrolyzer', 'renewables',
                              'SkiveBiogas', 'symbiosis_net'))
    # central heating requires the symbiosis net
    n_flags_OK['central_heat'] = bool(n_flags['central_heat'] and n_flags['symbiosis_net'])
    # DH requires the symbiosis net (option for heat recovery from MeOH)
    n_flags_OK['DH'] = bool(n_flags['DH'] and n_flags['symbiosis_net'])
    return n_flags_OK
def override_components_mlinks():
    """Build the component-attribute override required by PyPSA to let a Link
    connect multiple buses (multilink).

    Buses 2..6 are added (7 connections in total); extend the ordinal map
    below to support more.
    """
    attrs = pypsa.descriptors.Dict(
        {name: frame.copy() for name, frame in pypsa.components.component_attrs.items()})
    link_attrs = attrs["Link"]
    ordinals = {2: "2nd", 3: "3rd", 4: "4th", 5: "5th", 6: "6th"}
    # optional extra input buses
    for i, nth in ordinals.items():
        link_attrs.loc["bus%d" % i] = ["string", np.nan, np.nan,
                                       "%s bus" % nth, "Input (optional)"]
    # per-bus efficiencies (default 1.0)
    for i, nth in ordinals.items():
        link_attrs.loc["efficiency%d" % i] = ["static or series", "per unit", 1.,
                                              "%s bus efficiency" % nth,
                                              "Input (optional)"]
    # per-bus power outputs
    for i, nth in ordinals.items():
        link_attrs.loc["p%d" % i] = ["series", "MW", 0.,
                                     "%s bus output" % nth, "Output"]
    return attrs
def add_local_heat_connections(n, heat_bus_list, GL_eff, plant_name, n_flags, tech_costs):
    """Create the local heat buses for one plant and wire them up.

    For every shared heat bus in heat_bus_list that the plant uses (non-NaN
    entry in GL_eff), a local bus '<bus>_<plant_name>' is added. Heat produced
    by the plant can always be rejected to the ambient for free (cooling is
    included in the plant cost). If the symbiosis net is enabled, a
    bidirectional heat-exchanger link to the shared bus is added with its
    capital cost.

    Parameters
    ----------
    n : pypsa.Network
    heat_bus_list : list of shared heat-bus names
    GL_eff : DataFrame indexed by bus name with one column per plant;
        sign convention: negative = consumed by the plant, positive = produced
    plant_name : column of GL_eff identifying this plant
    n_flags : dict of configuration flags ('symbiosis_net' used here)
    tech_costs : cost table (('DH heat exchanger', 'fixed') used here)

    Returns
    -------
    (n, new_buses) where new_buses[i] is the local bus created for
    heat_bus_list[i], or '' if the plant does not use that heat level.
    """
    # one slot per heat bus (previously hard-coded to 3, which raised
    # IndexError for longer heat_bus_list inputs)
    new_buses = [''] * len(heat_bus_list)
    for i, b in enumerate(heat_bus_list):
        eff = GL_eff.loc[b, plant_name]
        if math.isnan(eff):
            continue  # plant does not exchange heat at this level
        # negative is consumed by the plant, positive is produced by the plant
        sign_eff = np.sign(eff)
        # add local bus
        bus_name = b + '_' + plant_name
        new_buses[i] = bus_name
        n.add('Bus', bus_name, carrier='Heat', unit='MW')
        # heat produced can be rejected to ambient (cooling included in plant cost)
        if sign_eff > 0:
            link_name = b + '_' + plant_name + '_amb'
            n.add('Link',
                  link_name,
                  bus0=bus_name,
                  bus1='Heat amb',
                  efficiency=1,
                  p_nom_extendable=True)
        # symbiosis net available: bidirectional connection to the shared heat
        # grid with heat-exchanger cost. The sign_eff != 0 guard prevents the
        # previous bug where a zero efficiency reused bus0/bus1 from an
        # earlier iteration (or hit an unbound name on the first one).
        if n_flags['symbiosis_net'] and sign_eff != 0:
            if b not in n.buses.index.values:
                n.add('Bus', b, carrier='Heat', unit='MW')
            link_name = b + '_' + plant_name
            bus0, bus1 = (bus_name, b) if sign_eff > 0 else (b, bus_name)
            n.add('Link', link_name,
                  bus0=bus0,
                  bus1=bus1,
                  efficiency=1,
                  p_min_pu=-1,
                  p_nom_extendable=True,
                  capital_cost=tech_costs.at['DH heat exchanger', "fixed"] * p.currency_multiplier)
    return n, new_buses
def add_el_conections(n, local_EL_bus, en_market_prices, n_flags, tech_costs):
    """Add the plant's electricity connections: a link from the external DK1
    grid and, when the symbiosis net is active, a link from the internal
    El2 bus."""
    # local electricity bus for the plant
    n.add('Bus', local_EL_bus, carrier='AC', unit='MW')
    # direct grid connection: pays the CO2-adjusted grid price plus the
    # grid-connection capital cost
    n.add("Link",
          'DK1_to_' + local_EL_bus,
          bus0="ElDK1 bus",
          bus1=local_EL_bus,
          efficiency=1,
          marginal_cost=en_market_prices['el_grid_price'],
          capital_cost=tech_costs.at[
                           'electricity grid connection', 'fixed'] * p.currency_multiplier,
          p_nom_extendable=True)
    # internal connection via the symbiosis net (free of grid charges)
    if n_flags['symbiosis_net']:
        if 'El2 bus' not in n.buses.index.values:
            n.add('Bus', 'El2 bus', carrier='AC', unit='MW')
        n.add("Link",
              'El2_to_' + local_EL_bus,
              bus0="El2 bus",
              bus1=local_EL_bus,
              efficiency=1,
              p_nom_extendable=True)
    return n
def add_local_boilers(n, local_EL_bus, local_heat_bus, plant_name, tech_costs, en_market_prices):
    """Add a local electric boiler and a local NG boiler for a plant that
    requires heating but is not connected to the symbiosis net.

    Both boilers feed local_heat_bus; the NG boiler draws from the shared 'NG'
    bus and the El boiler from local_EL_bus (electricity cost is paid on the
    link feeding that bus).

    Bug fix: the El boiler is now named per plant ('El boiler' + plant_name),
    matching the NG boiler naming; the previous fixed name 'El boiler'
    collided when this function was called for more than one plant.
    """
    # additional NG boiler: marginal cost = CO2-adjusted NG price + VOM
    n.add("Link",
          "NG boiler" + plant_name,
          bus0="NG",
          bus1=local_heat_bus,
          efficiency=tech_costs.at['central gas boiler', 'efficiency'],
          p_nom_extendable=True,
          capital_cost=tech_costs.at['central gas boiler', 'fixed'] * p.currency_multiplier,
          marginal_cost=en_market_prices['NG_grid_price'] +
                        tech_costs.at['gas boiler steam', 'VOM'] * p.currency_multiplier)
    # additional El boiler
    n.add('Link',
          'El boiler' + plant_name,
          bus0=local_EL_bus,
          bus1=local_heat_bus,
          efficiency=tech_costs.at['electric boiler steam', 'efficiency'],
          capital_cost=tech_costs.at['electric boiler steam', 'fixed'] * p.currency_multiplier,
          marginal_cost=tech_costs.at['electric boiler steam', 'VOM'] * p.currency_multiplier,
          p_nom_extendable=True)
    return n
def add_external_grids(network, inputs_dict, n_flags):
    """Build the external grids and loads according to the n_flags dict.

    Adds (when missing) the DK1 electricity bus with its aggregate load and
    generator, the ambient heat sink, the local NG source and — when
    n_flags['DH'] is set — the external district-heating grid.
    This function does NOT allocate capital or marginal costs to any
    component.

    Returns
    -------
    (network, new_components) where new_components maps component type
    ('links', 'generators', 'loads', 'stores', 'buses') to the names added
    by this call.
    """
    # base network structure, independent of configuration; these components
    # do not have allocated capital costs
    bus_list = ['ElDK1 bus', 'Heat amb', 'NG']
    carrier_list = ['AC', 'Heat', 'gas']
    unit_list = ['MW', 'MW', 'MW']
    add_buses = list(set(bus_list) - set(network.buses.index.values))
    idx_add = [bus_list.index(i) for i in add_buses]
    # snapshot of the network before adding components
    n0_links = network.links.index.values
    n0_generators = network.generators.index.values
    n0_loads = network.loads.index.values
    n0_stores = network.stores.index.values
    n0_buses = network.buses.index.values
    if add_buses:
        network.madd('Bus', add_buses, carrier=[carrier_list[i] for i in idx_add], unit=[unit_list[i] for i in idx_add])
    # -----------Electricity Grid and connection DK1-----------
    # Load simulating the DK1 grid load
    El_demand_DK1 = inputs_dict['El_demand_DK1']
    network.add("Load",
                "Grid Load",
                bus="ElDK1 bus",
                p_set=El_demand_DK1.iloc[:, 0])
    # generator simulating all the generators in DK1
    network.add("Generator",
                "Grid gen",
                bus="ElDK1 bus",
                p_nom_extendable=True)
    # ----------ambient heat sink --------------------
    # unbounded store absorbing rejected waste heat
    network.add("Store",
                "Heat amb",
                bus="Heat amb",
                e_nom_extendable=True,
                e_nom_min=0,
                e_nom_max=float("inf"),
                e_cyclic=False)
    # ----------NG source in local distribution------
    network.add("Generator",
                "NG grid",
                bus="NG",
                p_nom_extendable=True)
    # --------------District heating-------------------
    if n_flags['DH']:
        DH_external_demand = inputs_dict['DH_external_demand']
        network.add('Bus', 'DH grid', carrier='Heat', unit='MW')
        # external DH grid demand and balancing generator
        network.add('Load',
                    'DH load',
                    bus='DH grid',
                    p_set=DH_external_demand['DH demand MWh'])
        network.add("Generator",
                    "DH gen",
                    bus="DH grid",
                    p_nom_extendable=True)
    # components added by this call (delta vs the snapshot above)
    new_links = list(set(network.links.index.values) - set(n0_links))
    new_generators = list(set(network.generators.index.values) - set(n0_generators))
    new_loads = list(set(network.loads.index.values) - set(n0_loads))
    new_stores = list(set(network.stores.index.values) - set(n0_stores))
    new_buses = list(set(network.buses.index.values) - set(n0_buses))
    new_components = {'links': new_links,
                      'generators': new_generators,
                      'loads': new_loads,
                      'stores': new_stores,
                      # bug fix: was 'buses': bus_list, which ignored the
                      # computed delta (missing 'DH grid', and reporting
                      # pre-existing buses as new)
                      'buses': new_buses}
    return network, new_components
def add_biogas(n, n_flags, inputs_dict, tech_costs):
"""fucntion that add the biogas plant to the network and all the dependecies if not preset in the network yet"""
bioCH4_demand = inputs_dict['bioCH4_demand']
GL_eff = inputs_dict['GL_eff']
GL_inputs = inputs_dict['GL_inputs']
en_market_prices = en_market_prices_w_CO2(inputs_dict, tech_costs)
# take a status of the network before adding componets
n0_links = n.links.index.values
n0_generators = n.generators.index.values
n0_loads = n.loads.index.values
n0_stores = n.stores.index.values
n0_buses = n.buses.index.values
bus_list = ['Biomass', 'Digest DM', 'ElDK1 bus', 'bioCH4', 'NG', 'CO2 sep', 'CO2 pure atm']
carrier_list = ['Biomass', 'Digest DM', 'AC', 'gas', 'gas', 'CO2 pure', 'CO2 pure']
unit_list = ['MW', 't/h', 'MW', 'MW', 'MW', 't/h', 't/h']
if n_flags['SkiveBiogas']:
# add required buses if not in the network
add_buses = list(set(bus_list) - set(n.buses.index.values))
idx_add = [bus_list.index(i) for i in add_buses]
if add_buses:
n.madd('Bus', add_buses, carrier=[carrier_list[i] for i in idx_add], unit=[unit_list[i] for i in idx_add])