Dev/miso #18
Open
wants to merge 9 commits into main
1 change: 1 addition & 0 deletions .gitignore
@@ -5,6 +5,7 @@ revise/
*.log
*.lp
*.mps
*.png
*.ipynb
*notebook-checkpoint
.DS_Store
5 changes: 4 additions & 1 deletion TimeSeriesHelper.jl/Project.toml
@@ -6,13 +6,16 @@ version = "0.1.0"
[deps]
CSV = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b"
DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
HTTP = "cd3eb016-35fb-5094-929b-558a96fad6f3"
HiddenMarkovModels = "84ca31d5-effc-45e0-bfda-5a68cd981f47"
JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"

[extras]
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[targets]
test = ["Test"]
test = ["Test"]
7 changes: 6 additions & 1 deletion TimeSeriesHelper.jl/src/TimeSeriesHelper.jl
@@ -6,9 +6,14 @@ using LinearAlgebra
using Random
using CSV
using DataFrames
using Dates
using JSON
using HTTP

include("structs.jl")
include("read_table.jl")
include("read_table_pjm.jl")
include("read_table_miso.jl")
include("read_open_meteo.jl")
include("estimate.jl")

end # module TimeSeriesHelper
6 changes: 4 additions & 2 deletions TimeSeriesHelper.jl/src/estimate.jl
@@ -35,7 +35,8 @@ function build_markov_transition(
transition_matrix::Matrix{Float64}, T::Int
)::Vector{Matrix{Float64}}
transitions_matrix = Vector{Matrix{Float64}}(undef, T)
for t in 1:T
transitions_matrix[1] = sum(transition_matrix, dims=1) / size(transition_matrix, 1)
for t in 2:T
transitions_matrix[t] = transition_matrix
end
return transitions_matrix
@@ -94,7 +95,8 @@ function build_scenarios(
for w in 1:W
push!(temp[n], [])
for i in 1:I
push!(temp[n][w], samples_inflow[t, n, w, i])
# TODO: revisit sign handling; abs() keeps sampled inflows nonnegative
push!(temp[n][w], abs(samples_inflow[t, n, w, i]))
end
end
end
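A minimal sketch (illustrative numbers only) of what the new first-period entry in build_markov_transition computes: the column average of the transition matrix, which can be read as the initial state distribution under a uniform prior over starting states, while periods 2:T keep the matrix unchanged.

P = [0.9 0.1; 0.2 0.8]              # hypothetical 2-state transition matrix
p1 = sum(P, dims=1) / size(P, 1)    # 1×2 row [0.55 0.45]: average probability of landing in each state
# transitions_matrix[1] == p1; transitions_matrix[t] == P for t in 2:T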
33 changes: 33 additions & 0 deletions TimeSeriesHelper.jl/src/read_open_meteo.jl
@@ -0,0 +1,33 @@
"""
read_open_meteo_json(directory::String,
serie::String,
start::Int,
stop::Int
)

Returns a dictionary with keys being the node id and values being a vector of hourly values for the requested series (e.g. wind speed).
"""
function read_open_meteo_json(directory::String, serie::String, start::Int, stop::Int)
start_string = string(start)
stop_string = string(stop)
start_utc = start_string[1:4] * "-" * start_string[5:6] * "-" * start_string[7:8] * "T00:00"
stop_utc = stop_string[1:4] * "-" * stop_string[5:6] * "-" * stop_string[7:8] * "T23:00"

dict = Dict{String, Vector{Float64}}()
for file_path in readdir(directory)
if endswith(file_path, ".json")
json_path = joinpath(directory, file_path)
json = JSON.parse(String(read(json_path)))
times = json["hourly"]["time"]
data = json["hourly"][serie]
vec = [data[i] for i in 1:length(times) if times[i] >= start_utc && times[i] <= stop_utc]
key = file_path[1:end - 5]
if !haskey(dict, key)
dict[key] = vec
else
append!(dict[key], vec)
end
end
end
return dict
end
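A usage sketch for the new reader; the directory, series name, and dates mirror examples/local_miso.jl, while the node file name AECI.json (and hence the dictionary key "AECI") is an assumption about how the Open-Meteo exports are stored.

wind = read_open_meteo_json("C:\\Users\\thiag\\Documents\\Data\\", "wind_speed_10m", 20240810, 20240823)
# wind["AECI"] would be a Vector{Float64} of hourly wind speeds from 2024-08-10T00:00 through 2024-08-23T23:00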
62 changes: 62 additions & 0 deletions TimeSeriesHelper.jl/src/read_table_miso.jl
@@ -0,0 +1,62 @@
function _download_csv(url, filename)
response = HTTP.get(url)
open(filename, "w") do file
write(file, response.body)
end
end

"""
read_miso_csv(directory::String,
file_pattern::String,
start::Int,
stop::Int
)

Returns a dictionary with keys being the node id and values being a vector of prices.
"""
function read_miso_csv(directory::String, file_pattern::String, start::Int, stop::Int)
dict = Dict{String, Vector{Float64}}()
for i in start:stop
file_path = joinpath(directory, string(i) * file_pattern);
if !isfile(file_path)
url = "https://docs.misoenergy.org/marketreports/" * string(i) * file_pattern  # joinpath would insert '\\' on Windows and break the URL
task = @async _download_csv(url, file_path)
wait(task)
end
df = CSV.read(file_path, DataFrame)[4:end,:]
for row in eachrow(df)
if (row[3] != "LMP") continue end
vec = [parse(Float64, x) for x in values(row[4:27])]
if !haskey(dict, row[1])
dict[row[1]] = vec
else
append!(dict[row[1]], vec)
end
end
end
return dict
end

"""
read_miso_da_lmps(directory::String,
start::Int,
stop::Int
)

Returns a dictionary with keys being the node id and values being a vector of prices.
"""
function read_miso_da_lmps(directory::String, start::Int, stop::Int)
return read_miso_csv(directory, "_da_expost_lmp.csv", start, stop)
end

"""
read_miso_rt_lmps(directory::String,
start::Int,
stop::Int
)

Returns a dictionary with keys being the node id and values being a vector of prices.
"""
function read_miso_rt_lmps(directory::String, start::Int, stop::Int)
return read_miso_csv(directory, "_rt_lmp_final.csv", start, stop)
end
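A usage sketch mirroring examples/local_miso.jl; the cache directory and the node id below are assumptions.

da = read_miso_da_lmps("C:\\Users\\thiag\\Documents\\Data\\", 20240810, 20240824)
rt = read_miso_rt_lmps("C:\\Users\\thiag\\Documents\\Data\\", 20240810, 20240823)
# rt["AECI"] would hold 24 hourly real-time LMPs per date in the range; any CSV missing from the
# directory is first downloaded from docs.misoenergy.org/marketreports and cached there.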
@@ -41,19 +41,19 @@ function read_pjm_csv(path::String, column1::String, column2::String, column3::S
end

"""
read_da_hrl_lmps(path::String)
read_pjm_da_lmps(path::String)

Returns a pivot table of da_hrl_lmps using UTC date as rows, node id as columns and price as values.
"""
function read_da_hrl_lmps(path::String)
function read_pjm_da_lmps(path::String)
return read_pjm_csv(path, "datetime_beginning_utc", "pnode_name", "total_lmp_da")
end

"""
read_rt_hrl_lmps(path::String)
read_pjm_rt_lmps(path::String)

Returns a pivot table of rt_hrl_lmps using UTC date as rows, node id as columns and price as values.
"""
function read_rt_hrl_lmps(path::String)
function read_pjm_rt_lmps(path::String)
return read_pjm_csv(path, "datetime_beginning_utc", "pnode_name", "total_lmp_rt")
end
2 changes: 1 addition & 1 deletion cases/deterministic.json
@@ -10,7 +10,7 @@
"period_of_day_ahead_clear": 2,
"duration": 6,
"day_ahead_steps": 1,
"real_tume_steps": 1
"real_time_steps": 1
},
"data": {
"names": ["unit 1", "unit 2"],
2 changes: 1 addition & 1 deletion cases/stochastic.json
@@ -10,7 +10,7 @@
"period_of_day_ahead_clear": 2,
"duration": 6,
"day_ahead_steps": 2,
"real_tume_steps": 2
"real_time_steps": 2
},
"data": {
"names": ["unit 1", "unit 2"],
2 changes: 1 addition & 1 deletion cases/toy.json
@@ -10,7 +10,7 @@
"period_of_day_ahead_clear": 1,
"duration": 1,
"day_ahead_steps": 1,
"real_tume_steps": 1
"real_time_steps": 1
},
"data": {
"unit_to_bus": [1],
4 changes: 2 additions & 2 deletions examples/local.jl
@@ -8,10 +8,10 @@ solar1, solar2, solar3 = TimeSeriesHelper.read_generation_csv(
"C:\\Users\\thiag\\Documents\\Data\\solar_gen_cf_2022.csv"
)

rt1, rt2, rt3 = TimeSeriesHelper.read_rt_hrl_lmps(
rt1, rt2, rt3 = TimeSeriesHelper.read_pjm_rt_lmps(
"C:\\Users\\thiag\\Documents\\Data\\rt_hrl_lmps.csv"
)
da1, da2, da3 = TimeSeriesHelper.read_da_hrl_lmps(
da1, da2, da3 = TimeSeriesHelper.read_pjm_da_lmps(
"C:\\Users\\thiag\\Documents\\Data\\da_hrl_lmps.csv"
)

116 changes: 116 additions & 0 deletions examples/local_miso.jl
@@ -0,0 +1,116 @@
using OptimalEnergyBid
using TimeSeriesHelper
using HiGHS

directory = "C:\\Users\\thiag\\Documents\\Data\\";
start = 20240810;
stop = 20240823;

day_ahead = TimeSeriesHelper.read_miso_da_lmps(
directory, start, stop + 1
)

real_time = TimeSeriesHelper.read_miso_rt_lmps(
directory, start, stop
)

wind = TimeSeriesHelper.read_open_meteo_json(
directory, "wind_speed_10m", start, stop
)

nodes = ["AECI", "AEP"]

prices_real_time = Vector{Vector{Float64}}()
for i in 1:(stop - start + 1) * 24
push!(prices_real_time, [])
for node in nodes
push!(prices_real_time[i], real_time[node][i])
end
end

prices_day_ahead = Vector{Vector{Float64}}()
for i in 1:(stop - start + 2) * 24
push!(prices_day_ahead, [])
for node in nodes
push!(prices_day_ahead[i], day_ahead[node][i])
end
end

inflow = Vector{Vector{Float64}}()
for i in 1:(stop - start + 1) * 24
push!(inflow, [])
for key in keys(wind)
push!(inflow[i], wind[key][i])
end
end

history = TimeSeriesHelper.History()
history.prices_real_time = prices_real_time
history.prices_day_ahead = prices_day_ahead
history.inflow = inflow

h = TimeSeriesHelper.build_serial_history(history, 336, 24)

m, o = TimeSeriesHelper.estimate_hmm(h, 5)

matrix = TimeSeriesHelper.build_markov_transition(m, 48)

rt, da, inflow = TimeSeriesHelper.build_scenarios(o, 48, 24, 3, 1, 2, 2)

prb = OptimalEnergyBid.Problem()

numbers = prb.numbers
random = prb.random
data = prb.data
options = prb.options

numbers.periods_per_day = 24
numbers.first_period = 1
numbers.units = 2
numbers.buses = 2
numbers.duration = 48
numbers.real_time_steps = 5
numbers.day_ahead_steps = 5
numbers.period_of_day_ahead_bid = 12
numbers.period_of_day_ahead_clear = 20
# TODO
numbers.days = 2

random.prices_real_time = rt
random.prices_day_ahead = da
random.inflow = inflow
random.inflow_probability = [[[1/3 for k in 1:3] for j in 1:5] for i in 1:48]
random.markov_transitions = matrix

data.unit_to_bus = [1, 2]
data.volume_max = ones(2) * 50
data.volume_min = zeros(2)
data.volume_initial = zeros(2)

rt_sorted = deepcopy(rt)
da_sorted = deepcopy(da)

for t in 1:48
for i in 1:2
sort!(rt_sorted[t][i])
end
end

for d in 1:2
for j in 1:24
for i in 1:2
sort!(da_sorted[d][j][i])
end
end
end

data.prices_real_time_curve = rt_sorted
data.prices_day_ahead_curve = da_sorted
data.names = ["unit1", "unit2"]

OptimalEnergyBid.set_parameter!(prb, OptimalEnergyBid.Parameter.Optimizer, HiGHS.Optimizer)

OptimalEnergyBid.build_model!(prb, true)
OptimalEnergyBid.train!(prb; time_limit=60)
OptimalEnergyBid.simulate!(prb)
OptimalEnergyBid.plot_all(prb, 1, "")
4 changes: 2 additions & 2 deletions schemas/problem.json
@@ -41,7 +41,7 @@
"period_of_day_ahead_clear",
"duration",
"day_ahead_steps",
"real_tume_steps"
"real_time_steps"
],
"properties": {
"periods_per_day": {
@@ -68,7 +68,7 @@
"day_ahead_steps": {
"type": "integer"
},
"real_tume_steps": {
"real_time_steps": {
"type": "integer"
}
}
4 changes: 2 additions & 2 deletions src/build.jl
@@ -134,7 +134,7 @@ function _validate_numbers(prb::Problem)::Nothing
@assert 1 <= numbers.period_of_day_ahead_clear &&
numbers.period_of_day_ahead_clear <= numbers.periods_per_day
@assert 1 <= numbers.duration
@assert 1 <= numbers.real_tume_steps
@assert 1 <= numbers.real_time_steps
@assert 1 <= numbers.day_ahead_steps

return nothing
@@ -155,7 +155,7 @@ function _validate_data(prb::Problem)::Nothing
for t in 1:(numbers.duration)
@assert length(data.prices_real_time_curve[t]) >= numbers.units
for i in 1:(numbers.units)
@assert length(data.prices_real_time_curve[t][i]) >= numbers.real_tume_steps
@assert length(data.prices_real_time_curve[t][i]) >= numbers.real_time_steps
end
end
