From deebdbc1a71ec29fd113143336c73018a16d9933 Mon Sep 17 00:00:00 2001 From: Jose Daniel Lara Date: Thu, 21 Dec 2023 17:58:31 -0600 Subject: [PATCH] formatter --- src/deterministic.jl | 86 ++--- src/deterministic_single_time_series.jl | 4 +- src/forecasts.jl | 14 +- src/hdf5_time_series_storage.jl | 55 +-- src/in_memory_time_series_storage.jl | 8 +- src/internal.jl | 10 +- src/probabilistic.jl | 94 ++--- src/scenarios.jl | 56 +-- src/serialization.jl | 18 +- src/single_time_series.jl | 92 ++--- src/time_series_cache.jl | 72 ++-- src/time_series_container.jl | 2 +- src/time_series_formats.jl | 8 +- src/time_series_interface.jl | 154 ++++---- src/time_series_parameters.jl | 34 +- src/time_series_parser.jl | 58 +-- src/time_series_storage.jl | 21 +- src/utils/flatten_iterator_wrapper.jl | 2 +- src/utils/generate_struct_files.jl | 48 +-- src/utils/generate_structs.jl | 12 +- src/utils/logging.jl | 69 ++-- src/utils/print.jl | 29 +- src/utils/recorder_events.jl | 27 +- src/utils/test.jl | 9 +- src/validation.jl | 8 +- test/common.jl | 10 +- test/runtests.jl | 8 +- test/test_deprecations.jl | 2 +- test/test_generate_structs.jl | 46 ++- test/test_lazy_dict_from_iterator.jl | 2 +- test/test_logging.jl | 72 ++-- test/test_printing.jl | 4 +- test/test_recorder.jl | 4 +- test/test_serialization.jl | 30 +- test/test_time_series.jl | 488 +++++++++++++----------- test/test_time_series_cache.jl | 42 +- test/test_time_series_storage.jl | 63 +-- 37 files changed, 932 insertions(+), 829 deletions(-) diff --git a/src/deterministic.jl b/src/deterministic.jl index fa3ca5991..11c71425d 100644 --- a/src/deterministic.jl +++ b/src/deterministic.jl @@ -42,9 +42,9 @@ function Deterministic(; name, data, resolution, - scaling_factor_multiplier=nothing, - normalization_factor=1.0, - internal=InfrastructureSystemsInternal(), + scaling_factor_multiplier = nothing, + normalization_factor = 1.0, + internal = InfrastructureSystemsInternal(), ) data = handle_normalization_factor(convert_data(data), normalization_factor) return Deterministic(name, data, resolution, scaling_factor_multiplier, internal) @@ -54,15 +54,15 @@ function Deterministic( name::AbstractString, data::AbstractDict, resolution::Dates.Period; - normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, ) - return Deterministic( - name=name, - data=data, - resolution=resolution, - scaling_factor_multiplier=scaling_factor_multiplier, - internal=InfrastructureSystemsInternal(), + return Deterministic(; + name = name, + data = data, + resolution = resolution, + scaling_factor_multiplier = scaling_factor_multiplier, + internal = InfrastructureSystemsInternal(), ) end @@ -84,8 +84,8 @@ Construct Deterministic from a Dict of TimeArrays. 
function Deterministic( name::AbstractString, input_data::AbstractDict{Dates.DateTime, <:TimeSeries.TimeArray}; - normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, ) data_type = eltype(TimeSeries.values(first(values(input_data)))) data = SortedDict{Dates.DateTime, Vector{data_type}}() @@ -99,12 +99,12 @@ function Deterministic( data[k] = TimeSeries.values(v) end - return Deterministic( - name=name, - data=data, - resolution=resolution, - normalization_factor=normalization_factor, - scaling_factor_multiplier=scaling_factor_multiplier, + return Deterministic(; + name = name, + data = data, + resolution = resolution, + normalization_factor = normalization_factor, + scaling_factor_multiplier = scaling_factor_multiplier, ) end @@ -128,8 +128,8 @@ function Deterministic( filename::AbstractString, component::InfrastructureSystemsComponent, resolution::Dates.Period; - normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, ) component_name = get_name(component) raw_data = read_time_series(Deterministic, filename, component_name) @@ -137,8 +137,8 @@ function Deterministic( name, raw_data, resolution; - normalization_factor=normalization_factor, - scaling_factor_multiplier=scaling_factor_multiplier, + normalization_factor = normalization_factor, + scaling_factor_multiplier = scaling_factor_multiplier, ) end @@ -149,25 +149,25 @@ function Deterministic( name::AbstractString, series_data::RawTimeSeries, resolution::Dates.Period; - normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, ) - return Deterministic( - name=name, - data=series_data.data, - resolution=resolution, - normalization_factor=normalization_factor, - scaling_factor_multiplier=scaling_factor_multiplier, + return Deterministic(; + name = name, + data = series_data.data, + resolution = resolution, + normalization_factor = normalization_factor, + scaling_factor_multiplier = scaling_factor_multiplier, ) end function Deterministic(ts_metadata::DeterministicMetadata, data::SortedDict) - return Deterministic( - name=get_name(ts_metadata), - resolution=get_resolution(ts_metadata), - data=data, - scaling_factor_multiplier=get_scaling_factor_multiplier(ts_metadata), - internal=InfrastructureSystemsInternal(get_time_series_uuid(ts_metadata)), + return Deterministic(; + name = get_name(ts_metadata), + resolution = get_resolution(ts_metadata), + data = data, + scaling_factor_multiplier = get_scaling_factor_multiplier(ts_metadata), + internal = InfrastructureSystemsInternal(get_time_series_uuid(ts_metadata)), ) end @@ -176,8 +176,8 @@ function Deterministic(info::TimeSeriesParsedInfo) info.name, info.data, info.resolution; - normalization_factor=info.normalization_factor, - scaling_factor_multiplier=info.scaling_factor_multiplier, + normalization_factor = info.normalization_factor, + scaling_factor_multiplier = info.scaling_factor_multiplier, ) end @@ -278,16 +278,16 @@ get_initial_times(forecast::Deterministic) = get_initial_times_common(forecast) get_initial_timestamp(forecast::Deterministic) = get_initial_timestamp_common(forecast) 
get_interval(forecast::Deterministic) = get_interval_common(forecast) iterate_windows(forecast::Deterministic) = iterate_windows_common(forecast) -get_window(f::Deterministic, initial_time::Dates.DateTime; len=nothing) = - get_window_common(f, initial_time; len=len) +get_window(f::Deterministic, initial_time::Dates.DateTime; len = nothing) = + get_window_common(f, initial_time; len = len) function make_time_array(forecast::Deterministic) # Artificial limitation to reduce scope. @assert_op get_count(forecast) == 1 timestamps = range( get_initial_timestamp(forecast); - step=get_resolution(forecast), - length=get_horizon(forecast), + step = get_resolution(forecast), + length = get_horizon(forecast), ) data = first(values(get_data(forecast))) return TimeSeries.TimeArray(timestamps, data) diff --git a/src/deterministic_single_time_series.jl b/src/deterministic_single_time_series.jl index cc19c5888..c5253c421 100644 --- a/src/deterministic_single_time_series.jl +++ b/src/deterministic_single_time_series.jl @@ -104,7 +104,7 @@ get_resolution(val::DeterministicSingleTimeSeries) = get_resolution(val.single_t function get_window( forecast::DeterministicSingleTimeSeries, initial_time::Dates.DateTime; - len::Union{Nothing, Int}=nothing, + len::Union{Nothing, Int} = nothing, ) tdiff = Dates.Millisecond(initial_time - forecast.initial_timestamp) interval_ms = Dates.Millisecond(forecast.interval) @@ -134,7 +134,7 @@ function iterate_windows(forecast::DeterministicSingleTimeSeries) end initial_times = - range(forecast.initial_timestamp; step=forecast.interval, length=forecast.count) + range(forecast.initial_timestamp; step = forecast.interval, length = forecast.count) return (get_window(forecast, it) for it in initial_times) end diff --git a/src/forecasts.jl b/src/forecasts.jl index 9b7fa0a7a..892765cbd 100644 --- a/src/forecasts.jl +++ b/src/forecasts.jl @@ -61,8 +61,8 @@ end """ Return the forecast window corresponsing to interval index. """ -function get_window(forecast::Forecast, index::Int; len=nothing) - return get_window(forecast, index_to_initial_time(forecast, index); len=len) +function get_window(forecast::Forecast, index::Int; len = nothing) + return get_window(forecast, index_to_initial_time(forecast, index); len = len) end function iterate_windows_common(forecast) @@ -82,17 +82,17 @@ Return a TimeSeries.TimeArray for one forecast window. 
function make_time_array( forecast::Forecast, start_time::Dates.DateTime; - len::Union{Nothing, Int}=nothing, + len::Union{Nothing, Int} = nothing, ) - return get_window(forecast, start_time; len=len) + return get_window(forecast, start_time; len = len) end -function make_timestamps(forecast::Forecast, initial_time::Dates.DateTime, len=nothing) +function make_timestamps(forecast::Forecast, initial_time::Dates.DateTime, len = nothing) if len === nothing len = get_horizon(forecast) end - return range(initial_time; length=len, step=get_resolution(forecast)) + return range(initial_time; length = len, step = get_resolution(forecast)) end # This method requires that the forecast type implement a `get_data` method like @@ -118,7 +118,7 @@ end function get_window_common( forecast, initial_time::Dates.DateTime; - len::Union{Nothing, Int}=nothing, + len::Union{Nothing, Int} = nothing, ) horizon = get_horizon(forecast) if len === nothing diff --git a/src/hdf5_time_series_storage.jl b/src/hdf5_time_series_storage.jl index 5aecbc404..13bb48c87 100644 --- a/src/hdf5_time_series_storage.jl +++ b/src/hdf5_time_series_storage.jl @@ -41,10 +41,10 @@ Constructs Hdf5TimeSeriesStorage. """ function Hdf5TimeSeriesStorage( create_file::Bool; - filename=nothing, - directory=nothing, - read_only=false, - compression=CompressionSettings(), + filename = nothing, + directory = nothing, + read_only = false, + compression = CompressionSettings(), ) if create_file if isnothing(filename) @@ -72,8 +72,8 @@ Constructs Hdf5TimeSeriesStorage from an existing file. function from_file( ::Type{Hdf5TimeSeriesStorage}, filename::AbstractString; - read_only=false, - directory=nothing, + read_only = false, + directory = nothing, ) if !isfile(filename) error("time series storage $filename does not exist") @@ -88,7 +88,7 @@ function from_file( copy_h5_file(filename, file_path) end - storage = Hdf5TimeSeriesStorage(false; filename=file_path, read_only=read_only) + storage = Hdf5TimeSeriesStorage(false; filename = file_path, read_only = read_only) if !read_only version = read_data_format_version(storage) if version == "1.0.0" @@ -117,7 +117,7 @@ undergoing a deepcopy. 
- `storage::Hdf5TimeSeriesStorage`: storage instance - `directory::String`: If nothing, use tempdir """ -function copy_to_new_file!(storage::Hdf5TimeSeriesStorage, directory=nothing) +function copy_to_new_file!(storage::Hdf5TimeSeriesStorage, directory = nothing) if directory === nothing directory = tempdir() end @@ -181,12 +181,12 @@ function serialize_time_series!( settings = storage.compression if settings.enabled if settings.type == CompressionTypes.BLOSC - group["data", blosc=settings.level] = data + group["data", blosc = settings.level] = data elseif settings.type == CompressionTypes.DEFLATE if settings.shuffle - group["data", shuffle=(), deflate=settings.level] = data + group["data", shuffle = (), deflate = settings.level] = data else - group["data", deflate=settings.level] = data + group["data", deflate = settings.level] = data end else error("not implemented for type=$(settings.type)") @@ -402,8 +402,8 @@ function deserialize_time_series( TimeSeries.TimeArray( range( attributes["start_time"]; - length=length(rows), - step=attributes["resolution"], + length = length(rows), + step = attributes["resolution"], ), data, ), @@ -459,7 +459,8 @@ function get_hdf_array( data[start_time] = dataset[rows, columns.start] else data_read = dataset[rows, columns] - for (i, it) in enumerate(range(start_time; length=length(columns), step=interval)) + for (i, it) in + enumerate(range(start_time; length = length(columns), step = interval)) data[it] = @view data_read[1:length(rows), i] end end @@ -481,7 +482,8 @@ function get_hdf_array( data[start_time] = retransform_hdf_array(dataset[rows, columns.start, :], type) else data_read = retransform_hdf_array(dataset[rows, columns, :], type) - for (i, it) in enumerate(range(start_time; length=length(columns), step=interval)) + for (i, it) in + enumerate(range(start_time; length = length(columns), step = interval)) data[it] = @view data_read[1:length(rows), i] end end @@ -503,7 +505,8 @@ function get_hdf_array( data[start_time] = retransform_hdf_array(dataset[rows, columns.start, :, :], type) else data_read = retransform_hdf_array(dataset[rows, columns, :, :], type) - for (i, it) in enumerate(range(start_time; length=length(columns), step=interval)) + for (i, it) in + enumerate(range(start_time; length = length(columns), step = interval)) data[it] = @view data_read[1:length(rows), i] end end @@ -622,7 +625,7 @@ function deserialize_time_series( [3, 2, 1], ) for (i, it) in enumerate( - range(start_time; length=length(columns), step=attributes["interval"]), + range(start_time; length = length(columns), step = attributes["interval"]), ) data[it] = @view data_read[i, 1:length(rows), 1:total_percentiles] end @@ -661,7 +664,7 @@ function deserialize_time_series( data_read = PermutedDimsArray(path["data"][1:total_scenarios, rows, columns], [3, 2, 1]) for (i, it) in enumerate( - range(start_time; length=length(columns), step=attributes["interval"]), + range(start_time; length = length(columns), step = attributes["interval"]), ) data[it] = @view data_read[i, 1:length(rows), 1:total_scenarios] end @@ -735,11 +738,11 @@ end function _deserialize_compression_settings!(storage::Hdf5TimeSeriesStorage) HDF5.h5open(storage.file_path, "r+") do file root = _get_root(storage, file) - storage.compression = CompressionSettings( - enabled=HDF5.read(HDF5.attributes(root)["compression_enabled"]), - type=CompressionTypes(HDF5.read(HDF5.attributes(root)["compression_type"])), - level=HDF5.read(HDF5.attributes(root)["compression_level"]), - 
shuffle=HDF5.read(HDF5.attributes(root)["compression_shuffle"]), + storage.compression = CompressionSettings(; + enabled = HDF5.read(HDF5.attributes(root)["compression_enabled"]), + type = CompressionTypes(HDF5.read(HDF5.attributes(root)["compression_type"])), + level = HDF5.read(HDF5.attributes(root)["compression_level"]), + shuffle = HDF5.read(HDF5.attributes(root)["compression_shuffle"]), ) return end @@ -768,10 +771,10 @@ is_read_only(storage::Hdf5TimeSeriesStorage) = storage.read_only function compare_values( x::Hdf5TimeSeriesStorage, y::Hdf5TimeSeriesStorage; - compare_uuids=false, + compare_uuids = false, ) - item_x = sort!(collect(iterate_time_series(x)), by=z -> z[1]) - item_y = sort!(collect(iterate_time_series(y)), by=z -> z[1]) + item_x = sort!(collect(iterate_time_series(x)); by = z -> z[1]) + item_y = sort!(collect(iterate_time_series(y)); by = z -> z[1]) if length(item_x) != length(item_y) @error "lengths don't match" length(item_x) length(item_y) return false diff --git a/src/in_memory_time_series_storage.jl b/src/in_memory_time_series_storage.jl index 9fd90fa6e..3693afc61 100644 --- a/src/in_memory_time_series_storage.jl +++ b/src/in_memory_time_series_storage.jl @@ -42,7 +42,7 @@ Base.isempty(storage::InMemoryTimeSeriesStorage) = isempty(storage.data) check_read_only(storage::InMemoryTimeSeriesStorage) = nothing get_compression_settings(storage::InMemoryTimeSeriesStorage) = - CompressionSettings(enabled=false) + CompressionSettings(; enabled = false) is_read_only(storage::InMemoryTimeSeriesStorage) = false @@ -164,7 +164,7 @@ function deserialize_time_series( interval = get_interval(ts) start_time = initial_timestamp + interval * (columns.start - 1) data = SortedDict{Dates.DateTime, eltype(typeof(full_data)).parameters[2]}() - for initial_time in range(start_time; step=interval, length=length(columns)) + for initial_time in range(start_time; step = interval, length = length(columns)) if rows.start == 1 it = initial_time else @@ -218,7 +218,7 @@ function replace_component_uuid!( end function convert_to_hdf5(storage::InMemoryTimeSeriesStorage, filename::AbstractString) create_file = true - hdf5_storage = Hdf5TimeSeriesStorage(create_file; filename=filename) + hdf5_storage = Hdf5TimeSeriesStorage(create_file; filename = filename) for record in values(storage.data) for pair in record.component_names serialize_time_series!(hdf5_storage, pair[1], pair[2], record.ts) @@ -229,7 +229,7 @@ end function compare_values( x::InMemoryTimeSeriesStorage, y::InMemoryTimeSeriesStorage; - compare_uuids=false, + compare_uuids = false, ) keys_x = sort!(collect(keys(x.data))) keys_y = sort!(collect(keys(y.data))) diff --git a/src/internal.jl b/src/internal.jl index bad946ca3..0ff78f177 100644 --- a/src/internal.jl +++ b/src/internal.jl @@ -32,7 +32,7 @@ end """ Creates InfrastructureSystemsInternal with a new UUID. 
""" -InfrastructureSystemsInternal(; uuid=make_uuid(), units_info=nothing, ext=nothing) = +InfrastructureSystemsInternal(; uuid = make_uuid(), units_info = nothing, ext = nothing) = InfrastructureSystemsInternal(uuid, units_info, ext) """ @@ -111,7 +111,7 @@ end function compare_values( x::InfrastructureSystemsInternal, y::InfrastructureSystemsInternal; - compare_uuids=false, + compare_uuids = false, ) match = true for name in fieldnames(InfrastructureSystemsInternal) @@ -127,14 +127,14 @@ function compare_values( if val2 isa Dict && isempty(val2) val2 = nothing end - if !compare_values(val1, val2, compare_uuids=compare_uuids) + if !compare_values(val1, val2; compare_uuids = compare_uuids) @error "ext does not match" val1 val2 match = false end elseif !compare_values( getfield(x, name), - getfield(y, name), - compare_uuids=compare_uuids, + getfield(y, name); + compare_uuids = compare_uuids, ) @error "InfrastructureSystemsInternal field=$name does not match" match = false diff --git a/src/probabilistic.jl b/src/probabilistic.jl index 11484a949..eaf778bd0 100644 --- a/src/probabilistic.jl +++ b/src/probabilistic.jl @@ -47,9 +47,9 @@ function Probabilistic(; resolution, percentiles, data, - scaling_factor_multiplier=nothing, - normalization_factor=1.0, - internal=InfrastructureSystemsInternal(), + scaling_factor_multiplier = nothing, + normalization_factor = 1.0, + internal = InfrastructureSystemsInternal(), ) data = handle_normalization_factor(convert_data(data), normalization_factor) quantile_count = size(first(values(data)))[2] @@ -91,17 +91,17 @@ function Probabilistic( input_data::AbstractDict, percentiles::Vector, resolution::Dates.Period; - normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, ) - return Probabilistic( - name=name, - data=input_data, - percentiles=percentiles, - resolution=resolution, - scaling_factor_multiplier=scaling_factor_multiplier, - normalization_factor=normalization_factor, - internal=InfrastructureSystemsInternal(), + return Probabilistic(; + name = name, + data = input_data, + percentiles = percentiles, + resolution = resolution, + scaling_factor_multiplier = scaling_factor_multiplier, + normalization_factor = normalization_factor, + internal = InfrastructureSystemsInternal(), ) end @@ -125,8 +125,8 @@ function Probabilistic( name::AbstractString, input_data::AbstractDict{Dates.DateTime, <:TimeSeries.TimeArray}, percentiles::Vector{Float64}; - normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, ) data = SortedDict{Dates.DateTime, Matrix{Float64}}() resolution = @@ -136,13 +136,13 @@ function Probabilistic( data[k] = TimeSeries.values(v) end - return Probabilistic( - name=name, - data=data, - percentiles=percentiles, - resolution=resolution, - normalization_factor=normalization_factor, - scaling_factor_multiplier=scaling_factor_multiplier, + return Probabilistic(; + name = name, + data = data, + percentiles = percentiles, + resolution = resolution, + normalization_factor = normalization_factor, + scaling_factor_multiplier = scaling_factor_multiplier, ) end @@ -154,38 +154,38 @@ function Probabilistic( series_data::RawTimeSeries, percentiles::Vector, resolution::Dates.Period; - 
normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, ) - return Probabilistic( - name=name, - data=series_data.data, - percentiles=percentiles, - resolution=resolution, - normalization_factor=normalization_factor, - scaling_factor_multiplier=scaling_factor_multiplier, + return Probabilistic(; + name = name, + data = series_data.data, + percentiles = percentiles, + resolution = resolution, + normalization_factor = normalization_factor, + scaling_factor_multiplier = scaling_factor_multiplier, ) end function Probabilistic(ts_metadata::ProbabilisticMetadata, data::SortedDict) - return Probabilistic( - name=get_name(ts_metadata), - percentiles=get_percentiles(ts_metadata), - resolution=get_resolution(ts_metadata), - data=data, - scaling_factor_multiplier=get_scaling_factor_multiplier(ts_metadata), - internal=InfrastructureSystemsInternal(get_time_series_uuid(ts_metadata)), + return Probabilistic(; + name = get_name(ts_metadata), + percentiles = get_percentiles(ts_metadata), + resolution = get_resolution(ts_metadata), + data = data, + scaling_factor_multiplier = get_scaling_factor_multiplier(ts_metadata), + internal = InfrastructureSystemsInternal(get_time_series_uuid(ts_metadata)), ) end function Probabilistic(info::TimeSeriesParsedInfo) - return Probabilistic( - name=info.name, - data=info.data, - percentiles=info.percentiles, - resolution=info.resolution, - normalization_factor=info.normalization_factor, - scaling_factor_multiplier=info.scaling_factor_multiplier, + return Probabilistic(; + name = info.name, + data = info.data, + percentiles = info.percentiles, + resolution = info.resolution, + normalization_factor = info.normalization_factor, + scaling_factor_multiplier = info.scaling_factor_multiplier, ) end @@ -290,6 +290,6 @@ get_count(forecast::Probabilistic) = get_count_common(forecast) get_initial_times(forecast::Probabilistic) = get_initial_times_common(forecast) get_initial_timestamp(forecast::Probabilistic) = get_initial_timestamp_common(forecast) get_interval(forecast::Probabilistic) = get_interval_common(forecast) -get_window(f::Probabilistic, initial_time::Dates.DateTime; len=nothing) = - get_window_common(f, initial_time; len=len) +get_window(f::Probabilistic, initial_time::Dates.DateTime; len = nothing) = + get_window_common(f, initial_time; len = len) iterate_windows(forecast::Probabilistic) = iterate_windows_common(forecast) diff --git a/src/scenarios.jl b/src/scenarios.jl index b1e59369e..aae6d922e 100644 --- a/src/scenarios.jl +++ b/src/scenarios.jl @@ -47,9 +47,9 @@ function Scenarios(; data, scenario_count, resolution, - scaling_factor_multiplier=nothing, - normalization_factor=1.0, - internal=InfrastructureSystemsInternal(), + scaling_factor_multiplier = nothing, + normalization_factor = 1.0, + internal = InfrastructureSystemsInternal(), ) data = handle_normalization_factor(convert_data(data), normalization_factor) return Scenarios( @@ -80,19 +80,19 @@ function Scenarios( name::AbstractString, input_data::AbstractDict, resolution::Dates.Period; - normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, ) scenario_count = size(first(values(input_data)))[2] - return Scenarios( - name=name, - data=input_data, - 
scenario_count=scenario_count, - resolution=resolution, - scaling_factor_multiplier=scaling_factor_multiplier, - normalization_factor=normalization_factor, - internal=InfrastructureSystemsInternal(), + return Scenarios(; + name = name, + data = input_data, + scenario_count = scenario_count, + resolution = resolution, + scaling_factor_multiplier = scaling_factor_multiplier, + normalization_factor = normalization_factor, + internal = InfrastructureSystemsInternal(), ) end @@ -114,8 +114,8 @@ Construct Scenarios from a Dict of TimeArrays. function Scenarios( name::AbstractString, input_data::AbstractDict{Dates.DateTime, <:TimeSeries.TimeArray}; - normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, ) data = SortedDict{Dates.DateTime, Matrix{Float64}}() resolution = @@ -129,19 +129,19 @@ function Scenarios( name, data, resolution; - normalization_factor=normalization_factor, - scaling_factor_multiplier=scaling_factor_multiplier, + normalization_factor = normalization_factor, + scaling_factor_multiplier = scaling_factor_multiplier, ) end function Scenarios(ts_metadata::ScenariosMetadata, data::SortedDict) - return Scenarios( - name=get_name(ts_metadata), - scenario_count=get_scenario_count(ts_metadata), - resolution=get_resolution(ts_metadata), - data=data, - scaling_factor_multiplier=get_scaling_factor_multiplier(ts_metadata), - internal=InfrastructureSystemsInternal(get_time_series_uuid(ts_metadata)), + return Scenarios(; + name = get_name(ts_metadata), + scenario_count = get_scenario_count(ts_metadata), + resolution = get_resolution(ts_metadata), + data = data, + scaling_factor_multiplier = get_scaling_factor_multiplier(ts_metadata), + internal = InfrastructureSystemsInternal(get_time_series_uuid(ts_metadata)), ) end @@ -150,8 +150,8 @@ function Scenarios(info::TimeSeriesParsedInfo) info.name, info.data, info.resolution; - normalization_factor=info.normalization_factor, - scaling_factor_multiplier=info.scaling_factor_multiplier, + normalization_factor = info.normalization_factor, + scaling_factor_multiplier = info.scaling_factor_multiplier, ) end @@ -242,6 +242,6 @@ get_count(forecast::Scenarios) = get_count_common(forecast) get_initial_times(forecast::Scenarios) = get_initial_times_common(forecast) get_initial_timestamp(forecast::Scenarios) = get_initial_timestamp_common(forecast) get_interval(forecast::Scenarios) = get_interval_common(forecast) -get_window(f::Scenarios, initial_time::Dates.DateTime; len=nothing) = - get_window_common(f, initial_time; len=len) +get_window(f::Scenarios, initial_time::Dates.DateTime; len = nothing) = + get_window_common(f, initial_time; len = len) iterate_windows(forecast::Scenarios) = iterate_windows_common(forecast) diff --git a/src/serialization.jl b/src/serialization.jl index 2769b5ca1..e2fb05792 100644 --- a/src/serialization.jl +++ b/src/serialization.jl @@ -12,14 +12,14 @@ Serializes a InfrastructureSystemsType to a JSON file. function to_json( obj::T, filename::AbstractString; - force=false, - pretty=false, + force = false, + pretty = false, ) where {T <: InfrastructureSystemsType} if !force && isfile(filename) error("$file already exists. 
Set force=true to overwrite.") end result = open(filename, "w") do io - return to_json(io, obj, pretty=pretty) + return to_json(io, obj; pretty = pretty) end @info "Serialized $T to $filename" @@ -29,10 +29,10 @@ end """ Serializes a InfrastructureSystemsType to a JSON string. """ -function to_json(obj::T; pretty=false, indent=2) where {T <: InfrastructureSystemsType} +function to_json(obj::T; pretty = false, indent = 2) where {T <: InfrastructureSystemsType} if pretty io = IOBuffer() - JSON3.pretty(io, serialize(obj), JSON3.AlignmentContext(indent=indent)) + JSON3.pretty(io, serialize(obj), JSON3.AlignmentContext(; indent = indent)) return take!(io) else return JSON3.write(serialize(obj)) @@ -42,12 +42,12 @@ end function to_json( io::IO, obj::T; - pretty=false, - indent=2, + pretty = false, + indent = 2, ) where {T <: InfrastructureSystemsType} data = serialize(obj) if pretty - res = JSON3.pretty(io, data, JSON3.AlignmentContext(indent=indent)) + res = JSON3.pretty(io, data, JSON3.AlignmentContext(; indent = indent)) else res = JSON3.write(io, data) end @@ -267,7 +267,7 @@ deserialize(::Type{Vector{Symbol}}, data::Vector) = Symbol.(data) function serialize_julia_info() data = Dict{String, Any}("julia_version" => string(VERSION)) io = IOBuffer() - Pkg.status(io=io, mode=Pkg.PKGMODE_MANIFEST) + Pkg.status(; io = io, mode = Pkg.PKGMODE_MANIFEST) data["package_info"] = String(take!(io)) return data end diff --git a/src/single_time_series.jl b/src/single_time_series.jl index 4e04072a2..b1bc499a7 100644 --- a/src/single_time_series.jl +++ b/src/single_time_series.jl @@ -30,9 +30,9 @@ end function SingleTimeSeries(; name, data, - scaling_factor_multiplier=nothing, - normalization_factor=1.0, - internal=InfrastructureSystemsInternal(), + scaling_factor_multiplier = nothing, + normalization_factor = 1.0, + internal = InfrastructureSystemsInternal(), ) data = handle_normalization_factor(data, normalization_factor) return SingleTimeSeries( @@ -73,24 +73,24 @@ Construct SingleTimeSeries from a TimeArray or DataFrame. 
function SingleTimeSeries( name::AbstractString, data::Union{TimeSeries.TimeArray, DataFrames.DataFrame}; - normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, - timestamp=:timestamp, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, + timestamp = :timestamp, ) if data isa DataFrames.DataFrame - ta = TimeSeries.TimeArray(data; timestamp=timestamp) + ta = TimeSeries.TimeArray(data; timestamp = timestamp) elseif data isa TimeSeries.TimeArray ta = data else error("fatal: $(typeof(data))") end - return SingleTimeSeries( - name=name, - data=ta, - scaling_factor_multiplier=scaling_factor_multiplier, - normalization_factor=normalization_factor, - internal=InfrastructureSystemsInternal(), + return SingleTimeSeries(; + name = name, + data = ta, + scaling_factor_multiplier = scaling_factor_multiplier, + normalization_factor = normalization_factor, + internal = InfrastructureSystemsInternal(), ) end @@ -115,17 +115,17 @@ function SingleTimeSeries( filename::AbstractString, component::InfrastructureSystemsComponent, resolution::Dates.Period; - normalization_factor::NormalizationFactor=1.0, - scaling_factor_multiplier::Union{Nothing, Function}=nothing, + normalization_factor::NormalizationFactor = 1.0, + scaling_factor_multiplier::Union{Nothing, Function} = nothing, ) component_name = get_name(component) raw = read_time_series(SingleTimeSeries, filename, component_name) ta = make_time_array(raw, component_name, resolution) - return SingleTimeSeries( - name=name, - data=ta, - normalization_factor=normalization_factor, - scaling_factor_multiplier=scaling_factor_multiplier, + return SingleTimeSeries(; + name = name, + data = ta, + normalization_factor = normalization_factor, + scaling_factor_multiplier = scaling_factor_multiplier, ) end @@ -143,7 +143,7 @@ function SingleTimeSeries( initial_time:resolution:(initial_time + resolution * (time_steps - 1)), ones(time_steps), ) - return SingleTimeSeries(; name=name, data=data) + return SingleTimeSeries(; name = name, data = data) end function SingleTimeSeries(time_series::Vector{SingleTimeSeries}) @@ -153,10 +153,10 @@ function SingleTimeSeries(time_series::Vector{SingleTimeSeries}) data = collect(Iterators.flatten((TimeSeries.values(get_data(x)) for x in time_series))) ta = TimeSeries.TimeArray(timestamps, data) - time_series = SingleTimeSeries( - name=get_name(time_series[1]), - data=ta, - scaling_factor_multiplier=time_series[1].scaling_factor_multiplier, + time_series = SingleTimeSeries(; + name = get_name(time_series[1]), + data = ta, + scaling_factor_multiplier = time_series[1].scaling_factor_multiplier, ) @debug "concatenated time_series" LOG_GROUP_TIME_SERIES time_series return time_series @@ -185,11 +185,11 @@ end function SingleTimeSeries(info::TimeSeriesParsedInfo) data = make_time_array(info) - return SingleTimeSeries( - name=info.name, - data=data, - normalization_factor=info.normalization_factor, - scaling_factor_multiplier=info.scaling_factor_multiplier, + return SingleTimeSeries(; + name = info.name, + data = data, + normalization_factor = info.normalization_factor, + scaling_factor_multiplier = info.scaling_factor_multiplier, ) end @@ -257,7 +257,7 @@ Base.lastindex(time_series::SingleTimeSeries, d) = lastindex(get_data(time_serie Base.eachindex(time_series::SingleTimeSeries) = eachindex(get_data(time_series)) -Base.iterate(time_series::SingleTimeSeries, n=1) = iterate(get_data(time_series), n) 
+Base.iterate(time_series::SingleTimeSeries, n = 1) = iterate(get_data(time_series), n) """ Refer to TimeSeries.when(). Underlying data is copied. @@ -270,9 +270,9 @@ end Return a time_series truncated starting with timestamp. """ function from(time_series::SingleTimeSeries, timestamp) - return SingleTimeSeries( - name=get_name(time_series), - data=TimeSeries.from(get_data(time_series), timestamp), + return SingleTimeSeries(; + name = get_name(time_series), + data = TimeSeries.from(get_data(time_series), timestamp), ) end @@ -280,9 +280,9 @@ end Return a time_series truncated after timestamp. """ function to(time_series::SingleTimeSeries, timestamp) - return SingleTimeSeries( - name=get_name(time_series), - data=TimeSeries.to(get_data(time_series), timestamp), + return SingleTimeSeries(; + name = get_name(time_series), + data = TimeSeries.to(get_data(time_series), timestamp), ) end @@ -334,7 +334,7 @@ get_columns(::Type{<:TimeSeriesMetadata}, ta::TimeSeries.TimeArray) = nothing function make_time_array( time_series::SingleTimeSeries, start_time::Dates.DateTime; - len::Union{Nothing, Int}=nothing, + len::Union{Nothing, Int} = nothing, ) ta = get_data(time_series) first_time = first(TimeSeries.timestamp(ta)) @@ -349,13 +349,13 @@ function make_time_array( end function SingleTimeSeriesMetadata(ts_metadata::DeterministicMetadata) - return SingleTimeSeriesMetadata( - name=get_name(ts_metadata), - resolution=get_resolution(ts_metadata), - initial_timestamp=get_initial_timestamp(ts_metadata), - time_series_uuid=get_time_series_uuid(ts_metadata), - length=get_count(ts_metadata) * get_horizon(ts_metadata), - scaling_factor_multiplier=get_scaling_factor_multiplier(ts_metadata), - internal=get_internal(ts_metadata), + return SingleTimeSeriesMetadata(; + name = get_name(ts_metadata), + resolution = get_resolution(ts_metadata), + initial_timestamp = get_initial_timestamp(ts_metadata), + time_series_uuid = get_time_series_uuid(ts_metadata), + length = get_count(ts_metadata) * get_horizon(ts_metadata), + scaling_factor_multiplier = get_scaling_factor_multiplier(ts_metadata), + internal = get_internal(ts_metadata), ) end diff --git a/src/time_series_cache.jl b/src/time_series_cache.jl index 1fb63d730..cc9ded7ec 100644 --- a/src/time_series_cache.jl +++ b/src/time_series_cache.jl @@ -2,7 +2,7 @@ const TIME_SERIES_CACHE_SIZE_BYTES = 1024 * 1024 abstract type TimeSeriesCache end -function Base.iterate(cache::TimeSeriesCache, state=nothing) +function Base.iterate(cache::TimeSeriesCache, state = nothing) if state === nothing reset!(cache) end @@ -52,8 +52,8 @@ function get_time_series_array!(cache::TimeSeriesCache, timestamp::Dates.DateTim _get_component(cache), _get_time_series(cache), next_time; - len=len, - ignore_scaling_factors=_get_ignore_scaling_factors(cache), + len = len, + ignore_scaling_factors = _get_ignore_scaling_factors(cache), ) _increment_next_time!(cache, len) _decrement_iterations_remaining!(cache) @@ -200,10 +200,10 @@ function ForecastCache( ::Type{T}, component::InfrastructureSystemsComponent, name::AbstractString; - start_time::Union{Nothing, Dates.DateTime}=nothing, - horizon::Union{Nothing, Int}=nothing, - cache_size_bytes=TIME_SERIES_CACHE_SIZE_BYTES, - ignore_scaling_factors=false, + start_time::Union{Nothing, Dates.DateTime} = nothing, + horizon::Union{Nothing, Int} = nothing, + cache_size_bytes = TIME_SERIES_CACHE_SIZE_BYTES, + ignore_scaling_factors = false, ) where {T <: Forecast} metadata_type = time_series_data_to_metadata(T) ts_metadata = 
get_time_series_metadata(metadata_type, component, name) @@ -220,10 +220,10 @@ function ForecastCache( T, component, name; - start_time=start_time, - len=get_horizon(ts_metadata), + start_time = start_time, + len = get_horizon(ts_metadata), ) - vals = get_time_series_values(component, ts, start_time, len=get_horizon(ts_metadata)) + vals = get_time_series_values(component, ts, start_time; len = get_horizon(ts_metadata)) row_size = _get_row_size(vals) count = get_count(ts_metadata) @@ -238,14 +238,14 @@ function ForecastCache( @debug "ForecastCache" _group = LOG_GROUP_TIME_SERIES row_size window_size in_memory_count return ForecastCache( - TimeSeriesCacheCommon( - ts=ts, - component=component, - name=name, - next_time=start_time, - len=count, - num_iterations=count, - ignore_scaling_factors=ignore_scaling_factors, + TimeSeriesCacheCommon(; + ts = ts, + component = component, + name = name, + next_time = start_time, + len = count, + num_iterations = count, + ignore_scaling_factors = ignore_scaling_factors, ), in_memory_count, horizon, @@ -270,9 +270,9 @@ function _update!(cache::ForecastCache) _get_type(cache), _get_component(cache), _get_name(cache); - start_time=next_time, - len=len, - count=count, + start_time = next_time, + len = len, + count = count, ) _set_length_available!(cache, len) _set_time_series!(cache, ts) @@ -318,9 +318,9 @@ function StaticTimeSeriesCache( ::Type{T}, component::InfrastructureSystemsComponent, name::AbstractString; - cache_size_bytes=TIME_SERIES_CACHE_SIZE_BYTES, - start_time::Union{Nothing, Dates.DateTime}=nothing, - ignore_scaling_factors=false, + cache_size_bytes = TIME_SERIES_CACHE_SIZE_BYTES, + start_time::Union{Nothing, Dates.DateTime} = nothing, + ignore_scaling_factors = false, ) where {T <: StaticTimeSeries} metadata_type = time_series_data_to_metadata(T) ts_metadata = get_time_series_metadata(metadata_type, component, name) @@ -335,8 +335,8 @@ function StaticTimeSeriesCache( end # Get an instance to assess data size. 
- ts = get_time_series(T, component, name; start_time=start_time, len=1) - vals = get_time_series_values(component, ts, start_time, len=1) + ts = get_time_series(T, component, name; start_time = start_time, len = 1) + vals = get_time_series_values(component, ts, start_time; len = 1) row_size = _get_row_size(vals) if row_size > cache_size_bytes @@ -347,14 +347,14 @@ function StaticTimeSeriesCache( @debug "StaticTimeSeriesCache" _group = LOG_GROUP_TIME_SERIES total_length in_memory_rows return StaticTimeSeriesCache( - TimeSeriesCacheCommon( - ts=ts, - component=component, - name=name, - next_time=start_time, - len=total_length, - num_iterations=total_length, - ignore_scaling_factors=ignore_scaling_factors, + TimeSeriesCacheCommon(; + ts = ts, + component = component, + name = name, + next_time = start_time, + len = total_length, + num_iterations = total_length, + ignore_scaling_factors = ignore_scaling_factors, ), in_memory_rows, ) @@ -374,8 +374,8 @@ function _update!(cache::StaticTimeSeriesCache) _get_type(cache), _get_component(cache), _get_name(cache); - start_time=next_time, - len=len, + start_time = next_time, + len = len, ) _set_length_available!(cache, len) _set_time_series!(cache, ts) diff --git a/src/time_series_container.jl b/src/time_series_container.jl index 0e67a1835..5db13fa27 100644 --- a/src/time_series_container.jl +++ b/src/time_series_container.jl @@ -49,7 +49,7 @@ end function add_time_series!( container::TimeSeriesContainer, ts_metadata::T; - skip_if_present=false, + skip_if_present = false, ) where {T <: TimeSeriesMetadata} key = TimeSeriesKey(T, get_name(ts_metadata)) if haskey(container.data, key) diff --git a/src/time_series_formats.jl b/src/time_series_formats.jl index 96c898318..ea9fa5687 100644 --- a/src/time_series_formats.jl +++ b/src/time_series_formats.jl @@ -15,7 +15,7 @@ Pass component_name when the file does not have the component name in a column h function read_time_series( ::Type{T}, data_file::AbstractString, - component_name=nothing; + component_name = nothing; kwargs..., ) where {T <: TimeSeriesData} if !isfile(data_file) @@ -175,7 +175,7 @@ function read_time_series( ::Type{T}, ::Type{Deterministic}, file::CSV.File, - component_name=nothing; + component_name = nothing; kwargs..., ) where {T <: TimeSeriesFormatDateTimeAsColumn} @debug "Read CSV data from $file." 
_group = LOG_GROUP_TIME_SERIES @@ -202,7 +202,7 @@ function read_time_series( ::Type{T}, ::Type{<:StaticTimeSeries}, file::CSV.File, - component_name=nothing; + component_name = nothing; kwargs..., ) where {T <: Union{TimeSeriesFormatPeriodAsColumn, TimeSeriesFormatDateTimeAsColumn}} first_timestamp = get_timestamp(T, file, 1) @@ -248,7 +248,7 @@ function read_time_series( ::Type{T}, ::Type{<:StaticTimeSeries}, file::CSV.File, - component_name=nothing; + component_name = nothing; kwargs..., ) where {T <: TimeSeriesFormatComponentsAsColumnsNoTime} first_timestamp = get(kwargs, :start_datetime, Dates.DateTime(Dates.today())) diff --git a/src/time_series_interface.jl b/src/time_series_interface.jl index 8e800cfe9..6eadc8081 100644 --- a/src/time_series_interface.jl +++ b/src/time_series_interface.jl @@ -1,18 +1,18 @@ -const SUPPORTED_TIME_SERIES_TYPES = +const SupportedTimeSeriesTypes = Union{InfrastructureSystemsComponent, InfrastructureSystemsSupplementalAttribute} function add_time_series!( component::T, time_series::TimeSeriesMetadata; - skip_if_present=false, -) where {T <: SUPPORTED_TIME_SERIES_TYPES} + skip_if_present = false, +) where {T <: SupportedTimeSeriesTypes} component_id = get_uuid(component) container = get_time_series_container(component) if isnothing(container) throw(ArgumentError("type $T does not support storing time series")) end - add_time_series!(container, time_series, skip_if_present=skip_if_present) + add_time_series!(container, time_series; skip_if_present = skip_if_present) @debug "Added $time_series to $(typeof(component)) $(component_id) " * "num_time_series=$(length(get_time_series_container(component).data))." _group = LOG_GROUP_TIME_SERIES @@ -23,7 +23,7 @@ Removes the metadata for a time_series. If this returns true then the caller must also remove the actual time series data. """ function remove_time_series_metadata!( - component::SUPPORTED_TIME_SERIES_TYPES, + component::SupportedTimeSeriesTypes, ::Type{T}, name::AbstractString, ) where {T <: TimeSeriesMetadata} @@ -42,7 +42,7 @@ function remove_time_series_metadata!( return true end -function clear_time_series!(component::SUPPORTED_TIME_SERIES_TYPES) +function clear_time_series!(component::SupportedTimeSeriesTypes) container = get_time_series_container(component) if !isnothing(container) clear_time_series!(container) @@ -140,7 +140,7 @@ Return a time series corresponding to the given parameters. # Arguments - `::Type{T}`: Concrete subtype of TimeSeriesData to return - - `component::SUPPORTED_TIME_SERIES_TYPES`: Component containing the time series + - `component::SupportedTimeSeriesTypes`: Component containing the time series - `name::AbstractString`: name of time series - `start_time::Union{Nothing, Dates.DateTime} = nothing`: If nothing, use the `initial_timestamp` of the time series. If T is a subtype of Forecast then `start_time` @@ -152,11 +152,11 @@ Return a time series corresponding to the given parameters. 
""" function get_time_series( ::Type{T}, - component::SUPPORTED_TIME_SERIES_TYPES, + component::SupportedTimeSeriesTypes, name::AbstractString; - start_time::Union{Nothing, Dates.DateTime}=nothing, - len::Union{Nothing, Int}=nothing, - count::Union{Nothing, Int}=nothing, + start_time::Union{Nothing, Dates.DateTime} = nothing, + len::Union{Nothing, Int} = nothing, + count::Union{Nothing, Int} = nothing, ) where {T <: TimeSeriesData} if !has_time_series(component) throw(ArgumentError("no forecasts are stored in $component")) @@ -173,7 +173,7 @@ end function get_time_series_uuid( ::Type{T}, - component::SUPPORTED_TIME_SERIES_TYPES, + component::SupportedTimeSeriesTypes, name::AbstractString, ) where {T <: TimeSeriesData} metadata_type = time_series_data_to_metadata(T) @@ -183,7 +183,7 @@ end function get_time_series_metadata( ::Type{T}, - component::SUPPORTED_TIME_SERIES_TYPES, + component::SupportedTimeSeriesTypes, name::AbstractString, ) where {T <: TimeSeriesMetadata} return get_time_series_metadata(T, get_time_series_container(component), name) @@ -197,13 +197,13 @@ the component and applied to the data unless ignore_scaling_factors is true. """ function get_time_series_array( ::Type{T}, - component::SUPPORTED_TIME_SERIES_TYPES, + component::SupportedTimeSeriesTypes, name::AbstractString; - start_time::Union{Nothing, Dates.DateTime}=nothing, - len::Union{Nothing, Int}=nothing, - ignore_scaling_factors=false, + start_time::Union{Nothing, Dates.DateTime} = nothing, + len::Union{Nothing, Int} = nothing, + ignore_scaling_factors = false, ) where {T <: TimeSeriesData} - ts = get_time_series(T, component, name; start_time=start_time, len=len, count=1) + ts = get_time_series(T, component, name; start_time = start_time, len = len, count = 1) if start_time === nothing start_time = get_initial_timestamp(ts) end @@ -212,8 +212,8 @@ function get_time_series_array( component, ts, start_time; - len=len, - ignore_scaling_factors=ignore_scaling_factors, + len = len, + ignore_scaling_factors = ignore_scaling_factors, ) end @@ -226,11 +226,11 @@ the component and applied to the data unless ignore_scaling_factors is true. See also [`ForecastCache`](@ref). """ function get_time_series_array( - component::SUPPORTED_TIME_SERIES_TYPES, + component::SupportedTimeSeriesTypes, forecast::Forecast, start_time::Dates.DateTime; - len=nothing, - ignore_scaling_factors=false, + len = nothing, + ignore_scaling_factors = false, ) return _make_time_array(component, forecast, start_time, len, ignore_scaling_factors) end @@ -244,11 +244,11 @@ the component and applied to the data unless ignore_scaling_factors is true. See also [`StaticTimeSeriesCache`](@ref). """ function get_time_series_array( - component::SUPPORTED_TIME_SERIES_TYPES, + component::SupportedTimeSeriesTypes, time_series::StaticTimeSeries, - start_time::Union{Nothing, Dates.DateTime}=nothing; - len::Union{Nothing, Int}=nothing, - ignore_scaling_factors=false, + start_time::Union{Nothing, Dates.DateTime} = nothing; + len::Union{Nothing, Int} = nothing, + ignore_scaling_factors = false, ) if start_time === nothing start_time = get_initial_timestamp(time_series) @@ -266,13 +266,13 @@ Return a vector of timestamps from storage for the given time series parameters. 
""" function get_time_series_timestamps( ::Type{T}, - component::SUPPORTED_TIME_SERIES_TYPES, + component::SupportedTimeSeriesTypes, name::AbstractString; - start_time::Union{Nothing, Dates.DateTime}=nothing, - len::Union{Nothing, Int}=nothing, + start_time::Union{Nothing, Dates.DateTime} = nothing, + len::Union{Nothing, Int} = nothing, ) where {T <: TimeSeriesData} return TimeSeries.timestamp( - get_time_series_array(T, component, name; start_time=start_time, len=len), + get_time_series_array(T, component, name; start_time = start_time, len = len), ) end @@ -280,13 +280,13 @@ end Return a vector of timestamps from a cached Forecast instance. """ function get_time_series_timestamps( - component::SUPPORTED_TIME_SERIES_TYPES, + component::SupportedTimeSeriesTypes, forecast::Forecast, - start_time::Union{Nothing, Dates.DateTime}=nothing; - len::Union{Nothing, Int}=nothing, + start_time::Union{Nothing, Dates.DateTime} = nothing; + len::Union{Nothing, Int} = nothing, ) return TimeSeries.timestamp( - get_time_series_array(component, forecast, start_time; len=len), + get_time_series_array(component, forecast, start_time; len = len), ) end @@ -294,13 +294,13 @@ end Return a vector of timestamps from a cached StaticTimeSeries instance. """ function get_time_series_timestamps( - component::SUPPORTED_TIME_SERIES_TYPES, + component::SupportedTimeSeriesTypes, time_series::StaticTimeSeries, - start_time::Union{Nothing, Dates.DateTime}=nothing; - len::Union{Nothing, Int}=nothing, + start_time::Union{Nothing, Dates.DateTime} = nothing; + len::Union{Nothing, Int} = nothing, ) return TimeSeries.timestamp( - get_time_series_array(component, time_series, start_time; len=len), + get_time_series_array(component, time_series, start_time; len = len), ) end @@ -312,20 +312,20 @@ that accepts a cached TimeSeriesData instance. """ function get_time_series_values( ::Type{T}, - component::SUPPORTED_TIME_SERIES_TYPES, + component::SupportedTimeSeriesTypes, name::AbstractString; - start_time::Union{Nothing, Dates.DateTime}=nothing, - len::Union{Nothing, Int}=nothing, - ignore_scaling_factors=false, + start_time::Union{Nothing, Dates.DateTime} = nothing, + len::Union{Nothing, Int} = nothing, + ignore_scaling_factors = false, ) where {T <: TimeSeriesData} return TimeSeries.values( get_time_series_array( T, component, name; - start_time=start_time, - len=len, - ignore_scaling_factors=ignore_scaling_factors, + start_time = start_time, + len = len, + ignore_scaling_factors = ignore_scaling_factors, ), ) end @@ -334,19 +334,19 @@ end Return an Array of values for one forecast window from a cached Forecast instance. """ function get_time_series_values( - component::SUPPORTED_TIME_SERIES_TYPES, + component::SupportedTimeSeriesTypes, forecast::Forecast, start_time::Dates.DateTime; - len::Union{Nothing, Int}=nothing, - ignore_scaling_factors=false, + len::Union{Nothing, Int} = nothing, + ignore_scaling_factors = false, ) return TimeSeries.values( get_time_series_array( component, forecast, start_time; - len=len, - ignore_scaling_factors=ignore_scaling_factors, + len = len, + ignore_scaling_factors = ignore_scaling_factors, ), ) end @@ -356,25 +356,25 @@ Return an Array of values from a cached StaticTimeSeries instance for the reques series parameters. 
""" function get_time_series_values( - component::SUPPORTED_TIME_SERIES_TYPES, + component::SupportedTimeSeriesTypes, time_series::StaticTimeSeries, - start_time::Union{Nothing, Dates.DateTime}=nothing; - len::Union{Nothing, Int}=nothing, - ignore_scaling_factors=false, + start_time::Union{Nothing, Dates.DateTime} = nothing; + len::Union{Nothing, Int} = nothing, + ignore_scaling_factors = false, ) return TimeSeries.values( get_time_series_array( component, time_series, start_time; - len=len, - ignore_scaling_factors=ignore_scaling_factors, + len = len, + ignore_scaling_factors = ignore_scaling_factors, ), ) end function _make_time_array(component, time_series, start_time, len, ignore_scaling_factors) - ta = make_time_array(time_series, start_time; len=len) + ta = make_time_array(time_series, start_time; len = len) if ignore_scaling_factors return ta end @@ -390,7 +390,7 @@ end """ Return true if the component has time series data. """ -function has_time_series(component::SUPPORTED_TIME_SERIES_TYPES) +function has_time_series(component::SupportedTimeSeriesTypes) container = get_time_series_container(component) return !isnothing(container) && !isempty(container) end @@ -399,7 +399,7 @@ end Return true if the component has time series data of type T. """ function has_time_series( - component::SUPPORTED_TIME_SERIES_TYPES, + component::SupportedTimeSeriesTypes, ::Type{T}, ) where {T <: TimeSeriesData} container = get_time_series_container(component) @@ -421,7 +421,7 @@ function has_time_series( end function has_time_series( - component::SUPPORTED_TIME_SERIES_TYPES, + component::SupportedTimeSeriesTypes, type::Type{<:TimeSeriesMetadata}, name::AbstractString, ) @@ -436,8 +436,8 @@ references. # Arguments - - `dst::SUPPORTED_TIME_SERIES_TYPES`: Destination component - - `src::SUPPORTED_TIME_SERIES_TYPES`: Source component + - `dst::SupportedTimeSeriesTypes`: Destination component + - `src::SupportedTimeSeriesTypes`: Source component - `name_mapping::Dict = nothing`: Optionally map src names to different dst names. If provided and src has a time_series with a name not present in name_mapping, that time_series will not copied. If name_mapping is nothing then all time_series will be @@ -449,10 +449,10 @@ references. src's multipliers. 
""" function copy_time_series!( - dst::SUPPORTED_TIME_SERIES_TYPES, - src::SUPPORTED_TIME_SERIES_TYPES; - name_mapping::Union{Nothing, Dict{Tuple{String, String}, String}}=nothing, - scaling_factor_multiplier_mapping::Union{Nothing, Dict{String, String}}=nothing, + dst::SupportedTimeSeriesTypes, + src::SupportedTimeSeriesTypes; + name_mapping::Union{Nothing, Dict{Tuple{String, String}, String}} = nothing, + scaling_factor_multiplier_mapping::Union{Nothing, Dict{String, String}} = nothing, ) storage = _get_time_series_storage(dst) if isnothing(storage) @@ -504,17 +504,17 @@ function copy_time_series!( end end -function get_time_series_keys(component::SUPPORTED_TIME_SERIES_TYPES) +function get_time_series_keys(component::SupportedTimeSeriesTypes) return keys(get_time_series_container(component).data) end -function list_time_series_metadata(component::SUPPORTED_TIME_SERIES_TYPES) +function list_time_series_metadata(component::SupportedTimeSeriesTypes) return collect(values(get_time_series_container(component).data)) end function get_time_series_names( ::Type{T}, - component::SUPPORTED_TIME_SERIES_TYPES, + component::SupportedTimeSeriesTypes, ) where {T <: TimeSeriesData} return get_time_series_names( time_series_data_to_metadata(T), @@ -522,7 +522,7 @@ function get_time_series_names( ) end -function get_num_time_series(component::SUPPORTED_TIME_SERIES_TYPES) +function get_num_time_series(component::SupportedTimeSeriesTypes) container = get_time_series_container(component) if isnothing(container) return (0, 0) @@ -543,7 +543,7 @@ function get_num_time_series(component::SUPPORTED_TIME_SERIES_TYPES) return (static_ts_count, forecast_count) end -function get_num_time_series_by_type(component::SUPPORTED_TIME_SERIES_TYPES) +function get_num_time_series_by_type(component::SupportedTimeSeriesTypes) counts = Dict{String, Int}() container = get_time_series_container(component) if isnothing(container) @@ -563,14 +563,14 @@ function get_num_time_series_by_type(component::SUPPORTED_TIME_SERIES_TYPES) end function get_time_series( - component::SUPPORTED_TIME_SERIES_TYPES, + component::SupportedTimeSeriesTypes, time_series::TimeSeriesData, ) storage = _get_time_series_storage(component) return get_time_series(storage, get_time_series_uuid(time_series)) end -function get_time_series_uuids(component::SUPPORTED_TIME_SERIES_TYPES) +function get_time_series_uuids(component::SupportedTimeSeriesTypes) container = get_time_series_container(component) return [ @@ -581,10 +581,10 @@ end function attach_time_series_and_serialize!( data::SystemData, - component::SUPPORTED_TIME_SERIES_TYPES, + component::SupportedTimeSeriesTypes, ts_metadata::T, ts::TimeSeriesData; - skip_if_present=false, + skip_if_present = false, ) where {T <: TimeSeriesMetadata} check_add_time_series(data.time_series_params, ts) check_read_only(data.time_series_storage) @@ -599,7 +599,7 @@ function attach_time_series_and_serialize!( get_name(ts_metadata), ts, ) - add_time_series!(component, ts_metadata, skip_if_present=skip_if_present) + add_time_series!(component, ts_metadata; skip_if_present = skip_if_present) # Order is important. Set this last in case exceptions are thrown at previous steps. 
set_parameters!(data.time_series_params, ts) return diff --git a/src/time_series_parameters.jl b/src/time_series_parameters.jl index 43747031c..2bf6e79da 100644 --- a/src/time_series_parameters.jl +++ b/src/time_series_parameters.jl @@ -10,10 +10,10 @@ mutable struct ForecastParameters <: InfrastructureSystemsType end function ForecastParameters(; - horizon=UNINITIALIZED_LENGTH, - initial_timestamp=UNINITIALIZED_DATETIME, - interval=UNINITIALIZED_PERIOD, - count=UNINITIALIZED_LENGTH, + horizon = UNINITIALIZED_LENGTH, + initial_timestamp = UNINITIALIZED_DATETIME, + interval = UNINITIALIZED_PERIOD, + count = UNINITIALIZED_LENGTH, ) return ForecastParameters(horizon, initial_timestamp, interval, count) end @@ -87,22 +87,22 @@ mutable struct TimeSeriesParameters <: InfrastructureSystemsType end function TimeSeriesParameters(; - resolution=UNINITIALIZED_PERIOD, - forecast_params=ForecastParameters(), + resolution = UNINITIALIZED_PERIOD, + forecast_params = ForecastParameters(), ) return TimeSeriesParameters(resolution, forecast_params) end function TimeSeriesParameters(ts::StaticTimeSeries) - return TimeSeriesParameters(resolution=get_resolution(ts)) + return TimeSeriesParameters(; resolution = get_resolution(ts)) end function TimeSeriesParameters(ts::Forecast) - forecast_params = ForecastParameters( - count=get_count(ts), - horizon=get_horizon(ts), - initial_timestamp=get_initial_timestamp(ts), - interval=get_interval(ts), + forecast_params = ForecastParameters(; + count = get_count(ts), + horizon = get_horizon(ts), + initial_timestamp = get_initial_timestamp(ts), + interval = get_interval(ts), ) return TimeSeriesParameters(get_resolution(ts), forecast_params) end @@ -131,11 +131,11 @@ function TimeSeriesParameters( Dates.Millisecond(last_initial_time - initial_timestamp) / Dates.Millisecond(interval) + 1 end - fparams = ForecastParameters( - horizon=horizon, - initial_timestamp=initial_timestamp, - interval=interval, - count=count, + fparams = ForecastParameters(; + horizon = horizon, + initial_timestamp = initial_timestamp, + interval = interval, + count = count, ) return TimeSeriesParameters(resolution, fparams) end diff --git a/src/time_series_parser.jl b/src/time_series_parser.jl index 7b21aa5dc..ccfa5f31d 100644 --- a/src/time_series_parser.jl +++ b/src/time_series_parser.jl @@ -40,7 +40,7 @@ mutable struct TimeSeriesFileMetadata end function TimeSeriesFileMetadata(; - simulation="", + simulation = "", category, component_name, name, @@ -50,8 +50,8 @@ function TimeSeriesFileMetadata(; percentiles, time_series_type_module, time_series_type, - scaling_factor_multiplier=nothing, - scaling_factor_multiplier_module=nothing, + scaling_factor_multiplier = nothing, + scaling_factor_multiplier_module = nothing, ) return TimeSeriesFileMetadata( simulation, @@ -91,23 +91,23 @@ function read_time_series_file_metadata(file_path::AbstractString) push!( metadata, TimeSeriesFileMetadata(; - simulation=simulation, - category=item["category"], - component_name=item["component_name"], - name=item["name"], - normalization_factor=normalization_factor, - data_file=item["data_file"], - resolution=parsed_resolution, + simulation = simulation, + category = item["category"], + component_name = item["component_name"], + name = item["name"], + normalization_factor = normalization_factor, + data_file = item["data_file"], + resolution = parsed_resolution, # Use default values until CDM data is updated. 
- percentiles=get(item, "percentiles", []), - time_series_type_module=get( + percentiles = get(item, "percentiles", []), + time_series_type_module = get( item, "module", "InfrastructureSystems", ), - time_series_type=get(item, "type", "SingleTimeSeries"), - scaling_factor_multiplier=scaling_factor_multiplier, - scaling_factor_multiplier_module=scaling_factor_multiplier_module, + time_series_type = get(item, "type", "SingleTimeSeries"), + scaling_factor_multiplier = scaling_factor_multiplier, + scaling_factor_multiplier_module = scaling_factor_multiplier_module, ), ) end @@ -125,18 +125,18 @@ function read_time_series_file_metadata(file_path::AbstractString) push!( metadata, TimeSeriesFileMetadata(; - simulation=simulation, - category=row.category, - component_name=row.component_name, - name=row.name, - resolution=Dates.Millisecond(Dates.Second(row.resolution)), - normalization_factor=row.normalization_factor, - data_file=row.data_file, - percentiles=[], - time_series_type_module=get(row, :module, "InfrastructureSystems"), - time_series_type=get(row, :type, "SingleTimeSeries"), - scaling_factor_multiplier=scaling_factor_multiplier, - scaling_factor_multiplier_module=scaling_factor_multiplier_module, + simulation = simulation, + category = row.category, + component_name = row.component_name, + name = row.name, + resolution = Dates.Millisecond(Dates.Second(row.resolution)), + normalization_factor = row.normalization_factor, + data_file = row.data_file, + percentiles = [], + time_series_type_module = get(row, :module, "InfrastructureSystems"), + time_series_type = get(row, :type, "SingleTimeSeries"), + scaling_factor_multiplier = scaling_factor_multiplier, + scaling_factor_multiplier_module = scaling_factor_multiplier_module, ), ) end @@ -210,7 +210,7 @@ struct TimeSeriesParsedInfo percentiles, file_path, resolution, - scaling_factor_multiplier=nothing, + scaling_factor_multiplier = nothing, ) return new( simulation, @@ -283,7 +283,7 @@ end function make_time_array(raw::RawTimeSeries, component_name, resolution) series_length = raw.length ini_time = raw.initial_time - timestamps = range(ini_time; length=series_length, step=resolution) + timestamps = range(ini_time; length = series_length, step = resolution) return TimeSeries.TimeArray(timestamps, raw.data[component_name]) end diff --git a/src/time_series_storage.jl b/src/time_series_storage.jl index 4fdb1ed81..c00b3a48c 100644 --- a/src/time_series_storage.jl +++ b/src/time_series_storage.jl @@ -38,26 +38,27 @@ struct CompressionSettings end function CompressionSettings(; - enabled=DEFAULT_COMPRESSION, - type=CompressionTypes.DEFLATE, - level=3, - shuffle=true, + enabled = DEFAULT_COMPRESSION, + type = CompressionTypes.DEFLATE, + level = 3, + shuffle = true, ) return CompressionSettings(enabled, type, level, shuffle) end function make_time_series_storage(; - in_memory=false, - filename=nothing, - directory=nothing, - compression=CompressionSettings(), + in_memory = false, + filename = nothing, + directory = nothing, + compression = CompressionSettings(), ) if in_memory storage = InMemoryTimeSeriesStorage() elseif !isnothing(filename) - storage = Hdf5TimeSeriesStorage(; filename=filename, compression=compression) + storage = Hdf5TimeSeriesStorage(; filename = filename, compression = compression) else - storage = Hdf5TimeSeriesStorage(true; directory=directory, compression=compression) + storage = + Hdf5TimeSeriesStorage(true; directory = directory, compression = compression) end return storage diff --git a/src/utils/flatten_iterator_wrapper.jl 
b/src/utils/flatten_iterator_wrapper.jl index efa39c4e4..9028bb229 100644 --- a/src/utils/flatten_iterator_wrapper.jl +++ b/src/utils/flatten_iterator_wrapper.jl @@ -14,7 +14,7 @@ end Base.@propagate_inbounds function Base.iterate( iter::FlattenIteratorWrapper{T, I}, - state=(), + state = (), ) where {T, I} Base.iterate(iter.iter, state) end diff --git a/src/utils/generate_struct_files.jl b/src/utils/generate_struct_files.jl index 9eadea661..64f1b625d 100644 --- a/src/utils/generate_struct_files.jl +++ b/src/utils/generate_struct_files.jl @@ -45,14 +45,14 @@ Construct a StructField for code auto-generation purposes. function StructField(; name, data_type, - default=nothing, - comment="", - needs_conversion=false, - exclude_setter=false, - valid_range=nothing, - validation_action=nothing, - null_value=nothing, - internal_default=nothing, + default = nothing, + comment = "", + needs_conversion = false, + exclude_setter = false, + valid_range = nothing, + validation_action = nothing, + null_value = nothing, + internal_default = nothing, ) if !isnothing(valid_range) && valid_range isa Dict diff = setdiff(keys(valid_range), ("min", "max")) @@ -111,9 +111,9 @@ Construct a StructDefinition for code auto-generation purposes. function StructDefinition(; struct_name, fields, - supertype=nothing, - docstring="", - is_component=true, + supertype = nothing, + docstring = "", + is_component = true, ) if supertype isa DataType supertype = string(DataType) @@ -123,12 +123,12 @@ function StructDefinition(; if !any(x -> endswith(x.data_type, "InfrastructureSystemsInternal"), fields) push!( fields, - StructField( - name="internal", - data_type="InfrastructureSystemsInternal", - comment="Internal reference, do not modify.", - internal_default="InfrastructureSystemsInternal()", - exclude_setter=true, + StructField(; + name = "internal", + data_type = "InfrastructureSystemsInternal", + comment = "Internal reference, do not modify.", + internal_default = "InfrastructureSystemsInternal()", + exclude_setter = true, ), ) @info "Added InfrastructureSystemsInternal to component struct $struct_name." @@ -166,13 +166,13 @@ Refer to `StructDefinition` and `StructField` for descriptions of the available """ function generate_struct_file( definition::StructDefinition; - filename=nothing, - output_directory=nothing, + filename = nothing, + output_directory = nothing, ) generate_struct_files( - [definition], - filename=filename, - output_directory=output_directory, + [definition]; + filename = filename, + output_directory = output_directory, ) end @@ -190,7 +190,7 @@ Refer to `StructDefinition` and `StructField` for descriptions of the available - `output_directory::AbstractString`: Generate the files in this directory. 
Defaults to `src/generated` """ -function generate_struct_files(definitions; filename=nothing, output_directory=nothing) +function generate_struct_files(definitions; filename = nothing, output_directory = nothing) if isnothing(filename) filename = joinpath( dirname(Base.find_package("InfrastructureSystems")), @@ -224,7 +224,7 @@ function generate_struct_files(definitions; filename=nothing, output_directory=n end open(filename, "w") do io - JSON3.pretty(io, data, JSON3.AlignmentContext(indent=2)) + JSON3.pretty(io, data, JSON3.AlignmentContext(; indent = 2)) end @info "Added $(length(definitions)) structs to $filename" diff --git a/src/utils/generate_structs.jl b/src/utils/generate_structs.jl index 32f108f0a..05d74cf6d 100644 --- a/src/utils/generate_structs.jl +++ b/src/utils/generate_structs.jl @@ -90,7 +90,7 @@ function read_json_data(filename::String) end end -function generate_structs(directory, data::Vector; print_results=true) +function generate_structs(directory, data::Vector; print_results = true) struct_names = Vector{String}() unique_accessor_functions = Set{String}() unique_setter_functions = Set{String}() @@ -232,7 +232,7 @@ end function generate_structs( input_file::AbstractString, output_directory::AbstractString; - print_results=true, + print_results = true, ) # Include each generated file. if !isdir(output_directory) @@ -240,7 +240,7 @@ function generate_structs( end data = read_json_data(input_file) - generate_structs(output_directory, data, print_results=print_results) + generate_structs(output_directory, data; print_results = print_results) return end @@ -251,11 +251,11 @@ from descriptor_file. function test_generated_structs(descriptor_file, existing_dir) output_dir = "tmp-test-generated-structs" if isdir(output_dir) - rm(output_dir; recursive=true) + rm(output_dir; recursive = true) end mkdir(output_dir) - generate_structs(descriptor_file, output_dir; print_results=false) + generate_structs(descriptor_file, output_dir; print_results = false) matched = true for (file1, file2) in zip(readdir(output_dir), readdir(existing_dir)) @@ -275,6 +275,6 @@ function test_generated_structs(descriptor_file, existing_dir) end end - rm(output_dir; recursive=true) + rm(output_dir; recursive = true) return matched end diff --git a/src/utils/logging.jl b/src/utils/logging.jl index d9513bb7c..0d8345598 100644 --- a/src/utils/logging.jl +++ b/src/utils/logging.jl @@ -61,11 +61,11 @@ Returns a summary of log event counts by level. function report_log_summary(tracker::LogEventTracker) text = "\nLog message summary:\n" # Order by criticality. - for level in sort!(collect(keys(tracker.events)), rev=true) + for level in sort!(collect(keys(tracker.events)); rev = true) num_events = length(tracker.events[level]) text *= "\n$num_events $level events:\n" for event in - sort!(collect(get_log_events(tracker, level)), by=x -> x.count, rev=true) + sort!(collect(get_log_events(tracker, level)); by = x -> x.count, rev = true) text *= " count=$(event.count) at $(event.file):$(event.line)\n" text *= " example message=\"$(event.message)\"\n" if event.suppressed > 0 @@ -144,8 +144,8 @@ function LoggingConfiguration(config_filename) return LoggingConfiguration(; Dict(Symbol(k) => v for (k, v) in config)...) 
end -function make_logging_config_file(filename="logging_config.toml"; force=false) - cp(SIIP_LOGGING_CONFIG_FILENAME, filename, force=force) +function make_logging_config_file(filename = "logging_config.toml"; force = false) + cp(SIIP_LOGGING_CONFIG_FILENAME, filename; force = force) println("Created $filename") return end @@ -160,7 +160,7 @@ LogEventTracker() LogEventTracker((Logging.Info, Logging.Warn, Logging.Error)) ``` """ -function LogEventTracker(levels=(Logging.Info, Logging.Warn, Logging.Error)) +function LogEventTracker(levels = (Logging.Info, Logging.Warn, Logging.Error)) return LogEventTracker(Dict(l => Dict{Symbol, LogEvent}() for l in levels)) end @@ -201,28 +201,28 @@ logger = configure_logging(filename="mylog.txt") ``` """ function configure_logging(; - console=true, - console_stream=stderr, - console_level=Logging.Error, - progress=true, - file=true, - filename="log.txt", - file_level=Logging.Info, - file_mode="w+", - tracker=LogEventTracker(), - set_global=true, + console = true, + console_stream = stderr, + console_level = Logging.Error, + progress = true, + file = true, + filename = "log.txt", + file_level = Logging.Info, + file_mode = "w+", + tracker = LogEventTracker(), + set_global = true, ) - config = LoggingConfiguration( - console=console, - console_stream=console_stream, - console_level=console_level, - progress=progress, - file=file, - filename=filename, - file_level=file_level, - file_mode=file_mode, - tracker=tracker, - set_global=set_global, + config = LoggingConfiguration(; + console = console, + console_stream = console_stream, + console_level = console_level, + progress = progress, + file = file, + filename = filename, + file_level = file_level, + file_mode = file_mode, + tracker = tracker, + set_global = set_global, ) return configure_logging(config) end @@ -288,7 +288,7 @@ function Logging.handle_message( id, file, line; - maxlog=nothing, + maxlog = nothing, kwargs..., ) return Logging.handle_message( @@ -300,7 +300,7 @@ function Logging.handle_message( id, file, line; - maxlog=maxlog, + maxlog = maxlog, kwargs..., ) end @@ -327,7 +327,12 @@ open_file_logger("log.txt", Logging.Info) do logger end ``` """ -function open_file_logger(func::Function, filename::String, level=Logging.Info, mode="w+") +function open_file_logger( + func::Function, + filename::String, + level = Logging.Info, + mode = "w+", +) stream = open(filename, mode) try logger = FileLogger(stream, level) @@ -511,8 +516,8 @@ function Logging.handle_message( id, file, line; - maxlog=nothing, - _suppression_period=nothing, + maxlog = nothing, + _suppression_period = nothing, kwargs..., ) suppressed, num_suppressed = @@ -539,7 +544,7 @@ function Logging.handle_message( id, file, line; - maxlog=maxlog, + maxlog = maxlog, kwargs..., ) end diff --git a/src/utils/print.jl b/src/utils/print.jl index 9bfdb4359..8a78e9e00 100644 --- a/src/utils/print.jl +++ b/src/utils/print.jl @@ -30,7 +30,7 @@ function Base.show(io::IO, ::MIME"text/plain", container::InfrastructureSystemsC println(io, "Num components: $num_components") if num_components > 0 println(io) - show_components_table(io, container, backend=Val(:auto)) + show_components_table(io, container; backend = Val(:auto)) end end @@ -39,7 +39,7 @@ function Base.show(io::IO, ::MIME"text/html", components::Components) println(io, "

<h2>Components</h2>") println(io, "<p><b>Num components</b>: $num_components</p>
") if num_components > 0 - show_components_table(io, components, backend=Val(:html), standalone=false) + show_components_table(io, components; backend = Val(:html), standalone = false) end end @@ -71,14 +71,14 @@ end function Base.show(io::IO, ::MIME"text/plain", data::SystemData) show(io, MIME"text/plain"(), data.components) println(io, "\n") - show_time_series_data(io, data, backend=Val(:auto)) + show_time_series_data(io, data; backend = Val(:auto)) show(io, data.time_series_params) end function Base.show(io::IO, ::MIME"text/html", data::SystemData) show(io, MIME"text/html"(), data.components) println(io, "\n") - show_time_series_data(io, data, backend=Val(:html), standalone=false) + show_time_series_data(io, data; backend = Val(:html), standalone = false) show(io, data.time_series_params) end @@ -113,9 +113,9 @@ function show_time_series_data(io::IO, data::SystemData; kwargs...) PrettyTables.pretty_table( io, table; - header=header, - title="Time Series Summary", - alignment=:l, + header = header, + title = "Time Series Summary", + alignment = :l, kwargs..., ) return @@ -202,7 +202,7 @@ function show_components_table(io::IO, components::Components; kwargs...) data = Array{Any, 2}(undef, length(components.data), length(header)) type_names = [(strip_module_name(string(x)), x) for x in keys(components.data)] - sort!(type_names, by=x -> x[1]) + sort!(type_names; by = x -> x[1]) for (i, (type_name, type)) in enumerate(type_names) vals = components.data[type] has_sts = false @@ -224,7 +224,7 @@ function show_components_table(io::IO, components::Components; kwargs...) data[i, 4] = has_forecasts end - PrettyTables.pretty_table(io, data; header=header, alignment=:l, kwargs...) + PrettyTables.pretty_table(io, data; header = header, alignment = :l, kwargs...) return end @@ -232,7 +232,7 @@ function show_components( io::IO, components::Components, component_type::Type{<:InfrastructureSystemsComponent}, - additional_columns::Union{Dict, Vector}=[]; + additional_columns::Union{Dict, Vector} = []; kwargs..., ) if !isconcretetype(component_type) @@ -293,7 +293,14 @@ function show_components( end end - PrettyTables.pretty_table(io, data; header=header, title=title, alignment=:l, kwargs...) + PrettyTables.pretty_table( + io, + data; + header = header, + title = title, + alignment = :l, + kwargs..., + ) return end diff --git a/src/utils/recorder_events.jl b/src/utils/recorder_events.jl index a36ddba4f..8d828a733 100644 --- a/src/utils/recorder_events.jl +++ b/src/utils/recorder_events.jl @@ -71,7 +71,12 @@ Construct a Recorder. - `mode = "w"`: Only used when io is nothing. - `directory = "."`: Only used when io is nothing. """ -function Recorder(name::Symbol; io::Union{Nothing, IO}=nothing, mode="w", directory=".") +function Recorder( + name::Symbol; + io::Union{Nothing, IO} = nothing, + mode = "w", + directory = ".", +) if isnothing(io) filename = joinpath(directory, string(name) * ".log") io = open(filename, mode) @@ -102,19 +107,19 @@ handle. """ function register_recorder!( name::Symbol; - io::Union{Nothing, IO}=nothing, - mode="w", - directory=".", + io::Union{Nothing, IO} = nothing, + mode = "w", + directory = ".", ) unregister_recorder!(name) - g_recorders[name] = Recorder(name; io=io, mode=mode, directory=directory) + g_recorders[name] = Recorder(name; io = io, mode = mode, directory = directory) @debug "registered new Recorder" _group = LOG_GROUP_RECORDER name end """ Unregister the recorder with this name and stop recording events. 
""" -function unregister_recorder!(name::Symbol; close_io=true) +function unregister_recorder!(name::Symbol; close_io = true) if haskey(g_recorders, name) @debug "unregister Recorder" _group = LOG_GROUP_RECORDER name recorder = pop!(g_recorders, name) @@ -161,7 +166,7 @@ Return the events of type T in filename. function list_recorder_events( ::Type{T}, filename::AbstractString, - filter_func::Union{Nothing, Function}=nothing, + filter_func::Union{Nothing, Function} = nothing, ) where {T <: AbstractRecorderEvent} events = Vector{T}() for line in eachline(filename) @@ -207,7 +212,7 @@ show_recorder_events(TestEvent, test_recorder.log, x -> x.val2 > 2) function show_recorder_events( ::Type{T}, filename::AbstractString, - filter_func::Union{Nothing, Function}=nothing; + filter_func::Union{Nothing, Function} = nothing; kwargs..., ) where {T <: AbstractRecorderEvent} return show_recorder_events(stdout, T, filename, filter_func; kwargs...) @@ -217,7 +222,7 @@ function show_recorder_events( io::IO, ::Type{T}, filename::AbstractString, - filter_func::Union{Nothing, Function}=nothing; + filter_func::Union{Nothing, Function} = nothing; kwargs..., ) where {T <: AbstractRecorderEvent} events = list_recorder_events(T, filename, filter_func) @@ -233,7 +238,7 @@ end function show_recorder_events( io::IO, events::Vector{T}; - exclude_columns=Set{String}(), + exclude_columns = Set{String}(), kwargs..., ) where {T <: AbstractRecorderEvent} if isempty(events) @@ -267,6 +272,6 @@ function show_recorder_events( end end - PrettyTables.pretty_table(io, data; header=header, kwargs...) + PrettyTables.pretty_table(io, data; header = header, kwargs...) return end diff --git a/src/utils/test.jl b/src/utils/test.jl index af6a649e4..d5570e336 100644 --- a/src/utils/test.jl +++ b/src/utils/test.jl @@ -107,9 +107,14 @@ end function TestSupplemental(; value::Float64 = 0.0, - component_uuids::Set{UUIDs.UUID}=Set{UUIDs.UUID}(), + component_uuids::Set{UUIDs.UUID} = Set{UUIDs.UUID}(), ) - return TestSupplemental(value, component_uuids, InfrastructureSystemsInternal(), TimeSeriesContainer()) + return TestSupplemental( + value, + component_uuids, + InfrastructureSystemsInternal(), + TimeSeriesContainer(), + ) end get_value(attr::TestSupplemental) = attr.attr_json diff --git a/src/validation.jl b/src/validation.jl index 6c02e2437..e3fd8c509 100644 --- a/src/validation.jl +++ b/src/validation.jl @@ -110,7 +110,7 @@ end function get_limits(valid_range::String, ist_struct::InfrastructureSystemsType) # Gets min and max values from activepowerlimits for activepower, etc. - function recur(d, a, i=1) + function recur(d, a, i = 1) if i <= length(a) d = getfield(d, Symbol(a[i])) recur(d, a, i + 1) @@ -123,7 +123,7 @@ function get_limits(valid_range::String, ist_struct::InfrastructureSystemsType) vr = recur(ist_struct, split(valid_range, ".")) if isnothing(vr) - limits = (min=nothing, max=nothing) + limits = (min = nothing, max = nothing) else limits = get_limits(vr, ist_struct) end @@ -134,7 +134,7 @@ end function get_limits(valid_range::Dict, unused::InfrastructureSystemsType) # Gets min and max value defined for a field, # e.g. "valid_range": {"min":-1.571, "max":1.571}. - return (min=valid_range["min"], max=valid_range["max"]) + return (min = valid_range["min"], max = valid_range["max"]) end function get_limits( @@ -143,7 +143,7 @@ function get_limits( ) # Gets min and max value defined for a field, # e.g. "valid_range": {"min":-1.571, "max":1.571}. 
- return (min=valid_range.min, max=valid_range.max) + return (min = valid_range.min, max = valid_range.max) end function validate_range(::String, valid_info::ValidationInfo, field_value) diff --git a/test/common.jl b/test/common.jl index 9b4130a66..cebe7e888 100644 --- a/test/common.jl +++ b/test/common.jl @@ -1,6 +1,6 @@ -function create_system_data(; with_time_series=false, time_series_in_memory=false) - data = IS.SystemData(; time_series_in_memory=time_series_in_memory) +function create_system_data(; with_time_series = false, time_series_in_memory = false) + data = IS.SystemData(; time_series_in_memory = time_series_in_memory) name = "Component1" component = IS.TestComponent(name, 5) @@ -20,8 +20,8 @@ function create_system_data(; with_time_series=false, time_series_in_memory=fals return data end -function create_system_data_shared_time_series(; time_series_in_memory=false) - data = IS.SystemData(; time_series_in_memory=time_series_in_memory) +function create_system_data_shared_time_series(; time_series_in_memory = false) + data = IS.SystemData(; time_series_in_memory = time_series_in_memory) name1 = "Component1" name2 = "Component2" @@ -30,7 +30,7 @@ function create_system_data_shared_time_series(; time_series_in_memory=false) IS.add_component!(data, component1) IS.add_component!(data, component2) - ts = IS.SingleTimeSeries(name="val", data=create_time_array()) + ts = IS.SingleTimeSeries(; name = "val", data = create_time_array()) IS.add_time_series!(data, component1, ts) IS.add_time_series!(data, component2, ts) diff --git a/test/runtests.jl b/test/runtests.jl index 2c8adfee1..c17ed87cc 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -82,10 +82,10 @@ function run_tests() if logging_config_filename !== nothing config = IS.LoggingConfiguration(logging_config_filename) else - config = IS.LoggingConfiguration( - filename=LOG_FILE, - file_level=Logging.Info, - console_level=Logging.Error, + config = IS.LoggingConfiguration(; + filename = LOG_FILE, + file_level = Logging.Info, + console_level = Logging.Error, ) end console_logger = TerminalLogger(config.console_stream, config.console_level) diff --git a/test/test_deprecations.jl b/test/test_deprecations.jl index 5f7c2ef08..aecece456 100644 --- a/test/test_deprecations.jl +++ b/test/test_deprecations.jl @@ -2,5 +2,5 @@ data = IS.SystemData() component = IS.TestComponent("component1", 5) - @test_deprecated IS.add_component!(data, component; deserialization_in_progress=true) + @test_deprecated IS.add_component!(data, component; deserialization_in_progress = true) end diff --git a/test/test_generate_structs.jl b/test/test_generate_structs.jl index e8fc992ce..5abd0a2dd 100644 --- a/test/test_generate_structs.jl +++ b/test/test_generate_structs.jl @@ -9,21 +9,21 @@ end output_directory = mktempdir() descriptor_file = joinpath(output_directory, "structs.json") cp(orig_descriptor_file, descriptor_file) - new_struct = IS.StructDefinition( - struct_name="MyComponent", - docstring="Custom component", - supertype="InfrastructureSystemsComponent", - fields=[ - IS.StructField(name="val1", data_type=Float64), - IS.StructField(name="val2", data_type=Int), - IS.StructField(name="val3", data_type=String), + new_struct = IS.StructDefinition(; + struct_name = "MyComponent", + docstring = "Custom component", + supertype = "InfrastructureSystemsComponent", + fields = [ + IS.StructField(; name = "val1", data_type = Float64), + IS.StructField(; name = "val2", data_type = Int), + IS.StructField(; name = "val3", data_type = String), ], ) 
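# Rough sketch of the struct that generate_struct_file emits for the definition
# above (layout assumed for illustration, not copied from actual generator
# output); note the automatically appended `internal` field reported by the
# "Added InfrastructureSystemsInternal" log message.
mutable struct MyComponent <: InfrastructureSystemsComponent
    val1::Float64
    val2::Int
    val3::String
    internal::InfrastructureSystemsInternal
end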
redirect_stdout(devnull) do IS.generate_struct_file( - new_struct, - filename=descriptor_file, - output_directory=output_directory, + new_struct; + filename = descriptor_file, + output_directory = output_directory, ) end data = open(descriptor_file, "r") do io @@ -36,18 +36,24 @@ end @testset "Test StructField errors" begin @test_throws ErrorException IS.StructDefinition( - struct_name="MyStruct", - fields=[IS.StructField(name="val", data_type=Float64, valid_range="invalid_field")], + struct_name = "MyStruct", + fields = [ + IS.StructField(; + name = "val", + data_type = Float64, + valid_range = "invalid_field", + ), + ], ) @test_throws ErrorException IS.StructField( - name="val", - data_type=Float64, - valid_range=Dict("min" => 0, "invalid" => 100), + name = "val", + data_type = Float64, + valid_range = Dict("min" => 0, "invalid" => 100), ) @test_throws ErrorException IS.StructField( - name="val", - data_type=Float64, - valid_range=Dict("min" => 0, "max" => 100), - validation_action="invalid", + name = "val", + data_type = Float64, + valid_range = Dict("min" => 0, "max" => 100), + validation_action = "invalid", ) end diff --git a/test/test_lazy_dict_from_iterator.jl b/test/test_lazy_dict_from_iterator.jl index 7cd990a23..1f90347f3 100644 --- a/test/test_lazy_dict_from_iterator.jl +++ b/test/test_lazy_dict_from_iterator.jl @@ -16,7 +16,7 @@ end container = IS.LazyDictFromIterator(Int, TestItem, iter, get_field) # Run through twice because the items must persist in the dict. - for i in range(1, length=2) + for i in range(1; length = 2) for x in 1:10 @test get(container, x) isa TestItem end diff --git a/test/test_logging.jl b/test/test_logging.jl index 761dac30f..2813d7a88 100644 --- a/test/test_logging.jl +++ b/test/test_logging.jl @@ -19,7 +19,7 @@ TEST_MSG = "test log message" IS.LogEvent("file", 14, :id, TEST_MSG, Logging.Error), ) - for i in range(1, length=2) + for i in range(1; length = 2) for event in events increment_count!(tracker, event, false) end @@ -60,7 +60,7 @@ end ) with_logger(logger) do - for i in range(1, length=2) + for i in range(1; length = 2) @debug TEST_MSG @info TEST_MSG @warn TEST_MSG @@ -82,10 +82,10 @@ end # Verify logging to a file. logfile = "testlog.txt" logger = IS.configure_logging(; - file=true, - filename=logfile, - file_level=Logging.Info, - set_global=false, + file = true, + filename = logfile, + file_level = Logging.Info, + set_global = false, ) with_logger(logger) do @info TEST_MSG @@ -103,12 +103,12 @@ end # Verify logging with no file. logger = IS.configure_logging(; - console=true, - file=false, - console_stream=devnull, - filename=nothing, - file_level=Logging.Info, - set_global=false, + console = true, + file = false, + console_stream = devnull, + filename = nothing, + file_level = Logging.Info, + set_global = false, ) with_logger(logger) do @error TEST_MSG @@ -122,13 +122,13 @@ end # Verify disabling of tracker. 
logger = IS.configure_logging(; - console=true, - file=false, - console_stream=devnull, - filename=logfile, - file_level=Logging.Info, - set_global=false, - tracker=nothing, + console = true, + file = false, + console_stream = devnull, + filename = logfile, + file_level = Logging.Info, + set_global = false, + tracker = nothing, ) with_logger(logger) do @error TEST_MSG @@ -138,13 +138,13 @@ end # Verify setting of global logger orig_logger = global_logger() logger = IS.configure_logging(; - console=true, - file=false, - console_stream=devnull, - filename=logfile, - file_level=Logging.Info, - set_global=true, - tracker=nothing, + console = true, + file = false, + console_stream = devnull, + filename = logfile, + file_level = Logging.Info, + set_global = true, + tracker = nothing, ) @error TEST_MSG @test orig_logger != global_logger() @@ -196,7 +196,7 @@ end try redirect_stdout(devnull) do IS.make_logging_config_file(filename) - return IS.make_logging_config_file(filename, force=true) + return IS.make_logging_config_file(filename; force = true) end @test IS.LoggingConfiguration(filename) isa IS.LoggingConfiguration finally @@ -305,12 +305,12 @@ end @testset "Test progress logger" begin io = IOBuffer() - logger = IS.configure_logging( - console_stream=io, - console=false, - file=false, - progress=true, - set_global=false, + logger = IS.configure_logging(; + console_stream = io, + console = false, + file = false, + progress = true, + set_global = false, ) with_logger(logger) do @progress for i in 1:5 @@ -324,8 +324,8 @@ end @testset "Test bad input" begin @test_throws ErrorException IS.configure_logging( - console=false, - file=false, - progress=false, + console = false, + file = false, + progress = false, ) end diff --git a/test/test_printing.jl b/test/test_printing.jl index ed80df597..4fc53c8ef 100644 --- a/test/test_printing.jl +++ b/test/test_printing.jl @@ -1,5 +1,5 @@ @testset "Test printing of the system and components" begin - sys = create_system_data(with_time_series=true, time_series_in_memory=true) + sys = create_system_data(; with_time_series = true, time_series_in_memory = true) io = IOBuffer() show(io, "text/plain", sys) text = String(take!(io)) @@ -8,7 +8,7 @@ end @testset "Test show_component_tables" begin - sys = create_system_data(with_time_series=true, time_series_in_memory=true) + sys = create_system_data(; with_time_series = true, time_series_in_memory = true) io = IOBuffer() IS.show_components(io, sys.components, IS.TestComponent) @test occursin("TestComponent", String(take!(io))) diff --git a/test/test_recorder.jl b/test/test_recorder.jl index 4e12196cd..050f8e75b 100644 --- a/test/test_recorder.jl +++ b/test/test_recorder.jl @@ -99,8 +99,8 @@ end IS.show_recorder_events( buf2, InfrastructureSystems.TestEvent, - filename, - exclude_columns=Set("timestamp"), + filename; + exclude_columns = Set("timestamp"), ) text = String(take!(buf1)) @test !occursin("timestamp", text) diff --git a/test/test_serialization.jl b/test/test_serialization.jl index 200d572b1..d91a65e87 100644 --- a/test/test_serialization.jl +++ b/test/test_serialization.jl @@ -1,5 +1,5 @@ -function validate_serialization(sys::IS.SystemData; time_series_read_only=false) +function validate_serialization(sys::IS.SystemData; time_series_read_only = false) #path, io = mktemp() # For some reason files aren't getting deleted when written to /tmp. Using current dir. 
filename = "test_system_serialization.json" @@ -9,7 +9,7 @@ function validate_serialization(sys::IS.SystemData; time_series_read_only=false) if isfile(filename) rm(filename) end - IS.prepare_for_serialization!(sys, filename; force=true) + IS.prepare_for_serialization!(sys, filename; force = true) data = IS.serialize(sys) open(filename, "w") do io return JSON3.write(io, data) @@ -41,15 +41,19 @@ function validate_serialization(sys::IS.SystemData; time_series_read_only=false) try cd(dirname(path)) sys2 = - IS.deserialize(IS.SystemData, data; time_series_read_only=time_series_read_only) + IS.deserialize( + IS.SystemData, + data; + time_series_read_only = time_series_read_only, + ) # Deserialization of components should be directed by the parent of SystemData. # There isn't one in IS, so perform the deserialization in the test code. for component in data["components"] type = IS.get_type_from_serialization_data(component) comp = IS.deserialize(type, component) - IS.add_component!(sys2, comp, allow_existing_time_series=true) + IS.add_component!(sys2, comp; allow_existing_time_series = true) end - return sys2, IS.compare_values(sys, sys2, compare_uuids=true) + return sys2, IS.compare_values(sys, sys2; compare_uuids = true) finally cd(orig) end @@ -57,7 +61,7 @@ end @testset "Test JSON serialization of system data" begin for in_memory in (true, false) - sys = create_system_data_shared_time_series(; time_series_in_memory=in_memory) + sys = create_system_data_shared_time_series(; time_series_in_memory = in_memory) _, result = validate_serialization(sys) @test result end @@ -71,19 +75,19 @@ end end @testset "Test JSON serialization of with read-only time series" begin - sys = create_system_data_shared_time_series(; time_series_in_memory=false) - sys2, result = validate_serialization(sys; time_series_read_only=true) + sys = create_system_data_shared_time_series(; time_series_in_memory = false) + sys2, result = validate_serialization(sys; time_series_read_only = true) @test result end @testset "Test JSON serialization of with mutable time series" begin - sys = create_system_data_shared_time_series(; time_series_in_memory=false) - sys2, result = validate_serialization(sys; time_series_read_only=false) + sys = create_system_data_shared_time_series(; time_series_in_memory = false) + sys2, result = validate_serialization(sys; time_series_read_only = false) @test result end @testset "Test JSON serialization with no time series" begin - sys = create_system_data(with_time_series=false) + sys = create_system_data(; with_time_series = false) sys2, result = validate_serialization(sys) @test result end @@ -103,13 +107,13 @@ end @testset "Test pretty-print JSON IO" begin component = IS.TestComponent("Component1", 2) io = IOBuffer() - IS.to_json(io, component, pretty=false) + IS.to_json(io, component; pretty = false) text = String(take!(io)) @test !occursin(" ", text) IS.deserialize(IS.TestComponent, JSON3.read(text, Dict)) == component io = IOBuffer() - IS.to_json(io, component, pretty=true) + IS.to_json(io, component; pretty = true) text = String(take!(io)) @test occursin(" ", text) IS.deserialize(IS.TestComponent, JSON3.read(text, Dict)) == component diff --git a/test/test_time_series.jl b/test/test_time_series.jl index 1f24111ec..a7654576f 100644 --- a/test/test_time_series.jl +++ b/test/test_time_series.jl @@ -12,9 +12,9 @@ horizon = 24 data = SortedDict(initial_time => ones(horizon), other_time => ones(horizon)) - forecast = IS.Deterministic(data=data, name=name, resolution=resolution) + forecast = 
IS.Deterministic(; data = data, name = name, resolution = resolution) IS.add_time_series!(sys, component, forecast) - var1 = IS.get_time_series(IS.Deterministic, component, name; start_time=initial_time) + var1 = IS.get_time_series(IS.Deterministic, component, name; start_time = initial_time) @test length(var1) == 2 @test IS.get_horizon(var1) == horizon @test IS.get_initial_timestamp(var1) == initial_time @@ -23,27 +23,27 @@ IS.Deterministic, component, name; - start_time=initial_time, - count=2, + start_time = initial_time, + count = 2, ) @test length(var2) == 2 - var3 = IS.get_time_series(IS.Deterministic, component, name; start_time=other_time) + var3 = IS.get_time_series(IS.Deterministic, component, name; start_time = other_time) @test length(var2) == 2 # Throws errors @test_throws ArgumentError IS.get_time_series( IS.Deterministic, component, name; - start_time=initial_time, - count=3, + start_time = initial_time, + count = 3, ) @test_throws ArgumentError IS.get_time_series( IS.Deterministic, component, name; - start_time=other_time, - count=2, + start_time = other_time, + count = 2, ) count = IS.get_count(var2) @@ -90,11 +90,11 @@ end data_ts = Dict( initial_time => TimeSeries.TimeArray( - range(initial_time; length=horizon, step=resolution), + range(initial_time; length = horizon, step = resolution), ones(horizon), ), other_time => TimeSeries.TimeArray( - range(other_time; length=horizon, step=resolution), + range(other_time; length = horizon, step = resolution), ones(horizon), ), ) @@ -109,11 +109,11 @@ end data_ts_two_cols = Dict( initial_time => TimeSeries.TimeArray( - range(initial_time; length=horizon, step=resolution), + range(initial_time; length = horizon, step = resolution), ones(horizon, 2), ), other_time => TimeSeries.TimeArray( - range(other_time; length=horizon, step=resolution), + range(other_time; length = horizon, step = resolution), ones(horizon, 2), ), ) @@ -150,21 +150,21 @@ end data_ts_polynomial = Dict( initial_time => TimeSeries.TimeArray( - range(initial_time; length=horizon, step=resolution), + range(initial_time; length = horizon, step = resolution), polynomial_cost, ), other_time => TimeSeries.TimeArray( - range(other_time; length=horizon, step=resolution), + range(other_time; length = horizon, step = resolution), polynomial_cost, ), ) data_ts_pwl = Dict( initial_time => TimeSeries.TimeArray( - range(initial_time; length=horizon, step=resolution), + range(initial_time; length = horizon, step = resolution), pwl_cost, ), other_time => TimeSeries.TimeArray( - range(other_time; length=horizon, step=resolution), + range(other_time; length = horizon, step = resolution), pwl_cost, ), ) @@ -198,16 +198,16 @@ end @test IS.has_time_series(component) @test IS.get_initial_timestamp(forecast) == initial_time forecast_retrieved = - IS.get_time_series(IS.Probabilistic, component, "test"; start_time=initial_time) + IS.get_time_series(IS.Probabilistic, component, "test"; start_time = initial_time) @test IS.get_initial_timestamp(forecast_retrieved) == initial_time data_ts = Dict( initial_time => TimeSeries.TimeArray( - range(initial_time; length=horizon, step=resolution), + range(initial_time; length = horizon, step = resolution), ones(horizon, 99), ), other_time => TimeSeries.TimeArray( - range(other_time; length=horizon, step=resolution), + range(other_time; length = horizon, step = resolution), ones(horizon, 99), ), ) @@ -237,16 +237,16 @@ end @test IS.has_time_series(component) @test IS.get_initial_timestamp(forecast) == initial_time forecast_retrieved = - 
IS.get_time_series(IS.Scenarios, component, "test"; start_time=initial_time) + IS.get_time_series(IS.Scenarios, component, "test"; start_time = initial_time) @test IS.get_initial_timestamp(forecast_retrieved) == initial_time data_ts = Dict( initial_time => TimeSeries.TimeArray( - range(initial_time; length=horizon, step=resolution), + range(initial_time; length = horizon, step = resolution), ones(horizon, 2), ), other_time => TimeSeries.TimeArray( - range(other_time; length=horizon, step=resolution), + range(other_time; length = horizon, step = resolution), ones(horizon, 2), ), ) @@ -269,30 +269,33 @@ end initial_time = Dates.DateTime("2020-09-01") resolution = Dates.Hour(1) - data = TimeSeries.TimeArray(range(initial_time; length=365, step=resolution), ones(365)) - data = IS.SingleTimeSeries(data=data, name="test_c") + data = TimeSeries.TimeArray( + range(initial_time; length = 365, step = resolution), + ones(365), + ) + data = IS.SingleTimeSeries(; data = data, name = "test_c") IS.add_time_series!(sys, component, data) ts1 = IS.get_time_series( IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time, - len=12, + start_time = initial_time, + len = 12, ) @test length(IS.get_data(ts1)) == 12 ts2 = IS.get_time_series( IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time + Dates.Day(1), - len=12, + start_time = initial_time + Dates.Day(1), + len = 12, ) @test length(IS.get_data(ts2)) == 12 ts3 = IS.get_time_series( IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time + Dates.Day(1), + start_time = initial_time + Dates.Day(1), ) @test length(IS.get_data(ts3)) == 341 #Throws errors @@ -300,29 +303,29 @@ end IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time, - len=1200, + start_time = initial_time, + len = 1200, ) @test_throws ArgumentError IS.get_time_series( IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time - Dates.Day(10), - len=12, + start_time = initial_time - Dates.Day(10), + len = 12, ) # Conflicting resolution data = TimeSeries.TimeArray( - range(initial_time; length=365, step=Dates.Minute(5)), + range(initial_time; length = 365, step = Dates.Minute(5)), ones(365), ) - data = IS.SingleTimeSeries(data=data, name="test_d") + data = IS.SingleTimeSeries(; data = data, name = "test_d") @test_throws IS.ConflictingInputsError IS.add_time_series!(sys, component, data) end @testset "Test Deterministic with a wrapped SingleTimeSeries" begin for in_memory in (true, false) - sys = IS.SystemData(time_series_in_memory=in_memory) + sys = IS.SystemData(; time_series_in_memory = in_memory) component = IS.TestComponent("Component1", 5) IS.add_component!(sys, component) @@ -342,7 +345,8 @@ end for i in 1:forecast_count fdata[dates[i]] = ones(horizon) end - bystander = IS.Deterministic(data=fdata, name="bystander", resolution=resolution) + bystander = + IS.Deterministic(; data = fdata, name = "bystander", resolution = resolution) IS.add_time_series!(sys, component, bystander) # This interval is greater than the max possible. @@ -396,12 +400,15 @@ end # Verify that get_time_series_multiple works with these types. 
forecasts = collect(IS.get_time_series_multiple(sys)) @test length(forecasts) == 3 - forecasts = collect(IS.get_time_series_multiple(sys; type=IS.AbstractDeterministic)) + forecasts = + collect(IS.get_time_series_multiple(sys; type = IS.AbstractDeterministic)) @test length(forecasts) == 2 - forecasts = collect(IS.get_time_series_multiple(sys; type=IS.Deterministic)) + forecasts = collect(IS.get_time_series_multiple(sys; type = IS.Deterministic)) @test length(forecasts) == 1 forecasts = - collect(IS.get_time_series_multiple(sys; type=IS.DeterministicSingleTimeSeries)) + collect( + IS.get_time_series_multiple(sys; type = IS.DeterministicSingleTimeSeries), + ) @test length(forecasts) == 1 @test forecasts[1] isa IS.DeterministicSingleTimeSeries @@ -410,14 +417,14 @@ end IS.Deterministic, component, name; - start_time=dates[2], + start_time = dates[2], ) # Must pass a full horizon. @test_throws ArgumentError IS.get_time_series( IS.Deterministic, component, name; - len=horizon - 1, + len = horizon - 1, ) # Already stored. @test IS.transform_single_time_series!( @@ -470,7 +477,7 @@ end @testset "Test Deterministic with a wrapped SingleTimeSeries different offsets" begin for in_memory in (true, false) - sys = IS.SystemData(time_series_in_memory=in_memory) + sys = IS.SystemData(; time_series_in_memory = in_memory) component = IS.TestComponent("Component1", 5) IS.add_component!(sys, component) @@ -500,7 +507,7 @@ end end @testset "Test SingleTimeSeries transform with multiple forecasts per component" begin - sys = IS.SystemData(time_series_in_memory=true) + sys = IS.SystemData(; time_series_in_memory = true) component = IS.TestComponent("Component1", 5) IS.add_component!(sys, component) @@ -533,7 +540,7 @@ end @testset "Test SingleTimeSeries transform deletions" begin for in_memory in (true, false) - sys = IS.SystemData(time_series_in_memory=in_memory) + sys = IS.SystemData(; time_series_in_memory = in_memory) component = IS.TestComponent("Component1", 5) IS.add_component!(sys, component) @@ -568,14 +575,14 @@ end end @testset "Test DeterministicSingleTimeSeries with single window" begin - sys = IS.SystemData(time_series_in_memory=true) + sys = IS.SystemData(; time_series_in_memory = true) component = IS.TestComponent("Component1", 5) IS.add_component!(sys, component) resolution = Dates.Hour(1) horizon = 24 dates = collect( - range(Dates.DateTime("2020-01-01T00:00:00"); length=horizon, step=resolution), + range(Dates.DateTime("2020-01-01T00:00:00"); length = horizon, step = resolution), ) data = collect(1:horizon) ta = TimeSeries.TimeArray(dates, data, [IS.get_name(component)]) @@ -598,14 +605,14 @@ end end @testset "Test DeterministicSingleTimeSeries with interval = resolution" begin - sys = IS.SystemData(time_series_in_memory=true) + sys = IS.SystemData(; time_series_in_memory = true) component = IS.TestComponent("Component1", 5) IS.add_component!(sys, component) resolution = Dates.Hour(1) horizon = 24 dates = collect( - range(Dates.DateTime("2020-01-01T00:00:00"); length=horizon, step=resolution), + range(Dates.DateTime("2020-01-01T00:00:00"); length = horizon, step = resolution), ) data = collect(1:horizon) ta = TimeSeries.TimeArray(dates, data, [IS.get_name(component)]) @@ -662,10 +669,10 @@ end other_time = initial_time + resolution polynomial_cost = repeat([(999.0, 1.0)], 365) data_polynomial = TimeSeries.TimeArray( - range(initial_time; length=365, step=resolution), + range(initial_time; length = 365, step = resolution), polynomial_cost, ) - data = IS.SingleTimeSeries(data=data_polynomial, 
name="test_c") + data = IS.SingleTimeSeries(; data = data_polynomial, name = "test_c") IS.add_time_series!(sys, component, data) ts = IS.get_time_series(IS.SingleTimeSeries, component, "test_c";) @test IS.get_data_type(ts) == "POLYNOMIAL" @@ -675,23 +682,23 @@ end IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time, - len=12, + start_time = initial_time, + len = 12, ) @test length(IS.get_data(ts1)) == 12 ts2 = IS.get_time_series( IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time + Dates.Day(1), - len=12, + start_time = initial_time + Dates.Day(1), + len = 12, ) @test length(IS.get_data(ts2)) == 12 ts3 = IS.get_time_series( IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time + Dates.Day(1), + start_time = initial_time + Dates.Day(1), ) @test length(IS.get_data(ts3)) == 341 end @@ -707,8 +714,8 @@ end other_time = initial_time + resolution pwl_cost = repeat([repeat([(999.0, 1.0)], 5)], 365) data_pwl = - TimeSeries.TimeArray(range(initial_time; length=365, step=resolution), pwl_cost) - data = IS.SingleTimeSeries(data=data_pwl, name="test_c") + TimeSeries.TimeArray(range(initial_time; length = 365, step = resolution), pwl_cost) + data = IS.SingleTimeSeries(; data = data_pwl, name = "test_c") IS.add_time_series!(sys, component, data) ts = IS.get_time_series(IS.SingleTimeSeries, component, "test_c";) @test IS.get_data_type(ts) == "PWL" @@ -717,23 +724,23 @@ end IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time, - len=12, + start_time = initial_time, + len = 12, ) @test length(IS.get_data(ts1)) == 12 ts2 = IS.get_time_series( IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time + Dates.Day(1), - len=12, + start_time = initial_time + Dates.Day(1), + len = 12, ) @test length(IS.get_data(ts2)) == 12 ts3 = IS.get_time_series( IS.SingleTimeSeries, component, "test_c"; - start_time=initial_time + Dates.Day(1), + start_time = initial_time + Dates.Day(1), ) @test length(IS.get_data(ts3)) == 341 end @@ -769,7 +776,7 @@ end typeof(time_series), component, IS.get_name(time_series); - start_time=IS.get_initial_timestamp(time_series), + start_time = IS.get_initial_timestamp(time_series), ) @test length(time_series) == length(time_series2) @test IS.get_initial_timestamp(time_series) == IS.get_initial_timestamp(time_series2) @@ -801,7 +808,7 @@ end @test IS.has_time_series(component) ini_time = IS.get_initial_timestamp(data) retrieved_data = - IS.get_time_series(IS.Deterministic, component, "test"; start_time=ini_time) + IS.get_time_series(IS.Deterministic, component, "test"; start_time = ini_time) @test IS.get_name(data) == IS.get_name(retrieved_data) @test IS.get_resolution(data) == IS.get_resolution(retrieved_data) end @@ -817,9 +824,13 @@ end data = collect(1:24) ta = TimeSeries.TimeArray(dates, data, [IS.get_name(component)]) name = "val" - ts = IS.SingleTimeSeries(name=name, data=ta, scaling_factor_multiplier=IS.get_val) + ts = IS.SingleTimeSeries(; + name = name, + data = ta, + scaling_factor_multiplier = IS.get_val, + ) IS.add_time_series!(sys, component, ts) - ts = IS.get_time_series(IS.SingleTimeSeries, component, name; start_time=dates[1]) + ts = IS.get_time_series(IS.SingleTimeSeries, component, name; start_time = dates[1]) @test ts isa IS.SingleTimeSeries name = "Component2" @@ -847,7 +858,11 @@ end data = collect(1:24) ta = TimeSeries.TimeArray(dates, data, ["1"]) name = "val" - ts = IS.SingleTimeSeries(name=name, data=ta, scaling_factor_multiplier=IS.get_val) + ts = IS.SingleTimeSeries(; + name = name, + data = ta, 
+ scaling_factor_multiplier = IS.get_val, + ) IS.add_time_series!(sys, components, ts) hash_ta_main = nothing @@ -883,9 +898,17 @@ end ta1 = TimeSeries.TimeArray(dates1, data1, [IS.get_name(component)]) ta2 = TimeSeries.TimeArray(dates2, data2, [IS.get_name(component)]) time_series1 = - IS.SingleTimeSeries(name="val", data=ta1, scaling_factor_multiplier=IS.get_val) + IS.SingleTimeSeries(; + name = "val", + data = ta1, + scaling_factor_multiplier = IS.get_val, + ) time_series2 = - IS.SingleTimeSeries(name="val2", data=ta2, scaling_factor_multiplier=IS.get_val) + IS.SingleTimeSeries(; + name = "val2", + data = ta2, + scaling_factor_multiplier = IS.get_val, + ) IS.add_time_series!(sys, component, time_series1) IS.add_time_series!(sys, component, time_series2) @@ -893,17 +916,17 @@ end @test length(collect(IS.get_time_series_multiple(component))) == 2 @test length(collect(IS.get_time_series_multiple(sys))) == 2 - @test length(collect(IS.get_time_series_multiple(sys; type=IS.SingleTimeSeries))) == 2 - @test length(collect(IS.get_time_series_multiple(sys; type=IS.Probabilistic))) == 0 + @test length(collect(IS.get_time_series_multiple(sys; type = IS.SingleTimeSeries))) == 2 + @test length(collect(IS.get_time_series_multiple(sys; type = IS.Probabilistic))) == 0 time_series = collect(IS.get_time_series_multiple(sys)) @test length(time_series) == 2 - @test length(collect(IS.get_time_series_multiple(sys; name="val"))) == 1 - @test length(collect(IS.get_time_series_multiple(sys; name="bad_name"))) == 0 + @test length(collect(IS.get_time_series_multiple(sys; name = "val"))) == 1 + @test length(collect(IS.get_time_series_multiple(sys; name = "bad_name"))) == 0 filter_func = x -> TimeSeries.values(IS.get_data(x))[12] == 12 - @test length(collect(IS.get_time_series_multiple(sys, filter_func; name="val2"))) == 0 + @test length(collect(IS.get_time_series_multiple(sys, filter_func; name = "val2"))) == 0 end @testset "Test get_time_series_with_metadata_multiple" begin @@ -922,9 +945,17 @@ end ta1 = TimeSeries.TimeArray(dates1, data1, [IS.get_name(component)]) ta2 = TimeSeries.TimeArray(dates2, data2, [IS.get_name(component)]) time_series1 = - IS.SingleTimeSeries(name="val", data=ta1, scaling_factor_multiplier=IS.get_val) + IS.SingleTimeSeries(; + name = "val", + data = ta1, + scaling_factor_multiplier = IS.get_val, + ) time_series2 = - IS.SingleTimeSeries(name="val2", data=ta2, scaling_factor_multiplier=IS.get_val) + IS.SingleTimeSeries(; + name = "val2", + data = ta2, + scaling_factor_multiplier = IS.get_val, + ) IS.add_time_series!(sys, component, time_series1) IS.add_time_series!(sys, component, time_series2) @@ -932,26 +963,33 @@ end @test length( collect( - IS.get_time_series_with_metadata_multiple(component; type=IS.SingleTimeSeries), + IS.get_time_series_with_metadata_multiple( + component; + type = IS.SingleTimeSeries, + ), ), ) == 2 @test length( collect( - IS.get_time_series_with_metadata_multiple(component; type=IS.Probabilistic), + IS.get_time_series_with_metadata_multiple(component; type = IS.Probabilistic), ), ) == 0 @test length( - collect(IS.get_time_series_with_metadata_multiple(component; name="val")), + collect(IS.get_time_series_with_metadata_multiple(component; name = "val")), ) == 1 @test length( - collect(IS.get_time_series_with_metadata_multiple(component; name="bad_name")), + collect(IS.get_time_series_with_metadata_multiple(component; name = "bad_name")), ) == 0 filter_func = x -> TimeSeries.values(IS.get_data(x))[12] == 12 @test length( collect( - 
IS.get_time_series_with_metadata_multiple(component, filter_func; name="val2"), + IS.get_time_series_with_metadata_multiple( + component, + filter_func; + name = "val2", + ), ), ) == 0 end @@ -967,14 +1005,14 @@ end data = collect(1:24) ta = TimeSeries.TimeArray(dates, data, [IS.get_name(component)]) name = "val" - ts = IS.SingleTimeSeries(name, ta; scaling_factor_multiplier=IS.get_val) + ts = IS.SingleTimeSeries(name, ta; scaling_factor_multiplier = IS.get_val) IS.add_time_series!(sys, component, ts) time_series = IS.get_time_series(IS.SingleTimeSeries, component, name) @test time_series isa IS.SingleTimeSeries end @testset "Test remove_time_series" begin - data = create_system_data(; with_time_series=true) + data = create_system_data(; with_time_series = true) components = collect(IS.iterate_components(data)) @test length(components) == 1 component = components[1] @@ -989,13 +1027,13 @@ end end @testset "Test clear_time_series" begin - data = create_system_data(; with_time_series=true) + data = create_system_data(; with_time_series = true) IS.clear_time_series!(data) @test length(get_all_time_series(data)) == 0 end @testset "Test that remove_component removes time_series" begin - data = create_system_data(; with_time_series=true) + data = create_system_data(; with_time_series = true) components = collect(IS.get_components(IS.InfrastructureSystemsComponent, data)) @test length(components) == 1 @@ -1027,8 +1065,8 @@ end ts = IS.SingleTimeSeries( name, ta; - normalization_factor=1.0, - scaling_factor_multiplier=IS.get_val, + normalization_factor = 1.0, + scaling_factor_multiplier = IS.get_val, ) IS.add_time_series!(sys, component, ts) time_series = IS.get_time_series(IS.SingleTimeSeries, component, name) @@ -1057,11 +1095,11 @@ end ts = IS.SingleTimeSeries(name, ta) IS.add_time_series!(sys, component, ts) - ts = IS.get_time_series(IS.SingleTimeSeries, component, name; start_time=dates[1]) + ts = IS.get_time_series(IS.SingleTimeSeries, component, name; start_time = dates[1]) @test TimeSeries.timestamp(IS.get_data(ts))[1] == dates[1] @test length(ts) == 24 - ts = IS.get_time_series(IS.SingleTimeSeries, component, name; start_time=dates[3]) + ts = IS.get_time_series(IS.SingleTimeSeries, component, name; start_time = dates[3]) @test TimeSeries.timestamp(IS.get_data(ts))[1] == dates[3] @test length(ts) == 22 @@ -1069,8 +1107,8 @@ end IS.SingleTimeSeries, component, name; - start_time=dates[5], - len=10, + start_time = dates[5], + len = 10, ) @test TimeSeries.timestamp(IS.get_data(time_series))[1] == dates[5] @test length(time_series) == 10 @@ -1119,7 +1157,7 @@ end IS.add_component!(sys, component2) name2 = "val2" name_mapping = Dict((IS.get_name(component), name1) => name2) - IS.copy_time_series!(component2, component; name_mapping=name_mapping) + IS.copy_time_series!(component2, component; name_mapping = name_mapping) time_series = IS.get_time_series(IS.SingleTimeSeries, component2, name2) @test time_series isa IS.SingleTimeSeries @test IS.get_initial_timestamp(time_series) == initial_time @@ -1153,7 +1191,7 @@ end IS.add_component!(sys, component2) name2b = "val2b" name_mapping = Dict((IS.get_name(component), name2a) => name2b) - IS.copy_time_series!(component2, component; name_mapping=name_mapping) + IS.copy_time_series!(component2, component; name_mapping = name_mapping) time_series = IS.get_time_series(IS.SingleTimeSeries, component2, name2b) @test time_series isa IS.SingleTimeSeries @test IS.get_initial_timestamp(time_series) == initial_time2 @@ -1162,7 +1200,7 @@ end end @testset "Test 
copy time_series with transformed time series" begin - sys = create_system_data(time_series_in_memory=true) + sys = create_system_data(; time_series_in_memory = true) components = collect(IS.get_components(IS.InfrastructureSystemsComponent, sys)) @test length(components) == 1 component = components[1] @@ -1216,12 +1254,12 @@ end end @testset "Summarize time_series" begin - data = create_system_data(; with_time_series=true) + data = create_system_data(; with_time_series = true) summary(devnull, data.time_series_params) end @testset "Test time_series forwarding methods" begin - data = create_system_data(; with_time_series=true) + data = create_system_data(; with_time_series = true) time_series = get_all_time_series(data)[1] # Iteration @@ -1242,7 +1280,7 @@ end end @testset "Test time_series head" begin - data = create_system_data(; with_time_series=true) + data = create_system_data(; with_time_series = true) time_series = get_all_time_series(data)[1] fcast = IS.head(time_series) # head returns a length of 6 by default, but don't hard-code that. @@ -1253,7 +1291,7 @@ end end @testset "Test time_series tail" begin - data = create_system_data(; with_time_series=true) + data = create_system_data(; with_time_series = true) time_series = get_all_time_series(data)[1] fcast = IS.tail(time_series) # tail returns a length of 6 by default, but don't hard-code that. @@ -1264,7 +1302,7 @@ end end @testset "Test time_series from" begin - data = create_system_data(; with_time_series=true) + data = create_system_data(; with_time_series = true) time_series = get_all_time_series(data)[1] start_time = Dates.DateTime(Dates.today()) + Dates.Hour(3) fcast = IS.from(time_series, start_time) @@ -1273,7 +1311,7 @@ end end @testset "Test time_series from" begin - data = create_system_data(; with_time_series=true) + data = create_system_data(; with_time_series = true) time_series = get_all_time_series(data)[1] for end_time in ( Dates.DateTime(Dates.today()) + Dates.Hour(15), @@ -1287,7 +1325,7 @@ end @testset "Test Scenarios time_series" begin for in_memory in (true, false) - sys = IS.SystemData(time_series_in_memory=in_memory) + sys = IS.SystemData(; time_series_in_memory = in_memory) sys = IS.SystemData() name = "Component1" name = "val" @@ -1300,11 +1338,11 @@ end scenario_count = 2 data_input = rand(horizon, scenario_count) data = SortedDict(initial_timestamp => data_input) - time_series = IS.Scenarios( - name=name, - resolution=resolution, - scenario_count=scenario_count, - data=data, + time_series = IS.Scenarios(; + name = name, + resolution = resolution, + scenario_count = scenario_count, + data = data, ) fdata = IS.get_data(time_series) @test size(first(values(fdata)))[2] == 2 @@ -1323,7 +1361,7 @@ end @testset "Test Probabilistic time_series" begin for in_memory in (true, false) - sys = IS.SystemData(time_series_in_memory=in_memory) + sys = IS.SystemData(; time_series_in_memory = in_memory) name = "Component1" name = "val" component = IS.TestComponent(name, 5) @@ -1335,11 +1373,11 @@ end percentiles = 1:99 data_input = rand(horizon, length(percentiles)) data = SortedDict(initial_timestamp => data_input) - time_series = IS.Probabilistic( - name=name, - resolution=resolution, - percentiles=percentiles, - data=data, + time_series = IS.Probabilistic(; + name = name, + resolution = resolution, + percentiles = percentiles, + data = data, ) fdata = IS.get_data(time_series) @test size(first(values(fdata)))[2] == length(percentiles) @@ -1372,7 +1410,7 @@ end IS.add_component!(sys, component) dates = 
create_dates("2020-01-01T00:00:00", Dates.Hour(1), "2020-01-01T23:00:00") ta = TimeSeries.TimeArray(dates, collect(1:24), [IS.get_name(component)]) - time_series = IS.SingleTimeSeries(name="val", data=ta) + time_series = IS.SingleTimeSeries(; name = "val", data = ta) @test_throws ArgumentError IS.add_time_series!(sys, component, time_series) end @@ -1391,12 +1429,15 @@ end horizon = 24 data = SortedDict(initial_time => ones(horizon), second_time => ones(horizon)) - forecast = IS.Deterministic(data=data, name=name, resolution=resolution) + forecast = IS.Deterministic(; data = data, name = name, resolution = resolution) IS.add_time_series!(sys, component, forecast) sts_data = - TimeSeries.TimeArray(range(initial_time; length=365, step=resolution), ones(365)) - sts = IS.SingleTimeSeries(data=sts_data, name="test_sts") + TimeSeries.TimeArray( + range(initial_time; length = 365, step = resolution), + ones(365), + ) + sts = IS.SingleTimeSeries(; data = sts_data, name = "test_sts") IS.add_time_series!(sys, component, sts) @test IS.get_time_series_resolution(sys) == resolution @@ -1414,7 +1455,7 @@ end @testset "Test get_time_series options" begin for in_memory in (true, false) - sys = IS.SystemData(time_series_in_memory=in_memory) + sys = IS.SystemData(; time_series_in_memory = in_memory) name = "Component1" component = IS.TestComponent(name, 5) IS.add_component!(sys, component) @@ -1423,7 +1464,7 @@ end resolution = Dates.Minute(5) interval = Dates.Hour(1) initial_timestamp = Dates.DateTime("2020-09-01") - initial_times = collect(range(initial_timestamp, length=24, step=interval)) + initial_times = collect(range(initial_timestamp; length = 24, step = interval)) name = "test" horizon = 24 data = SortedDict(it => ones(horizon) * i for (i, it) in enumerate(initial_times)) @@ -1446,8 +1487,8 @@ end IS.Deterministic, component, name; - start_time=it, - count=count, + start_time = it, + count = count, ) @test IS.get_initial_timestamp(f2) == it @test IS.get_count(f2) == count @@ -1461,9 +1502,9 @@ end IS.Deterministic, component, name; - start_time=it, - count=count, - len=horizon, + start_time = it, + count = count, + len = horizon, ) @test IS.get_initial_timestamp(f2) == it @test IS.get_count(f2) == count @@ -1477,14 +1518,14 @@ end IS.Deterministic, component, name; - start_time=it + Dates.Minute(1), + start_time = it + Dates.Minute(1), ) end end @testset "Test get_time_series options for Polynomial Cost" begin for in_memory in (true, false) - sys = IS.SystemData(time_series_in_memory=in_memory) + sys = IS.SystemData(; time_series_in_memory = in_memory) name = "Component1" component = IS.TestComponent(name, 5) IS.add_component!(sys, component) @@ -1493,14 +1534,15 @@ end resolution = Dates.Minute(5) interval = Dates.Hour(1) initial_timestamp = Dates.DateTime("2020-09-01") - initial_times = collect(range(initial_timestamp, length=24, step=interval)) + initial_times = collect(range(initial_timestamp; length = 24, step = interval)) name = "test" horizon = 24 data_polynomial = SortedDict{Dates.DateTime, Vector{IS.POLYNOMIAL}}( it => repeat([(999.0, 1.0 * i)], 24) for (i, it) in enumerate(initial_times) ) - forecast = IS.Deterministic(data=data_polynomial, name=name, resolution=resolution) + forecast = + IS.Deterministic(; data = data_polynomial, name = name, resolution = resolution) IS.add_time_series!(sys, component, forecast) @test IS.get_forecast_window_count(sys) == length(data_polynomial) @@ -1518,8 +1560,8 @@ end IS.Deterministic, component, name; - start_time=it, - count=count, + start_time = it, 
+ count = count, ) @test IS.get_initial_timestamp(f2) == it @test IS.get_count(f2) == count @@ -1536,8 +1578,8 @@ end IS.Deterministic, component, name; - start_time=it, - count=count, + start_time = it, + count = count, ) @test IS.get_initial_timestamp(f2) == it @test IS.get_count(f2) == count @@ -1552,9 +1594,9 @@ end IS.Deterministic, component, name; - start_time=it, - count=count, - len=horizon, + start_time = it, + count = count, + len = horizon, ) @test IS.get_initial_timestamp(f2) == it @test IS.get_count(f2) == count @@ -1568,7 +1610,7 @@ end IS.Deterministic, component, name; - start_time=it + Dates.Minute(1), + start_time = it + Dates.Minute(1), ) end end @@ -1576,7 +1618,7 @@ end @testset "Test get_time_series options for PWL Cost" begin #for in_memory in (true, false) for in_memory in [false] - sys = IS.SystemData(time_series_in_memory=in_memory) + sys = IS.SystemData(; time_series_in_memory = in_memory) name = "Component1" component = IS.TestComponent(name, 5) IS.add_component!(sys, component) @@ -1585,7 +1627,7 @@ end resolution = Dates.Minute(5) interval = Dates.Hour(1) initial_timestamp = Dates.DateTime("2020-09-01") - initial_times = collect(range(initial_timestamp, length=24, step=interval)) + initial_times = collect(range(initial_timestamp; length = 24, step = interval)) name = "test" horizon = 24 data_pwl = SortedDict{Dates.DateTime, Vector{IS.PWL}}( @@ -1593,7 +1635,7 @@ end (i, it) in enumerate(initial_times) ) - forecast = IS.Deterministic(data=data_pwl, name=name, resolution=resolution) + forecast = IS.Deterministic(; data = data_pwl, name = name, resolution = resolution) IS.add_time_series!(sys, component, forecast) @test IS.get_forecast_window_count(sys) == length(data_pwl) @@ -1611,8 +1653,8 @@ end IS.Deterministic, component, name; - start_time=it, - count=count, + start_time = it, + count = count, ) @test IS.get_initial_timestamp(f2) == it @test IS.get_count(f2) == count @@ -1628,8 +1670,8 @@ end IS.Deterministic, component, name; - start_time=it, - count=count, + start_time = it, + count = count, ) @test IS.get_initial_timestamp(f2) == it @test IS.get_count(f2) == count @@ -1643,9 +1685,9 @@ end IS.Deterministic, component, name; - start_time=it, - count=count, - len=horizon, + start_time = it, + count = count, + len = horizon, ) @test IS.get_initial_timestamp(f2) == it @test IS.get_count(f2) == count @@ -1659,7 +1701,7 @@ end IS.Deterministic, component, name; - start_time=it + Dates.Minute(1), + start_time = it + Dates.Minute(1), ) end end @@ -1674,7 +1716,7 @@ end data = collect(1:24) ta = TimeSeries.TimeArray(dates, data, [IS.get_name(component)]) name = "val" - ts = IS.SingleTimeSeries(name, ta; scaling_factor_multiplier=IS.get_val) + ts = IS.SingleTimeSeries(name, ta; scaling_factor_multiplier = IS.get_val) IS.add_time_series!(sys, component, ts) # Get data from storage, defaults. @@ -1692,24 +1734,24 @@ end IS.SingleTimeSeries, component, name; - start_time=dates[5], - len=5, + start_time = dates[5], + len = 5, ) @test TimeSeries.timestamp(ta2) == dates[5:9] @test TimeSeries.timestamp(ta2) == IS.get_time_series_timestamps( IS.SingleTimeSeries, component, name; - start_time=dates[5], - len=5, + start_time = dates[5], + len = 5, ) @test TimeSeries.values(ta2) == data[5:9] * IS.get_val(component) @test TimeSeries.values(ta2) == IS.get_time_series_values( IS.SingleTimeSeries, component, name; - start_time=dates[5], - len=5, + start_time = dates[5], + len = 5, ) # Get data from storage, ignore_scaling_factors. 
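The hunks above reformat the windowed read APIs (`get_time_series_array`, `get_time_series_timestamps`, `get_time_series_values`) with `start_time` and `len` keywords. A minimal sketch of that usage, mirroring the test setup (the component name and values are illustrative, not from the patch); note the `;` before keyword arguments and the spaces around `=`, the convention this patch applies throughout:

using Dates
using TimeSeries
import InfrastructureSystems as IS

# Minimal setup mirroring the tests: one component with 24 hourly values.
sys = IS.SystemData()
component = IS.TestComponent("Component1", 5)
IS.add_component!(sys, component)
dates = collect(
    range(Dates.DateTime("2020-01-01T00:00:00"); length = 24, step = Dates.Hour(1)),
)
ta = TimeSeries.TimeArray(dates, collect(1.0:24.0), [IS.get_name(component)])
IS.add_time_series!(sys, component, IS.SingleTimeSeries(; name = "val", data = ta))

# Retrieve a 5-step window starting at the fifth timestamp.
window = IS.get_time_series_array(
    IS.SingleTimeSeries,
    component,
    "val";
    start_time = dates[5],
    len = 5,
)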
@@ -1717,8 +1759,8 @@ end IS.SingleTimeSeries, component, name; - start_time=dates[5], - ignore_scaling_factors=true, + start_time = dates[5], + ignore_scaling_factors = true, ) @test TimeSeries.timestamp(ta2) == dates[5:end] @test TimeSeries.values(ta2) == data[5:end] @@ -1731,30 +1773,30 @@ end @test TimeSeries.values(ta2) == IS.get_time_series_values(component, ts) # Get data from cached instance, custom offsets - ta2 = IS.get_time_series_array(component, ts, dates[5], len=5) + ta2 = IS.get_time_series_array(component, ts, dates[5]; len = 5) @test TimeSeries.timestamp(ta2) == dates[5:9] @test TimeSeries.timestamp(ta2) == - IS.get_time_series_timestamps(component, ts, dates[5], len=5) + IS.get_time_series_timestamps(component, ts, dates[5]; len = 5) @test TimeSeries.values(ta2) == data[5:9] * IS.get_val(component) @test TimeSeries.values(ta2) == - IS.get_time_series_values(component, ts, dates[5], len=5) + IS.get_time_series_values(component, ts, dates[5]; len = 5) # Get data from cached instance, custom offsets, ignore_scaling_factors. ta2 = IS.get_time_series_array( component, ts, - dates[5], - len=5, - ignore_scaling_factors=true, + dates[5]; + len = 5, + ignore_scaling_factors = true, ) @test TimeSeries.timestamp(ta2) == dates[5:9] @test TimeSeries.values(ta2) == data[5:9] @test TimeSeries.values(ta2) == IS.get_time_series_values( component, ts, - dates[5], - len=5, - ignore_scaling_factors=true, + dates[5]; + len = 5, + ignore_scaling_factors = true, ) IS.clear_time_series!(sys) @@ -1779,26 +1821,27 @@ end resolution = Dates.Minute(5) interval = Dates.Hour(1) initial_timestamp = Dates.DateTime("2020-09-01") - initial_times = collect(range(initial_timestamp, length=2, step=interval)) + initial_times = collect(range(initial_timestamp; length = 2, step = interval)) name = "test" horizon = 24 data = SortedDict(it => ones(horizon) * i for (i, it) in enumerate(initial_times)) forecast = - IS.Deterministic(name, data, resolution; scaling_factor_multiplier=IS.get_val) + IS.Deterministic(name, data, resolution; scaling_factor_multiplier = IS.get_val) IS.add_time_series!(sys, component, forecast) start_time = initial_timestamp + interval # Verify all permutations with defaults. 
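The `ignore_scaling_factors` permutations exercised above assert that reads can return the stored data untouched instead of multiplied by the component's value. A hedged sketch of the two read paths, continuing the setup sketched earlier but under a hypothetical series name "val_scaled":

ts = IS.SingleTimeSeries("val_scaled", ta; scaling_factor_multiplier = IS.get_val)
IS.add_time_series!(sys, component, ts)

# Default read applies the multiplier (here IS.get_val(component) == 5)...
scaled = IS.get_time_series_values(IS.SingleTimeSeries, component, "val_scaled")
# ...while opting out returns the stored values as-is.
raw = IS.get_time_series_values(
    IS.SingleTimeSeries,
    component,
    "val_scaled";
    ignore_scaling_factors = true,
)
@assert scaled == raw .* IS.get_val(component)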
- ta2 = IS.get_time_series_array(IS.Deterministic, component, name; start_time=start_time) + ta2 = + IS.get_time_series_array(IS.Deterministic, component, name; start_time = start_time) @test ta2 isa TimeSeries.TimeArray @test TimeSeries.timestamp(ta2) == - collect(range(start_time, length=horizon, step=resolution)) + collect(range(start_time; length = horizon, step = resolution)) @test TimeSeries.timestamp(ta2) == IS.get_time_series_timestamps( IS.Deterministic, component, name; - start_time=start_time, + start_time = start_time, ) @test TimeSeries.timestamp(ta2) == IS.get_time_series_timestamps(component, forecast, start_time) @@ -1807,7 +1850,7 @@ end IS.Deterministic, component, name; - start_time=start_time, + start_time = start_time, ) @test TimeSeries.values(ta2) == IS.get_time_series_values(component, forecast, start_time) @@ -1820,22 +1863,22 @@ end IS.Deterministic, component, name; - start_time=start_time, - ignore_scaling_factors=true, + start_time = start_time, + ignore_scaling_factors = true, ), ) == data[start_time] IS.get_time_series_values( IS.Deterministic, component, name; - start_time=start_time, - ignore_scaling_factors=true, + start_time = start_time, + ignore_scaling_factors = true, ) == data[start_time] IS.get_time_series_values( component, forecast, - start_time, - ignore_scaling_factors=true, + start_time; + ignore_scaling_factors = true, ) == data[start_time] # Custom length @@ -1844,22 +1887,22 @@ end IS.Deterministic, component, name; - start_time=start_time, - len=10, + start_time = start_time, + len = 10, ) @test TimeSeries.timestamp(ta2)[1:10] == - IS.get_time_series_timestamps(component, forecast, start_time; len=10) + IS.get_time_series_timestamps(component, forecast, start_time; len = 10) @test TimeSeries.values(ta2)[1:10] == IS.get_time_series_values( IS.Deterministic, component, name; - start_time=start_time, - len=len, + start_time = start_time, + len = len, ) @test TimeSeries.values(ta2)[1:10] == - IS.get_time_series_values(component, forecast, start_time; len=10) + IS.get_time_series_values(component, forecast, start_time; len = 10) @test TimeSeries.values(ta2)[1:10] == TimeSeries.values( - IS.get_time_series_array(component, forecast, start_time, len=10), + IS.get_time_series_array(component, forecast, start_time; len = 10), ) end @@ -1881,13 +1924,13 @@ end @test IS.has_time_series(component) @test IS.get_initial_timestamp(forecast) == initial_time forecast_retrieved = - IS.get_time_series(IS.Probabilistic, component, "test"; start_time=initial_time) + IS.get_time_series(IS.Probabilistic, component, "test"; start_time = initial_time) @test IS.get_initial_timestamp(forecast_retrieved) == initial_time t = IS.get_time_series_array( IS.Probabilistic, component, "test"; - start_time=initial_time, + start_time = initial_time, ) @test size(t) == (24, 99) @test TimeSeries.values(t) == data1 @@ -1896,12 +1939,13 @@ end IS.Probabilistic, component, "test"; - start_time=initial_time, - len=12, + start_time = initial_time, + len = 12, ) @test size(t) == (12, 99) @test TimeSeries.values(t) == data1[1:12, :] - t_other = IS.get_time_series(IS.Probabilistic, component, "test"; start_time=other_time) + t_other = + IS.get_time_series(IS.Probabilistic, component, "test"; start_time = other_time) @test collect(keys(IS.get_data(t_other)))[1] == other_time end @@ -1923,9 +1967,9 @@ end @test IS.has_time_series(component) @test IS.get_initial_timestamp(forecast) == initial_time forecast_retrieved = - IS.get_time_series(IS.Scenarios, component, "test"; 
start_time=initial_time) + IS.get_time_series(IS.Scenarios, component, "test"; start_time = initial_time) @test IS.get_initial_timestamp(forecast_retrieved) == initial_time - t = IS.get_time_series_array(IS.Scenarios, component, "test"; start_time=initial_time) + t = IS.get_time_series_array(IS.Scenarios, component, "test"; start_time = initial_time) @test size(t) == (24, 99) @test TimeSeries.values(t) == data1 @@ -1933,12 +1977,12 @@ end IS.Scenarios, component, "test"; - start_time=initial_time, - len=12, + start_time = initial_time, + len = 12, ) @test size(t) == (12, 99) @test TimeSeries.values(t) == data1[1:12, :] - t_other = IS.get_time_series(IS.Scenarios, component, "test"; start_time=other_time) + t_other = IS.get_time_series(IS.Scenarios, component, "test"; start_time = other_time) @test collect(keys(IS.get_data(t_other)))[1] == other_time end @@ -1956,17 +2000,17 @@ end # Horizon must be greater than 1. bad_data = SortedDict(initial_time => ones(1), second_time => ones(1)) - forecast = IS.Deterministic(data=bad_data, name=name, resolution=resolution) + forecast = IS.Deterministic(; data = bad_data, name = name, resolution = resolution) @test_throws ArgumentError IS.add_time_series!(sys, component, forecast) # Arrays must have the same length. bad_data = SortedDict(initial_time => ones(2), second_time => ones(3)) - forecast = IS.Deterministic(data=bad_data, name=name, resolution=resolution) + forecast = IS.Deterministic(; data = bad_data, name = name, resolution = resolution) @test_throws DimensionMismatch IS.add_time_series!(sys, component, forecast) # Set baseline parameters for the rest of the tests. data = SortedDict(initial_time => ones(horizon), second_time => ones(horizon)) - forecast = IS.Deterministic(data=data, name=name, resolution=resolution) + forecast = IS.Deterministic(; data = data, name = name, resolution = resolution) IS.add_time_series!(sys, component, forecast) # Conflicting initial time @@ -1974,7 +2018,7 @@ end name = "test2" data = SortedDict(initial_time2 => ones(horizon), second_time => ones(horizon)) - forecast = IS.Deterministic(data=data, name=name, resolution=resolution) + forecast = IS.Deterministic(; data = data, name = name, resolution = resolution) @test_throws IS.ConflictingInputsError IS.add_time_series!(sys, component, forecast) # Conflicting resolution @@ -1982,7 +2026,7 @@ end name = "test2" data = SortedDict(initial_time => ones(horizon), second_time => ones(horizon)) - forecast = IS.Deterministic(data=data, name=name, resolution=resolution2) + forecast = IS.Deterministic(; data = data, name = name, resolution = resolution2) @test_throws IS.ConflictingInputsError IS.add_time_series!(sys, component, forecast) # Conflicting horizon @@ -1990,7 +2034,7 @@ end horizon2 = 23 data = SortedDict(initial_time => ones(horizon2), second_time => ones(horizon2)) - forecast = IS.Deterministic(data=data, name=name, resolution=resolution) + forecast = IS.Deterministic(; data = data, name = name, resolution = resolution) @test_throws IS.ConflictingInputsError IS.add_time_series!(sys, component, forecast) # Conflicting count @@ -2002,7 +2046,7 @@ end third_time => ones(horizon), ) - forecast = IS.Deterministic(data=data, name=name, resolution=resolution) + forecast = IS.Deterministic(; data = data, name = name, resolution = resolution) @test_throws IS.ConflictingInputsError IS.add_time_series!(sys, component, forecast) end @@ -2020,14 +2064,15 @@ end horizon = 24 data = SortedDict(initial_time => ones(horizon), other_time => ones(horizon)) - forecast = 
IS.Deterministic(data=data, name=name, resolution=resolution) + forecast = IS.Deterministic(; data = data, name = name, resolution = resolution) IS.add_time_series!(sys, component, forecast) key = IS.TimeSeriesKey(forecast) @test key == IS.TimeSeriesKey(IS.DeterministicMetadata, name) - @test key == IS.TimeSeriesKey(; time_series_type=IS.DeterministicMetadata, name=name) + @test key == + IS.TimeSeriesKey(; time_series_type = IS.DeterministicMetadata, name = name) - var1 = IS.get_time_series(IS.Deterministic, component, name; start_time=initial_time) - var_key1 = IS.get_time_series_by_key(key, component; start_time=initial_time) + var1 = IS.get_time_series(IS.Deterministic, component, name; start_time = initial_time) + var_key1 = IS.get_time_series_by_key(key, component; start_time = initial_time) @test length(var1) == length(var_key1) @test IS.get_horizon(var1) == horizon @test IS.get_horizon(var1) == IS.get_horizon(var_key1) @@ -2038,10 +2083,11 @@ end IS.Deterministic, component, name; - start_time=initial_time, - count=2, + start_time = initial_time, + count = 2, ) - var_key2 = IS.get_time_series_by_key(key, component; start_time=initial_time, count=2) + var_key2 = + IS.get_time_series_by_key(key, component; start_time = initial_time, count = 2) @test length(var2) == 2 @test length(var2) == length(var_key2) @@ -2049,13 +2095,13 @@ end @test_throws ArgumentError IS.get_time_series_by_key( key, component; - start_time=initial_time, - count=3, + start_time = initial_time, + count = 3, ) end @testset "Test copy_to_new_file! on HDF5" begin - sys = IS.SystemData(time_series_in_memory=false) + sys = IS.SystemData(; time_series_in_memory = false) name = "Component1" name = "val" component = IS.TestComponent(name, 5) @@ -2066,7 +2112,7 @@ end resolution = Dates.Hour(1) data_input = rand(horizon) data = SortedDict(initial_timestamp => data_input) - time_series = IS.Deterministic(name=name, resolution=resolution, data=data) + time_series = IS.Deterministic(; name = name, resolution = resolution, data = data) fdata = IS.get_data(time_series) @test initial_timestamp == first(keys((fdata))) @test data_input == first(values((fdata))) @@ -2096,8 +2142,8 @@ end end for compression_enabled in (true, false) - compression = IS.CompressionSettings(enabled=compression_enabled) - sys = IS.SystemData(time_series_in_memory=false, compression=compression) + compression = IS.CompressionSettings(; enabled = compression_enabled) + sys = IS.SystemData(; time_series_in_memory = false, compression = compression) @test sys.time_series_storage.compression.enabled == compression_enabled name = "Component1" name = "val" @@ -2110,7 +2156,8 @@ end data_input = rand(horizon) data = SortedDict(initial_timestamp => data_input) for i in 1:2 - time_series = IS.Deterministic(name="name_$i", resolution=resolution, data=data) + time_series = + IS.Deterministic(; name = "name_$i", resolution = resolution, data = data) IS.add_time_series!(sys, component, time_series) end old_file = IS.get_file_path(sys.time_series_storage) @@ -2146,7 +2193,7 @@ end @testset "Test assign_new_uuid! 
for component with time series" begin for in_memory in (true, false) - sys = IS.SystemData(time_series_in_memory=in_memory) + sys = IS.SystemData(; time_series_in_memory = in_memory) name = "Component1" component = IS.TestComponent(name, 5) IS.add_component!(sys, component) @@ -2156,8 +2203,11 @@ end name = "test" data = - TimeSeries.TimeArray(range(initial_time; length=24, step=resolution), ones(24)) - data = IS.SingleTimeSeries(data=data, name=name) + TimeSeries.TimeArray( + range(initial_time; length = 24, step = resolution), + ones(24), + ) + data = IS.SingleTimeSeries(; data = data, name = name) IS.add_time_series!(sys, component, data) @test IS.get_time_series(IS.SingleTimeSeries, component, name) isa IS.SingleTimeSeries diff --git a/test/test_time_series_cache.jl b/test/test_time_series_cache.jl index bdcecba37..a70010c39 100644 --- a/test/test_time_series_cache.jl +++ b/test/test_time_series_cache.jl @@ -35,7 +35,7 @@ @test IS.get_next_time(cache) === nothing # Iterate over all initial times with custom cache size. - cache = IS.ForecastCache(IS.Deterministic, component, "test"; cache_size_bytes=1024) + cache = IS.ForecastCache(IS.Deterministic, component, "test"; cache_size_bytes = 1024) @test length(cache) == cache.common.num_iterations == 168 for (i, ta) in enumerate(cache) it = initial_times[i] @@ -57,7 +57,7 @@ IS.Deterministic, component, "test"; - start_time=Dates.DateTime("2020-01-02T00:00:00"), + start_time = Dates.DateTime("2020-01-02T00:00:00"), ) for (i, ta) in enumerate(cache) it = initial_times[i + 24] @@ -67,7 +67,7 @@ end # Test caching internals. - cache = IS.ForecastCache(IS.Deterministic, component, "test"; cache_size_bytes=1024) + cache = IS.ForecastCache(IS.Deterministic, component, "test"; cache_size_bytes = 1024) @test cache.in_memory_count == 5 @test IS.get_next_time(cache) == initial_timestamp for it in initial_times[1:(cache.in_memory_count)] @@ -99,10 +99,10 @@ end len = 96 data = TimeSeries.TimeArray( - range(initial_timestamp; length=len, step=resolution), + range(initial_timestamp; length = len, step = resolution), rand(len), ) - ts = IS.SingleTimeSeries(data=data, name="test") + ts = IS.SingleTimeSeries(; data = data, name = "test") IS.add_time_series!(sys, component, ts) cache = IS.StaticTimeSeriesCache(IS.SingleTimeSeries, component, "test") @@ -115,16 +115,16 @@ end for (i, ta) in enumerate(cache) it = initial_timestamp + (i - 1) * resolution @test TimeSeries.timestamp(ta) == - IS.get_time_series_timestamps(component, ts, it, len=1) - @test TimeSeries.values(ta) == IS.get_time_series_values(component, ts, it, len=1) + IS.get_time_series_timestamps(component, ts, it; len = 1) + @test TimeSeries.values(ta) == IS.get_time_series_values(component, ts, it; len = 1) end ta = IS.get_next_time_series_array!(cache) @test first(TimeSeries.timestamp(ta)) == initial_timestamp @test TimeSeries.timestamp(ta) == - IS.get_time_series_timestamps(component, ts, initial_timestamp, len=1) + IS.get_time_series_timestamps(component, ts, initial_timestamp; len = 1) @test TimeSeries.values(ta) == - IS.get_time_series_values(component, ts, initial_timestamp, len=1) + IS.get_time_series_values(component, ts, initial_timestamp; len = 1) # Iterate over all initial times with custom cache size. 
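The test_time_series_cache.jl hunks that begin above iterate forecast windows through `IS.ForecastCache`, both by `for` loop and by explicit pulls. A minimal sketch, assuming a Deterministic forecast named "test" has already been added to `component` as in those tests:

cache = IS.ForecastCache(IS.Deterministic, component, "test"; cache_size_bytes = 1024)
for window in cache
    # Each iteration yields one forecast window as a TimeArray whose first
    # timestamp is that window's initial time.
    @assert window isa TimeSeries.TimeArray
end
IS.reset!(cache)                             # rewind to the first initial time
ta1 = IS.get_next_time_series_array!(cache)  # or pull windows one at a time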
cache_size_bytes = 96 @@ -132,7 +132,7 @@ end IS.SingleTimeSeries, component, "test"; - cache_size_bytes=cache_size_bytes, + cache_size_bytes = cache_size_bytes, ) @test cache.in_memory_rows == cache_size_bytes / 8 @test length(cache) == cache.common.num_iterations == len @@ -145,8 +145,8 @@ end for (i, ta) in enumerate(cache) it = initial_timestamp + (i - 1) * resolution @test TimeSeries.timestamp(ta) == - IS.get_time_series_timestamps(component, ts, it; len=1) - @test TimeSeries.values(ta) == IS.get_time_series_values(component, ts, it; len=1) + IS.get_time_series_timestamps(component, ts, it; len = 1) + @test TimeSeries.values(ta) == IS.get_time_series_values(component, ts, it; len = 1) end IS.reset!(cache) @@ -154,8 +154,8 @@ end ta = IS.get_next_time_series_array!(cache) it = initial_timestamp + (i - 1) * resolution @test TimeSeries.timestamp(ta) == - IS.get_time_series_timestamps(component, ts, it; len=1) - @test TimeSeries.values(ta) == IS.get_time_series_values(component, ts, it; len=1) + IS.get_time_series_timestamps(component, ts, it; len = 1) + @test TimeSeries.values(ta) == IS.get_time_series_values(component, ts, it; len = 1) end cache_size_bytes = 96 @@ -164,8 +164,8 @@ end IS.SingleTimeSeries, component, "test"; - start_time=start_time, - cache_size_bytes=cache_size_bytes, + start_time = start_time, + cache_size_bytes = cache_size_bytes, ) @test cache.in_memory_rows == cache_size_bytes / 8 @test cache.common.num_iterations == @@ -175,8 +175,8 @@ end ta = IS.get_next_time_series_array!(cache) it = start_time + (i - 1) * resolution @test TimeSeries.timestamp(ta) == - IS.get_time_series_timestamps(component, ts, it; len=1) - @test TimeSeries.values(ta) == IS.get_time_series_values(component, ts, it; len=1) + IS.get_time_series_timestamps(component, ts, it; len = 1) + @test TimeSeries.values(ta) == IS.get_time_series_values(component, ts, it; len = 1) end end @@ -204,7 +204,7 @@ end forecast = IS.get_time_series(IS.AbstractDeterministic, component, name) initial_times = collect(IS.get_initial_times(forecast)) cache = - IS.ForecastCache(IS.AbstractDeterministic, component, name; cache_size_bytes=1024) + IS.ForecastCache(IS.AbstractDeterministic, component, name; cache_size_bytes = 1024) for (i, ta) in enumerate(cache) @test TimeSeries.timestamp(ta) == @@ -221,7 +221,7 @@ end name = "test" horizon = 24 data = SortedDict{Dates.DateTime, Matrix{Float64}}() - for (i, it) in enumerate(range(initial_time, step=interval, length=100)) + for (i, it) in enumerate(range(initial_time; step = interval, length = 100)) data[it] = ones(horizon, 99) * i end sys = IS.SystemData() @@ -233,7 +233,7 @@ end # Iterate over all initial times with custom cache size. 
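The `cache.in_memory_rows == cache_size_bytes / 8` checks in this hunk follow from each stored value being a Float64 (8 bytes). A worked sketch of the sizing arithmetic, assuming the 96-point single time series from the test:

cache_size_bytes = 96
cache = IS.StaticTimeSeriesCache(
    IS.SingleTimeSeries,
    component,
    "test";
    cache_size_bytes = cache_size_bytes,
)
# 96 bytes / 8 bytes per Float64 value = 12 rows resident in memory at a time.
@assert cache.in_memory_rows == 12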
sz = 1024 * 1024 - cache = IS.ForecastCache(IS.Probabilistic, component, "test"; cache_size_bytes=sz) + cache = IS.ForecastCache(IS.Probabilistic, component, "test"; cache_size_bytes = sz) initial_times = collect(keys(data)) @test cache.in_memory_count == trunc(Int, sz / (99 * 8 * 24)) for (i, ta) in enumerate(cache) diff --git a/test/test_time_series_storage.jl b/test/test_time_series_storage.jl index 6664f84b8..75d1c42fb 100644 --- a/test/test_time_series_storage.jl +++ b/test/test_time_series_storage.jl @@ -21,7 +21,7 @@ function test_add_remove(storage::IS.TimeSeriesStorage) name = "component1" name = "val" component = IS.TestComponent(name, 5) - ts = IS.SingleTimeSeries(data=create_time_array(), name="test") + ts = IS.SingleTimeSeries(; data = create_time_array(), name = "test") IS.serialize_time_series!(storage, IS.get_uuid(component), name, ts) ts2 = _deserialize_full(storage, ts) @@ -48,7 +48,7 @@ function test_add_references(storage::IS.TimeSeriesStorage) name = "val" component1 = IS.TestComponent("component1", 5) component2 = IS.TestComponent("component2", 6) - ts = IS.SingleTimeSeries(data=create_time_array(), name="test") + ts = IS.SingleTimeSeries(; data = create_time_array(), name = "test") ts_uuid = IS.get_uuid(ts) IS.serialize_time_series!(storage, IS.get_uuid(component1), name, ts) IS.add_time_series_reference!(storage, IS.get_uuid(component2), name, ts_uuid) @@ -77,7 +77,7 @@ function test_get_subset(storage::IS.TimeSeriesStorage) name = "component1" name = "val" component = IS.TestComponent(name, 1) - ts = IS.SingleTimeSeries(data=create_time_array(), name="test") + ts = IS.SingleTimeSeries(; data = create_time_array(), name = "test") IS.serialize_time_series!(storage, IS.get_uuid(component), name, ts) ts2 = _deserialize_full(storage, ts) @@ -97,7 +97,7 @@ function test_get_subset(storage::IS.TimeSeriesStorage) horizon = 24 data = SortedDict(initial_time1 => ones(horizon), initial_time2 => ones(horizon)) - ts = IS.Deterministic(data=data, name=name, resolution=resolution) + ts = IS.Deterministic(; data = data, name = name, resolution = resolution) IS.serialize_time_series!(storage, IS.get_uuid(component), name, ts) ts_metadata = make_metadata(ts) rows = UnitRange(1, horizon) @@ -129,7 +129,7 @@ function test_clear(storage::IS.TimeSeriesStorage) name = "component1" name = "val" component = IS.TestComponent(name, 5) - ts = IS.SingleTimeSeries(data=create_time_array(), name="test") + ts = IS.SingleTimeSeries(; data = create_time_array(), name = "test") IS.serialize_time_series!(storage, IS.get_uuid(component), name, ts) ts2 = _deserialize_full(storage, ts) @@ -142,27 +142,27 @@ end @testset "Test time series storage implementations" begin for in_memory in (true, false) - test_add_remove(IS.make_time_series_storage(; in_memory=in_memory)) - test_get_subset(IS.make_time_series_storage(; in_memory=in_memory)) - test_clear(IS.make_time_series_storage(; in_memory=in_memory)) + test_add_remove(IS.make_time_series_storage(; in_memory = in_memory)) + test_get_subset(IS.make_time_series_storage(; in_memory = in_memory)) + test_clear(IS.make_time_series_storage(; in_memory = in_memory)) end - test_add_remove(IS.make_time_series_storage(; in_memory=false, directory=".")) - test_get_subset(IS.make_time_series_storage(; in_memory=false, directory=".")) - test_clear(IS.make_time_series_storage(; in_memory=false, directory=".")) + test_add_remove(IS.make_time_series_storage(; in_memory = false, directory = ".")) + test_get_subset(IS.make_time_series_storage(; in_memory = false, directory 
= ".")) + test_clear(IS.make_time_series_storage(; in_memory = false, directory = ".")) end @testset "Test copy time series references" begin for in_memory in (true, false) - test_add_remove(IS.make_time_series_storage(; in_memory=in_memory)) - test_add_references(IS.make_time_series_storage(; in_memory=in_memory)) - test_get_subset(IS.make_time_series_storage(; in_memory=in_memory)) - test_clear(IS.make_time_series_storage(; in_memory=in_memory)) + test_add_remove(IS.make_time_series_storage(; in_memory = in_memory)) + test_add_references(IS.make_time_series_storage(; in_memory = in_memory)) + test_get_subset(IS.make_time_series_storage(; in_memory = in_memory)) + test_clear(IS.make_time_series_storage(; in_memory = in_memory)) end end @testset "Test data format version" begin - storage = IS.make_time_series_storage(in_memory=false) + storage = IS.make_time_series_storage(; in_memory = false) @test IS.read_data_format_version(storage) == IS.TIME_SERIES_DATA_FORMAT_VERSION end @@ -171,18 +171,35 @@ end for type in (IS.CompressionTypes.BLOSC, IS.CompressionTypes.DEFLATE) for shuffle in (true, false) compression = - IS.CompressionSettings(enabled=true, type=type, level=5, shuffle=shuffle) + IS.CompressionSettings(; + enabled = true, + type = type, + level = 5, + shuffle = shuffle, + ) test_add_remove( - IS.make_time_series_storage(; in_memory=in_memory, compression=compression), + IS.make_time_series_storage(; + in_memory = in_memory, + compression = compression, + ), ) test_add_references( - IS.make_time_series_storage(; in_memory=in_memory, compression=compression), + IS.make_time_series_storage(; + in_memory = in_memory, + compression = compression, + ), ) test_get_subset( - IS.make_time_series_storage(; in_memory=in_memory, compression=compression), + IS.make_time_series_storage(; + in_memory = in_memory, + compression = compression, + ), ) test_clear( - IS.make_time_series_storage(; in_memory=in_memory, compression=compression), + IS.make_time_series_storage(; + in_memory = in_memory, + compression = compression, + ), ) end end @@ -190,12 +207,12 @@ end @testset "Test isempty" begin for in_memory in (true, false) - storage = IS.make_time_series_storage(in_memory=in_memory) + storage = IS.make_time_series_storage(; in_memory = in_memory) @test isempty(storage) name = "component1" name = "val" component = IS.TestComponent(name, 5) - ts = IS.SingleTimeSeries(data=create_time_array(), name="test") + ts = IS.SingleTimeSeries(; data = create_time_array(), name = "test") IS.serialize_time_series!(storage, IS.get_uuid(component), name, ts) @test !isempty(storage) end