diff --git a/src/decision/dMCDA.jl b/src/decision/dMCDA.jl
index 1105ed721f..fc53d31dfe 100644
--- a/src/decision/dMCDA.jl
+++ b/src/decision/dMCDA.jl
@@ -73,16 +73,16 @@ function mcda_methods()
 end

 """
-    DMCDA_vars(domain::Domain, criteria::NamedDimsArray,
-        site_ids::AbstractArray, leftover_space::AbstractArray, area_to_seed::Float64,
-        waves::AbstractArray, dhws::AbstractArray)::DMCDA_vars
-    DMCDA_vars(domain::Domain, criteria::NamedDimsArray, site_ids::AbstractArray,
-        leftover_space::AbstractArray, area_to_seed::Float64)::DMCDA_vars
-    DMCDA_vars(domain::Domain, criteria::DataFrameRow, site_ids::AbstractArray,
-        leftover_space::AbstractArray, area_to_seed::Float64)::DMCDA_vars
-    DMCDA_vars(domain::Domain, criteria::DataFrameRow, site_ids::AbstractArray,
-        leftover_space::AbstractArray, area_to_seed::Float64,
-        waves::AbstractArray, dhw::AbstractArray)::DMCDA_vars
+DMCDA_vars(domain::Domain, criteria::NamedDimsArray,
+    site_ids::AbstractArray, leftover_space::AbstractArray, area_to_seed::Float64,
+    waves::AbstractArray, dhws::AbstractArray)::DMCDA_vars
+DMCDA_vars(domain::Domain, criteria::NamedDimsArray, site_ids::AbstractArray,
+leftover_space::AbstractArray, area_to_seed::Float64)::DMCDA_vars
+DMCDA_vars(domain::Domain, criteria::DataFrameRow, site_ids::AbstractArray,
+leftover_space::AbstractArray, area_to_seed::Float64)::DMCDA_vars
+DMCDA_vars(domain::Domain, criteria::DataFrameRow, site_ids::AbstractArray,
+leftover_space::AbstractArray, area_to_seed::Float64,
+    waves::AbstractArray, dhw::AbstractArray)::DMCDA_vars

 Constuctors for DMCDA variables.
 """
@@ -161,7 +161,7 @@ function DMCDA_vars(
     waves::AbstractArray,
     dhw::AbstractArray,
 )::DMCDA_vars
-    criteria_vec::NamedDimsArray = NamedDimsArray(collect(criteria); rows = names(criteria))
+    criteria_vec::NamedDimsArray = NamedDimsArray(collect(criteria); rows=names(criteria))
     return DMCDA_vars(
         domain, criteria_vec, site_ids, leftover_space, area_to_seed, waves, dhw
     )
@@ -173,12 +173,12 @@ function DMCDA_vars(
     leftover_space::AbstractArray,
     area_to_seed::Float64,
 )::DMCDA_vars
-    criteria_vec::NamedDimsArray = NamedDimsArray(collect(criteria); rows = names(criteria))
+    criteria_vec::NamedDimsArray = NamedDimsArray(collect(criteria); rows=names(criteria))
     return DMCDA_vars(domain, criteria_vec, site_ids, leftover_space, area_to_seed)
 end

 """
-    mcda_normalize(x::Vector)::Vector
+mcda_normalize(x::Vector)::Vector

 Normalize a Vector (wse/wsh) for MCDA.
 """
@@ -187,25 +187,25 @@ function mcda_normalize(x::Vector)::Vector
 end

 """
-    mcda_normalize(x::Matrix)::Matrix
+mcda_normalize(x::Matrix)::Matrix

 Normalize a Matrix (SE/SH) for MCDA.
 """
 function mcda_normalize(x::Matrix)::Matrix
-    return x ./ sqrt.(sum(x .^ 2; dims = 1))
+    return x ./ sqrt.(sum(x .^ 2; dims=1))
 end

 """
-    mcda_normalize(x::DataFrame)::DataFrame
+mcda_normalize(x::DataFrame)::DataFrame

 Normalize weights for a set of scenarios (wse/wsh) for MCDA.
 """
 function mcda_normalize(x::DataFrame)::DataFrame
-    return x ./ sum(Matrix(x); dims = 2)
+    return x ./ sum(Matrix(x); dims=2)
 end

 """
-    align_rankings!(rankings::Array, s_order::Matrix, col::Int64)::Nothing
+align_rankings!(rankings::Array, s_order::Matrix, col::Int64)::Nothing

 Align a vector of site rankings to match the indicated order in `s_order`.
""" @@ -219,7 +219,7 @@ function align_rankings!(rankings::Array, s_order::Matrix, col::Int64)::Nothing end """ - rank_sites!(S, weights, rankings, n_site_int, mcda_func, rank_col) +rank_sites!(S, weights, rankings, n_site_int, mcda_func, rank_col) # Arguments - `S` : Matrix, Site preference values @@ -240,7 +240,7 @@ function rank_sites!( mcda_func::Union{Function, Type{<:MCDMMethod}}, rank_col)::Tuple{Vector{Int64}, Matrix{Union{Float64, Int64}}} # Filter out all non-preferred sites - selector = vec(.!all(S[:, 2:end] .== 0; dims = 1)) + selector = vec(.!all(S[:, 2:end] .== 0; dims=1)) # weights in order of: in_conn, out_conn, wave, heat, predecessors, low cover weights = weights[selector] @@ -258,9 +258,9 @@ function rank_sites!( end """ - retrieve_ranks(S::Matrix, site_ids::Vector, weights::Vector{Float64}, mcda_func::Function)::Matrix{Union{Float64,Int64}} - retrieve_ranks(S::Matrix, site_ids::Vector, weights::Vector{Float64}, mcda_func::Type{<:MCDMMethod})::Matrix{Union{Float64,Int64}} - retrieve_ranks(site_ids::Vector, scores::Vector, maximize::Bool)::Matrix{Union{Float64,Int64}} +retrieve_ranks(S::Matrix, site_ids::Vector, weights::Vector{Float64}, mcda_func::Function)::Matrix{Union{Float64,Int64}} +retrieve_ranks(S::Matrix, site_ids::Vector, weights::Vector{Float64}, mcda_func::Type{<:MCDMMethod})::Matrix{Union{Float64,Int64}} +retrieve_ranks(site_ids::Vector, scores::Vector, maximize::Bool)::Matrix{Union{Float64,Int64}} Get location ranks using mcda technique specified in mcda_func, weights and a decision matrix S. @@ -302,12 +302,12 @@ function retrieve_ranks( scores::Vector, maximize::Bool, )::Matrix{Union{Float64, Int64}} - s_order::Vector{Int64} = sortperm(scores; rev = maximize) + s_order::Vector{Int64} = sortperm(scores; rev=maximize) return Union{Float64, Int64}[Int64.(site_ids[s_order]) scores[s_order]] end """ - create_decision_matrix(site_ids, in_conn, out_conn, leftover_space, wave_stress, heat_stress, predec, risk_tol) +create_decision_matrix(site_ids, in_conn, out_conn, leftover_space, wave_stress, heat_stress, predec, risk_tol) Creates criteria matrix `A`, where each column is a selection criterium and each row is a site. Sites are then filtered based on heat and wave stress risk. @@ -382,7 +382,7 @@ function create_decision_matrix( rule = (A[:, 4] .<= risk_tol) .& (A[:, 5] .> risk_tol) A[rule, 5] .= NaN - filtered = vec(.!any(isnan.(A); dims = 2)) + filtered = vec(.!any(isnan.(A); dims=2)) # Remove rows with NaNs A = A[filtered, :] @@ -391,7 +391,7 @@ function create_decision_matrix( end """ - create_seed_matrix(A, in_conn_seed, out_conn_seed, waves, heat, predec, low_cover) +create_seed_matrix(A, in_conn_seed, out_conn_seed, waves, heat, predec, low_cover) Create seeding specific decision matrix from criteria matrix. The weight criteria and filter. @@ -409,25 +409,25 @@ Create seeding specific decision matrix from criteria matrix. The weight criteri # Returns Tuple (SE, wse) - `SE` : Matrix of shape [n sites considered, 7] - 1. Site index ID - 2. Incoming centrality - 3. Outgoing centrality - 4. Wave risk (higher values = less risk) - 5. Damage risk (higher values = less risk) - 6. Priority predecessors - 7. GBRMPA zoning criteria - 8. Available space - 9. Location depth +1. Site index ID +2. Incoming centrality +3. Outgoing centrality +4. Wave risk (higher values = less risk) +5. Damage risk (higher values = less risk) +6. Priority predecessors +7. GBRMPA zoning criteria +8. Available space +9. Location depth - `wse` : 8-element vector of criteria weights - 1. 
incoming connectivity - 2. outgoing connectivity - 3. wave - 4. heat - 5. seed predecessors (weights importance of sites highly connected to priority sites for seeding) - 6. seed zones (weights importance of sites highly connected to or within priority zones for seeding) - 7. low cover (weights importance of sites with low cover/high available real estate to plant corals) - 8. depth +1. incoming connectivity +2. outgoing connectivity +3. wave +4. heat +5. seed predecessors (weights importance of sites highly connected to priority sites for seeding) +6. seed zones (weights importance of sites highly connected to or within priority zones for seeding) +7. low cover (weights importance of sites with low cover/high available real estate to plant corals) +8. depth """ function create_seed_matrix( A::Matrix{Float64}, @@ -460,13 +460,13 @@ function create_seed_matrix( SE[SE[:, 8] .<= 0.0, 8] .= NaN # Filter out sites with no space # Filter out identified locations - SE = SE[vec(.!any(isnan.(SE); dims = 2)), :] + SE = SE[vec(.!any(isnan.(SE); dims=2)), :] return SE, wse end """ - create_fog_matrix(A, wt_conn_fog , wt_waves_fog, wt_heat_fog, wt_predec_fog, wt_hi_cover) +create_fog_matrix(A, wt_conn_fog , wt_waves_fog, wt_heat_fog, wt_predec_fog, wt_hi_cover) Create shading specific decision matrix and apply weightings. @@ -483,20 +483,20 @@ Create shading specific decision matrix and apply weightings. # Returns Tuple (SH, wsh) - `SH` : Matrix of shape [n sites considered, 7] - 1. Site index ID - 2. Incoming Centrality - 3. Outgoing Centrality - 4. Wave risk (higher values = less risk) - 5. Damage risk (higher values = less risk) - 6. Priority predecessors relating to coral real estate relative to max capacity - 7. Available space +1. Site index ID +2. Incoming Centrality +3. Outgoing Centrality +4. Wave risk (higher values = less risk) +5. Damage risk (higher values = less risk) +6. Priority predecessors relating to coral real estate relative to max capacity +7. Available space - `wsh` : 5-element vector of criteria weights - 1. fog connectivity - 2. wave - 3. heat - 4. fog predecessors (weights importance of sites highly connected to priority sites for fogging) - 4. fog zones (weights importance of sites highly connected to or within priority zones) - 5. high cover (weights importance of sites with high cover of coral to fog) +1. fog connectivity +2. wave +3. heat +4. fog predecessors (weights importance of sites highly connected to priority sites for fogging) +4. fog zones (weights importance of sites highly connected to or within priority zones) +5. high cover (weights importance of sites with high cover of coral to fog) """ function create_fog_matrix( A::Matrix{Float64}, @@ -536,7 +536,7 @@ function create_fog_matrix( end """ - guided_site_selection(d_vars::DMCDA_vars, alg_ind::Int64, log_seed::Bool, log_fog::Bool, pref_seed_locs::AbstractArray{Int64}, pref_fog_locs::AbstractArray{Int64}, rankings_in::Matrix{Int64}) +guided_site_selection(d_vars::DMCDA_vars, alg_ind::Int64, log_seed::Bool, log_fog::Bool, pref_seed_locs::AbstractArray{Int64}, pref_fog_locs::AbstractArray{Int64}, rankings_in::Matrix{Int64}) # Arguments - `d_vars` : DMCDA_vars type struct containing weightings and criteria values for site selection. 
@@ -552,10 +552,10 @@ end
 # Returns
 Tuple :
-    - `pref_seed_locs` : Vector, Indices of preferred seeding locations
-    - `pref_fog_locs` : Vector, Indices of preferred shading locations
-    - `rankings` : Matrix[n_sites ⋅ 3] where columns are site_id, seeding_rank, shading_rank
-        Values of 0 indicate sites that were not considered
+- `pref_seed_locs` : Vector, Indices of preferred seeding locations
+- `pref_fog_locs` : Vector, Indices of preferred shading locations
+- `rankings` : Matrix[n_sites ⋅ 3] where columns are site_id, seeding_rank, shading_rank
+Values of 0 indicate sites that were not considered
 """
 function guided_site_selection(
     d_vars::DMCDA_vars,
@@ -568,8 +568,10 @@ function guided_site_selection(
     in_conn::Vector{Float64},
     out_conn::Vector{Float64},
     strong_pred::Vector{Int64};
-    methods_mcda = mcda_methods()
-)::Tuple{Vector{T}, Vector{T}, Matrix{T}} where {
+    methods_mcda=mcda_methods()
+)::Tuple{
+    Vector{T}, Vector{T}, Matrix{T}
+} where {
     T <: Int64, IA <: AbstractArray{<:Int64}, IB <: AbstractArray{<:Int64}, B <: Bool
 }
     site_ids = copy(d_vars.site_ids)
@@ -585,7 +587,9 @@ function guided_site_selection(
     end

     n_iv_locs::Int64 = d_vars.n_site_int
-    priority_sites::Array{Int64} = d_vars.priority_sites[in.(d_vars.priority_sites, [site_ids])]
+    priority_sites::Array{Int64} = d_vars.priority_sites[in.(
+        d_vars.priority_sites, [site_ids]
+    )]
     priority_zones::Array{String} = d_vars.priority_zones

     zones = d_vars.zones[site_ids]
@@ -612,7 +616,9 @@ function guided_site_selection(
         zone_preds_temp::Vector{Int64} = strong_pred[zones .== z_name]
         for s::Int64 in unique(zone_preds_temp)
             # for each predecessor site, add zone_weights * (no. of zone sites the site is a strongest predecessor for)
-            zone_preds[site_ids .== s] .= zone_preds[site_ids .== s] .+ (zone_weights[k] .* sum(zone_preds_temp .== s))
+            zone_preds[site_ids .== s] .=
+                zone_preds[site_ids .== s] .+
+                (zone_weights[k] .* sum(zone_preds_temp .== s))
         end
         # add zone_weights for sites in the zone (whether a strongest predecessor of a zone or not)
         zone_sites[zones .== z_name] .= zone_weights[k]
@@ -714,10 +720,10 @@ function guided_site_selection(
 end

 """
-    constrain_reef_cluster(reefs::Union{Vector{String}, Vector{Float64}},
-        s_order::Matrix{Union{Float64, Int64}}, rankings::Matrix{Int64},
-        area_to_seed::Float64, available_space::Vector{Float64}, n_iv_locs::Int64,
-        max_members::Int64)::Tuple{Vector{Int64}, Matrix{Int64}}
+constrain_reef_cluster(reefs::Union{Vector{String}, Vector{Float64}},
+s_order::Matrix{Union{Float64, Int64}}, rankings::Matrix{Int64},
+area_to_seed::Float64, available_space::Vector{Float64}, n_iv_locs::Int64,
+max_members::Int64)::Tuple{Vector{Int64}, Matrix{Int64}}

 # Arguments
 - `reefs` : List of the the reefs each location sits within
@@ -732,7 +738,7 @@ end
 # Returns
 Tuple :
 - `pref_locs` : Vector, Indices of preferred intervention locations
 - `rankings` : Matrix[n_sites ⋅ 3] where columns are site_id, seeding_rank, shading_rank
-    Values of 0 indicate sites that were not considered
+Values of 0 indicate sites that were not considered
 """
 function constrain_reef_cluster(
     reefs::Union{Vector{String}, Vector{Float64}},
@@ -753,7 +759,9 @@ function constrain_reef_cluster(
     local num_locs::Int64
     for _ in 1:max_iters
         # If enough space for seeding corals, keep n_site_int, else expand as needed
-        num_locs = max(findfirst(>=(area_to_seed), cumsum(available_space[loc_ordered_ids])), n_iv_locs)
+        num_locs = max(
+            findfirst(>=(area_to_seed), cumsum(available_space[loc_ordered_ids])), n_iv_locs
+        )
         pref_locs = loc_ordered_ids[1:num_locs]
@@ -766,7 +774,7 @@ function constrain_reef_cluster(
         pref_reefs = reefs[pref_locs] # Reefs that selected locations sit within

         # Number of times a reef appears within each location
-        reef_occurances = vec(sum(pref_reefs .== unique_reefs; dims = 1))
+        reef_occurances = vec(sum(pref_reefs .== unique_reefs; dims=1))

         # If more than n_reefs locations in a reef, swap out the worst locations
         reefs_swap = unique_reefs[(reef_occurances .> max_members)]
@@ -779,9 +787,11 @@ function constrain_reef_cluster(
         # Find locations in reefs which need replacement, and find the ids of lowest
         # ranked locations in this set
-        locs_to_replace = vcat([
-            pref_locs[pref_reefs .== reef][replace_start:end] for reef in reefs_swap
-        ]...)
+        locs_to_replace = vcat(
+            [
+                pref_locs[pref_reefs .== reef][replace_start:end] for reef in reefs_swap
+            ]...,
+        )

         # Acceptable reefs to switch out for
         reef_switch_ids = unique_reefs[(reef_occurances .+ 1) .<= max_members]

@@ -797,10 +807,13 @@ function constrain_reef_cluster(
         # Indices of the subset of locations which can be added which also sit within an
         # allowed reef
-        add_locs_ind = findall(dropdims(any(
-            reshape(reefs[alternate_loc_ids], 1, length(reefs[alternate_loc_ids]))
-            .==
-            reef_switch_ids; dims = 1); dims = 1))
+        add_locs_ind = findall(
+            dropdims(
+                any(
+                    reshape(reefs[alternate_loc_ids], 1, length(reefs[alternate_loc_ids]))
+                    .==
+                    reef_switch_ids; dims=1); dims=1),
+        )

         # New preferred location set
         locs_to_add_inds = add_locs_ind[1:length(locs_to_replace)]
@@ -825,9 +838,9 @@ function constrain_reef_cluster(
 end

 """
-    unguided_site_selection(pref_seed_locs::T, pref_fog_locs::T, seed_years::Bool,
-        fog_years::Bool, n_site_int::Int64, available_space::Vector{Float64}, depth::T)
-        ::Tuple{Vector,Vector} where {T<:Vector{Int64}}
+unguided_site_selection(pref_seed_locs::T, pref_fog_locs::T, seed_years::Bool,
+fog_years::Bool, n_site_int::Int64, available_space::Vector{Float64}, depth::T)
+::Tuple{Vector,Vector} where {T<:Vector{Int64}}

 Randomly select seed/fog site locations for the given year, constraining to sites with
 max. carrying capacity > 0.
@@ -864,13 +877,15 @@ function unguided_site_selection(
     if seed_years
         pref_seed_locs = zeros(Int64, n_site_int)
-        pref_seed_locs[1:s_n_site_int] .= StatsBase.sample(candidate_sites, s_n_site_int; replace = false)
+        pref_seed_locs[1:s_n_site_int] .= StatsBase.sample(
+            candidate_sites, s_n_site_int; replace=false
+        )
     end

     if fog_years
         pref_fog_locs = zeros(Int64, n_site_int)
         pref_fog_locs[1:s_n_site_int] .= StatsBase.sample(
-            candidate_sites, s_n_site_int; replace = false
+            candidate_sites, s_n_site_int; replace=false
         )
     end

@@ -878,7 +893,7 @@
 end

 """
-    summary_stat_env(env_layer::NamedDimsArray dims::Union{Symbol,Tuple{Symbol,Symbol}}; w=0.5)::Vector{Float64}
+summary_stat_env(env_layer::NamedDimsArray dims::Union{Symbol,Tuple{Symbol,Symbol}}; w=0.5)::Vector{Float64}

 Calculates mean over specified dimensions plus half the standard deviation.

@@ -890,18 +905,20 @@ Calculates mean over specified dimensions plus half the standard deviation.
 # Returns
 Weighted combination of mean and standard deviation of the projected environmental
 conditions (e.g., DHWs, wave stress, etc):
-    (μ * w) + (σ * (1 - w))
+(μ * w) + (σ * (1 - w))
 """
 function summary_stat_env(
     env_layer::AbstractArray,
     dims::Union{Int64, Symbol, Tuple{Symbol, Symbol}};
-    w = 0.5,
+    w=0.5,
 )::Vector{Float64}
-    return vec((mean(env_layer; dims = dims) .* w) .+ (std(env_layer; dims = dims) .* (1.0 - w)))
+    return vec(
+        (mean(env_layer; dims=dims) .* w) .+ (std(env_layer; dims=dims) .* (1.0 - w))
+    )
 end

 """
-    within_depth_bounds(loc_depth::Vector{T}, depth_max::T, depth_min::T)::BitVector{T} where {T<:Float64}
+within_depth_bounds(loc_depth::Vector{T}, depth_max::T, depth_min::T)::BitVector{T} where {T<:Float64}

 Determines whether a location is within the min/max depth bounds.
 Used to filter locations based on their depth for location selection.
diff --git a/test/site_selection.jl b/test/site_selection.jl
index 321242ce0c..215ae45040 100644
--- a/test/site_selection.jl
+++ b/test/site_selection.jl
@@ -9,101 +9,102 @@ if !@isdefined(ADRIA_DIR)
 end

 function test_site_ranks(
-    weights_set::Vector{Vector{Float64}},
-    A::Matrix{Float64},
-    rankings::Matrix{Int64},
-    n_site_int::Int64,
-    mcda_func::Function,
-    inv::Float64,
-)
-    S = ADRIA.decision.mcda_normalize(A)
-    S[:, 1] .= A[:, 1]
-    criteria_names = [
-        "heat stress",
-        "wave stress",
-        "median depth",
-        "coral cover space",
-        "in connectivity",
-        "out connectivity",
-    ]
-    for weights in weights_set
-        crit_inds = findall(weights .> 0.0)
-
-        prefsites, s_order = ADRIA.decision.rank_sites!(
-            S, weights, rankings, n_site_int, mcda_func, inv
-        )
-
-        names_temp = criteria_names[crit_inds]
-        names_string = string(["$(name), " for name in names_temp[1:(end - 1)]]...)
-
-        # Check that 2 best sites are selected (5 and 6)
-        @test any(prefsites .== 5) & any(prefsites .== 6) || string(
-            "For the ",
-            names_string,
-            "and $(names_temp[end]) criteria, the best sites (5 and 6) were not selected.",
-        )
-        # Check that 2 worst sites aren't selected (9 and 10)
-        @test any(prefsites .!= 9) & any(prefsites .!= 10) || string(
-            "For the ",
-            names_string,
-            "and $(names_temp[end]) criteria, the worst sites (9 and 10) were selected.",
-        )
-    end
+    weights_set::Vector{Vector{Float64}},
+    A::Matrix{Float64},
+    rankings::Matrix{Int64},
+    n_site_int::Int64,
+    mcda_func::Function,
+)
+    S = ADRIA.decision.mcda_normalize(A)
+    S[:, 1] .= A[:, 1]
+    criteria_names = [
+        "heat stress",
+        "wave stress",
+        "median depth",
+        "coral cover space",
+        "in connectivity",
+        "out connectivity",
+    ]
+    for weights in weights_set
+        # Find criteria being used for decision
+        crit_inds = findall(weights .> 0.0)
+
+        # Get site preference order
+        prefsites, s_order = ADRIA.decision.rank_sites!(
+            S, weights, rankings, n_site_int, mcda_func, 2
+        )
+
+        # Get names of criteria being used for error message
+        names_temp = criteria_names[crit_inds]
+        names_string = string(["$(name), " for name in names_temp[1:(end - 1)]]...)
+
+        # Check that 2 best sites are selected (5 and 6)
+        @test any(prefsites .== 5) & any(prefsites .== 6) || string(
+            "For the ",
+            names_string,
+            "and $(names_temp[end]) criteria, the best sites (5 and 6) were not selected.",
+        )
+        # Check that 2 worst sites aren't selected (9 and 10)
+        @test any(prefsites .!= 9) & any(prefsites .!= 10) || string(
+            "For the ",
+            names_string,
+            "and $(names_temp[end]) criteria, the worst sites (9 and 10) were selected.",
+        )
+    end
 end

 function test_mcda_funcs(rankings::Matrix{Int64}, S::Matrix{Float64},
-    weights::Vector{Float64},
-    mcda_funcs, n_site_int::Int64)
-    for mf in mcda_funcs
-        prefsites, s_order = ADRIA.decision.rank_sites!(
-            S, weights, rankings, n_site_int, mf, 2
-        )
-        # Check that 2 best sites are selected (5 and 6)
-        @test in(5, prefsites) .& in(6, prefsites) ||
-            "The best overall sites (5 and 6) were not chosen by method $mf."
-
-        # Check that 2 worst sites aren't selected (9 and 10)
-        @test any(prefsites .!= 9) & any(prefsites .!= 10) ||
-            "The worst overall sites (9 and 10) were chosen by method $mf."
-    end
+    weights::Vector{Float64}, mcda_funcs, n_site_int::Int64)
+    for mf in mcda_funcs
+        prefsites, s_order = ADRIA.decision.rank_sites!(
+            S, weights, rankings, n_site_int, mf, 2
+        )
+        # Check that 2 best sites are selected (5 and 6)
+        @test in(5, prefsites) .& in(6, prefsites) ||
+            "The best overall sites (5 and 6) were not chosen by method $mf."
+
+        # Check that 2 worst sites aren't selected (9 and 10)
+        @test any(prefsites .!= 9) & any(prefsites .!= 10) ||
+            "The worst overall sites (9 and 10) were chosen by method $mf."
+    end
 end

 function get_test_decision_matrix(dom::Domain)::Matrix{Float64}
-    cover = sum(dom.init_coral_cover; dims = :species)[species = 1]
-    leftover_space = 1 .- cover
-    k_area = dom.site_data.area .* dom.site_data.k
-    dhw_av = ADRIA.decision.summary_stat_env(dom.dhw_scens, (:timesteps, :scenarios))
-    wave_av = ADRIA.decision.summary_stat_env(dom.wave_scens, (:timesteps, :scenarios))
-    depth_med = dom.site_data.depth_med
-
-    TP_data = ADRIA.connectivity_strength(
-        dom.TP_data .* ADRIA.site_k_area(dom), collect(cover), dom.TP_data
-    )
-
-    site_ids = dom.site_data.site_id
+    cover = sum(dom.init_coral_cover; dims=:species)[species=1]
+    leftover_space = 1 .- cover
+    k_area = dom.site_data.area .* dom.site_data.k
+    dhw_av = ADRIA.decision.summary_stat_env(dom.dhw_scens, (:timesteps, :scenarios))
+    wave_av = ADRIA.decision.summary_stat_env(dom.wave_scens, (:timesteps, :scenarios))
+    depth_med = dom.site_data.depth_med
+
+    TP_data = ADRIA.connectivity_strength(
+        dom.TP_data .* ADRIA.site_k_area(dom), collect(cover), dom.TP_data
+    )

-    heat_stress =
-        1 .- vec((dhw_av .- minimum(dhw_av)) ./ (maximum(dhw_av) - minimum(dhw_av)))
-    wave_stress =
-        1 .- vec((wave_av .- minimum(wave_av)) ./ (maximum(wave_av) - minimum(wave_av)))
-    space_area = leftover_space .* k_area
-    in_conn = TP_data.in_conn
-    out_conn = TP_data.out_conn
+    heat_stress =
+        1 .- vec((dhw_av .- minimum(dhw_av)) ./ (maximum(dhw_av) - minimum(dhw_av)))
+    wave_stress =
+        1 .- vec((wave_av .- minimum(wave_av)) ./ (maximum(wave_av) - minimum(wave_av)))
+    space_area = leftover_space .* k_area
+    in_conn = TP_data.in_conn
+    out_conn = TP_data.out_conn

-    A = hcat(site_ids, heat_stress, wave_stress, depth_med, space_area, in_conn, out_conn)
+    site_ids = dom.site_data.site_id
+    # Simplified decision matrix for testing
+    A = hcat(site_ids, heat_stress, wave_stress, depth_med, space_area, in_conn, out_conn)

-    return A
+    return A
 end

 @testset "site selection" begin
-    # TODO: Complete tests with @tests
     dom = ADRIA.load_domain(TEST_DOMAIN_PATH, 45)
     p_tbl = ADRIA.param_table(dom)
-    p_tbl[:, :depth_offset] .= 7.0
+    p_tbl[:, :depth_offset] .= 7.0

-    # ranks = ADRIA.site_selection(dom, p_tbl, 1, 10, 1)
+    # ranks = ADRIA.site_selection(dom, p_tbl, 1, 10, 1)
 end

 @testset "MCDA variable constructor" begin
     dom = ADRIA.load_domain(TEST_DOMAIN_PATH, 45)
@@ -112,10 +113,10 @@
     site_ids = collect(1:length(dom.site_ids))
     available_space = rand(Uniform(200, 30000), length(site_ids))
-    area_to_seed = 962.11 # Area of seeded corals in m^2.
+    area_to_seed = 962.11 # Area of seeded corals in m^2.

-    dhw_scens = dom.dhw_scens[1, :, criteria_df.dhw_scenario[1]]
-    wave_scens = dom.wave_scens[1, :, criteria_df.wave_scenario[1]]
+    dhw_scens = dom.dhw_scens[1, :, criteria_df.dhw_scenario[1]]
+    wave_scens = dom.wave_scens[1, :, criteria_df.wave_scenario[1]]

     mcda_vars = ADRIA.decision.DMCDA_vars(
         dom,
@@ -127,20 +128,22 @@
         criteria_df,
         site_ids,
         available_space,
         area_to_seed,
         wave_scens,
         dhw_scens,
     )
     n_sites = length(mcda_vars.site_ids)
-    @test (size(mcda_vars.conn, 1) == n_sites) && (size(mcda_vars.conn, 2) == n_sites) || "Connectivity input is incorrect size."
+    @test (size(mcda_vars.conn, 1) == n_sites) && (size(mcda_vars.conn, 2) == n_sites) ||
+        "Connectivity input is incorrect size."
     @test length(mcda_vars.dam_prob) == n_sites || "Wave damage input is incorrect size."
-    @test length(mcda_vars.heat_stress_prob) == n_sites || "Heat stress input is incorrect size."
+    @test length(mcda_vars.heat_stress_prob) == n_sites ||
+        "Heat stress input is incorrect size."
     @test length(mcda_vars.leftover_space) == n_sites ||
         "Initial cover input is incorrect size."
 end

 @testset "Unguided site selection" begin
-    n_intervention_sites = 5
-    pref_seed_sites = zeros(Int64, n_intervention_sites)
-    pref_fog_sites = zeros(Int64, n_intervention_sites)
-    seed_years = true
-    fog_years = true
-    max_cover = [0.0, 3000.0, 5000.0, 0.0, 0.0]
-    depth_priority = collect(1:5)
+    n_intervention_sites = 5
+    pref_seed_sites = zeros(Int64, n_intervention_sites)
+    pref_fog_sites = zeros(Int64, n_intervention_sites)
+    seed_years = true
+    fog_years = true
+    max_cover = [0.0, 3000.0, 5000.0, 0.0, 0.0]
+    depth_priority = collect(1:5)

     pref_seed_sites, pref_fog_sites = ADRIA.decision.unguided_site_selection(
         pref_seed_sites,
@@ -152,83 +155,86 @@ end
         depth_priority,
     )

-    # Check that only two sites are selected (the sites where k > 0.0)
-    @test length(pref_seed_sites[pref_seed_sites .> 0]) == 2
-    @test length(pref_fog_sites[pref_fog_sites .> 0]) == 2
+    # Check that only two sites are selected (the sites where k > 0.0)
+    @test length(pref_seed_sites[pref_seed_sites .> 0]) == 2
+    @test length(pref_fog_sites[pref_fog_sites .> 0]) == 2

-    @test all([in(sid, [2, 3]) for sid in pref_seed_sites[pref_seed_sites .> 0]])
-    @test all([in(sid, [2, 3]) for sid in pref_fog_sites[pref_fog_sites .> 0]])
+    @test all([in(sid, [2, 3]) for sid in pref_seed_sites[pref_seed_sites .> 0]])
+    @test all([in(sid, [2, 3]) for sid in pref_fog_sites[pref_fog_sites .> 0]])
 end

 @testset "Guided site selection without ADRIA ecological model" begin
-    dom = ADRIA.load_domain(EXAMPLE_DOMAIN_PATH, 45)
+    dom = ADRIA.load_domain(TEST_DOMAIN_PATH, 45)

     site_ids = collect(1:length(dom.site_data.site_id))
-    A = get_test_decision_matrix(dom)
+    mcda_funcs = ADRIA.decision.mcda_methods()
+    A = get_test_decision_matrix(dom)

     n_sites = length(site_ids)
     n_site_int = 5

-    rankings = Int64[site_ids zeros(Int64, n_sites) zeros(Int64, n_sites)]
+    rankings = Int64[site_ids zeros(Int64, n_sites) zeros(Int64, n_sites)]

     # Test 0.0 or 1.0 weights in all combinations
     weights = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
-    weights_choice = collect(0.0:0.1:1.0)
     for num_crit in 1:6
         weights[num_crit] = 1.0
         weights_set = collect(distinct(permutations(weights, 6)))
         test_site_ranks(
-            weights_set, A, rankings, criteria_names, n_site_int, mcda_funcs[1], 1
+            weights_set, A, rankings, n_site_int, mcda_funcs[1]
         )
     end
+
+    # Test randomised weights in all combinations
     weights = rand(100, 6)
     weights = weights ./ sum(weights; dims=2)
     for ww in eachrow(weights)
         weights_set = collect(distinct(permutations(ww, 6)))
         test_site_ranks(
-            weights_set, A, rankings, criteria_names, n_site_int, mcda_funcs[1], 1
+            weights_set, A, rankings, n_site_int, mcda_funcs[1]
         )
     end
-
 end

-@testset "Test ranks line up with ordering" begin
+@testset "Ranks line up with ordering" begin
     supported_methods = ADRIA.decision.mcda_methods()
     mcda_func = supported_methods[rand(1:length(supported_methods))]

     n_sites = 20
-    S = ADRIA.decision.mcda_normalize(rand(Uniform(0, 1), n_sites, 6))
+    S = ADRIA.decision.mcda_normalize(rand(Uniform(0, 1), n_sites, 6))

-    weights = ADRIA.decision.mcda_normalize(rand(Uniform(0, 1), 6))
-    n_site_int = 5
-    site_ids = collect(1:n_sites)
-    S = hcat(site_ids, S)
+    weights = ADRIA.decision.mcda_normalize(rand(Uniform(0, 1), 6))
+    n_site_int = 5
+    site_ids = collect(1:n_sites)
+    S = hcat(site_ids, S)

-    rankings = Int64[site_ids zeros(Int64, n_sites) zeros(Int64, n_sites)]
+    rankings = Int64[site_ids zeros(Int64, n_sites) zeros(Int64, n_sites)]

-    prefsites, s_order = ADRIA.decision.rank_sites!(
-        S, weights, rankings, n_site_int, mcda_func, 2
-    )
-
-    @test all([(rankings[rankings[:, 1].==s_order[rank, 1], 2].==rank)[1] for rank in 1:size(s_order, 1)]) || "Ranking does not match mcda score ordering"
+    prefsites, s_order = ADRIA.decision.rank_sites!(
+        S, weights, rankings, n_site_int, mcda_func, 2
+    )
+    @test all([
+        (rankings[rankings[:, 1] .== s_order[rank, 1], 2] .== rank)[1] for
+        rank in 1:size(s_order, 1)
+    ]) || "Ranking does not match mcda score ordering"
 end

-@testset "Test each mcda method is working" begin
+@testset "MCDA methods" begin
     mcda_funcs = ADRIA.decision.mcda_methods()

-    dom = ADRIA.load_domain(EXAMPLE_DOMAIN_PATH, 45)
-    A = get_test_decision_matrix(dom)
+    dom = ADRIA.load_domain(TEST_DOMAIN_PATH, 45)
+    A = get_test_decision_matrix(dom)

-    site_ids = dom.site_data.site_id
-    n_sites = length(site_ids)
-    n_site_int = 5
+    site_ids = dom.site_data.site_id
+    n_sites = length(site_ids)
+    n_site_int = 5

-    rankings = Int64[site_ids zeros(Int64, n_sites) zeros(Int64, n_sites)]
+    rankings = Int64[site_ids zeros(Int64, n_sites) zeros(Int64, n_sites)]

-    S = ADRIA.decision.mcda_normalize(A)
-    S[:, 1] .= A[:, 1]
+    S = ADRIA.decision.mcda_normalize(A)
+    S[:, 1] .= A[:, 1]

-    weights = [1.0, 1.0, 1.0, 1.0, 0.0, 0.0]
+    weights = [1.0, 1.0, 1.0, 1.0, 0.0, 0.0]

-    test_mcda_funcs(rankings, S, weights, mcda_funcs, n_site_int)
-end
\ No newline at end of file
+    test_mcda_funcs(rankings, S, weights, mcda_funcs, n_site_int)
+end
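
For reviewers wanting to exercise the ranking path this patch touches, the following is a minimal Julia sketch mirroring the "Ranks line up with ordering" testset above. It assumes ADRIA and Distributions are loaded, as in the test suite; the variable names are illustrative only.

using ADRIA
using Distributions: Uniform

n_sites = 20
n_site_int = 5

# Normalise random criteria values and weights, as mcda_normalize expects
S = ADRIA.decision.mcda_normalize(rand(Uniform(0, 1), n_sites, 6))
weights = ADRIA.decision.mcda_normalize(rand(Uniform(0, 1), 6))

# First column of the decision matrix holds the site IDs
site_ids = collect(1:n_sites)
S = hcat(site_ids, S)

# rankings columns: site_id, seeding_rank, shading_rank; zeros mean "not considered"
rankings = Int64[site_ids zeros(Int64, n_sites) zeros(Int64, n_sites)]

# Rank with the first available MCDA method, writing seeding ranks into column 2
mcda_func = first(ADRIA.decision.mcda_methods())
prefsites, s_order = ADRIA.decision.rank_sites!(
    S, weights, rankings, n_site_int, mcda_func, 2
)

rank_sites! mutates rankings in place and returns the preferred location IDs together with the full score ordering, which is exactly what the ranking-consistency test checks.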