From a77264e8d87cd66043cf5a233fa19b0a1cddca70 Mon Sep 17 00:00:00 2001
From: Bagaev Dmitry
Date: Wed, 2 Oct 2024 17:37:58 +0200
Subject: [PATCH 1/2] restore error message for unparametrized Normal

---
 src/model/graphppl.jl                       |  6 +++---
 test/models/aliases/aliases_gamma_tests.jl  | 10 ++++++++++
 test/models/aliases/aliases_normal_tests.jl | 20 ++++++++++++++++++++
 3 files changed, 33 insertions(+), 3 deletions(-)

diff --git a/src/model/graphppl.jl b/src/model/graphppl.jl
index b542c0c33..d38573785 100644
--- a/src/model/graphppl.jl
+++ b/src/model/graphppl.jl
@@ -239,7 +239,7 @@ end
 GraphPPL.factor_alias(::ReactiveMPGraphPPLBackend, ::Type{Normal}, ::GraphPPL.StaticInterfaces{(:μ, :v)}) = ExponentialFamily.NormalMeanVariance
 GraphPPL.factor_alias(::ReactiveMPGraphPPLBackend, ::Type{Normal}, ::GraphPPL.StaticInterfaces{(:μ, :τ)}) = ExponentialFamily.NormalMeanPrecision

-GraphPPL.default_parametrization(::ReactiveMPGraphPPLBackend, ::Type{Normal}) =
+GraphPPL.default_parametrization(::ReactiveMPGraphPPLBackend, ::GraphPPL.Atomic, ::Type{Normal}, rhs) =
     error("`Normal` cannot be constructed without keyword arguments. Use `Normal(mean = ..., var = ...)` or `Normal(mean = ..., precision = ...)`.")

 # GraphPPL.interfaces(::ReactiveMPGraphPPLBackend, ::Type{<:ExponentialFamily.NormalMeanVariance}, _) = GraphPPL.StaticInterfaces((:out, :μ, :v))
@@ -251,7 +251,7 @@ GraphPPL.interface_aliases(::ReactiveMPGraphPPLBackend, ::Type{Normal}) = GraphP
 GraphPPL.factor_alias(::ReactiveMPGraphPPLBackend, ::Type{MvNormal}, ::GraphPPL.StaticInterfaces{(:μ, :Σ)}) = ExponentialFamily.MvNormalMeanCovariance
 GraphPPL.factor_alias(::ReactiveMPGraphPPLBackend, ::Type{MvNormal}, ::GraphPPL.StaticInterfaces{(:μ, :Λ)}) = ExponentialFamily.MvNormalMeanPrecision

-GraphPPL.default_parametrization(::ReactiveMPGraphPPLBackend, ::Type{MvNormal}) =
+GraphPPL.default_parametrization(::ReactiveMPGraphPPLBackend, ::GraphPPL.Atomic, ::Type{MvNormal}, rhs) =
     error("`MvNormal` cannot be constructed without keyword arguments. Use `MvNormal(mean = ..., covariance = ...)` or `MvNormal(mean = ..., precision = ...)`.")

 GraphPPL.interface_aliases(::ReactiveMPGraphPPLBackend, ::Type{MvNormal}) =
@@ -259,7 +259,7 @@ GraphPPL.interface_aliases(::ReactiveMPGraphPPLBackend, ::Type{MvNormal}) =
 GraphPPL.factor_alias(::ReactiveMPGraphPPLBackend, ::Type{Gamma}, ::GraphPPL.StaticInterfaces{(:α, :θ)}) = ExponentialFamily.GammaShapeScale
 GraphPPL.factor_alias(::ReactiveMPGraphPPLBackend, ::Type{Gamma}, ::GraphPPL.StaticInterfaces{(:α, :β)}) = ExponentialFamily.GammaShapeRate

-GraphPPL.default_parametrization(::ReactiveMPGraphPPLBackend, ::Type{Gamma}) =
+GraphPPL.default_parametrization(::ReactiveMPGraphPPLBackend, ::GraphPPL.Atomic, ::Type{Gamma}, rhs) =
     error("`Gamma` cannot be constructed without keyword arguments. Use `Gamma(shape = ..., rate = ...)` or `Gamma(shape = ..., scale = ...)`.")

 GraphPPL.interface_aliases(::ReactiveMPGraphPPLBackend, ::Type{Gamma}) =
diff --git a/test/models/aliases/aliases_gamma_tests.jl b/test/models/aliases/aliases_gamma_tests.jl
index 8de00fe4f..8e7a35791 100644
--- a/test/models/aliases/aliases_gamma_tests.jl
+++ b/test/models/aliases/aliases_gamma_tests.jl
@@ -37,3 +37,13 @@
     @test first(results.free_energy[end]) ≈ 4.385584096993327
     @test all(<=(1e-14), diff(results.free_energy)) # it oscilates a bit at the end, but all should be less or equal to zero
 end
+
+@testitem "`Gamma` by itself cannot be used as a node" begin
+    @model function gamma_by_itself(d)
+        x ~ Gamma(1.0, 1.0)
+        d ~ Gamma(x, 1.0)
+    end
+    @test_throws "`Gamma` cannot be constructed without keyword arguments. Use `Gamma(shape = ..., rate = ...)` or `Gamma(shape = ..., scale = ...)`." infer(
+        model = gamma_by_itself(), data = (d = 1.0,), iterations = 1, free_energy = false
+    )
+end
diff --git a/test/models/aliases/aliases_normal_tests.jl b/test/models/aliases/aliases_normal_tests.jl
index 3b83501e7..8eeb7592c 100644
--- a/test/models/aliases/aliases_normal_tests.jl
+++ b/test/models/aliases/aliases_normal_tests.jl
@@ -39,3 +39,23 @@
     @test last(result.free_energy) ≈ 2.319611135721246
     @test all(iszero, diff(result.free_energy))
 end
+
+@testitem "`Normal` by itself cannot be used as a node" begin
+    @model function normal_by_itself(d)
+        x ~ Normal(0.0, 1.0)
+        d ~ Normal(x, 1.0)
+    end
+    @test_throws "`Normal` cannot be constructed without keyword arguments. Use `Normal(mean = ..., var = ...)` or `Normal(mean = ..., precision = ...)`." infer(
+        model = normal_by_itself(), data = (d = 1.0,), iterations = 1, free_energy = false
+    )
+end
+
+@testitem "`MvNormal` by itself cannot be used as a node" begin
+    @model function mvnormal_by_itself(d)
+        x ~ MvNormal(zeros(2), diageye(2))
+        d ~ MvNormal(x, diageye(2))
+    end
+    @test_throws "`MvNormal` cannot be constructed without keyword arguments. Use `MvNormal(mean = ..., covariance = ...)` or `MvNormal(mean = ..., precision = ...)`." infer(
+        model = mvnormal_by_itself(), data = (d = 1.0,), iterations = 1, free_energy = false
+    )
+end

From 1c0e49b2c9c7bc23de3c57aae233553a7d1d2c14 Mon Sep 17 00:00:00 2001
From: Bagaev Dmitry
Date: Wed, 2 Oct 2024 18:59:26 +0200
Subject: [PATCH 2/2] fix docs and tests

---
 docs/src/manuals/comparison.md                        |  8 ++++----
 docs/src/manuals/constraints-specification.md         |  8 ++++----
 docs/src/manuals/model-specification.md               |  2 +-
 .../ProjectionExt/inference_with_projection_tests.jl  |  4 ++--
 test/inference/inference_tests.jl                     |  6 +++---
 test/model/graphppl_tests.jl                          |  2 +-
 test/model/initialization_plugin_tests.jl             | 12 ++++++------
 7 files changed, 21 insertions(+), 21 deletions(-)

diff --git a/docs/src/manuals/comparison.md b/docs/src/manuals/comparison.md
index 8241e11ef..f6befb941 100644
--- a/docs/src/manuals/comparison.md
+++ b/docs/src/manuals/comparison.md
@@ -49,19 +49,19 @@ Nowadays there's plenty of probabilistic programming languages and packages avai
 using RxInfer #hide

 @model function inner_inner(τ, y, x)
-    y ~ Normal(τ[1], τ[2] + x)
+    y ~ Normal(mean = τ[1], var = τ[2] + x)
 end

 @model function inner(θ, α)
-    β ~ Normal(0, 1)
-    α ~ Gamma(β, 1)
+    β ~ Normal(mean = 0.0, var = 1.0)
+    α ~ Gamma(shape = β, rate = 1.0)
     α ~ inner_inner(τ = θ, x = 3)
 end

 @model function outer()
     local w
     for i = 1:5
-        w[i] ~ inner(θ = Gamma(1, 1))
+        w[i] ~ inner(θ = Gamma(shape = 1.0, rate = 1.0))
     end
     y ~ inner(θ = w[2:3])
 end
diff --git a/docs/src/manuals/constraints-specification.md b/docs/src/manuals/constraints-specification.md
index 294d84ad2..6c7605348 100644
--- a/docs/src/manuals/constraints-specification.md
+++ b/docs/src/manuals/constraints-specification.md
@@ -177,19 +177,19 @@ Read more about the `@constraints` macro in the [official documentation](https:/

 ```@example manual_constraints
 @model function inner_inner(τ, y)
-    y ~ Normal(τ[1], τ[2])
+    y ~ Normal(mean = τ[1], var = τ[2])
 end

 @model function inner(θ, α)
-    β ~ Normal(0, 1)
-    α ~ Gamma(β, 1)
+    β ~ Normal(mean = 0.0, var = 1.0)
+    α ~ Gamma(shape = β, rate = 1.0)
     α ~ inner_inner(τ = θ)
 end

 @model function outer()
     local w
     for i = 1:5
-        w[i] ~ inner(θ = Gamma(1, 1))
+        w[i] ~ inner(θ = Gamma(shape = 1.0, rate = 1.0))
     end
     y ~ inner(θ = w[2:3])
 end
diff --git a/docs/src/manuals/model-specification.md b/docs/src/manuals/model-specification.md
index d7e24d53e..15b137e73 100644
--- a/docs/src/manuals/model-specification.md
+++ b/docs/src/manuals/model-specification.md
@@ -23,7 +23,7 @@ The `@model` macro returns a regular Julia function (in this example `model_name

 ```@example model-specification-model-macro
 using RxInfer #hide
 @model function my_model(observation, hyperparameter)
-    observations ~ Normal(0.0, hyperparameter)
+    observations ~ Normal(mean = 0.0, var = hyperparameter)
 end
 ```
diff --git a/test/ext/ProjectionExt/inference_with_projection_tests.jl b/test/ext/ProjectionExt/inference_with_projection_tests.jl
index fea835d6a..1f4be22f0 100644
--- a/test/ext/ProjectionExt/inference_with_projection_tests.jl
+++ b/test/ext/ProjectionExt/inference_with_projection_tests.jl
@@ -383,7 +383,7 @@

     @model function mymodel(y, C)
         a ~ Beta(2, 1)
-        b ~ Gamma(2, 1)
+        b ~ Gamma(shape = 2.0, rate = 1.0)
         μ := foo(a, b)
         for i in eachindex(y)
             y[i] ~ Normal(mean = μ, variance = C)
@@ -448,7 +448,7 @@

     @model function mymodel(y, C)
         a ~ Beta(1, 1)
-        b ~ Gamma(1, 1)
+        b ~ Gamma(shape = 1.0, rate = 1.0)
         μ := foo(a, b)
         for i in eachindex(y)
             y[i] ~ MvNormal(mean = μ, covariance = C)
diff --git a/test/inference/inference_tests.jl b/test/inference/inference_tests.jl
index 041516682..a4820670b 100644
--- a/test/inference/inference_tests.jl
+++ b/test/inference/inference_tests.jl
@@ -30,7 +30,7 @@ end
 @testitem "__infer_create_factor_graph_model" begin
     @model function simple_model_for_infer_create_model(y, a, b)
         x ~ Beta(a, b)
-        y ~ Normal(x, 1.0)
+        y ~ Normal(mean = x, var = 1.0)
     end

     import RxInfer: __infer_create_factor_graph_model, ProbabilisticModel, getmodel
@@ -52,7 +52,7 @@
     # A simple model for testing that resembles a simple kalman filter with
     # random walk state transition and unknown observational noise
     @model function test_model1(y)
-        τ ~ Gamma(1.0, 1.0)
+        τ ~ Gamma(shape = 1.0, rate = 1.0)

         x[1] ~ Normal(mean = 0.0, variance = 1.0)
         y[1] ~ Normal(mean = x[1], precision = τ)
@@ -364,7 +364,7 @@ end
 @testitem "Invalid data size error" begin
     @model function test_model1(y)
         n = length(y)
-        τ ~ Gamma(1.0, 1.0)
+        τ ~ Gamma(shape = 1.0, rate = 1.0)

         x[1] ~ Normal(mean = 0.0, variance = 1.0)
         y[1] ~ Normal(mean = x[1], precision = τ)
diff --git a/test/model/graphppl_tests.jl b/test/model/graphppl_tests.jl
index e076c09e3..f65f66f23 100644
--- a/test/model/graphppl_tests.jl
+++ b/test/model/graphppl_tests.jl
@@ -12,7 +12,7 @@

     input = :(a = constvar())
     @test @capture(apply_pipeline(input, error_datavar_constvar_randomvar), error(_))

-    input = :(x ~ Normal(0, 1))
+    input = :(x ~ Normal(mean = 0.0, var = 1.0))
     @test apply_pipeline(input, error_datavar_constvar_randomvar) == input
 end
diff --git a/test/model/initialization_plugin_tests.jl b/test/model/initialization_plugin_tests.jl
index 90e930be4..706b37fcd 100644
--- a/test/model/initialization_plugin_tests.jl
+++ b/test/model/initialization_plugin_tests.jl
@@ -26,8 +26,8 @@ end
     import RxInfer: SpecificSubModelInit, InitSpecification, InitDescriptor, InitMarginal, InitObject, GeneralSubModelInit

     @model function dummymodel()
-        x ~ Normal(0, 1)
-        y ~ Normal(x, 1)
+        x ~ Normal(mean = 0.0, var = 1.0)
+        y ~ Normal(mean = x, var = 1.0)
     end

     @test SpecificSubModelInit(GraphPPL.FactorID(dummymodel, 1), InitSpecification()) isa SpecificSubModelInit
@@ -42,8 +42,8 @@ end
     import RxInfer: SpecificSubModelInit, InitSpecification, InitDescriptor, InitMarginal, InitObject, GeneralSubModelInit

     @model function dummymodel()
-        x ~ Normal(0, 1)
-        y ~ Normal(x, 1)
+        x ~ Normal(mean = 0.0, var = 1.0)
+        y ~ Normal(mean = x, var = 1.0)
     end

     @test GeneralSubModelInit(dummymodel, InitSpecification()) isa GeneralSubModelInit
@@ -641,7 +641,7 @@ end
        local x
        for i in 1:3
            for j in 1:3
-               x[i, j] ~ Normal(0, 1)
+               x[i, j] ~ Normal(mean = 0.0, var = 1.0)
            end
        end
    end
@@ -683,7 +683,7 @@ end
    @test default_init(some_model) === RxInfer.EmptyInit

    @model function model_with_init()
-        x ~ Normal(0.0, 1.0)
+        x ~ Normal(mean = 0.0, var = 1.0)
    end

    default_init(::typeof(model_with_init)) = @initialization begin
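
For reference, here is a short, self-contained sketch (not part of the patches above) of the behaviour these changes restore: inside `@model`, `Normal`, `MvNormal` and `Gamma` must be given keyword arguments so the parametrization (mean/variance vs mean/precision, shape/rate vs shape/scale) is unambiguous, while the positional form hits `default_parametrization` and fails with the error messages restored in `src/model/graphppl.jl`. The model names below are made up for illustration only.

```julia
using RxInfer, Test

# Keyword parametrization selects the node type explicitly.
@model function keyword_model(y)
    θ ~ Normal(mean = 0.0, var = 1.0)       # resolves to ExponentialFamily.NormalMeanVariance
    y ~ Normal(mean = θ, precision = 10.0)  # resolves to ExponentialFamily.NormalMeanPrecision
end

# Positional arguments are ambiguous and now throw the restored error message.
@model function positional_model(y)
    θ ~ Normal(0.0, 1.0)
    y ~ Normal(θ, 1.0)
end

result = infer(model = keyword_model(), data = (y = 0.5,))  # runs fine
@test_throws "`Normal` cannot be constructed without keyword arguments." infer(
    model = positional_model(), data = (y = 0.5,)
)
```

The same pattern applies to `Gamma(shape = ..., rate = ...)` / `Gamma(shape = ..., scale = ...)` and `MvNormal(mean = ..., covariance = ...)` / `MvNormal(mean = ..., precision = ...)`, which is why the documentation examples and tests touched by the second patch spell out the keyword form.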