diff --git a/src/code_gen/main.jl b/src/code_gen/main.jl
index e22b590..0677d2d 100644
--- a/src/code_gen/main.jl
+++ b/src/code_gen/main.jl
@@ -57,14 +57,13 @@ Execute the given generated_code (as returned by [`gen_code`](@ref)) on the give
 """
 function execute(generated_code, input::Dict{ParticleType, Vector{Particle}})
     (code, inputSymbols, outputSymbol) = generated_code
-    @assert length(input) == length(inputSymbols)
 
     assignInputs = Vector{Expr}()
     for (name, symbol) in inputSymbols
         type = nothing
-        if startswith("A", name)
+        if startswith(name, "A")
             type = A
-        elseif startswith("B", name)
+        elseif startswith(name, "B")
             type = B
         else
             type = C
diff --git a/src/models/abc/create.jl b/src/models/abc/create.jl
index cf9e39d..865b18c 100644
--- a/src/models/abc/create.jl
+++ b/src/models/abc/create.jl
@@ -26,8 +26,44 @@ Note: This does not take into account the preservation of momenta required for a
 """
 function gen_particles(ns::Dict{ParticleType, Int})
     particles = Dict{ParticleType, Vector{Particle}}()
-    rng = MersenneTwister(0)
+    rng = MersenneTwister(0)
+
+    if ns == Dict((A => 2), (B => 2))
+        rho = 1.0
+
+        omega = rand(rng, Float64)
+        theta = rand(rng, Float64) * π
+        phi = rand(rng, Float64) * π
+
+        particles[A] = Vector{Particle}()
+        particles[B] = Vector{Particle}()
+
+        push!(particles[A], Particle(omega, 0, 0, omega, A))
+        push!(particles[B], Particle(omega, 0, 0, -omega, B))
+        push!(
+            particles[A],
+            Particle(
+                omega,
+                rho * cos(theta) * cos(phi),
+                rho * cos(theta) * sin(phi),
+                rho * sin(theta),
+                A,
+            ),
+        )
+        push!(
+            particles[B],
+            Particle(
+                omega,
+                -rho * cos(theta) * cos(phi),
+                -rho * cos(theta) * sin(phi),
+                -rho * sin(theta),
+                B,
+            ),
+        )
+        return particles
+    end
+

     for (type, n) in ns
         particles[type] = Vector{Particle}()
         for i in 1:n
diff --git a/test/runtests.jl b/test/runtests.jl
index 2ea1a76..5827252 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -8,6 +8,7 @@ using Test
     include("unit_tests_properties.jl")
     include("node_reduction.jl")
     include("unit_tests_graph.jl")
+    include("unit_tests_execution.jl")
     include("known_graphs.jl")
 end

diff --git a/test/unit_tests_execution.jl b/test/unit_tests_execution.jl
new file mode 100644
index 0000000..d0c0f68
--- /dev/null
+++ b/test/unit_tests_execution.jl
@@ -0,0 +1,31 @@
+import MetagraphOptimization.A
+import MetagraphOptimization.B
+import MetagraphOptimization.ParticleType
+
+@testset "Unit Tests Execution" begin
+    particles = Dict{ParticleType, Vector{Particle}}(
+        (
+            A => [
+                Particle(0.823648, 0.0, 0.0, 0.823648, A),
+                Particle(0.823648, -0.835061, -0.474802, 0.277915, A),
+            ]
+        ),
+        (
+            B => [
+                Particle(0.823648, 0.0, 0.0, -0.823648, B),
+                Particle(0.823648, 0.835061, 0.474802, -0.277915, B),
+            ]
+        ),
+    )
+
+    expected_result = 5.5320567694746876e-5
+
+    for _ in 1:10 # test in a loop because graph layout should not change the result
+        graph = parse_abc(joinpath(@__DIR__, "..", "input", "AB->AB.txt"))
+        @test isapprox(execute(graph, particles), expected_result; rtol = 0.001)
+
+        code = MetagraphOptimization.gen_code(graph)
+        @test isapprox(execute(code, particles), expected_result; rtol = 0.001)
+    end
+end
+println("Execution Unit Tests Complete!")
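Usage note (not part of the diff above): a minimal sketch of how the new momentum-conserving input generation and the two execution paths are intended to fit together, mirroring the test file added here. It assumes `parse_abc` and `execute` are available via `using MetagraphOptimization` (as in the test suite), that `gen_particles` is reachable under the module name, and that the `AB->AB.txt` input file sits at the path given below; names and paths may need adjusting to the actual package layout.

using MetagraphOptimization
import MetagraphOptimization.A
import MetagraphOptimization.B

# Momentum-conserving A + B -> A + B input set (the new special case in gen_particles).
particles = MetagraphOptimization.gen_particles(Dict((A => 2), (B => 2)))

# Parse the process graph once, then evaluate it directly...
graph = parse_abc(joinpath(@__DIR__, "input", "AB->AB.txt"))  # hypothetical path
result_graph = execute(graph, particles)

# ...or generate Julia code for the graph and execute that instead.
code = MetagraphOptimization.gen_code(graph)
result_code = execute(code, particles)

# Both paths should agree on identical inputs, just as the test compares both against expected_result.
@assert isapprox(result_graph, result_code; rtol = 0.001)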