# Compare commits

6 commits:

- 0c74a3f30e
- 241fd227c1
- a97a878881
- 1cf8ae643c
- 8b4e2b5ef7
- 61b6541b81
CI workflow: the runner for the `prepare`, `test`, and `docs` jobs changes from `arch-latest` to `ubuntu-22.04`.

```diff
@@ -8,7 +8,7 @@ env:

 jobs:
   prepare:
-    runs-on: arch-latest
+    runs-on: ubuntu-22.04

     steps:
       - name: Checkout repository
@@ -65,7 +65,7 @@ jobs:

   test:
     needs: prepare
-    runs-on: arch-latest
+    runs-on: ubuntu-22.04

     steps:
       - name: Checkout repository
@@ -127,7 +127,7 @@ jobs:

   docs:
     needs: prepare
-    runs-on: arch-latest
+    runs-on: ubuntu-22.04

     steps:
       - name: Checkout repository
```
New file `docs/src/lib/internals/estimator.md`:

````markdown
# Estimator

## Interface

The interface that has to be implemented for an estimator.

```@autodocs
Modules = [MetagraphOptimization]
Pages = ["estimator/interface.jl"]
Order = [:type, :constant, :function]
```

## Global Metric Estimator

Implementation of a global metric estimator. It uses the graph properties compute effort, data transfer, and compute intensity.

```@autodocs
Modules = [MetagraphOptimization]
Pages = ["estimator/global_metric.jl"]
Order = [:type, :function]
```
````
The main module file (`module MetagraphOptimization`) re-groups the export list, moves the ABC-model exports out of the code-generation section, adds the estimator exports, and includes the two new estimator source files:

```diff
@@ -5,6 +5,7 @@ A module containing tools to work on DAGs.
 """
 module MetagraphOptimization

 # graph types
 export DAG
 export Node
 export Edge
@@ -18,6 +19,7 @@ export FusedComputeTask
 export PossibleOperations
 export GraphProperties

 # graph functions
 export make_node
 export make_edge
 export insert_node
@@ -27,10 +29,13 @@ export is_exit_node
 export parents
 export children
 export compute
 export data
 export compute_effort
 export get_properties
 export get_exit_node
 export is_valid, is_scheduled

 # graph operation related
 export Operation
 export AppliedOperation
 export NodeFusion
@@ -42,6 +47,10 @@ export can_pop
 export reset_graph!
 export get_operations

+# ABC model
+export ParticleValue
+export ParticleA, ParticleB, ParticleC
+export ABCProcessDescription, ABCProcessInput, ABCModel
 export ComputeTaskP
 export ComputeTaskS1
 export ComputeTaskS2
@@ -49,14 +58,17 @@ export ComputeTaskV
 export ComputeTaskU
 export ComputeTaskSum

 # code generation related
 export execute
 export parse_dag, parse_process
 export gen_process_input
 export get_compute_function
-export ParticleValue
-export ParticleA, ParticleB, ParticleC
-export ABCProcessDescription, ABCProcessInput, ABCModel

+# estimator
+export cost_type, graph_cost, operation_effect
+export GlobalMetricEstimator, CDCost

 # machine info
 export Machine
 export get_machine_info

@@ -121,6 +133,9 @@ include("task/compute.jl")
 include("task/print.jl")
 include("task/properties.jl")

+include("estimator/interface.jl")
+include("estimator/global_metric.jl")

 include("models/interface.jl")
 include("models/print.jl")
```
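Putting the new exports together, here is a minimal usage sketch; the input path is an assumption mirroring the test files under `input/`:

```julia
using MetagraphOptimization

# parse a DAG for the ABC model, as the tests do (input path is assumed)
graph = parse_dag(joinpath("input", "AB->AB.txt"), ABCModel())

estimator = GlobalMetricEstimator()
cost = graph_cost(estimator, graph)   # a CDCost NamedTuple

cost_type(estimator) == CDCost                            # true
cost.computeIntensity == cost.computeEffort / cost.data   # true, per the CDCost docstring
```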
New file `src/estimator/global_metric.jl`:

```julia
"""
    CDCost

Representation of a [`DAG`](@ref)'s cost as estimated by the [`GlobalMetricEstimator`](@ref).

# Fields:
`.data`: The total data transfer.\\
`.computeEffort`: The total compute effort.\\
`.computeIntensity`: The compute intensity, will always equal `.computeEffort / .data`.

!!! note
    Note that the `computeIntensity` doesn't necessarily make sense in the context of only operation costs.
    For example, for node fusions this will always be 0, since the computeEffort is zero.
    It will still work as intended when adding/subtracting to/from a `graph_cost` estimate.
"""
const CDCost = NamedTuple{(:data, :computeEffort, :computeIntensity), Tuple{Float64, Float64, Float64}}

import Base: +, -  # extend Base's arithmetic operators for CDCost

function +(cost1::CDCost, cost2::CDCost)::CDCost
    d = cost1.data + cost2.data
    ce = cost1.computeEffort + cost2.computeEffort
    return (data = d, computeEffort = ce, computeIntensity = ce / d)::CDCost
end

function -(cost1::CDCost, cost2::CDCost)::CDCost
    d = cost1.data - cost2.data
    ce = cost1.computeEffort - cost2.computeEffort
    return (data = d, computeEffort = ce, computeIntensity = ce / d)::CDCost
end

struct GlobalMetricEstimator <: AbstractEstimator end

function cost_type(estimator::GlobalMetricEstimator)
    return CDCost
end

function graph_cost(estimator::GlobalMetricEstimator, graph::DAG)
    properties = get_properties(graph)
    return (
        data = properties.data,
        computeEffort = properties.computeEffort,
        computeIntensity = properties.computeIntensity,
    )::CDCost
end

function operation_effect(estimator::GlobalMetricEstimator, graph::DAG, operation::NodeFusion)
    return (data = -data(operation.input[2].task), computeEffort = 0.0, computeIntensity = 0.0)::CDCost
end

function operation_effect(estimator::GlobalMetricEstimator, graph::DAG, operation::NodeReduction)
    s = length(operation.input) - 1
    return (
        data = s * -data(operation.input[1].task),
        computeEffort = s * -compute_effort(operation.input[1].task),
        # the reduced nodes are either all data nodes (only data changes) or all
        # compute nodes (only compute effort changes)
        computeIntensity = eltype(operation.input) <: DataTaskNode ? 0.0 : Inf,
    )::CDCost
end

function operation_effect(estimator::GlobalMetricEstimator, graph::DAG, operation::NodeSplit)
    s = length(operation.input.parents) - 1
    d = s * data(operation.input.task)
    ce = s * compute_effort(operation.input.task)
    return (data = d, computeEffort = ce, computeIntensity = ce / d)::CDCost
end
```
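To see why `+` and `-` re-derive the intensity instead of adding it, here is a standalone sketch of the same arithmetic; it mirrors, rather than imports, the definitions above (`Cost` and `add` are stand-in names):

```julia
# Standalone mirror of the CDCost arithmetic, runnable outside the package.
const Cost = NamedTuple{(:data, :computeEffort, :computeIntensity), Tuple{Float64, Float64, Float64}}

function add(a::Cost, b::Cost)::Cost
    d = a.data + b.data
    ce = a.computeEffort + b.computeEffort
    # intensity is a ratio, so it is recomputed from the summed totals
    return (data = d, computeEffort = ce, computeIntensity = ce / d)
end

x = (data = 100.0, computeEffort = 50.0, computeIntensity = 0.5)::Cost
y = (data = 300.0, computeEffort = 150.0, computeIntensity = 0.5)::Cost

@assert add(x, y).computeIntensity == 200.0 / 400.0  # 0.5, not 0.5 + 0.5
```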
New file `src/estimator/interface.jl`:

```julia
"""
    AbstractEstimator

Abstract base type for an estimator. An estimator estimates the cost of a graph or the difference an operation applied to a graph will make to its cost.

Interface functions are
- [`graph_cost`](@ref)
- [`operation_effect`](@ref)
"""
abstract type AbstractEstimator end

"""
    cost_type(estimator::AbstractEstimator)

Interface function returning a specific estimator's cost type, i.e., the type returned by its implementation of [`graph_cost`](@ref) and [`operation_effect`](@ref).
"""
function cost_type end

"""
    graph_cost(estimator::AbstractEstimator, graph::DAG)

Get the total estimated cost of the graph. The cost's data type can be chosen by the implementation, but should have usable comparison operators (<, <=, >, >=, ==) and basic math operators (+, -, *, /).
"""
function graph_cost end

"""
    operation_effect(estimator::AbstractEstimator, graph::DAG, operation::Operation)

Get the estimated effect on the graph's cost, such that `graph_cost(estimator, graph) + operation_effect(estimator, graph, operation) ~= graph_cost(estimator, graph_with_operation_applied)`. There is no hard requirement for this, but the better the estimate, the better an optimization algorithm that uses it will be.

!!! note
    There is a default implementation of this function, applying the operation, calling [`graph_cost`](@ref), then popping the operation again.

    It can be much faster to overload this function for a specific estimator and directly compute the effect of the operation where possible.
"""
function operation_effect(estimator::AbstractEstimator, graph::DAG, operation::Operation)
    # this is currently not working reliably, see issue #16
    cost = graph_cost(estimator, graph)
    push_operation!(graph, operation)
    cost_after = graph_cost(estimator, graph)
    pop_operation!(graph)
    return cost_after - cost
end
```
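As a concrete (hypothetical) instance of this interface, here is a toy estimator over a stand-in graph type; `ToyGraph` and `NodeCountEstimator` are invented for illustration and are not part of the package:

```julia
# Self-contained illustration of the estimator interface.
abstract type AbstractEstimator end

struct ToyGraph       # stand-in for DAG
    nodes::Int
end

struct NodeCountEstimator <: AbstractEstimator end

cost_type(::NodeCountEstimator) = Int
graph_cost(::NodeCountEstimator, g::ToyGraph) = g.nodes

# effect of a made-up operation that removes `k` nodes
operation_effect(::NodeCountEstimator, g::ToyGraph, k::Int) = -k

est = NodeCountEstimator()
g = ToyGraph(10)
# the defining property: cost before + effect == cost after
@assert graph_cost(est, g) + operation_effect(est, g, 3) == 7
```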
In the ABC model's `compute` function for `ComputeTaskS2`, the inner-edge factor is hoisted into a local:

```diff
@@ -55,7 +55,8 @@ function compute(::ComputeTaskS2, data1::ParticleValue{P}, data2::ParticleValue{
     @assert isapprox(data1.p.momentum.py, -data2.p.momentum.py, rtol = 0.001, atol = sqrt(eps())) "py: $(data1.p.momentum.py) vs. $(data2.p.momentum.py)"
     @assert isapprox(data1.p.momentum.pz, -data2.p.momentum.pz, rtol = 0.001, atol = sqrt(eps())) "pz: $(data1.p.momentum.pz) vs. $(data2.p.momentum.pz)"
     =#
-    return data1.v * inner_edge(data1.p) * data2.v
+    inner = inner_edge(data1.p)
+    return data1.v * inner * data2.v
 end

 """
```
In the ABC model's particle definitions, `QEDbase.mass` is imported and the type comparisons in `interaction_result` are fixed:

```diff
@@ -1,5 +1,7 @@
 using QEDbase

+import QEDbase.mass
+
 """
     ABCModel <: AbstractPhysicsModel

@@ -87,9 +89,9 @@ For 2 given (non-equal) particle types, return the third of ABC.
 """
 function interaction_result(t1::Type{T1}, t2::Type{T2}) where {T1 <: ABCParticle, T2 <: ABCParticle}
     @assert t1 != t2
-    if t1 != Type{ParticleA} && t2 != Type{ParticleA}
+    if t1 != ParticleA && t2 != ParticleA
         return ParticleA
-    elseif t1 != Type{ParticleB} && t2 != Type{ParticleB}
+    elseif t1 != ParticleB && t2 != ParticleB
         return ParticleB
     else
         return ParticleC
@@ -161,7 +163,6 @@ Takes 4 effective FLOP.
 function preserve_momentum(p1::ABCParticle, p2::ABCParticle)
     t3 = interaction_result(typeof(p1), typeof(p2))
     p3 = t3(p1.momentum + p2.momentum)
-
     return p3
 end
```
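The `interaction_result` fix deserves a word: under `where {T1 <: ABCParticle}`, `t1` is bound to the particle type itself (e.g., `ParticleA`), never to the wrapper `Type{ParticleA}`, so the old comparisons were always true and the function always returned `ParticleA`. A standalone sketch with a stand-in type:

```julia
# Stand-in type; A plays the role of ParticleA.
struct A end

f(t1::Type{T1}) where {T1} = (t1 == A, t1 == Type{A})

@assert f(A) == (true, false)  # t1 is A itself, never Type{A}
```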
The ABC model's task compute efforts move from integer constants to `Float64` (the new `CDCost` stores all three metrics as `Float64`):

```diff
@@ -3,35 +3,35 @@

 Return the compute effort of an S1 task.
 """
-compute_effort(t::ComputeTaskS1) = 11
+compute_effort(t::ComputeTaskS1) = 11.0

 """
     compute_effort(t::ComputeTaskS2)

 Return the compute effort of an S2 task.
 """
-compute_effort(t::ComputeTaskS2) = 12
+compute_effort(t::ComputeTaskS2) = 12.0

 """
     compute_effort(t::ComputeTaskU)

 Return the compute effort of a U task.
 """
-compute_effort(t::ComputeTaskU) = 1
+compute_effort(t::ComputeTaskU) = 1.0

 """
     compute_effort(t::ComputeTaskV)

 Return the compute effort of a V task.
 """
-compute_effort(t::ComputeTaskV) = 6
+compute_effort(t::ComputeTaskV) = 6.0

 """
     compute_effort(t::ComputeTaskP)

 Return the compute effort of a P task.
 """
-compute_effort(t::ComputeTaskP) = 0
+compute_effort(t::ComputeTaskP) = 0.0

 """
     compute_effort(t::ComputeTaskSum)
@@ -41,7 +41,7 @@ Return the compute effort of a Sum task.
 Note: This is a constant compute effort, even though sum scales with the number of its inputs. Since there is only ever a single sum node in a graph generated from the ABC-Model, this doesn't matter.
 """
-compute_effort(t::ComputeTaskSum) = 1
+compute_effort(t::ComputeTaskSum) = 1.0

 """
     show(io::IO, t::DataTask)
```
The ABC `DataTask` size and the generic task defaults follow suit:

```diff
@@ -4,7 +4,7 @@

 Task representing a specific data transfer in the ABC Model.
 """
 struct DataTask <: AbstractDataTask
-    data::UInt64
+    data::Float64
 end

 """
@@ -49,7 +49,7 @@

 Return the compute effort of a data task, always zero, regardless of the specific task.
 """
-compute_effort(t::AbstractDataTask) = 0
+compute_effort(t::AbstractDataTask) = 0.0

 """
     data(t::AbstractDataTask)

@@ -63,7 +63,7 @@ data(t::AbstractDataTask) = getfield(t, :data)

 Return the data of a compute task, always zero, regardless of the specific task.
 """
-data(t::AbstractComputeTask) = 0
+data(t::AbstractComputeTask) = 0.0

 """
     compute_effort(t::FusedComputeTask)
```
The test environment gains AccurateArithmetic (for `sum_kbn` in the new ground-truth helper):

```diff
@@ -1,4 +1,5 @@
 [deps]
+AccurateArithmetic = "22286c92-06ac-501d-9306-4abd417d9753"
 QEDbase = "10e22c08-3ccb-4172-bfcf-7d7aa3d04d93"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
```
The test runner includes the two new test files:

```diff
@@ -6,6 +6,8 @@ using Test
 include("unit_tests_tasks.jl")
 include("unit_tests_nodes.jl")
 include("unit_tests_properties.jl")
+include("unit_tests_estimator.jl")
+include("unit_tests_abcmodel.jl")
 include("node_reduction.jl")
 include("unit_tests_graph.jl")
 include("unit_tests_execution.jl")
```
New file `test/unit_tests_abcmodel.jl`:

```julia
using MetagraphOptimization
using QEDbase

import MetagraphOptimization.interaction_result

def_momentum = SFourMomentum(1.0, 0.0, 0.0, 0.0)

testparticleTypes = [ParticleA, ParticleB, ParticleC]
testparticles = [ParticleA(def_momentum), ParticleB(def_momentum), ParticleC(def_momentum)]

@testset "Unit Tests ABC-Model" begin
    @testset "Interaction Result" begin
        for p1 in testparticleTypes, p2 in testparticleTypes
            if (p1 == p2)
                @test_throws AssertionError interaction_result(p1, p2)
            else
                # the result must be the third type, i.e., the one not in {p1, p2}
                @test interaction_result(p1, p2) == setdiff(testparticleTypes, [p1, p2])[1]
            end
        end
    end

    @testset "Vertex" begin
        @test isapprox(MetagraphOptimization.vertex(), 1 / 137.0)
    end
end
println("ABC-Model Unit Tests Complete!")
```
New file `test/unit_tests_estimator.jl`:

```julia
function test_op_specific(estimator, graph, nf::NodeFusion)
    estimate = operation_effect(estimator, graph, nf)
    data_reduce = data(nf.input[2].task)

    @test isapprox(estimate.data, -data_reduce)
    @test isapprox(estimate.computeEffort, 0; atol = eps(Float64))
    @test isapprox(estimate.computeIntensity, 0; atol = eps(Float64))

    return nothing
end

function test_op_specific(estimator, graph, nr::NodeReduction)
    estimate = operation_effect(estimator, graph, nr)

    data_reduce = data(nr.input[1].task) * (length(nr.input) - 1)
    compute_effort_reduce = compute_effort(nr.input[1].task) * (length(nr.input) - 1)

    @test isapprox(estimate.data, -data_reduce; atol = eps(Float64))
    @test isapprox(estimate.computeEffort, -compute_effort_reduce)
    @test isapprox(estimate.computeIntensity, compute_effort_reduce / data_reduce)

    return nothing
end

function test_op_specific(estimator, graph, ns::NodeSplit)
    estimate = operation_effect(estimator, graph, ns)

    copies = length(ns.input.parents) - 1

    data_increase = data(ns.input.task) * copies
    compute_effort_increase = compute_effort(ns.input.task) * copies

    @test isapprox(estimate.data, data_increase; atol = eps(Float64))
    @test isapprox(estimate.computeEffort, compute_effort_increase)
    @test isapprox(estimate.computeIntensity, compute_effort_increase / data_increase)

    return nothing
end

function test_op(estimator, graph, op)
    #=
    See issue #16

    estimate_before = graph_cost(estimator, graph)

    estimate = operation_effect(estimator, graph, op)

    push_operation!(graph, op)
    estimate_after_apply = graph_cost(estimator, graph)
    reset_graph!(graph)

    @test isapprox((estimate_before + estimate).data, estimate_after_apply.data)
    @test isapprox((estimate_before + estimate).computeEffort, estimate_after_apply.computeEffort)
    @test isapprox((estimate_before + estimate).computeIntensity, estimate_after_apply.computeIntensity)
    =#

    test_op_specific(estimator, graph, op)
    return nothing
end

@testset "Unit Tests Estimator" begin
    @testset "Global Metric Estimator" for (graph_string, exp_data, exp_computeEffort) in
                                           zip(["AB->AB", "AB->ABBB"], [976, 10944], [53, 1075])
        estimator = GlobalMetricEstimator()

        @test cost_type(estimator) == CDCost

        graph = parse_dag(joinpath(@__DIR__, "..", "input", "$(graph_string).txt"), ABCModel())

        @testset "Graph Cost" begin
            estimate = graph_cost(estimator, graph)

            @test estimate.data == exp_data
            @test estimate.computeEffort == exp_computeEffort
            @test isapprox(estimate.computeIntensity, exp_computeEffort / exp_data)
        end

        @testset "Operation Cost" begin
            ops = get_operations(graph)
            nfs = copy(ops.nodeFusions)
            nrs = copy(ops.nodeReductions)
            nss = copy(ops.nodeSplits)

            println(
                "Testing $(length(ops.nodeFusions))xNF, $(length(ops.nodeReductions))xNR, $(length(ops.nodeSplits))xNS",
            )
            for nf in nfs
                test_op(estimator, graph, nf)
            end
            for nr in nrs
                test_op(estimator, graph, nr)
            end
            for ns in nss
                test_op(estimator, graph, ns)
            end
        end
    end
end
println("Estimator Unit Tests Complete!")
```
The execution tests replace the hardcoded expected result with a computed ground truth, tighten the relative tolerance to `sqrt(eps(Float64))`, and trim the random-walk loop counts:

```diff
@@ -1,9 +1,51 @@
 import MetagraphOptimization.ABCParticle
+import MetagraphOptimization.interaction_result

 using QEDbase
+using AccurateArithmetic

 include("../examples/profiling_utilities.jl")

+const RTOL = sqrt(eps(Float64))
+
+function check_particle_reverse_moment(p1::SFourMomentum, p2::SFourMomentum)
+    @test isapprox(abs(p1.E), abs(p2.E))
+    @test isapprox(p1.px, -p2.px)
+    @test isapprox(p1.py, -p2.py)
+    @test isapprox(p1.pz, -p2.pz)
+    return nothing
+end
+
+function ground_truth_graph_result(input::ABCProcessInput)
+    # formula for one diagram:
+    # u_Bp * iλ * u_Ap * S_C * u_B * iλ * u_A
+    # for the second diagram:
+    # u_B * iλ * u_Ap * S_C * u_Bp * iλ * u_Ap
+    # the "u"s are all 1, we ignore the i, λ is 1/137.
+
+    constant = (1 / 137.0)^2
+
+    # calculate particle C in diagram 1
+    diagram1_C = ParticleC(input.inParticles[1].momentum + input.inParticles[2].momentum)
+    diagram2_C = ParticleC(input.inParticles[1].momentum + input.outParticles[2].momentum)
+
+    diagram1_Cp = ParticleC(input.outParticles[1].momentum + input.outParticles[2].momentum)
+    diagram2_Cp = ParticleC(input.outParticles[1].momentum + input.inParticles[2].momentum)
+
+    check_particle_reverse_moment(diagram1_Cp.momentum, diagram1_C.momentum)
+    check_particle_reverse_moment(diagram2_Cp.momentum, diagram2_C.momentum)
+    @test isapprox(getMass2(diagram1_C.momentum), getMass2(diagram1_Cp.momentum))
+    @test isapprox(getMass2(diagram2_C.momentum), getMass2(diagram2_Cp.momentum))
+
+    inner1 = MetagraphOptimization.inner_edge(diagram1_C)
+    inner2 = MetagraphOptimization.inner_edge(diagram2_C)
+
+    diagram1_result = inner1 * constant
+    diagram2_result = inner2 * constant
+
+    return sum_kbn([diagram1_result, diagram2_result])
+end
+
 @testset "Unit Tests Execution" begin
     machine = get_machine_info()

@@ -23,29 +65,29 @@ include("../examples/profiling_utilities.jl")
             ParticleB(SFourMomentum(0.823648, 0.835061, 0.474802, -0.277915)),
         ],
     )
-    expected_result = 0.00013916495566048735
+    expected_result = ground_truth_graph_result(particles_2_2)

     @testset "AB->AB no optimization" begin
         for _ in 1:10 # test in a loop because graph layout should not change the result
             graph = parse_dag(joinpath(@__DIR__, "..", "input", "AB->AB.txt"), ABCModel())
-            @test isapprox(execute(graph, process_2_2, machine, particles_2_2), expected_result; rtol = 0.001)
+            @test isapprox(execute(graph, process_2_2, machine, particles_2_2), expected_result; rtol = RTOL)

             # graph should be fully scheduled after being executed
             @test is_scheduled(graph)

             func = get_compute_function(graph, process_2_2, machine)
-            @test isapprox(func(particles_2_2), expected_result; rtol = 0.001)
+            @test isapprox(func(particles_2_2), expected_result; rtol = RTOL)
         end
     end

     @testset "AB->AB after random walk" begin
-        for i in 1:1000
+        for i in 1:200
             graph = parse_dag(joinpath(@__DIR__, "..", "input", "AB->AB.txt"), ABCModel())
             random_walk!(graph, 50)

             @test is_valid(graph)

-            @test isapprox(execute(graph, process_2_2, machine, particles_2_2), expected_result; rtol = 0.001)
+            @test isapprox(execute(graph, process_2_2, machine, particles_2_2), expected_result; rtol = RTOL)

             # graph should be fully scheduled after being executed
             @test is_scheduled(graph)
@@ -63,20 +105,20 @@ include("../examples/profiling_utilities.jl")
     @testset "AB->ABBB no optimization" begin
         for _ in 1:5 # test in a loop because graph layout should not change the result
             graph = parse_dag(joinpath(@__DIR__, "..", "input", "AB->ABBB.txt"), ABCModel())
-            @test isapprox(execute(graph, process_2_4, machine, particles_2_4), expected_result; rtol = 0.001)
+            @test isapprox(execute(graph, process_2_4, machine, particles_2_4), expected_result; rtol = RTOL)

             func = get_compute_function(graph, process_2_4, machine)
-            @test isapprox(func(particles_2_4), expected_result; rtol = 0.001)
+            @test isapprox(func(particles_2_4), expected_result; rtol = RTOL)
         end
     end

     @testset "AB->ABBB after random walk" begin
-        for i in 1:200
+        for i in 1:50
             graph = parse_dag(joinpath(@__DIR__, "..", "input", "AB->ABBB.txt"), ABCModel())
             random_walk!(graph, 100)
             @test is_valid(graph)

-            @test isapprox(execute(graph, process_2_4, machine, particles_2_4), expected_result; rtol = 0.001)
+            @test isapprox(execute(graph, process_2_4, machine, particles_2_4), expected_result; rtol = RTOL)
         end
     end

@@ -105,8 +147,8 @@ include("../examples/profiling_utilities.jl")

     # try execute
     @test is_valid(graph)
-    expected_result = 0.00013916495566048735
-    @test isapprox(execute(graph, process_2_2, machine, particles_2_2), expected_result; rtol = 0.001)
+    expected_result = ground_truth_graph_result(particles_2_2)
+    @test isapprox(execute(graph, process_2_2, machine, particles_2_2), expected_result; rtol = RTOL)
 end

@@ -135,8 +177,8 @@ include("../examples/profiling_utilities.jl")

     # try execute
     @test is_valid(graph)
-    expected_result = 0.00013916495566048735
-    @test isapprox(execute(graph, process_2_2, machine, particles_2_2), expected_result; rtol = 0.001)
+    expected_result = ground_truth_graph_result(particles_2_2)
+    @test isapprox(execute(graph, process_2_2, machine, particles_2_2), expected_result; rtol = RTOL)
 end

 @testset "AB->AB fusion edge case" for _ in 1:20
@@ -169,8 +211,8 @@ include("../examples/profiling_utilities.jl")

     # try execute
     @test is_valid(graph)
-    expected_result = 0.00013916495566048735
-    @test isapprox(execute(graph, process_2_2, machine, particles_2_2), expected_result; rtol = 0.001)
+    expected_result = ground_truth_graph_result(particles_2_2)
+    @test isapprox(execute(graph, process_2_2, machine, particles_2_2), expected_result; rtol = RTOL)
 end

 end
```
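`ground_truth_graph_result` sums the two diagram contributions with `sum_kbn` from AccurateArithmetic.jl, a compensated (Kahan-Babuška-Neumaier) summation that preserves small terms that plain left-to-right summation loses to rounding. A quick demonstration:

```julia
using AccurateArithmetic  # provides sum_kbn

xs = [1.0, 1e-16, 1e-16, -1.0]
@assert foldl(+, xs) == 0.0  # naive left-to-right: the 1e-16 terms vanish below 1.0's precision
@assert sum_kbn(xs) ≈ 2e-16  # compensated summation recovers them
```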