Merge pull request 'Code Generation' (#8) from code-gen into main
Reviewed-on: Rubydragon/MetagraphOptimization.jl#8
This commit is contained in:
commit
bd6c54c1ae
@ -4,6 +4,7 @@ authors = ["Anton Reinhard <anton.reinhard@proton.me>"]
|
||||
version = "0.1.0"
|
||||
|
||||
[deps]
|
||||
AccurateArithmetic = "22286c92-06ac-501d-9306-4abd417d9753"
|
||||
DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
|
||||
JuliaFormatter = "98e50ef6-434e-11e9-1051-2b60c6c9e899"
|
||||
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
|
||||
|
@ -7,10 +7,12 @@ makedocs(
|
||||
root = "docs",
|
||||
source = "src",
|
||||
build = "build",
|
||||
warnonly = true,
|
||||
clean = true,
|
||||
doctest = true,
|
||||
modules = Module[MetagraphOptimization],
|
||||
repo = "https://code.woubery.com/Rubydragon/MetagraphOptimization.jl/src/branch/{commit}{path}#L{line}",
|
||||
#repo = "https://code.woubery.com/Rubydragon/MetagraphOptimization.jl/src/branch/{commit}{path}#L{line}",
|
||||
remotes = nothing,
|
||||
sitename = "MetagraphOptimization.jl",
|
||||
pages = [
|
||||
"index.md",
|
||||
@ -24,6 +26,7 @@ makedocs(
|
||||
"Models" => "lib/internals/models.md",
|
||||
"Diff" => "lib/internals/diff.md",
|
||||
"Utility" => "lib/internals/utility.md",
|
||||
"Code Generation" => "lib/internals/code_gen.md",
|
||||
],
|
||||
"Contribution" => "contribution.md",
|
||||
],
|
||||
|
8
docs/src/lib/internals/code_gen.md
Normal file
8
docs/src/lib/internals/code_gen.md
Normal file
@ -0,0 +1,8 @@
|
||||
# Code Generation
|
||||
|
||||
## Main
|
||||
```@autodocs
|
||||
Modules = [MetagraphOptimization]
|
||||
Pages = ["code_gen/main.jl"]
|
||||
Order = [:function]
|
||||
```
|
@ -9,6 +9,13 @@ Pages = ["models/abc/types.jl"]
|
||||
Order = [:type, :constant]
|
||||
```
|
||||
|
||||
### Particle
|
||||
```@autodocs
|
||||
Modules = [MetagraphOptimization]
|
||||
Pages = ["models/abc/particle.jl"]
|
||||
Order = [:type, :constant, :function]
|
||||
```
|
||||
|
||||
### Parse
|
||||
```@autodocs
|
||||
Modules = [MetagraphOptimization]
|
||||
@ -23,6 +30,20 @@ Pages = ["models/abc/properties.jl"]
|
||||
Order = [:function]
|
||||
```
|
||||
|
||||
### Create
|
||||
```@autodocs
|
||||
Modules = [MetagraphOptimization]
|
||||
Pages = ["models/abc/create.jl"]
|
||||
Order = [:function]
|
||||
```
|
||||
|
||||
### Compute
|
||||
```@autodocs
|
||||
Modules = [MetagraphOptimization]
|
||||
Pages = ["models/abc/compute.jl"]
|
||||
Order = [:function]
|
||||
```
|
||||
|
||||
## QED-Model
|
||||
|
||||
*To be added*
|
||||
|
@ -10,7 +10,7 @@ Order = [:type]
|
||||
## Create
|
||||
```@autodocs
|
||||
Modules = [MetagraphOptimization]
|
||||
Pages = ["task/create.jl"]
|
||||
Pages = ["properties/create.jl"]
|
||||
Order = [:function]
|
||||
```
|
||||
|
||||
|
@ -3,7 +3,7 @@
|
||||
## Helper Functions
|
||||
```@autodocs
|
||||
Modules = [MetagraphOptimization]
|
||||
Pages = ["utility.jl"]
|
||||
Pages = ["./utility.jl"]
|
||||
Order = [:type, :function]
|
||||
```
|
||||
|
||||
|
@ -34,3 +34,26 @@ function test_random_walk(g::DAG, n::Int64)
|
||||
|
||||
return reset_graph!(g)
|
||||
end
|
||||
|
||||
"""
    reduce_all!(g::DAG)

Reset `g`, then repeatedly apply node reductions until none are available.
Returns `nothing`.
"""
function reduce_all!(g::DAG)
    reset_graph!(g)

    operations = get_operations(g)
    while !isempty(operations.nodeReductions)
        push_operation!(g, pop!(operations.nodeReductions))

        # once the current batch is exhausted, ask the graph for a fresh set
        isempty(operations.nodeReductions) && (operations = get_operations(g))
    end
    return nothing
end
|
||||
|
||||
"""
    reduce_one!(g::DAG)

Apply a single node reduction to `g`, if one is available. Returns `nothing`.
"""
function reduce_one!(g::DAG)
    opt = get_operations(g)
    if !isempty(opt.nodeReductions)
        push_operation!(g, pop!(opt.nodeReductions))
    end
    # NOTE(review): the original bound this result to a variable that was never
    # read afterwards — a dead assignment. The call itself is kept in case
    # `get_operations` has cache-refreshing side effects — TODO confirm whether
    # it can be dropped entirely.
    get_operations(g)
    return nothing
end
|
||||
|
@ -50,6 +50,11 @@ export ComputeTaskV
|
||||
export ComputeTaskU
|
||||
export ComputeTaskSum
|
||||
|
||||
export execute
|
||||
export gen_particles
|
||||
export ParticleValue
|
||||
export Particle
|
||||
|
||||
export ==, in, show, isempty, delete!, length
|
||||
|
||||
export bytes_to_human_readable
|
||||
@ -110,7 +115,12 @@ include("task/print.jl")
|
||||
include("task/properties.jl")
|
||||
|
||||
include("models/abc/types.jl")
|
||||
include("models/abc/particle.jl")
|
||||
include("models/abc/compute.jl")
|
||||
include("models/abc/create.jl")
|
||||
include("models/abc/properties.jl")
|
||||
include("models/abc/parse.jl")
|
||||
|
||||
include("code_gen/main.jl")
|
||||
|
||||
end # module MetagraphOptimization
|
||||
|
126
src/code_gen/main.jl
Normal file
126
src/code_gen/main.jl
Normal file
@ -0,0 +1,126 @@
|
||||
using DataStructures

"""
    gen_code(graph::DAG)

Generate the code for a given graph. The return value is a tuple of:

- `code::Expr`: The julia expression containing the code for the whole graph.
- `inputSymbols::Dict{String, Symbol}`: A dictionary of symbols mapping the names of the input nodes of the graph to the symbols their inputs should be provided on.
- `outputSymbol::Symbol`: The symbol of the final calculated value

See also: [`execute`](@ref)
"""
function gen_code(graph::DAG)
    code = Vector{Expr}()
    sizehint!(code, length(graph.nodes))

    nodeQueue = PriorityQueue{Node, Int}()
    inputSyms = Dict{String, Symbol}()

    # use a priority equal to the number of unseen children -> 0 are nodes that can be added
    for node in get_entry_nodes(graph)
        enqueue!(nodeQueue, node => 0)
        push!(inputSyms, node.name => Symbol("data_$(to_var_name(node.id))_in"))
    end

    node = nothing
    # emit nodes in topological order: a node is dequeued only when its
    # priority has dropped to 0, i.e. all of its children were already emitted
    while !isempty(nodeQueue)
        @assert peek(nodeQueue)[2] == 0
        node = dequeue!(nodeQueue)

        push!(code, get_expression(node))
        for parent in node.parents
            # reduce the priority of all parents by one
            if (!haskey(nodeQueue, parent))
                enqueue!(nodeQueue, parent => length(parent.children) - 1)
            else
                nodeQueue[parent] = nodeQueue[parent] - 1
            end
        end
    end

    # node is now the last node we looked at -> the output node
    # NOTE(review): if the graph has no entry nodes, `node` is still `nothing`
    # here and the next line errors — TODO confirm graphs are never empty
    outSym = Symbol("data_$(to_var_name(node.id))")

    return (
        code = Expr(:block, code...),
        inputSymbols = inputSyms,
        outputSymbol = outSym,
    )
end
|
||||
|
||||
"""
    execute(generated_code, input::Dict{ParticleType, Vector{Particle}})

Execute the given `generated_code` (as returned by [`gen_code`](@ref)) on the given input particles.
"""
function execute(generated_code, input::Dict{ParticleType, Vector{Particle}})
    (code, inputSymbols, outputSymbol) = generated_code

    # build one assignment expression per input node, binding the node's input
    # symbol to a ParticleValue made from the matching input particle
    assignInputs = Vector{Expr}()
    for (name, symbol) in inputSymbols
        # input node names encode the particle type in their first character
        # ("A…", "B…", anything else -> C) — presumably set by parse_abc; TODO confirm
        type = nothing
        if startswith(name, "A")
            type = A
        elseif startswith(name, "B")
            type = B
        else
            type = C
        end
        # remainder of the name is the 1-based index into that type's particle vector
        index = parse(Int, name[2:end])

        # each input particle starts with value 1.0
        push!(
            assignInputs,
            Meta.parse(
                "$(symbol) = ParticleValue(Particle($(input[type][index]).P0, $(input[type][index]).P1, $(input[type][index]).P2, $(input[type][index]).P3, $(type)), 1.0)",
            ),
        )
    end

    # NOTE(review): eval runs at global module scope — the generated data
    # symbols (and `result` below) become module globals; not thread-safe
    assignInputs = Expr(:block, assignInputs...)
    eval(assignInputs)
    eval(code)

    # `result` here resolves to the module-level global created by the eval above
    eval(Meta.parse("result = $outputSymbol"))
    return result
end
|
||||
|
||||
"""
    execute(graph::DAG, input::Dict{ParticleType, Vector{Particle}})

Execute the given `graph` on the given input particles by first generating its
code (see [`gen_code`](@ref)) and then executing it.
The input particles should be sorted correctly into the dictionary to their according [`ParticleType`](@ref)s.

See also: [`gen_particles`](@ref)
"""
function execute(graph::DAG, input::Dict{ParticleType, Vector{Particle}})
    # delegate to the generated-code variant; the original duplicated its
    # entire input-assignment and eval body verbatim
    return execute(gen_code(graph), input)
end
|
@ -144,6 +144,8 @@ function remove_edge!(
|
||||
# 1: mute
|
||||
pre_length1 = length(node1.parents)
|
||||
pre_length2 = length(node2.children)
|
||||
|
||||
#TODO: filter is very slow
|
||||
filter!(x -> x != node2, node1.parents)
|
||||
filter!(x -> x != node1, node2.children)
|
||||
|
||||
@ -201,6 +203,7 @@ function invalidate_caches!(graph::DAG, operation::NodeFusion)
|
||||
delete!(graph.possibleOperations, operation)
|
||||
|
||||
# delete the operation from all caches of nodes involved in the operation
|
||||
# TODO: filter is very slow
|
||||
filter!(!=(operation), operation.input[1].nodeFusions)
|
||||
filter!(!=(operation), operation.input[3].nodeFusions)
|
||||
|
||||
|
@ -23,6 +23,7 @@ end
|
||||
Print the given graph to io. If there are too many nodes it will print only a summary of them.
|
||||
"""
|
||||
function show(io::IO, graph::DAG)
|
||||
apply_all!(graph)
|
||||
println(io, "Graph:")
|
||||
print(io, " Nodes: ")
|
||||
|
||||
|
@ -27,3 +27,18 @@ function get_exit_node(graph::DAG)
|
||||
end
|
||||
@assert false "The given graph has no exit node! It is either empty or not acyclic!"
|
||||
end
|
||||
|
||||
"""
    get_entry_nodes(graph::DAG)

Return a vector of the graph's entry nodes.
"""
function get_entry_nodes(graph::DAG)
    entries = Vector{Node}()
    # collect every node for which `is_entry_node` holds
    for candidate in graph.nodes
        is_entry_node(candidate) && push!(entries, candidate)
    end
    return entries
end
|
||||
|
256
src/models/abc/compute.jl
Normal file
256
src/models/abc/compute.jl
Normal file
@ -0,0 +1,256 @@
|
||||
using AccurateArithmetic

"""
    compute(::ComputeTaskP, data::ParticleValue)

Forward the given particle value unchanged.

0 FLOP.
"""
compute(::ComputeTaskP, data::ParticleValue) = data

"""
    compute(::ComputeTaskU, data::ParticleValue)

Compute an outer edge: return the same particle with its value scaled by the
outer-edge factor.

1 FLOP.
"""
compute(::ComputeTaskU, data::ParticleValue) =
    ParticleValue(data.p, data.v * outer_edge(data.p))

"""
    compute(::ComputeTaskV, data1::ParticleValue, data2::ParticleValue)

Compute a vertex: build the momentum-preserving result particle (AB->C etc.)
and multiply the two values together with the vertex factor.

6 FLOP.
"""
compute(::ComputeTaskV, data1::ParticleValue, data2::ParticleValue) =
    ParticleValue(
        preserve_momentum(data1.p, data2.p),
        data1.v * vertex() * data2.v,
    )

"""
    compute(::ComputeTaskS2, data1::ParticleValue, data2::ParticleValue)

Compute a final inner edge (2 input particles, no output particle).

For valid inputs, both input particles should have the same momenta at this point.

12 FLOP.
"""
compute(::ComputeTaskS2, data1::ParticleValue, data2::ParticleValue) =
    data1.v * inner_edge(data1.p) * data2.v

"""
    compute(::ComputeTaskS1, data::ParticleValue)

Compute an inner edge (1 input particle, 1 output particle).

11 FLOP.
"""
compute(::ComputeTaskS1, data::ParticleValue) =
    ParticleValue(data.p, data.v * inner_edge(data.p))

"""
    compute(::ComputeTaskSum, data::Vector{Float64})

Sum the vector with `sum_kbn`, which accounts for accumulated error in long
sums with potentially large magnitude differences between summands.

Linearly many FLOP with growing data.
"""
compute(::ComputeTaskSum, data::Vector{Float64}) = sum_kbn(data)

"""
    compute(t::FusedComputeTask, data)

Not implemented and never called; fused compute tasks generate their
expressions directly through their component tasks instead.
"""
function compute(t::FusedComputeTask, data)
    @assert false "This is not implemented and should never be called"
end
|
||||
|
||||
"""
    get_expression(::ComputeTaskP, inSymbol::Symbol, outSymbol::Symbol)

Return code evaluating [`ComputeTaskP`](@ref) on `inSymbol`, assigning the
result to `outSymbol`.
"""
get_expression(::ComputeTaskP, inSymbol::Symbol, outSymbol::Symbol) =
    Meta.parse("$outSymbol = compute(ComputeTaskP(), $inSymbol)")

"""
    get_expression(::ComputeTaskU, inSymbol::Symbol, outSymbol::Symbol)

Return code evaluating [`ComputeTaskU`](@ref) on `inSymbol`, assigning the
result to `outSymbol`.
`inSymbol` should be of type [`ParticleValue`](@ref), `outSymbol` will be of type [`ParticleValue`](@ref).
"""
get_expression(::ComputeTaskU, inSymbol::Symbol, outSymbol::Symbol) =
    Meta.parse("$outSymbol = compute(ComputeTaskU(), $inSymbol)")

"""
    get_expression(::ComputeTaskV, inSymbol1::Symbol, inSymbol2::Symbol, outSymbol::Symbol)

Return code evaluating [`ComputeTaskV`](@ref) on `inSymbol1` and `inSymbol2`,
assigning the result to `outSymbol`.
`inSymbol1` and `inSymbol2` should be of type [`ParticleValue`](@ref), `outSymbol` will be of type [`ParticleValue`](@ref).
"""
get_expression(
    ::ComputeTaskV,
    inSymbol1::Symbol,
    inSymbol2::Symbol,
    outSymbol::Symbol,
) = Meta.parse("$outSymbol = compute(ComputeTaskV(), $inSymbol1, $inSymbol2)")

"""
    get_expression(::ComputeTaskS2, inSymbol1::Symbol, inSymbol2::Symbol, outSymbol::Symbol)

Return code evaluating [`ComputeTaskS2`](@ref) on `inSymbol1` and `inSymbol2`,
assigning the result to `outSymbol`.
`inSymbol1` and `inSymbol2` should be of type [`ParticleValue`](@ref), `outSymbol` will be of type `Float64`.
"""
get_expression(
    ::ComputeTaskS2,
    inSymbol1::Symbol,
    inSymbol2::Symbol,
    outSymbol::Symbol,
) = Meta.parse("$outSymbol = compute(ComputeTaskS2(), $inSymbol1, $inSymbol2)")

"""
    get_expression(::ComputeTaskS1, inSymbol::Symbol, outSymbol::Symbol)

Return code evaluating [`ComputeTaskS1`](@ref) on `inSymbol`, assigning the
result to `outSymbol`.
`inSymbol` should be of type [`ParticleValue`](@ref), `outSymbol` will be of type [`ParticleValue`](@ref).
"""
get_expression(::ComputeTaskS1, inSymbol::Symbol, outSymbol::Symbol) =
    Meta.parse("$outSymbol = compute(ComputeTaskS1(), $inSymbol)")

"""
    get_expression(::ComputeTaskSum, inSymbols::Vector{Symbol}, outSymbol::Symbol)

Return code evaluating [`ComputeTaskSum`](@ref) on `inSymbols`, assigning the
result to `outSymbol`.
`inSymbols` should be of type [`Float64`], `outSymbol` will be of type [`Float64`].
"""
function get_expression(
    ::ComputeTaskSum,
    inSymbols::Vector{Symbol},
    outSymbol::Symbol,
)
    # quote (not Meta.parse) so the symbol vector can be splatted directly
    return quote
        $outSymbol = compute(ComputeTaskSum(), [$(inSymbols...)])
    end
end
|
||||
|
||||
"""
    get_expression(t::FusedComputeTask, inSymbols::Vector{Symbol}, outSymbol::Symbol)

Generate code evaluating a [`FusedComputeTask`](@ref) on `inSymbols`, providing the output on `outSymbol`.
`inSymbols` should be of the correct types and may be heterogeneous. `outSymbol` will be of the type of the output of `T2` of t.
"""
function get_expression(
    t::FusedComputeTask,
    inSymbols::Vector{Symbol},
    outSymbol::Symbol,
)
    (T1, T2) = get_types(t)
    c1 = children(T1())
    c2 = children(T2())

    expr1 = nothing
    expr2 = nothing

    # TODO need to figure out how to know which inputs belong to which subtask
    # since we order the vectors with the child nodes we can't just split

    # first subtask consumes the leading c1 input symbols, producing :intermediate
    if (c1 == 1)
        expr1 = get_expression(T1(), inSymbols[begin], :intermediate)
    elseif (c1 == 2)
        expr1 =
            get_expression(T1(), inSymbols[begin], inSymbols[2], :intermediate)
    else
        expr1 = get_expression(T1(), inSymbols[begin:c1], :intermediate)
    end

    # second subtask consumes :intermediate plus the remaining input symbols
    if (c2 == 1)
        expr2 = get_expression(T2(), :intermediate, outSymbol)
    elseif c2 == 2
        expr2 =
            get_expression(T2(), :intermediate, inSymbols[c1 + 1], outSymbol)
    else
        # BUGFIX: the original wrote `:intermediate * inSymbols[(c1 + 1):end]`,
        # which throws a MethodError (no `*` for Symbol and Vector{Symbol});
        # the intended symbol vector is built by concatenation.
        expr2 = get_expression(
            T2(),
            vcat(:intermediate, inSymbols[(c1 + 1):end]),
            outSymbol,
        )
    end

    return Expr(:block, expr1, expr2)
end
|
||||
|
||||
"""
    get_expression(node::ComputeTaskNode)

Generate and return code for a given [`ComputeTaskNode`](@ref), selecting the
matching per-task `get_expression` method by the arity of the node's task.
"""
function get_expression(node::ComputeTaskNode)
    t = typeof(node.task)
    # a sum task has a variable number of children, so it is exempt from the arity check
    @assert length(node.children) == children(node.task) || t <: ComputeTaskSum

    # NOTE(review): branching on task types here mirrors dispatch the per-task
    # `get_expression` methods already perform — candidate for unification
    if (t <: ComputeTaskU || t <: ComputeTaskP || t <: ComputeTaskS1) # single input
        symbolIn = Symbol("data_$(to_var_name(node.children[1].id))")
        symbolOut = Symbol("data_$(to_var_name(node.id))")
        return get_expression(t(), symbolIn, symbolOut)
    elseif (t <: ComputeTaskS2 || t <: ComputeTaskV) # double input
        symbolIn1 = Symbol("data_$(to_var_name(node.children[1].id))")
        symbolIn2 = Symbol("data_$(to_var_name(node.children[2].id))")
        symbolOut = Symbol("data_$(to_var_name(node.id))")
        return get_expression(t(), symbolIn1, symbolIn2, symbolOut)
    elseif (t <: ComputeTaskSum || t <: FusedComputeTask) # vector input
        inSymbols = Vector{Symbol}()
        for child in node.children
            push!(inSymbols, Symbol("data_$(to_var_name(child.id))"))
        end
        outSymbol = Symbol("data_$(to_var_name(node.id))")
        return get_expression(t(), inSymbols, outSymbol)
    else
        error("Unknown compute task")
    end
end
|
||||
|
||||
"""
    get_expression(node::DataTaskNode)

Generate and return code for a given [`DataTaskNode`](@ref).
"""
function get_expression(node::DataTaskNode)
    # TODO: do things to transport data from/to gpu, between numa nodes, etc.
    @assert length(node.children) <= 1

    # a node without children reads from its dedicated "_in" input symbol
    inSymbol =
        length(node.children) == 1 ?
        Symbol("data_$(to_var_name(node.children[1].id))") :
        Symbol("data_$(to_var_name(node.id))_in")
    outSymbol = Symbol("data_$(to_var_name(node.id))")

    return Meta.parse("$outSymbol = $inSymbol")
end
|
74
src/models/abc/create.jl
Normal file
74
src/models/abc/create.jl
Normal file
@ -0,0 +1,74 @@
|
||||
|
||||
"""
    Particle(rng, type::ParticleType)

Return a randomly generated particle of the given type with an on-shell
four-momentum.
"""
function Particle(rng, type::ParticleType)
    p1 = rand(rng, Float64)
    p2 = rand(rng, Float64)
    p3 = rand(rng, Float64)
    m = mass(type)

    # keep the momenta of the particles on-shell: E^2 - |p|^2 = m^2
    p4 = sqrt(p1^2 + p2^2 + p3^2 + m^2)

    # BUGFIX: `square` treats P0 as the energy (time) component
    # (P0^2 - P1^2 - P2^2 - P3^2), and the hand-built particles in
    # `gen_particles` also pass the energy first — the original passed the
    # energy as the LAST constructor argument, producing off-shell particles.
    return Particle(p4, p1, p2, p3, type)
end
|
||||
|
||||
"""
    gen_particles(ns::Dict{ParticleType, Int})

Return a `Dict` mapping each requested [`ParticleType`](@ref) to a `Vector` of
that many randomly generated [`Particle`](@ref)s.

Note: Except for the special two-A/two-B case, this does not take into account
the preservation of momenta required for an actual valid process!
"""
function gen_particles(ns::Dict{ParticleType, Int})
    particles = Dict{ParticleType, Vector{Particle}}()
    rng = MersenneTwister(0)

    # special case: exactly 2 A and 2 B particles get a momentum-conserving
    # back-to-back configuration
    if ns == Dict((A => 2), (B => 2))
        rho = 1.0

        omega = rand(rng, Float64)
        theta = rand(rng, Float64) * π
        phi = rand(rng, Float64) * π

        particles[A] = Vector{Particle}()
        particles[B] = Vector{Particle}()

        # NOTE(review): these four-momenta are not obviously on-shell for the
        # massive A/B types (mass 1.0) — TODO confirm the intended kinematics
        push!(particles[A], Particle(omega, 0, 0, omega, A))
        push!(particles[B], Particle(omega, 0, 0, -omega, B))
        push!(
            particles[A],
            Particle(
                omega,
                rho * cos(theta) * cos(phi),
                rho * cos(theta) * sin(phi),
                rho * sin(theta),
                A,
            ),
        )
        push!(
            particles[B],
            Particle(
                omega,
                -rho * cos(theta) * cos(phi),
                -rho * cos(theta) * sin(phi),
                -rho * sin(theta),
                B,
            ),
        )
        return particles
    end

    # general case: independent random particles per requested type
    for (type, n) in ns
        particles[type] = Vector{Particle}()
        for i in 1:n
            push!(particles[type], Particle(rng, type))
        end
    end
    return particles
end
|
@ -4,6 +4,9 @@ regex_c = r"^[A-C]\(([^']*),([^']*)\)$" # Regex for the combinations of 2 pa
|
||||
regex_m = r"^M\(([^']*),([^']*),([^']*)\)$" # Regex for the combinations of 3 particles
|
||||
regex_plus = r"^\+$" # Regex for the sum
|
||||
|
||||
const PARTICLE_VALUE_SIZE::Int = 48
|
||||
const FLOAT_SIZE::Int = 8
|
||||
|
||||
"""
|
||||
parse_nodes(input::AbstractString)
|
||||
|
||||
@ -61,7 +64,8 @@ function parse_abc(filename::String, verbose::Bool = false)
|
||||
sizehint!(graph.nodes, estimate_no_nodes)
|
||||
|
||||
sum_node = insert_node!(graph, make_node(ComputeTaskSum()), false, false)
|
||||
global_data_out = insert_node!(graph, make_node(DataTask(10)), false, false)
|
||||
global_data_out =
|
||||
insert_node!(graph, make_node(DataTask(FLOAT_SIZE)), false, false)
|
||||
insert_edge!(graph, sum_node, global_data_out, false, false)
|
||||
|
||||
# remember the data out nodes for connection
|
||||
@ -86,13 +90,28 @@ function parse_abc(filename::String, verbose::Bool = false)
|
||||
end
|
||||
if occursin(regex_a, node)
|
||||
# add nodes and edges for the state reading to u(P(Particle))
|
||||
data_in = insert_node!(graph, make_node(DataTask(4)), false, false) # read particle data node
|
||||
data_in = insert_node!(
|
||||
graph,
|
||||
make_node(DataTask(PARTICLE_VALUE_SIZE), string(node)),
|
||||
false,
|
||||
false,
|
||||
) # read particle data node
|
||||
compute_P =
|
||||
insert_node!(graph, make_node(ComputeTaskP()), false, false) # compute P node
|
||||
data_Pu = insert_node!(graph, make_node(DataTask(6)), false, false) # transfer data from P to u
|
||||
data_Pu = insert_node!(
|
||||
graph,
|
||||
make_node(DataTask(PARTICLE_VALUE_SIZE)),
|
||||
false,
|
||||
false,
|
||||
) # transfer data from P to u (one ParticleValue object)
|
||||
compute_u =
|
||||
insert_node!(graph, make_node(ComputeTaskU()), false, false) # compute U node
|
||||
data_out = insert_node!(graph, make_node(DataTask(3)), false, false) # transfer data out from u
|
||||
data_out = insert_node!(
|
||||
graph,
|
||||
make_node(DataTask(PARTICLE_VALUE_SIZE)),
|
||||
false,
|
||||
false,
|
||||
) # transfer data out from u (one ParticleValue object)
|
||||
|
||||
insert_edge!(graph, data_in, compute_P, false, false)
|
||||
insert_edge!(graph, compute_P, data_Pu, false, false)
|
||||
@ -109,7 +128,12 @@ function parse_abc(filename::String, verbose::Bool = false)
|
||||
|
||||
compute_v =
|
||||
insert_node!(graph, make_node(ComputeTaskV()), false, false)
|
||||
data_out = insert_node!(graph, make_node(DataTask(5)), false, false)
|
||||
data_out = insert_node!(
|
||||
graph,
|
||||
make_node(DataTask(PARTICLE_VALUE_SIZE)),
|
||||
false,
|
||||
false,
|
||||
)
|
||||
|
||||
if (occursin(regex_c, in1))
|
||||
# put an S node after this input
|
||||
@ -119,8 +143,12 @@ function parse_abc(filename::String, verbose::Bool = false)
|
||||
false,
|
||||
false,
|
||||
)
|
||||
data_S_v =
|
||||
insert_node!(graph, make_node(DataTask(5)), false, false)
|
||||
data_S_v = insert_node!(
|
||||
graph,
|
||||
make_node(DataTask(PARTICLE_VALUE_SIZE)),
|
||||
false,
|
||||
false,
|
||||
)
|
||||
|
||||
insert_edge!(graph, dataOutNodes[in1], compute_S, false, false)
|
||||
insert_edge!(graph, compute_S, data_S_v, false, false)
|
||||
@ -139,8 +167,12 @@ function parse_abc(filename::String, verbose::Bool = false)
|
||||
false,
|
||||
false,
|
||||
)
|
||||
data_S_v =
|
||||
insert_node!(graph, make_node(DataTask(5)), false, false)
|
||||
data_S_v = insert_node!(
|
||||
graph,
|
||||
make_node(DataTask(PARTICLE_VALUE_SIZE)),
|
||||
false,
|
||||
false,
|
||||
)
|
||||
|
||||
insert_edge!(graph, dataOutNodes[in2], compute_S, false, false)
|
||||
insert_edge!(graph, compute_S, data_S_v, false, false)
|
||||
@ -163,7 +195,12 @@ function parse_abc(filename::String, verbose::Bool = false)
|
||||
# in2 + in3 with a v
|
||||
compute_v =
|
||||
insert_node!(graph, make_node(ComputeTaskV()), false, false)
|
||||
data_v = insert_node!(graph, make_node(DataTask(5)), false, false)
|
||||
data_v = insert_node!(
|
||||
graph,
|
||||
make_node(DataTask(PARTICLE_VALUE_SIZE)),
|
||||
false,
|
||||
false,
|
||||
)
|
||||
|
||||
insert_edge!(graph, dataOutNodes[in2], compute_v, false, false)
|
||||
insert_edge!(graph, dataOutNodes[in3], compute_v, false, false)
|
||||
@ -172,8 +209,12 @@ function parse_abc(filename::String, verbose::Bool = false)
|
||||
# combine with the v of the combined other input
|
||||
compute_S2 =
|
||||
insert_node!(graph, make_node(ComputeTaskS2()), false, false)
|
||||
data_out =
|
||||
insert_node!(graph, make_node(DataTask(10)), false, false)
|
||||
data_out = insert_node!(
|
||||
graph,
|
||||
make_node(DataTask(FLOAT_SIZE)),
|
||||
false,
|
||||
false,
|
||||
) # output of a S2 task is only a float
|
||||
|
||||
insert_edge!(graph, data_v, compute_S2, false, false)
|
||||
insert_edge!(graph, dataOutNodes[in1], compute_S2, false, false)
|
||||
|
130
src/models/abc/particle.jl
Normal file
130
src/models/abc/particle.jl
Normal file
@ -0,0 +1,130 @@
|
||||
"""
    ParticleType

A Particle Type in the ABC Model as an enum, with types `A`, `B` and `C`.
"""
@enum ParticleType A = 1 B = 2 C = 3

"""
    PARTICLE_MASSES

A constant dictionary containing the masses of the different [`ParticleType`](@ref)s.
"""
const PARTICLE_MASSES =
    Dict{ParticleType, Float64}(A => 1.0, B => 1.0, C => 0.0)

"""
    Particle

A particle of the ABC model, carrying the four-momentum components `P0`...`P3`
and a [`ParticleType`](@ref).

`sizeof(Particle())` = 40 Byte
"""
struct Particle
    P0::Float64
    P1::Float64
    P2::Float64
    P3::Float64

    type::ParticleType
end

"""
    ParticleValue

A particle during a calculation of a Feynman Diagram, paired with the value
that's being calculated.

`sizeof(ParticleValue())` = 48 Byte
"""
struct ParticleValue
    p::Particle
    v::Float64
end

"""
    mass(t::ParticleType)

Return the mass (at rest) of the given particle type.
"""
mass(t::ParticleType) = PARTICLE_MASSES[t]

"""
    remaining_type(t1::ParticleType, t2::ParticleType)

For 2 given (non-equal) particle types, return the third of ABC.
"""
function remaining_type(t1::ParticleType, t2::ParticleType)
    @assert t1 != t2
    if A ∉ (t1, t2)
        return A
    elseif B ∉ (t1, t2)
        return B
    else
        return C
    end
end

"""
    square(p::Particle)

Return the square of the particle's momentum as a `Float` value.

Takes 7 effective FLOP.
"""
square(particle::Particle) =
    particle.P0^2 - particle.P1^2 - particle.P2^2 - particle.P3^2

"""
    inner_edge(p::Particle)

Return the factor of the inner edge with the given (virtual) particle.

Takes 10 effective FLOP. (3 here + 7 in square(p))
"""
function inner_edge(particle::Particle)
    m = mass(particle.type)
    return 1.0 / (square(particle) - m * m)
end

"""
    outer_edge(p::Particle)

Return the factor of the outer edge with the given (real) particle.

Takes 0 effective FLOP.
"""
outer_edge(::Particle) = 1.0

"""
    vertex()

Return the factor of a vertex.

Takes 0 effective FLOP since it's constant.
"""
vertex() = 1.0 * (1.0 / 137.0)

"""
    preserve_momentum(p1::Particle, p2::Particle)

Calculate and return a new particle from two given interacting ones at a vertex.

Takes 4 effective FLOP.
"""
function preserve_momentum(p1::Particle, p2::Particle)
    return Particle(
        p1.P0 + p2.P0,
        p1.P1 + p2.P1,
        p1.P2 + p2.P2,
        p1.P3 + p2.P3,
        remaining_type(p1.type, p2.type),
    )
end
|
@ -3,35 +3,35 @@
|
||||
|
||||
Return the compute effort of an S1 task.
|
||||
"""
|
||||
compute_effort(t::ComputeTaskS1) = 10
|
||||
compute_effort(t::ComputeTaskS1) = 11
|
||||
|
||||
"""
|
||||
compute_effort(t::ComputeTaskS2)
|
||||
|
||||
Return the compute effort of an S2 task.
|
||||
"""
|
||||
compute_effort(t::ComputeTaskS2) = 10
|
||||
compute_effort(t::ComputeTaskS2) = 12
|
||||
|
||||
"""
|
||||
compute_effort(t::ComputeTaskU)
|
||||
|
||||
Return the compute effort of a U task.
|
||||
"""
|
||||
compute_effort(t::ComputeTaskU) = 6
|
||||
compute_effort(t::ComputeTaskU) = 1
|
||||
|
||||
"""
|
||||
compute_effort(t::ComputeTaskV)
|
||||
|
||||
Return the compute effort of a V task.
|
||||
"""
|
||||
compute_effort(t::ComputeTaskV) = 20
|
||||
compute_effort(t::ComputeTaskV) = 6
|
||||
|
||||
"""
|
||||
compute_effort(t::ComputeTaskP)
|
||||
|
||||
Return the compute effort of a P task.
|
||||
"""
|
||||
compute_effort(t::ComputeTaskP) = 15
|
||||
compute_effort(t::ComputeTaskP) = 0
|
||||
|
||||
"""
|
||||
compute_effort(t::ComputeTaskSum)
|
||||
@ -100,3 +100,66 @@ show(io::IO, t::ComputeTaskSum) = print("ComputeSum")
|
||||
Copy the data task and return it.
|
||||
"""
|
||||
copy(t::DataTask) = DataTask(t.data)
|
||||
|
||||
"""
    children(::DataTask)

Return the number of children of a data task (always 1).
"""
children(::DataTask) = 1

"""
    children(::ComputeTaskS1)

Return the number of children of a ComputeTaskS1 (always 1).
"""
children(::ComputeTaskS1) = 1

"""
    children(::ComputeTaskS2)

Return the number of children of a ComputeTaskS2 (always 2).
"""
children(::ComputeTaskS2) = 2

"""
    children(::ComputeTaskP)

Return the number of children of a ComputeTaskP (always 1).
"""
children(::ComputeTaskP) = 1

"""
    children(::ComputeTaskU)

Return the number of children of a ComputeTaskU (always 1).
"""
children(::ComputeTaskU) = 1

"""
    children(::ComputeTaskV)

Return the number of children of a ComputeTaskV (always 2).
"""
children(::ComputeTaskV) = 2


"""
    children(::ComputeTaskSum)

Return the number of children of a ComputeTaskSum. Since this is variable and
the task doesn't know how many children it will sum over, return a wildcard -1.

TODO: this is kind of bad because it means we can't fuse with a sum task
"""
children(::ComputeTaskSum) = -1

"""
    children(t::FusedComputeTask)

Return the number of children of a FusedComputeTask. It's the sum of the children of both tasks minus one.
"""
function children(t::FusedComputeTask)
    (T1, T2) = get_types(t)
    return children(T1()) + children(T2()) - 1 # one of the inputs is the output of T1 and thus not a child of the node
end
|
||||
|
@ -1,5 +1,5 @@
|
||||
|
||||
DataTaskNode(t::AbstractDataTask) = DataTaskNode(
|
||||
DataTaskNode(t::AbstractDataTask, name = "") = DataTaskNode(
|
||||
t,
|
||||
Vector{Node}(),
|
||||
Vector{Node}(),
|
||||
@ -7,6 +7,7 @@ DataTaskNode(t::AbstractDataTask) = DataTaskNode(
|
||||
missing,
|
||||
missing,
|
||||
missing,
|
||||
name,
|
||||
)
|
||||
ComputeTaskNode(t::AbstractComputeTask) = ComputeTaskNode(
|
||||
t,
|
||||
@ -36,6 +37,7 @@ copy(n::DataTaskNode) = DataTaskNode(
|
||||
copy(n.nodeReduction),
|
||||
copy(n.nodeSplit),
|
||||
copy(n.nodeFusion),
|
||||
n.name,
|
||||
)
|
||||
|
||||
"""
|
||||
@ -52,8 +54,8 @@ end
|
||||
|
||||
Construct and return a new [`DataTaskNode`](@ref) with the given task.
|
||||
"""
|
||||
function make_node(t::AbstractDataTask)
|
||||
return DataTaskNode(t)
|
||||
function make_node(t::AbstractDataTask, name::String = "")
|
||||
return DataTaskNode(t, name)
|
||||
end
|
||||
|
||||
"""
|
||||
|
@ -15,3 +15,12 @@ Print a short string representation of the edge to io.
|
||||
function show(io::IO, e::Edge)
|
||||
return print(io, "Edge(", e.edge[1], ", ", e.edge[2], ")")
|
||||
end
|
||||
|
||||
"""
|
||||
to_var_name(id::UUID)
|
||||
|
||||
Return the uuid as a string usable as a variable name in code generation.
|
||||
"""
|
||||
function to_var_name(id::UUID)
|
||||
return replace(string(id), "-" => "_")
|
||||
end
|
||||
|
@ -52,6 +52,9 @@ mutable struct DataTaskNode <: Node
|
||||
|
||||
# the node fusion involving this node, if it exists
|
||||
nodeFusion::Union{Operation, Missing}
|
||||
|
||||
# for input nodes we need a name for the node to distinguish between them
|
||||
name::String
|
||||
end
|
||||
|
||||
"""
|
||||
|
@ -160,7 +160,6 @@ function node_fusion!(
|
||||
# clear snapshot
|
||||
get_snapshot_diff(graph)
|
||||
|
||||
|
||||
# save children and parents
|
||||
n1_children = children(n1)
|
||||
n3_parents = parents(n3)
|
||||
@ -181,26 +180,18 @@ function node_fusion!(
|
||||
ComputeTaskNode(FusedComputeTask{typeof(n1.task), typeof(n3.task)}())
|
||||
insert_node!(graph, new_node)
|
||||
|
||||
# use a set for combined children of n1 and n3 to not get duplicates
|
||||
n1and3_children = Set{Node}()
|
||||
|
||||
# remove edges from n1 children to n1
|
||||
for child in n1_children
|
||||
remove_edge!(graph, child, n1)
|
||||
push!(n1and3_children, child)
|
||||
end
|
||||
|
||||
# remove edges from n3 children to n3
|
||||
for child in n3_children
|
||||
remove_edge!(graph, child, n3)
|
||||
push!(n1and3_children, child)
|
||||
end
|
||||
|
||||
for child in n1and3_children
|
||||
insert_edge!(graph, child, new_node)
|
||||
end
|
||||
|
||||
# "repoint" parents of n3 from new node
|
||||
for child in n3_children
|
||||
remove_edge!(graph, child, n3)
|
||||
if !(child in n1_children)
|
||||
insert_edge!(graph, child, new_node)
|
||||
end
|
||||
end
|
||||
|
||||
for parent in n3_parents
|
||||
remove_edge!(graph, n3, parent)
|
||||
insert_edge!(graph, new_node, parent)
|
||||
|
@ -71,14 +71,8 @@ function find_reductions!(graph::DAG, node::Node)
|
||||
partners_ = partners(node)
|
||||
delete!(partners_, node)
|
||||
for partner in partners_
|
||||
if partner ∉ graph.nodes
|
||||
error("Partner is not part of the graph")
|
||||
end
|
||||
|
||||
@assert partner in graph.nodes
|
||||
if can_reduce(node, partner)
|
||||
if Set(node.children) != Set(partner.children)
|
||||
error("Not equal children")
|
||||
end
|
||||
if reductionVector === nothing
|
||||
# only when there's at least one reduction partner, insert the vector
|
||||
reductionVector = Vector{Node}()
|
||||
|
@ -7,6 +7,24 @@ function compute(t::AbstractTask; data...)
|
||||
return error("Need to implement compute()")
|
||||
end
|
||||
|
||||
"""
|
||||
compute(t::FusedComputeTask; data...)
|
||||
|
||||
Compute a fused compute task.
|
||||
"""
|
||||
function compute(t::FusedComputeTask; data...)
|
||||
(T1, T2) = collect(typeof(t).parameters)
|
||||
|
||||
return compute(T2(), compute(T1(), data))
|
||||
end
|
||||
|
||||
"""
|
||||
compute(t::AbstractDataTask; data...)
|
||||
|
||||
The compute function of a data task, always the identity function, regardless of the specific task.
|
||||
"""
|
||||
compute(t::AbstractDataTask; data...) = data
|
||||
|
||||
"""
|
||||
compute_effort(t::AbstractTask)
|
||||
|
||||
@ -33,13 +51,6 @@ Return the compute effort of a data task, always zero, regardless of the specifi
|
||||
"""
|
||||
compute_effort(t::AbstractDataTask) = 0
|
||||
|
||||
"""
|
||||
compute(t::AbstractDataTask; data...)
|
||||
|
||||
The compute function of a data task, always the identity function, regardless of the specific task.
|
||||
"""
|
||||
compute(t::AbstractDataTask; data...) = data
|
||||
|
||||
"""
|
||||
data(t::AbstractDataTask)
|
||||
|
||||
@ -64,12 +75,36 @@ function compute_effort(t::FusedComputeTask)
|
||||
return compute_effort(T1()) + compute_effort(T2())
|
||||
end
|
||||
|
||||
# actual compute functions for the tasks can stay undefined for now
|
||||
# compute(t::ComputeTaskU, data::Any) = mycomputation(data)
|
||||
|
||||
"""
|
||||
get_types(::FusedComputeTask{T1, T2})
|
||||
|
||||
Return a tuple of a the fused compute task's components' types.
|
||||
"""
|
||||
get_types(::FusedComputeTask{T1, T2}) where {T1, T2} = (T1, T2)
|
||||
|
||||
"""
|
||||
get_expression(t::AbstractTask)
|
||||
|
||||
Return an expression evaluating the given task on the :dataIn symbol
|
||||
"""
|
||||
function get_expression(t::AbstractTask)
|
||||
return quote
|
||||
dataOut = compute($t, dataIn)
|
||||
end
|
||||
end
|
||||
|
||||
"""
|
||||
get_expression()
|
||||
"""
|
||||
function get_expression(
|
||||
t::FusedComputeTask,
|
||||
inSymbol::Symbol,
|
||||
outSymbol::Symbol,
|
||||
)
|
||||
#TODO
|
||||
computeExp = quote
|
||||
$outSymbol = compute($t, $inSymbol)
|
||||
end
|
||||
|
||||
return computeExp
|
||||
end
|
||||
|
@ -8,6 +8,7 @@ using Test
|
||||
include("unit_tests_properties.jl")
|
||||
include("node_reduction.jl")
|
||||
include("unit_tests_graph.jl")
|
||||
include("unit_tests_execution.jl")
|
||||
|
||||
include("known_graphs.jl")
|
||||
end
|
||||
|
31
test/unit_tests_execution.jl
Normal file
31
test/unit_tests_execution.jl
Normal file
@ -0,0 +1,31 @@
|
||||
import MetagraphOptimization.A
|
||||
import MetagraphOptimization.B
|
||||
import MetagraphOptimization.ParticleType
|
||||
|
||||
@testset "Unit Tests Graph" begin
|
||||
particles = Dict{ParticleType, Vector{Particle}}(
|
||||
(
|
||||
A => [
|
||||
Particle(0.823648, 0.0, 0.0, 0.823648, A),
|
||||
Particle(0.823648, -0.835061, -0.474802, 0.277915, A),
|
||||
]
|
||||
),
|
||||
(
|
||||
B => [
|
||||
Particle(0.823648, 0.0, 0.0, -0.823648, B),
|
||||
Particle(0.823648, 0.835061, 0.474802, -0.277915, B),
|
||||
]
|
||||
),
|
||||
)
|
||||
|
||||
expected_result = 5.5320567694746876e-5
|
||||
|
||||
for _ in 1:10 # test in a loop because graph layout should not change the result
|
||||
graph = parse_abc(joinpath(@__DIR__, "..", "input", "AB->AB.txt"))
|
||||
@test isapprox(execute(graph, particles), expected_result; rtol = 0.001)
|
||||
|
||||
code = MetagraphOptimization.gen_code(graph)
|
||||
@test isapprox(execute(code, particles), expected_result; rtol = 0.001)
|
||||
end
|
||||
end
|
||||
println("Execution Unit Tests Complete!")
|
@ -143,9 +143,9 @@ import MetagraphOptimization.partners
|
||||
nf = first(operations.nodeFusions)
|
||||
|
||||
properties = get_properties(graph)
|
||||
@test properties.computeEffort == 134
|
||||
@test properties.computeEffort == 28
|
||||
@test properties.data == 62
|
||||
@test properties.computeIntensity ≈ 134 / 62
|
||||
@test properties.computeIntensity ≈ 28 / 62
|
||||
@test properties.noNodes == 26
|
||||
@test properties.noEdges == 25
|
||||
|
||||
@ -169,9 +169,9 @@ import MetagraphOptimization.partners
|
||||
@test length(graph.dirtyNodes) != 0
|
||||
@test properties.noNodes == 24
|
||||
@test properties.noEdges == 23
|
||||
@test properties.computeEffort == 134
|
||||
@test properties.computeEffort == 28
|
||||
@test properties.data < 62
|
||||
@test properties.computeIntensity > 134 / 62
|
||||
@test properties.computeIntensity > 28 / 62
|
||||
|
||||
operations = get_operations(graph)
|
||||
@test length(graph.dirtyNodes) == 0
|
||||
@ -208,9 +208,9 @@ import MetagraphOptimization.partners
|
||||
properties = get_properties(graph)
|
||||
@test properties.noNodes == 26
|
||||
@test properties.noEdges == 25
|
||||
@test properties.computeEffort == 134
|
||||
@test properties.computeEffort == 28
|
||||
@test properties.data == 62
|
||||
@test properties.computeIntensity ≈ 134 / 62
|
||||
@test properties.computeIntensity ≈ 28 / 62
|
||||
|
||||
operations = get_operations(graph)
|
||||
@test length(operations) ==
|
||||
|
@ -10,11 +10,11 @@
|
||||
Data10 = MetagraphOptimization.DataTask(10)
|
||||
Data20 = MetagraphOptimization.DataTask(20)
|
||||
|
||||
@test MetagraphOptimization.compute_effort(S1) == 10
|
||||
@test MetagraphOptimization.compute_effort(S2) == 10
|
||||
@test MetagraphOptimization.compute_effort(U) == 6
|
||||
@test MetagraphOptimization.compute_effort(V) == 20
|
||||
@test MetagraphOptimization.compute_effort(P) == 15
|
||||
@test MetagraphOptimization.compute_effort(S1) == 11
|
||||
@test MetagraphOptimization.compute_effort(S2) == 12
|
||||
@test MetagraphOptimization.compute_effort(U) == 1
|
||||
@test MetagraphOptimization.compute_effort(V) == 6
|
||||
@test MetagraphOptimization.compute_effort(P) == 0
|
||||
@test MetagraphOptimization.compute_effort(Sum) == 1
|
||||
@test MetagraphOptimization.compute_effort(Data10) == 0
|
||||
@test MetagraphOptimization.compute_effort(Data20) == 0
|
||||
|
Loading…
x
Reference in New Issue
Block a user