14 Commits

36 changed files with 160 additions and 1257 deletions

View File

@ -4,7 +4,6 @@ authors = ["Anton Reinhard <anton.reinhard@proton.me>"]
version = "0.1.0"
[deps]
AccurateArithmetic = "22286c92-06ac-501d-9306-4abd417d9753"
DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
JuliaFormatter = "98e50ef6-434e-11e9-1051-2b60c6c9e899"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"

View File

@ -7,18 +7,15 @@ makedocs(
root = "docs",
source = "src",
build = "build",
warnonly = true,
clean = true,
doctest = true,
modules = Module[MetagraphOptimization],
#repo = "https://code.woubery.com/Rubydragon/MetagraphOptimization.jl/src/branch/{commit}{path}#L{line}",
remotes = nothing,
repo = "https://code.woubery.com/Rubydragon/MetagraphOptimization.jl/src/branch/{commit}{path}#L{line}",
sitename = "MetagraphOptimization.jl",
pages = [
"index.md",
"Manual" => "manual.md",
"Library" => [
"Public" => "lib/public.md",
"Graph" => "lib/internals/graph.md",
"Node" => "lib/internals/node.md",
"Task" => "lib/internals/task.md",
@ -26,7 +23,6 @@ makedocs(
"Models" => "lib/internals/models.md",
"Diff" => "lib/internals/diff.md",
"Utility" => "lib/internals/utility.md",
"Code Generation" => "lib/internals/code_gen.md",
],
"Contribution" => "contribution.md",
],

View File

@ -1,8 +0,0 @@
# Code Generation
## Main
```@autodocs
Modules = [MetagraphOptimization]
Pages = ["code_gen/main.jl"]
Order = [:function]
```

View File

@ -9,13 +9,6 @@ Pages = ["models/abc/types.jl"]
Order = [:type, :constant]
```
### Particle
```@autodocs
Modules = [MetagraphOptimization]
Pages = ["models/abc/particle.jl"]
Order = [:type, :constant, :function]
```
### Parse
```@autodocs
Modules = [MetagraphOptimization]
@ -30,20 +23,6 @@ Pages = ["models/abc/properties.jl"]
Order = [:function]
```
### Create
```@autodocs
Modules = [MetagraphOptimization]
Pages = ["models/abc/create.jl"]
Order = [:function]
```
### Compute
```@autodocs
Modules = [MetagraphOptimization]
Pages = ["models/abc/compute.jl"]
Order = [:function]
```
## QED-Model
*To be added*

View File

@ -1,22 +0,0 @@
# Properties
## Type
```@autodocs
Modules = [MetagraphOptimization]
Pages = ["properties/type.jl"]
Order = [:type]
```
## Create
```@autodocs
Modules = [MetagraphOptimization]
Pages = ["properties/create.jl"]
Order = [:function]
```
## Utility
```@autodocs
Modules = [MetagraphOptimization]
Pages = ["properties/utility.jl"]
Order = [:function]
```

View File

@ -3,7 +3,7 @@
## Helper Functions
```@autodocs
Modules = [MetagraphOptimization]
Pages = ["./utility.jl"]
Pages = ["utility.jl"]
Order = [:type, :function]
```

View File

@ -41,9 +41,9 @@ function gen_plot(filepath)
i = i - 1
end
props = get_properties(g)
props = graph_properties(g)
push!(x, props.data)
push!(y, props.computeEffort)
push!(y, props.compute_effort)
end
println("\rDone.")

View File

@ -44,9 +44,9 @@ function gen_plot(filepath)
props = get_properties(g)
props = graph_properties(g)
x0 = props.data
y0 = props.computeEffort
y0 = props.compute_effort
x = Vector{Float64}()
y = Vector{Float64}()
@ -55,9 +55,9 @@ function gen_plot(filepath)
opt = get_operations(g)
for op in opt.nodeFusions
push_operation!(g, op)
props = get_properties(g)
props = graph_properties(g)
push!(x, props.data)
push!(y, props.computeEffort)
push!(y, props.compute_effort)
pop_operation!(g)
push!(
@ -65,15 +65,15 @@ function gen_plot(filepath)
"NF: (" *
string(props.data) *
", " *
string(props.computeEffort) *
string(props.compute_effort) *
")",
)
end
for op in opt.nodeReductions
push_operation!(g, op)
props = get_properties(g)
props = graph_properties(g)
push!(x, props.data)
push!(y, props.computeEffort)
push!(y, props.compute_effort)
pop_operation!(g)
push!(
@ -81,15 +81,15 @@ function gen_plot(filepath)
"NR: (" *
string(props.data) *
", " *
string(props.computeEffort) *
string(props.compute_effort) *
")",
)
end
for op in opt.nodeSplits
push_operation!(g, op)
props = get_properties(g)
props = graph_properties(g)
push!(x, props.data)
push!(y, props.computeEffort)
push!(y, props.compute_effort)
pop_operation!(g)
push!(
@ -97,7 +97,7 @@ function gen_plot(filepath)
"NS: (" *
string(props.data) *
", " *
string(props.computeEffort) *
string(props.compute_effort) *
")",
)
end

View File

@ -3,7 +3,7 @@ function test_random_walk(g::DAG, n::Int64)
# the purpose here is to do "random" operations and reverse them again and validate that the graph stays the same and doesn't diverge
reset_graph!(g)
properties = get_properties(g)
properties = graph_properties(g)
for i in 1:n
# choose push or pop
@ -34,26 +34,3 @@ function test_random_walk(g::DAG, n::Int64)
return reset_graph!(g)
end
function reduce_all!(g::DAG)
reset_graph!(g)
opt = get_operations(g)
while (!isempty(opt.nodeReductions))
push_operation!(g, pop!(opt.nodeReductions))
if (isempty(opt.nodeReductions))
opt = get_operations(g)
end
end
return nothing
end
function reduce_one!(g::DAG)
opt = get_operations(g)
if !isempty(opt.nodeReductions)
push_operation!(g, pop!(opt.nodeReductions))
end
opt = get_operations(g)
return nothing
end
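
For reference, a minimal sketch of how these removed helpers were driven, assuming the file above has been included so `reduce_one!` and `reduce_all!` are in scope; the input file path mirrors the test suite and is an assumption for illustration:

```julia
using MetagraphOptimization

# build a graph to reduce (path as used in the test suite, assumed here)
g = parse_abc(joinpath(@__DIR__, "..", "input", "AB->AB.txt"))

reduce_one!(g)   # apply a single node reduction, if one is available
reduce_all!(g)   # keep applying node reductions until none are left

graph_properties(g).nodes   # fewer nodes than directly after parsing
```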

View File

@ -15,8 +15,6 @@ export AbstractComputeTask
export AbstractDataTask
export DataTask
export FusedComputeTask
export PossibleOperations
export GraphProperties
export make_node
export make_edge
@ -27,7 +25,7 @@ export is_exit_node
export parents
export children
export compute
export get_properties
export graph_properties
export get_exit_node
export is_valid
@ -50,11 +48,6 @@ export ComputeTaskV
export ComputeTaskU
export ComputeTaskSum
export execute
export gen_particles
export ParticleValue
export Particle
export ==, in, show, isempty, delete!, length
export bytes_to_human_readable
@ -62,8 +55,6 @@ export bytes_to_human_readable
import Base.length
import Base.show
import Base.==
import Base.+
import Base.-
import Base.in
import Base.copy
import Base.isempty
@ -75,7 +66,6 @@ import Base.collect
include("task/type.jl")
include("node/type.jl")
include("diff/type.jl")
include("properties/type.jl")
include("operation/type.jl")
include("graph/type.jl")
@ -106,21 +96,13 @@ include("operation/get.jl")
include("operation/print.jl")
include("operation/validate.jl")
include("properties/create.jl")
include("properties/utility.jl")
include("task/create.jl")
include("task/compare.jl")
include("task/print.jl")
include("task/properties.jl")
include("models/abc/types.jl")
include("models/abc/particle.jl")
include("models/abc/compute.jl")
include("models/abc/create.jl")
include("models/abc/properties.jl")
include("models/abc/parse.jl")
include("code_gen/main.jl")
end # module MetagraphOptimization

View File

@ -1,126 +0,0 @@
using DataStructures
"""
gen_code(graph::DAG)
Generate the code for a given graph. The return value is a tuple of:
- `code::Expr`: The julia expression containing the code for the whole graph.
- `inputSymbols::Dict{String, Symbol}`: A dictionary of symbols mapping the names of the input nodes of the graph to the symbols their inputs should be provided on.
- `outputSymbol::Symbol`: The symbol of the final calculated value
See also: [`execute`](@ref)
"""
function gen_code(graph::DAG)
code = Vector{Expr}()
sizehint!(code, length(graph.nodes))
nodeQueue = PriorityQueue{Node, Int}()
inputSyms = Dict{String, Symbol}()
# use a priority equal to the number of unseen children -> 0 are nodes that can be added
for node in get_entry_nodes(graph)
enqueue!(nodeQueue, node => 0)
push!(inputSyms, node.name => Symbol("data_$(to_var_name(node.id))_in"))
end
node = nothing
while !isempty(nodeQueue)
@assert peek(nodeQueue)[2] == 0
node = dequeue!(nodeQueue)
push!(code, get_expression(node))
for parent in node.parents
# reduce the priority of all parents by one
if (!haskey(nodeQueue, parent))
enqueue!(nodeQueue, parent => length(parent.children) - 1)
else
nodeQueue[parent] = nodeQueue[parent] - 1
end
end
end
# node is now the last node we looked at -> the output node
outSym = Symbol("data_$(to_var_name(node.id))")
return (
code = Expr(:block, code...),
inputSymbols = inputSyms,
outputSymbol = outSym,
)
end
"""
execute(generated_code, input::Dict{ParticleType, Vector{Particle}})
Execute the given `generated_code` (as returned by [`gen_code`](@ref)) on the given input particles.
"""
function execute(generated_code, input::Dict{ParticleType, Vector{Particle}})
(code, inputSymbols, outputSymbol) = generated_code
assignInputs = Vector{Expr}()
for (name, symbol) in inputSymbols
type = nothing
if startswith(name, "A")
type = A
elseif startswith(name, "B")
type = B
else
type = C
end
index = parse(Int, name[2:end])
push!(
assignInputs,
Meta.parse(
"$(symbol) = ParticleValue(Particle($(input[type][index]).P0, $(input[type][index]).P1, $(input[type][index]).P2, $(input[type][index]).P3, $(type)), 1.0)",
),
)
end
assignInputs = Expr(:block, assignInputs...)
eval(assignInputs)
eval(code)
eval(Meta.parse("result = $outputSymbol"))
return result
end
"""
execute(graph::DAG, input::Dict{ParticleType, Vector{Particle}})
Generate the code for the given `graph` via [`gen_code`](@ref) and execute it on the given input particles.
The input particles should be sorted into the dictionary according to their [`ParticleType`](@ref)s.
See also: [`gen_particles`](@ref)
"""
function execute(graph::DAG, input::Dict{ParticleType, Vector{Particle}})
(code, inputSymbols, outputSymbol) = gen_code(graph)
assignInputs = Vector{Expr}()
for (name, symbol) in inputSymbols
type = nothing
if startswith(name, "A")
type = A
elseif startswith(name, "B")
type = B
else
type = C
end
index = parse(Int, name[2:end])
push!(
assignInputs,
Meta.parse(
"$(symbol) = ParticleValue(Particle($(input[type][index]).P0, $(input[type][index]).P1, $(input[type][index]).P2, $(input[type][index]).P3, $(type)), 1.0)",
),
)
end
assignInputs = Expr(:block, assignInputs...)
eval(assignInputs)
eval(code)
eval(Meta.parse("result = $outputSymbol"))
return result
end
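
The execution path removed here was exercised roughly as in the (also removed) execution unit test; a sketch against the pre-change API, with the input file path as an assumption for illustration:

```julia
using MetagraphOptimization
import MetagraphOptimization: A, B, gen_particles, gen_code

# input particles for an AB->AB process (generated on-shell by the removed helper)
particles = gen_particles(Dict((A => 2), (B => 2)))

# path as used in the test suite, assumed here
graph = parse_abc(joinpath(@__DIR__, "..", "input", "AB->AB.txt"))

result = execute(graph, particles)     # generate and run the code in one step
code = gen_code(graph)                 # or generate the code once ...
result2 = execute(code, particles)     # ... and execute it separately
```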

View File

@ -144,8 +144,6 @@ function remove_edge!(
# 1: mute
pre_length1 = length(node1.parents)
pre_length2 = length(node2.children)
#TODO: filter is very slow
filter!(x -> x != node2, node1.parents)
filter!(x -> x != node1, node2.children)
@ -203,7 +201,6 @@ function invalidate_caches!(graph::DAG, operation::NodeFusion)
delete!(graph.possibleOperations, operation)
# delete the operation from all caches of nodes involved in the operation
# TODO: filter is very slow
filter!(!=(operation), operation.input[1].nodeFusions)
filter!(!=(operation), operation.input[3].nodeFusions)

View File

@ -23,7 +23,6 @@ end
Print the given graph to io. If there are too many nodes it will print only a summary of them.
"""
function show(io::IO, graph::DAG)
apply_all!(graph)
println(io, "Graph:")
print(io, " Nodes: ")
@ -59,12 +58,12 @@ function show(io::IO, graph::DAG)
end
println(io)
println(io, " Edges: ", noEdges)
properties = get_properties(graph)
println(io, " Total Compute Effort: ", properties.computeEffort)
properties = graph_properties(graph)
println(io, " Total Compute Effort: ", properties.compute_effort)
println(io, " Total Data Transfer: ", properties.data)
return println(
io,
" Total Compute Intensity: ",
properties.computeIntensity,
properties.compute_intensity,
)
end

View File

@ -1,17 +1,31 @@
"""
get_properties(graph::DAG)
graph_properties(graph::DAG)
Return the graph's [`GraphProperties`](@ref).
Return the graph's properties, a named tuple with fields `.data`, `.compute_effort`, `.compute_intensity`, `.nodes` (number of nodes) and `.edges` (number of edges).
"""
function get_properties(graph::DAG)
function graph_properties(graph::DAG)
# make sure the graph is fully generated
apply_all!(graph)
if (graph.properties.computeEffort == 0.0)
graph.properties = GraphProperties(graph)
d = 0
ce = 0
ed = 0
for node in graph.nodes
d += data(node.task) * length(node.parents)
ce += compute_effort(node.task)
ed += length(node.parents)
end
return graph.properties
ci = (d == 0) ? 0.0 : ce / d # avoid division by zero for an empty graph
result = (
data = d,
compute_effort = ce,
compute_intensity = ci,
nodes = length(graph.nodes),
edges = ed,
)
return result
end
"""
@ -27,18 +41,3 @@ function get_exit_node(graph::DAG)
end
@assert false "The given graph has no exit node! It is either empty or not acyclic!"
end
"""
get_entry_nodes(graph::DAG)
Return a vector of the graph's entry nodes.
"""
function get_entry_nodes(graph::DAG)
result = Vector{Node}()
for node in graph.nodes
if (is_entry_node(node))
push!(result, node)
end
end
return result
end
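
A quick sketch of reading the named tuple returned by the new `graph_properties`; the input file path is an assumption for illustration:

```julia
using MetagraphOptimization

# path as used in the test suite, assumed here
g = parse_abc(joinpath(@__DIR__, "..", "input", "AB->AB.txt"))
props = graph_properties(g)

props.nodes, props.edges       # graph size
props.data                     # total data transfer
props.compute_effort           # total compute effort
props.compute_intensity ≈ props.compute_effort / props.data   # true by construction
```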

View File

@ -41,9 +41,6 @@ mutable struct DAG
# "snapshot" system: keep track of added/removed nodes/edges since last snapshot
# these are muted in insert_node! etc.
diff::Diff
# the cached properties of the DAG
properties::GraphProperties
end
"""
@ -72,6 +69,5 @@ function DAG()
PossibleOperations(),
Set{Node}(),
Diff(),
GraphProperties(),
)
end

View File

@ -1,256 +0,0 @@
using AccurateArithmetic
"""
compute(::ComputeTaskP, data::ParticleValue)
Return the particle and value as is.
0 FLOP.
"""
function compute(::ComputeTaskP, data::ParticleValue)
return data
end
"""
compute(::ComputeTaskU, data::ParticleValue)
Compute an outer edge. Return the particle value with the same particle and the value multiplied by an outer_edge factor.
1 FLOP.
"""
function compute(::ComputeTaskU, data::ParticleValue)
return ParticleValue(data.p, data.v * outer_edge(data.p))
end
"""
compute(::ComputeTaskV, data1::ParticleValue, data2::ParticleValue)
Compute a vertex. Preserve momentum and particle types (AB->C etc.) to create the resulting particle, and multiply the values together with a vertex factor.
6 FLOP.
"""
function compute(::ComputeTaskV, data1::ParticleValue, data2::ParticleValue)
p3 = preserve_momentum(data1.p, data2.p)
dataOut = ParticleValue(p3, data1.v * vertex() * data2.v)
return dataOut
end
"""
compute(::ComputeTaskS2, data1::ParticleValue, data2::ParticleValue)
Compute a final inner edge (2 input particles, no output particle).
For valid inputs, both input particles should have the same momenta at this point.
12 FLOP.
"""
function compute(::ComputeTaskS2, data1::ParticleValue, data2::ParticleValue)
return data1.v * inner_edge(data1.p) * data2.v
end
"""
compute(::ComputeTaskS1, data::ParticleValue)
Compute inner edge (1 input particle, 1 output particle).
11 FLOP.
"""
function compute(::ComputeTaskS1, data::ParticleValue)
return ParticleValue(data.p, data.v * inner_edge(data.p))
end
"""
compute(::ComputeTaskSum, data::Vector{Float64})
Compute a sum over the vector. Use an algorithm that accounts for accumulated errors in long sums with potentially large differences in magnitude of the summands.
Linearly many FLOP with growing data.
"""
function compute(::ComputeTaskSum, data::Vector{Float64})
return sum_kbn(data)
end
"""
compute(t::FusedComputeTask, data)
Compute a [`FusedComputeTask`](@ref). This simply asserts false and should not be called. Fused Compute Tasks generate their expressions directly through the other tasks instead.
"""
function compute(t::FusedComputeTask, data)
@assert false "This is not implemented and should never be called"
end
"""
get_expression(::ComputeTaskP, inSymbol::Symbol, outSymbol::Symbol)
Generate and return code evaluating [`ComputeTaskP`](@ref) on `inSymbol`, providing the output on `outSymbol`.
"""
function get_expression(::ComputeTaskP, inSymbol::Symbol, outSymbol::Symbol)
return Meta.parse("$outSymbol = compute(ComputeTaskP(), $inSymbol)")
end
"""
get_expression(::ComputeTaskU, inSymbol::Symbol, outSymbol::Symbol)
Generate code evaluating [`ComputeTaskU`](@ref) on `inSymbol`, providing the output on `outSymbol`.
`inSymbol` should be of type [`ParticleValue`](@ref), `outSymbol` will be of type [`ParticleValue`](@ref).
"""
function get_expression(::ComputeTaskU, inSymbol::Symbol, outSymbol::Symbol)
return Meta.parse("$outSymbol = compute(ComputeTaskU(), $inSymbol)")
end
"""
get_expression(::ComputeTaskV, inSymbol1::Symbol, inSymbol2::Symbol, outSymbol::Symbol)
Generate code evaluating [`ComputeTaskV`](@ref) on `inSymbol1` and `inSymbol2`, providing the output on `outSymbol`.
`inSymbol1` and `inSymbol2` should be of type [`ParticleValue`](@ref), `outSymbol` will be of type [`ParticleValue`](@ref).
"""
function get_expression(
::ComputeTaskV,
inSymbol1::Symbol,
inSymbol2::Symbol,
outSymbol::Symbol,
)
return Meta.parse(
"$outSymbol = compute(ComputeTaskV(), $inSymbol1, $inSymbol2)",
)
end
"""
get_expression(::ComputeTaskS2, inSymbol1::Symbol, inSymbol2::Symbol, outSymbol::Symbol)
Generate code evaluating [`ComputeTaskS2`](@ref) on `inSymbol1` and `inSymbol2`, providing the output on `outSymbol`.
`inSymbol1` and `inSymbol2` should be of type [`ParticleValue`](@ref), `outSymbol` will be of type `Float64`.
"""
function get_expression(
::ComputeTaskS2,
inSymbol1::Symbol,
inSymbol2::Symbol,
outSymbol::Symbol,
)
return Meta.parse(
"$outSymbol = compute(ComputeTaskS2(), $inSymbol1, $inSymbol2)",
)
end
"""
get_expression(::ComputeTaskS1, inSymbol::Symbol, outSymbol::Symbol)
Generate code evaluating [`ComputeTaskS1`](@ref) on `inSymbol`, providing the output on `outSymbol`.
`inSymbol` should be of type [`ParticleValue`](@ref), `outSymbol` will be of type [`ParticleValue`](@ref).
"""
function get_expression(::ComputeTaskS1, inSymbol::Symbol, outSymbol::Symbol)
return Meta.parse("$outSymbol = compute(ComputeTaskS1(), $inSymbol)")
end
"""
get_expression(::ComputeTaskSum, inSymbols::Vector{Symbol}, outSymbol::Symbol)
Generate code evaluating [`ComputeTaskSum`](@ref) on `inSymbols`, providing the output on `outSymbol`.
`inSymbols` should all be of type `Float64`, `outSymbol` will be of type `Float64`.
"""
function get_expression(
::ComputeTaskSum,
inSymbols::Vector{Symbol},
outSymbol::Symbol,
)
return quote
$outSymbol = compute(ComputeTaskSum(), [$(inSymbols...)])
end
end
"""
get_expression(t::FusedComputeTask, inSymbols::Vector{Symbol}, outSymbol::Symbol)
Generate code evaluating a [`FusedComputeTask`](@ref) on `inSymbols`, providing the output on `outSymbol`.
`inSymbols` should be of the correct types and may be heterogeneous. `outSymbol` will have the type of the output of `t`'s second task, `T2`.
"""
function get_expression(
t::FusedComputeTask,
inSymbols::Vector{Symbol},
outSymbol::Symbol,
)
(T1, T2) = get_types(t)
c1 = children(T1())
c2 = children(T2())
expr1 = nothing
expr2 = nothing
# TODO need to figure out how to know which inputs belong to which subtask
# since we order the vectors with the child nodes we can't just split
if (c1 == 1)
expr1 = get_expression(T1(), inSymbols[begin], :intermediate)
elseif (c1 == 2)
expr1 =
get_expression(T1(), inSymbols[begin], inSymbols[2], :intermediate)
else
expr1 = get_expression(T1(), inSymbols[begin:c1], :intermediate)
end
if (c2 == 1)
expr2 = get_expression(T2(), :intermediate, outSymbol)
elseif c2 == 2
expr2 =
get_expression(T2(), :intermediate, inSymbols[c1 + 1], outSymbol)
else
expr2 = get_expression(
T2(),
vcat(:intermediate, inSymbols[(c1 + 1):end]),
outSymbol,
)
end
return Expr(:block, expr1, expr2)
end
"""
get_expression(node::ComputeTaskNode)
Generate and return code for a given [`ComputeTaskNode`](@ref).
"""
function get_expression(node::ComputeTaskNode)
t = typeof(node.task)
@assert length(node.children) == children(node.task) || t <: ComputeTaskSum
if (t <: ComputeTaskU || t <: ComputeTaskP || t <: ComputeTaskS1) # single input
symbolIn = Symbol("data_$(to_var_name(node.children[1].id))")
symbolOut = Symbol("data_$(to_var_name(node.id))")
return get_expression(t(), symbolIn, symbolOut)
elseif (t <: ComputeTaskS2 || t <: ComputeTaskV) # double input
symbolIn1 = Symbol("data_$(to_var_name(node.children[1].id))")
symbolIn2 = Symbol("data_$(to_var_name(node.children[2].id))")
symbolOut = Symbol("data_$(to_var_name(node.id))")
return get_expression(t(), symbolIn1, symbolIn2, symbolOut)
elseif (t <: ComputeTaskSum || t <: FusedComputeTask) # vector input
inSymbols = Vector{Symbol}()
for child in node.children
push!(inSymbols, Symbol("data_$(to_var_name(child.id))"))
end
outSymbol = Symbol("data_$(to_var_name(node.id))")
return get_expression(t(), inSymbols, outSymbol)
else
error("Unknown compute task")
end
end
"""
get_expression(node::DataTaskNode)
Generate and return code for a given [`DataTaskNode`](@ref).
"""
function get_expression(node::DataTaskNode)
# TODO: do things to transport data from/to gpu, between numa nodes, etc.
@assert length(node.children) <= 1
inSymbol = nothing
if (length(node.children) == 1)
inSymbol = Symbol("data_$(to_var_name(node.children[1].id))")
else
inSymbol = Symbol("data_$(to_var_name(node.id))_in")
end
outSymbol = Symbol("data_$(to_var_name(node.id))")
dataTransportExp = Meta.parse("$outSymbol = $inSymbol")
return dataTransportExp
end
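
A minimal sketch of chaining the `compute` functions removed here by hand, for two outer legs and one vertex; the momentum values are arbitrary illustrations:

```julia
using MetagraphOptimization
import MetagraphOptimization: A, B, Particle, ParticleValue

# two incoming legs with arbitrary illustrative momenta
pA = ParticleValue(Particle(1.0, 0.0, 0.0, 1.0, A), 1.0)
pB = ParticleValue(Particle(1.0, 0.0, 0.0, -1.0, B), 1.0)

# P is the identity, U multiplies the value by the outer-edge factor
uA = compute(ComputeTaskU(), compute(ComputeTaskP(), pA))
uB = compute(ComputeTaskU(), compute(ComputeTaskP(), pB))

# V combines both legs: summed momentum, remaining type C, values multiplied with the vertex factor
vAB = compute(ComputeTaskV(), uA, uB)
```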

View File

@ -1,74 +0,0 @@
"""
Particle(rng, type::ParticleType)
Return a randomly generated particle of the given [`ParticleType`](@ref).
"""
function Particle(rng, type::ParticleType)
p1 = rand(rng, Float64)
p2 = rand(rng, Float64)
p3 = rand(rng, Float64)
m = mass(type)
# keep the momenta of the particles on-shell
p4 = sqrt(p1^2 + p2^2 + p3^2 + m^2)
return Particle(p1, p2, p3, p4, type)
end
"""
gen_particles(ns::Dict{ParticleType, Int})
Return a `Dict` mapping each given [`ParticleType`](@ref) to a vector with the requested number of randomly generated [`Particle`](@ref)s.
Note: Apart from the special two-`A`, two-`B` case, this does not take into account the preservation of momenta required for an actual valid process!
"""
function gen_particles(ns::Dict{ParticleType, Int})
particles = Dict{ParticleType, Vector{Particle}}()
rng = MersenneTwister(0)
if ns == Dict((A => 2), (B => 2))
rho = 1.0
omega = rand(rng, Float64)
theta = rand(rng, Float64) * π
phi = rand(rng, Float64) * π
particles[A] = Vector{Particle}()
particles[B] = Vector{Particle}()
push!(particles[A], Particle(omega, 0, 0, omega, A))
push!(particles[B], Particle(omega, 0, 0, -omega, B))
push!(
particles[A],
Particle(
omega,
rho * cos(theta) * cos(phi),
rho * cos(theta) * sin(phi),
rho * sin(theta),
A,
),
)
push!(
particles[B],
Particle(
omega,
-rho * cos(theta) * cos(phi),
-rho * cos(theta) * sin(phi),
-rho * sin(theta),
B,
),
)
return particles
end
for (type, n) in ns
particles[type] = Vector{Particle}()
for i in 1:n
push!(particles[type], Particle(rng, type))
end
end
return particles
end
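
A usage sketch of the generator removed here; the concrete particle counts are assumptions for illustration. The two-`A`, two-`B` request hits the special case that builds two back-to-back pairs, any other request draws independent on-shell particles:

```julia
import MetagraphOptimization: A, B, C, gen_particles

abab = gen_particles(Dict((A => 2), (B => 2)))    # special case: two back-to-back pairs
other = gen_particles(Dict((A => 1), (C => 3)))   # generic case: independent on-shell particles

length(abab[A]) == 2    # one vector of particles per requested type
length(other[C]) == 3
```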

View File

@ -4,9 +4,6 @@ regex_c = r"^[A-C]\(([^']*),([^']*)\)$" # Regex for the combinations of 2 pa
regex_m = r"^M\(([^']*),([^']*),([^']*)\)$" # Regex for the combinations of 3 particles
regex_plus = r"^\+$" # Regex for the sum
const PARTICLE_VALUE_SIZE::Int = 48
const FLOAT_SIZE::Int = 8
"""
parse_nodes(input::AbstractString)
@ -64,8 +61,7 @@ function parse_abc(filename::String, verbose::Bool = false)
sizehint!(graph.nodes, estimate_no_nodes)
sum_node = insert_node!(graph, make_node(ComputeTaskSum()), false, false)
global_data_out =
insert_node!(graph, make_node(DataTask(FLOAT_SIZE)), false, false)
global_data_out = insert_node!(graph, make_node(DataTask(10)), false, false)
insert_edge!(graph, sum_node, global_data_out, false, false)
# remember the data out nodes for connection
@ -90,28 +86,13 @@ function parse_abc(filename::String, verbose::Bool = false)
end
if occursin(regex_a, node)
# add nodes and edges for the state reading to u(P(Particle))
data_in = insert_node!(
graph,
make_node(DataTask(PARTICLE_VALUE_SIZE), string(node)),
false,
false,
) # read particle data node
data_in = insert_node!(graph, make_node(DataTask(4)), false, false) # read particle data node
compute_P =
insert_node!(graph, make_node(ComputeTaskP()), false, false) # compute P node
data_Pu = insert_node!(
graph,
make_node(DataTask(PARTICLE_VALUE_SIZE)),
false,
false,
) # transfer data from P to u (one ParticleValue object)
data_Pu = insert_node!(graph, make_node(DataTask(6)), false, false) # transfer data from P to u
compute_u =
insert_node!(graph, make_node(ComputeTaskU()), false, false) # compute U node
data_out = insert_node!(
graph,
make_node(DataTask(PARTICLE_VALUE_SIZE)),
false,
false,
) # transfer data out from u (one ParticleValue object)
data_out = insert_node!(graph, make_node(DataTask(3)), false, false) # transfer data out from u
insert_edge!(graph, data_in, compute_P, false, false)
insert_edge!(graph, compute_P, data_Pu, false, false)
@ -128,12 +109,7 @@ function parse_abc(filename::String, verbose::Bool = false)
compute_v =
insert_node!(graph, make_node(ComputeTaskV()), false, false)
data_out = insert_node!(
graph,
make_node(DataTask(PARTICLE_VALUE_SIZE)),
false,
false,
)
data_out = insert_node!(graph, make_node(DataTask(5)), false, false)
if (occursin(regex_c, in1))
# put an S node after this input
@ -143,12 +119,8 @@ function parse_abc(filename::String, verbose::Bool = false)
false,
false,
)
data_S_v = insert_node!(
graph,
make_node(DataTask(PARTICLE_VALUE_SIZE)),
false,
false,
)
data_S_v =
insert_node!(graph, make_node(DataTask(5)), false, false)
insert_edge!(graph, dataOutNodes[in1], compute_S, false, false)
insert_edge!(graph, compute_S, data_S_v, false, false)
@ -167,12 +139,8 @@ function parse_abc(filename::String, verbose::Bool = false)
false,
false,
)
data_S_v = insert_node!(
graph,
make_node(DataTask(PARTICLE_VALUE_SIZE)),
false,
false,
)
data_S_v =
insert_node!(graph, make_node(DataTask(5)), false, false)
insert_edge!(graph, dataOutNodes[in2], compute_S, false, false)
insert_edge!(graph, compute_S, data_S_v, false, false)
@ -195,12 +163,7 @@ function parse_abc(filename::String, verbose::Bool = false)
# in2 + in3 with a v
compute_v =
insert_node!(graph, make_node(ComputeTaskV()), false, false)
data_v = insert_node!(
graph,
make_node(DataTask(PARTICLE_VALUE_SIZE)),
false,
false,
)
data_v = insert_node!(graph, make_node(DataTask(5)), false, false)
insert_edge!(graph, dataOutNodes[in2], compute_v, false, false)
insert_edge!(graph, dataOutNodes[in3], compute_v, false, false)
@ -209,12 +172,8 @@ function parse_abc(filename::String, verbose::Bool = false)
# combine with the v of the combined other input
compute_S2 =
insert_node!(graph, make_node(ComputeTaskS2()), false, false)
data_out = insert_node!(
graph,
make_node(DataTask(FLOAT_SIZE)),
false,
false,
) # output of a S2 task is only a float
data_out =
insert_node!(graph, make_node(DataTask(10)), false, false)
insert_edge!(graph, data_v, compute_S2, false, false)
insert_edge!(graph, dataOutNodes[in1], compute_S2, false, false)
@ -236,14 +195,6 @@ function parse_abc(filename::String, verbose::Bool = false)
#put all nodes into dirty nodes set
graph.dirtyNodes = copy(graph.nodes)
if (verbose)
println("Generating the graph's properties")
end
graph.properties = GraphProperties(graph)
if (verbose)
println("Done")
end
# don't actually need to read the edges
return graph
end

View File

@ -1,130 +0,0 @@
"""
ParticleType
A Particle Type in the ABC Model as an enum, with types `A`, `B` and `C`.
"""
@enum ParticleType A = 1 B = 2 C = 3
"""
PARTICLE_MASSES
A constant dictionary containing the masses of the different [`ParticleType`](@ref)s.
"""
const PARTICLE_MASSES =
Dict{ParticleType, Float64}(A => 1.0, B => 1.0, C => 0.0)
"""
Particle
A struct describing a particle of the ABC-Model. It has the 4 momentum parts P0...P3 and a [`ParticleType`](@ref).
`sizeof(Particle)` = 40 bytes
"""
struct Particle
P0::Float64
P1::Float64
P2::Float64
P3::Float64
type::ParticleType
end
"""
ParticleValue
A struct describing a particle during a calculation of a Feynman Diagram, together with the value that's being calculated.
`sizeof(ParticleValue)` = 48 bytes
"""
struct ParticleValue
p::Particle
v::Float64
end
"""
mass(t::ParticleType)
Return the mass (at rest) of the given particle type.
"""
mass(t::ParticleType) = PARTICLE_MASSES[t]
"""
remaining_type(t1::ParticleType, t2::ParticleType)
For 2 given (non-equal) particle types, return the third of ABC.
"""
function remaining_type(t1::ParticleType, t2::ParticleType)
@assert t1 != t2
if t1 != A && t2 != A
return A
elseif t1 != B && t2 != B
return B
else
return C
end
end
"""
square(p::Particle)
Return the square of the particle's four-momentum (in the Minkowski metric) as a `Float64` value.
Takes 7 effective FLOP.
"""
function square(p::Particle)
return p.P0 * p.P0 - p.P1 * p.P1 - p.P2 * p.P2 - p.P3 * p.P3
end
"""
inner_edge(p::Particle)
Return the factor of the inner edge with the given (virtual) particle.
Takes 10 effective FLOP (3 here + 7 in square(p)).
"""
function inner_edge(p::Particle)
return 1.0 / (square(p) - mass(p.type) * mass(p.type))
end
"""
outer_edge(p::Particle)
Return the factor of the outer edge with the given (real) particle.
Takes 0 effective FLOP.
"""
function outer_edge(p::Particle)
return 1.0
end
"""
vertex()
Return the factor of a vertex.
Takes 0 effective FLOP since it's constant.
"""
function vertex()
i = 1.0
lambda = 1.0 / 137.0
return i * lambda
end
"""
preserve_momentum(p1::Particle, p2::Particle)
Calculate and return a new particle from two given interacting ones at a vertex.
Takes 4 effective FLOP.
"""
function preserve_momentum(p1::Particle, p2::Particle)
p3 = Particle(
p1.P0 + p2.P0,
p1.P1 + p2.P1,
p1.P2 + p2.P2,
p1.P3 + p2.P3,
remaining_type(p1.type, p2.type),
)
return p3
end
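
A short sketch of how the ABC-model helpers removed here compose; the momentum values are arbitrary illustrations:

```julia
import MetagraphOptimization: A, B, Particle, mass, square, remaining_type, preserve_momentum

p = Particle(1.0, 0.0, 0.0, 0.0, A)   # an A particle at rest
square(p) ≈ mass(A)^2                 # on-shell: P0^2 - |p|^2 == m^2

remaining_type(A, B)                  # the third type, C

q = Particle(2.0, 0.0, 0.0, 1.0, B)
preserve_momentum(p, q)               # component-wise sum of momenta, resulting type C
```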

View File

@ -3,35 +3,35 @@
Return the compute effort of an S1 task.
"""
compute_effort(t::ComputeTaskS1) = 11
compute_effort(t::ComputeTaskS1) = 10
"""
compute_effort(t::ComputeTaskS2)
Return the compute effort of an S2 task.
"""
compute_effort(t::ComputeTaskS2) = 12
compute_effort(t::ComputeTaskS2) = 10
"""
compute_effort(t::ComputeTaskU)
Return the compute effort of a U task.
"""
compute_effort(t::ComputeTaskU) = 1
compute_effort(t::ComputeTaskU) = 6
"""
compute_effort(t::ComputeTaskV)
Return the compute effort of a V task.
"""
compute_effort(t::ComputeTaskV) = 6
compute_effort(t::ComputeTaskV) = 20
"""
compute_effort(t::ComputeTaskP)
Return the compute effort of a P task.
"""
compute_effort(t::ComputeTaskP) = 0
compute_effort(t::ComputeTaskP) = 15
"""
compute_effort(t::ComputeTaskSum)
@ -100,66 +100,3 @@ show(io::IO, t::ComputeTaskSum) = print("ComputeSum")
Copy the data task and return it.
"""
copy(t::DataTask) = DataTask(t.data)
"""
children(::DataTask)
Return the number of children of a data task (always 1).
"""
children(::DataTask) = 1
"""
children(::ComputeTaskS1)
Return the number of children of a ComputeTaskS1 (always 1).
"""
children(::ComputeTaskS1) = 1
"""
children(::ComputeTaskS2)
Return the number of children of a ComputeTaskS2 (always 2).
"""
children(::ComputeTaskS2) = 2
"""
children(::ComputeTaskP)
Return the number of children of a ComputeTaskP (always 1).
"""
children(::ComputeTaskP) = 1
"""
children(::ComputeTaskU)
Return the number of children of a ComputeTaskU (always 1).
"""
children(::ComputeTaskU) = 1
"""
children(::ComputeTaskV)
Return the number of children of a ComputeTaskV (always 2).
"""
children(::ComputeTaskV) = 2
"""
children(::ComputeTaskSum)
Return the number of children of a ComputeTaskSum. Since this is variable and the task doesn't know
how many children it will sum over, a wildcard -1 is returned.
TODO: this is kind of bad because it means we can't fuse with a sum task
"""
children(::ComputeTaskSum) = -1
"""
children(t::FusedComputeTask)
Return the number of children of a FusedComputeTask. It's the sum of the children of both tasks minus one.
"""
function children(t::FusedComputeTask)
(T1, T2) = get_types(t)
return children(T1()) + children(T2()) - 1 # one of the inputs is the output of T1 and thus not a child of the node
end
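
A sketch of how the `children` and `compute_effort` helpers removed here compose for a fused task; the concrete task pairing is an assumption for illustration:

```julia
import MetagraphOptimization: children, compute_effort, FusedComputeTask, ComputeTaskU, ComputeTaskV

# a hypothetical fusion of a U task feeding into a V task
t = FusedComputeTask{ComputeTaskU, ComputeTaskV}()

children(ComputeTaskU())   # 1
children(ComputeTaskV())   # 2
children(t)                # 1 + 2 - 1 == 2, since U's output is consumed by V

compute_effort(t)          # compute_effort of U plus compute_effort of V
```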

View File

@ -1,45 +1,3 @@
DataTaskNode(t::AbstractDataTask, name = "") = DataTaskNode(
t,
Vector{Node}(),
Vector{Node}(),
UUIDs.uuid1(rng[threadid()]),
missing,
missing,
missing,
name,
)
ComputeTaskNode(t::AbstractComputeTask) = ComputeTaskNode(
t,
Vector{Node}(),
Vector{Node}(),
UUIDs.uuid1(rng[threadid()]),
missing,
missing,
Vector{NodeFusion}(),
)
copy(m::Missing) = missing
copy(n::ComputeTaskNode) = ComputeTaskNode(
copy(n.task),
copy(n.parents),
copy(n.children),
UUIDs.uuid1(rng[threadid()]),
copy(n.nodeReduction),
copy(n.nodeSplit),
copy(n.nodeFusions),
)
copy(n::DataTaskNode) = DataTaskNode(
copy(n.task),
copy(n.parents),
copy(n.children),
UUIDs.uuid1(rng[threadid()]),
copy(n.nodeReduction),
copy(n.nodeSplit),
copy(n.nodeFusion),
n.name,
)
"""
make_node(t::AbstractTask)
@ -54,8 +12,8 @@ end
Construct and return a new [`DataTaskNode`](@ref) with the given task.
"""
function make_node(t::AbstractDataTask, name::String = "")
return DataTaskNode(t, name)
function make_node(t::AbstractDataTask)
return DataTaskNode(t)
end
"""

View File

@ -15,12 +15,3 @@ Print a short string representation of the edge to io.
function show(io::IO, e::Edge)
return print(io, "Edge(", e.edge[1], ", ", e.edge[2], ")")
end
"""
to_var_name(id::UUID)
Return the uuid as a string usable as a variable name in code generation.
"""
function to_var_name(id::UUID)
return replace(string(id), "-" => "_")
end

View File

@ -12,18 +12,6 @@ Return whether this node is an exit node of its graph, i.e., it has no parents.
"""
is_exit_node(node::Node) = length(node.parents) == 0
"""
data(edge::Edge)
Return the data transferred by this edge, i.e., 0 if the child is a [`ComputeTaskNode`](@ref), otherwise the child's `data()`.
"""
function data(edge::Edge)
if typeof(edge.edge[1]) <: DataTaskNode
return data(edge.edge[1].task)
end
return 0.0
end
"""
children(node::Node)

View File

@ -52,9 +52,6 @@ mutable struct DataTaskNode <: Node
# the node fusion involving this node, if it exists
nodeFusion::Union{Operation, Missing}
# for input nodes we need a name for the node to distinguish between them
name::String
end
"""
@ -84,6 +81,25 @@ mutable struct ComputeTaskNode <: Node
nodeFusions::Vector{Operation}
end
DataTaskNode(t::AbstractDataTask) = DataTaskNode(
t,
Vector{Node}(),
Vector{Node}(),
UUIDs.uuid1(rng[threadid()]),
missing,
missing,
missing,
)
ComputeTaskNode(t::AbstractComputeTask) = ComputeTaskNode(
t,
Vector{Node}(),
Vector{Node}(),
UUIDs.uuid1(rng[threadid()]),
missing,
missing,
Vector{NodeFusion}(),
)
"""
Edge
@ -100,3 +116,23 @@ struct Edge
Tuple{ComputeTaskNode, DataTaskNode},
}
end
copy(m::Missing) = missing
copy(n::ComputeTaskNode) = ComputeTaskNode(
copy(n.task),
copy(n.parents),
copy(n.children),
UUIDs.uuid1(rng[threadid()]),
copy(n.nodeReduction),
copy(n.nodeSplit),
copy(n.nodeFusions),
)
copy(n::DataTaskNode) = DataTaskNode(
copy(n.task),
copy(n.parents),
copy(n.children),
UUIDs.uuid1(rng[threadid()]),
copy(n.nodeReduction),
copy(n.nodeSplit),
copy(n.nodeFusion),
)

View File

@ -40,9 +40,6 @@ function apply_operation!(graph::DAG, operation::NodeFusion)
operation.input[2],
operation.input[3],
)
graph.properties += GraphProperties(diff)
return AppliedNodeFusion(operation, diff)
end
@ -55,9 +52,6 @@ Return an [`AppliedNodeReduction`](@ref) object generated from the graph's [`Dif
"""
function apply_operation!(graph::DAG, operation::NodeReduction)
diff = node_reduction!(graph, operation.input)
graph.properties += GraphProperties(diff)
return AppliedNodeReduction(operation, diff)
end
@ -70,9 +64,6 @@ Return an [`AppliedNodeSplit`](@ref) object generated from the graph's [`Diff`](
"""
function apply_operation!(graph::DAG, operation::NodeSplit)
diff = node_split!(graph, operation.input)
graph.properties += GraphProperties(diff)
return AppliedNodeSplit(operation, diff)
end
@ -136,9 +127,6 @@ function revert_diff!(graph::DAG, diff::Diff)
for edge in diff.removedEdges
insert_edge!(graph, edge.edge[1], edge.edge[2], false)
end
graph.properties -= GraphProperties(diff)
return nothing
end
@ -160,6 +148,7 @@ function node_fusion!(
# clear snapshot
get_snapshot_diff(graph)
# save children and parents
n1_children = children(n1)
n3_parents = parents(n3)
@ -180,18 +169,26 @@ function node_fusion!(
ComputeTaskNode(FusedComputeTask{typeof(n1.task), typeof(n3.task)}())
insert_node!(graph, new_node)
# use a set for combined children of n1 and n3 to not get duplicates
n1and3_children = Set{Node}()
# remove edges from n1 children to n1
for child in n1_children
remove_edge!(graph, child, n1)
push!(n1and3_children, child)
end
# remove edges from n3 children to n3
for child in n3_children
remove_edge!(graph, child, n3)
push!(n1and3_children, child)
end
for child in n1and3_children
insert_edge!(graph, child, new_node)
end
for child in n3_children
remove_edge!(graph, child, n3)
if !(child in n1_children)
insert_edge!(graph, child, new_node)
end
end
# "repoint" parents of n3 from new node
for parent in n3_parents
remove_edge!(graph, n3, parent)
insert_edge!(graph, new_node, parent)

View File

@ -71,8 +71,14 @@ function find_reductions!(graph::DAG, node::Node)
partners_ = partners(node)
delete!(partners_, node)
for partner in partners_
@assert partner in graph.nodes
if partner ∉ graph.nodes
error("Partner is not part of the graph")
end
if can_reduce(node, partner)
if Set(node.children) != Set(partner.children)
error("Not equal children")
end
if reductionVector === nothing
# only when there's at least one reduction partner, insert the vector
reductionVector = Vector{Node}()

View File

@ -1,73 +0,0 @@
"""
GraphProperties()
Create an empty [`GraphProperties`](@ref) object.
"""
function GraphProperties()
return (
data = 0.0,
computeEffort = 0.0,
computeIntensity = 0.0,
cost = 0.0,
noNodes = 0,
noEdges = 0,
)::GraphProperties
end
"""
GraphProperties(graph::DAG)
Calculate the graph's properties and return the constructed [`GraphProperties`](@ref) object.
"""
function GraphProperties(graph::DAG)
# make sure the graph is fully generated
apply_all!(graph)
d = 0.0
ce = 0.0
ed = 0
for node in graph.nodes
d += data(node.task) * length(node.parents)
ce += compute_effort(node.task)
ed += length(node.parents)
end
return (
data = d,
computeEffort = ce,
computeIntensity = (d == 0) ? 0.0 : ce / d,
cost = 0.0, # TODO
noNodes = length(graph.nodes),
noEdges = ed,
)::GraphProperties
end
"""
GraphProperties(diff::Diff)
Create the graph properties difference from a given [`Diff`](@ref).
The graph's properties after applying the [`Diff`](@ref) will be `get_properties(graph) + GraphProperties(diff)`.
For reverting a diff, it's `get_properties(graph) - GraphProperties(diff)`.
"""
function GraphProperties(diff::Diff)
d = 0.0
ce = 0.0
c = 0.0 # TODO
ce =
reduce(+, compute_effort(n.task) for n in diff.addedNodes; init = 0.0) -
reduce(+, compute_effort(n.task) for n in diff.removedNodes; init = 0.0)
d =
reduce(+, data(e) for e in diff.addedEdges; init = 0.0) -
reduce(+, data(e) for e in diff.removedEdges; init = 0.0)
return (
data = d,
computeEffort = ce,
computeIntensity = (d == 0) ? 0.0 : ce / d,
cost = c,
noNodes = length(diff.addedNodes) - length(diff.removedNodes),
noEdges = length(diff.addedEdges) - length(diff.removedEdges),
)::GraphProperties
end

View File

@ -1,17 +0,0 @@
"""
GraphProperties
Representation of a [`DAG`](@ref)'s properties.
# Fields:
`.data`: The total data transfer.\\
`.computeEffort`: The total compute effort.\\
`.computeIntensity`: The compute intensity, will always equal `.computeEffort / .data`.\\
`.cost`: The estimated cost.\\
`.noNodes`: Number of [`Node`](@ref)s.\\
`.noEdges`: Number of [`Edge`](@ref)s.
"""
const GraphProperties = NamedTuple{
(:data, :computeEffort, :computeIntensity, :cost, :noNodes, :noEdges),
Tuple{Float64, Float64, Float64, Float64, Int, Int},
}

View File

@ -1,59 +0,0 @@
"""
-(prop1::GraphProperties, prop2::GraphProperties)
Subtract `prop2` from `prop1` and return the result as a new [`GraphProperties`](@ref).
Also take care to keep consistent compute intensity.
"""
function -(prop1::GraphProperties, prop2::GraphProperties)
return (
data = prop1.data - prop2.data,
computeEffort = prop1.computeEffort - prop2.computeEffort,
computeIntensity = if (prop1.data - prop2.data == 0)
0.0
else
(prop1.computeEffort - prop2.computeEffort) /
(prop1.data - prop2.data)
end,
cost = prop1.cost - prop2.cost,
noNodes = prop1.noNodes - prop2.noNodes,
noEdges = prop1.noEdges - prop2.noEdges,
)::GraphProperties
end
"""
+(prop1::GraphProperties, prop2::GraphProperties)
Add `prop1` and `prop2` and return the result as a new [`GraphProperties`](@ref).
Also take care to keep consistent compute intensity.
"""
function +(prop1::GraphProperties, prop2::GraphProperties)
return (
data = prop1.data + prop2.data,
computeEffort = prop1.computeEffort + prop2.computeEffort,
computeIntensity = if (prop1.data + prop2.data == 0)
0.0
else
(prop1.computeEffort + prop2.computeEffort) /
(prop1.data + prop2.data)
end,
cost = prop1.cost + prop2.cost,
noNodes = prop1.noNodes + prop2.noNodes,
noEdges = prop1.noEdges + prop2.noEdges,
)::GraphProperties
end
"""
-(prop::GraphProperties)
Unary negation of the graph properties. `.computeIntensity` will not be negated because `.data` and `.computeEffort` both are.
"""
function -(prop::GraphProperties)
return (
data = -prop.data,
computeEffort = -prop.computeEffort,
computeIntensity = prop.computeIntensity, # no negation here!
cost = -prop.cost,
noNodes = -prop.noNodes,
noEdges = -prop.noEdges,
)::GraphProperties
end
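
For reference, the arithmetic removed here behaved as exercised by the (also removed) GraphProperties unit tests; a schematic sketch against the pre-change API:

```julia
using MetagraphOptimization

a = (
    data = 5.0,
    computeEffort = 6.0,
    computeIntensity = 6.0 / 5.0,
    cost = 0.0,
    noNodes = 2,
    noEdges = 3,
)::GraphProperties

a + GraphProperties() == a                      # the empty properties are the neutral element
(a - a) == GraphProperties()
(-a).computeIntensity == a.computeIntensity     # the compute intensity keeps its sign
```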

View File

@ -7,24 +7,6 @@ function compute(t::AbstractTask; data...)
return error("Need to implement compute()")
end
"""
compute(t::FusedComputeTask; data...)
Compute a fused compute task.
"""
function compute(t::FusedComputeTask; data...)
(T1, T2) = collect(typeof(t).parameters)
return compute(T2(), compute(T1(), data))
end
"""
compute(t::AbstractDataTask; data...)
The compute function of a data task, always the identity function, regardless of the specific task.
"""
compute(t::AbstractDataTask; data...) = data
"""
compute_effort(t::AbstractTask)
@ -51,6 +33,13 @@ Return the compute effort of a data task, always zero, regardless of the specifi
"""
compute_effort(t::AbstractDataTask) = 0
"""
compute(t::AbstractDataTask; data...)
The compute function of a data task, always the identity function, regardless of the specific task.
"""
compute(t::AbstractDataTask; data...) = data
"""
data(t::AbstractDataTask)
@ -75,36 +64,12 @@ function compute_effort(t::FusedComputeTask)
return compute_effort(T1()) + compute_effort(T2())
end
# actual compute functions for the tasks can stay undefined for now
# compute(t::ComputeTaskU, data::Any) = mycomputation(data)
"""
get_types(::FusedComputeTask{T1, T2})
Return a tuple of the fused compute task's component types.
"""
get_types(::FusedComputeTask{T1, T2}) where {T1, T2} = (T1, T2)
"""
get_expression(t::AbstractTask)
Return an expression evaluating the given task on the :dataIn symbol
"""
function get_expression(t::AbstractTask)
return quote
dataOut = compute($t, dataIn)
end
end
"""
get_expression()
"""
function get_expression(
t::FusedComputeTask,
inSymbol::Symbol,
outSymbol::Symbol,
)
#TODO
computeExp = quote
$outSymbol = compute($t, $inSymbol)
end
return computeExp
end

View File

@ -3,7 +3,7 @@ using Random
function test_known_graph(name::String, n, fusion_test = true)
@testset "Test $name Graph ($n)" begin
graph = parse_abc(joinpath(@__DIR__, "..", "input", "$name.txt"))
props = get_properties(graph)
props = graph_properties(graph)
if (fusion_test)
test_node_fusion(graph)
@ -14,13 +14,13 @@ end
function test_node_fusion(g::DAG)
@testset "Test Node Fusion" begin
props = get_properties(g)
props = graph_properties(g)
options = get_operations(g)
nodes_number = length(g.nodes)
data = props.data
compute_effort = props.computeEffort
compute_effort = props.compute_effort
while !isempty(options.nodeFusions)
fusion = first(options.nodeFusions)
@ -29,13 +29,13 @@ function test_node_fusion(g::DAG)
push_operation!(g, fusion)
props = get_properties(g)
props = graph_properties(g)
@test props.data < data
@test props.computeEffort == compute_effort
@test props.compute_effort == compute_effort
nodes_number = length(g.nodes)
data = props.data
compute_effort = props.computeEffort
compute_effort = props.compute_effort
options = get_operations(g)
end
@ -49,7 +49,7 @@ function test_random_walk(g::DAG, n::Int64)
@test is_valid(g)
properties = get_properties(g)
properties = graph_properties(g)
for i in 1:n
# choose push or pop
@ -82,7 +82,7 @@ function test_random_walk(g::DAG, n::Int64)
@test is_valid(g)
@test properties == get_properties(g)
@test properties == graph_properties(g)
end
end

View File

@ -5,10 +5,8 @@ using Test
include("unit_tests_utility.jl")
include("unit_tests_tasks.jl")
include("unit_tests_nodes.jl")
include("unit_tests_properties.jl")
include("node_reduction.jl")
include("unit_tests_graph.jl")
include("unit_tests_execution.jl")
include("known_graphs.jl")
end

View File

@ -1,31 +0,0 @@
import MetagraphOptimization.A
import MetagraphOptimization.B
import MetagraphOptimization.ParticleType
@testset "Unit Tests Graph" begin
particles = Dict{ParticleType, Vector{Particle}}(
(
A => [
Particle(0.823648, 0.0, 0.0, 0.823648, A),
Particle(0.823648, -0.835061, -0.474802, 0.277915, A),
]
),
(
B => [
Particle(0.823648, 0.0, 0.0, -0.823648, B),
Particle(0.823648, 0.835061, 0.474802, -0.277915, B),
]
),
)
expected_result = 5.5320567694746876e-5
for _ in 1:10 # test in a loop because graph layout should not change the result
graph = parse_abc(joinpath(@__DIR__, "..", "input", "AB->AB.txt"))
@test isapprox(execute(graph, particles), expected_result; rtol = 0.001)
code = MetagraphOptimization.gen_code(graph)
@test isapprox(execute(code, particles), expected_result; rtol = 0.001)
end
end
println("Execution Unit Tests Complete!")

View File

@ -69,7 +69,7 @@ import MetagraphOptimization.partners
@test length(graph.nodes) == 26
@test length(graph.dirtyNodes) == 26
# now for all the edges
# now for all the edges
insert_edge!(graph, d_PB, PB, false)
insert_edge!(graph, d_PA, PA, false)
insert_edge!(graph, d_PBp, PBp, false)
@ -142,12 +142,12 @@ import MetagraphOptimization.partners
@test operations == get_operations(graph)
nf = first(operations.nodeFusions)
properties = get_properties(graph)
@test properties.computeEffort == 28
properties = graph_properties(graph)
@test properties.compute_effort == 134
@test properties.data == 62
@test properties.computeIntensity ≈ 28 / 62
@test properties.noNodes == 26
@test properties.noEdges == 25
@test properties.compute_intensity ≈ 134 / 62
@test properties.nodes == 26
@test properties.edges == 25
push_operation!(graph, nf)
# **does not immediately apply the operation**
@ -161,17 +161,17 @@ import MetagraphOptimization.partners
(addedNodes = 0, removedNodes = 0, addedEdges = 0, removedEdges = 0)
# this applies pending operations
properties = get_properties(graph)
properties = graph_properties(graph)
@test length(graph.nodes) == 24
@test length(graph.appliedOperations) == 1
@test length(graph.operationsToApply) == 0
@test length(graph.dirtyNodes) != 0
@test properties.noNodes == 24
@test properties.noEdges == 23
@test properties.computeEffort == 28
@test properties.nodes == 24
@test properties.edges == 23
@test properties.compute_effort == 134
@test properties.data < 62
@test properties.computeIntensity > 28 / 62
@test properties.compute_intensity > 134 / 62
operations = get_operations(graph)
@test length(graph.dirtyNodes) == 0
@ -205,12 +205,12 @@ import MetagraphOptimization.partners
@test length(graph.appliedOperations) == 0
@test length(graph.operationsToApply) == 0
properties = get_properties(graph)
@test properties.noNodes == 26
@test properties.noEdges == 25
@test properties.computeEffort == 28
properties = graph_properties(graph)
@test properties.nodes == 26
@test properties.edges == 25
@test properties.compute_effort == 134
@test properties.data == 62
@test properties.computeIntensity ≈ 28 / 62
@test properties.compute_intensity ≈ 134 / 62
operations = get_operations(graph)
@test length(operations) ==

View File

@ -1,52 +0,0 @@
@testset "GraphProperties Unit Tests" begin
prop = GraphProperties()
@test prop.data == 0.0
@test prop.computeEffort == 0.0
@test prop.computeIntensity == 0.0
@test prop.cost == 0.0
@test prop.noNodes == 0.0
@test prop.noEdges == 0.0
prop2 = (
data = 5.0,
computeEffort = 6.0,
computeIntensity = 6.0 / 5.0,
cost = 0.0,
noNodes = 2,
noEdges = 3,
)::GraphProperties
@test prop + prop2 == prop2
@test prop2 - prop == prop2
negProp = -prop2
@test negProp.data == -5.0
@test negProp.computeEffort == -6.0
@test negProp.computeIntensity == 6.0 / 5.0
@test negProp.cost == 0.0
@test negProp.noNodes == -2
@test negProp.noEdges == -3
@test negProp + prop2 == GraphProperties()
prop3 = (
data = 7.0,
computeEffort = 3.0,
computeIntensity = 7.0 / 3.0,
cost = 0.0,
noNodes = -3,
noEdges = 2,
)::GraphProperties
propSum = prop2 + prop3
@test propSum.data == 12.0
@test propSum.computeEffort == 9.0
@test propSum.computeIntensity == 9.0 / 12.0
@test propSum.cost == 0.0
@test propSum.noNodes == -1
@test propSum.noEdges == 5
end
println("GraphProperties Unit Tests Complete!")

View File

@ -10,11 +10,11 @@
Data10 = MetagraphOptimization.DataTask(10)
Data20 = MetagraphOptimization.DataTask(20)
@test MetagraphOptimization.compute_effort(S1) == 11
@test MetagraphOptimization.compute_effort(S2) == 12
@test MetagraphOptimization.compute_effort(U) == 1
@test MetagraphOptimization.compute_effort(V) == 6
@test MetagraphOptimization.compute_effort(P) == 0
@test MetagraphOptimization.compute_effort(S1) == 10
@test MetagraphOptimization.compute_effort(S2) == 10
@test MetagraphOptimization.compute_effort(U) == 6
@test MetagraphOptimization.compute_effort(V) == 20
@test MetagraphOptimization.compute_effort(P) == 15
@test MetagraphOptimization.compute_effort(Sum) == 1
@test MetagraphOptimization.compute_effort(Data10) == 0
@test MetagraphOptimization.compute_effort(Data20) == 0