Improve interface, add random walk and reduction implementations, add tests

Anton Reinhard 2023-11-21 19:57:45 +01:00
parent 7d31f61e5f
commit 968f6856de
17 changed files with 235 additions and 112 deletions

View File

@@ -17,9 +17,8 @@ println("Parsing DAG")
println("Generating input data")
@time input_data = [gen_process_input(process) for _ in 1:1000]
include("profiling_utilities.jl")
println("Reducing graph")
@time reduce_all!(graph)
@time optimize_to_fixpoint!(ReductionOptimizer(), graph)
println("Generating compute function")
@time compute_func = get_compute_function(graph, process, machine)

View File

@@ -17,9 +17,8 @@ println("Parsing DAG")
println("Generating input data")
@time input_data = [gen_process_input(process) for _ in 1:1000]
include("profiling_utilities.jl")
println("Reducing graph")
@time reduce_all!(graph)
@time optimize_to_fixpoint!(ReductionOptimizer(), graph)
println("Generating compute function")
@time compute_func = get_compute_function(graph, process, machine)

View File

@@ -1,59 +0,0 @@
function random_walk!(g::DAG, n::Int64)
# the purpose here is to do "random" operations on the graph to simulate an optimizer
reset_graph!(g)
properties = get_properties(g)
for i in 1:n
# choose push or pop
if rand(Bool)
# push
opt = get_operations(g)
# choose one of fuse/split/reduce
option = rand(1:3)
if option == 1 && !isempty(opt.nodeFusions)
push_operation!(g, rand(collect(opt.nodeFusions)))
elseif option == 2 && !isempty(opt.nodeReductions)
push_operation!(g, rand(collect(opt.nodeReductions)))
elseif option == 3 && !isempty(opt.nodeSplits)
push_operation!(g, rand(collect(opt.nodeSplits)))
else
i = i - 1
end
else
# pop
if (can_pop(g))
pop_operation!(g)
else
i = i - 1
end
end
end
return nothing
end
function reduce_all!(g::DAG)
reset_graph!(g)
opt = get_operations(g)
while (!isempty(opt.nodeReductions))
push_operation!(g, pop!(opt.nodeReductions))
if (isempty(opt.nodeReductions))
opt = get_operations(g)
end
end
return nothing
end
function reduce_one!(g::DAG)
opt = get_operations(g)
if !isempty(opt.nodeReductions)
push_operation!(g, pop!(opt.nodeReductions))
end
opt = get_operations(g)
return nothing
end
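Note: the helpers deleted from `examples/profiling_utilities.jl` are superseded by the optimizer interface added in this commit. A rough migration sketch, assuming a `graph::DAG` parsed as in the examples:

```julia
using Random

# reduce_all!(graph) becomes:
optimize_to_fixpoint!(ReductionOptimizer(), graph)

# reduce_one!(graph) becomes:
optimize_step!(ReductionOptimizer(), graph)

# random_walk!(graph, 50) becomes:
optimize!(RandomWalkOptimizer(Random.default_rng()), graph, 50)
```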

View File

@@ -99,8 +99,7 @@
}
],
"source": [
"include(\"../examples/profiling_utilities.jl\")\n",
"@time reduce_all!(graph)\n",
"@time optimize_to_fixpoint!(ReductionOptimizer(), graph)\n",
"print(graph)"
]
},

View File

@@ -211,10 +211,8 @@
"metadata": {},
"outputs": [],
"source": [
"include(\"../examples/profiling_utilities.jl\")\n",
"\n",
"# We can also mute the graph by applying some operations to it\n",
"reduce_all!(graph)"
"optimize_to_fixpoint!(ReductionOptimizer(), graph)"
]
},
{

View File

@@ -30,8 +30,7 @@
"metadata": {},
"outputs": [],
"source": [
"include(\"../examples/profiling_utilities.jl\")\n",
"@ProfileView.profview reduce_all!(graph)"
"@ProfileView.profview optimize_to_fixpoint!(ReductionOptimizer(), graph)"
]
},
{

View File

@@ -34,6 +34,7 @@ export compute_effort
export task
export get_properties
export get_exit_node
export operation_stack_length
export is_valid, is_scheduled
# graph operation related
@@ -70,8 +71,9 @@ export cost_type, graph_cost, operation_effect
export GlobalMetricEstimator, CDCost
# optimization
export AbstractOptimizer, GreedyOptimizer
export AbstractOptimizer, GreedyOptimizer, ReductionOptimizer, RandomWalkOptimizer
export optimize_step!, optimize!
export fixpoint_reached, optimize_to_fixpoint!
# machine info
export Machine
@@ -144,6 +146,8 @@ include("estimator/global_metric.jl")
include("optimization/interface.jl")
include("optimization/greedy.jl")
include("optimization/random_walk.jl")
include("optimization/reduce.jl")
include("models/interface.jl")
include("models/print.jl")

View File

@@ -43,3 +43,12 @@ function get_entry_nodes(graph::DAG)
end
return result
end
"""
operation_stack_length(graph::DAG)
Return the number of operations applied to the graph.
"""
function operation_stack_length(graph::DAG)
return length(graph.appliedOperations) + length(graph.operationsToApply)
end
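A short usage sketch for the new `operation_stack_length` helper, mirroring the unit tests added in this commit (input path taken from the test suite):

```julia
using MetagraphOptimization

graph = parse_dag(joinpath(@__DIR__, "..", "input", "AB->AB.txt"), ABCModel())
@assert operation_stack_length(graph) == 0   # fresh graph: nothing applied or queued

optimize_step!(GreedyOptimizer(GlobalMetricEstimator()), graph)
@assert operation_stack_length(graph) == 1   # one operation is now on the stack
```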

View File

@@ -2,6 +2,8 @@
GreedyOptimizer
An implementation of the greedy optimization algorithm, simply choosing the best next option evaluated with the given estimator.
The fixpoint is reached when any leftover operation would increase the graph's total cost according to the given estimator.
"""
struct GreedyOptimizer{EstimatorType <: AbstractEstimator} <: AbstractOptimizer
estimator::EstimatorType
@@ -11,22 +13,61 @@ function optimize_step!(optimizer::GreedyOptimizer, graph::DAG)
# generate all options
operations = get_operations(graph)
if isempty(operations)
println("[warn] No operations left to apply")
return nothing
return false
end
lowestCost = typemax(cost_type(optimizer.estimator))
result = nothing
result = reduce((acc, op) -> begin
op_cost = operation_effect(optimizer.estimator, graph, op)
if op_cost < lowestCost
lowestCost = op_cost
return op
end
return acc
end, operations)
lowestCost = reduce(
(acc, op) -> begin
op_cost = operation_effect(optimizer.estimator, graph, op)
if op_cost < acc
result = op
return op_cost
end
return acc
end,
operations;
init = typemax(cost_type(optimizer.estimator)),
)
if lowestCost > zero(cost_type(optimizer.estimator))
return false
end
push_operation!(graph, result)
return true
end
function fixpoint_reached(optimizer::GreedyOptimizer, graph::DAG)
# generate all options
operations = get_operations(graph)
if isempty(operations)
return true
end
lowestCost = reduce(
(acc, op) -> begin
op_cost = operation_effect(optimizer.estimator, graph, op)
if op_cost < acc
return op_cost
end
return acc
end,
operations;
init = typemax(cost_type(optimizer.estimator)),
)
if lowestCost > zero(cost_type(optimizer.estimator))
return true
end
return false
end
function optimize_to_fixpoint!(optimizer::GreedyOptimizer, graph::DAG)
while optimize_step!(optimizer, graph)
end
return nothing
end
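A minimal usage sketch for the greedy optimizer, following the pattern used in the unit tests (input path illustrative):

```julia
using MetagraphOptimization

graph = parse_dag(joinpath(@__DIR__, "..", "input", "AB->ABBB.txt"), ABCModel())
optimizer = GreedyOptimizer(GlobalMetricEstimator())

# stops once no remaining operation lowers the estimated cost
optimize_to_fixpoint!(optimizer, graph)
@assert fixpoint_reached(optimizer, graph)
```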

View File

@@ -9,7 +9,7 @@ abstract type AbstractOptimizer end
"""
optimize_step!(optimizer::AbstractOptimizer, graph::DAG)
Interface function that must be implemented by implementations of [`AbstractOptimizer`](@ref). Returns `nothing`.
Interface function that must be implemented by implementations of [`AbstractOptimizer`](@ref). Returns `true` if an operation has been applied, `false` if not, usually when a fixpoint of the algorithm has been reached.
It should perform one smallest logical step on the given [`DAG`](@ref), mutating the graph and, if necessary, the optimizer's state.
"""
@@ -18,13 +18,43 @@ function optimize_step! end
"""
optimize!(optimizer::AbstractOptimizer, graph::DAG, n::Int)
Function calling the given optimizer `n` times, muting the graph. Returns `nothing`.
Function calling the given optimizer `n` times, mutating the graph. Returns `true` if the requested number of operations has been applied, `false` if not, usually when a fixpoint of the algorithm has been reached.
If a more efficient method exists, this can be overloaded for a specific optimizer.
"""
function optimize!(optimizer::AbstractOptimizer, graph::DAG, n::Int)
for i in 1:n
optimize_step!(optimizer, graph)
if !optimize_step!(optimizer, graph)
return false
end
end
return nothing
return true
end
"""
fixpoint_reached(optimizer::AbstractOptimizer, graph::DAG)
Interface function that can be implemented by optimization algorithms that can reach a fixpoint, returning a `Bool` indicating whether it has been reached. The default implementation returns `false`.
See also: [`optimize_to_fixpoint!`](@ref)
"""
function fixpoint_reached(optimizer::AbstractOptimizer, graph::DAG)
return false
end
"""
optimize_to_fixpoint!(optimizer::AbstractOptimizer, graph::DAG)
Interface function that can be implemented by optimization algorithms that can reach a fixpoint. The algorithm will be run until that fixpoint is reached, at which point [`fixpoint_reached`](@ref) should return `true`.
A usual implementation might look like this:
```julia
function optimize_to_fixpoint!(optimizer::MyOptimizer, graph::DAG)
while !fixpoint_reached(optimizer, graph)
optimize_step!(optimizer, graph)
end
return nothing
end
```
"""
function optimize_to_fixpoint! end
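To illustrate the interface as a whole, here is a minimal sketch of a custom optimizer. `FirstSplitOptimizer` and its policy are hypothetical; only `optimize_step!` is strictly required, the fixpoint methods are optional:

```julia
# Hypothetical optimizer: apply node splits one at a time until none remain.
struct FirstSplitOptimizer <: AbstractOptimizer end

function optimize_step!(::FirstSplitOptimizer, graph::DAG)
    operations = get_operations(graph)
    isempty(operations.nodeSplits) && return false   # nothing left to apply
    push_operation!(graph, first(operations.nodeSplits))
    return true
end

fixpoint_reached(::FirstSplitOptimizer, graph::DAG) = isempty(get_operations(graph).nodeSplits)

function optimize_to_fixpoint!(optimizer::FirstSplitOptimizer, graph::DAG)
    while !fixpoint_reached(optimizer, graph)
        optimize_step!(optimizer, graph)
    end
    return nothing
end
```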

View File

@@ -0,0 +1,49 @@
using Random
"""
RandomWalkOptimizer
An optimizer that randomly pushes or pops operations. It doesn't optimize in any direction and is useful mainly for testing purposes.
This algorithm never reaches a fixpoint, so it does not implement [`optimize_to_fixpoint!`](@ref).
"""
struct RandomWalkOptimizer <: AbstractOptimizer
rng::AbstractRNG
end
function optimize_step!(optimizer::RandomWalkOptimizer, graph::DAG)
operations = get_operations(graph)
if sum(length(operations)) == 0 && length(graph.appliedOperations) + length(graph.operationsToApply) == 0
# in case there are zero operations possible at all on the graph
return false
end
r = optimizer.rng
# try until something was applied or popped
while true
# choose push or pop
if rand(r, Bool)
# push
# choose one of fuse/split/reduce
option = rand(r, 1:3)
if option == 1 && !isempty(operations.nodeFusions)
push_operation!(graph, rand(r, collect(operations.nodeFusions)))
return true
elseif option == 2 && !isempty(operations.nodeReductions)
push_operation!(graph, rand(r, collect(operations.nodeReductions)))
return true
elseif option == 3 && !isempty(operations.nodeSplits)
push_operation!(graph, rand(r, collect(operations.nodeSplits)))
return true
end
else
# pop
if (can_pop(graph))
pop_operation!(graph)
return true
end
end
end
end
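Usage sketch, matching the test suite: take a fixed number of random push/pop steps on a parsed graph and verify it stays valid.

```julia
using Random
using MetagraphOptimization

graph = parse_dag(joinpath(@__DIR__, "..", "input", "AB->AB.txt"), ABCModel())
optimize!(RandomWalkOptimizer(Random.default_rng()), graph, 100)
@assert is_valid(graph)
```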

View File

@@ -0,0 +1,30 @@
"""
ReductionOptimizer
An optimizer that simply applies an available [`NodeReduction`](@ref) on each step. It implements [`optimize_to_fixpoint!`](@ref). The fixpoint is reached when there are no more possible [`NodeReduction`](@ref)s in the graph.
"""
struct ReductionOptimizer <: AbstractOptimizer end
function optimize_step!(optimizer::ReductionOptimizer, graph::DAG)
# generate all options
operations = get_operations(graph)
if fixpoint_reached(optimizer, graph)
return false
end
push_operation!(graph, first(operations.nodeReductions))
return true
end
function fixpoint_reached(optimizer::ReductionOptimizer, graph::DAG)
operations = get_operations(graph)
return isempty(operations.nodeReductions)
end
function optimize_to_fixpoint!(optimizer::ReductionOptimizer, graph::DAG)
while !fixpoint_reached(optimizer, graph)
optimize_step!(optimizer, graph)
end
return nothing
end
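Usage sketch, as in the updated examples and notebooks: reduce the graph to its fixpoint, then print it.

```julia
using MetagraphOptimization

graph = parse_dag(joinpath(@__DIR__, "..", "input", "AB->AB.txt"), ABCModel())
optimize_to_fixpoint!(ReductionOptimizer(), graph)
print(graph)
```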

View File

@@ -17,15 +17,16 @@ copy(t::AbstractComputeTask) = typeof(t)()
Return a copy of the given [`FusedComputeTask`](@ref).
"""
function copy(t::FusedComputeTask{T1, T2}) where {T1, T2}
return FusedComputeTask{T1, T2}(
copy(t.first_task),
copy(t.second_task),
copy(t.t1_inputs),
t.t1_output,
copy(t.t2_inputs),
)
function copy(t::FusedComputeTask)
return FusedComputeTask(copy(t.first_task), copy(t.second_task), copy(t.t1_inputs), t.t1_output, copy(t.t2_inputs))
end
FusedComputeTask{T1, T2}(t1_inputs::Vector{String}, t1_output::String, t2_inputs::Vector{String}) where {T1, T2} =
FusedComputeTask{T1, T2}(T1(), T2(), t1_inputs, t1_output, t2_inputs)
function FusedComputeTask(
T1::Type{<:AbstractComputeTask},
T2::Type{<:AbstractComputeTask},
t1_inputs::Vector{String},
t1_output::String,
t2_inputs::Vector{String},
)
return FusedComputeTask(T1(), T2(), t1_inputs, t1_output, t2_inputs)
end

View File

@@ -79,4 +79,4 @@ end
Return a tuple of the fused compute task's components' types.
"""
get_types(::FusedComputeTask{T1, T2}) where {T1, T2} = (T1, T2)
get_types(t::FusedComputeTask) = (typeof(t.first_task), typeof(t.second_task))

View File

@@ -26,9 +26,9 @@ A fused compute task made up of the computation of first `T1` and then `T2`.
Also see: [`get_types`](@ref).
"""
struct FusedComputeTask{T1 <: AbstractComputeTask, T2 <: AbstractComputeTask} <: AbstractComputeTask
first_task::T1
second_task::T2
struct FusedComputeTask <: AbstractComputeTask
first_task::AbstractComputeTask
second_task::AbstractComputeTask
# the names of the inputs for T1
t1_inputs::Vector{Symbol}
# output name of T1

View File

@@ -3,10 +3,10 @@ import MetagraphOptimization.interaction_result
using QEDbase
using AccurateArithmetic
include("../examples/profiling_utilities.jl")
using Random
const RTOL = sqrt(eps(Float64))
RNG = Random.default_rng()
function check_particle_reverse_moment(p1::SFourMomentum, p2::SFourMomentum)
@test isapprox(abs(p1.E), abs(p2.E))
@@ -83,7 +83,7 @@ end
@testset "AB->AB after random walk" begin
for i in 1:200
graph = parse_dag(joinpath(@__DIR__, "..", "input", "AB->AB.txt"), ABCModel())
random_walk!(graph, 50)
optimize!(RandomWalkOptimizer(RNG), graph, 50)
@test is_valid(graph)
@@ -115,7 +115,7 @@ end
@testset "AB->ABBB after random walk" begin
for i in 1:50
graph = parse_dag(joinpath(@__DIR__, "..", "input", "AB->ABBB.txt"), ABCModel())
random_walk!(graph, 100)
optimize!(RandomWalkOptimizer(RNG), graph, 100)
@test is_valid(graph)
@test isapprox(execute(graph, process_2_4, machine, particles_2_4), expected_result; rtol = RTOL)

View File

@@ -1,18 +1,42 @@
using Random
RNG = Random.default_rng()
@testset "Unit Tests Optimization" begin
graph = parse_dag(joinpath(@__DIR__, "..", "input", "AB->AB.txt"), ABCModel())
graph = parse_dag(joinpath(@__DIR__, "..", "input", "AB->ABBB.txt"), ABCModel())
@testset "Greedy Optimizer" begin
estimator = GlobalMetricEstimator()
optimizer = GreedyOptimizer(estimator)
# create the optimizers
FIXPOINT_OPTIMIZERS = [GreedyOptimizer(GlobalMetricEstimator()), ReductionOptimizer()]
NO_FIXPOINT_OPTIMIZERS = [RandomWalkOptimizer(RNG)]
optimize_step!(optimizer, graph)
@testset "Optimizer $optimizer" for optimizer in vcat(NO_FIXPOINT_OPTIMIZERS, FIXPOINT_OPTIMIZERS)
@test operation_stack_length(graph) == 0
@test optimize_step!(optimizer, graph)
@test length(graph.operationsToApply) + length(graph.appliedOperations) == 1
@test !fixpoint_reached(optimizer, graph)
@test operation_stack_length(graph) == 1
optimize!(optimizer, graph, 10)
@test optimize!(optimizer, graph, 10)
@test length(graph.operationsToApply) + length(graph.appliedOperations) == 11
@test !fixpoint_reached(optimizer, graph)
reset_graph!(graph)
end
@testset "Fixpoint optimizer $optimizer" for optimizer in FIXPOINT_OPTIMIZERS
@test operation_stack_length(graph) == 0
optimize_to_fixpoint!(optimizer, graph)
@test fixpoint_reached(optimizer, graph)
@test !optimize_step!(optimizer, graph)
@test !optimize!(optimizer, graph, 10)
reset_graph!(graph)
end
@testset "No fixpoint optimizer $optimizer" for optimizer in NO_FIXPOINT_OPTIMIZERS
@test_throws MethodError optimize_to_fixpoint!(optimizer, graph)
end
end
println("Optimization Unit Tests Complete!")