diff --git a/docs/make.jl b/docs/make.jl index 142681d..578cdb5 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -16,6 +16,7 @@ makedocs( "index.md", "Manual" => "manual.md", "Library" => [ + "Public" => "lib/public.md", "Graph" => "lib/internals/graph.md", "Node" => "lib/internals/node.md", "Task" => "lib/internals/task.md", diff --git a/docs/src/lib/internals/properties.md b/docs/src/lib/internals/properties.md new file mode 100644 index 0000000..5773020 --- /dev/null +++ b/docs/src/lib/internals/properties.md @@ -0,0 +1,22 @@ +# Properties + +## Type +```@autodocs +Modules = [MetagraphOptimization] +Pages = ["properties/type.jl"] +Order = [:type] +``` + +## Create +```@autodocs +Modules = [MetagraphOptimization] +Pages = ["properties/create.jl"] +Order = [:function] +``` + +## Utility +```@autodocs +Modules = [MetagraphOptimization] +Pages = ["properties/utility.jl"] +Order = [:function] +``` diff --git a/examples/plot_chain.jl b/examples/plot_chain.jl index 0298120..4e9cb1c 100644 --- a/examples/plot_chain.jl +++ b/examples/plot_chain.jl @@ -41,9 +41,9 @@ function gen_plot(filepath) i = i - 1 end - props = graph_properties(g) + props = get_properties(g) push!(x, props.data) - push!(y, props.compute_effort) + push!(y, props.computeEffort) end println("\rDone.") diff --git a/examples/plot_star.jl b/examples/plot_star.jl index 2dca0a2..3f82fb5 100644 --- a/examples/plot_star.jl +++ b/examples/plot_star.jl @@ -44,9 +44,9 @@ function gen_plot(filepath) - props = graph_properties(g) + props = get_properties(g) x0 = props.data - y0 = props.compute_effort + y0 = props.computeEffort x = Vector{Float64}() y = Vector{Float64}() @@ -55,9 +55,9 @@ function gen_plot(filepath) opt = get_operations(g) for op in opt.nodeFusions push_operation!(g, op) - props = graph_properties(g) + props = get_properties(g) push!(x, props.data) - push!(y, props.compute_effort) + push!(y, props.computeEffort) pop_operation!(g) push!( @@ -65,15 +65,15 @@ function gen_plot(filepath) "NF: (" * 
string(props.data) * ", " * - string(props.compute_effort) * + string(props.computeEffort) * ")", ) end for op in opt.nodeReductions push_operation!(g, op) - props = graph_properties(g) + props = get_properties(g) push!(x, props.data) - push!(y, props.compute_effort) + push!(y, props.computeEffort) pop_operation!(g) push!( @@ -81,15 +81,15 @@ function gen_plot(filepath) "NR: (" * string(props.data) * ", " * - string(props.compute_effort) * + string(props.computeEffort) * ")", ) end for op in opt.nodeSplits push_operation!(g, op) - props = graph_properties(g) + props = get_properties(g) push!(x, props.data) - push!(y, props.compute_effort) + push!(y, props.computeEffort) pop_operation!(g) push!( @@ -97,7 +97,7 @@ function gen_plot(filepath) "NS: (" * string(props.data) * ", " * - string(props.compute_effort) * + string(props.computeEffort) * ")", ) end diff --git a/examples/profiling_utilities.jl b/examples/profiling_utilities.jl index 0d14835..6aa7727 100644 --- a/examples/profiling_utilities.jl +++ b/examples/profiling_utilities.jl @@ -3,7 +3,7 @@ function test_random_walk(g::DAG, n::Int64) # the purpose here is to do "random" operations and reverse them again and validate that the graph stays the same and doesn't diverge reset_graph!(g) - properties = graph_properties(g) + properties = get_properties(g) for i in 1:n # choose push or pop diff --git a/src/MetagraphOptimization.jl b/src/MetagraphOptimization.jl index e4a0061..1e8c0c9 100644 --- a/src/MetagraphOptimization.jl +++ b/src/MetagraphOptimization.jl @@ -15,6 +15,8 @@ export AbstractComputeTask export AbstractDataTask export DataTask export FusedComputeTask +export PossibleOperations +export GraphProperties export make_node export make_edge @@ -25,7 +27,7 @@ export is_exit_node export parents export children export compute -export graph_properties +export get_properties export get_exit_node export is_valid @@ -55,6 +57,8 @@ export bytes_to_human_readable import Base.length import Base.show import Base.== 
+import Base.+ +import Base.- import Base.in import Base.copy import Base.isempty @@ -66,6 +70,7 @@ import Base.collect include("task/type.jl") include("node/type.jl") include("diff/type.jl") +include("properties/type.jl") include("operation/type.jl") include("graph/type.jl") @@ -96,6 +101,9 @@ include("operation/get.jl") include("operation/print.jl") include("operation/validate.jl") +include("properties/create.jl") +include("properties/utility.jl") + include("task/create.jl") include("task/compare.jl") include("task/print.jl") diff --git a/src/graph/print.jl b/src/graph/print.jl index 729b14c..73df06c 100644 --- a/src/graph/print.jl +++ b/src/graph/print.jl @@ -58,12 +58,12 @@ function show(io::IO, graph::DAG) end println(io) println(io, " Edges: ", noEdges) - properties = graph_properties(graph) - println(io, " Total Compute Effort: ", properties.compute_effort) + properties = get_properties(graph) + println(io, " Total Compute Effort: ", properties.computeEffort) println(io, " Total Data Transfer: ", properties.data) return println( io, " Total Compute Intensity: ", - properties.compute_intensity, + properties.computeIntensity, ) end diff --git a/src/graph/properties.jl b/src/graph/properties.jl index ed5e985..211d8be 100644 --- a/src/graph/properties.jl +++ b/src/graph/properties.jl @@ -1,31 +1,17 @@ """ - graph_properties(graph::DAG) + get_properties(graph::DAG) -Return the graph's properties, a named tuple with fields `.data`, `.compute_effort`, `.compute_intensity`, `.nodes` (number of nodes) and `.edges` (number of edges). +Return the graph's [`GraphProperties`](@ref). 
""" -function graph_properties(graph::DAG) +function get_properties(graph::DAG) # make sure the graph is fully generated apply_all!(graph) - d = 0 - ce = 0 - ed = 0 - for node in graph.nodes - d += data(node.task) * length(node.parents) - ce += compute_effort(node.task) - ed += length(node.parents) + if (graph.properties.computeEffort == 0.0) + graph.properties = GraphProperties(graph) end - ci = ce / d - - result = ( - data = d, - compute_effort = ce, - compute_intensity = ci, - nodes = length(graph.nodes), - edges = ed, - ) - return result + return graph.properties end """ diff --git a/src/graph/type.jl b/src/graph/type.jl index f65c228..64ef860 100644 --- a/src/graph/type.jl +++ b/src/graph/type.jl @@ -41,6 +41,9 @@ mutable struct DAG # "snapshot" system: keep track of added/removed nodes/edges since last snapshot # these are muted in insert_node! etc. diff::Diff + + # the cached properties of the DAG + properties::GraphProperties end """ @@ -69,5 +72,6 @@ function DAG() PossibleOperations(), Set{Node}(), Diff(), + GraphProperties(), ) end diff --git a/src/models/abc/parse.jl b/src/models/abc/parse.jl index 9553dc2..d78b23b 100644 --- a/src/models/abc/parse.jl +++ b/src/models/abc/parse.jl @@ -195,6 +195,14 @@ function parse_abc(filename::String, verbose::Bool = false) #put all nodes into dirty nodes set graph.dirtyNodes = copy(graph.nodes) + if (verbose) + println("Generating the graph's properties") + end + graph.properties = GraphProperties(graph) + + if (verbose) + println("Done") + end # don't actually need to read the edges return graph end diff --git a/src/node/create.jl b/src/node/create.jl index dd01d05..127f3c2 100644 --- a/src/node/create.jl +++ b/src/node/create.jl @@ -1,3 +1,43 @@ + +DataTaskNode(t::AbstractDataTask) = DataTaskNode( + t, + Vector{Node}(), + Vector{Node}(), + UUIDs.uuid1(rng[threadid()]), + missing, + missing, + missing, +) +ComputeTaskNode(t::AbstractComputeTask) = ComputeTaskNode( + t, + Vector{Node}(), + Vector{Node}(), + 
UUIDs.uuid1(rng[threadid()]), + missing, + missing, + Vector{NodeFusion}(), +) + +copy(m::Missing) = missing +copy(n::ComputeTaskNode) = ComputeTaskNode( + copy(n.task), + copy(n.parents), + copy(n.children), + UUIDs.uuid1(rng[threadid()]), + copy(n.nodeReduction), + copy(n.nodeSplit), + copy(n.nodeFusions), +) +copy(n::DataTaskNode) = DataTaskNode( + copy(n.task), + copy(n.parents), + copy(n.children), + UUIDs.uuid1(rng[threadid()]), + copy(n.nodeReduction), + copy(n.nodeSplit), + copy(n.nodeFusion), +) + """ make_node(t::AbstractTask) diff --git a/src/node/properties.jl b/src/node/properties.jl index 2edb400..b28a234 100644 --- a/src/node/properties.jl +++ b/src/node/properties.jl @@ -12,6 +12,18 @@ Return whether this node is an exit node of its graph, i.e., it has no parents. """ is_exit_node(node::Node) = length(node.parents) == 0 +""" + data(edge::Edge) + +Return the data transfered by this edge, i.e., 0 if the child is a [`ComputeTaskNode`](@ref), otherwise the child's `data()`. +""" +function data(edge::Edge) + if typeof(edge.edge[1]) <: DataTaskNode + return data(edge.edge[1].task) + end + return 0.0 +end + """ children(node::Node) diff --git a/src/node/type.jl b/src/node/type.jl index db32f1b..e8a353a 100644 --- a/src/node/type.jl +++ b/src/node/type.jl @@ -81,25 +81,6 @@ mutable struct ComputeTaskNode <: Node nodeFusions::Vector{Operation} end -DataTaskNode(t::AbstractDataTask) = DataTaskNode( - t, - Vector{Node}(), - Vector{Node}(), - UUIDs.uuid1(rng[threadid()]), - missing, - missing, - missing, -) -ComputeTaskNode(t::AbstractComputeTask) = ComputeTaskNode( - t, - Vector{Node}(), - Vector{Node}(), - UUIDs.uuid1(rng[threadid()]), - missing, - missing, - Vector{NodeFusion}(), -) - """ Edge @@ -116,23 +97,3 @@ struct Edge Tuple{ComputeTaskNode, DataTaskNode}, } end - -copy(m::Missing) = missing -copy(n::ComputeTaskNode) = ComputeTaskNode( - copy(n.task), - copy(n.parents), - copy(n.children), - UUIDs.uuid1(rng[threadid()]), - copy(n.nodeReduction), - 
copy(n.nodeSplit), - copy(n.nodeFusions), -) -copy(n::DataTaskNode) = DataTaskNode( - copy(n.task), - copy(n.parents), - copy(n.children), - UUIDs.uuid1(rng[threadid()]), - copy(n.nodeReduction), - copy(n.nodeSplit), - copy(n.nodeFusion), -) diff --git a/src/operation/apply.jl b/src/operation/apply.jl index 6230322..b6da322 100644 --- a/src/operation/apply.jl +++ b/src/operation/apply.jl @@ -40,6 +40,9 @@ function apply_operation!(graph::DAG, operation::NodeFusion) operation.input[2], operation.input[3], ) + + graph.properties += GraphProperties(diff) + return AppliedNodeFusion(operation, diff) end @@ -52,6 +55,9 @@ Return an [`AppliedNodeReduction`](@ref) object generated from the graph's [`Dif """ function apply_operation!(graph::DAG, operation::NodeReduction) diff = node_reduction!(graph, operation.input) + + graph.properties += GraphProperties(diff) + return AppliedNodeReduction(operation, diff) end @@ -64,6 +70,9 @@ Return an [`AppliedNodeSplit`](@ref) object generated from the graph's [`Diff`]( """ function apply_operation!(graph::DAG, operation::NodeSplit) diff = node_split!(graph, operation.input) + + graph.properties += GraphProperties(diff) + return AppliedNodeSplit(operation, diff) end @@ -127,6 +136,9 @@ function revert_diff!(graph::DAG, diff::Diff) for edge in diff.removedEdges insert_edge!(graph, edge.edge[1], edge.edge[2], false) end + + graph.properties -= GraphProperties(diff) + return nothing end diff --git a/src/properties/create.jl b/src/properties/create.jl new file mode 100644 index 0000000..6db46f4 --- /dev/null +++ b/src/properties/create.jl @@ -0,0 +1,73 @@ +""" + GraphProperties() + +Create an empty [`GraphProperties`](@ref) object. +""" +function GraphProperties() + return ( + data = 0.0, + computeEffort = 0.0, + computeIntensity = 0.0, + cost = 0.0, + noNodes = 0, + noEdges = 0, + )::GraphProperties +end + +""" + GraphProperties(graph::DAG) + +Calculate the graph's properties and return the constructed [`GraphProperties`](@ref) object. 
+""" +function GraphProperties(graph::DAG) + # make sure the graph is fully generated + apply_all!(graph) + + d = 0.0 + ce = 0.0 + ed = 0 + for node in graph.nodes + d += data(node.task) * length(node.parents) + ce += compute_effort(node.task) + ed += length(node.parents) + end + + return ( + data = d, + computeEffort = ce, + computeIntensity = (d == 0) ? 0.0 : ce / d, + cost = 0.0, # TODO + noNodes = length(graph.nodes), + noEdges = ed, + )::GraphProperties +end + +""" + GraphProperties(diff::Diff) + +Create the graph properties difference from a given [`Diff`](@ref). +The graph's properties after applying the [`Diff`](@ref) will be `get_properties(graph) + GraphProperties(diff)`. +For reverting a diff, it's `get_properties(graph) - GraphProperties(diff)`. +""" +function GraphProperties(diff::Diff) + d = 0.0 + ce = 0.0 + c = 0.0 # TODO + + ce = + reduce(+, compute_effort(n.task) for n in diff.addedNodes; init = 0.0) - + reduce(+, compute_effort(n.task) for n in diff.removedNodes; init = 0.0) + + d = + reduce(+, data(e) for e in diff.addedEdges; init = 0.0) - + reduce(+, data(e) for e in diff.removedEdges; init = 0.0) + + return ( + data = d, + computeEffort = ce, + computeIntensity = (d == 0) ? 0.0 : ce / d, + cost = c, + noNodes = length(diff.addedNodes) - length(diff.removedNodes), + noEdges = length(diff.addedEdges) - length(diff.removedEdges), + )::GraphProperties +end diff --git a/src/properties/type.jl b/src/properties/type.jl new file mode 100644 index 0000000..084486c --- /dev/null +++ b/src/properties/type.jl @@ -0,0 +1,17 @@ +""" + GraphProperties + +Representation of a [`DAG`](@ref)'s properties. + +# Fields: +`.data`: The total data transfer.\\ +`.computeEffort`: The total compute effort.\\ +`.computeIntensity`: The compute intensity, will always equal `.computeEffort / .data`.\\ +`.cost`: The estimated cost.\\ +`.noNodes`: Number of [`Node`](@ref)s.\\ +`.noEdges`: Number of [`Edge`](@ref)s. 
+""" +const GraphProperties = NamedTuple{ + (:data, :computeEffort, :computeIntensity, :cost, :noNodes, :noEdges), + Tuple{Float64, Float64, Float64, Float64, Int, Int}, +} diff --git a/src/properties/utility.jl b/src/properties/utility.jl new file mode 100644 index 0000000..bf936db --- /dev/null +++ b/src/properties/utility.jl @@ -0,0 +1,59 @@ +""" + -(prop1::GraphProperties, prop2::GraphProperties) + +Subtract `prop1` from `prop2` and return the result as a new [`GraphProperties`](@ref). +Also take care to keep consistent compute intensity. +""" +function -(prop1::GraphProperties, prop2::GraphProperties) + return ( + data = prop1.data - prop2.data, + computeEffort = prop1.computeEffort - prop2.computeEffort, + computeIntensity = if (prop1.data - prop2.data == 0) + 0.0 + else + (prop1.computeEffort - prop2.computeEffort) / + (prop1.data - prop2.data) + end, + cost = prop1.cost - prop2.cost, + noNodes = prop1.noNodes - prop2.noNodes, + noEdges = prop1.noEdges - prop2.noEdges, + )::GraphProperties +end + +""" + +(prop1::GraphProperties, prop2::GraphProperties) + +Add `prop1` and `prop2` and return the result as a new [`GraphProperties`](@ref). +Also take care to keep consistent compute intensity. +""" +function +(prop1::GraphProperties, prop2::GraphProperties) + return ( + data = prop1.data + prop2.data, + computeEffort = prop1.computeEffort + prop2.computeEffort, + computeIntensity = if (prop1.data + prop2.data == 0) + 0.0 + else + (prop1.computeEffort + prop2.computeEffort) / + (prop1.data + prop2.data) + end, + cost = prop1.cost + prop2.cost, + noNodes = prop1.noNodes + prop2.noNodes, + noEdges = prop1.noEdges + prop2.noEdges, + )::GraphProperties +end + +""" + -(prop::GraphProperties) + +Unary negation of the graph properties. `.computeIntensity` will not be negated because `.data` and `.computeEffort` both are. 
+""" +function -(prop::GraphProperties) + return ( + data = -prop.data, + computeEffort = -prop.computeEffort, + computeIntensity = prop.computeIntensity, # no negation here! + cost = -prop.cost, + noNodes = -prop.noNodes, + noEdges = -prop.noEdges, + )::GraphProperties +end diff --git a/test/known_graphs.jl b/test/known_graphs.jl index cbbb5a1..de81c12 100644 --- a/test/known_graphs.jl +++ b/test/known_graphs.jl @@ -3,7 +3,7 @@ using Random function test_known_graph(name::String, n, fusion_test = true) @testset "Test $name Graph ($n)" begin graph = parse_abc(joinpath(@__DIR__, "..", "input", "$name.txt")) - props = graph_properties(graph) + props = get_properties(graph) if (fusion_test) test_node_fusion(graph) @@ -14,13 +14,13 @@ end function test_node_fusion(g::DAG) @testset "Test Node Fusion" begin - props = graph_properties(g) + props = get_properties(g) options = get_operations(g) nodes_number = length(g.nodes) data = props.data - compute_effort = props.compute_effort + compute_effort = props.computeEffort while !isempty(options.nodeFusions) fusion = first(options.nodeFusions) @@ -29,13 +29,13 @@ function test_node_fusion(g::DAG) push_operation!(g, fusion) - props = graph_properties(g) + props = get_properties(g) @test props.data < data - @test props.compute_effort == compute_effort + @test props.computeEffort == compute_effort nodes_number = length(g.nodes) data = props.data - compute_effort = props.compute_effort + compute_effort = props.computeEffort options = get_operations(g) end @@ -49,7 +49,7 @@ function test_random_walk(g::DAG, n::Int64) @test is_valid(g) - properties = graph_properties(g) + properties = get_properties(g) for i in 1:n # choose push or pop @@ -82,7 +82,7 @@ function test_random_walk(g::DAG, n::Int64) @test is_valid(g) - @test properties == graph_properties(g) + @test properties == get_properties(g) end end diff --git a/test/runtests.jl b/test/runtests.jl index 74f7f81..2ea1a76 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -5,6 
+5,7 @@ using Test include("unit_tests_utility.jl") include("unit_tests_tasks.jl") include("unit_tests_nodes.jl") + include("unit_tests_properties.jl") include("node_reduction.jl") include("unit_tests_graph.jl") diff --git a/test/unit_tests_graph.jl b/test/unit_tests_graph.jl index 32efa11..77dd84c 100644 --- a/test/unit_tests_graph.jl +++ b/test/unit_tests_graph.jl @@ -69,7 +69,7 @@ import MetagraphOptimization.partners @test length(graph.nodes) == 26 @test length(graph.dirtyNodes) == 26 - # now for all the edgese + # now for all the edges insert_edge!(graph, d_PB, PB, false) insert_edge!(graph, d_PA, PA, false) insert_edge!(graph, d_PBp, PBp, false) @@ -142,12 +142,12 @@ import MetagraphOptimization.partners @test operations == get_operations(graph) nf = first(operations.nodeFusions) - properties = graph_properties(graph) - @test properties.compute_effort == 134 + properties = get_properties(graph) + @test properties.computeEffort == 134 @test properties.data == 62 - @test properties.compute_intensity ≈ 134 / 62 - @test properties.nodes == 26 - @test properties.edges == 25 + @test properties.computeIntensity ≈ 134 / 62 + @test properties.noNodes == 26 + @test properties.noEdges == 25 push_operation!(graph, nf) # **does not immediately apply the operation** @@ -161,17 +161,17 @@ import MetagraphOptimization.partners (addedNodes = 0, removedNodes = 0, addedEdges = 0, removedEdges = 0) # this applies pending operations - properties = graph_properties(graph) + properties = get_properties(graph) @test length(graph.nodes) == 24 @test length(graph.appliedOperations) == 1 @test length(graph.operationsToApply) == 0 @test length(graph.dirtyNodes) != 0 - @test properties.nodes == 24 - @test properties.edges == 23 - @test properties.compute_effort == 134 + @test properties.noNodes == 24 + @test properties.noEdges == 23 + @test properties.computeEffort == 134 @test properties.data < 62 - @test properties.compute_intensity > 134 / 62 + @test properties.computeIntensity > 134 / 
62 operations = get_operations(graph) @test length(graph.dirtyNodes) == 0 @@ -205,12 +205,12 @@ import MetagraphOptimization.partners @test length(graph.appliedOperations) == 0 @test length(graph.operationsToApply) == 0 - properties = graph_properties(graph) - @test properties.nodes == 26 - @test properties.edges == 25 - @test properties.compute_effort == 134 + properties = get_properties(graph) + @test properties.noNodes == 26 + @test properties.noEdges == 25 + @test properties.computeEffort == 134 @test properties.data == 62 - @test properties.compute_intensity ≈ 134 / 62 + @test properties.computeIntensity ≈ 134 / 62 operations = get_operations(graph) @test length(operations) == diff --git a/test/unit_tests_properties.jl b/test/unit_tests_properties.jl new file mode 100644 index 0000000..97a53ad --- /dev/null +++ b/test/unit_tests_properties.jl @@ -0,0 +1,52 @@ + +@testset "GraphProperties Unit Tests" begin + prop = GraphProperties() + + @test prop.data == 0.0 + @test prop.computeEffort == 0.0 + @test prop.computeIntensity == 0.0 + @test prop.cost == 0.0 + @test prop.noNodes == 0.0 + @test prop.noEdges == 0.0 + + prop2 = ( + data = 5.0, + computeEffort = 6.0, + computeIntensity = 6.0 / 5.0, + cost = 0.0, + noNodes = 2, + noEdges = 3, + )::GraphProperties + + @test prop + prop2 == prop2 + @test prop2 - prop == prop2 + + negProp = -prop2 + @test negProp.data == -5.0 + @test negProp.computeEffort == -6.0 + @test negProp.computeIntensity == 6.0 / 5.0 + @test negProp.cost == 0.0 + @test negProp.noNodes == -2 + @test negProp.noEdges == -3 + + @test negProp + prop2 == GraphProperties() + + prop3 = ( + data = 7.0, + computeEffort = 3.0, + computeIntensity = 7.0 / 3.0, + cost = 0.0, + noNodes = -3, + noEdges = 2, + )::GraphProperties + + propSum = prop2 + prop3 + + @test propSum.data == 12.0 + @test propSum.computeEffort == 9.0 + @test propSum.computeIntensity == 9.0 / 12.0 + @test propSum.cost == 0.0 + @test propSum.noNodes == -1 + @test propSum.noEdges == 5 +end 
+println("GraphProperties Unit Tests Complete!")