More package extensions #161

Merged · 7 commits · Apr 24, 2024
1 change: 0 additions & 1 deletion .github/workflows/CI.yml
@@ -18,7 +18,6 @@ jobs:
fail-fast: false
matrix:
version:
- '1.7'
- '1'
os:
- ubuntu-latest
26 changes: 16 additions & 10 deletions Project.toml
@@ -1,7 +1,7 @@
name = "ITensorNetworks"
uuid = "2919e153-833c-4bdc-8836-1ea460a35fc7"
authors = ["Matthew Fishman <[email protected]> and contributors"]
version = "0.9.0"
version = "0.10.0"

[deps]
AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c"
@@ -13,19 +13,16 @@ Dictionaries = "85a47980-9c8c-11e8-2b9f-f7ca1fa99fb4"
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
DocStringExtensions = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae"
Graphs = "86223c79-3864-5bf0-83f7-82e725a168b6"
GraphsFlows = "06909019-6f44-4949-96fc-b9d9aaa02889"
ITensors = "9136182c-28ba-11e9-034c-db9fb085ebd5"
IsApprox = "28f27b66-4bd8-47e7-9110-e2746eb8bed7"
IterTools = "c8e1da08-722c-5040-9ed9-7db0dc04731e"
KrylovKit = "0b1a1467-8014-51b9-945f-bf0ae24f4b77"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
NDTensors = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf"
NamedGraphs = "678767b0-92e7-4007-89e4-4527a8725b19"
Observers = "338f10d5-c7f1-4033-a7d1-f9dec39bcaa0"
PackageExtensionCompat = "65ce6f38-6b18-4e1d-a461-8949797d7930"
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
Requires = "ae029012-a4dd-5104-9daa-d747884805df"
SerializedElementArrays = "d3ce8812-9567-47e9-a7b5-65a6d70a3065"
SimpleTraits = "699a6c99-e7fa-54fc-8d76-47d257e15c1d"
SparseArrayKit = "a9a3c162-d163-4c15-8926-b8794fbefed2"
@@ -38,9 +35,15 @@ TupleTools = "9d95972d-f1c8-5527-a6e0-b4b365fa01f6"

[weakdeps]
EinExprs = "b1794770-133b-4de1-afb4-526377e9f4c5"
GraphsFlows = "06909019-6f44-4949-96fc-b9d9aaa02889"
Observers = "338f10d5-c7f1-4033-a7d1-f9dec39bcaa0"
OMEinsumContractionOrders = "6f22d1fd-8eed-4bb7-9776-e7d684900715"

[extensions]
ITensorNetworksEinExprsExt = "EinExprs"
ITensorNetworksGraphsFlowsExt = "GraphsFlows"
ITensorNetworksObserversExt = "Observers"
ITensorNetworksOMEinsumContractionOrdersExt = "OMEinsumContractionOrders"

[compat]
AbstractTrees = "0.4.4"
@@ -50,32 +53,35 @@ DataGraphs = "0.2.2"
DataStructures = "0.18"
Dictionaries = "0.4"
Distributions = "0.25.86"
DocStringExtensions = "0.8, 0.9"
DocStringExtensions = "0.9"
EinExprs = "0.6.4"
Graphs = "1.8"
GraphsFlows = "0.1.1"
ITensors = "0.3.58, 0.4"
ITensors = "0.4"
IsApprox = "0.1"
IterTools = "1.4.0"
KrylovKit = "0.6, 0.7"
NamedGraphs = "0.5.1"
NDTensors = "0.2, 0.3"
NDTensors = "0.3"
Observers = "0.2"
OMEinsumContractionOrders = "0.8.3"
PackageExtensionCompat = "1"
Requires = "1.3"
SerializedElementArrays = "0.1"
SimpleTraits = "0.9"
SparseArrayKit = "0.2.1, 0.3"
SparseArrayKit = "0.3"
SplitApplyCombine = "1.2"
StaticArrays = "1.5.12"
StructWalk = "0.2"
Suppressor = "0.2"
TimerOutputs = "0.5.22"
TupleTools = "1.4"
julia = "1.7"
julia = "1.10"

[extras]
EinExprs = "b1794770-133b-4de1-afb4-526377e9f4c5"
GraphsFlows = "06909019-6f44-4949-96fc-b9d9aaa02889"
Observers = "338f10d5-c7f1-4033-a7d1-f9dec39bcaa0"
OMEinsumContractionOrders = "6f22d1fd-8eed-4bb7-9776-e7d684900715"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[targets]
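With this split, EinExprs, GraphsFlows, Observers, and OMEinsumContractionOrders move from hard dependencies to weak dependencies: each module listed under `[extensions]` is loaded only once its trigger package is loaded into the session. A minimal sketch of checking that, using `Base.get_extension` (available on the new Julia 1.10 minimum, which is also what lets the `Requires`-based fallback be dropped):

```julia
using ITensorNetworks                 # no extensions loaded yet
using Observers                       # triggers ITensorNetworksObserversExt

# Returns the extension module, or `nothing` if it never loaded:
ext = Base.get_extension(ITensorNetworks, :ITensorNetworksObserversExt)
@assert ext !== nothing
```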
19 changes: 19 additions & 0 deletions ext/ITensorNetworksGraphsFlowsExt/ITensorNetworksGraphsFlowsExt.jl
@@ -0,0 +1,19 @@
module ITensorNetworksGraphsFlowsExt
using Graphs: AbstractGraph
using GraphsFlows: GraphsFlows
using ITensorNetworks: ITensorNetworks
using NDTensors.AlgorithmSelection: @Algorithm_str

function ITensorNetworks.mincut(
::Algorithm"GraphsFlows",
graph::AbstractGraph,
source_vertex,
target_vertex;
capacity_matrix,
alg=GraphsFlows.PushRelabelAlgorithm(),
)
# TODO: Replace with `Backend(backend)`.
return GraphsFlows.mincut(graph, source_vertex, target_vertex, capacity_matrix, alg)
end

end
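As a usage sketch (the two-vertex graph and capacities are hypothetical, and it is assumed that GraphsFlows' `mincut` returns the two vertex partitions plus the flow value, which the extension passes through unchanged):

```julia
using Graphs: SimpleDiGraph, add_edge!
using GraphsFlows  # loading this triggers ITensorNetworksGraphsFlowsExt
using ITensorNetworks: ITensorNetworks

g = SimpleDiGraph(2)
add_edge!(g, 1, 2)
add_edge!(g, 2, 1)
capacity = [0.0 1.0; 1.0 0.0]  # capacity of the single edge, in each direction

# Dispatches to the extension's method via `Algorithm"GraphsFlows"`:
ITensorNetworks.mincut(g, 1, 2; backend="GraphsFlows", capacity_matrix=capacity)
```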
185 changes: 185 additions & 0 deletions ext/ITensorNetworksOMEinsumContractionOrdersExt/ITensorNetworksOMEinsumContractionOrdersExt.jl
@@ -0,0 +1,185 @@
module ITensorNetworksOMEinsumContractionOrdersExt
using DocStringExtensions: TYPEDSIGNATURES
using ITensorNetworks: ITensorNetworks
using ITensors: ITensors, Index, ITensor, inds
using NDTensors: dim
using NDTensors.AlgorithmSelection: @Algorithm_str
using OMEinsumContractionOrders: OMEinsumContractionOrders

# OMEinsumContractionOrders wrapper for ITensors
# Slicing is not supported, because it might require extra work to slice an `ITensor` correctly.

const ITensorList = Union{Vector{ITensor},Tuple{Vararg{ITensor}}}

# infer the output tensor labels
# TODO: Use `symdiff` instead.
function infer_output(inputs::AbstractVector{<:AbstractVector{<:Index}})
indslist = reduce(vcat, inputs)
# get output indices
iy = eltype(eltype(inputs))[]
for l in indslist
c = count(==(l), indslist)
if c == 1
push!(iy, l)
elseif c != 2
error("Each index in a tensor network must appear at most twice!")
end
end
return iy
end
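# For example (a sketch): with inputs `[[i, j], [j, k]]`, the index `j` appears
# twice and is contracted, while `i` and `k` each appear once, so the inferred
# output is `[i, k]`.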

# get a (labels, size_dict) representation of a collection of ITensors
function rawcode(tensors::ITensorList)
# we use id as the label
indsAs = [collect(Index{Int}, ITensors.inds(A)) for A in tensors]
ixs = collect.(inds.(tensors))
unique_labels = unique(reduce(vcat, indsAs))
size_dict = Dict([x => dim(x) for x in unique_labels])
index_dict = Dict([x => x for x in unique_labels])
return OMEinsumContractionOrders.EinCode(ixs, infer_output(indsAs)), size_dict, index_dict
end

"""
$(TYPEDSIGNATURES)
Optimize the contraction order of a tensor network specified as a vector of tensors.
Returns a [`NestedEinsum`](@ref) instance.
### Examples
```jldoctest
julia> using ITensors, OMEinsumContractionOrders

julia> i, j, k, l = Index(4), Index(5), Index(6), Index(7);

julia> x, y, z = randomITensor(i, j), randomITensor(j, k), randomITensor(k, l);

julia> net = optimize_contraction_nested_einsum([x, y, z]; optimizer=TreeSA());
```
"""
function optimize_contraction_nested_einsum(
tensors::ITensorList;
optimizer::OMEinsumContractionOrders.CodeOptimizer=OMEinsumContractionOrders.TreeSA(),
)
r, size_dict, index_dict = rawcode(tensors)
# `MergeVectors()` merges rank-1 tensors into their neighbors first, which can
# speed up finding the contraction order; the trailing `true` enables optimizing
# the permutation of tensors.
res = OMEinsumContractionOrders.optimize_code(
r, size_dict, optimizer, OMEinsumContractionOrders.MergeVectors(), true
)
if res isa OMEinsumContractionOrders.SlicedEinsum # slicing is not supported!
if length(res.slicing) != 0
@warn "Slicing is not yet supported by `ITensors`, removing slices..."
end
res = res.eins
end
return res
end

"""
Convert NestedEinsum to contraction sequence, such as `[[1, 2], [3, 4]]`.
"""
function convert_to_contraction_sequence(net::OMEinsumContractionOrders.NestedEinsum)
if OMEinsumContractionOrders.isleaf(net)
return net.tensorindex
else
return convert_to_contraction_sequence.(net.args)
end
end

"""
Convert the result of `optimize_contraction_nested_einsum` to a contraction sequence.
"""
function optimize_contraction_sequence(
tensors::ITensorList;
optimizer::OMEinsumContractionOrders.CodeOptimizer=OMEinsumContractionOrders.TreeSA(),
)
res = optimize_contraction_nested_einsum(tensors; optimizer)
return convert_to_contraction_sequence(res)
end

"""
GreedyMethod(; method=MinSpaceOut(), nrepeat=10)

The fast but poor greedy optimizer. Input arguments are:

* `method` is `MinSpaceOut()` or `MinSpaceDiff()`:
* `MinSpaceOut()` chooses the contraction that produces the smallest output tensor,
* `MinSpaceDiff()` chooses the contraction that decreases the total space the most.
* `nrepeat` is the number of repetitions; the best contraction order found is returned.
"""
function ITensorNetworks.contraction_sequence(
::Algorithm"greedy", tn::Vector{ITensor}; kwargs...
)
return optimize_contraction_sequence(
tn; optimizer=OMEinsumContractionOrders.GreedyMethod(; kwargs...)
)
end

"""
TreeSA(; sc_target=20, βs=collect(0.01:0.05:15), ntrials=10, niters=50,
sc_weight=1.0, rw_weight=0.2, initializer=:greedy, greedy_config=GreedyMethod(; nrepeat=1))

Optimize the einsum contraction pattern using the simulated annealing on tensor expression tree.

* `sc_target` is the target space complexity,
* `ntrials`, `βs`, and `niters` are annealing parameters: the algorithm performs `ntrials` independent annealing runs, each sweeping through the inverse temperatures in `βs`, with `niters` updates of the tree at each temperature.
* `sc_weight` is the relative importance factor of space complexity in the loss compared with the time complexity.
* `rw_weight` is the relative importance factor of memory read and write in the loss compared with the time complexity.
* `initializer` specifies how to determine the initial configuration; it can be `:greedy` or `:random`. When the `:greedy` method is used to generate the initial configuration, two extra arguments, `greedy_method` and `greedy_nrepeat`, are also used.
* `nslices` is the number of sliced legs, default is 0.
* `fixed_slices` is a vector of sliced legs, default is `[]`.

### References
* [Recursive Multi-Tensor Contraction for XEB Verification of Quantum Circuits](https://arxiv.org/abs/2108.05665)
"""
function ITensorNetworks.contraction_sequence(::Algorithm"tree_sa", tn; kwargs...)
return optimize_contraction_sequence(
tn; optimizer=OMEinsumContractionOrders.TreeSA(; kwargs...)
)
end

"""
SABipartite(; sc_target=25, ntrials=50, βs=0.1:0.2:15.0, niters=1000,
max_group_size=40, greedy_config=GreedyMethod(), initializer=:random)

Optimize the einsum code contraction order using the Simulated Annealing bipartition + Greedy approach.
This program first recursively cuts the tensors into several groups using simulated annealing,
with the maximum group size specified by `max_group_size` and the maximum space complexity specified by `sc_target`,
then finds the contraction order inside each group with the greedy search algorithm. Other arguments are:

* `size_dict`, a dictionary that specifies leg dimensions,
* `sc_target` is the target space complexity, defined as `log2(number of elements in the largest tensor)`,
* `max_group_size` is the maximum group size for which the greedy search is used,
* `βs` is a list of inverse temperature `1/T`,
* `niters` is the number of iterations at each temperature,
* `ntrials` is the number of repetition (with different random seeds),
* `greedy_config` configures the greedy method,
* `initializer`, the partition configuration initializer, one can choose `:random` or `:greedy` (slow but better).

### References
* [Hyper-optimized tensor network contraction](https://arxiv.org/abs/2002.01935)
"""
function ITensorNetworks.contraction_sequence(::Algorithm"sa_bipartite", tn; kwargs...)
return optimize_contraction_sequence(
tn; optimizer=OMEinsumContractionOrders.SABipartite(; kwargs...)
)
end

"""
KaHyParBipartite(; sc_target, imbalances=collect(0.0:0.005:0.8),
max_group_size=40, greedy_config=GreedyMethod())

Optimize the einsum code contraction order using the KaHyPar + Greedy approach.
This program first recursively cuts the tensors into several groups using KaHyPar,
with the maximum group size specified by `max_group_size` and the maximum space complexity specified by `sc_target`,
then finds the contraction order inside each group with the greedy search algorithm. Other arguments are:

* `sc_target` is the target space complexity, defined as `log2(number of elements in the largest tensor)`,
* `imbalances` is a KaHyPar parameter that controls the group sizes in hierarchical bipartition,
* `max_group_size` is the maximum group size for which the greedy search is used,
* `greedy_config` is a greedy optimizer.

### References
* [Hyper-optimized tensor network contraction](https://arxiv.org/abs/2002.01935)
* [Simulating the Sycamore quantum supremacy circuits](https://arxiv.org/abs/2103.03074)
"""
function ITensorNetworks.contraction_sequence(::Algorithm"kahypar_bipartite", tn; kwargs...)
return optimize_contraction_sequence(
tn; optimizer=OMEinsumContractionOrders.KaHyParBipartite(; kwargs...)
)
end
end
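As a usage sketch (assuming the package-level `contraction_sequence(tn; alg, kwargs...)` entry point wraps the `alg` string in `Algorithm`, matching the methods above; the small network here is hypothetical):

```julia
using ITensors: Index, randomITensor
using ITensorNetworks: contraction_sequence
using OMEinsumContractionOrders  # loading this triggers the extension

i, j, k = Index(2), Index(3), Index(4)
tensors = [randomITensor(i, j), randomITensor(j, k), randomITensor(k, i)]

# Returns a nested pairwise sequence such as [[1, 2], 3]:
seq = contraction_sequence(tensors; alg="greedy")
```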
9 changes: 9 additions & 0 deletions ext/ITensorNetworksObserversExt/ITensorNetworksObserversExt.jl
@@ -0,0 +1,9 @@
module ITensorNetworksObserversExt
using ITensorNetworks: ITensorNetworks
using Observers.DataFrames: AbstractDataFrame
using Observers: Observers

function ITensorNetworks.update_observer!(observer::AbstractDataFrame; kwargs...)
return Observers.update!(observer; kwargs...)
end
end
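A minimal sketch of the hookup (the pair-based `observer` constructor and the column functions are assumptions about Observers.jl's API, not part of this diff):

```julia
using Observers: observer
using ITensorNetworks: ITensorNetworks

sweep_col(; sweep, kwargs...) = sweep
energy_col(; energy, kwargs...) = energy

# An Observers.jl observer is a DataFrame; `update_observer!` forwards to
# `Observers.update!`, which records a row of the tracked quantities.
obs = observer("sweep" => sweep_col, "energy" => energy_col)
ITensorNetworks.update_observer!(obs; sweep=1, energy=-1.25)
```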
6 changes: 1 addition & 5 deletions src/ITensorNetworks.jl
@@ -1,7 +1,6 @@
module ITensorNetworks
include("lib/BaseExtensions/src/BaseExtensions.jl")
include("lib/ITensorsExtensions/src/ITensorsExtensions.jl")
include("observers.jl")
include("visualize.jl")
include("graphs.jl")
include("abstractindsnetwork.jl")
@@ -33,6 +32,7 @@ include("caches/beliefpropagationcache.jl")
include("contraction_tree_to_graph.jl")
include("gauging.jl")
include("utils.jl")
include("update_observer.jl")
include("solvers/local_solvers/eigsolve.jl")
include("solvers/local_solvers/exponentiate.jl")
include("solvers/local_solvers/dmrg_x.jl")
@@ -66,11 +66,7 @@ include("lib/ModelHamiltonians/src/ModelHamiltonians.jl")
include("lib/ModelNetworks/src/ModelNetworks.jl")

using PackageExtensionCompat: @require_extensions
using Requires: @require
function __init__()
@require_extensions
@require OMEinsumContractionOrders = "6f22d1fd-8eed-4bb7-9776-e7d684900715" include(
"requires/omeinsumcontractionorders.jl"
)
end
end
1 change: 0 additions & 1 deletion src/apply.jl
@@ -27,7 +27,6 @@ using ITensors.ITensorMPS: siteinds
using KrylovKit: linsolve
using LinearAlgebra: eigen, norm, svd
using NamedGraphs: NamedEdge, has_edge
using Observers: Observers

function full_update_bp(
o,
16 changes: 14 additions & 2 deletions src/contract_approx/mincut.jl
@@ -1,8 +1,8 @@
using AbstractTrees: Leaves, PostOrderDFS
using Combinatorics: powerset
using Graphs: dijkstra_shortest_paths, weights
using GraphsFlows: GraphsFlows
using NamedGraphs: NamedDiGraph
using NDTensors.AlgorithmSelection: Algorithm

# a large number to prevent this edge being a cut
MAX_WEIGHT = 1e32
@@ -37,6 +37,18 @@ function binary_tree_structure(tn::ITensorNetwork, outinds::Vector)
return _binary_tree_structure(tn, outinds; maximally_unbalanced=false)
end

function mincut(graph::AbstractGraph, source_vertex, target_vertex; backend, kwargs...)
# TODO: Replace with `Backend(backend)`.
return mincut(Algorithm(backend), graph, source_vertex, target_vertex; kwargs...)
end

# TODO: Replace with `backend::Backend`.
function mincut(
backend::Algorithm, graph::AbstractGraph, source_vertex, target_vertex; kwargs...
)
return error("Backend `$backend` not implemented for `mincut`.")
end
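# For example (a sketch): `mincut(g, 1, 2; backend="GraphsFlows", capacity_matrix=w)`
# wraps the string as `Algorithm"GraphsFlows"` and dispatches to the method defined
# in ITensorNetworksGraphsFlowsExt; if no loaded extension implements the requested
# backend, the fallback above throws the "not implemented" error.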

"""
Calculate the mincut between two subsets of the uncontracted inds
(source_inds and terminal_inds) of the input tn.
@@ -52,7 +64,7 @@ function _mincut(tn::ITensorNetwork, source_inds::Vector, terminal_inds::Vector)
tn = disjoint_union(
ITensorNetwork([ITensor(source_inds...), ITensor(terminal_inds...)]), tn
)
return GraphsFlows.mincut(tn, (1, 1), (2, 1), weights(tn))
return mincut(tn, (1, 1), (2, 1); backend="GraphsFlows", capacity_matrix=weights(tn))
end

"""