diff --git a/Project.toml b/Project.toml
index 2ce895f0..2f82e3a0 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "ITensorNetworks"
 uuid = "2919e153-833c-4bdc-8836-1ea460a35fc7"
 authors = ["Matthew Fishman and contributors"]
-version = "0.7"
+version = "0.8.0"
 
 [deps]
 AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c"
@@ -19,6 +19,7 @@ IsApprox = "28f27b66-4bd8-47e7-9110-e2746eb8bed7"
 IterTools = "c8e1da08-722c-5040-9ed9-7db0dc04731e"
 KrylovKit = "0b1a1467-8014-51b9-945f-bf0ae24f4b77"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
+NDTensors = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf"
 NamedGraphs = "678767b0-92e7-4007-89e4-4527a8725b19"
 Observers = "338f10d5-c7f1-4033-a7d1-f9dec39bcaa0"
 PackageExtensionCompat = "65ce6f38-6b18-4e1d-a461-8949797d7930"
diff --git a/README.md b/README.md
index fe933ec8..2f86bee0 100644
--- a/README.md
+++ b/README.md
@@ -39,7 +39,7 @@ julia> using ITensorNetworks: ITensorNetwork, siteinds
 julia> using NamedGraphs: named_grid, subgraph
 
 julia> tn = ITensorNetwork(named_grid(4); link_space=2)
-ITensorNetwork{Int64} with 4 vertices:
+ITensorNetworks.ITensorNetwork{Int64} with 4 vertices:
 4-element Vector{Int64}:
  1
  2
@@ -90,7 +90,7 @@ and here is a similar example for making a tensor network on a grid (a tensor pr
 
 ```julia
 julia> tn = ITensorNetwork(named_grid((2, 2)); link_space=2)
-ITensorNetwork{Tuple{Int64, Int64}} with 4 vertices:
+ITensorNetworks.ITensorNetwork{Tuple{Int64, Int64}} with 4 vertices:
 4-element Vector{Tuple{Int64, Int64}}:
  (1, 1)
  (2, 1)
@@ -125,7 +125,7 @@ julia> neighbors(tn, (1, 2))
 (2, 2)
 
 julia> tn_1 = subgraph(v -> v[1] == 1, tn)
-ITensorNetwork{Tuple{Int64, Int64}} with 2 vertices:
+ITensorNetworks.ITensorNetwork{Tuple{Int64, Int64}} with 2 vertices:
 2-element Vector{Tuple{Int64, Int64}}:
  (1, 1)
  (1, 2)
@@ -139,7 +139,7 @@ with vertex data:
 (1, 2) │ ((dim=2|id=723|"1×1,1×2"), (dim=2|id=712|"1×2,2×2"))
 
 julia> tn_2 = subgraph(v -> v[1] == 2, tn)
-ITensorNetwork{Tuple{Int64, Int64}} with 2 vertices:
+ITensorNetworks.ITensorNetwork{Tuple{Int64, Int64}} with 2 vertices:
 2-element Vector{Tuple{Int64, Int64}}:
  (2, 1)
  (2, 2)
@@ -184,7 +184,7 @@ and edge data:
 0-element Dictionaries.Dictionary{NamedGraphs.NamedEdge{Int64}, Vector{ITensors.Index}}
 
 julia> tn1 = ITensorNetwork(s; link_space=2)
-ITensorNetwork{Int64} with 3 vertices:
+ITensorNetworks.ITensorNetwork{Int64} with 3 vertices:
 3-element Vector{Int64}:
  1
  2
@@ -201,7 +201,7 @@ with vertex data:
   3 │ ((dim=2|id=656|"S=1/2,Site,n=3"), (dim=2|id=190|"2,3"))
 
 julia> tn2 = ITensorNetwork(s; link_space=2)
-ITensorNetwork{Int64} with 3 vertices:
+ITensorNetworks.ITensorNetwork{Int64} with 3 vertices:
 3-element Vector{Int64}:
  1
  2
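Note: under 0.x semantic versioning, the bump from 0.7 to 0.8.0 marks the renames below as breaking. The README hunks track only a display change: the type now prints module-qualified, consistent with these names being imported explicitly rather than exported (unexported type names print with their module prefix in the REPL). A minimal sketch of the pattern, assuming ITensorNetworks v0.8:

```julia
# Public names are brought in with explicit `using Module: name` imports,
# as the README examples already do.
using ITensorNetworks: ITensorNetwork
using NamedGraphs: named_grid

tn = ITensorNetwork(named_grid(4); link_space=2)
typeof(tn)  # prints as ITensorNetworks.ITensorNetwork{Int64}
```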
diff --git a/src/ITensorNetworks.jl b/src/ITensorNetworks.jl
index bfe4b8a2..7e69dd39 100644
--- a/src/ITensorNetworks.jl
+++ b/src/ITensorNetworks.jl
@@ -36,7 +36,6 @@ include("caches/beliefpropagationcache.jl")
 include("contraction_tree_to_graph.jl")
 include("gauging.jl")
 include("utils.jl")
-include("tensornetworkoperators.jl")
 include("ITensorsExt/itensorutils.jl")
 include("solvers/local_solvers/eigsolve.jl")
 include("solvers/local_solvers/exponentiate.jl")
diff --git a/src/abstractitensornetwork.jl b/src/abstractitensornetwork.jl
index 3e044abe..958a5845 100644
--- a/src/abstractitensornetwork.jl
+++ b/src/abstractitensornetwork.jl
@@ -23,7 +23,6 @@ using ITensors:
   commoninds,
   commontags,
   contract,
-  convert_eltype,
   dag,
   hascommoninds,
   noprime,
@@ -39,18 +38,18 @@ using ITensors:
   swaptags
 using ITensors.ITensorMPS: ITensorMPS, add, linkdim, linkinds, siteinds
 using ITensors.ITensorVisualizationCore: ITensorVisualizationCore, visualize
-using ITensors.NDTensors: NDTensors, dim
-using LinearAlgebra: LinearAlgebra
+using LinearAlgebra: LinearAlgebra, factorize
 using NamedGraphs:
   NamedGraphs,
   NamedGraph,
   ⊔,
+  directed_graph,
   incident_edges,
   not_implemented,
   rename_vertices,
   vertex_to_parent_vertex,
   vertextype
-using NamedGraphs: directed_graph
+using NDTensors: NDTensors, dim
 using SplitApplyCombine: flatten
 
 abstract type AbstractITensorNetwork{V} <: AbstractDataGraph{V,ITensor,ITensor} end
@@ -174,41 +173,26 @@ function Base.Vector{ITensor}(tn::AbstractITensorNetwork)
 end
 
 # Convenience wrapper
-# TODO: Delete this and just use `Vector{ITensor}`, or maybe
-# it should output a dictionary or be called `eachtensor`?
-itensors(tn::AbstractITensorNetwork) = Vector{ITensor}(tn)
+function tensors(tn::AbstractITensorNetwork, vertices=vertices(tn))
+  return map(v -> tn[v], Indices(vertices))
+end
 
 #
 # Promotion and conversion
 #
 
-function LinearAlgebra.promote_leaf_eltypes(tn::AbstractITensorNetwork)
-  return LinearAlgebra.promote_leaf_eltypes(itensors(tn))
-end
-
 function promote_indtypeof(tn::AbstractITensorNetwork)
-  return mapreduce(promote_indtype, vertices(tn)) do v
-    return indtype(tn[v])
+  return mapreduce(promote_indtype, tensors(tn)) do t
+    return indtype(t)
   end
 end
 
-# TODO: Delete in favor of `scalartype`.
-function ITensors.promote_itensor_eltype(tn::AbstractITensorNetwork)
-  return LinearAlgebra.promote_leaf_eltypes(tn)
+function NDTensors.scalartype(tn::AbstractITensorNetwork)
+  return mapreduce(eltype, promote_type, tensors(tn); init=Bool)
 end
 
-NDTensors.scalartype(tn::AbstractITensorNetwork) = LinearAlgebra.promote_leaf_eltypes(tn)
-
-# TODO: eltype(::AbstractITensorNetwork) (cannot behave the same as eltype(::ITensors.AbstractMPS))
-
-# TODO: mimic ITensors.AbstractMPS implementation using map
-function ITensors.convert_leaf_eltype(eltype::Type, tn::AbstractITensorNetwork)
-  tn = copy(tn)
-  vertex_data(tn) .= convert_eltype.(Ref(eltype), vertex_data(tn))
-  return tn
-end
+# TODO: Define `eltype(::AbstractITensorNetwork)` as `ITensor`?
 
-# TODO: Mimic ITensors.AbstractMPS implementation using map
 # TODO: Implement using `adapt`
 function NDTensors.convert_scalartype(eltype::Type{<:Number}, tn::AbstractITensorNetwork)
   tn = copy(tn)
@@ -217,7 +201,7 @@ function NDTensors.convert_scalartype(eltype::Type{<:Number}, tn::AbstractITenso
 end
 
 function Base.complex(tn::AbstractITensorNetwork)
-  return NDTensors.convert_scalartype(complex(LinearAlgebra.promote_leaf_eltypes(tn)), tn)
+  return NDTensors.convert_scalartype(complex(scalartype(tn)), tn)
 end
 
 #
@@ -251,7 +235,9 @@ end
 # Alias
 indsnetwork(tn::AbstractITensorNetwork) = IndsNetwork(tn)
 
-function external_indsnetwork(tn::AbstractITensorNetwork)
+# TODO: Output a `VertexDataGraph`? Unfortunately
+# `IndsNetwork` doesn't allow iterating over vertex data.
+function ITensorMPS.siteinds(tn::AbstractITensorNetwork)
   is = IndsNetwork(underlying_graph(tn))
   for v in vertices(tn)
     is[v] = uniqueinds(tn, v)
@@ -259,25 +245,12 @@ function external_indsnetwork(tn::AbstractITensorNetwork)
   return is
 end
 
-# For backwards compatibility
-# TODO: Delete this
-ITensorMPS.siteinds(tn::AbstractITensorNetwork) = external_indsnetwork(tn)
-
-# External indsnetwork of the flattened network, with vertices
-# mapped back to `tn1`.
-function flatten_external_indsnetwork(
-  tn1::AbstractITensorNetwork, tn2::AbstractITensorNetwork
-)
-  is = external_indsnetwork(sim(tn1; sites=[]) ⊗ tn2)
-  flattened_is = IndsNetwork(underlying_graph(tn1))
-  for v in vertices(flattened_is)
-    # setindex_preserve_graph!(flattened_is, unioninds(is[v, 1], is[v, 2]), v)
-    flattened_is[v] = unioninds(is[v, 1], is[v, 2])
-  end
-  return flattened_is
+function flatten_siteinds(tn::AbstractITensorNetwork)
+  # reduce(noncommoninds, tensors(tn))
+  return unique(flatten([uniqueinds(tn, v) for v in vertices(tn)]))
 end
 
-function internal_indsnetwork(tn::AbstractITensorNetwork)
+function ITensorMPS.linkinds(tn::AbstractITensorNetwork)
   is = IndsNetwork(underlying_graph(tn))
   for e in edges(tn)
     is[e] = commoninds(tn, e)
@@ -285,20 +258,22 @@ function internal_indsnetwork(tn::AbstractITensorNetwork)
   return is
 end
 
-# For backwards compatibility
-# TODO: Delete this
-ITensorMPS.linkinds(tn::AbstractITensorNetwork) = internal_indsnetwork(tn)
+function flatten_linkinds(tn::AbstractITensorNetwork)
+  return unique(flatten([commoninds(tn, e) for e in edges(tn)]))
+end
 
 #
 # Index access
 #
 
-function neighbor_itensors(tn::AbstractITensorNetwork, vertex)
-  return [tn[vn] for vn in neighbors(tn, vertex)]
+function neighbor_tensors(tn::AbstractITensorNetwork, vertex)
+  return tensors(tn, neighbors(tn, vertex))
 end
 
 function ITensors.uniqueinds(tn::AbstractITensorNetwork, vertex)
-  return uniqueinds(tn[vertex], neighbor_itensors(tn, vertex)...)
+  # TODO: Splatting here isn't good, make a version that works for
+  # collections of ITensors.
+  return reduce(uniqueinds, Iterators.flatten(([tn[vertex]], neighbor_tensors(tn, vertex))))
 end
 
 function ITensors.uniqueinds(tn::AbstractITensorNetwork, edge::AbstractEdge)
@@ -322,14 +297,6 @@ function ITensorMPS.linkinds(tn::AbstractITensorNetwork, edge)
   return commoninds(tn, edge)
 end
 
-function internalinds(tn::AbstractITensorNetwork)
-  return unique(flatten([commoninds(tn, e) for e in edges(tn)]))
-end
-
-function externalinds(tn::AbstractITensorNetwork)
-  return unique(flatten([uniqueinds(tn, v) for v in vertices(tn)]))
-end
-
 # Priming and tagging (changing Index identifiers)
 function ITensors.replaceinds(
   tn::AbstractITensorNetwork, is_is′::Pair{<:IndsNetwork,<:IndsNetwork}
@@ -439,9 +406,7 @@ function Base.isapprox(
   x::AbstractITensorNetwork,
   y::AbstractITensorNetwork;
   atol::Real=0,
-  rtol::Real=Base.rtoldefault(
-    LinearAlgebra.promote_leaf_eltypes(x), LinearAlgebra.promote_leaf_eltypes(y), atol
-  ),
+  rtol::Real=Base.rtoldefault(scalartype(x), scalartype(y), atol),
 )
   error("Not implemented")
   d = norm(x - y)
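Note: this file carries the core of the rename. `itensors` becomes `tensors` (vertex-indexed via `Dictionaries.Indices`), `external_indsnetwork`/`internal_indsnetwork` fold into `siteinds`/`linkinds` directly, `externalinds`/`internalinds` become `flatten_siteinds`/`flatten_linkinds`, `neighbor_itensors` becomes `neighbor_tensors`, and the `promote_leaf_eltypes`/`convert_leaf_eltype` machinery is replaced by `NDTensors.scalartype`/`NDTensors.convert_scalartype`. A minimal sketch of the new accessors, assuming ITensorNetworks v0.8 with the constructors shown in the README:

```julia
using ITensorNetworks:
  ITensorNetwork, flatten_linkinds, flatten_siteinds, linkinds, siteinds, tensors
using ITensors: scalartype
using NamedGraphs: named_grid

g = named_grid((2, 2))
s = siteinds("S=1/2", g)             # IndsNetwork of physical indices
ψ = ITensorNetwork(s; link_space=2)

tensors(ψ)           # vertex-indexed ITensors (replaces `itensors`)
siteinds(ψ)          # IndsNetwork of site indices (replaces `external_indsnetwork`)
linkinds(ψ)          # IndsNetwork of link indices (replaces `internal_indsnetwork`)
flatten_siteinds(ψ)  # flat Vector of site indices (replaces `externalinds`)
flatten_linkinds(ψ)  # flat Vector of link indices (replaces `internalinds`)
scalartype(ψ)        # Float64 (replaces `LinearAlgebra.promote_leaf_eltypes`)
```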
diff --git a/src/apply.jl b/src/apply.jl
index 559438e2..948ccb7c 100644
--- a/src/apply.jl
+++ b/src/apply.jl
@@ -1,3 +1,4 @@
+using Graphs: has_edge
 using LinearAlgebra: qr
 using ITensors: Ops
 using ITensors:
diff --git a/src/approx_itensornetwork/partition.jl b/src/approx_itensornetwork/partition.jl
index 9f89d063..7dd994f5 100644
--- a/src/approx_itensornetwork/partition.jl
+++ b/src/approx_itensornetwork/partition.jl
@@ -1,3 +1,9 @@
+using DataGraphs: AbstractDataGraph, DataGraph, edge_data, vertex_data
+using Dictionaries: Dictionary
+using Graphs: AbstractGraph, add_edge!, has_edge, dst, edges, edgetype, src, vertices
+using ITensors: ITensor, noncommoninds
+using NamedGraphs: NamedGraph, subgraph
+
 function _partition(g::AbstractGraph, subgraph_vertices)
   partitioned_graph = DataGraph(
     NamedGraph(eachindex(subgraph_vertices)),
diff --git a/src/formnetworks/bilinearformnetwork.jl b/src/formnetworks/bilinearformnetwork.jl
index bcb59704..14d21114 100644
--- a/src/formnetworks/bilinearformnetwork.jl
+++ b/src/formnetworks/bilinearformnetwork.jl
@@ -57,7 +57,7 @@ function BilinearFormNetwork(
   dual_site_index_map=default_dual_site_index_map,
   kwargs...,
 )
-  @assert issetequal(externalinds(bra), externalinds(ket))
+  @assert issetequal(flatten_siteinds(bra), flatten_siteinds(ket))
   operator_inds = union_all_inds(siteinds(ket), dual_site_index_map(siteinds(ket)))
   O = ITensorNetwork(Op("I"), operator_inds)
   return BilinearFormNetwork(O, bra, ket; dual_site_index_map, kwargs...)
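The `BilinearFormNetwork` constructor keeps its invariant, just restated with the new name: bra and ket must carry the same set of site indices. Stated standalone (a sketch using `random_tensornetwork` and `siteinds` as in this repo's tests):

```julia
using ITensorNetworks: flatten_siteinds, random_tensornetwork, siteinds
using NamedGraphs: named_grid

s = siteinds("S=1/2", named_grid((2, 2)))
ψbra = random_tensornetwork(s; link_space=2)
ψket = random_tensornetwork(s; link_space=2)
# The assertion inside `BilinearFormNetwork` now checks exactly this:
@assert issetequal(flatten_siteinds(ψbra), flatten_siteinds(ψket))
```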
diff --git a/src/solvers/contract.jl b/src/solvers/contract.jl
index cfc90fd6..1e6ddeec 100644
--- a/src/solvers/contract.jl
+++ b/src/solvers/contract.jl
@@ -72,11 +72,6 @@ end
 Overload of `ITensors.apply`.
 """
 function ITensors.apply(tn1::AbstractTTN, tn2::AbstractTTN; init, kwargs...)
-  if !isone(plev_diff(flatten_external_indsnetwork(tn1, tn2), external_indsnetwork(init)))
-    error(
-      "Initial guess `init` needs to primelevel one less than the contraction tn1 and tn2."
-    )
-  end
   init = init'
   tn12 = contract(tn1, tn2; init, kwargs...)
   return replaceprime(tn12, 1 => 0)
@@ -85,24 +80,7 @@ end
 function sum_apply(
   tns::Vector{<:Tuple{<:AbstractTTN,<:AbstractTTN}}; alg="fit", init, kwargs...
 )
-  if !isone(
-    plev_diff(
-      flatten_external_indsnetwork(first(first(tns)), last(first(tns))),
-      external_indsnetwork(init),
-    ),
-  )
-    error(
-      "Initial guess `init` needs to primelevel one less than the contraction tn1 and tn2."
-    )
-  end
-
   init = init'
   tn12 = sum_contract(Algorithm(alg), tns; init, kwargs...)
   return replaceprime(tn12, 1 => 0)
 end
-
-function plev_diff(a::IndsNetwork, b::IndsNetwork)
-  pla = plev(only(a[first(vertices(a))]))
-  plb = plev(only(b[first(vertices(b))]))
-  return pla - plb
-end
diff --git a/src/tensornetworkoperators.jl b/src/tensornetworkoperators.jl
deleted file mode 100644
index 080c723b..00000000
--- a/src/tensornetworkoperators.jl
+++ /dev/null
@@ -1,48 +0,0 @@
-using Graphs: has_edge
-using ITensors: ITensors, commoninds, product
-using LinearAlgebra: factorize
-
-"""
-Take a vector of gates which act on different edges/ vertices of an Inds network and construct the tno which represents prod(gates).
-"""
-function gate_group_to_tno(s::IndsNetwork, gates::Vector{ITensor})
-
-  #Construct indsnetwork for TNO
-  s_O = union_all_inds(s, prime(s; links=[]))
-
-  # Make a TNO with `I` on every site.
-  O = ITensorNetwork(Op("I"), s_O)
-
-  for gate in gates
-    v⃗ = vertices(s)[findall(i -> (length(commoninds(s[i], inds(gate))) != 0), vertices(s))]
-    if length(v⃗) == 1
-      O[v⃗[1]] = product(O[v⃗[1]], gate)
-    elseif length(v⃗) == 2
-      e = v⃗[1] => v⃗[2]
-      if !has_edge(s, e)
-        error("Vertices where the gates are being applied must be neighbors for now.")
-      end
-      Osrc, Odst = factorize(gate, commoninds(O[v⃗[1]], gate))
-      O[v⃗[1]] = product(O[v⃗[1]], Osrc)
-      O[v⃗[2]] = product(O[v⃗[2]], Odst)
-    else
-      error(
-        "Can only deal with gates acting on one or two sites for now. Physical indices of the gates must also match those in the IndsNetwork.",
-      )
-    end
-  end
-
-  return combine_linkinds(O)
-end
-
-"""Take a series of gates acting on the physical indices specified by IndsNetwork convert into a series of tnos
-whose product represents prod(gates). Useful for keeping the bond dimension of each tno low (as opposed to just building a single tno)"""
-function get_tnos(s::IndsNetwork, gates::Vector{ITensor})
-  tnos = ITensorNetwork[]
-  gate_groups = group_commuting_itensors(gates)
-  for group in gate_groups
-    push!(tnos, gate_group_to_tno(s, group))
-  end
-
-  return tnos
-end
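With the `plev_diff` guard gone, `apply` no longer validates the prime level of `init`; it unconditionally primes it before fitting and unprimes the result, so `init` is supplied at the prime level of the expected output. A sketch of the calling convention, with `ttn`, `random_ttn`, and `ModelHamiltonians.ising` assumed as used elsewhere in this repo's tests (further fitting keywords pass through to `contract` and are omitted here):

```julia
using ITensorNetworks: apply, random_ttn, siteinds, ttn
using ITensorNetworks.ModelHamiltonians: ModelHamiltonians
using NamedGraphs: named_comb_tree

g = named_comb_tree((3, 2))
s = siteinds("S=1/2", g)
H = ttn(ModelHamiltonians.ising(g; h=1.5), s)
ψ = random_ttn(s; link_space=4)
# `init` is primed internally and the result unprimed via `replaceprime(1 => 0)`.
Hψ = apply(H, ψ; init=ψ)
```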
diff --git a/src/treetensornetworks/abstracttreetensornetwork.jl b/src/treetensornetworks/abstracttreetensornetwork.jl
index 35cbd128..33146a70 100644
--- a/src/treetensornetworks/abstracttreetensornetwork.jl
+++ b/src/treetensornetworks/abstracttreetensornetwork.jl
@@ -285,9 +285,7 @@ function Base.isapprox(
   x::AbstractTTN,
   y::AbstractTTN;
   atol::Real=0,
-  rtol::Real=Base.rtoldefault(
-    LinearAlgebra.promote_leaf_eltypes(x), LinearAlgebra.promote_leaf_eltypes(y), atol
-  ),
+  rtol::Real=Base.rtoldefault(scalartype(x), scalartype(y), atol),
 )
   d = norm(x - y)
   if isfinite(d)
diff --git a/src/treetensornetworks/projttns/abstractprojttn.jl b/src/treetensornetworks/projttns/abstractprojttn.jl
index 63ff4bf7..d86cc48a 100644
--- a/src/treetensornetworks/projttns/abstractprojttn.jl
+++ b/src/treetensornetworks/projttns/abstractprojttn.jl
@@ -1,6 +1,6 @@
 using DataGraphs: DataGraphs, underlying_graph
 using Graphs: neighbors
-using ITensors: ITensor, contract, order
+using ITensors: ITensor, contract, order, product
 using ITensors.ITensorMPS: ITensorMPS, nsite
 using NamedGraphs: NamedGraphs, NamedEdge, incident_edges, vertextype
 
diff --git a/src/treetensornetworks/projttns/projttnsum.jl b/src/treetensornetworks/projttns/projttnsum.jl
index 4abb8965..73b87af8 100644
--- a/src/treetensornetworks/projttns/projttnsum.jl
+++ b/src/treetensornetworks/projttns/projttnsum.jl
@@ -1,4 +1,4 @@
-using ITensors: ITensors, contract
+using ITensors: ITensors, contract, product
 using ITensors.LazyApply: LazyApply, terms
 using NamedGraphs: NamedGraphs, incident_edges
 
diff --git a/src/treetensornetworks/ttn.jl b/src/treetensornetworks/ttn.jl
index 31bc9770..a30c0776 100644
--- a/src/treetensornetworks/ttn.jl
+++ b/src/treetensornetworks/ttn.jl
@@ -1,6 +1,6 @@
 using Graphs: path_graph
 using ITensors: ITensor
-using LinearAlgebra: normalize
+using LinearAlgebra: factorize, normalize
 using NamedGraphs: vertextype
 
 """
diff --git a/test/Project.toml b/test/Project.toml
index 8ac3670c..9e02c3ef 100644
--- a/test/Project.toml
+++ b/test/Project.toml
@@ -16,6 +16,7 @@ KaHyPar = "2a6221f6-aa48-11e9-3542-2d9e0ef01880"
 KrylovKit = "0b1a1467-8014-51b9-945f-bf0ae24f4b77"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 Metis = "2679e427-3c69-5b7f-982b-ece356f1e94b"
+NDTensors = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf"
 NamedGraphs = "678767b0-92e7-4007-89e4-4527a8725b19"
 OMEinsumContractionOrders = "6f22d1fd-8eed-4bb7-9776-e7d684900715"
 Observers = "338f10d5-c7f1-4033-a7d1-f9dec39bcaa0"
diff --git a/test/test_forms.jl b/test/test_forms.jl
index a9a0e453..e6cda5cd 100644
--- a/test/test_forms.jl
+++ b/test/test_forms.jl
@@ -10,7 +10,7 @@ using ITensorNetworks:
   bra_vertex,
   dual_index_map,
   environment,
-  externalinds,
+  flatten_siteinds,
   ket_network,
   ket_vertex,
   operator_network,
@@ -36,7 +36,7 @@ using Random: Random
 
   blf = BilinearFormNetwork(A, ψbra, ψket)
   @test nv(blf) == nv(ψket) + nv(ψbra) + nv(A)
-  @test isempty(externalinds(blf))
+  @test isempty(flatten_siteinds(blf))
 
   @test underlying_graph(ket_network(blf)) == underlying_graph(ψket)
   @test underlying_graph(operator_network(blf)) == underlying_graph(A)
@@ -44,7 +44,7 @@ using Random: Random
 
   qf = QuadraticFormNetwork(A, ψket)
   @test nv(qf) == 2 * nv(ψbra) + nv(A)
-  @test isempty(externalinds(qf))
+  @test isempty(flatten_siteinds(qf))
 
   v = (1, 1)
   new_tensor = randomITensor(inds(ψket[v]))
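The `isapprox` default tolerance changes above are behavior-preserving: `scalartype` computes the same promoted element type that `LinearAlgebra.promote_leaf_eltypes` did, and feeds the same Base machinery, where the default comes from the less precise of the two scalar types. A quick check:

```julia
# `Base.rtoldefault` is the internal that `isapprox` itself uses for its
# default; with atol = 0 it returns the larger of the two type defaults.
Base.rtoldefault(Float32, Float64, 0) == sqrt(eps(Float32))  # true
```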
diff --git a/test/test_itensornetwork.jl b/test/test_itensornetwork.jl
index 18845dd5..cc5c6217 100644
--- a/test/test_itensornetwork.jl
+++ b/test/test_itensornetwork.jl
@@ -34,18 +34,17 @@ using ITensors:
   scalartype,
   sim,
   uniqueinds
-using ITensors.NDTensors: dim
 using ITensorNetworks:
   ITensorNetworks,
   ⊗,
   IndsNetwork,
   ITensorNetwork,
   contraction_sequence,
-  externalinds,
+  flatten_linkinds,
+  flatten_siteinds,
   inner_network,
-  internalinds,
   linkinds,
-  neighbor_itensors,
+  neighbor_tensors,
   norm_sqr,
   norm_sqr_network,
   orthogonalize,
@@ -54,6 +53,7 @@ using ITensorNetworks:
   ttn
 using LinearAlgebra: factorize
 using NamedGraphs: NamedEdge, incident_edges, named_comb_tree, named_grid
+using NDTensors: NDTensors, dim
 using Random: Random, randn!
 using Test: @test, @test_broken, @testset
 
@@ -323,7 +323,7 @@ const elts = (Float32, Float64, Complex{Float32}, Complex{Float64})
     s = siteinds("S=1/2", g)
     ψ = ITensorNetwork(s; link_space=2)
 
-    nt = neighbor_itensors(ψ, (1, 1))
+    nt = neighbor_tensors(ψ, (1, 1))
     @test length(nt) == 2
     @test all(map(hascommoninds(ψ[1, 1]), nt))
 
@@ -342,8 +342,8 @@ const elts = (Float32, Float64, Complex{Float32}, Complex{Float64})
 
     @test linkinds(ψ, e) == commoninds(ψ[1, 1], ψ[2, 1])
 
-    @test length(externalinds(ψ)) == length(vertices(g))
-    @test length(internalinds(ψ)) == length(edges(g))
+    @test length(flatten_siteinds(ψ)) == length(vertices(g))
+    @test length(flatten_linkinds(ψ)) == length(edges(g))
   end
 
   @testset "eltype conversion, $new_eltype" for new_eltype in (Float32, ComplexF64)
@@ -351,10 +351,10 @@ const elts = (Float32, Float64, Complex{Float32}, Complex{Float64})
     g = named_grid(dims)
     s = siteinds("S=1/2", g)
    ψ = random_tensornetwork(s; link_space=2)
-    @test ITensors.scalartype(ψ) == Float64
+    @test scalartype(ψ) == Float64
 
-    ϕ = ITensors.convert_leaf_eltype(new_eltype, ψ)
-    @test ITensors.scalartype(ϕ) == new_eltype
+    ϕ = NDTensors.convert_scalartype(new_eltype, ψ)
+    @test scalartype(ϕ) == new_eltype
  end
 
   @testset "Construction from state map" for elt in (Float32, ComplexF64)
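Migration for element-type conversion, mirroring the test update above (a sketch; `ψ` constructed as in that testset):

```julia
using ITensorNetworks: random_tensornetwork, siteinds
using ITensors: scalartype
using NDTensors: NDTensors
using NamedGraphs: named_grid

ψ = random_tensornetwork(siteinds("S=1/2", named_grid((2, 2))); link_space=2)
ϕ = NDTensors.convert_scalartype(ComplexF64, ψ)  # was `ITensors.convert_leaf_eltype`
scalartype(ϕ) == ComplexF64  # true
```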
diff --git a/test/test_tno.jl b/test/test_tno.jl
deleted file mode 100644
index 30811130..00000000
--- a/test/test_tno.jl
+++ /dev/null
@@ -1,65 +0,0 @@
-@eval module $(gensym())
-using Graphs: vertices
-using ITensorNetworks:
-  apply,
-  flatten_networks,
-  group_commuting_itensors,
-  gate_group_to_tno,
-  get_tnos,
-  random_tensornetwork,
-  siteinds
-using ITensors: ITensor, inner, noprime
-using ITensorNetworks.ModelHamiltonians: ModelHamiltonians
-using NamedGraphs: named_grid
-using Test: @test, @testset
-
-@testset "TN operator Basics" begin
-  L = 3
-  g = named_grid((L, L))
-  s = siteinds("S=1/2", g)
-
-  ℋ = ModelHamiltonians.ising(g; h=1.5)
-  gates = Vector{ITensor}(ℋ, s)
-  gate_groups = group_commuting_itensors(gates)
-
-  @test typeof(gate_groups) == Vector{Vector{ITensor}}
-
-  #Construct a number of tnos whose product is prod(gates)
-  tnos = get_tnos(s, gates)
-  @test length(tnos) == length(gate_groups)
-
-  #Construct a single tno which represents prod(gates)
-  single_tno = gate_group_to_tno(s, gates)
-
-  ψ = random_tensornetwork(s; link_space=2)
-
-  ψ_gated = copy(ψ)
-
-  for gate in gates
-    ψ_gated = apply(gate, ψ_gated)
-  end
-  ψ_tnod = copy(ψ)
-
-  for tno in tnos
-    ψ_tnod = flatten_networks(ψ_tnod, tno)
-    for v in vertices(ψ_tnod)
-      ψ_tnod[v] = noprime(ψ_tnod[v])
-    end
-  end
-  ψ_tno = copy(ψ)
-  ψ_tno = flatten_networks(ψ_tno, single_tno)
-  for v in vertices(ψ_tno)
-    ψ_tno[v] = noprime(ψ_tno[v])
-  end
-
-  z1 = inner(ψ_gated, ψ_gated)
-  z2 = inner(ψ_tnod, ψ_tnod)
-  z3 = inner(ψ_tno, ψ_tno)
-  f12 = inner(ψ_tnod, ψ_gated) / sqrt(z1 * z2)
-  f13 = inner(ψ_tno, ψ_gated) / sqrt(z1 * z3)
-  f23 = inner(ψ_tno, ψ_tnod) / sqrt(z2 * z3)
-  @test f12 * conj(f12) ≈ 1.0
-  @test f13 * conj(f13) ≈ 1.0
-  @test f23 * conj(f23) ≈ 1.0
-end
-end
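The TNO helpers exercised by this deleted test (`group_commuting_itensors`, `gate_group_to_tno`, `get_tnos`) are removed without a direct replacement in this release. The gate-by-gate path the test compared against remains supported; a sketch reusing the removed test's own setup:

```julia
using ITensorNetworks: apply, random_tensornetwork, siteinds
using ITensorNetworks.ModelHamiltonians: ModelHamiltonians
using ITensors: ITensor
using NamedGraphs: named_grid

g = named_grid((3, 3))
s = siteinds("S=1/2", g)
gates = Vector{ITensor}(ModelHamiltonians.ising(g; h=1.5), s)
ψ = random_tensornetwork(s; link_space=2)
for gate in gates
  ψ = apply(gate, ψ)  # replaces the removed `get_tnos`/`gate_group_to_tno` route
end
```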