
Commit

Merge.
Benedikt Kloss committed Apr 16, 2024
2 parents 2b73463 + ce7b3e4 commit 7ce2b08
Showing 17 changed files with 63 additions and 226 deletions.
4 changes: 3 additions & 1 deletion Project.toml
@@ -1,7 +1,7 @@
name = "ITensorNetworks"
uuid = "2919e153-833c-4bdc-8836-1ea460a35fc7"
authors = ["Matthew Fishman <[email protected]> and contributors"]
version = "0.7.0"
version = "0.8.0"

[deps]
AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c"
@@ -19,6 +19,7 @@ IsApprox = "28f27b66-4bd8-47e7-9110-e2746eb8bed7"
IterTools = "c8e1da08-722c-5040-9ed9-7db0dc04731e"
KrylovKit = "0b1a1467-8014-51b9-945f-bf0ae24f4b77"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
NDTensors = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf"
NamedGraphs = "678767b0-92e7-4007-89e4-4527a8725b19"
Observers = "338f10d5-c7f1-4033-a7d1-f9dec39bcaa0"
PackageExtensionCompat = "65ce6f38-6b18-4e1d-a461-8949797d7930"
@@ -58,6 +59,7 @@ IsApprox = "0.1"
IterTools = "1.4.0"
KrylovKit = "0.6, 0.7"
NamedGraphs = "0.1.23"
NDTensors = "0.2, 0.3"
Observers = "0.2"
PackageExtensionCompat = "1"
Requires = "1.3"
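The release bumps the package from 0.7.0 to 0.8.0, signaling the breaking renames in the files below, and adds NDTensors as a direct dependency with compat bounds "0.2, 0.3". A minimal sketch of picking up the new release in a downstream environment, assuming a standard registry setup:

```julia
using Pkg

# Pull in the new release; the "0.8" bound here is illustrative and
# resolution depends on your registry state.
Pkg.add(PackageSpec(name="ITensorNetworks", version="0.8"))
Pkg.status("ITensorNetworks")  # confirm which version resolved
```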
12 changes: 6 additions & 6 deletions README.md
@@ -39,7 +39,7 @@ julia> using ITensorNetworks: ITensorNetwork, siteinds
julia> using NamedGraphs: named_grid, subgraph

julia> tn = ITensorNetwork(named_grid(4); link_space=2)
ITensorNetwork{Int64} with 4 vertices:
ITensorNetworks.ITensorNetwork{Int64} with 4 vertices:
4-element Vector{Int64}:
1
2
@@ -90,7 +90,7 @@ and here is a similar example for making a tensor network on a grid (a tensor pr

```julia
julia> tn = ITensorNetwork(named_grid((2, 2)); link_space=2)
ITensorNetwork{Tuple{Int64, Int64}} with 4 vertices:
ITensorNetworks.ITensorNetwork{Tuple{Int64, Int64}} with 4 vertices:
4-element Vector{Tuple{Int64, Int64}}:
(1, 1)
(2, 1)
@@ -125,7 +125,7 @@ julia> neighbors(tn, (1, 2))
(2, 2)

julia> tn_1 = subgraph(v -> v[1] == 1, tn)
ITensorNetwork{Tuple{Int64, Int64}} with 2 vertices:
ITensorNetworks.ITensorNetwork{Tuple{Int64, Int64}} with 2 vertices:
2-element Vector{Tuple{Int64, Int64}}:
(1, 1)
(1, 2)
@@ -139,7 +139,7 @@ with vertex data:
(1, 2) │ ((dim=2|id=723|"1×1,1×2"), (dim=2|id=712|"1×2,2×2"))

julia> tn_2 = subgraph(v -> v[1] == 2, tn)
ITensorNetwork{Tuple{Int64, Int64}} with 2 vertices:
ITensorNetworks.ITensorNetwork{Tuple{Int64, Int64}} with 2 vertices:
2-element Vector{Tuple{Int64, Int64}}:
(2, 1)
(2, 2)
@@ -184,7 +184,7 @@ and edge data:
0-element Dictionaries.Dictionary{NamedGraphs.NamedEdge{Int64}, Vector{ITensors.Index}}

julia> tn1 = ITensorNetwork(s; link_space=2)
ITensorNetwork{Int64} with 3 vertices:
ITensorNetworks.ITensorNetwork{Int64} with 3 vertices:
3-element Vector{Int64}:
1
2
@@ -201,7 +201,7 @@ with vertex data:
3 │ ((dim=2|id=656|"S=1/2,Site,n=3"), (dim=2|id=190|"2,3"))
julia> tn2 = ITensorNetwork(s; link_space=2)
ITensorNetwork{Int64} with 3 vertices:
ITensorNetworks.ITensorNetwork{Int64} with 3 vertices:
3-element Vector{Int64}:
1
2
1 change: 0 additions & 1 deletion src/ITensorNetworks.jl
@@ -36,7 +36,6 @@ include("caches/beliefpropagationcache.jl")
include("contraction_tree_to_graph.jl")
include("gauging.jl")
include("utils.jl")
include("tensornetworkoperators.jl")
include("ITensorsExt/itensorutils.jl")
include("solvers/local_solvers/eigsolve.jl")
include("solvers/local_solvers/exponentiate.jl")
91 changes: 28 additions & 63 deletions src/abstractitensornetwork.jl
@@ -23,7 +23,6 @@ using ITensors:
commoninds,
commontags,
contract,
convert_eltype,
dag,
hascommoninds,
noprime,
@@ -39,18 +38,18 @@
swaptags
using ITensors.ITensorMPS: ITensorMPS, add, linkdim, linkinds, siteinds
using ITensors.ITensorVisualizationCore: ITensorVisualizationCore, visualize
using ITensors.NDTensors: NDTensors, dim
using LinearAlgebra: LinearAlgebra
using LinearAlgebra: LinearAlgebra, factorize
using NamedGraphs:
NamedGraphs,
NamedGraph,
⊔,
directed_graph,
incident_edges,
not_implemented,
rename_vertices,
vertex_to_parent_vertex,
vertextype
using NamedGraphs: directed_graph
using NDTensors: NDTensors, dim
using SplitApplyCombine: flatten

abstract type AbstractITensorNetwork{V} <: AbstractDataGraph{V,ITensor,ITensor} end
@@ -174,41 +173,26 @@ function Base.Vector{ITensor}(tn::AbstractITensorNetwork)
end

# Convenience wrapper
# TODO: Delete this and just use `Vector{ITensor}`, or maybe
# it should output a dictionary or be called `eachtensor`?
itensors(tn::AbstractITensorNetwork) = Vector{ITensor}(tn)
function tensors(tn::AbstractITensorNetwork, vertices=vertices(tn))
return map(v -> tn[v], Indices(vertices))
end
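The old `itensors` wrapper is replaced by a `tensors` accessor keyed by vertex, optionally restricted to a subset of vertices. A minimal usage sketch in the construction style of the README above; whether `tensors` is exported may vary, so an explicit import is used:

```julia
using ITensorNetworks: ITensorNetwork, tensors
using NamedGraphs: named_grid

tn = ITensorNetwork(named_grid(4); link_space=2)
tensors(tn)          # all vertex tensors, keyed by vertex
tensors(tn, [1, 2])  # restricted to vertices 1 and 2
```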

#
# Promotion and conversion
#

function LinearAlgebra.promote_leaf_eltypes(tn::AbstractITensorNetwork)
return LinearAlgebra.promote_leaf_eltypes(itensors(tn))
end

function promote_indtypeof(tn::AbstractITensorNetwork)
return mapreduce(promote_indtype, vertices(tn)) do v
return indtype(tn[v])
return mapreduce(promote_indtype, tensors(tn)) do t
return indtype(t)
end
end

# TODO: Delete in favor of `scalartype`.
function ITensors.promote_itensor_eltype(tn::AbstractITensorNetwork)
return LinearAlgebra.promote_leaf_eltypes(tn)
function NDTensors.scalartype(tn::AbstractITensorNetwork)
return mapreduce(eltype, promote_type, tensors(tn); init=Bool)
end

NDTensors.scalartype(tn::AbstractITensorNetwork) = LinearAlgebra.promote_leaf_eltypes(tn)

# TODO: eltype(::AbstractITensorNetwork) (cannot behave the same as eltype(::ITensors.AbstractMPS))

# TODO: mimic ITensors.AbstractMPS implementation using map
function ITensors.convert_leaf_eltype(eltype::Type, tn::AbstractITensorNetwork)
tn = copy(tn)
vertex_data(tn) .= convert_eltype.(Ref(eltype), vertex_data(tn))
return tn
end
# TODO: Define `eltype(::AbstractITensorNetwork)` as `ITensor`?

# TODO: Mimic ITensors.AbstractMPS implementation using map
# TODO: Implement using `adapt`
function NDTensors.convert_scalartype(eltype::Type{<:Number}, tn::AbstractITensorNetwork)
tn = copy(tn)
@@ -217,7 +201,7 @@ function NDTensors.convert_scalartype(eltype::Type{<:Number}, tn::AbstractITenso
end

function Base.complex(tn::AbstractITensorNetwork)
return NDTensors.convert_scalartype(complex(LinearAlgebra.promote_leaf_eltypes(tn)), tn)
return NDTensors.convert_scalartype(complex(scalartype(tn)), tn)
end
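`LinearAlgebra.promote_leaf_eltypes` is replaced by an `NDTensors.scalartype` overload that promotes the element types of all tensors (starting from `Bool`, the narrowest type), and `Base.complex` now routes through it. A sketch under the assumption that `scalartype` is importable from NDTensors and that a `siteinds`-based construction works as in the README:

```julia
using ITensorNetworks: ITensorNetwork, siteinds
using NDTensors: scalartype
using NamedGraphs: named_grid

s = siteinds("S=1/2", named_grid(3))
tn = ITensorNetwork(s; link_space=2)
scalartype(tn)           # element type promoted across all tensors, e.g. Float64
scalartype(complex(tn))  # `complex` converts via the promoted type, e.g. ComplexF64
```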

#
@@ -251,54 +235,45 @@ end
# Alias
indsnetwork(tn::AbstractITensorNetwork) = IndsNetwork(tn)

function external_indsnetwork(tn::AbstractITensorNetwork)
# TODO: Output a `VertexDataGraph`? Unfortunately
# `IndsNetwork` doesn't allow iterating over vertex data.
function ITensorMPS.siteinds(tn::AbstractITensorNetwork)
is = IndsNetwork(underlying_graph(tn))
for v in vertices(tn)
is[v] = uniqueinds(tn, v)
end
return is
end

# For backwards compatibility
# TODO: Delete this
ITensorMPS.siteinds(tn::AbstractITensorNetwork) = external_indsnetwork(tn)

# External indsnetwork of the flattened network, with vertices
# mapped back to `tn1`.
function flatten_external_indsnetwork(
tn1::AbstractITensorNetwork, tn2::AbstractITensorNetwork
)
is = external_indsnetwork(sim(tn1; sites=[]) ⊔ tn2)
flattened_is = IndsNetwork(underlying_graph(tn1))
for v in vertices(flattened_is)
# setindex_preserve_graph!(flattened_is, unioninds(is[v, 1], is[v, 2]), v)
flattened_is[v] = unioninds(is[v, 1], is[v, 2])
end
return flattened_is
function flatten_siteinds(tn::AbstractITensorNetwork)
# reduce(noncommoninds, tensors(tn))
return unique(flatten([uniqueinds(tn, v) for v in vertices(tn)]))
end

function internal_indsnetwork(tn::AbstractITensorNetwork)
function ITensorMPS.linkinds(tn::AbstractITensorNetwork)
is = IndsNetwork(underlying_graph(tn))
for e in edges(tn)
is[e] = commoninds(tn, e)
end
return is
end

# For backwards compatibility
# TODO: Delete this
ITensorMPS.linkinds(tn::AbstractITensorNetwork) = internal_indsnetwork(tn)
function flatten_linkinds(tn::AbstractITensorNetwork)
return unique(flatten([commoninds(tn, e) for e in edges(tn)]))
end

#
# Index access
#

function neighbor_itensors(tn::AbstractITensorNetwork, vertex)
return [tn[vn] for vn in neighbors(tn, vertex)]
function neighbor_tensors(tn::AbstractITensorNetwork, vertex)
return tensors(tn, neighbors(tn, vertex))
end

function ITensors.uniqueinds(tn::AbstractITensorNetwork, vertex)
return uniqueinds(tn[vertex], neighbor_itensors(tn, vertex)...)
# TODO: Splatting here isn't good, make a version that works for
# collections of ITensors.
return reduce(uniqueinds, Iterators.flatten(([tn[vertex]], neighbor_tensors(tn, vertex))))
end

function ITensors.uniqueinds(tn::AbstractITensorNetwork, edge::AbstractEdge)
@@ -322,14 +297,6 @@ function ITensorMPS.linkinds(tn::AbstractITensorNetwork, edge)
return commoninds(tn, edge)
end

function internalinds(tn::AbstractITensorNetwork)
return unique(flatten([commoninds(tn, e) for e in edges(tn)]))
end

function externalinds(tn::AbstractITensorNetwork)
return unique(flatten([uniqueinds(tn, v) for v in vertices(tn)]))
end

# Priming and tagging (changing Index identifiers)
function ITensors.replaceinds(
tn::AbstractITensorNetwork, is_is′::Pair{<:IndsNetwork,<:IndsNetwork}
@@ -439,9 +406,7 @@ function Base.isapprox(
x::AbstractITensorNetwork,
y::AbstractITensorNetwork;
atol::Real=0,
rtol::Real=Base.rtoldefault(
LinearAlgebra.promote_leaf_eltypes(x), LinearAlgebra.promote_leaf_eltypes(y), atol
),
rtol::Real=Base.rtoldefault(scalartype(x), scalartype(y), atol),
)
error("Not implemented")
d = norm(x - y)
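Taken together, this file renames the index accessors: `external_indsnetwork` and `internal_indsnetwork` become `siteinds` and `linkinds` overloads, `externalinds` and `internalinds` become `flatten_siteinds` and `flatten_linkinds`, and `neighbor_itensors` becomes `neighbor_tensors`. A sketch of the renamed API, with explicit imports since export status may vary:

```julia
using ITensorNetworks:
  ITensorNetwork, flatten_linkinds, flatten_siteinds, neighbor_tensors, siteinds
using ITensors.ITensorMPS: linkinds
using NamedGraphs: named_grid

s = siteinds("S=1/2", named_grid((2, 2)))
tn = ITensorNetwork(s; link_space=2)

siteinds(tn)                  # IndsNetwork of site indices, was `external_indsnetwork`
linkinds(tn)                  # IndsNetwork of bond indices, was `internal_indsnetwork`
flatten_siteinds(tn)          # flat vector of all site indices, was `externalinds`
flatten_linkinds(tn)          # flat vector of all bond indices, was `internalinds`
neighbor_tensors(tn, (1, 1))  # tensors adjacent to a vertex, was `neighbor_itensors`
```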
1 change: 1 addition & 0 deletions src/apply.jl
@@ -1,3 +1,4 @@
using Graphs: has_edge
using LinearAlgebra: qr
using ITensors: Ops
using ITensors:
6 changes: 6 additions & 0 deletions src/approx_itensornetwork/partition.jl
@@ -1,3 +1,9 @@
using DataGraphs: AbstractDataGraph, DataGraph, edge_data, vertex_data
using Dictionaries: Dictionary
using Graphs: AbstractGraph, add_edge!, has_edge, dst, edges, edgetype, src, vertices
using ITensors: ITensor, noncommoninds
using NamedGraphs: NamedGraph, subgraph

function _partition(g::AbstractGraph, subgraph_vertices)
partitioned_graph = DataGraph(
NamedGraph(eachindex(subgraph_vertices)),
2 changes: 1 addition & 1 deletion src/formnetworks/bilinearformnetwork.jl
@@ -57,7 +57,7 @@ function BilinearFormNetwork(
dual_site_index_map=default_dual_site_index_map,
kwargs...,
)
@assert issetequal(externalinds(bra), externalinds(ket))
@assert issetequal(flatten_siteinds(bra), flatten_siteinds(ket))
operator_inds = union_all_inds(siteinds(ket), dual_site_index_map(siteinds(ket)))
O = ITensorNetwork(Op("I"), operator_inds)
return BilinearFormNetwork(O, bra, ket; dual_site_index_map, kwargs...)
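The constructor's precondition is unchanged in substance; it now spells the check with the renamed accessor. A sketch of the invariant it asserts, namely that bra and ket carry the same set of dangling site indices:

```julia
using ITensorNetworks: ITensorNetwork, flatten_siteinds, siteinds
using NamedGraphs: named_grid

s = siteinds("S=1/2", named_grid(3))
bra = ITensorNetwork(s; link_space=2)
ket = ITensorNetwork(s; link_space=2)
issetequal(flatten_siteinds(bra), flatten_siteinds(ket))  # true: same site space
```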
22 changes: 0 additions & 22 deletions src/solvers/contract.jl
@@ -72,11 +72,6 @@ end
Overload of `ITensors.apply`.
"""
function ITensors.apply(tn1::AbstractTTN, tn2::AbstractTTN; init, kwargs...)
if !isone(plev_diff(flatten_external_indsnetwork(tn1, tn2), external_indsnetwork(init)))
error(
"Initial guess `init` needs to primelevel one less than the contraction tn1 and tn2."
)
end
init = init'
tn12 = contract(tn1, tn2; init, kwargs...)
return replaceprime(tn12, 1 => 0)
@@ -85,24 +80,7 @@ end
function sum_apply(
tns::Vector{<:Tuple{<:AbstractTTN,<:AbstractTTN}}; alg="fit", init, kwargs...
)
if !isone(
plev_diff(
flatten_external_indsnetwork(first(first(tns)), last(first(tns))),
external_indsnetwork(init),
),
)
error(
"Initial guess `init` needs to primelevel one less than the contraction tn1 and tn2."
)
end

init = init'
tn12 = sum_contract(Algorithm(alg), tns; init, kwargs...)
return replaceprime(tn12, 1 => 0)
end

function plev_diff(a::IndsNetwork, b::IndsNetwork)
pla = plev(only(a[first(vertices(a))]))
plb = plev(only(b[first(vertices(b))]))
return pla - plb
end
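With the guard and the `plev_diff` helper gone, `apply` and `sum_apply` no longer insist that `init` sit one prime level below the `tn1 * tn2` contraction; `init` is primed internally and the result unprimed. A heavily hedged calling sketch; the constructors `ttn`, `random_ttn`, and `ModelHamiltonians.heisenberg` are assumptions about this version of the package, so check the actual API:

```julia
using ITensorNetworks: ITensorNetworks, siteinds
using ITensors: apply
using NamedGraphs: named_comb_tree

c = named_comb_tree((2, 3))
s = siteinds("S=1/2", c)
H = ITensorNetworks.ttn(ITensorNetworks.ModelHamiltonians.heisenberg(c), s)  # assumed constructor
psi = ITensorNetworks.random_ttn(s; link_space=4)                            # assumed constructor
# `init` no longer needs manual priming; it is primed internally:
psi_out = apply(H, psi; alg="fit", init=psi)
```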
48 changes: 0 additions & 48 deletions src/tensornetworkoperators.jl

This file was deleted.

4 changes: 1 addition & 3 deletions src/treetensornetworks/abstracttreetensornetwork.jl
@@ -285,9 +285,7 @@ function Base.isapprox(
x::AbstractTTN,
y::AbstractTTN;
atol::Real=0,
rtol::Real=Base.rtoldefault(
LinearAlgebra.promote_leaf_eltypes(x), LinearAlgebra.promote_leaf_eltypes(y), atol
),
rtol::Real=Base.rtoldefault(scalartype(x), scalartype(y), atol),
)
d = norm(x - y)
if isfinite(d)
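As in `abstractitensornetwork.jl`, the default `rtol` is now derived from `scalartype` rather than `LinearAlgebra.promote_leaf_eltypes`. For two `Float64`-valued networks the default reduces to:

```julia
julia> Base.rtoldefault(Float64, Float64, 0)
1.4901161193847656e-8
```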