add qtn.enforce_1d_like, use in 1d compression routines
jcmgray committed Apr 25, 2024
1 parent 4bdf174 commit 75a8569
Showing 4 changed files with 141 additions and 17 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/tests.yml
@@ -91,4 +91,4 @@ jobs:
run: pytest tests/test_tensor --cov=quimb --cov-report=xml

- name: Report to codecov
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v4
1 change: 1 addition & 0 deletions docs/changelog.md
@@ -14,6 +14,7 @@ Release notes for `quimb`.
- add [`Tensor.new_ind_pair_with_identity`](quimb.tensor.tensor_core.Tensor.new_ind_pair_with_identity)
- TN2D, TN3D and arbitrary geom classical partition function builders now all support `outputs=` kwarg specifying non-marginalized variables
- add simple dense 1-norm belief propagation algorithm [`D1BP`](quimb.experimental.belief_propagation.d1bp.D1BP)
- add [`qtn.enforce_1d_like`](quimb.tensor.tensor_1d_compress.enforce_1d_like) for checking whether a tensor network is 1D-like, including automatically inserting strings of identities along non-local bonds, expanding the applicability of [`tensor_network_1d_compress`](quimb.tensor.tensor_1d_compress.tensor_network_1d_compress)

**Bug fixes:**

28 changes: 22 additions & 6 deletions quimb/tensor/__init__.py
@@ -58,6 +58,15 @@
gate_TN_1D,
superop_TN_1D,
)
from .tensor_1d_compress import (
enforce_1d_like,
tensor_network_1d_compress,
tensor_network_1d_compress_direct,
tensor_network_1d_compress_dm,
tensor_network_1d_compress_fit,
tensor_network_1d_compress_zipup,
tensor_network_1d_compress_zipup_first,
)
from .tensor_1d_tebd import (
NNI,
TEBD,
@@ -85,8 +94,8 @@
)
from .tensor_arbgeom import (
tensor_network_align,
tensor_network_apply_op_vec,
tensor_network_apply_op_op,
tensor_network_apply_op_vec,
)
from .tensor_arbgeom_tebd import (
LocalHamGen,
@@ -129,15 +138,14 @@
NNI_ham_XY,
SpinHam,
SpinHam1D,
TN_matching,
TN2D_classical_ising_partition_function,
TN2D_corner_double_line,
TN2D_embedded_classical_ising_partition_function,
TN2D_empty,
TN2D_from_fill_fn,
TN2D_rand,
TN2D_rand_symmetric,
TN2D_rand_hidden_loop,
TN2D_rand_symmetric,
TN2D_with_value,
TN3D_classical_ising_partition_function,
TN3D_corner_double_line,
@@ -155,6 +163,7 @@
TN_from_sites_computational_state,
TN_from_sites_product_state,
TN_from_strings,
TN_matching,
TN_rand_from_edges,
TN_rand_reg,
TN_rand_tree,
@@ -244,6 +253,7 @@
"edges_3d_diamond",
"edges_3d_pyrochlore",
"edges_tree_rand",
"enforce_1d_like",
"expec_TN_1D",
"FullUpdate",
"gate_TN_1D",
@@ -337,9 +347,15 @@
"tensor_direct_product",
"tensor_fuse_squeeze",
"tensor_linop_backend",
"tensor_network_1d_compress_direct",
"tensor_network_1d_compress_dm",
"tensor_network_1d_compress_fit",
"tensor_network_1d_compress_zipup_first",
"tensor_network_1d_compress_zipup",
"tensor_network_1d_compress",
"tensor_network_align",
"tensor_network_apply_op_vec",
"tensor_network_apply_op_op",
"tensor_network_apply_op_vec",
"tensor_network_distance",
"tensor_network_fit_als",
"tensor_network_fit_autodiff",
@@ -360,18 +376,18 @@
"TN_from_sites_computational_state",
"TN_from_sites_product_state",
"TN_from_strings",
"TN_matching",
"TN_rand_from_edges",
"TN_rand_reg",
"TN_rand_tree",
"TN_matching",
"TN2D_classical_ising_partition_function",
"TN2D_corner_double_line",
"TN2D_embedded_classical_ising_partition_function",
"TN2D_empty",
"TN2D_from_fill_fn",
"TN2D_rand_hidden_loop",
"TN2D_rand",
"TN2D_rand_symmetric",
"TN2D_rand",
"TN2D_with_value",
"TN3D_classical_ising_partition_function",
"TN3D_corner_double_line",
127 changes: 117 additions & 10 deletions quimb/tensor/tensor_1d_compress.py
@@ -16,17 +16,112 @@
import itertools
import warnings

from autoray import do

from .tensor_arbgeom import tensor_network_apply_op_vec
from .tensor_arbgeom_compress import tensor_network_ag_compress
from .tensor_builder import TN_matching
from .tensor_core import (
Tensor,
TensorNetwork,
ensure_dict,
rand_uuid,
tensor_contract,
)


def enforce_1d_like(tn, site_tags=None, fix_bonds=True, inplace=False):
"""Check that ``tn`` is 1D-like with OBC, i.e. 1) that each tensor has
exactly one of the given ``site_tags``. If not, raise a ValueError. 2) That
there are no hyper indices. And 3) that there are only bonds within sites
or between nearest neighbor sites. This issue can be optionally
automatically fixed by inserting a string of identity tensors.
Parameters
----------
tn : TensorNetwork
The tensor network to check.
site_tags : sequence of str, optional
The tags to use to group and order the tensors from ``tn``. If not
given, uses ``tn.site_tags``.
fix_bonds : bool, optional
Whether to fix the bond structure by inserting identity tensors.
inplace : bool, optional
Whether to perform the fix inplace or not.
Raises
------
ValueError
If the tensor network is not 1D-like.
"""
tn = tn if inplace else tn.copy()

if site_tags is None:
site_tags = tn.site_tags

tag_to_site = {tag: i for i, tag in enumerate(site_tags)}
tid_to_site = {}

def _check_tensor_site(tid, t):
if tid in tid_to_site:
return tid_to_site[tid]

sites = []
for tag in t.tags:
site = tag_to_site.get(tag, None)
if site is not None:
sites.append(site)
if len(sites) != 1:
raise ValueError(
f"{t} does not have one site tag, it has {sites}."
)

return sites[0]

for ix, tids in list(tn.ind_map.items()):
if len(tids) == 1:
# assume outer
continue
elif len(tids) != 2:
raise ValueError(
f"TN has a hyper index, {ix}, connecting more than 2 tensors."
)

tida, tidb = tids
ta = tn.tensor_map[tida]
tb = tn.tensor_map[tidb]

# get which single site each tensor belongs to
sa = _check_tensor_site(tida, ta)
sb = _check_tensor_site(tidb, tb)
if sa > sb:
sa, sb = sb, sa

if sb - sa > 1:
if not fix_bonds:
raise ValueError(
f"Tensor {ta} and {tb} are not nearest "
"neighbors, and `fix_bonds=False`."
)

# not 1d like: bond is not nearest neighbor
# but can insert identities along string to fix
d = ta.ind_size(ix)
ixl = ix
for i in range(sa + 1, sb):
ixr = rand_uuid()
tn |= Tensor(
data=do("eye", d, like=ta.data, dtype=ta.dtype),
inds=[ixl, ixr],
tags=site_tags[i],
)
ixl = ixr

tb.reindex_({ix: ixl})

return tn
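
For illustration, a minimal usage sketch of the new helper (the tags, shapes and explicit site_tags below are assumptions made up for this example, not taken from the commit):

import numpy as np
import quimb.tensor as qtn

# four site-tagged tensors forming a chain, plus one 'long' bond that
# connects site I0 directly to site I3, skipping I1 and I2
tn_example = qtn.TensorNetwork([
    qtn.Tensor(np.random.rand(2, 3, 3), inds=("k0", "a", "long"), tags="I0"),
    qtn.Tensor(np.random.rand(2, 3, 3), inds=("k1", "a", "b"), tags="I1"),
    qtn.Tensor(np.random.rand(2, 3, 3), inds=("k2", "b", "c"), tags="I2"),
    qtn.Tensor(np.random.rand(2, 3, 3), inds=("k3", "c", "long"), tags="I3"),
])
site_tags = ("I0", "I1", "I2", "I3")

# insert identity tensors tagged I1 and I2 along the 'long' bond so that
# only nearest neighbor bonds remain (raises instead if fix_bonds=False)
qtn.enforce_1d_like(tn_example, site_tags=site_tags, inplace=True)

# the 1D compression routines below now perform this check/fix themselves,
# so the network can also simply be handed to them directly
tn_compressed = qtn.tensor_network_1d_compress(
    tn_example, max_bond=8, site_tags=site_tags, method="dm"
)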


def possibly_permute_(tn, permute_arrays):
# possibly put the array indices in canonical order (e.g. when MPS or MPO)
if permute_arrays and hasattr(tn, "permute_arrays"):
@@ -111,13 +206,13 @@ def tensor_network_1d_compress_direct(
``site_tags[0]`` ('right canonical' form) or ``site_tags[-1]`` ('left
canonical' form) if ``sweep_reverse``.
"""
new = tn if inplace else tn.copy()

if site_tags is None:
site_tags = new.site_tags
site_tags = tn.site_tags
if sweep_reverse:
site_tags = tuple(reversed(site_tags))

new = enforce_1d_like(tn, site_tags=site_tags, inplace=inplace)

# contract the first site group
new.contract_tags_(site_tags[0], optimize=optimize)

@@ -256,25 +351,25 @@ def tensor_network_1d_compress_dm(
site_tags = tn.site_tags
if sweep_reverse:
site_tags = tuple(reversed(site_tags))

N = len(site_tags)

ket = enforce_1d_like(tn, site_tags=site_tags, inplace=inplace)

# partition outer indices, and create conjugate bra indices
ket_site_inds = []
bra_site_inds = []
ketbra_indmap = {}
for tag in site_tags:
k_inds_i = []
b_inds_i = []
for kix in tn.select(tag)._outer_inds & tn._outer_inds:
for kix in ket.select(tag)._outer_inds & ket._outer_inds:
bix = rand_uuid()
k_inds_i.append(kix)
b_inds_i.append(bix)
ketbra_indmap[kix] = bix
ket_site_inds.append(tuple(k_inds_i))
bra_site_inds.append(tuple(b_inds_i))

ket = tn.copy()
bra = ket.H
# doing this means forming the norm doesn't do its own mangling
bra.mangle_inner_()
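
As a small sketch of the ket/bra construction used here (an assumed random MPS stands in for the general network):

import quimb.tensor as qtn

ket = qtn.MPS_rand_state(6, bond_dim=4)
bra = ket.H            # conjugate network with the same open 'k{i}' indices
bra.mangle_inner_()    # rename the bra's internal bonds up front, so that
                       # combining bra and ket into the norm network does
                       # not trigger any further automatic mangling
norm_sq = (bra & ket).contract()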
@@ -470,6 +565,8 @@ def tensor_network_1d_compress_zipup(
site_tags = tuple(reversed(site_tags))
N = len(site_tags)

tn = enforce_1d_like(tn, site_tags=site_tags, inplace=inplace)

# calculate the local site (outer) indices
site_inds = [
tuple(tn.select(tag)._outer_inds & tn._outer_inds) for tag in site_tags
@@ -484,7 +581,7 @@
# │ │ │ │ │ │ │ │ │ │
# ▶─▶─▶─▶─▶─▶─▶─▶─▶─○ MPS
#
tn = tn.canonize_around(site_tags[-1], inplace=inplace)
tn = tn.canonize_around_(site_tags[-1])

# zip along the bonds
ts = []
@@ -500,9 +597,13 @@
# .... contract
if Us is None:
# first site
C = tn.select(site_tags[i]).contract(optimize=optimize)
C = tensor_contract(
*tn.select_tensors(site_tags[i]), optimize=optimize
)
else:
C = (Us | tn.select(site_tags[i])).contract(optimize=optimize)
C = tensor_contract(
Us, *tn.select_tensors(site_tags[i]), optimize=optimize
)
# i
# │ │ │ │
# ─▶──□━━━━◀━◀━
@@ -536,7 +637,9 @@
# ─▶ : :
# U*s VH

U0 = (Us | tn.select(site_tags[0])).contract(optimize=optimize)
U0 = tensor_contract(
Us, *tn.select_tensors(site_tags[0]), optimize=optimize
)

if normalize:
# in right canonical form already
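
A sketch of invoking the zip-up scheme drawn in the diagrams above (the MPO/MPS sizes are arbitrary assumptions):

import quimb.tensor as qtn

mps = qtn.MPS_rand_state(8, bond_dim=4)
mpo = qtn.MPO_rand(8, bond_dim=4)

# canonize towards the last site, then sweep truncated SVDs back towards
# the first site as in the diagrams; equivalently method="zipup" of the
# tensor_network_1d_compress dispatcher
out = qtn.tensor_network_1d_compress_zipup(
    mpo & mps, max_bond=8, site_tags=mps.site_tags
)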
@@ -1104,6 +1207,10 @@ def tensor_network_1d_compress_fit(
tn.site_tags for tn in tns if hasattr(tn, "site_tags")
)

tns = tuple(
enforce_1d_like(tn, site_tags=site_tags, inplace=inplace) for tn in tns
)

# choose the block size of the sweeping function
if bsz == "auto":
if max_bond is not None:
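
A sketch of the fitting entry point, which per the snippet above can take several uncontracted networks at once (the sizes, bsz value and list form of the first argument are assumptions for illustration):

import quimb.tensor as qtn

mps = qtn.MPS_rand_state(8, bond_dim=4)
mpo = qtn.MPO_rand(8, bond_dim=4)

# variationally fit a new max_bond=8 MPS to the lazily represented product
# of the MPO and MPS, sweeping over 2-site blocks
fitted = qtn.tensor_network_1d_compress_fit(
    [mpo, mps], max_bond=8, bsz=2
)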
