"""
Module `tools.py` provides functions to write files for the Fortran backend
and run serial simulations from the command line.
Last revision 091911-MLW: use `/` instead of `&end` to terminate the
namelist files, for consistency with nagfor.
"""
import subprocess
from math import ceil, floor
from copy import deepcopy
import os.path
from os import remove, makedirs, symlink
import shutil
import zipfile
import hashlib
import numpy as np
from .mpo import MPO
from .ops import OperatorList
from .obsterms import Observables, check_restore_timeevo
from .state import State, GenerateqInitialState, nint, GenerateParityState
from .dynamics import WriteDynamics, CheckOrDefault, QuenchList
from .dynamics import QuantumCircuits
# Import * is ok, convergence defines all necessary classes in __all__
from .convergence import *
from sys import version_info
if(version_info[0] < 3):
myFileNotFoundError = IOError
else:
myFileNotFoundError = FileNotFoundError
def write_makefile(settings):
"""
    Write out the makefile for compiling the Fortran library. Returns a
    boolean indicating whether MPI is used.
    **Arguments**
    settings : dict
        Key-value pairs for the settings of the compiler. Necessary keys are
        PY, FC, OFLAGS, OPENMP, BLAS_FLAG, LAPACK_FLAG, ARPACK_FLAG,
        HDF5_FLAG, INCFLAGS, LIBFLAGS.
"""
imps = (len(settings['ARPACK_FLAG']) > 0)
hdf5 = (len(settings['HDF5_FLAG']) > 0)
opmp = (len(settings['OPENMP']) > 0)
mpi = (settings['FC'] == 'mpif90')
dbg = (('-O0' in settings['OFLAGS']) or ('-g' in settings['OFLAGS']))
macacc = (('-framework accelerate' in settings['LIBFLAGS'])
or ('-framework accelerate' in settings['LAPACK_FLAG'])
or ('-framework accelerate' in settings['BLAS_FLAG']))
# Strings for generating command line options
simps = 'T' if(imps) else 'F'
sdbg = 'T' if (dbg) else 'F'
sacc = 'T' if (macacc) else 'F'
sh5 = 'T' if(hdf5) else 'F'
modulelist = ['BasicOps', 'IOOps', 'LinearAlgebra', 'ExpokitOps', 'Tensors',
'qTensors', 'ContractionOps', 'DecompositionOps', 'MPOOps',
'MPSOps', 'MPDOOps', 'LPTNOps', 'LanczosOps', 'Convergence',
'ObsOps', 'VariationalOps', 'iMPSOps', 'KrylovOps',
'TDVPOps', 'TEBDOps', 'ZaletelOps', 'TimeEvolutionOps',
'PyInterface']
if(not imps): modulelist.remove('iMPSOps')
#if(not hdf5): modulelist.remove('HDF5Ops')
mstr = ''
mstr += '# Makefile for MPSFortLib\n'
mstr += '# -----------------------\n\n'
mstr += 'export PRJHOME=$(CURDIR)/\n'
mstr += 'export MODDIR=$(PRJHOME)Mods/\n'
mstr += 'export DIRECTORIES=$(MODDIR)\n'
mstr += 'export INCLUDES=$(addprefix -I, $(DIRECTORIES))\n'
mstr += 'export PY = ' + settings['PY'] + '\n'
if(hdf5):
mstr += 'export FC = ' + settings['HDF5_FLAG'] + '\n'
else:
mstr += 'export FC = ' + settings['FC'] + '\n'
mstr += 'export INCFLAGS = ' + settings['INCFLAGS'] + '\n'
mstr += 'export OFLAGS = ' + settings['OFLAGS'] + '\n'
tmp = settings['OPENMP'] + ' '
tmp += settings['ARPACK_FLAG'] + ' '
tmp += settings['LAPACK_FLAG'] + ' '
tmp += settings['BLAS_FLAG'] + ' '
#tmp += settings['HDF5_FLAG'] + ' ' # HDF5 is not used via flags
tmp += settings['LIBFLAGS'] + '\n\n'
tmp = ' '.join(tmp.split())
mstr += 'export LIBFLAGS = ' + tmp + '\n\n'
mstr += '# List objects\n'
mstr += '# ------------\n\n'
mstr += 'export OBJS ='
for elem in modulelist:
mstr += ' $(MODDIR)' + elem + '.o'
mstr += '\n\n'
mstr += 'export EXES ='
for elem in modulelist:
mstr += ' $(MODDIR)' + elem + '_tests'
mstr += '\n\n'
mstr += 'all : moddir_obj MPSMain tests\n\n'
mstr += '# Object compilation rules\n'
mstr += '# ------------------------\n\n'
mstr += 'moddir_obj :\n'
mstr += '\t@echo $(FC)\n'
mstr += '\tcd $(MODDIR); $(PY) TemplateGenerator.py $(OBJS)' \
+ ' --mpi=$(FC) --dbg=' + sdbg + ' --imps=' + simps \
+ ' --macacc=' + sacc + ' --hdf5=' + sh5 + ' \n'
mstr += '\tcd $(MODDIR); make all\n\n'
mstr += 'tests : moddir_obj\n'
mstr += '\t@echo "Build tests"\n'
mstr += '\t@echo ""\n'
mstr += '\t@echo ""\n'
mstr += '\tcd $(MODDIR); make tests\n\n'
mstr += '%Main.o : %Main.f90\n'
if settings['INCFLAGS'] != '': mstr += '\t$(FC) $(OFLAGS) -c $< $(INCLUDES) $(INCFLAGS)\n\n'
else: mstr += '\t$(FC) $(OFLAGS) -c $< $(INCLUDES)\n\n'
mstr += '%_Tests.o : %_Tests.f90\n'
if settings['INCFLAGS'] != '': mstr += '\t$(FC) $(OFLAGS) -c $< $(INCLUDES) $(INCFLAGS)\n\n'
else: mstr += '\t$(FC) $(OFLAGS) -c $< $(INCLUDES)\n\n'
mstr += '# Primary rules\n'
mstr += '# -------------\n\n'
mstr += '%Main : moddir_obj %Main.o\n'
if settings['INCFLAGS'] != '': mstr += '\t$(FC) $(OFLAGS) -o Execute_$@ $@.o $(OBJS) $(LIBFLAGS) $(INCLUDES) $(INCFLAGS)\n\n'
else: mstr += '\t$(FC) $(OFLAGS) -o Execute_$@ $@.o $(OBJS) $(LIBFLAGS) $(INCLUDES)\n\n'
mstr += '%_Tests : moddir_obj %_Tests.o\n'
if settings['INCFLAGS'] != '': mstr += '\t$(FC) $(OFLAGS) -o Execute_$@ $@.o $(OBJS) $(LIBFLAGS) $(INCLUDES) $(INCFLAGS)\n\n'
else: mstr += '\t$(FC) $(OFLAGS) -o Execute_$@ $@.o $(OBJS) $(LIBFLAGS) $(INCLUDES)\n\n'
mstr += '\tmv Execute_$@ ../tests_MPSFortLib/.\n\n'
mstr += 'cleanSub : \n'
mstr += '\tcd $(MODDIR); make clean\n\n'
mstr += 'clean: #removes all old *.o and *.mod files\n'
mstr += '\tmake cleanSub ;\n'
mstr += '\trm -f *.o; rm -f Execute*;\n\n'
fh = open('MPSFortLib/makefile', 'w+')
fh.write(mstr)
fh.close()
return mpi
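# A minimal usage sketch for ``write_makefile``, kept as a comment so the
# module stays import-safe. The compiler and flag values below are
# assumptions for illustration only, not recommended settings:
#
#     settings = {'PY': 'python', 'FC': 'gfortran', 'OFLAGS': '-O3',
#                 'OPENMP': '', 'BLAS_FLAG': '-lblas',
#                 'LAPACK_FLAG': '-llapack', 'ARPACK_FLAG': '',
#                 'HDF5_FLAG': '', 'INCFLAGS': '', 'LIBFLAGS': ''}
#     uses_mpi = write_makefile(settings)    # writes MPSFortLib/makefile
#     # uses_mpi is True only if FC is set to 'mpif90'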
def BuildMPSFortLib(Parallel=False, tests=False, track=True):
"""
    Build the Fortran libraries. The return vector contains the status of
    the make command for serial, parallel, and tests in this order if all
    are compiled.
    **Arguments**
    Parallel : boolean, optional
        Sets the flag for MPI compilation.
        Default to ``False`` (serial code without MPI).
    tests : boolean, optional
        Set whether the tests for the Fortran modules are compiled (True)
        or not (False).
        Default to ``False``.
    track : boolean, optional
        Flag if line numbers are de-referenced in the file `make.log` (True)
        or if the original output goes to standard out (False).
        Default to ``True``.
"""
if(track):
val = execute(['make', 'MPSMain', '>', 'make.log', '2>&1'],
cwd='MPSFortLib')
else:
val = execute(['make', 'MPSMain'], cwd='MPSFortLib')
retvec = [val]
if(Parallel):
if(track):
val = execute(['make', 'MPSParallelMain', '>>', 'make.log', '2>&1'],
cwd='MPSFortLib')
else:
val = execute(['make', 'MPSParallelMain'], cwd='MPSFortLib')
retvec.append(val)
if(tests):
if(track):
val = execute(['make', 'tests', '>>', 'make.log', '2>&1'],
cwd='MPSFortLib')
else:
val = execute(['make', 'tests'], cwd='MPSFortLib')
retvec.append(val)
if(track):
execute(['python', 'deref.py', 'make.log'], cwd='MPSFortLib')
execute(['head', '-n', '40', 'make.log'], cwd='MPSFortLib')
execute(['mv', 'MPSFortLib/make.log', '.'])
return retvec
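# Usage sketch for ``BuildMPSFortLib`` (comment only); assumes the makefile
# was written beforehand with ``write_makefile``:
#
#     retvec = BuildMPSFortLib(Parallel=False, tests=True, track=True)
#     if any(val != 0 for val in retvec):
#         print('Build failed, see make.log for details.')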
def CleanMPSFortLib():
"""
Clean the fortran libraries.
"""
return execute(['make', 'clean'], cwd='MPSFortLib')
def InstallMPSFortLib(INSTALLATION_DIR='/usr/local/bin', Parallel=False):
"""
    Install the Fortran libraries, that is, copy the built executables
    to the installation directory.
    **Arguments**
    INSTALLATION_DIR : str, optional
        Path of the installation directory.
        Default to `/usr/local/bin`.
    Parallel : boolean, optional
        Flag for MPI installation. If ``True``, the MPI executable is
        copied to the installation directory as well.
        Default to ``False``.
"""
stat = execute(['cp', 'MPSFortLib/Execute_MPSMain',
INSTALLATION_DIR + '/' + 'Execute_MPSMain'])
if(Parallel):
stat_par = execute(['cp', 'MPSFortLib/Execute_MPSParallelMain',
INSTALLATION_DIR + '/' + 'Execute_MPSParallelMain'])
stat = 0 if((stat == 0) and (stat_par == 0)) else 1
return stat
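# Usage sketch (comment only); the installation prefix is an assumption for
# illustration and may require appropriate permissions:
#
#     stat = InstallMPSFortLib(INSTALLATION_DIR='/usr/local/bin',
#                              Parallel=False)
#     # stat is 0 if the copy succeeded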
def InstallMPSCLIUtility(INSTALLATION_DIR='/usr/local/bin'):
    """
    Install the command line interface utility, that is, copy the contents
    of the `MPSCLI` directory to the installation directory.
    **Arguments**
    INSTALLATION_DIR : str, optional
        Path of the installation directory.
        Default to `/usr/local/bin`.
    """
    stat = execute(['cp', '-r', 'MPSCLI/*', INSTALLATION_DIR])
return stat
def execute(command, cwd=None):
"""
Execute the given command on the command line.
**Arguments**
command : list of strings
        List with all parts of the command. Parts separated by whitespace
        form individual entries in the list.
    cwd : str, optional
        Working directory in which the command is executed.
        Default to `None` (the current directory).
"""
tmp = subprocess.list2cmdline(command)
print(tmp)
return subprocess.call(tmp, shell=True, cwd=cwd)
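# Example (comment only): the command list is joined into a single string
# and run through the shell, so redirections can be passed as separate
# list entries:
#
#     status = execute(['ls', '-l', '>', 'listing.txt', '2>&1'], cwd='.')
#     # status is the return code of the shell command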
def submit_job(script_name, comp_info):
"""
Submits a job to the cluster.
**Arguments**
    script_name : str
        Name of the pbs/slurm script to be submitted.
comp_info : dictionary
dictionary with cluster settings, of interest is the key
``queueing`` to decide whether `pbs` or `slurm` is used.
"""
if(comp_info['queueing'] == 'pbs'):
cmdline = ['qsub', script_name]
elif(comp_info['queueing'] == 'slurm'):
cmdline = ['sbatch', script_name]
else:
        raise ValueError('Unknown value for queueing system.')
if(os.path.isfile(script_name)):
execute(cmdline)
else:
raise ValueError('Script to be submitted does not exist.')
return
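# Usage sketch for ``submit_job`` (comment only); the script name and the
# cluster settings are hypothetical:
#
#     comp_info = {'queueing': 'slurm'}
#     submit_job('job_script.sh', comp_info)    # runs: sbatch job_script.sh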
def runMPS(infileList, RunDir=None, Debug=False, customAppl=None):
"""
Run the MPS application with the given list of input files.
**Arguments**
    infileList : list
        List of the main files passed to the Fortran executable as
        command line arguments. Each entry provides two arguments, the
        main file name and the trajectory index.
RunDir : str, optional
Specifies path of the fortran-executable (local or global
installation). This is available for default-simulation and
custom application. (@Developers: Debugging with valgrind always
works with a local installation.)
`None` : if executable available within local folder, use local
installation, otherwise assume global installation, default
'' (empty string) : for global installation
'./' : local installation
PATH+'/' : path to executable ending on slash `/`. Additional steps
may be necessary when using this.
Debug : boolean, optional
Developers only:
false : run usual simulation, default
true : run debugging with valgrind using local installation
If the folder MPSFortLib is visible, then line numbers from
        f90 modules are de-referenced to the template line numbers.
(visible = actual MPSFortLib folder in the present working
directory, a symbolic link to MPSFortLib, ...)
customAppl : str, optional
define custom executable. Global and local installation work
as before. Custom applications cannot run with valgrind.
default to `None` (running default executable)
"""
if customAppl is not None:
resList=[]
if(RunDir is None):
if(os.path.isfile('./' + customAppl)):
                # Local installation
appname = './' + customAppl
else:
# Global installation
appname = customAppl
else:
# User-defined path
appname = RunDir + customAppl
for infile in infileList:
cmdline = [appname]
cmdline += [infile[0], infile[1]]
ret_val=execute(cmdline)
if ret_val!=0:
raise MPSFortLibError(ret_val)
resList.append(ret_val)
else:
if Debug:
resList=[]
counter=0
for infile in infileList:
cmdline = ['valgrind', '--tool=memcheck', '--leak-check=yes',
'--track-origins=yes', '--show-reachable=yes',
'./Execute_MPSMain', infile[0], infile[1], '>',
'memcheck_' + str(counter) + '.log', '2>&1']
ret_val = execute(cmdline)
try:
execute(['python', 'deref.py', '../memcheck_' + str(counter)
+ '.log'], cwd='MPSFortLib')
except OSError:
                    # MPSFortLib is not necessarily visible, so do not
                    # throw an error (python 2.x)
print('Line numbers in F90 not de-referenced.')
except FileNotFoundError:
# python 3.x
print('Line numbers in F90 not de-referenced.')
if(ret_val != 0):
raise MPSFortLibError(ret_val)
resList.append(ret_val)
counter = counter + 1
else:
resList=[]
            if RunDir is not None:
                # User-defined path (e.g. './' for a local installation)
                appname = RunDir + 'Execute_MPSMain'
            else:
                # Global installation (executable found via PATH)
                appname = 'Execute_MPSMain'
for infile in infileList:
cmdline = [appname]
cmdline += [infile[0], infile[1]]
ret_val=execute(cmdline)
if ret_val!=0:
raise MPSFortLibError(ret_val)
resList.append(ret_val)
return resList
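# Usage sketch for ``runMPS`` (comment only). Each entry of the list
# provides the main file name and the trajectory index, as produced by
# ``WriteFiles``:
#
#     MainFiles = WriteFiles(Parameters, Operators, H)
#     resList = runMPS(MainFiles)
#     # or, for a debugging run with valgrind on a local build:
#     # resList = runMPS(MainFiles, Debug=True)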
def WriteHparams(fileStub, Parameters, IntHparams):
"""
    Write out the Hamiltonian parameters to a Fortran-readable file.
**Arguments**
fileStub : string
basis for the filename extended with `HamiParams.dat`
    Parameters : dictionary
        Keys correspond to values of `IntHparams[i]` for integers `i`;
        values are floats for translationally invariant models or
        lists/np.ndarrays for models without translational invariance.
    IntHparams : dictionary
        The key `num` defines the number of parameters. Integer keys from
        0 to `num` - 1 map to the names of the Hamiltonian parameters.
**Details**
The part written by one call of ``WriteHparams`` looks like
1) number of parameters (integer)
2) number of sites (1=translational invariance)
3) floats for coupling
"""
Hpfilename=fileStub+'HamiParams.dat'
Hpfile=open(Hpfilename,'w')
#Write number of Hamiltonian parameters
Hpfile.write('%16i'%(IntHparams['num'])+'\n')
for i in range(IntHparams['num']):
if IntHparams[i] in Parameters.keys():
#Write out site-valued term:
if(hasattr(Parameters[IntHparams[i]], '__len__')):
tstr=''
for j in Parameters[IntHparams[i]]:
tstr+='%30.15E'%(j)
Hpfile.write('%16i'%(len(Parameters[IntHparams[i]]))+'\n')
Hpfile.write(tstr+'\n')
#Write out uniform term
else:
Hpfile.write('%16i'%(1)+'\n')
Hpfile.write('%30.15E'%(Parameters[IntHparams[i]])+'\n')
Hpfile.close()
return Hpfilename
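# A commented sketch of the expected input structure (hypothetical names):
#
#     IntHparams = {'num': 2, 0: 'J', 1: 'h'}
#     Parameters = {'J': 1.0, 'h': [0.5, 0.5, 0.7, 0.5]}
#     fname = WriteHparams('Job01', Parameters, IntHparams)
#     # writes 'Job01HamiParams.dat' with one uniform and one
#     # site-resolved coupling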
def WriteMainFile(p, cmplx_op, hashlist, MainHashList, nqns=0, npns=0,
Operators=None, PostProcess=False):
"""
Subprogram to write main files from a parameter dictionary p
for a MPS simulation. Intended for internal use only.
**Arguments**
p : dictionary
p contains the parameter settings for the simulation
specified through various keys.
cmplx_op : boolean
True : at least one operator is complex valued
False : all operators are real
hashlist : list of strings
contains a list of hashes to be merged to one single hash identifying
the variational/static search of MPS (containing the information about
Hamiltonian, the couplings, convergence settings etc ...)
MainHashList : list of strings
List contains the hashes of previous main files to check
    nqns : integer, optional
        Number of Abelian quantum numbers.
        Default to 0.
    npns : integer, optional
        Number of discrete quantum numbers.
        Default to 0.
    Operators : instance of OperatorList, optional
        Default to None.
    PostProcess : logical, optional
        If True, files are not copied for quantum trajectories.
        Default to False.
**Details**
The file `...qnumbers.dat` is written during this function.
"""
writedir=p['Write_Directory']
outdir=p['Output_Directory']
# Write out quantum numbers
# -------------------------
if(nqns + npns > 0):
# Check length of the lists
if(len(p['Abelian_quantum_numbers']) != nqns):
raise Exception("Number of Abelian quantum numbers passed in " +
"does not match the number of generators!")
if(len(p['Discrete_quantum_numbers']) != npns):
raise Exception("Number of Discrete quantum numbers passed in " +
"does not match the number of generators!")
# Check that the shifted and scaled quantum numbers given correspond
# to integers. This, in essence, states that they are valid quantum
# numbers for the given system size.
qstr = ''
qnums = []
pnums = []
for ii in range(nqns):
qnum = ((p['Abelian_quantum_numbers'][ii] -
p['L'] * Operators['q_shifts'][ii]) /
Operators['q_scales'][ii])
if(abs(float(nint(qnum)) - float(qnum)) > 0.00000001):
raise Exception("The shifted and scaled quantum number " +
str(qnum) + " computed from (" +
str(p['Abelian_quantum_numbers'][ii]) + "-" +
str(p['L'] * Operators['q_shifts'][ii]) +
")/" + str(Operators['q_scales'][ii]) + " " +
"\nis not an integer and so does not " +
"represent a valid quantum number!\n" +
"If you are certain this is an error file " +
"a bug report!")
qnum = int(qnum)
qnums.append(qnum)
qstr += '%16i'%(qnum)
for ii in range(npns):
jj = nqns + ii
qnum = ((p['Discrete_quantum_numbers'][ii] -
p['L'] * Operators['q_shifts'][jj]) /
Operators['q_scales'][jj])
if(ceil(qnum) != floor(qnum)):
raise Exception("The shifted and scaled quantum number " +
str(qnum) + " computed from (" +
                            str(p['Discrete_quantum_numbers'][ii]) + "-" +
str(p['L'] * Operators['q_shifts'][jj]) +
")/" + str(Operators['q_scales'][jj]) + " " +
"\nis not an integer and so does not " +
"represent a valid quantum number!\n" +
"If you are certain this is an error file " +
"a bug report!")
qnum = int(qnum)
pnums.append(qnum)
qstr += '%16i'%(qnum)
qfilename = writedir + p['job_ID'] + p['unique_ID'] + 'qnumbers.dat'
qfile = open(qfilename,'w')
qfile.write(qstr + '\n')
qfile.close()
if(not PostProcess and ('QT' in p)): copy_files_qt(qfilename, p['QT'])
p['int_files'].append(qfilename)
if(not 'State' in p):
# generate initial state guess appropriate for iMPS with the
# desired fixed quantum numbers
p['int_files'].append(GenerateqInitialState(writedir + p['job_ID'] +
p['unique_ID'], qnums,
pnums, p['L'],
Operators))
if(not PostProcess and ('QT' in p)):
copy_files_qt(p['int_files'][-1], p['QT'])
else:
p['int_files'].append(GenerateParityState(writedir + p['job_ID'] +
p['unique_ID'], p['L'],
p['N0'], p['N1']))
# Write main file
# ---------------
# Generate the single hash of all the parameters [1st always on
# parameters, then on Abelian operators if available, finally on
# Hamiltonian.
hstr = generate_full_hash(hashlist)
    # Check whether the initial state is calculated by an earlier simulation
timeevo_initial = (len(p['timeevo_mps_initial']) == 0) \
and (len(p['timeevo_mpdo_initial']) == 0)
    wait4state = (hstr in MainHashList) and timeevo_initial
MainHashList.append(hstr)
MainFileName=writedir+p['job_ID']+p['unique_ID']+'Main.nml'
FinishedFileName=outdir+p['job_ID']+p['unique_ID']+'Finished.dat'
if p['strict'] is True:
if os.path.isfile(MainFileName):
raise Exception("Duplicate Main file already exists! Delete it or turn off strict to overwrite!")
if os.path.isfile(FinishedFileName):
raise Exception("Duplicate final output file already exists! Move it or turn off strict to overwrite!")
MainFile=open(MainFileName,'w')
MainFile.write('&SystemSettings\n')
MainFile.write('job_ID="'+p['job_ID']+'", ')
MainFile.write('unique_ID="'+p['unique_ID']+'", ')
MainFile.write('writedir="'+writedir+'", ')
MainFile.write('outdir="'+outdir+'", ')
MainFile.write('nqns='+str(nqns)+', ')
MainFile.write('npns='+str(npns)+', ')
MainFile.write('ll=%16i'%(p['L'])+', ')
MainFile.write('ne=%16i'%(p['n_excited_states'])+', ')
MainFile.write('verbose=%16i'%(p['verbose'])+', ')
if p['MPS']:
MainFile.write('DoMPS=.True., ')
else:
MainFile.write('DoMPS=.False., ')
if p['eMPS']:
MainFile.write('DoeMPS=.True., ')
else:
MainFile.write('DoeMPS=.False., ')
if p['Dynamics']:
MainFile.write('DoDynamics=.True., ')
else:
MainFile.write('DoDynamics=.False., ')
if(p['MPDO+']):
MainFile.write('mpdoplus=.True., ')
else:
MainFile.write('mpdoplus=.False., ')
if p['simtype']=='Infinite':
MainFile.write('simtype="I", ')
elif p['simtype']=='Finite':
MainFile.write('simtype="F", ')
else:
MainFile.write('simtype="T", ')
if(cmplx_op):
MainFile.write('cmplx_op=.True., ')
else:
MainFile.write('cmplx_op=.False., ')
MainFile.write('paramhash="' + hstr + '", ')
if(wait4state):
MainFile.write('wait4state=.True., ')
else:
MainFile.write('wait4state=.False., ')
if(hasattr(p['statics_initial'], '__call__')):
MainFile.write('statics_initial="' + p['statics_initial']() + '", ')
else:
MainFile.write('statics_initial="' + p['statics_initial'] + '", ')
MainFile.write('timeevo_mps_initial="' + p['timeevo_mps_initial'] + '", ')
MainFile.write('timeevo_mpdo_initial="' + p['timeevo_mpdo_initial'] + '", ')
p['timeevo_restore'] = check_restore_timeevo(p)
if(p['timeevo_restore']):
MainFile.write('timeevo_restore=.True.,')
else:
MainFile.write('timeevo_restore=.False.,')
if(p['logfile']):
MainFile.write('logfile=.True.,')
else:
MainFile.write('logfile=.False.,')
if(p['hdf5']):
MainFile.write('use_h5=.True.')
else:
MainFile.write('use_h5=.False.')
MainFile.write('/\n\n')
MainFile.close()
if('QT' in p): copy_files_qt(MainFileName, p['QT'])
return MainFileName, hstr
def generate_full_hash(hashlist):
"""
Generate the full hash for a simulation based on a list of hashes.
**Arguments**
hashlist : list of strings
contains a list of hashes to be merged to one single hash identifying
the variational/static search of MPS (containing the information about
Hamiltonian, the couplings, convergence settings etc ...)
"""
hstr = ""
for hashelem in hashlist:
hstr += hashelem
hh = hashlib.sha512()
hh.update(str.encode(hstr))
hstr = hh.hexdigest()[:50]
return hstr
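# Example (comment only): the individual hashes are concatenated and hashed
# again with SHA-512; only the first 50 hex characters are kept:
#
#     hstr = generate_full_hash(['hash_of_parameters', 'hash_of_operators'])
#     # len(hstr) == 50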
def WriteFiles(Parameters, Operators, HamiltonianMPO, PostProcess=False,
EDSim=False):
"""
Write out files for use by the Fortran routines.
**Arguments**
    Parameters : single dictionary or list of dictionaries
        Arguments in the dictionary are `job_ID` (mandatory), `unique_ID`,
        `Write_Directory` (optional), `Output_Directory` (optional),
        `L` (system size, mandatory), `verbose`, `MPSObservables`,
        `MPSConvergenceParameters`; see the table in the Details section
        for the complete list.
Operators : dictionary
dictionary containing the matrices for the operators. The keys are
the names of the operators as string.
HamiltonianMPO : instance of `MPO` or list of instances of `MPO`
containing the Hamiltonian. If list, it has to have the same
length as the `Parameters`-list. If `Parameters` is a list,
`HamiltonianMPO` can still be a single instance of `MPO`.
PostProcess : boolean
function distinguishes between writing files for the fortran simulation
        (`PostProcess=False`) and running the postprocessing of an existing
        data set (`PostProcess=True`).
EDSim : boolean
Specify that the setup is for an ED simulation. This suppresses
        writing temporary files, similar to `PostProcess=True`, overriding
        the `PostProcess` flag.
**Details**
The following keys exist in Parameters:
+---------------------------+----------------------------------------+-----+
| Key (strings) | Description | opt |
+===========================+========================================+=====+
| L | Integer value specifying the length | N |
| | of the lattice for finite systems and | |
| | the length of the unit cell for | |
| | infinite systems | |
+---------------------------+----------------------------------------+-----+
| job_ID | String specifying a simulation. For | N |
| | batched simulations, ``'job_ID'`` | |
| | specifies a group of simulations | |
| | which are distinguished by their | |
| | ``'unique_ID'``. | |
+---------------------------+----------------------------------------+-----+
| simtype | Takes values of ``'Finite'`` or | N |
| | ``'Infinite'`` to toggle between | |
| | finite and infinite-size ground state | |
| | search, see Sec. :ref:`sec_imps`. | |
| | For finite temperature evolutions via | |
| | imaginary time evolution choose | |
| | ``'FiniteImag'``. | |
| | Defaults to ``'Finite'``. | |
+---------------------------+----------------------------------------+-----+
| unique_ID | String which distinguishes a | Y |
| | simulation from all others with the | |
| | same ``'job_ID'``. Defaults to the | |
| | empty string. | |
+---------------------------+----------------------------------------+-----+
| Write_Directory | Directory where files for internal | Y |
| | OSMPS use are written. Defaults to | |
| | current directory. | |
+---------------------------+----------------------------------------+-----+
| Output_Directory | Directory where output files are | Y |
| | written. Defaults to current | |
| | directory. | |
+---------------------------+----------------------------------------+-----+
| verbose | Integer specifying the level of code | Y |
| | output, with ``0`` being the least | |
| | amount of output and ``2`` the most. | |
| | Defaults to ``0``. | |
+---------------------------+----------------------------------------+-----+
| logfile                   | True writes a separate logfile; with   | Y   |
|                           | False, information goes to standard    |     |
|                           | out. Default to False.                 |     |
+---------------------------+----------------------------------------+-----+
| hdf5 | Settings for the file type of the | Y |
| | observables. If false, use human | |
| | readable text files. If true, use | |
|                           | HDF5 files, which implies that the     |     |
| | F90 source was compiled with HDF5. | |
| | Right now, the implementation is for | |
| | serial simulations / single thread | |
| | and was not tested for any scenario | |
| | with multiple simulations at a time, | |
| | e.g., submitting two serial | |
| | simulations to a cluster. MPI is | |
| | disabled internally in the MPSPyLib. | |
|                           | For iMPS, only False is allowed.       |     |
| | Default to False. | |
+---------------------------+----------------------------------------+-----+
| strict | Defines the overwrite policy for main | Y |
| | and output files. When True files are | |
| | not overwritten but an error raised. | |
| | Default to False. | |
+---------------------------+----------------------------------------+-----+
| MPSObservables | An object of the | Y |
| | :py:class:`Obs.Observables` class | |
| | specifying ground state measurements, | |
| | see Sec. :ref:`sec_observables`. | |
| | Defaults to no measurements. | |
+---------------------------+----------------------------------------+-----+
| MPSConvergenceParameters | An object of the :py:class:`tools.MPS\ | Y |
| | ConvergenceParameters` class | |
| | specifying the convergence criteria | |
| | imposed on variational ground state | |
| | search, see Sec. :ref:`sec_vmps`. | |
| | Defaults to the default values of | |
| | :py:class:`tools.MPSConvergence\ | |
| | Parameters` (see class description) | |
+---------------------------+----------------------------------------+-----+
| n_excited_states | Integer number of excited states to | Y |
| | be found using eMPS, see Sec. | |
| | :ref:`sec_emps`. Defaults to 0. | |
+---------------------------+----------------------------------------+-----+
| eMPSConvergenceParameters | An object of the :py:class:`tools.MPS\ | Y |
| | ConvergenceParameters` class | |
| | specifying the convergence criteria | |
| | imposed on variational excited state | |
| | search, see Sec. :ref:`sec_emps`. | |
| | Defaults to the default values of | |
| | :py:class:`tools.MPSConvergence\ | |
| | Parameters` | |
| | (see class description) | |
+---------------------------+----------------------------------------+-----+
| eMPSObservables | An object of the | Y |
| | :py:class:`Obs.Observables` class | |
| | specifying excited state measurements, | |
| | see Sec. :ref:`sec_observables`. | |
| | Defaults to no measurements. | |
+---------------------------+----------------------------------------+-----+
| Quenches | An object of the | Y |
| | :py:class:`Dynamics.QuenchList` class | |
| | specifying a dynamical process, see | |
| | Sec. :ref:`sec_dynamics`. | |
| | Defaults to ``None``. | |
+---------------------------+----------------------------------------+-----+
| DynamicsObservables | An object of the | Y |
| | :py:class:`Obs.Observables` class | |
| | class specifying measurements of the | |
| | time-evolved state, see Sec. | |
| | :ref:`sec_observables`. Defaults to | |
| | no measurements. | |
+---------------------------+----------------------------------------+-----+
| timeevo_restore | True : try to continue interrupted | Y |
| | time evolution; False : start time | |
| | evolution at t=0. | |
| | Default to False. | |
+---------------------------+----------------------------------------+-----+
| timeevo_mps_initial       | Can specify an input state from file   | Y   |
| | instead of ground state. | |
| | Default empty string (Ground state). | |
+---------------------------+----------------------------------------+-----+
| statics_initial           | Define an initial guess for the ground | Y   |
| | state search. If not specified, guess | |
| | is generated automatically. | |
+---------------------------+----------------------------------------+-----+
| timeevo_mpdo_initial | Initial guess finite-T evolution with | Y |
| | symmetries or initial state for time | |
| | evolution. | |
+---------------------------+----------------------------------------+-----+
| Abelian_generators | A list of the keys of generators of | Y |
| | Abelian symmetries to be used. See | |
| | Sec. :ref:`sec_Abelian` for more on | |
| | the use of Abelian symmetries. | |
| | Defaults to no symmetries. | |
+---------------------------+----------------------------------------+-----+
| Abelian_quantum_numbers | A list of the quantum numbers | Y |
| | corresponding to the generators in | |
| | ``'Abelian_generators'``. See Sec. | |
| | :ref:`sec_Abelian` for more on the | |
| | use of Abelian symmetries. Defaults | |
| | to no quantum numbers. | |
+---------------------------+----------------------------------------+-----+
| Discrete_generators       | Not enabled yet; raises an exception.  |     |
+---------------------------+----------------------------------------+-----+
| MPDO+ | Logical, used to switch to MPDO | Y |
| | algorithms in future. | |
| | Default to False. | |
+---------------------------+----------------------------------------+-----+
| QT | Sampling over realizations of the | Y |
| | same simulation, e.g. in future used | |
| | for quantum trajectories. | |
| | Default: 1 simulation (not present) | |
+---------------------------+----------------------------------------+-----+
The following keys are set internally:
+---------------+----------------------------------------------------------+
| Key (strings) | Description |
+===============+==========================================================+
| Dynamics | True if key `Quenches` available, else default is False |
+---------------+----------------------------------------------------------+
| MPS | if `MPSConvergenceParameters` given, `MPS` must be True. |
| | Default value is True except for the dynamics with a |
|               | given initial state defined through the key ``State``.  |
+---------------+----------------------------------------------------------+
| eMPS | if `n_excited_states` equal to 0, `MPS=True` and |
| | `eMPS=False` |
| | If > 0, then `MPS=True` and `eMPS=True`. If |
| | `n_excited_states` not given, default to False. |
+---------------+----------------------------------------------------------+
| int_files | empty list set in this function, changed during the |
| | `WriteMainFile`. |
+---------------+----------------------------------------------------------+
| hash | The hash of the static simulations. |
+---------------+----------------------------------------------------------+
"""
# Overwrite PostProcess if EDSim
if(EDSim): PostProcess = True
# Checks on whole lists
# ---------------------
if not isinstance(Parameters,list):
Parameters=[Parameters]
if isinstance(HamiltonianMPO,list):
if len(HamiltonianMPO)!=len(Parameters):
raise Exception("Length of HamiltonianMPO list does not match " +
"length of Parameters list in WriteFiles!")
if(isinstance(Operators, list)):
# Indexing operators using unique integer keys is done afterwards
raise Exception("Operators should not be a list in WriteFiles!")
elif(isinstance(Operators, dict)):
# Convert dictionary to `OperatorList` in order to use class methods
Operators = OperatorList(Operators)
ids=[]
hashlist1 = []
# Check and initialize defaults for each simulation
# -------------------------------------------------
ii = 0
for p in Parameters:
# Checking on coherent input
# --------------------------
# Check on mandatory parameters
if(not 'L' in p):
raise Exception("Missing L from Parameters!")
if(not 'job_ID' in p):
raise Exception("Please name your job (add a key job_ID to the " +
"Parameters dictionary)!")
if(not 'simtype' in p):
raise Exception("Please provide simtype for Parameters.")
# Checks around simtype
if(p['simtype'] == 'Infinite'):
if('MPSConvergenceParameters' in p):
if(not isinstance(p['MPSConvergenceParameters'],
iMPSConvParam)):
raise Exception("simtype=Infinite, but not iMPSConvParam!")
else:
DefaultMPSConvParam = iMPSConvParam()
if('n_excited_states' in p):
if(p['n_excited_states'] != 0):
raise Exception("Number of excited states not 0 for " +
"Infinite.")
if('eMPSConvergenceParameters' in p):
raise Exception("Specified eMPSConvergenceParameters for " +
"Infinite.")
if('eMPSObservables' in p):
raise Exception("Specified eMPSObservables for Infinite.")
if('Quenches' in p):
raise Exception("Specified Quenches for Infinite.")
if('hdf5' in p):
if(p['hdf5']): raise Exception('Cannot use HDF5 for iMPS.')
elif(p['simtype'] == 'Finite'):
if('MPSConvergenceParameters' in p):
if(not isinstance(p['MPSConvergenceParameters'],
(MPSConvParam, FiniteTConvParam))):
raise Exception("simtype=Finite, but ConvParam not "
+ "recognized.")
else:
DefaultMPSConvParam = MPSConvParam()
elif(p['simtype'] == 'FiniteImag'):
if('MPSConvergenceParameters' in p):
if((not isinstance(p['MPSConvergenceParameters'],
FiniteTConvParam)) and
(not isinstance(p['MPSConvergenceParameters'],
ImagConvParam))):
raise Exception("simtype=FiniteImag, but not " +
"FiniteTConvParam or ImagConvParam.")
else:
DefaultMPSConvParam = FiniteTConvParam()
if('n_excited_states' in p):
if(p['n_excited_states'] != 0):
raise Exception("Number of excited states not 0 for " +
"FiniteImag.")
if('eMPSConvergenceParameters' in p):
raise Exception("Specified eMPSConvergenceParameters for " +
"FiniteImag.")
if('eMPSObservables' in p):
raise Exception("Specified eMPSObservables for FiniteImag.")
else:
raise Exception("Unknown simtype!")
if(('DynamicsObservables' in p) and (not 'Quenches' in p)):
raise Exception("Specified dynamic observables, but no Quenches.")
# Check directories and create if necessary
CheckOrDefault(p,'Write_Directory','')
CheckOrDefault(p,'Output_Directory','')
if(not os.path.exists(p['Write_Directory'])):
makedirs(p['Write_Directory'])
if(not os.path.exists(p['Output_Directory'])):
makedirs(p['Output_Directory'])
# Take care of jobnames
CheckOrDefault(p,'unique_ID','')
ids.append(p['job_ID']+p['unique_ID'])
#determine which algorithms are being performed
#dynamics is determined by the presence of Quenches
if 'Quenches' in p:
p['Dynamics']=True
#If an initial state is specified with dynamics, no MPS, no eMPS
if 'State' in p:
CheckOrDefault(p,'MPS',False)
CheckOrDefault(p,'eMPS',False)
else:
p['Dynamics']=False
if 'MPSConvergenceParameters' in p:
CheckOrDefault(p,'MPS',True)
if not p['MPS']:
raise Exception("Inputs specify both the use and non-use of variaitonal state search!")
if 'n_excited_states' in p:
CheckOrDefault(p,'MPS',True)
if p['n_excited_states']>0:
CheckOrDefault(p,'eMPS',True)
else:
CheckOrDefault(p,'eMPS',False)
else:
CheckOrDefault(p,'eMPS',False)
CheckOrDefault(p,'MPS',True)
CheckOrDefault(p,'eMPS',False)
#Use default MPS convergence parameters if not specified
if p['MPS'] and 'MPSConvergenceParameters' not in p:
p['MPSConvergenceParameters'] = DefaultMPSConvParam
#Use empty MPS observables if not specified
if p['MPS'] and 'MPSObservables' not in p:
MPSObs=Observables(Operators)
p['MPSObservables']=MPSObs
#Use default eMPS convergence parameters if not specified
if p['eMPS'] and 'eMPSConvergenceParameters' not in p:
p['eMPSConvergenceParameters'] = MPSConvParam()
#Use empty eMPS observables if not specified
if p['eMPS'] and 'eMPSObservables' not in p:
MPSObs=Observables(Operators)
p['eMPSObservables']=MPSObs
#Use empty dynamics observables if not specified
if p['Dynamics'] and 'DynamicsObservables' not in p:
MPSObs=Observables(Operators)
p['DynamicsObservables']=MPSObs
CheckOrDefault(p,'strict',False)
CheckOrDefault(p,'n_excited_states',0)
CheckOrDefault(p,'verbose',0)
CheckOrDefault(p, 'logfile', False)
CheckOrDefault(p, 'hdf5', False)
CheckOrDefault(p,'Dynamics',False)
if(p['Dynamics']):
if(isinstance(p['Quenches'], QuantumCircuits)):
pass
# Could check for Lindblad operators as well
elif(isinstance(p['Quenches'].data[0]['ConvergenceParameters'],
MpdoplusConvParam)):
p['MPDO+'] = True
CheckOrDefault(p, 'MPDO+', False)
CheckOrDefault(p, 'statics_initial', '')
CheckOrDefault(p, 'timeevo_mps_initial', '')
CheckOrDefault(p, 'timeevo_mpdo_initial', '')
CheckOrDefault(p, 'timeevo_restore', False)
# Checks and flags for operators and symmetries
p['has_complex_op'] = Operators.has_complex
CheckOrDefault(p, 'Abelian_generators', [])
CheckOrDefault(p, 'Discrete_generators', [])
CheckOrDefault(p, 'Abelian_quantum_numbers', [])
if(not isinstance(p['Abelian_quantum_numbers'], list)):
p['Abelian_quantum_numbers'] = [p['Abelian_quantum_numbers']]
CheckOrDefault(p, 'Discrete_quantum_numbers', [])
if(not isinstance(p['Discrete_quantum_numbers'], list)):
p['Discrete_quantum_numbers'] = [p['Discrete_quantum_numbers']]
# Generate the hash for the actual parameters
# -------------------------------------------
#
        # For convenient handling, hash the Observables, eMPS Observables,
        # and excited states as well. They do not change the ground state,
        # but that way we can be sure to reuse the measurement outcomes.
hstr = ""
hstr += p['job_ID']
hstr += str(p['L'])
hstr += p['Write_Directory']
hstr += p['Output_Directory']
# hstr += p['unique_ID'] - never add this because it is unique!!!
# p['Quenches'] - not of importance for ground/excited state
# p['Dynamics'] - not of importance for ground/excited state
# p['State'] !!! WHAT IS THIS !!!
if('MPSConvergenceParameters' in p):
hstr += p['MPSConvergenceParameters'].get_hash()
# p['MPS']
hstr += str(p['n_excited_states'])
# p['eMPS']
if("MPSObservables" in p):
hstr += p['MPSObservables'].get_hash()
if(p['eMPS']): hstr += p['eMPSConvergenceParameters'].get_hash()
if(p['eMPS']): hstr += p['eMPSObservables'].get_hash()
# p['DynamicsObservables'] - not of importance for ground/excited state
hstr += str(p['strict'])
hstr += str(p['simtype'])
# p['verbose'] - not of importance for states
# p['int_files'] !!! WHAT IS THIS !!!
# p['Discrete_generators'] - not enabled, would be done later
# p['Abelian_generators'] - done later in the second loop
hstr += str(p['Abelian_quantum_numbers'])
hstr += str(p['Discrete_quantum_numbers'])
hstr += str(p['MPDO+'])
if(hasattr(HamiltonianMPO, '__len__')):
hp_list = HamiltonianMPO[ii].get_param_set()
else:
hp_list = HamiltonianMPO.get_param_set()
for elem in sorted(hp_list):
if(elem == 'one'):
hstr += str(1.0)
else:
hstr += str(p[elem])
hh = hashlib.sha512()
hh.update(str.encode(hstr))
hashlist1.append([hh.hexdigest()])
# increment
ii += 1
# Check if job names are unique
if len(set(ids))<len(ids):
raise Exception("Make sure all job_IDs in Parameters are unique!" +
" You can add a unique_ID if needs be.")
# Get a list of all unique write directories. This ensures that each one
# gets its own Ops etc.
MainFileList=[]
MainHashList = []
# Write out operators and get indexed operators
# ---------------------------------------------
counter=0
hacount = -1
for p in Parameters:
hacount += 1
writedir=p['Write_Directory']
p['int_files']=[]
Operators.add_abelian(p['Abelian_generators'])
Operators.add_discrete(p['Discrete_generators'])
Operators.check_symmetry(HamiltonianMPO, counter)
IntOperators, Opsfilename = Operators.write(writedir + p['job_ID'] +
p['unique_ID'], PostProcess)
if((not PostProcess) and ('QT' in p)):
copy_files_qt(Opsfilename, p['QT'])
hashlist1[hacount].append(Operators.get_hash())
nqns, npns = Operators.get_nqns_npns()
#if 'State' in p:
# raise Exception("Initial state use not yet supported!")
# if 'n_excited_states' in p:
# raise Exception("Cannot specify both n_excited_states and " +
# "the initial state in parameters!")
# if qOperators is None:
# Statefilename = p['State'].writeState(writedir + p['job_ID'] +
# p['unique_ID'])
# p['int_files'].append(Statefilename)
# else:
# Statefilename = p['State'].writeqState(writedir + p['job_ID'] +
# p['unique_ID'],
# qOperators)
# p['int_files'].append(Statefilename)
if isinstance(HamiltonianMPO,list):
H=HamiltonianMPO[counter]
counter+=1
else:
H=HamiltonianMPO
hashlist1[hacount].append(H.get_hash())
if not PostProcess:
#Get indexed Hamiltonian parameters
H.IndexHParams()
#Check that all parameters actually exist within parameter file
for i in range((len(H.IntHparam) - 1) // 2):
if((not H.IntHparam[i] in p) and (H.IntHparam[i] != 'one')):
raise Exception("Hamiltonian parameter " +
str(H.IntHparam[i]) + " used in H not " +
"found in input parameters!")
#Write out hamiltonian template
Hamifilename=writedir+p['job_ID']+p['unique_ID']+'Hamirules.dat'
Hamifile=open(Hamifilename,'w')
H.write(Hamifile,p,IntOperators, H.IntHparam)
Hamifile.close()
if('QT' in p): copy_files_qt(Hamifilename, p['QT'])
#Write out hamiltonian parameters
HPfilename=WriteHparams(writedir+p['job_ID']+p['unique_ID'],p,H.IntHparam)
if('QT' in p): copy_files_qt(HPfilename, p['QT'])
if 'MPSObservables' in p:
ftmp = p['MPSObservables'].write(writedir + p['job_ID']
+ p['unique_ID'] + '0', p,
IntOperators, H.IntHparam,
PostProcess)
if(not PostProcess and ('QT' in p)):
copy_files_qt(ftmp, p['QT'])
if 'eMPSObservables' in p:
ftmp = p['eMPSObservables'].write(writedir + p['job_ID']
+ p['unique_ID'] + '1', p,
IntOperators, H.IntHparam,
PostProcess)
if(not PostProcess and ('QT' in p)):
copy_files_qt(ftmp, p['QT'])
if not PostProcess:
#MPS convergence
if p['MPS']:
ftmp = writedir + p['job_ID'] + p['unique_ID'] + 'MPS.dat'
p['MPSConvergenceParameters'].write(ftmp)
if('QT' in p): copy_files_qt(ftmp, p['QT'])
#eMPS convergence
if p['eMPS']:
ftmp = writedir + p['job_ID'] + p['unique_ID'] + 'eMPS.dat'
p['eMPSConvergenceParameters'].write(ftmp)
if('QT' in p): copy_files_qt(ftmp, p['QT'])
cmplx_op = Operators.has_complex
MainFileName, GsHash = WriteMainFile(p, cmplx_op,
hashlist1[hacount],
MainHashList, nqns,
npns, Operators)
qtid = 0 if('QT' not in p) else p['QT']
if(qtid == 0):
MainFileList.append((MainFileName, str(qtid)))
else:
#raise NotImplementedError('QT flag not yet enabled.')
# What about WAIT4STATE flag
for ii in range(qtid):
MainFileList.append((MainFileName, str(ii + 1)))
append_hash2map(p, GsHash)
p['hash'] = GsHash
else:
p['hash'] = generate_full_hash(hashlist1[hacount])
if p['Dynamics']:
WriteDynamics(H, p, copy_files_qt, PostProcess)
if not PostProcess:
ftmp = writedir + p['job_ID'] + p['unique_ID'] + 'Dynamics'
p['DynamicsObservables'].write(ftmp, p, IntOperators,
H.IntHparam)
if('QT' in p): copy_files_qt(ftmp + 'Obs.dat', p['QT'])
return MainFileList
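# A high-level workflow sketch (comment only). The operator dictionary,
# the MPO ``H``, and the convergence/observable objects are placeholders
# defined elsewhere in MPSPyLib; the parameter values are hypothetical:
#
#     Parameters = {'job_ID': 'Ising', 'unique_ID': '_01', 'L': 10,
#                   'simtype': 'Finite',
#                   'MPSObservables': myObservables,
#                   'MPSConvergenceParameters': myConv}
#     MainFiles = WriteFiles(Parameters, Operators, H, PostProcess=False)
#     runMPS(MainFiles)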
def CleanFiles(Parameters):
"""
Delete all files generated for the Fortran backend.
**Arguments**
    Parameters : single dictionary or list of dictionaries
        The parameter dictionaries used to write the files; the files
        listed under their key `int_files` are deleted.
"""
if not isinstance(Parameters,list):
Parameters=[Parameters]
uniquefiles=[]
for p in Parameters:
if 'int_files' in p:
uniquefiles=[i for i in set(uniquefiles+p['int_files'])]
for file in uniquefiles:
remove(file)
return
def append_hash2map(param, Hash):
"""
    Append new information to the mapping of the hashed
    ground states / observables.
**Arguments**
param : Dictionary
the dictionary with all the parameters for the simulation
Hash : str
hash for the parameters in `param`
"""
# Create filename and complete id
flnm = param['Output_Directory'] + param['job_ID'] + "_static_mapping.dat"
cid = param['Output_Directory'] + param['job_ID'] + param['unique_ID']
# Check if the current ID already exists in the file
try:
fh = open(flnm, 'r')
for line in fh:
print(line)
this_cid, this_hash = line.split()
if(this_cid == cid):
if(this_hash == Hash):
# This is ok, id and parameters are the same
fh.close()
return
raise ValueError("Cannot rerun simulation with different params!")
fh.close()
except myFileNotFoundError:
# File does not exist yet
pass
# Append the data to the file
fh = open(flnm, 'a')
fh.write(cid + " " + Hash + "\n")
fh.close()
return
def copy_files_qt(filename, QT):
"""
    Make copies of a file for quantum trajectories, appending the
    corresponding index before the file extension.
**Arguments**
filename : str
file to be copied.
QT : int
make QT copies with indices 1 to QT (including QT).
"""
ext = '.' + filename.split('.')[-1]
for ii in range(QT):
repl = '_qt%09d'%(ii + 1) + ext
shutil.copyfile(filename, filename.replace(ext, repl))
remove(filename)
return
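# Example (comment only):
#
#     copy_files_qt('Job01Main.nml', 2)
#     # creates Job01Main_qt000000001.nml and Job01Main_qt000000002.nml
#     # and removes Job01Main.nml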
class MPSFortLibError(Exception):
"""
    Exception raised if the Fortran library exits with an error.
**Arguments __init__**
ret_val : int
return code of the fortran library.
"""
def __init__(self, ret_val):
self.val = ret_val
return
    def __str__(self):
"""
String representing error message.
"""
return "MPSFortLib quit with fatal return code " + str(self.val) + "!"