pyerrors.input.hadrons
import os
from collections import Counter
import h5py
from pathlib import Path
import numpy as np
from ..obs import Obs, CObs
from ..correlators import Corr
from ..dirac import epsilon_tensor_rank4


def _get_files(path, filestem, idl):
    ls = os.listdir(path)

    # Clean up file list
    files = list(filter(lambda x: x.startswith(filestem + "."), ls))

    if not files:
        raise Exception('No files starting with', filestem, 'in folder', path)

    def get_cnfg_number(n):
        return int(n.replace(".h5", "")[len(filestem) + 1:])  # From python 3.9 onward the safer 'removesuffix' method can be used.

    # Sort according to configuration number
    files.sort(key=get_cnfg_number)

    cnfg_numbers = []
    filtered_files = []
    for line in files:
        no = get_cnfg_number(line)
        if idl:
            if no in list(idl):
                filtered_files.append(line)
                cnfg_numbers.append(no)
        else:
            filtered_files.append(line)
            cnfg_numbers.append(no)

    if idl:
        if Counter(list(idl)) != Counter(cnfg_numbers):
            raise Exception("Not all configurations specified in idl found, configurations " + str(list(Counter(list(idl)) - Counter(cnfg_numbers))) + " are missing.")

    # Check that configurations are evenly spaced
    dc = np.unique(np.diff(cnfg_numbers))
    if np.any(dc < 0):
        raise Exception("Unsorted files")
    if len(dc) == 1:
        idx = range(cnfg_numbers[0], cnfg_numbers[-1] + dc[0], dc[0])
    elif idl:
        idx = idl
    else:
        raise Exception("Configurations are not evenly spaced. Provide an idl if you want to proceed with this set of configurations.")

    return filtered_files, idx


def read_meson_hd5(path, filestem, ens_id, meson='meson_0', idl=None, gammas=None):
    r'''Read hadrons meson HDF5 file and extract the meson labeled 'meson'

    Parameters
    ----------
    path : str
        path to the files to read
    filestem : str
        namestem of the files to read
    ens_id : str
        name of the ensemble, required for internal bookkeeping
    meson : str
        label of the meson to be extracted, standard value meson_0 which
        corresponds to the pseudoscalar-pseudoscalar two-point function.
    gammas : tuple of strings
        Instead of a meson label one can also provide a tuple of two strings
        indicating the gamma matrices at source and sink.
        ("Gamma5", "Gamma5") corresponds to the pseudoscalar-pseudoscalar
        two-point function. The gammas argument takes precedence over meson.
    idl : range
        If specified, only configurations in the given range are read in.

    Returns
    -------
    corr : Corr
        Correlator of the source-sink combination in question.
    '''

    files, idx = _get_files(path, filestem, idl)

    tree = meson.rsplit('_')[0]
    if gammas is not None:
        h5file = h5py.File(path + '/' + files[0], "r")
        found_meson = None
        for key in h5file[tree].keys():
            if gammas[0] == h5file[tree][key].attrs["gamma_snk"][0].decode() and h5file[tree][key].attrs["gamma_src"][0].decode() == gammas[1]:
                found_meson = key
                break
        h5file.close()
        if found_meson:
            meson = found_meson
        else:
            raise Exception("Source Sink combination " + str(gammas) + " not found.")

    corr_data = []
    infos = []
    for hd5_file in files:
        h5file = h5py.File(path + '/' + hd5_file, "r")
        if not tree + '/' + meson in h5file:
            raise Exception("Entry '" + meson + "' not contained in the files.")
        raw_data = h5file[tree + '/' + meson + '/corr']
        real_data = raw_data[:]["re"].astype(np.double)
        corr_data.append(real_data)
        if not infos:
            for k, i in h5file[tree + '/' + meson].attrs.items():
                infos.append(k + ': ' + i[0].decode())
        h5file.close()
    corr_data = np.array(corr_data)

    l_obs = []
    for c in corr_data.T:
        l_obs.append(Obs([c], [ens_id], idl=[idx]))

    corr = Corr(l_obs)
    corr.tag = r", ".join(infos)
    return corr


def read_DistillationContraction_hd5(path, ens_id, diagrams=["direct"], idl=None):
    """Read hadrons DistillationContraction HDF5 files in a given directory structure

    Parameters
    ----------
    path : str
        path to the directories to read
    ens_id : str
        name of the ensemble, required for internal bookkeeping
    diagrams : list
        List of strings of the diagrams to extract, e.g. ["direct", "box", "cross"].
    idl : range
        If specified, only configurations in the given range are read in.

    Returns
    -------
    result : dict
        extracted DistillationContraction data
    """

    res_dict = {}

    directories, idx = _get_files(path, "data", idl)

    explore_path = Path(path + "/" + directories[0])

    for explore_file in explore_path.iterdir():
        if explore_file.is_file():
            stem = explore_file.with_suffix("").with_suffix("").as_posix().split("/")[-1]
        else:
            continue

        file_list = []
        for dir in directories:
            tmp_path = Path(path + "/" + dir)
            file_list.append((tmp_path / stem).as_posix() + tmp_path.suffix + ".h5")

        corr_data = {}

        for diagram in diagrams:
            corr_data[diagram] = []

        for n_file, (hd5_file, n_traj) in enumerate(zip(file_list, list(idx))):
            h5file = h5py.File(hd5_file)

            if n_file == 0:
                if h5file["DistillationContraction/Metadata"].attrs.get("TimeSources")[0].decode() != "0...":
                    raise Exception("Routine is only implemented for files containing inversions on all timeslices.")

                Nt = h5file["DistillationContraction/Metadata"].attrs.get("Nt")[0]

                identifier = []
                for in_file in range(len(h5file["DistillationContraction/Metadata/DmfInputFiles"].attrs.keys()) - 1):
                    encoded_info = h5file["DistillationContraction/Metadata/DmfInputFiles"].attrs.get("DmfInputFiles_" + str(in_file))
                    full_info = encoded_info[0].decode().split("/")[-1].replace(".h5", "").split("_")
                    my_tuple = (full_info[0], full_info[1][1:], full_info[2], full_info[3])
                    identifier.append(my_tuple)
                identifier = tuple(identifier)
                # "DistillationContraction/Metadata/DmfSuffix" contains info about different quarks, irrelevant in the SU(3) case.

            for diagram in diagrams:

                if diagram == "triangle" and "Identity" not in str(identifier):
                    part = "im"
                else:
                    part = "re"

                real_data = np.zeros(Nt)
                for x0 in range(Nt):
                    raw_data = h5file["DistillationContraction/Correlators/" + diagram + "/" + str(x0)][:][part].astype(np.double)
                    real_data += np.roll(raw_data, -x0)
                real_data /= Nt

                corr_data[diagram].append(real_data)
            h5file.close()

        res_dict[str(identifier)] = {}

        for diagram in diagrams:

            tmp_data = np.array(corr_data[diagram])

            l_obs = []
            for c in tmp_data.T:
                l_obs.append(Obs([c], [ens_id], idl=[idx]))

            corr = Corr(l_obs)
            corr.tag = str(identifier)

            res_dict[str(identifier)][diagram] = corr

    return res_dict


class Npr_matrix(np.ndarray):

    def __new__(cls, input_array, mom_in=None, mom_out=None):
        obj = np.asarray(input_array).view(cls)
        obj.mom_in = mom_in
        obj.mom_out = mom_out
        return obj

    @property
    def g5H(self):
        """Gamma_5 hermitian conjugate

        Uses the fact that the propagator is gamma5 hermitian, so just the
        in and out momenta of the propagator are exchanged.
        """
        return Npr_matrix(self,
                          mom_in=self.mom_out,
                          mom_out=self.mom_in)

    def _propagate_mom(self, other, name):
        s_mom = getattr(self, name, None)
        o_mom = getattr(other, name, None)
        if s_mom is not None and o_mom is not None:
            if not np.allclose(s_mom, o_mom):
                raise Exception(name + ' does not match.')
        return o_mom if o_mom is not None else s_mom

    def __matmul__(self, other):
        return self.__new__(Npr_matrix,
                            super().__matmul__(other),
                            self._propagate_mom(other, 'mom_in'),
                            self._propagate_mom(other, 'mom_out'))

    def __array_finalize__(self, obj):
        if obj is None:
            return
        self.mom_in = getattr(obj, 'mom_in', None)
        self.mom_out = getattr(obj, 'mom_out', None)


def read_ExternalLeg_hd5(path, filestem, ens_id, idl=None):
    """Read hadrons ExternalLeg HDF5 file and output an array of CObs

    Parameters
    ----------
    path : str
        path to the files to read
    filestem : str
        namestem of the files to read
    ens_id : str
        name of the ensemble, required for internal bookkeeping
    idl : range
        If specified, only configurations in the given range are read in.

    Returns
    -------
    result : Npr_matrix
        matrix of CObs read from the files
    """

    files, idx = _get_files(path, filestem, idl)

    mom = None

    corr_data = []
    for hd5_file in files:
        file = h5py.File(path + '/' + hd5_file, "r")
        raw_data = file['ExternalLeg/corr'][0][0].view('complex')
        corr_data.append(raw_data)
        if mom is None:
            mom = np.array(str(file['ExternalLeg/info'].attrs['pIn'])[3:-2].strip().split(), dtype=float)
        file.close()
    corr_data = np.array(corr_data)

    rolled_array = np.rollaxis(corr_data, 0, 5)

    matrix = np.empty((rolled_array.shape[:-1]), dtype=object)
    for si, sj, ci, cj in np.ndindex(rolled_array.shape[:-1]):
        real = Obs([rolled_array[si, sj, ci, cj].real], [ens_id], idl=[idx])
        imag = Obs([rolled_array[si, sj, ci, cj].imag], [ens_id], idl=[idx])
        matrix[si, sj, ci, cj] = CObs(real, imag)

    return Npr_matrix(matrix, mom_in=mom)


def read_Bilinear_hd5(path, filestem, ens_id, idl=None):
    """Read hadrons Bilinear HDF5 file and output an array of CObs

    Parameters
    ----------
    path : str
        path to the files to read
    filestem : str
        namestem of the files to read
    ens_id : str
        name of the ensemble, required for internal bookkeeping
    idl : range
        If specified, only configurations in the given range are read in.

    Returns
    -------
    result_dict: dict[Npr_matrix]
        extracted Bilinears
    """

    files, idx = _get_files(path, filestem, idl)

    mom_in = None
    mom_out = None

    corr_data = {}
    for hd5_file in files:
        file = h5py.File(path + '/' + hd5_file, "r")
        for i in range(16):
            name = file['Bilinear/Bilinear_' + str(i) + '/info'].attrs['gamma'][0].decode('UTF-8')
            if name not in corr_data:
                corr_data[name] = []
            raw_data = file['Bilinear/Bilinear_' + str(i) + '/corr'][0][0].view('complex')
            corr_data[name].append(raw_data)
            if mom_in is None:
                mom_in = np.array(str(file['Bilinear/Bilinear_' + str(i) + '/info'].attrs['pIn'])[3:-2].strip().split(), dtype=float)
            if mom_out is None:
                mom_out = np.array(str(file['Bilinear/Bilinear_' + str(i) + '/info'].attrs['pOut'])[3:-2].strip().split(), dtype=float)

        file.close()

    result_dict = {}

    for key, data in corr_data.items():
        local_data = np.array(data)

        rolled_array = np.rollaxis(local_data, 0, 5)

        matrix = np.empty((rolled_array.shape[:-1]), dtype=object)
        for si, sj, ci, cj in np.ndindex(rolled_array.shape[:-1]):
            real = Obs([rolled_array[si, sj, ci, cj].real], [ens_id], idl=[idx])
            imag = Obs([rolled_array[si, sj, ci, cj].imag], [ens_id], idl=[idx])
            matrix[si, sj, ci, cj] = CObs(real, imag)

        result_dict[key] = Npr_matrix(matrix, mom_in=mom_in, mom_out=mom_out)

    return result_dict


def read_Fourquark_hd5(path, filestem, ens_id, idl=None, vertices=["VA", "AV"]):
    """Read hadrons FourquarkFullyConnected HDF5 file and output an array of CObs

    Parameters
    ----------
    path : str
        path to the files to read
    filestem : str
        namestem of the files to read
    ens_id : str
        name of the ensemble, required for internal bookkeeping
    idl : range
        If specified, only configurations in the given range are read in.
    vertices : list
        Vertex functions to be extracted.

    Returns
    -------
    result_dict : dict
        extracted fourquark matrices
    """

    files, idx = _get_files(path, filestem, idl)

    mom_in = None
    mom_out = None

    vertex_names = []
    for vertex in vertices:
        vertex_names += _get_lorentz_names(vertex)

    corr_data = {}

    tree = 'FourQuarkFullyConnected/FourQuarkFullyConnected_'

    for hd5_file in files:
        file = h5py.File(path + '/' + hd5_file, "r")

        for i in range(32):
            name = (file[tree + str(i) + '/info'].attrs['gammaA'][0].decode('UTF-8'), file[tree + str(i) + '/info'].attrs['gammaB'][0].decode('UTF-8'))
            if name in vertex_names:
                if name not in corr_data:
                    corr_data[name] = []
                raw_data = file[tree + str(i) + '/corr'][0][0].view('complex')
                corr_data[name].append(raw_data)
                if mom_in is None:
                    mom_in = np.array(str(file[tree + str(i) + '/info'].attrs['pIn'])[3:-2].strip().split(), dtype=float)
                if mom_out is None:
                    mom_out = np.array(str(file[tree + str(i) + '/info'].attrs['pOut'])[3:-2].strip().split(), dtype=float)

        file.close()

    intermediate_dict = {}

    for vertex in vertices:
        lorentz_names = _get_lorentz_names(vertex)
        for v_name in lorentz_names:
            if v_name in [('SigmaXY', 'SigmaZT'),
                          ('SigmaXT', 'SigmaYZ'),
                          ('SigmaYZ', 'SigmaXT'),
                          ('SigmaZT', 'SigmaXY')]:
                sign = -1
            else:
                sign = 1
            if vertex not in intermediate_dict:
                intermediate_dict[vertex] = sign * np.array(corr_data[v_name])
            else:
                intermediate_dict[vertex] += sign * np.array(corr_data[v_name])

    result_dict = {}

    for key, data in intermediate_dict.items():

        rolled_array = np.moveaxis(data, 0, 8)

        matrix = np.empty((rolled_array.shape[:-1]), dtype=object)
        for index in np.ndindex(rolled_array.shape[:-1]):
            real = Obs([rolled_array[index].real], [ens_id], idl=[idx])
            imag = Obs([rolled_array[index].imag], [ens_id], idl=[idx])
            matrix[index] = CObs(real, imag)

        result_dict[key] = Npr_matrix(matrix, mom_in=mom_in, mom_out=mom_out)

    return result_dict


def _get_lorentz_names(name):
    lorentz_index = ['X', 'Y', 'Z', 'T']

    res = []

    if name == "TT":
        for i in range(4):
            for j in range(i + 1, 4):
                res.append(("Sigma" + lorentz_index[i] + lorentz_index[j], "Sigma" + lorentz_index[i] + lorentz_index[j]))
        return res

    if name == "TTtilde":
        for i in range(4):
            for j in range(i + 1, 4):
                for k in range(4):
                    for o in range(k + 1, 4):
                        fac = epsilon_tensor_rank4(i, j, k, o)
                        if not np.isclose(fac, 0.0):
                            res.append(("Sigma" + lorentz_index[i] + lorentz_index[j], "Sigma" + lorentz_index[k] + lorentz_index[o]))
        return res

    assert len(name) == 2

    if 'S' in name or 'P' in name:
        if not set(name) <= set(['S', 'P']):
            raise Exception("'" + name + "' is not a Lorentz scalar")

        g_names = {'S': 'Identity',
                   'P': 'Gamma5'}

        res.append((g_names[name[0]], g_names[name[1]]))

    else:
        if not set(name) <= set(['V', 'A']):
            raise Exception("'" + name + "' is not a Lorentz scalar")

        for ind in lorentz_index:
            res.append(('Gamma' + ind + (name[0] == 'A') * 'Gamma5',
                        'Gamma' + ind + (name[1] == 'A') * 'Gamma5'))

    return res
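The private helper _get_files only inspects file names, so the directory layout it expects can be illustrated with empty dummy files. A minimal sketch follows; the file stem, configuration numbers and the temporary directory are made up for illustration. Every file has to be named <filestem>.<configuration number>.h5 and, unless an explicit idl is passed, the configuration numbers have to be evenly spaced.

```python
import os
import tempfile

from pyerrors.input.hadrons import _get_files  # private helper, imported here only for illustration

with tempfile.TemporaryDirectory() as path:
    # hypothetical file stem and configuration numbers
    for cnfg in (100, 110, 120, 130):
        open(os.path.join(path, "meson_prop." + str(cnfg) + ".h5"), "w").close()

    files, idx = _get_files(path, "meson_prop", idl=None)
    print(files)      # ['meson_prop.100.h5', ..., 'meson_prop.130.h5']
    print(list(idx))  # [100, 110, 120, 130]
```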
read_meson_hd5(path, filestem, ens_id, meson='meson_0', idl=None, gammas=None)
Read hadrons meson HDF5 file and extract the meson labeled 'meson'
Parameters
- path (str): path to the files to read
- filestem (str): namestem of the files to read
- ens_id (str): name of the ensemble, required for internal bookkeeping
- meson (str): label of the meson to be extracted, standard value meson_0 which corresponds to the pseudoscalar-pseudoscalar two-point function.
- gammas (tuple of strings): Instead of a meson label one can also provide a tuple of two strings indicating the gamma matrices at source and sink. ("Gamma5", "Gamma5") corresponds to the pseudoscalar-pseudoscalar two-point function. The gammas argument takes precedence over meson.
- idl (range): If specified, only configurations in the given range are read in.
Returns
- corr (Corr): Correlator of the source-sink combination in question.
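A minimal usage sketch for read_meson_hd5; the directory, file stem and ensemble name are placeholders. The correlator can be selected either by its meson label or by the source/sink gamma structures:

```python
from pyerrors.input.hadrons import read_meson_hd5

path = "/data/ensembleA/mesons"  # hypothetical directory with files named <filestem>.<cnfg>.h5
corr = read_meson_hd5(path, "meson_prop", ens_id="A_ensemble")  # default label meson_0

# Equivalent read via the gamma structures, restricted to a subset of configurations
corr_pp = read_meson_hd5(path, "meson_prop", ens_id="A_ensemble",
                         gammas=("Gamma5", "Gamma5"), idl=range(100, 200, 10))
print(corr_pp.tag)  # metadata string assembled from the HDF5 attributes
```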
read_DistillationContraction_hd5(path, ens_id, diagrams=["direct"], idl=None)
Read hadrons DistillationContraction HDF5 files in a given directory structure
Parameters
- path (str): path to the directories to read
- ens_id (str): name of the ensemble, required for internal bookkeeping
- diagrams (list): List of strings of the diagrams to extract, e.g. ["direct", "box", "cross"].
- idl (range): If specified, only configurations in the given range are read in.
Returns
- result (dict): extracted DistillationContraction data
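A usage sketch with placeholder arguments, assuming per-configuration subdirectories that _get_files can enumerate. The result is a nested dictionary keyed by the identifier built from the DmfInputFiles metadata and by the diagram name:

```python
from pyerrors.input.hadrons import read_DistillationContraction_hd5

res = read_DistillationContraction_hd5("/data/ensembleA/distillation",  # hypothetical path with data.<cnfg> subdirectories
                                        ens_id="A_ensemble",
                                        diagrams=["direct", "box", "cross"])

for identifier, per_diagram in res.items():
    print(identifier, list(per_diagram.keys()))  # one Corr per identifier and diagram
```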
class Npr_matrix(np.ndarray)
g5H
Gamma_5 hermitian conjugate
Uses the fact that the propagator is gamma5 hermitian, so just the in and out momenta of the propagator are exchanged.
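Npr_matrix is a thin np.ndarray subclass that carries the in- and out-momentum of a propagator or vertex along with the data. A small sketch with dummy numbers, showing how g5H exchanges the momenta and how matrix multiplication checks and propagates them:

```python
import numpy as np
from pyerrors.input.hadrons import Npr_matrix

prop = Npr_matrix(np.ones((12, 12)), mom_in=[0, 0, 0, 2], mom_out=[0, 2, 0, 0])

# g5H leaves the data untouched and only exchanges the momenta
print(prop.g5H.mom_in, prop.g5H.mom_out)   # [0, 2, 0, 0] [0, 0, 0, 2]

# __matmul__ checks that matching momenta of both factors agree and attaches them to the product
vertex = Npr_matrix(np.eye(12), mom_in=[0, 0, 0, 2], mom_out=[0, 2, 0, 0])
product = prop @ vertex
print(product.mom_in, product.mom_out)     # [0, 0, 0, 2] [0, 2, 0, 0]
```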
read_ExternalLeg_hd5(path, filestem, ens_id, idl=None)
Read hadrons ExternalLeg HDF5 file and output an array of CObs
Parameters
- path (str): path to the files to read
- filestem (str): namestem of the files to read
- ens_id (str): name of the ensemble, required for internal bookkeeping
- idl (range): If specified, only configurations in the given range are read in.
Returns
- result (Npr_matrix): matrix of CObs read from the files
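A usage sketch with placeholder arguments; the shape comment reflects the expected spin and colour structure of the propagator and is an assumption about the file content:

```python
from pyerrors.input.hadrons import read_ExternalLeg_hd5

leg = read_ExternalLeg_hd5("/data/ensembleA/npr", "ExternalLeg", ens_id="A_ensemble")

print(leg.shape)   # expected (4, 4, 3, 3): spin and colour indices of the propagator
print(leg.mom_in)  # incoming momentum parsed from the 'pIn' attribute
```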
read_Bilinear_hd5(path, filestem, ens_id, idl=None)
Read hadrons Bilinear HDF5 file and output an array of CObs
Parameters
- path (str): path to the files to read
- filestem (str): namestem of the files to read
- ens_id (str): name of the ensemble, required for internal bookkeeping
- idl (range): If specified, only configurations in the given range are read in.
Returns
- result_dict (dict[Npr_matrix]): extracted Bilinears
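A usage sketch with placeholder arguments; the returned dictionary is keyed by the gamma structure stored in each Bilinear group, so the 'Gamma5' entry below assumes a pseudoscalar bilinear was measured:

```python
from pyerrors.input.hadrons import read_Bilinear_hd5

bilinears = read_Bilinear_hd5("/data/ensembleA/npr", "Bilinear", ens_id="A_ensemble")

print(sorted(bilinears.keys()))       # e.g. 'Gamma5', 'GammaX', ..., one entry per gamma structure
vertex = bilinears["Gamma5"]          # assumes a pseudoscalar bilinear is present
print(vertex.mom_in, vertex.mom_out)  # momenta parsed from the 'pIn'/'pOut' attributes
```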
read_Fourquark_hd5(path, filestem, ens_id, idl=None, vertices=["VA", "AV"])
Read hadrons FourquarkFullyConnected HDF5 file and output an array of CObs
Parameters
- path (str): path to the files to read
- filestem (str): namestem of the files to read
- ens_id (str): name of the ensemble, required for internal bookkeeping
- idl (range): If specified, only configurations in the given range are read in.
- vertices (list): Vertex functions to be extracted.
Returns
- result_dict (dict): extracted fourquark matrices
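A usage sketch with placeholder arguments; each requested vertex ('VA', 'AV', ...) is summed up from its Lorentz components and returned as a rank-8 array of CObs:

```python
from pyerrors.input.hadrons import read_Fourquark_hd5

fourquark = read_Fourquark_hd5("/data/ensembleA/npr", "Fourquark", ens_id="A_ensemble",
                               vertices=["VA", "AV"])

print(list(fourquark.keys()))   # ['VA', 'AV']
print(fourquark["VA"].shape)    # eight spin/colour indices, e.g. (4, 4, 3, 3, 4, 4, 3, 3)
print(fourquark["VA"].mom_in)   # momentum parsed from the 'pIn' attribute
```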