pyerrors.input.hadrons
```python
import os
import warnings
from collections import Counter
import h5py
from pathlib import Path
import numpy as np
from ..obs import Obs, CObs
from ..correlators import Corr
from ..dirac import epsilon_tensor_rank4


def _get_files(path, filestem, idl):
    ls = os.listdir(path)

    # Clean up file list
    files = list(filter(lambda x: x.startswith(filestem + "."), ls))

    if not files:
        raise Exception('No files starting with', filestem, 'in folder', path)

    def get_cnfg_number(n):
        return int(n.replace(".h5", "")[len(filestem) + 1:])  # From python 3.9 onward the safer 'removesuffix' method can be used.

    # Sort according to configuration number
    files.sort(key=get_cnfg_number)

    cnfg_numbers = []
    filtered_files = []
    for line in files:
        no = get_cnfg_number(line)
        if idl:
            if no in list(idl):
                filtered_files.append(line)
                cnfg_numbers.append(no)
        else:
            filtered_files.append(line)
            cnfg_numbers.append(no)

    if idl:
        if Counter(list(idl)) != Counter(cnfg_numbers):
            raise Exception("Not all configurations specified in idl found, configurations " + str(list(Counter(list(idl)) - Counter(cnfg_numbers))) + " are missing.")

    # Check that configurations are evenly spaced
    dc = np.unique(np.diff(cnfg_numbers))
    if np.any(dc < 0):
        raise Exception("Unsorted files")
    if len(dc) == 1:
        idx = range(cnfg_numbers[0], cnfg_numbers[-1] + dc[0], dc[0])
    elif idl:
        idx = idl
    else:
        raise Exception("Configurations are not evenly spaced. Provide an idl if you want to proceed with this set of configurations.")

    return filtered_files, idx


def read_meson_hd5(path, filestem, ens_id, meson='meson_0', idl=None, gammas=None):
    r'''Read hadrons meson hdf5 file and extract the meson labeled 'meson'

    Parameters
    ----------
    path : str
        path to the files to read
    filestem : str
        namestem of the files to read
    ens_id : str
        name of the ensemble, required for internal bookkeeping
    meson : str
        label of the meson to be extracted, default value meson_0 which
        corresponds to the pseudoscalar-pseudoscalar two-point function.
    gammas : tuple of strings
        Instead of a meson label one can also provide a tuple of two strings
        indicating the gamma matrices at source and sink.
        ("Gamma5", "Gamma5") corresponds to the pseudoscalar-pseudoscalar
        two-point function. The gammas argument takes precedence over meson.
    idl : range
        If specified only configurations in the given range are read in.
    '''

    files, idx = _get_files(path, filestem, idl)

    tree = meson.rsplit('_')[0]
    if gammas is not None:
        h5file = h5py.File(path + '/' + files[0], "r")
        found_meson = None
        for key in h5file[tree].keys():
            if gammas[0] == h5file[tree][key].attrs["gamma_snk"][0].decode() and h5file[tree][key].attrs["gamma_src"][0].decode() == gammas[1]:
                found_meson = key
                break
        h5file.close()
        if found_meson:
            meson = found_meson
        else:
            raise Exception("Source Sink combination " + str(gammas) + " not found.")

    corr_data = []
    infos = []
    for hd5_file in files:
        h5file = h5py.File(path + '/' + hd5_file, "r")
        if tree + '/' + meson not in h5file:
            raise Exception("Entry '" + meson + "' not contained in the files.")
        raw_data = h5file[tree + '/' + meson + '/corr']
        real_data = raw_data[:]["re"].astype(np.double)
        corr_data.append(real_data)
        if not infos:
            for k, i in h5file[tree + '/' + meson].attrs.items():
                infos.append(k + ': ' + i[0].decode())
        h5file.close()
    corr_data = np.array(corr_data)

    l_obs = []
    for c in corr_data.T:
        l_obs.append(Obs([c], [ens_id], idl=[idx]))

    corr = Corr(l_obs)
    corr.tag = r", ".join(infos)
    return corr


def read_DistillationContraction_hd5(path, ens_id, diagrams=["direct"], idl=None):
    """Read hadrons DistillationContraction hdf5 files in given directory structure

    Parameters
    ----------
    path : str
        path to the directories to read
    ens_id : str
        name of the ensemble, required for internal bookkeeping
    diagrams : list
        List of strings of the diagrams to extract, e.g. ["direct", "box", "cross"].
    idl : range
        If specified only configurations in the given range are read in.
    """

    res_dict = {}

    directories, idx = _get_files(path, "data", idl)

    explore_path = Path(path + "/" + directories[0])

    for explore_file in explore_path.iterdir():
        if explore_file.is_file():
            stem = explore_file.with_suffix("").with_suffix("").as_posix().split("/")[-1]
        else:
            continue

        file_list = []
        for dir in directories:
            tmp_path = Path(path + "/" + dir)
            file_list.append((tmp_path / stem).as_posix() + tmp_path.suffix + ".h5")

        corr_data = {}

        for diagram in diagrams:
            corr_data[diagram] = []

        for n_file, (hd5_file, n_traj) in enumerate(zip(file_list, list(idx))):
            h5file = h5py.File(hd5_file)

            if n_file == 0:
                if h5file["DistillationContraction/Metadata"].attrs.get("TimeSources")[0].decode() != "0...":
                    raise Exception("Routine is only implemented for files containing inversions on all timeslices.")

                Nt = h5file["DistillationContraction/Metadata"].attrs.get("Nt")[0]

                identifier = []
                for in_file in range(len(h5file["DistillationContraction/Metadata/DmfInputFiles"].attrs.keys()) - 1):
                    encoded_info = h5file["DistillationContraction/Metadata/DmfInputFiles"].attrs.get("DmfInputFiles_" + str(in_file))
                    full_info = encoded_info[0].decode().split("/")[-1].replace(".h5", "").split("_")
                    my_tuple = (full_info[0], full_info[1][1:], full_info[2], full_info[3])
                    identifier.append(my_tuple)
                identifier = tuple(identifier)
                # "DistillationContraction/Metadata/DmfSuffix" contains info about different quarks, irrelevant in the SU(3) case.

            for diagram in diagrams:
                real_data = np.zeros(Nt)
                for x0 in range(Nt):
                    raw_data = h5file["DistillationContraction/Correlators/" + diagram + "/" + str(x0)][:]["re"].astype(np.double)
                    real_data += np.roll(raw_data, -x0)
                real_data /= Nt

                corr_data[diagram].append(real_data)
            h5file.close()

        res_dict[str(identifier)] = {}

        for diagram in diagrams:

            tmp_data = np.array(corr_data[diagram])

            l_obs = []
            for c in tmp_data.T:
                l_obs.append(Obs([c], [ens_id], idl=[idx]))

            corr = Corr(l_obs)
            corr.tag = str(identifier)

            res_dict[str(identifier)][diagram] = corr

    return res_dict


class Npr_matrix(np.ndarray):

    def __new__(cls, input_array, mom_in=None, mom_out=None):
        obj = np.asarray(input_array).view(cls)
        obj.mom_in = mom_in
        obj.mom_out = mom_out
        return obj

    @property
    def g5H(self):
        """Gamma_5 hermitian conjugate

        Uses the fact that the propagator is gamma5 hermitian, so just the
        in and out momenta of the propagator are exchanged.
        """
        return Npr_matrix(self,
                          mom_in=self.mom_out,
                          mom_out=self.mom_in)

    def _propagate_mom(self, other, name):
        s_mom = getattr(self, name, None)
        o_mom = getattr(other, name, None)
        if s_mom is not None and o_mom is not None:
            if not np.allclose(s_mom, o_mom):
                raise Exception(name + ' does not match.')
        return o_mom if o_mom is not None else s_mom

    def __matmul__(self, other):
        return self.__new__(Npr_matrix,
                            super().__matmul__(other),
                            self._propagate_mom(other, 'mom_in'),
                            self._propagate_mom(other, 'mom_out'))

    def __array_finalize__(self, obj):
        if obj is None:
            return
        self.mom_in = getattr(obj, 'mom_in', None)
        self.mom_out = getattr(obj, 'mom_out', None)


def read_ExternalLeg_hd5(path, filestem, ens_id, idl=None):
    """Read hadrons ExternalLeg hdf5 file and output an array of CObs

    Parameters
    ----------
    path : str
        path to the files to read
    filestem : str
        namestem of the files to read
    ens_id : str
        name of the ensemble, required for internal bookkeeping
    idl : range
        If specified only configurations in the given range are read in.
    """

    files, idx = _get_files(path, filestem, idl)

    mom = None

    corr_data = []
    for hd5_file in files:
        file = h5py.File(path + '/' + hd5_file, "r")
        raw_data = file['ExternalLeg/corr'][0][0].view('complex')
        corr_data.append(raw_data)
        if mom is None:
            mom = np.array(str(file['ExternalLeg/info'].attrs['pIn'])[3:-2].strip().split(), dtype=float)
        file.close()
    corr_data = np.array(corr_data)

    rolled_array = np.rollaxis(corr_data, 0, 5)

    matrix = np.empty((rolled_array.shape[:-1]), dtype=object)
    for si, sj, ci, cj in np.ndindex(rolled_array.shape[:-1]):
        real = Obs([rolled_array[si, sj, ci, cj].real], [ens_id], idl=[idx])
        imag = Obs([rolled_array[si, sj, ci, cj].imag], [ens_id], idl=[idx])
        matrix[si, sj, ci, cj] = CObs(real, imag)

    return Npr_matrix(matrix, mom_in=mom)


def read_Bilinear_hd5(path, filestem, ens_id, idl=None):
    """Read hadrons Bilinear hdf5 file and output an array of CObs

    Parameters
    ----------
    path : str
        path to the files to read
    filestem : str
        namestem of the files to read
    ens_id : str
        name of the ensemble, required for internal bookkeeping
    idl : range
        If specified only configurations in the given range are read in.
    """

    files, idx = _get_files(path, filestem, idl)

    mom_in = None
    mom_out = None

    corr_data = {}
    for hd5_file in files:
        file = h5py.File(path + '/' + hd5_file, "r")
        for i in range(16):
            name = file['Bilinear/Bilinear_' + str(i) + '/info'].attrs['gamma'][0].decode('UTF-8')
            if name not in corr_data:
                corr_data[name] = []
            raw_data = file['Bilinear/Bilinear_' + str(i) + '/corr'][0][0].view('complex')
            corr_data[name].append(raw_data)
            if mom_in is None:
                mom_in = np.array(str(file['Bilinear/Bilinear_' + str(i) + '/info'].attrs['pIn'])[3:-2].strip().split(), dtype=float)
            if mom_out is None:
                mom_out = np.array(str(file['Bilinear/Bilinear_' + str(i) + '/info'].attrs['pOut'])[3:-2].strip().split(), dtype=float)

        file.close()

    result_dict = {}

    for key, data in corr_data.items():
        local_data = np.array(data)

        rolled_array = np.rollaxis(local_data, 0, 5)

        matrix = np.empty((rolled_array.shape[:-1]), dtype=object)
        for si, sj, ci, cj in np.ndindex(rolled_array.shape[:-1]):
            real = Obs([rolled_array[si, sj, ci, cj].real], [ens_id], idl=[idx])
            imag = Obs([rolled_array[si, sj, ci, cj].imag], [ens_id], idl=[idx])
            matrix[si, sj, ci, cj] = CObs(real, imag)

        result_dict[key] = Npr_matrix(matrix, mom_in=mom_in, mom_out=mom_out)

    return result_dict


def read_Fourquark_hd5(path, filestem, ens_id, idl=None, vertices=["VA", "AV"]):
    """Read hadrons FourquarkFullyConnected hdf5 file and output an array of CObs

    Parameters
    ----------
    path : str
        path to the files to read
    filestem : str
        namestem of the files to read
    ens_id : str
        name of the ensemble, required for internal bookkeeping
    idl : range
        If specified only configurations in the given range are read in.
    vertices : list
        Vertex functions to be extracted.
    """

    files, idx = _get_files(path, filestem, idl)

    mom_in = None
    mom_out = None

    vertex_names = []
    for vertex in vertices:
        vertex_names += _get_lorentz_names(vertex)

    corr_data = {}

    tree = 'FourQuarkFullyConnected/FourQuarkFullyConnected_'

    for hd5_file in files:
        file = h5py.File(path + '/' + hd5_file, "r")

        for i in range(32):
            name = (file[tree + str(i) + '/info'].attrs['gammaA'][0].decode('UTF-8'), file[tree + str(i) + '/info'].attrs['gammaB'][0].decode('UTF-8'))
            if name in vertex_names:
                if name not in corr_data:
                    corr_data[name] = []
                raw_data = file[tree + str(i) + '/corr'][0][0].view('complex')
                corr_data[name].append(raw_data)
                if mom_in is None:
                    mom_in = np.array(str(file[tree + str(i) + '/info'].attrs['pIn'])[3:-2].strip().split(), dtype=float)
                if mom_out is None:
                    mom_out = np.array(str(file[tree + str(i) + '/info'].attrs['pOut'])[3:-2].strip().split(), dtype=float)

        file.close()

    intermediate_dict = {}

    for vertex in vertices:
        lorentz_names = _get_lorentz_names(vertex)
        for v_name in lorentz_names:
            if v_name in [('SigmaXY', 'SigmaZT'),
                          ('SigmaXT', 'SigmaYZ'),
                          ('SigmaYZ', 'SigmaXT'),
                          ('SigmaZT', 'SigmaXY')]:
                sign = -1
            else:
                sign = 1
            if vertex not in intermediate_dict:
                intermediate_dict[vertex] = sign * np.array(corr_data[v_name])
            else:
                intermediate_dict[vertex] += sign * np.array(corr_data[v_name])

    result_dict = {}

    for key, data in intermediate_dict.items():

        rolled_array = np.moveaxis(data, 0, 8)

        matrix = np.empty((rolled_array.shape[:-1]), dtype=object)
        for index in np.ndindex(rolled_array.shape[:-1]):
            real = Obs([rolled_array[index].real], [ens_id], idl=[idx])
            imag = Obs([rolled_array[index].imag], [ens_id], idl=[idx])
            matrix[index] = CObs(real, imag)

        result_dict[key] = Npr_matrix(matrix, mom_in=mom_in, mom_out=mom_out)

    return result_dict


def _get_lorentz_names(name):
    lorentz_index = ['X', 'Y', 'Z', 'T']

    res = []

    if name == "TT":
        for i in range(4):
            for j in range(i + 1, 4):
                res.append(("Sigma" + lorentz_index[i] + lorentz_index[j], "Sigma" + lorentz_index[i] + lorentz_index[j]))
        return res

    if name == "TTtilde":
        for i in range(4):
            for j in range(i + 1, 4):
                for k in range(4):
                    for o in range(k + 1, 4):
                        fac = epsilon_tensor_rank4(i, j, k, o)
                        if not np.isclose(fac, 0.0):
                            res.append(("Sigma" + lorentz_index[i] + lorentz_index[j], "Sigma" + lorentz_index[k] + lorentz_index[o]))
        return res

    assert len(name) == 2

    if 'S' in name or 'P' in name:
        if not set(name) <= set(['S', 'P']):
            raise Exception("'" + name + "' is not a Lorentz scalar")

        g_names = {'S': 'Identity',
                   'P': 'Gamma5'}

        res.append((g_names[name[0]], g_names[name[1]]))

    else:
        if not set(name) <= set(['V', 'A']):
            raise Exception("'" + name + "' is not a Lorentz scalar")

        for ind in lorentz_index:
            res.append(('Gamma' + ind + (name[0] == 'A') * 'Gamma5',
                        'Gamma' + ind + (name[1] == 'A') * 'Gamma5'))

    return res
```
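All readers in this module locate their input via the helper `_get_files`: file (or directory) names must follow the pattern `<filestem>.<configuration number>.h5`, and the configuration numbers must be evenly spaced unless an explicit `idl` is given. A minimal sketch of that convention, assuming a hypothetical directory `./data` containing `stem.100.h5`, `stem.110.h5` and `stem.120.h5`:

```python
from pyerrors.input.hadrons import _get_files

# Hypothetical directory ./data with files stem.100.h5, stem.110.h5, stem.120.h5
files, idx = _get_files("./data", "stem", idl=None)
print(files)  # ['stem.100.h5', 'stem.110.h5', 'stem.120.h5']
print(idx)    # range(100, 130, 10) -- configuration numbers inferred from the names

# Restrict the read to a subset of configurations via idl
files, idx = _get_files("./data", "stem", idl=[100, 120])
```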
read_meson_hd5(path, filestem, ens_id, meson='meson_0', idl=None, gammas=None)
Read hadrons meson hdf5 file and extract the meson labeled 'meson'
Parameters
- path (str): path to the files to read
- filestem (str): namestem of the files to read
- ens_id (str): name of the ensemble, required for internal bookkeeping
- meson (str): label of the meson to be extracted, default value meson_0 which corresponds to the pseudoscalar-pseudoscalar two-point function.
- gammas (tuple of strings): Instead of a meson label one can also provide a tuple of two strings indicating the gamma matrices at source and sink. ("Gamma5", "Gamma5") corresponds to the pseudoscalar-pseudoscalar two-point function. The gammas argument takes precedence over meson.
- idl (range): If specified only configurations in the given range are read in.
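A short usage sketch; the directory, file stem and ensemble name below are placeholders. The second call selects the same correlator through the gamma structure at source and sink instead of the meson label:

```python
from pyerrors.input import hadrons

# Hypothetical files ./data/ckpoint_lat.<cnfg>.h5 produced by a hadrons meson module
corr = hadrons.read_meson_hd5("./data", "ckpoint_lat", ens_id="A1",
                              meson="meson_0")

# Equivalent selection via the gamma matrices; gammas takes precedence over meson
corr = hadrons.read_meson_hd5("./data", "ckpoint_lat", ens_id="A1",
                              gammas=("Gamma5", "Gamma5"))
print(corr.tag)  # metadata of the extracted meson, e.g. the gamma structures
```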
read_DistillationContraction_hd5(path, ens_id, diagrams=["direct"], idl=None)
Read hadrons DistillationContraction hdf5 files in given directory structure
Parameters
- path (str): path to the directories to read
- ens_id (str): name of the ensemble, required for internal bookkeeping
- diagrams (list): List of strings of the diagrams to extract, e.g. ["direct", "box", "cross"].
- idl (range): If specified only configurations in the given range are read in.
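A hedged usage sketch; the directory layout and ensemble name are placeholders. The routine returns a nested dictionary keyed first by the source/sink identifier and then by the diagram name:

```python
from pyerrors.input import hadrons

# Hypothetical layout: ./contractions/data.<cnfg>/<stem>.<cnfg>.h5
res = hadrons.read_DistillationContraction_hd5(
    "./contractions", ens_id="A1", diagrams=["direct", "box", "cross"])

for identifier, per_diagram in res.items():
    print(identifier, list(per_diagram.keys()))  # each entry is a Corr object
```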
class Npr_matrix(numpy.ndarray)

A numpy.ndarray subclass used by the non-perturbative renormalization readers below. It carries the in and out momenta of a propagator or vertex as mom_in and mom_out, checks that the momenta of two factors are compatible under matrix multiplication, and propagates them to the result.
g5H

Gamma_5 hermitian conjugate. Uses the fact that the propagator is gamma5 hermitian, so just the in and out momenta of the propagator are exchanged.
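A small self-contained sketch of the momentum bookkeeping with toy matrices and placeholder momenta: matrix multiplication checks that the momenta of the two factors agree and propagates them, while g5H exchanges mom_in and mom_out:

```python
import numpy as np
from pyerrors.input.hadrons import Npr_matrix

# Toy 2x2 matrices carrying placeholder momenta
a = Npr_matrix(np.ones((2, 2)), mom_in=np.array([0., 0., 2., 2.]),
               mom_out=np.array([2., 2., 0., 0.]))
b = Npr_matrix(np.eye(2), mom_in=np.array([0., 0., 2., 2.]))

prod = a @ b                       # mom_in must agree, mom_out is taken from a
print(prod.mom_in, prod.mom_out)   # [0. 0. 2. 2.] [2. 2. 0. 0.]

conj = a.g5H                       # in and out momenta are exchanged
print(conj.mom_in, conj.mom_out)   # [2. 2. 0. 0.] [0. 0. 2. 2.]
```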
read_ExternalLeg_hd5(path, filestem, ens_id, idl=None)
Read hadrons ExternalLeg hdf5 file and output an array of CObs
Parameters
- path (str): path to the files to read
- filestem (str): namestem of the files to read
- ens_id (str): name of the ensemble, required for internal bookkeeping
- idl (range): If specified only configurations in the given range are read in.
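A usage sketch with placeholder path, file stem and ensemble name; the result is an Npr_matrix of CObs over the spin and colour indices with the incoming momentum attached:

```python
from pyerrors.input import hadrons

# Hypothetical files ./npr/ExternalLeg.<cnfg>.h5 written by a hadrons ExternalLeg module
prop = hadrons.read_ExternalLeg_hd5("./npr", "ExternalLeg", ens_id="A1")

print(prop.shape)   # spin x spin x colour x colour array of CObs
print(prop.mom_in)  # momentum parsed from the pIn attribute of the files
```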
read_Bilinear_hd5(path, filestem, ens_id, idl=None)
Read hadrons Bilinear hdf5 file and output an array of CObs
Parameters
- path (str): path to the files to read
- filestem (str): namestem of the files to read
- ens_id (str): name of the ensemble, required for internal bookkeeping
- idl (range): If specified only configurations in the given range are read in.
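A usage sketch along the same lines (placeholder names); the routine returns one Npr_matrix per gamma structure found in the files:

```python
from pyerrors.input import hadrons

# Hypothetical files ./npr/Bilinear.<cnfg>.h5
bilinears = hadrons.read_Bilinear_hd5("./npr", "Bilinear", ens_id="A1")

# Dictionary keyed by the gamma name stored in the file, e.g. "Gamma5"
for gamma_name, vertex in bilinears.items():
    print(gamma_name, vertex.shape, vertex.mom_in, vertex.mom_out)
```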
read_Fourquark_hd5(path, filestem, ens_id, idl=None, vertices=["VA", "AV"])
Read hadrons FourquarkFullyConnected hdf5 file and output an array of CObs
Parameters
- path (str): path to the files to read
- filestem (str): namestem of the files to read
- ens_id (str): name of the ensemble, required for internal bookkeeping
- idl (range): If specified only configurations in the given range are read in.
- vertices (list): Vertex functions to be extracted.
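A usage sketch with placeholder names; vertices selects which Dirac structures are assembled from the individual Lorentz components, and the result is a dictionary of Npr_matrix objects keyed by the vertex label:

```python
from pyerrors.input import hadrons

# Hypothetical files ./npr/Fourquark.<cnfg>.h5 from a FourquarkFullyConnected module
fourquark = hadrons.read_Fourquark_hd5("./npr", "Fourquark", ens_id="A1",
                                       vertices=["VA", "AV"])

vertex_VA = fourquark["VA"]  # rank-8 array of CObs over two pairs of spin and colour indices
print(vertex_VA.shape, vertex_VA.mom_in, vertex_VA.mom_out)
```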