DISPATCH
_dispatch.py
# Python 2/3 compatibility
2 from __future__ import print_function
3 
4 import os
5 import sys
6 import mmap
7 import f90nml
8 import numpy as np
9 import dispatch.graphics as dg
10 from time import time
11 from dispatch._dispatch_grid import GeometricFactors
12 from dispatch._aux import aux
13 from scipy.io import FortranFile
14 
15 class _timer():
16  """ timer functionality """
17  def __init__(self):
18  self.dict={}
19  self._start=time()
20  def start(self):
21  self.__init__()
22  def add(self,key):
23  now=time()
24  if key in self.dict.keys():
25  self.dict[key]+=now-self._start
26  else:
27  self.dict[key]=now-self._start
28  self._start=now
29  def print(self,verbose=0,timeit=False):
30  l=0
31  skip=not timeit
32  for k,v in self.dict.items():
33  l=max(l,len(k))
34  if v>2.0:
35  skip=False
36  if not skip or verbose:
37  s='{:>XX}: {:7.3f} sec'
38  s=s.replace('XX','{}'.format(l))
39  print('\ntimer:')
40  for k,v in self.dict.items():
41  print(s.format(k,v))
42 timer=_timer()
43 
class _units:
    """ Default unit scalings; all unity unless overridden elsewhere. """
    # presumably: l=length, d=density, e=energy, t=time, m=mass
    # -- TODO confirm against the solver's scaling conventions
    l = 1.0
    d = 1.0
    e = 1.0
    t = 1.0
    m = 1.0
50 
51 def _p2():
52  """ convenience function to check Python version """
53  return sys.version_info.major==2
54 
def _test():
    """ Tiny smoke test: print the Python-2 detection result """
    is_py2 = _p2()
    print('a', is_py2)
57 
58 def _pathjoin(d,f):
59  p=os.path.join(d,f)
60  p=p.replace('\\','/')
61  return p
62 
63 def _file(d,f):
64  """ Join a dir and a filename, and check that the file exists
65  """
66  p=os.path.join(d,f)
67  assert (os.path.isfile(p)), 'the file {} does not exist'.format(p)
68  return p
69 
70 def _dir(d,subd):
71  """ Join a dir and subdir, and check that the dir exists
72  """
73  p=os.path.join(d,subd)
74  assert (os.path.isdir(p)), 'the directory {} does not exist'.format(p)
75  return p
76 
77 class _obj(object):
78  """ Transform a namelist dict to an object, turning lists into numpy arrays
79  """
80  def __init__(self, d):
81  for a, b in d.items():
82  ba = np.array(b) if isinstance(b, list) else b
83  setattr(self, a, _obj(b) if isinstance(b, dict) else ba)
84 
85 def _add_nml_to(obj,d,verbose=0):
86  for key,value in d.items():
87  setattr(obj,key,value)
88  if verbose==2:
89  print(' adding property '+key+', with value',value)
90 
def map_var(p, iv):
    """ Find the numeric index, if any, corresponding to a variable key;
        unknown keys map to 0 """
    if iv in p.keys['letters']:
        return p.idx.dict[iv]
    return 0
98 
class _param():
    """ Empty container object used to hold a namelist parameter group """
    pass
101 
def _add_nml_list_to(obj, nml_list, verbose=0):
    """ Add all namelists in nml_list as attributes of obj.

        Entries of 'snapshot_nml' are attached directly to obj, while every
        other namelist ('xxx_nml' or 'xxx_params') becomes a sub-object
        attribute named 'xxx' carrying that namelist's values.
    """
    # Repair strange interpretation of ntotal: collapse a one-element
    # list to its scalar value
    snap_dict = nml_list['snapshot_nml']
    if 'ntotal' in snap_dict:
        # isinstance instead of type()==list, so list subclasses also work
        if isinstance(snap_dict['ntotal'], list):
            snap_dict['ntotal'] = snap_dict['ntotal'][0]
    for key, nml_dict in nml_list.items():
        if verbose == 1:
            print(' parsing', key)
        if key == 'snapshot_nml':
            # snapshot parameters go directly onto obj
            _add_nml_to(obj, nml_dict)
        else:
            # everything else becomes a named parameter sub-object
            name = key.replace('_nml', '').replace('_params', '')
            if verbose == 1:
                print(' adding property', name)
            setattr(obj, name, _param())
            subobj = getattr(obj, name)
            _add_nml_to(subobj, nml_dict, verbose=verbose)
122 
class memmap2(np.memmap):
    """ Attempt at minimal np.memmap subclass.

        Experimental: saves (length, offset, filename) so the mapping can be
        closed after creation and re-created on access; see also the
        memmap3/memmap4 variants below.
    """
    def __new__(subtype, *args, **kwargs):
        obj = super(memmap2, subtype).__new__(subtype, *args, **kwargs)
        # save what is needed to re-create the mapping later
        obj._saved = {'len':obj._mmap.__len__(),'off':obj.offset,
                      'file':obj.filename}
        # NOTE(review): fo is opened but never used or closed here —
        # apparent leftover; confirm before removing
        fo=open(obj.filename,'rb+')
        # close the mapping immediately, keeping the open-file count low
        obj._mmap.close()
        return obj
    def reopen(self):
        """ Re-create the memory mapping if it has been closed """
        s=self._saved
        if self._mmap.closed:
            print('was closed, reopen')
            fo=open(s['file'],'rb+')
            # NOTE(review): unlike memmap3/4, the offset is not rounded
            # down to mmap.ALLOCATIONGRANULARITY here — may fail for
            # offsets that are not granularity-aligned; confirm
            self._mmap=mmap.mmap(fo.fileno(),offset=s['off'],length=s['len'])
        else:
            print('was open')
    def __array_prepare__ (self, *args, **kwargs):
        # re-create the mapping before numpy touches the data
        s=self._saved
        if self._mmap.closed:
            print('was closed, reopen')
            fo=open(s['file'],'rb+')
            self._fileobj=fo
            self._mmap=mmap.mmap(fo.fileno(),offset=s['off'],length=s['len'])
        else:
            print('was open')
        # NOTE(review): self is passed as the obj argument here, which looks
        # suspect compared with the memmap3/4 implementations — confirm
        obj = super(memmap2, self).__array_prepare__(self, *args, **kwargs)
        return obj
151 
class memmap3(np.memmap):
    """ np.memmap subclass that closes its mapping right after creation and
        transparently re-creates it on access, to limit the number of
        simultaneously open files.

        BUGFIX: the previous __array_wrap__/__array_prepare__ forwarded
        undefined names *args/**kwargs, raising NameError whenever numpy
        called them; they now forward the actual (arr/obj, context) pair.
    """
    def __new__(subtype, file, dtype=np.float32, mode='r+', offset=0,
                shape=(10,), order='F'):
        self = super(memmap3, subtype).__new__(subtype, file, dtype=dtype,
            mode=mode, offset=offset, shape=shape, order=order)
        # save what is needed to re-create the mapping: (file, length, offset)
        length = self._mmap.__len__()
        offset = self.offset
        saved = (file,length,offset)
        print('-> memmap3.__new__: saved=',saved)
        self._saved = saved
        # close the mapping now, keeping the number of open files low
        self._mmap.close()
        return self
    def reopen(self):
        """ Re-create the memory mapping from the saved (file,length,offset) """
        print("-> memmap3.reopen")
        print(self._saved)
        (file,length,offset)=self._saved
        if self._mmap.closed:
            fo=open(file,'rb+')
            # mmap offsets must be multiples of the allocation granularity
            block=mmap.ALLOCATIONGRANULARITY
            o=(offset//block)*block
            self._mmap=mmap.mmap(fo.fileno(),length,offset=o)
    def __array_wrap__(self, arr, context=None):
        # forward the real arguments (was: undefined *args/**kwargs)
        arr = super(memmap3, self).__array_wrap__(arr, context)
        return arr
    def __array_prepare__(self, obj, context=None):
        # make sure the mapping is open before numpy touches the data
        self.reopen()
        # forward the real arguments (was: undefined *args/**kwargs)
        obj = super(memmap3, self).__array_prepare__(obj, context)
        return obj
    def __array_finalize__(self, obj):
        super(memmap3,self).__array_finalize__(obj)
        if obj is None:
            return
        # propagate the bookkeeping attributes to views and copies
        self._saved = obj._saved
        self._mmap = obj._mmap
    def __repr__(self):
        self.reopen()
        return super(memmap3,self).__repr__()
    def __str__(self):
        self.reopen()
        return super(memmap3,self).__str__()
202 
class memmap4(np.memmap):
    """ Working np.memmap subclass, with verbose option.

        The underlying mmap is closed immediately after creation, to keep
        the number of simultaneously open files low, and is transparently
        re-created (reopen) when the data is actually accessed.

        NOTE(review): __array_prepare__ was removed in numpy 2.0 — confirm
        the targeted numpy version still calls it.
    """
    def __new__(subtype, file, dtype=np.float32,mode='w+',offset=0,
                shape=(10,), order='F',verbose=False):
        self = super(memmap4, subtype).__new__(subtype,file,dtype=dtype,
               mode=mode,offset=offset,shape=shape,order=order)
        self._verbose=verbose
        # save what is needed to re-create the mapping: (file, length, offset)
        self._saved=(file,self._mmap.__len__(),self.offset)
        # close the mapping immediately; reopen() restores it on demand
        self._mmap.close()
        if self._verbose:
            print('-> memmap4.__new__: saved =',self._saved,
                  type(self.offset),type(offset))
        return self
    def verbose(self):
        # views created via __array_finalize__ may lack _verbose, hence hasattr
        return hasattr(self,'_verbose') and self._verbose
    def reopen(self):
        """ Re-create the memory mapping from the saved (file,length,offset) """
        (file,length,offset)=self._saved
        if self.verbose():
            print("-> memmap4.reopen: length, offset =",length,offset)
        if self._mmap.closed:
            if self.verbose():
                print('--> was closed, reopen')
            fo=open(file,'rb+')
            self._fileobj=fo
            # mmap offsets must be multiples of the allocation granularity
            block=mmap.ALLOCATIONGRANULARITY
            o=(offset//block)*block
            self._mmap=mmap.mmap(fo.fileno(),length,offset=o)
        else:
            if self.verbose():
                print('--> was already open')
    def __array_wrap__(self, arr, context=None):
        if self.verbose():
            print('-> memmap4.__array_wrap__')
        arr = super(memmap4, self).__array_wrap__(arr, context)
        return arr
    def __array_prepare__ (self, obj, context=None):
        if self.verbose():
            print('-> memmap4.__array_prepare__')
        # make sure the mapping is open before numpy touches the data
        self.reopen()
        obj = super(memmap4, self).__array_prepare__(obj, context)
        return obj
    def __array_finalize__ (self, obj):
        super(memmap4,self).__array_finalize__(obj)
        if self.verbose():
            print('-> memmap4.__array_finalize__')
        if obj is None:
            return
        # propagate the bookkeeping attributes to views and copies
        self._saved = obj._saved
        self._mmap = obj._mmap
    def __repr__(self):
        self.reopen()
        if self.verbose():
            print('-> memmap4.__repr__')
        output = super(memmap4,self).__repr__()
        return output
    def __str__(self):
        if self.verbose():
            print('-> memmap4.__str__')
        self.reopen()
        output = super(memmap4,self).__str__()
        return output
264 
def _var(patch, filed, snap, verbose=0, copy=None):
    """ To avoid the "too many open files" problem (Python on steno allows
        "only" about 800 files), patch.var is defined as a function, which
        returns a memmap. The function takes numeric or alphabetic
        arguments, so patch.var(0) and patch.var('d') is typically the
        density.
    """
    # per-variable size in bytes (4-byte floats)
    if _p2():
        nbytes = long(4*np.prod(patch.ncell))   # noqa: F821 -- Python-2 only
    else:
        nbytes = np.longlong(4*np.prod(patch.ncell))
    shape = tuple(patch.ncell)
    patch.offset = []
    # patch.ip is the offset in the file, ranging from 0 to ntotal-1.
    # BUGFIX: this previously tested hasattr('snap',...) on the *string*
    # 'snap', which is always False, so the cartesian branch was never taken
    if hasattr(snap, 'cartesian'):
        nrank = np.prod(snap.cartesian.mpi_dims)
        ntask = np.prod(snap.cartesian.per_rank)
    else:
        nrank = 1
        ntask = patch.ntotal
    patch.ip = (patch.id-1-patch.rank)//nrank + patch.rank*ntask
    if verbose == 5:
        print('id:', patch.id)
    # byte offset of each variable in the data file, per I/O format version
    for iv in range(patch.nv):
        if patch.ioformat == 5:
            offset = patch.ip + iv*patch.ntotal
            offset += patch.iout*patch.ntotal*patch.nv
        elif patch.ioformat >= 10:
            offset = patch.ip + iv*patch.ntotal
            offset += patch.iout*patch.ntotal*patch.nv
        elif patch.ioformat >= 6:
            offset = iv + patch.ip*patch.nv
            offset += patch.iout*patch.ntotal*patch.nv
        else:
            offset = iv
        offset *= nbytes
        patch.offset.append(offset)
        if verbose == 5:
            print(' iv, offset:', iv, patch.iout, patch.ntotal, patch.nv, offset)

    def mem(iv):
        """ Return a memmap of variable iv, translating alphabetic keys to
            numeric indices; the memmap flavor is chosen by patch.memmap """
        if isinstance(iv, str):
            iv = patch.idx.dict[iv]
        if patch.memmap == 2:
            return memmap2(filed, dtype=np.float32, offset=patch.offset[iv],
                           mode='r', order='F', shape=shape)
        elif patch.memmap == 3:
            return memmap3(filed, dtype=np.float32, offset=patch.offset[iv],
                           mode='r', order='F', shape=shape)
        elif patch.memmap == 4:
            return memmap4(filed, dtype=np.float32, offset=patch.offset[iv],
                           mode='r', order='F', shape=shape)
        else:
            return np.memmap(filed, dtype=np.float32, offset=patch.offset[iv],
                             mode='r', order='F', shape=shape)

    def average_down(iv, axis=0):
        """ 2-point running average along `axis`, used to move staggered
            (face-centered) variables to cell centers """
        q = mem(iv)
        n = 2  # 2-point average (equivalent to average over i-1 and i)
        # np.float was removed from numpy; np.float64 is the same dtype
        x = np.cumsum(q, axis=axis, dtype=np.float64)
        if axis == 0 and q.shape[0] > 1:
            x[n:,:,:] = x[n:,:,:] - x[:-n,:,:]
            x[n-1:,:,:] = x[n-1:,:,:]/n
            # last element of the average is copied from the original
            x[-1,:,:] = q[-2,:,:]
        elif axis == 1 and q.shape[1] > 1:
            x[:,n:,:] = x[:,n:,:] - x[:,:-n,:]
            x[:,n-1:,:] = x[:,n-1:,:]/n
            x[:,-1,:] = q[:,-2,:]
        elif axis == 2 and q.shape[2] > 1:
            x[:,:,n:] = x[:,:,n:] - x[:,:,:-n]
            x[:,:,n-1:] = x[:,:,n-1:]/n
            x[:,:,-1] = q[:,:,-2]
        else:
            x = q.copy()
        return x

    def _staggered():
        """ True for solvers with face-centered (staggered) variables """
        return patch.kind[0:4] == 'zeus' or patch.kind[0:7] == 'stagger'

    def xdown(iv):
        """ De-stagger along x for staggered solvers, else raw memmap """
        return average_down(iv, axis=0) if _staggered() else mem(iv)

    def ydown(iv):
        """ De-stagger along y for staggered solvers, else raw memmap """
        return average_down(iv, axis=1) if _staggered() else mem(iv)

    def zdown(iv):
        """ De-stagger along z for staggered solvers, else raw memmap """
        return average_down(iv, axis=2) if _staggered() else mem(iv)

    def internal(v, all=False):
        """ Strip guard zones from v, unless all=True """
        if all:
            return v
        elif patch.guard_zones:
            l = patch.ng
            u = l + patch.n
            # check if v's shape does not match the normal patch size;
            # if so, compute the guard zone size, and adjust
            rank = min(len(v.shape), len(patch.gn))
            if v.shape[0:rank] != tuple(patch.gn[0:rank]):
                gn = np.array(v.shape)
                ng2 = np.copy(gn)
                for i in range(len(patch.gn)):
                    ng2[i] = patch.gn[i] - gn[i]
                ng = ng2//2
                n = gn - ng2
                l = ng
                u = l + n
            return v[l[0]:u[0], l[1]:u[1], l[2]:u[2]]
        else:
            rank = min(len(v.shape), len(patch.gn))
            if v.shape[0:rank] != tuple(patch.n[0:rank]):
                gn = np.array(v.shape)
                ng2 = np.copy(gn)
                for i in range(len(patch.gn)):
                    ng2[i] = gn[i] - patch.n[i]
                ng = ng2//2
                n = patch.n
                l = ng
                u = l + n
                return v[l[0]:u[0], l[1]:u[1], l[2]:u[2]]
            else:
                return v

    def post_process(v, copy=False, all=False, i4=0):
        """ Optionally copy, select 4th-dimension slice i4, and strip
            guard zones """
        if copy or _p2():
            v = np.copy(v)
        if np.ndim(v) == 4:
            v = v[:,:,:,i4]
        return internal(v, all=all)

    def var(iv, all=False, copy=None, i4=0):
        """
        Special logic to calculate velocities from momenta on the fly.
        If the data is in spherical or cylindrical coords., then it is the
        angular momentum in the snapshot, and thus the division by metric
        factors.

        The `np.newaxis` bits are for broadcasting the metric factors to
        the right shape before multiplying by the data.
        """
        if isinstance(iv, int):
            assert (iv in patch.keys['numbers']), 'variable index unknown'
        else:
            assert (iv in patch.all_keys), 'variable key "'+iv+'" unknown'

        # check if the index corresponds to a cached array
        if hasattr(patch, 'data'):
            if iv in patch.data.keys():
                v = patch.data[iv]
                return post_process(v, all=all, copy=copy)

        # check if the key corresponds to aux data
        if hasattr(patch, 'aux'):
            if iv in patch.aux.vars.keys():
                v = patch.aux.var(iv)
                return post_process(v, copy=copy, all=all)

        # check special cases (derived expressions)
        if iv in patch.keys['expressions']:
            if iv == 'ee' or iv == 'E':
                # expressions common to all solvers
                v = mem('e')/mem('d')
            elif patch.kind[0:6] == 'ramses':
                # Ramses solver expressions (cell-centered momenta)
                if iv == 'ux' or iv == 'u1' or iv == 'vx':
                    v = mem('p1')/mem('d')
                elif iv == 'uy' or iv == 'u2' or iv == 'vy':
                    v = mem('p2')/mem('d')
                elif iv == 'uz' or iv == 'u3' or iv == 'vz':
                    v = mem('p3')/mem('d')
            else:
                # stagger-like solver expressions (face-centered momenta)
                if iv == 'u1' or iv == 'ux' or iv == 'vx':
                    v = mem('p1')/xdown('d')
                elif iv == 'u2' or iv == 'uy' or iv == 'vy':
                    if patch.mesh_type != 'Cartesian':
                        v = mem('p2')/ydown('d') \
                            /patch.geometric_factors['h2c'][:,np.newaxis,np.newaxis]
                    else:
                        v = mem('p2')/ydown('d')
                elif iv == 'u3' or iv == 'uz' or iv == 'vz':
                    if patch.mesh_type != 'Cartesian':
                        gf = patch.geometric_factors  # shorthand
                        v = mem('p3')/zdown('d')/gf['h31c'][:,np.newaxis,np.newaxis] \
                            /gf['h32c'][np.newaxis,:,np.newaxis]
                    else:
                        v = mem('p3')/zdown('d')
        else:
            v = mem(iv)
        return post_process(v, copy=copy, all=all, i4=i4)
    return var
460 
class _patch():
    """ Metadata and data access for a single patch of a snapshot.

        Attributes are assembled from the snapshot namelist, the per-patch
        namelist entries, and reconstructed geometry (corners, extents), and
        a .var() accessor is attached for reading the binary data.
    """
    def __init__(self,id,patch_dict,snap,rank,verbose=0):
        # id: patch ID; patch_dict: parsed per-patch properties;
        # snap: the owning snapshot; rank: MPI rank the patch belongs to
        self.id=id
        self.rank=rank
        self.memmap=1
        # add general attributes from snapshot_nml
        for k,v in snap.dict.items():
            setattr(self,k,v)
        timer.add('attrib-from-snapshot_nml')
        # add per-patch attributes from parsing
        for k,v in patch_dict.items():
            setattr(self,k,v)
        # without guard zones, the interior index range covers all cells
        if not self.guard_zones:
            self.li[:]=0
            self.ui[:]=self.n-1
        # add idx attribute (variable name-to-index mapping) from the snapshot
        if hasattr(snap,'idx'):
            self.idx=snap.idx
            self.idx.h=self._h()
        timer.add('add-idx-attr')
        # reconstruct items pruned from patch_nml: lower-left/upper-right
        # corners and the per-projection plot extents
        if hasattr(self,'size') and hasattr(self,'position'):
            llc=self.position-self.size/2.0
            urc=self.position+self.size/2.0
            self.extent=np.array(((llc[1],urc[1],llc[2],urc[2]),
                                  (llc[2],urc[2],llc[0],urc[0]),
                                  (llc[0],urc[0],llc[1],urc[1])))
            self.llc_cart=llc
        timer.add('llc-urc-extent')
        # inherit units from the snapshot when not set per patch
        if not hasattr(self,'units'):
            if hasattr(snap,'units'):
                self.units=snap.units
        # modify `mesh_type` from integer to string for readability
        if hasattr(self,'mesh_type'):
            if self.mesh_type == 1:
                self.mesh_type = "Cartesian"
            elif self.mesh_type == 2:
                self.mesh_type = "spherical"
            elif self.mesh_type == 3:
                self.mesh_type = "cylindrical"
        # support for legacy I/O method filenames (one file per patch);
        # otherwise all patches share the already-open snapshot data file
        if snap.io.method.strip()=='legacy':
            self.filename=snap.rundir+'/{:05d}/{:05d}.dat'.format(self.iout,self.id)
            self.var=_var(self,self.filename,snap)
        else:
            self.var=_var(self,snap.datafiled,snap)
        timer.add('_var') # 1.8s

        """ add a comprehensive set of variable keys """
        self.keys={}
        self.keys['letters']=list(snap.idx.dict.keys())
        timer.add('letters')
        self.keys['numbers']=list(snap.idx.dict.values())
        timer.add('numbers')
        self.keys['expressions']=['ux','uy','uz','u1','u2','u3','ee','E']
        timer.add('expressions')
        # attach an aux filename, if it exists
        auxfile='{:05d}.aux'.format(id)
        auxfile=_pathjoin(snap.datadir,auxfile)
        timer.add('auxfile')
        if 1:
            if os.path.isfile(auxfile):
                self.auxfile=auxfile
                self.aux=aux(id=id,io=snap.iout,file=auxfile)
                self.keys['aux']=self.aux.vars.keys()
            elif verbose>0:
                print(auxfile,'does not exist')
        timer.add('read-aux')

        """ Now collect all keys in a single list """
        all=[]
        for key_list in self.keys.values():
            all.extend(key_list)
        self.all_keys=all
        timer.add('all-values')

    def _h(self):
        """ Return a (3,nv) array of per-variable mesh staggering offsets:
            -0.5 along the relevant axis for momentum and magnetic-field
            components in stagger-type solvers, 0 otherwise """
        idx=self.idx
        h=np.zeros((3,self.nv))
        if self.kind[0:7]=='stagger':
            if idx.p1>=0: h[0,idx.p1]=-0.5
            if idx.b1>=0: h[0,idx.b1]=-0.5
            if idx.p2>=0: h[1,idx.p2]=-0.5
            if idx.b2>=0: h[1,idx.b2]=-0.5
            if idx.p3>=0: h[2,idx.p3]=-0.5
            if idx.b3>=0: h[2,idx.b3]=-0.5
        return h

    def cache(self,verbose=0):
        """ Read all variables into memory, making them available via the
            patch.data dict (both numeric and letter keys) and as
            attributes of the shorthand object patch.o """
        setattr(self,'o',_param())
        self.data={}
        for k,v in self.idx.vars.items():
            # k is the numeric index, v the corresponding letter key
            self.data[k]=self.var(v)
            self.data[v]=self.data[k]
            setattr(self.o,v,self.data[v])
        if verbose:
            print("\nBinary data can always be accessed via the patch.data memmap, \
the patch.idx object, and the patch.idx.dict dictionary. Examples:\n\
 d=p.data[0]\n\
 d=p.data[p.idx.d]\n\
 d=p.data[p.idx.dict['d']]\n\n\
patch.cache() adds an object .o and a dict .d as shorthand attributes. The dict \
object takes both numeric and text key values: Examples:\n\
 d=p.o.d\n\
 d=p.d[0]\n\
 d=p.d['d']")

    def plane(self,x=None,y=None,z=None,iv=0):
        """ Extract a 2-D slice of variable iv at coordinate x, y, or z,
            linearly interpolating between the two bracketing cell planes """
        if self.guard_zones:
            li=self.li
            ui=self.ui
        else:
            li=np.zeros(3)
            ui=self.n-1
        if x:
            # fractional cell coordinate along x
            # NOTE(review): np.int was removed in numpy>=1.24 — int() intended
            p=(x-self.x[0])/self.ds[0]
            i=np.int(p)
            i=np.minimum(i,ui[0]-2)
            p=p-i
            f = self.var(iv)[i ,li[1]:ui[1],li[2]:ui[2]]*(1.0-p) \
              + self.var(iv)[i+1,li[1]:ui[1],li[2]:ui[2]]*p
        if y:
            p=(y-self.y[0])/self.ds[1]
            i=np.int(p)
            i=np.minimum(i,ui[1]-2)
            p=p-i
            f = (self.var(iv)[li[0]:ui[0],i ,li[2]:ui[2]]*(1.0-p) \
               + self.var(iv)[li[0]:ui[0],i+1,li[2]:ui[2]]*p).transpose()
        if z:
            # NOTE(review): self.z[2] is inconsistent with the x/y cases,
            # which use self.x[0]/self.y[0] — confirm whether z[0] was meant
            p=(z-self.z[2])/self.ds[2]
            i=np.int(p)
            i=np.minimum(i,ui[2]-2)
            p=p-i
            f = self.var(iv)[li[0]:ui[0],li[1]:ui[1],i ]*(1.0-p) \
              + self.var(iv)[li[0]:ui[0],li[1]:ui[1],i+1]*p
        # NOTE(review): self.verbose is never set in __init__ — presumably
        # inherited from snapshot_nml attributes; verify
        if self.verbose:
            print('plane: i, p = {:d} {:.3f}'.format(i,p))
        return f
604 
605 
606 def _parse_namelist (items):
607  pos=items[1:]
608  if len(pos)==1:
609  pos[0]=pos[0].replace('3*','')
610  pos=[pos[0],pos[0],pos[0]]
611  elif len(pos)==2:
612  if pos[0].find('2*')>=0:
613  pos[0]=pos[0].replace('2*','')
614  pos=[pos[0],pos[0],pos[1]]
615  if pos[1].find('2*')>=0:
616  pos[1]=pos[1].replace('2*','')
617  pos=[pos[0],pos[1],pos[1]]
618  # choose right data type for the resulting array
619  try:
620  pos = [int(p) for p in pos]
621  except:
622  pos = [float(p) for p in pos]
623 
624  return np.array(pos)
625 
def parse_patches(snap,file='../data/00000/rank_00000_patches.nml'):
    """ Optimized parsing of patch namelist entries.

        Returns a dict mapping patch ID to a dict of per-patch properties
        parsed from a rank_*_patches.nml file.
    """
    prop_dict={}

    with open(file,'r') as fo:
        watch_block = False
        for line in fo:
            # strip commas, equal signs and quotes from the line and split
            line=line.replace('=',' ').replace(',','').replace('"',' ')
            items=line.split()
            # ROBUSTNESS: skip blank lines, which would otherwise raise an
            # IndexError on items[0]
            if not items:
                continue
            if items[0] == "&PATCH_NML":
                d={}
                watch_block = True
            elif items[0]=='ID':
                id=int(items[1])
            elif items[0]=='POSITION':
                d['position'] = _parse_namelist(items)
            elif items[0]=='SIZE':
                d['size'] = _parse_namelist(items)
                d['ds'] = d['size']/snap.n
            elif items[0]=='LEVEL':
                d['level']=int(items[1])
            elif items[0]=='DTIME':
                d['dtime']=float(items[1])
            elif items[0]=='TIME':
                d['time']=float(items[1])
            elif items[0]=='ISTEP':
                d['istep']=int(items[1])
            elif items[0]=='DS':
                d['ds'] = _parse_namelist(items)
            elif items[0]=='MESH_TYPE':
                d['mesh_type'] = int(items[1])
            elif items[0]=='NCELL':
                d['ncell'] = _parse_namelist(items)
            elif items[0]=='KIND':
                d['kind']=items[1]
            elif items[0]=='/' and watch_block: # the final entry of a namelist is always "/"
                prop_dict[id]=d
                watch_block = False
    return prop_dict
669 
class snapshot():
    """ Return a snapshot worth of metadata, including memory mapped variables
    """
    def __init__(self, iout=0, run='', data='../data', datadir='', verbose=0, copy=False, timeit=False):
        # iout: snapshot number; run: run subdirectory under `data`;
        # datadir: explicit data directory (overrides the iout-based default)
        # Set the max number of open files to the hard limit
        try:
            import resource
            limits=resource.getrlimit(resource.RLIMIT_NOFILE)
            resource.setrlimit(resource.RLIMIT_NOFILE, (limits[1],limits[1]))
        except:
            # NOTE(review): bare except — presumably to tolerate platforms
            # without the resource module (Windows), but it also hides
            # real errors; consider except (ImportError, ValueError, OSError)
            pass
        self.copy=copy
        # Start time measurement
        timer.start()
        rundir=_dir(data,run)
        if datadir=='':
            datadir=_dir(rundir,'{:05d}'.format(iout))
        self.datadir=datadir
        self.rundir=rundir

        files=[f for f in os.listdir(datadir) if f.endswith('snapshot.nml')]
        for f in files:
            # add parameter groups from data/run/NNNNN/*snapshot.nml
            file=_file(datadir,f)
            if verbose==1:
                print ('parsing',file)
            nml_list=f90nml.read(file)
            self.nml_list=nml_list
            _add_nml_list_to(self,nml_list,verbose=verbose)
            if 'idx_nml' in nml_list.keys():
                idx_dict = nml_list['idx_nml']
                idx = _obj(idx_dict)
                idx.dict = idx_dict
                # add a dict with (existing) alphabetic variable names,
                # mapping numeric index -> letter key
                idx.vars={}
                for k,v in idx.dict.items():
                    if not v in idx.vars.keys():
                        if v>=0:
                            idx.vars[v]=k
                self.idx=idx
                # add a list of all possible keys (both numbers and letters)
                self.keys=[]
                for k,v in self.idx.vars.items():
                    self.keys.append(k)
                    self.keys.append(v)
        timer.add('snapshot metadata')

        # Ignore a missing snapshots.dat, for methods that don't have it
        try:
            self.datafile=_file(rundir,'snapshots.dat')
            self.datafiled=open(self.datafile,'rb')
        except:
            # NOTE(review): bare except — deliberate best-effort, but it
            # also hides e.g. permission errors
            pass
        self.dict=nml_list['snapshot_nml']

        # "dress" the snapshot with attributes from snapshot_nml, where
        # lists are turned into arrays
        for k,v in self.dict.items():
            if isinstance(v,list):
                self.dict[k]=np.array(v)
        timer.add('lists-to-arrays')

        # add patches as a list of dicts
        self.patches=[]
        files=[f for f in os.listdir(datadir) if f.endswith('_patches.nml')]
        save=[]
        for f in files:
            # split name to get rank number
            # NOTE(review): np.int was removed in numpy>=1.24 — int() intended
            rank=np.int(f.split('_')[1])
            # add parameter groups from data/run/NNNNN/*patches.nml
            file=_file(datadir,f)
            if verbose==1:
                print ('parsing',file)
            patch_dict=parse_patches(self,file)
            save.append(patch_dict)
        timer.add('parse_patches') # 0.7s
        # NOTE(review): `rank` below is the value left over from the last
        # file of the loop above — confirm this is intended when more than
        # one rank_*_patches.nml file is present
        for patch_dict in save:
            """
            At this point all metadata about patches is available in the
            patch_dict, with patch IDs as keys. This makes it possible to
            scan for files that can supply both regular and auxiliary data
            """
            ids=sorted(patch_dict.keys())
            timer.add('sorted-keys')
            for id in ids:
                """ The _patch procedure collects all information about a patch
                    into an instance of the _patch class, and appends it to
                    the snapshot.patches list """
                p=_patch(id,patch_dict[id],self,rank,verbose=verbose)
                timer.add('snapshot(1)')
                self._add_axes(p)
                # Append to array of patch instances
                self.patches.append(p)

                # verbose info
                if verbose==2 and hasattr(p,'idx'):
                    data=p.var('d')
                    dmax=data.max()
                    print (' id:{:4d} pos: {} {}'.format(p.id,p.position,dmax))
                elif verbose==3:
                    print('id:',p.id)
                    for iv in range(p.nv):
                        data=p.var(iv)
                        vmin=float(data.min())
                        vmax=float(data.max())
                        print ('{:>5}: min = {:10.3e} max = {:10.3e}'.format(
                               p.idx.vars[iv], vmin, vmax))
                elif verbose==4:
                    attributes(p)
                timer.add('append+verbose')
        if verbose==1:
            print(' added',len(self.patches),'patches')
        timer.add('_patches')
        timer.print(verbose=verbose,timeit=timeit)

    def _add_axes(self,patch):
        """ Attach 1-D coordinate axes to the patch: cell-centered x,y,z,
            interior (guard-zone-free) views xi,yi,zi, staggered axes
            xs,ys,zs, and the geometric factors for curvilinear meshes """
        first=patch.llc_cart-patch.ng*patch.ds
        n=patch.gn
        ng=patch.ng
        if patch.no_mans_land:
            # with "no man's land", cell centers are shifted by half a cell
            first=first+0.5*patch.ds
        patch.x=first[0]+patch.ds[0]*np.arange(n[0])
        patch.y=first[1]+patch.ds[1]*np.arange(n[1])
        patch.z=first[2]+patch.ds[2]*np.arange(n[2])
        timer.add('_add_axes(1)') # 6.9s
        # NOTE(review): ng[0] is used for all three axes below — presumably
        # ng[1]/ng[2] were meant for yi/zi; confirm
        patch.xi=patch.x[ng[0]:-ng[0]]
        patch.yi=patch.y[ng[0]:-ng[0]]
        patch.zi=patch.z[ng[0]:-ng[0]]
        # staggered (face-centered) axes, shifted by half a cell
        patch.xs=patch.x-0.5*patch.ds[0]
        patch.ys=patch.y-0.5*patch.ds[1]
        patch.zs=patch.z-0.5*patch.ds[2]
        patch.xyz=[patch.x,patch.y,patch.z]
        patch.xyzi=[patch.xi,patch.yi,patch.zi]
        timer.add('_add_axes(2)') # 6.9s
        # add geometric factors to patch (important for spherical/cylindrical coords.)
        if self.mesh_type==1:
            patch.geometric_factors=None
        else:
            patch.geometric_factors=GeometricFactors(patch)
        timer.add('geometric_factors') # 6.9s

    def patches_in (self, x=None, y=None, z=None):
        """ Return the list of patches whose extent contains the given
            x, y, and/or z coordinate """
        pp=self.patches
        if x:
            pp=[p for p in pp if x>=p.extent[2,0] and x<p.extent[2,1]]
        if y:
            pp=[p for p in pp if y>=p.extent[0,0] and y<p.extent[0,1]]
        if z:
            pp=[p for p in pp if z>=p.extent[1,0] and z<p.extent[1,1]]
        return pp

    def hdf5(self,verbose=0):
        """ Add dict parameters to an HDF5 file """
        import h5py
        h5file=self.rundir+'/hdf5.dat'
        # append to an existing file, else create one
        rw = 'r+' if os.path.isfile(h5file) else 'w'
        with h5py.File(h5file,rw) as f:
            if verbose>0:
                print('adding snapshot.dict attributes to '+h5file)
            for k,v in self.dict.items():
                f.attrs[k]=v
                if verbose>1:
                    print('{:>20}: {}'.format(k,v))

    def plane(self,x=None,y=None,z=None,iv=0,verbose=0):
        """ Render an image plane of variable iv via dispatch.graphics """
        # NOTE(review): verbose=1 is hard-coded here; the verbose argument
        # is ignored — confirm whether verbose=verbose was intended
        dg.image_plane(self,x=x,y=y,z=z,iv=iv,verbose=1)
836 
def snapshots(run='',data='../data', verbose=0):
    """ Return a list of all snapshots in the data/run/ directory """
    rund=_dir(data,run)
    snaps=[]
    # walk only the top level of the run directory; each subdirectory is
    # assumed to hold one snapshot
    for dirname, subdirs, files in os.walk(rund):
        if dirname==rund:
            for subdir in subdirs:
                datadir=_dir(rund,subdir)
                snap=snapshot(data=data,run=run,datadir=datadir,verbose=verbose)
                # keep only snapshots that actually contain patches
                if hasattr(snap,'patches'):
                    if len(snap.patches)>0:
                        snaps.append(snap)
                if verbose>0:
                    print (_dir(rund,subdir))
            # stop after the first (top) level of the walk
            break
    return snaps
854 
def attributes(patch):
    """ Pretty-print all attributes of a patch, skipping the bulky
        cached data dict """
    print ('id: {:}'.format(patch.id))
    for name, value in vars(patch).items():
        if name == 'data':
            continue
        print ('{:>12}: {}'.format(name, value))
862 
def pdf(iout=1,run='',data='../data'):
    """ Read and accumulate PDF output files from all ranks.

        Returns an object with the ioformat, time, bins (converted from
        natural log to log10) and counts summed over all rank files.
    """
    rundir=data+'/'+run+'/'
    assert os.path.isdir(rundir), 'the directory {} does not exist'.format(rundir)
    files=os.listdir(rundir)
    pattern='pdf_{:05d}'.format(iout)
    files=[f for f in files if f.startswith(pattern)]
    assert len(files) > 0, 'there are no {}_* files in {}'.format(pattern,rundir)
    counts=0
    for file in files:
        with FortranFile(rundir+file,'r') as ff:
            # record layout: (ioformat, nbins), time, bins, counts
            ioformat,nbins=ff.read_ints()
            time=ff.read_reals('<f8')[0]
            # convert natural-log bin values to log10
            bins=ff.read_reals('<f4')/np.log(10)
            # accumulate the counts over rank files
            counts+=ff.read_reals('<f4')
    # NOTE(review): ioformat, time and bins are taken from the last file
    # read — assumes they are identical across ranks; confirm
    return _obj({'ioformat':ioformat,'time':time,'bins':bins,'counts':counts})
879 
if __name__ == "__main__":
    # no command-line interface; the module is meant to be imported
    pass
882 
def hdf5(self, verbose=0)
Definition: _dispatch.py:821
def read(nml_path)
Definition: __init__.py:12
def _add_axes(self, patch)
Definition: _dispatch.py:785