FLEXPART Testing Environment CTBTO WO8
 All Classes Namespaces Files Functions Variables Pages
pflexible.py
Go to the documentation of this file.
1 #!/usr/bin/env python
2 """
3 
4 SYNOPSIS
5 ========
6 
7  pflexible [-h] [-v,--verbose] [--version]
8 
9 DESCRIPTION
10 ===========
11 
12  pflexible: A python module for working with FLEXPART Output.
13 
14 
15 EXAMPLES
16 ========
17 
18  #TODO:
19 
20  A lot! This is just a starting point. See the doc strings
21  for information about the various functions.
22 
23 
24 AUTHOR
25 ======
26 
27  JFB: John F Burkhart <jfburkhart@gmail.com>
28 
29 CONTRIBUTORS
30 ============
31 
32  HSO: Harald Sodemann
33  SEC: Sabine Eckhardt
34  AST: Andreas Stohl
35 
36  Many functions are adaptations of Fortran / NCARG programs (AST)
37  or Matlab functions (HSO/SEC).
38 
39 LICENSE
40 =======
41 
42  This script follows creative commons usage.
43 
44 VERSION
45 =======
46 
47  ID: $Id$: $Rev$
48 """
49 # builtin imports
50 # import pdb
51 import sys
52 import os
53 import struct
54 import re
55 import optparse
56 import time
57 import datetime
58 import itertools
59 from math import pi, sqrt, cos
60 import traceback
61 import pdb
62 
63 
64 # Dependencies:
65 # Numpy
66 import numpy as np
67 # Matplotlib
68 import matplotlib as mpl
69 # mpl.use('Agg')
70 import matplotlib.pyplot as plt
71 from matplotlib.dates import date2num
72 import matplotlib.image as image
73 from matplotlib.patches import Ellipse
74 # Basemap
75 try:
76  from mpl_toolkits.basemap import shiftgrid, addcyclic
77 except ImportError:
78  from matplotlib.toolkits.basemap import shiftgrid, addcyclic
79 
80 
81 # local imports
82 #import mapping as mp
83 
84 __version__ = '0.9.5'
85 __path__ = os.path.abspath(os.curdir)
86 
87 #### Functions for reading FLEXPART output #####
88 
89 
def read_command(path, headerrows=7):
    """
    Read a FLEXPART COMMAND file.

    .. note::
        Assumes the COMMAND file has a header of 7 lines. Use
        option "headerrows" to override this.

    USAGE::

        > A = read_command(filepath)

    where ``filepath`` is a path to the COMMAND file.

    Returns
        a dictionary with one entry per COMMAND option, keyed by:

        SIM_DIR, SIM_START, SIM_END, AVG_CNC_INT, AVG_CNC_TAVG,
        CNC_SAMP_TIME, T_PARTSPLIT, SYNC, CTL, IFINE, IOUT, IPOUT,
        LSUBGRID, LCONVECTION, LAGESPECTRA, IPIN, OUTPUTFOREACHRELEASE,
        IFLUX, MDOMAINFILL, IND_SOURCE, IND_RECEPTOR, MQUASILAG,
        NESTED_OUTPUT, LINIT_COND

        SIM_START / SIM_END are ['YYYYMMDD', 'HHMMSS'] string pairs,
        CTL is a float, everything else an int.
    """
    # BUGFIX: use open() in a context manager instead of the Python-2-only
    # file() builtin (which also leaked the file handle).
    with open(path) as f:
        lines = f.readlines()
    command_vals = [line.strip() for line in lines[headerrows:]]  # clean line ends

    # one key per (non-header) line of the COMMAND file, in file order
    COMMAND_KEYS = (
        'SIM_DIR',
        'SIM_START',
        'SIM_END',
        'AVG_CNC_INT',
        'AVG_CNC_TAVG',
        'CNC_SAMP_TIME',
        'T_PARTSPLIT',
        'SYNC',
        'CTL',
        'IFINE',
        'IOUT',
        'IPOUT',
        'LSUBGRID',
        'LCONVECTION',
        'LAGESPECTRA',
        'IPIN',
        'OUTPUTFOREACHRELEASE',
        'IFLUX',
        'MDOMAINFILL',
        'IND_SOURCE',
        'IND_RECEPTOR',
        'MQUASILAG',
        'NESTED_OUTPUT',
        'LINIT_COND')
    float_keys = ['CTL']
    date_keys = ['SIM_START', 'SIM_END']

    COMMAND = {}

    for i, key in enumerate(COMMAND_KEYS):
        val = command_vals[i].split()
        if key in date_keys:
            # keep the ['YYYYMMDD', 'HHMMSS'] pair as strings
            val = val[:2]
        elif key in float_keys:
            val = float(val[0])
        else:
            # first whitespace-separated token; trailing comments are ignored
            val = int(val[0])

        COMMAND[key] = val
    return COMMAND
220 
221 
222 
223 
def read_releases(path, headerrows=11):
    """
    Read a FLEXPART RELEASES file.

    .. note::
        Assumes the releases file has a header of 11 lines. Use
        option "headerrows" to override this.

    USAGE::

        > A = read_releases(filepath)

    where ``filepath`` is either a file object or a path.

    Returns
        a record array with fields:

        ============  ==========================
        fields        description
        ============  ==========================
        start_time    datetime start
        end_time      datetime end
        lllon         lower left longitude
        lllat         lower left latitude
        urlon         upper right longitude
        urlat         upper right latitude
        altunit       1=magl, 2=masl, 3=hPa
        elv1          lower z level
        elv2          upper z level
        numpart       number of particles
        mass          mass for each spec (fields mass0, mass1, ...)
        id            ID for each release
        ============  ==========================
    """
    lines = _getfile_lines(path)
    lines = [line.strip() for line in lines]  # clean line ends

    # the species count sits on the first line after the header
    nspec = int(lines[headerrows])
    blocklength = headerrows + nspec
    spec = []
    for i in range(nspec):
        spec.append(int(lines[headerrows + 1 + i]))
    indx = headerrows + 1 + (i + 2)
    blocks = []
    for i in range(indx, len(lines), blocklength + 1):
        blocks.append(lines[i:i + blocklength])

    for bi, b in enumerate(blocks):
        b[0] = datetime.datetime.strptime(b[0], '%Y%m%d %H%M%S')
        b[1] = datetime.datetime.strptime(b[1], '%Y%m%d %H%M%S')
        # BUGFIX: np.float was deprecated/removed in NumPy; use builtin float
        b[2:6] = [float(v) for v in b[2:6]]
        b[6] = int(b[6])
        b[7] = float(b[7])
        b[8] = float(b[8])
        b[9] = int(b[9])
        for i in range(nspec):
            b[10 + i] = float(b[10 + i])
        # BUGFIX: the original rebound the loop variable (b = tuple(b)),
        # which never updated `blocks`; store the tuple back explicitly.
        blocks[bi] = tuple(b)

    names = ['start_time', 'end_time', 'lllon', 'lllat', 'urlon', 'urlat',
             'altunit', 'elv1', 'elv2', 'numpart']
    for i in range(nspec):
        names.append('mass%s' % i)
    names.append('id')

    return np.rec.fromrecords(blocks, names=names)
301 
302 
def read_trajectories(H, trajfile='trajectories.txt', \
                      ncluster=5, \
                      ageclasses=20):
    """
    Read the trajectories.txt file from a FLEXPART run output directory.

    Based on output from plumetraj.f: a plume centroid trajectory for each
    release site, plus clustered particle locations and plume statistics
    (mean PV, tropopause height, rms distances before/after clustering,
    cluster centroid positions and fractions).

    USAGE::

        > T = read_trajectories(H_OR_path_to_trajectories_file, **kwargs)

    .. note::
        The first argument is either a path to a trajectory file, or simply
        a :class:`Header` instance (whose ``path`` attribute is then used).

    Keyword arguments:

        =========  ==================================================
        keyword    Description [default]
        =========  ==================================================
        trajfile   name of the input file ['trajectories.txt']
        ncluster   number of clusters [5]
        ageclasses number of ageclasses [20] (currently unused here)
        =========  ==================================================

    Returns a dictionary-like Structure with keys 'Trajectories'
    (2-d float array), one '<release name>' entry per release point,
    plus 'version', 'date', 'labels' and 'info'.
    """
    if isinstance(H, str):
        try:
            # BUGFIX: open() in a context manager instead of Py2 file()
            with open(H) as f:
                alltraj = f.readlines()
        except IOError:
            raise IOError('Could not open file: %s' % H)
    else:
        path = H.path
        with open(os.path.join(path, trajfile)) as f:
            alltraj = f.readlines()

    # header line holds start date/time and (newer files) model/version
    try:
        ibdate, ibtime, model, version = alltraj[0].strip().split()[:4]
    except ValueError:
        # older files carry only date and time
        ibdate, ibtime = alltraj[0].strip().split()[:2]
        model = 'Flexpart'
        version = 'V.x'
    dt = datetime.datetime.strptime(ibdate + ibtime.zfill(6), '%Y%m%d%H%M%S')
    numpoint = int(alltraj[2].strip())
    # Fill a dictionary with the Release points and information keyed by name
    # RelTraj['RelTraj_ID'] = (i1,i2,xp1,yp1,xp2,yp2,zp1,zp2,k,npart)
    RelTraj = Structure()
    Trajectories = []

    # release definitions come in pairs of lines: numbers, then the name
    for i in range(3, 3 + (numpoint * 2), 2):
        i1, i2, xp1, yp1, xp2, yp2, zp1, zp2, k, npart , = \
            tuple([float(j) for j in alltraj[i].strip().split()])
        itimerel1 = dt + datetime.timedelta(seconds=i1)
        itimerel2 = dt + datetime.timedelta(seconds=i2)
        Xp = (xp1 + xp2) / 2
        Yp = (yp1 + yp2) / 2
        Zp = (zp1 + zp2) / 2
        RelTraj[alltraj[i + 1].strip()] = np.array((itimerel1, itimerel2, Xp, Yp, Zp, k, npart))

    for i in range(3 + (numpoint * 2), len(alltraj)):
        raw = alltraj[i]
        # fixed-width field layout of a trajectory record (plumetraj.f)
        FMT = [0, 5, 8, 9, 9, 8, 8, 8, 8, 8, 8, 8, 8, 8, 6, 6, 6] + ncluster * [8, 8, 7, 6, 8]

        data = [raw[sum(FMT[:ii]):sum(FMT[:ii + 1])] for ii in range(1, len(FMT) - 1)] + \
               [raw[sum(FMT[:-1]):]]
        # ## To get rid of '******' overflow markers in trajectories.txt
        data = [float(r.replace('********', 'NaN')) for r in data]

        Trajectories.append(data)

    data = np.array(Trajectories)
    RelTraj['version'] = model + ' ' + version
    RelTraj['date'] = dt
    RelTraj['Trajectories'] = data
    RelTraj['labels'] = \
        ['release number', 'seconds prior to release', 'lon', 'lat', 'height', 'mean topography', \
         'mean mixing height', 'mean tropopause height', 'mean PV index', \
         'rms distance', 'rms', 'zrms distance', 'zrms', \
         'fraction height??', 'fraction PV<2pvu', 'fraction in troposphere'] + \
        ncluster * ['xcluster', 'ycluster', 'zcluster', 'fcluster', 'rmscluster']
    RelTraj['info'] = \
        """
        Returns a dictionary:
            R['Trajectories'] = array_of_floats(
                releasenum,it1,xi,yi,zi,topoi,hmixi,tropoi,pvi,
                rmsdisti,rmsi,zrmsdisti,zrmsi,hfri,pvfri,trfri,
                (xclusti(k),yclusti(k),zclusti(k),fclusti(k),rmsclusti(k),k=1,5))

            R['RELEASE_ID'] = (dt_i1,dt_i2,xp1,yp1,xp2,yp2,zp1,zp2,k,npart)
            R['info'] = this message

        To plot a trajectory timeseries:
            RT = read_trajectories(H)
            T = RT['Trajectories']
            rel = 1
            t = T[np.where(T[:,0]==rel),:][0]
            plt.plot(t[:,1],t[:,14])
            plt.savefig('trajectories.png')
        """
    return RelTraj
455 
456 
def curtain_for_flightrack(H, flighttrack, nspec=0, npspec_int=0, index=0, get_track=False):
    """
    Extract curtain data along a given flight track.

    Parameters
      H           a pf.Header; if its grids have not yet been read
                  (H.read_grid / H.fill_backward), they are loaded here
      flighttrack numpy array of (datetime, lon, lat, elv) rows
      nspec       species index into the FD dict [0]
      npspec_int  passed through to H.read_grid [0]
      index       slice index when numpointspec > 1 [0]
      get_track   if True, return values at the track points only
                  instead of the full vertical curtain [False]

    Returns
      curtain: 2-d array shaped (len(H.outheight), len(flighttrack)),
      or 1-d of len(flighttrack) when get_track is True.

    TODO::
        add interpolation, extract a track, not curtain (e.g. z points)
    """
    # make sure the gridded data needed below is present on H
    if H.direction == 'forward':
        if 'FD' not in H.keys():
            # read only the grids matching the flight times
            flighttimes = flighttrack[:, 0]
            time_ret = H.closest_dates(flighttimes, take_set=True)
            H.read_grid(nspec_ret=nspec, time_ret=time_ret,
                        npspec_int=npspec_int)
    elif H.direction == 'backward':
        if 'C' not in H.keys():
            H.fill_backward()

    if get_track:
        curtain = np.zeros(len(flighttrack))
    else:
        curtain = np.zeros((len(flighttrack), len(H.outheight)))

    for row, (t, lon, lat, elv) in enumerate(flighttrack):
        # pick the output grid closest in time to this track point,
        # assuming t is a datetime
        idx = H.closest_date(t)
        datestring = H.available_dates[idx]
        if H.direction == 'forward':
            grid = H.FD[(nspec, datestring)]['grid'][:, :, :, index]
        elif H.direction == 'backward':
            grid = H.C[(0, idx)]['grid']

        I = closest(lon, H.longitude)
        J = closest(lat, H.latitude)
        if get_track:
            K = closest(elv, H.outheight)
            curtain[row] = grid[I, J, K]
        else:
            curtain[row] = grid[I, J, :]

    return curtain.T
528 
def curtain_for_line(grid, X, Y, coords, index=0):
    """
    Extract curtain data from a gridded array along pairs of (lon, lat).

    .. note::
        (docstring fixed: the original was copied from
        curtain_for_flightrack and described the wrong parameters)

    Parameters
      grid    3-d concentration array, or 4-d in which case the slice
              at ``index`` along the last axis is used
      X, Y    1-d coordinate axes (e.g. H.longitude, H.latitude)
      coords  iterable of (x, y) pairs defining the line
      index   optional slice index when numpointspec > 1 [0]

    Returns
      curtain: 2-d array with shape (grid.shape[2], len(coords)),
      with NaNs replaced by zeros.
    """
    if len(grid.shape) == 4:
        # reduce to a single point-spec slice
        grid = grid[:, :, :, index]

    curtain = np.zeros((len(coords), grid.shape[2]))
    for i, (x, y) in enumerate(coords):
        # nearest grid cell for each coordinate pair
        I = closest(x, X)
        J = closest(y, Y)
        curtain[i] = grid[I, J, :]
    curtain = np.nan_to_num(curtain)
    # curtain = np.ma.fix_invalid(np.array(curtain,dtype=float))
    return curtain.T
567 
568 
def groundlevel_for_line(H, X, Y, coords, index=0):
    """
    Extract ground level from H.Heightnn along a track of lon, lat.

    Parameters
      H       a Header exposing ``Heightnn`` (the lowest level is taken),
              or the 2-d height array itself
      X, Y    coordinate axes, e.g. H.longitude, H.latitude
      coords  iterable of (x, y) pairs, e.g. zip(x, y)
      index   unused; kept for interface compatibility

    Returns
      groundlevel: 1-d array of shape (len(coords),), offset so its
      minimum is zero.
    """
    try:
        hgt = H.Heightnn[:, :, 0]
    except AttributeError:
        # BUGFIX: narrowed bare except; H is already a height array
        hgt = H

    # fix for hgt offset
    hgt = hgt - hgt.min()

    grndlvl = np.zeros(len(coords))

    for i, (x, y) in enumerate(coords):
        # nearest grid cell for each coordinate pair
        I = closest(x, X)
        J = closest(y, Y)
        grndlvl[i] = hgt[I, J]
    grndlvl = np.nan_to_num(grndlvl)

    # offset again so the returned profile starts at zero
    return grndlvl - grndlvl.min()
601 
602 
def curtain_agltoasl(H, curtain_agl, coords, below_gl=0.0):
    """Convert an above-ground-level (agl) curtain to above-sea-level (asl).

    Adds an ``asl_axis`` attribute to H (np.linspace from 0 to the top
    output height; linspace's default 50 points).

    Parameters
      H            a Header with ``longitude``, ``latitude``, ``outheight``
      curtain_agl  2-d curtain array, columns matching ``coords``
      coords       iterable of (lon, lat) pairs
      below_gl     fill value used below ground level [0.0]

    Returns
      casl: 2-d array of shape (len(H.asl_axis), len(coords)).
    """
    gl = groundlevel_for_line(H, H.longitude, H.latitude, coords)

    H.asl_axis = np.linspace(0, H.outheight[-1])

    # heights relative to the lowest output level
    xp = H.outheight - H.outheight[0]

    casl = np.zeros((len(H.asl_axis), len(coords)))

    # interpolate each column onto the asl axis, shifted up by ground level
    # BUGFIX: xrange -> range (Python 3 compatibility)
    for i in range(len(coords)):
        casl[:, i] = np.interp(H.asl_axis, xp + gl[i],
                               curtain_agl[:, i], left=below_gl)

    return casl
622 
623 
def read_agespectrum(filename, part=False, ndays=20):
    """
    Read the spectrum.txt files generated from the "make_webpages" scripts.

    .. note::
        This functionality depends on having run the make_webpages scripts
        in-house. It is not supported in the public API.

    USAGE::

        > A = read_agespectrum(filename)

    Returns a dictionary-like Structure containing ageclass information:

        =============  ========================================
        Keys           Description
        =============  ========================================
        agespectrum    2d-array (dt, lat, lon, alt,
                       tracer[:numageclass] for each release)
        ageclasses     1d-array of ageclass ages (seconds)
        numspec        number of species
        numageclass    number of ageclasses
        filename       the input filename
        info           summary message
        =============  ========================================

    Arguments:
        part   True if the file comes from make_webpages without the
               header information [False]
        ndays  the number of ageclasses when part is True [20]
    """
    # BUGFIX: open() in a context manager instead of Python-2-only file()
    with open(filename) as fh:
        f = fh.readlines()

    line1 = f[0].strip().split()
    if part:
        numageclass = ndays
        # NOTE(review): arange(1, ndays * 1) yields only ndays-1 classes,
        # which disagrees with numageclass = ndays; looks like it was meant
        # to be arange(1, ndays + 1) -- confirm before changing.
        ageclasses = np.arange(1, ndays * 1) * 60 * 60 * 24
        numspec = 1
    else:
        # header lines carry the ageclass count/ages and species count
        numageclass = int(line1[0])
        ageclasses = np.array([int(i) for i in line1[1:]])
        numspec = int(f[1].strip().split()[0])
    A = Structure()
    D = []
    for line in f[2:]:
        line = line.strip().split()
        # convert ibdate, ibtime to datetime
        dt = datetime.datetime.strptime(line[0] + line[1].zfill(6), '%Y%m%d%H%M%S')
        data = [float(i) for i in line[2:]]
        # reordering the array, dt object will be '0'
        data = [data[1]] + [data[0]] + data[2:]
        D.append([dt] + data)
    D = np.array(D)
    A['agespectrum'] = D
    A['numageclass'] = numageclass
    A['ageclasses'] = ageclasses
    A['numspec'] = numspec
    A['filename'] = filename
    A['info'] = \
        """
        A dictionary containing ageclass information:
        keys:
             agespectrum = 2d-array(dt, lon, lat, alt, tracer[:numageclass] for each release)
             ageclasses = 1d-array(ageclass ages)
             numpspec = number of species
             numageclass = number of ageclasses
             info = this message
        """
    return A
694 
695 
def save_spectrum(outf, H, agespectra, spectype='agespec',
                  header='## AGECLASS File'):
    """Save an ageclass or continents spectrum to an outfile.

    Parameters
      outf        a writable file-like object; it is closed before return
      H           a Header with ``releasetimes``, or a list
                  [nClasses, releasetimes], or None/falsy
      agespectra  2-d spectrum array; when H is falsy, column 0 must hold
                  the release times and the remaining columns the spectrum
      spectype    'agespec' or 'contspec' (the latter rewrites the header
                  when H is a Header instance)
      header      first line written to the file

    .. note::
        Any other spectype leaves T/nClasses undefined and raises
        NameError, matching the original behavior.
    """
    if spectype == 'agespec':
        # ftype = 'AGECLASS'
        try:
            # assume H is a Header instance
            T = H.releasetimes
            spectrum = agespectra
            nClasses = spectrum.shape[1]
        except AttributeError:
            # BUGFIX: narrowed bare except; H is a list or None
            if H:
                nClasses = H[0]
                T = H[1]
                spectrum = agespectra
            else:
                T = agespectra[:, 0]
                nClasses = agespectra.shape[1] - 1
                spectrum = agespectra[:, 1:]
    elif spectype == 'contspec':
        # ftype = 'SPECTRUM'
        try:
            T = H.releasetimes
            spectrum = agespectra
            nClasses = spectrum.shape[1]
            header = '## Continental Spectrum File'
        except AttributeError:
            # BUGFIX: narrowed bare except; H is a list or None
            if H:
                nClasses = H[0]
                T = H[1]
                spectrum = agespectra
            else:
                T = agespectra[:, 0]
                nClasses = agespectra.shape[1] - 1
                spectrum = agespectra[:, 1:]

    # release times become the first output column
    T = np.array(T)
    T = np.reshape(T, (len(T), 1))
    xout = np.hstack((T, spectrum))
    fmt = '%s ' + nClasses * '%10.5f '
    outf.write("%s \n" % (header))
    np.savetxt(outf, xout, fmt=fmt)
    outf.close()
742 
743 
744 
745 
def gridarea(H):
    """Return an (nx, ny) array of grid-cell areas for the output grid.

    Usage::

        > area = gridarea(H)

    Returns
        OUT = array of cell areas, indexed [ix, iy]; every cell in the
        same latitude band gets the same value.

    Arguments
        H = :class:`Header` object (or dict) providing numxgrid,
        numygrid, outlat0, dxout, dyout.
    """
    deg2rad = pi / 180.
    r_earth = 6.371e6

    def _parallel_radius(lat_deg):
        # distance from the Earth's rotation axis at the given latitude
        return cos(lat_deg * deg2rad) * r_earth

    nx = H['numxgrid']
    ny = H['numygrid']
    outlat0 = H['outlat0']
    dyout = H['dyout']
    dxout = H['dxout']
    area = np.zeros((nx, ny))

    for iy in range(ny):
        # band center / edges; iy is 0-indexed, hence the +0.5 offset
        ylata = outlat0 + (float(iy) + 0.5) * dyout
        ylatp = ylata + 0.5 * dyout
        ylatm = ylata - 0.5 * dyout
        if ylatm < 0 and ylatp > 0:
            # the band straddles the equator
            hzone = dyout * r_earth * deg2rad
        else:
            axis_p = _parallel_radius(ylatp)
            axis_m = _parallel_radius(ylatm)
            if axis_p < axis_m:
                hzone = sqrt(r_earth ** 2 - axis_p ** 2) - sqrt(r_earth ** 2 - axis_m ** 2)
            else:
                hzone = sqrt(r_earth ** 2 - axis_m ** 2) - sqrt(r_earth ** 2 - axis_p ** 2)

        # zone area scaled to the cell's longitudinal extent
        cell_area = 2. * pi * r_earth * hzone * dxout / 360.
        area[:, iy] = cell_area

    return area
793 
795  """
796  Open and read the binary file (bf) header only to the point of
797  the Flexpart version string, return the string, seek to the
798  start of the file.
799 
800  """
801  try:
802  bf = BinaryFile(bf)
803  except:
804  bf = bf
805 
806  ret = bf.tell()
807  bf.seek(12) # start of version string
808  version = bf.read('13S')
809  bf.seek(ret)
810 
811  return version
812 
813 def read_header(pathname, **kwargs):
814  """
815  The readheader function returns a special class (Structure) which behaves
816  like a dictionary. It contains all the metadata from the simulation which
817  is contained in the "header" or "header_nest" binary files from the model
818  output.
819 
820  .. warning::
821  It is recommended to use the :class:`Header` class: H = pf.Header(path)
822 
823  This version is using the BinaryFile class rather than FortFlex.
824 
825  Usage::
826 
827  > H = read_header(pathname) #Don't include header filename
828 
829  Returns a dictionary
830 
831  H = dictionary like object with all the run metadata.
832  Arguments
833 
834  .. tabularcolumns:: |l|L|
835 
836  ============= ========================================
837  keyword Description [default]
838  ============= ========================================
839  pathname FLEXPART run output directory
840  readp read release points 0=no, [1]=y
841  nested nested output [False] or True
842  headerfile provide custom name for header file
843  datefile provide a custom name for the date file
844  verbose print information while loading header
845  ============= ========================================
846 
847  .. note::
848  **This function is in development**
849 
850  Please report any bugs found.
851 
852  .. TODO::
853 
854  probably a lot of things, among which ...
855 
856  [] choose skip/getbin or direct seek/read
857  [] define output keys in docstring
858 
859 
860  .. note::
861  The user is no longer required to indicate which version of FLEXPART
862  the header is from. Checks are made, and the header is read accordingly.
863  Please report any problems...
864 
865  """
866 
867  OPS = Structure()
868  OPS.readp = True
869  OPS.nested = False
870  OPS.ltopo = 1 # 1 for AGL, 0 for ASL
871  OPS.verbose = False
872  OPS.headerfile = None
873  OPS.datefile = None
874 
875  # # add keyword overides and options to header
876  for k in kwargs.keys():
877  if k not in OPS.keys():
878  print("WARNING: {0} not a valid input option.".format(k))
879 
880  # BW compat fixes
881  if 'nest' in kwargs.keys():
882  raise IOError("nest is no longer a valid keyword, see docs. \n Now use nested=True or nested=False")
883 
884 
885  if 'nested' in kwargs.keys():
886  if kwargs['nested'] is 1:
887  print("WARNING, use of nested=1, deprecated converting to nested=True")
888  kwargs['nested'] = True
889 
890  if 'nested' in kwargs.keys():
891  if kwargs['nested'] is 0:
892  print("WARNING, use of nested=0, deprecated converting to nested=False")
893  kwargs['nested'] = False
894 
895  OPS.update(kwargs)
896 
897  if OPS.verbose:
898  print "Reading Header with:\n"
899 
900  for o in OPS:
901  print "%s ==> %s" % (o, OPS[o])
902 
903  # Define utility functions for reading binary file
904  skip = lambda n = 8 : bf.seek(n, 1)
905  getbin = lambda dtype, n = 1 : bf.read(dtype, (n,))
906 
907 
908  h = Structure()
909 
910 
911  if OPS.headerfile:
912  filename = os.path.join(pathname, OPS.headerfile);
913 
914  elif OPS.nested is True:
915  filename = os.path.join(pathname, 'header_nest')
916  h['nested'] = True;
917  else:
918  filename = os.path.join(pathname, 'header')
919  h['nested'] = False;
920 
921  # Open header file in binary format
922  if not os.path.exists(filename):
923  raise IOError("No such file: {0}".format(filename))
924  else:
925  try:
926  bf = BinaryFile(filename, order="fortran")
927  except:
928  raise IOError("Error opening: {0} with BinaryFile class".format(filename))
929 
930  # Get available_dates from dates file in same directory as header
931  if OPS.datefile:
932  datefile = os.path.join(pathname, OPS.datefile)
933  else:
934  datefile = os.path.join(pathname, 'dates')
935 
936  if not os.path.exists(datefile):
937  raise IOError("No DATEFILE: {0}".format(datefile))
938  else:
939  try:
940  fd = file(datefile, 'r').readlines()
941  except:
942  raise IOError("Could not read datefile: {0}".format(datefile))
943 
944  # get rid of any duplicate dates (a fix for the forecast system)
945  fd = sorted(list(set(fd)))
946  h['available_dates'] = [d.strip('\n') for d in fd]
947 
948 
949  # which version format is header file:
950  version = _get_header_version(bf)
951 
952 
953  # required containers
954  junk = [] # for catching unused output
955  h['nz_list'] = []
956  h['species'] = []
957  h['wetdep'] = []
958  h['drydep'] = []
959  h['ireleasestart'] = []
960  h['ireleaseend'] = []
961  h['compoint'] = []
962 
963  # Define Header format and create Dictionary Keys
964  I = {0:'_0', 1:'ibdate', 2:'ibtime', 3:'flexpart', \
965  4:'_1', 5:'loutstep', 6:'loutaver', 7:'loutsample', \
966  8:'_2', 9:'outlon0', 10:'outlat0', 11:'numxgrid', \
967  12:'numygrid', 13:'dxout', 14:'dyout', 15:'_3', 16:'numzgrid', \
968  }
969  # format for binary reading first part of the header file
970  Dfmt = ['i', 'i', 'i', '13S', '2i', 'i', 'i', 'i', '2i', 'f', 'f', 'i', 'i', 'f', 'f', '2i', 'i']
971  if bf:
972  a = [bf.read(fmt) for fmt in Dfmt]
973  for j in range(len(a)):
974  h[I[j]] = a[j]
975 
976  h['outheight'] = np.array([bf.read('f') for i in range(h['numzgrid'])])
977  junk.append(bf.read('2i'))
978  h['jjjjmmdd'] = bf.read('i')
979  h['hhmmss'] = bf.read('i')
980  junk.append(bf.read('2i'))
981  h['nspec'] = bf.read('i') / 3 # why!?
982  h['numpointspec'] = bf.read('i')
983  junk.append(bf.read('2i'))
984 
985  # Read in the species names and levels for each nspec
986  # temp dictionaries
987  for i in range(h['nspec']):
988  if 'V6' in version:
989 
990  h['wetdep'].append(''.join([bf.read('c') for i in range(10)]).strip())
991  junk.append(bf.read('2i'))
992  junk.append(bf.read('i'))
993  h['drydep'].append(''.join([bf.read('c') for i in range(10)]).strip())
994  junk.append(bf.read('2i'))
995  h['nz_list'].append(getbin('i'))
996  h['species'].append(''.join([getbin('c') for i in range(10)]).strip())
997 
998  else:
999 
1000  junk.append(bf.read('i'))
1001  h['wetdep'].append(''.join([bf.read('c') for i in range(10)]).strip())
1002  junk.append(bf.read('2i'))
1003  junk.append(bf.read('i'))
1004  h['drydep'].append(''.join([bf.read('c') for i in range(10)]).strip())
1005  junk.append(bf.read('2i'))
1006  h['nz_list'].append(bf.read('i'))
1007  h['species'].append(''.join([bf.read('c') for i in range(10)]).strip())
1008  junk.append(bf.read('2i'))
1009 
1010  if 'V6' in version:
1011  bf.seek(8, 1)
1012  # pdb.set_trace()
1013  h['numpoint'] = bf.read('i')
1014 
1015  # read release info if requested
1016  # if OPS.readp pass has changed, we cycle through once,
1017  # then break the loop if OPS.readp is false. This is done
1018  # in order to get some of the information into the header.
1019  before_readp = bf.tell()
1020 
1021  # initialise release fields
1022  I = {2:'kindz', 3:'xp1', 4:'yp1', 5:'xp2', \
1023  6:'yp2', 7:'zpoint1', 8:'zpoint2', 9:'npart', 10:'mpart'}
1024 
1025  for k, v in I.iteritems():
1026  h[v] = np.zeros(h['numpoint']) # create zero-filled lists in H dict
1027 
1028  h['xmass'] = np.zeros((h['numpoint'], h['nspec']))
1029 
1030 
1031  if 'V6' in version:
1032  skip()
1033  _readV6(bf, h)
1034  else:
1035  junk.append(bf.read('i'))
1036  for i in range(h['numpoint']):
1037  junk.append(bf.read('i'))
1038 
1039  h['ireleasestart'].append(bf.read('i'))
1040  h['ireleaseend'].append(bf.read('i'))
1041  h['kindz'][i] = bf.read('h') # This is an int16, might need to to change something
1042  junk.append(bf.read('2i'))
1043 
1044  h['xp1'][i] = bf.read('f')
1045  h['yp1'][i] = bf.read('f')
1046  h['xp2'][i] = bf.read('f')
1047  h['yp2'][i] = bf.read('f')
1048  h['zpoint1'][i] = bf.read('f')
1049  h['zpoint2'][i] = bf.read('f')
1050 
1051  junk.append(bf.read('2i'))
1052  h['npart'][i] = bf.read('i')
1053  h['mpart'][i] = bf.read('i')
1054 
1055  junk.append(bf.read('i'))
1056  # initialise release fields
1057 
1058  l = bf.read('i') # get compoint length?
1059  gt = bf.tell() + l # create 'goto' point
1060  sp = ''
1061  while re.search("\w", bf.read('c')): # collect the characters for the compoint
1062  bf.seek(-1, 1)
1063  sp = sp + bf.read('c')
1064 
1065  bf.seek(gt) # skip ahead to gt point
1066 
1067  h['compoint'].append(sp) # species names in dictionary for each nspec
1068  # h['compoint'].append(''.join([bf.read('c') for i in range(45)]))
1069 
1070  junk.append(bf.read('i'))
1071  # now loop for nspec to get xmass
1072  for v in range(h['nspec']):
1073  Dfmt = ['i', 'f', '2i', 'f', '2i', 'f', 'i']
1074  a = [bf.read(fmt) for fmt in Dfmt]
1075  h['xmass'][i, v] = a[1]
1076 
1077  if OPS.readp is False:
1078  """
1079  We get the first set of release points here in order
1080  to get some information, but skip reading the rest
1081  """
1082  bf.seek(before_readp)
1083  if 'V6' in version:
1084  bf.seek(157 * h['numpoint'], 1);
1085  else:
1086  bf.seek(119 * h['numpoint'] + (h['nspec'] * 36) * h['numpoint'] + 4, 1)
1087  break
1088 
1089 
1090 
1091  if 'V6' in version:
1092  h['method'] = bf.read('i')
1093 
1094  else:
1095  junk.append(bf.read('i'))
1096  junk.append(bf.read('i'))
1097 
1098  h['lsubgrid'] = bf.read('i')
1099  h['lconvection'] = bf.read('i')
1100  h['ind_source'] = bf.read('i')
1101  h['ind_receptor'] = bf.read('i')
1102 
1103  if 'V6' in version:
1104  pass
1105  else:
1106  junk.append(bf.read('2i'))
1107 
1108  h['nageclass'] = bf.read('i')
1109 
1110  Lage_fmt = ['i'] * h.nageclass
1111  h['lage'] = [bf.read(fmt) for fmt in Lage_fmt]
1112 
1113  # Orography
1114  nx = h['numxgrid']
1115  ny = h['numygrid']
1116  Dfmt = ['f'] * nx
1117  h['oro'] = np.zeros((nx, ny), np.float)
1118  junk.append(bf.read('2i'))
1119 
1120  for ix in range(nx):
1121  h['oro'][ix] = bf.read('f', ny)
1122  bf.seek(8, 1)
1123 
1124  # Why was this? / deprecated.
1125  # if h['loutstep'] < 0:
1126  # h['nspec'] = h['numpoint']
1127 
1128  bf.close()
1129 
1130 
1131 
1132 
1133  ############# ADDITIONS ###########
1134  # attributes to header that can be added after reading below here
1135  h['pathname'] = pathname
1136  h['decayconstant'] = 0
1137  h.path = pathname
1138 
1139 
1140  # Calculate Height (outheight + topography)
1141  # There is an offset issue here related to the 0-indexing. Be careful.
1142  oro = h['oro'] # z is a numpy array
1143  nx = h['numxgrid']
1144  ny = h['numygrid']
1145  nz = h['numzgrid']
1146 
1147  Heightnn = np.zeros((nx, ny, nz), np.float)
1148  for ix in range(nx):
1149  if OPS.ltopo == 1:
1150  Heightnn[ix, :, 0] = oro[ix, :]
1151  else:
1152  Heightnn[ix, :, 0] = np.zeros(ny)
1153 
1154  for iz in range(nz):
1155  if OPS.ltopo == 1:
1156  Heightnn[ix, :, iz] = [h['outheight'][iz] + oro[ix, y] for y in range(ny)]
1157  else:
1158  Heightnn[ix, :, iz] = h['outheight'][iz]
1159 
1160  h['Area'] = gridarea(h)
1161  h['Heightnn'] = Heightnn
1162  h['nx'] = nx
1163  h['ny'] = ny
1164  h['nz'] = nz
1165 
1166  # Convert ireleasestart and ireleaseend to datetimes, fix issue #10
1167  start_day = datetime.datetime.strptime(str(h['ibdate']), '%Y%m%d')
1168  H, M, S , = [int(str(h['ibtime']).zfill(6)[i:i + 2]) for i in range(0, 6, 2)]
1169  start_time = datetime.timedelta(hours=H, minutes=M, seconds=S)
1170  h['simulationstart'] = start_day + start_time
1171 
1172  if OPS.readp:
1173  releasestart, releaseend = [], []
1174  for i in range(h.numpointspec):
1175  releasestart.append(h.simulationstart + \
1176  datetime.timedelta(seconds=int(h.ireleasestart[i])))
1177  releaseend.append(h.simulationstart + \
1178  datetime.timedelta(seconds=int(h.ireleaseend[i])))
1179  h.releasestart = releasestart
1180  h.releaseend = releaseend[:h.numpointspec]
1181  h.releasetimes = [b - ((b - a) / 2) for a, b in zip(h.releasestart, h.releaseend)]
1182 
1183  # Add datetime objects for dates
1184  available_dates_dt = []
1185  for i in h.available_dates:
1186  available_dates_dt.append(datetime.datetime(
1187  int(i[:4]), int(i[4:6]), int(i[6:8]), int(i[8:10]), int(i[10:12]), int(i[12:])))
1188  h.available_dates_dt = available_dates_dt
1189  h.first_date = available_dates_dt[0]
1190  h.last_date = available_dates_dt[-1]
1191  h.ageclasses = np.array([act - h.simulationstart for act in h.available_dates_dt])
1192  h.numageclasses = len(h.ageclasses)
1193 
1194  # Add other helpful attributes
1195  h.nxmax = h.numxgrid
1196  h.nymax = h.numygrid
1197  h.nzmax = h.numzgrid
1198  h.maxspec = h.nspec
1199  h.maxpoint = h.numpoint
1200  h.maxageclass = h.numageclasses
1201 
1202  h.area = h.Area # fix an annoyance
1203 
1204  if OPS.readp:
1205  h.xpoint = h.xp1
1206  h.ypoint = h.yp1
1207 
1208  # Add release unit derived from kindz
1209  if 'kindz' not in h.keys():
1210  h.kindz = [0]
1211  h.alt_unit = 'unkn.'
1212  if 3 in h.kindz:
1213  h.alt_unit = 'hPa'
1214  elif 2 in h.kindz:
1215  h.alt_unit = 'm.a.s.l.'
1216  elif 1 in h.kindz:
1217  h.alt_unit = 'm.a.g.l.'
1218 
1219  #
1220  if h.loutstep > 0:
1221  h.direction = 'forward'
1222  h.unit = 'conc' # could be pptv
1223  h.plot_unit = 'ppb'
1224  else:
1225  h.direction = 'backward'
1226  h.unit = 'time'
1227  h.plot_unit = 'ns / kg' # Not sure about this
1228 
1229  # Units based on Table 1, ACP 2005
1230  if h.direction == 'forward':
1231  if h.ind_source == 1:
1232  if h.ind_receptor == 1:
1233  h.output_unit = 'ng m-3'
1234  if h.ind_receptor == 2:
1235  h.output_unit = 'pptm'
1236  if h.ind_source == 2:
1237  if h.ind_receptor == 1:
1238  h.output_unit = 'ng m-3'
1239  if h.ind_receptor == 2:
1240  h.output_unit = 'pptm'
1241  if h.direction == 'backward':
1242  if h.ind_source == 1:
1243  if h.ind_receptor == 1:
1244  h.output_unit = 's'
1245  if h.ind_receptor == 2:
1246  h.output_unit = 's m^3 kg-1'
1247  if h.ind_source == 2:
1248  if h.ind_receptor == 1:
1249  h.output_unit = 's kg m-3'
1250  if h.ind_receptor == 2:
1251  h.output_unit = 's'
1252 
1253 
1254 
1255  # Add layer thickness
1256  layerthickness = [h.outheight[0]]
1257  for i, lh in enumerate(h.outheight[1:]):
1258  layerthickness.append(lh - h.outheight[i])
1259  h.layerthickness = layerthickness
1260 
1261  h.options = OPS
1262  h.fp_version = h.flexpart
1263  h.junk = junk # the extra bits that were read... more for debugging
1264 
1265 
1266  print('Read {0} Header: {1}'.format(h.fp_version, filename))
1267 
1268  return h
1269 
# BW Compatability: older client code imported version-specific reader
# names; they all resolve to the single unified read_header implementation.
readheader = read_header
readheaderV8 = read_header
readheaderV6 = read_header
1274 
def _readV6(bf, h):
    """
    Version 6, 7X read routines...

    Read the release-point records of a FLEXPART V6/V7 header, populating
    the header dict ``h`` in place (``ireleasestart``, ``ireleaseend``,
    ``kindz``, ``xp1``/``yp1``/``xp2``/``yp2``, ``zpoint1``/``zpoint2``,
    ``npart``, ``mpart``, ``compoint`` and ``xmass``) by reading
    sequentially from the open BinaryFile ``bf``.

    NOTE(review): the byte offsets skipped below are assumed to be Fortran
    unformatted record markers (2 x 4-byte) -- confirm against the FLEXPART
    V6 writer before changing any read in this sequence.
    """
    # Utility functions:
    #   skip()  -- jump forward n bytes (default 8, i.e. one pair of record markers)
    #   getbin() -- read n values of the given dtype from the current position
    skip = lambda n = 8 : bf.seek(n, 1)
    getbin = lambda dtype, n = 1 : bf.read(dtype, (n,))

    if bf:
        # bf.read('i')
        print h['numpoint']
        # One fixed-layout record per release point.
        for i in range(h['numpoint']):
            # r2=getbin('i')
            # Release start/end times (seconds relative to simulation start).
            i1 = getbin('i')
            i2 = getbin('i')
            # h['ireleasestart'].append( ss_start + datetime.timedelta(seconds=float(i1)) )
            # h['ireleaseend'].append( ss_start + datetime.timedelta(seconds=float(i2)) )
            h['ireleasestart'].append(i1)
            h['ireleaseend'].append(i2)
            # NOTE(review): the V8 path reads kindz as 'h' (int16); here a full
            # 'i' is read -- confirm against the V6 record layout.
            h['kindz'][i] = getbin('i') # This is an int16, might need to to change something
            skip() # get xp, yp,...

            # Release-box corner coordinates and vertical extent.
            h['xp1'][i] = getbin('f')
            h['yp1'][i] = getbin('f')
            h['xp2'][i] = getbin('f')
            h['yp2'][i] = getbin('f')
            h['zpoint1'][i] = getbin('f')
            h['zpoint2'][i] = getbin('f')

            skip() # get n/mpart
            h['npart'][i] = getbin('i')
            h['mpart'][i] = getbin('i')

            getbin('i')
            # initialise release fields

            # Comment-point (release name): length-prefixed character field.
            l = getbin('i') # get compoint length?
            gt = bf.tell() + l # create 'goto' point
            sp = ''
            sp = sp.join(getbin('c', l))
            bf.seek(gt) # skip ahead to gt point

            h['compoint'].append(sp) # species names in dictionary for each nspec
            # h['compoint'].append(''.join([getbin('c') for i in range(45)]))
            skip()
            # r1=getbin('i')

            # now loop for nspec to get xmass; each species contributes a
            # fixed 7-field record of which only field [1] (the mass) is kept.
            for v in range(h['nspec']):
                Dfmt = ['i', 'f', '2i', 'f', '2i', 'f', 'i']
                a = [bf.read(fmt) for fmt in Dfmt]
                h['xmass'][i, v] = a[1]


    return
1331 
1332 
1333 ########### Grid Reading Routines ###############
1334 
1335 
def _readgrid_noFF(H, **kwargs):
    """
    Read a FLEXPART grid file in pure Python (no FortFlex extension).

    Accepts a header dictionary as input, returns a dictionary of Grid
    values keyed by H['ibtime'] (plus 'itime' from the file record).

    Keyword options (most defaults are obtained from the header dict):
      - date      : which yyyymmddhhmmss from dates (comma-separated string)
      - unit      : 'conc', 'pptv', ['time'], 'footprint'
      - nspec_ret : numspecies
      - pspec_ret : index of pointspec to return
      - age_ret   : index of ageclass to return
      - time_ret  : indices into H['available_dates']
      - nested    : nested = True reads the nested-output files
      - scaledepo : deposition scale factor [1.0]
      - scaleconc : concentration scale factor [1.0]
      - decaycons : decay constant

    Returns the grid dictionary object ``G``; the local ``fail`` flag
    (-1=fail, 0=success) is retained for parity with the original
    Matlab-derived routine but is not returned.

    FLEXPART python import routines -- last changes: JFB, 10.10.2008
    """
    # FortFlex is never used on this path; kept for structural parity with
    # the commented-out import logic that used to live here.
    useFortFlex = 0

    if 'date' in kwargs.keys():
        date = kwargs['date']
    else:
        date = None

    if 'unit' in kwargs.keys():
        unitname = kwargs['unit']
    else:
        unitname = 'time'

    units = ['conc', 'pptv', 'time', 'footprint']
    unit = units.index(unitname)

    if 'nspec_ret' in kwargs.keys():
        nspec_ret = kwargs['nspec_ret']
    else:
        nspec_ret = 1

    # BUG FIX: this previously read kwargs['ppsec_ret'] (typo), raising
    # KeyError whenever a caller actually passed 'pspec_ret'.
    if 'pspec_ret' in kwargs.keys():
        pspec_ret = kwargs['pspec_ret']
    else:
        pspec_ret = 1

    if 'age_ret' in kwargs.keys():
        age_ret = kwargs['age_ret']
    else:
        age_ret = 1

    if 'nested' in kwargs.keys():
        nested = kwargs['nested']
    else:
        nested = False

    if 'time_ret' in kwargs.keys():
        time_ret = kwargs['time_ret']
    else:
        time_ret = range(len(H['available_dates']))

    if 'scaledepo' in kwargs.keys():
        scaledepo = kwargs['scaledepo']
    else:
        scaledepo = 1.0

    if 'scaleconc' in kwargs.keys():
        scaleconc = kwargs['scaleconc']
    else:
        scaleconc = 1.0

    if 'decaycons' in kwargs.keys():
        decaycons = kwargs['decaycons']
    else:
        decaycons = 99999999990
    fail = -1

    # set filenames; index = unit + nested*4 selects the nested variant
    prefix = ['grid_conc_', 'grid_pptv_', \
              'grid_time_', 'footprint_total', \
              'grid_conc_nest_', 'grid_pptv_nest_', \
              'grid_time_nest_', 'footprint_total_nest']

    # Utility functions, bound to the BinaryFile f2 opened per-date below:
    #   skip()  -- jump over Fortran record markers (8 bytes by default)
    #   getbin() -- read n values of the given dtype
    skip = lambda n = 8 : f2.seek(n, 1)
    getbin = lambda dtype, n = 1 : f2.read(dtype, (n,))


    def getdump(n, fmt='f'):
        """ function to get the dump values for the sparse format """
        skip()
        a = f2.read(fmt, n)
        return a

    def key2var(D, key):
        # Promote D[key] to a module-level global named `key`, so the
        # header values can be referenced bare inside this function.
        cmd = "global %s; %s = D['%s'];" % (key, key, key)
        exec(cmd)

    def dumpgrid(dmp_i, cnt_r, dmp_r, grd, k, nage):
        """ function to dump sparse elements into grid """
        # FLEXPART sparse format: a sign flip in the value stream signals a
        # jump to the next stored linear index; otherwise indices are
        # consecutive. NOTE: the index arithmetic relies on Python 2
        # integer division.
        ii = 0
        fact = 1
        for ir in range(cnt_r):
            if dmp_r[ir] * fact > 0:
                n = dmp_i[ii]; ii = ii + 1; fact = fact * -1.
            else:
                n = n + 1

            # unravel linear index n -> (ix, jy, kz)
            kz = n / (numxgrid * numygrid)
            jy = (n - kz * numxgrid * numygrid) / numxgrid
            ix = n - numxgrid * numygrid * kz - numxgrid * jy
            grd[ix, jy, kz - 1, k, nage] = abs(dmp_r[ir])
        return grd  # flipud(grd.transpose())

    G = {}
    # get values from header file
    H['nageclass'] = H['numageclasses']
    headervars = ['nested', 'nspec', 'numxgrid', 'numygrid', 'numzgrid', 'nageclass', \
                  'available_dates', 'pathname', 'decayconstant', 'numpoint', 'Area', 'Heightnn']

    for k in headervars:
        key2var(H, k)

    # create zero arrays for datagrid and zplot
    # (np.float removed from modern NumPy; builtin float gives float64, same dtype)
    datagrid = np.zeros((numxgrid, numygrid, numzgrid, numpoint), dtype=np.float32)
    zplot = np.empty((numxgrid, numygrid, numzgrid, numpoint))

    #--------------------------------------------------
    # Loop over all times, given in field H['dates']
    #--------------------------------------------------
    for ks in range(nspec_ret, nspec_ret + 1):
        specs = '_' + str(ks).zfill(3)
        fpmax = -999.
        if date is None and time_ret is None:
            get_dates = [available_dates[0]]
        elif time_ret is None:
            get_dates = []
            date = date.strip().split(',')
            for d in date:
                try:
                    # NOTE(review): this appends the *index* of the date, not
                    # the datestring itself -- confirm intent before relying
                    # on the explicit-date path.
                    get_dates.append(available_dates.index(d))
                except ValueError:
                    # list.index raises ValueError for a missing date
                    shout("Cannot find date: %s in H['dates']\n" % d)
        else:
            get_dates = available_dates

        for date_i in range(len(get_dates)):

            if unit != 4:
                # NOTE(review): datestring is indexed from available_dates,
                # not get_dates -- these only coincide when all dates are
                # requested; confirm before changing.
                datestring = H['available_dates'][date_i]
                filename = os.path.join(H['pathname'], \
                                        prefix[(unit) + (nested * 4)] + datestring + specs)
            else:
                # footprint_total has no per-date suffix
                filename = os.path.join(H['pathname'], \
                                        prefix[(unit) + (nested * 4)])

            if os.path.exists(filename):

                if useFortFlex == 1:
                    # Unreachable here (useFortFlex is hard-wired to 0);
                    # kept for parity with the FortFlex-enabled variant.
                    concgrid = readgrid(filename, numxgrid, numygrid, numzgrid, \
                                        numpoint, nageclass, \
                                        scaleconc, decayconstant)

                    print(np.min(concgrid))
                    print(np.max(concgrid))

                    zplot = sumgrid(zplot, concgrid, \
                                    H.area, H.Heightnn)

                else:
                    dat_cnt = 0
                    nage = 0
                    # read data:
                    datagrid = np.zeros((numxgrid, numygrid, numzgrid, 1, 1), float)
                    f2 = BinaryFile(filename, order='fortran')
                    skip(4)
                    G['itime'] = getbin('i')
                    print(H['available_dates'][date_i])

                    # Read Wet Depostion (decoded values currently discarded)
                    skip()
                    cnt_i = getbin('i')
                    dmp_i = getdump(cnt_i, 'i')
                    skip()
                    cnt_r = getbin('i')
                    dmp_r = getdump(cnt_r)
                    # wet=dumpgrid(dmp_i, cnt_r, dmp_r, datagrid, ks-1, nage)

                    # Read Dry Deposition (decoded values currently discarded)
                    skip()
                    cnt_i = getbin('i')
                    dmp_i = getdump(cnt_i, 'i')
                    skip()
                    cnt_r = getbin('i')
                    dmp_r = getdump(cnt_r)
                    # dry=dumpgrid(dmp_i, cnt_r, dmp_r, datagrid, ks-1, nage)

                    # Read Concentrations
                    skip()
                    cnt_i = getbin('i')
                    dmp_i = getdump(cnt_i, 'i')
                    skip()
                    cnt_r = getbin('i')
                    dmp_r = getdump(cnt_r)
                    concgrid = dumpgrid(dmp_i, cnt_r, dmp_r, datagrid, ks - 1, nage)

                    G[H['ibtime']] = concgrid
                    f2.close()
                fail = 0
            else:
                print("\n\n INPUT ERROR: Could not find file: %s" % filename)
                raise IOError('No file: %s' % filename)

    if useFortFlex == 1: G = zplot
    return G
1607 
# BW-compat alias: external callers may still import readgridBF.
readgridBF = _readgrid_noFF
1609 
def _readgridBF(H, filename):
    """ Read grid using BinaryFile class

    Decodes one FLEXPART grid output file (wet deposition, dry deposition
    and concentration records, in that order, per pointspec) and returns
    ``(datagrid, wetgrid, drygrid, itime)``. Falls back to a pure-Python
    sparse decoder when the compiled cython module is unavailable.
    """
    # Utility functions bound to the BinaryFile f2 opened below:
    #   skip()  -- jump over Fortran record markers (8 bytes by default)
    #   getbin() -- read n values of the given dtype
    skip = lambda n = 8 : f2.seek(n, 1)
    getbin = lambda dtype, n = 1 : f2.read(dtype, (n,))


    def getdump(n, fmt='f'):
        """ function to get the dump values for the sparse format """
        skip()
        # Dfmt=[fmt]*n
#        a=[struct.unpack(ft,f.read(struct.calcsize(ft))) for ft in Dfmt]
        a = f2.read(fmt, n)
        ### DJM HACK - 2013-09-02 - without the following, 'a' ends up being
        ### a float rather than a numpy array
        if n==1:
            a = np.array([a])
#        dumplist=[a[j][0] for j in range(len(a))]
        # dumplist=[a[j] for j in range(len(a))]
        return a # dumplist

    # NOTE(review): key2var is defined but never called in this function --
    # likely a leftover from _readgrid_noFF; candidate for removal.
    def key2var(D, key):
        cmd = "global %s; %s = D['%s'];" % (key, key, key)
        exec(cmd)

    def _dumpgrid(dmp_i, cnt_r, dmp_r, grd, k, nage, nx, ny):
        """ function to dump sparse elements into grid, fall back method if
        pflexcy.so module (cython) not available -- it is SLOW """
        # A 5-D target grid means concentrations (x, y, z, pointspec, age);
        # a 4-D target means deposition (x, y, pointspec, age).
        conc = False
        if len(grd.shape) == 5:
            conc = True
        ii = 0
        fact = 1
        pos = 0
        '''
        print 'CNT_R: ' + str(cnt_r)
        print 'DMP_R: ' + str(dmp_r)
        print 'TYPE DMP_R: ' + str(type(dmp_r))
        '''
        # FLEXPART sparse encoding: a sign flip in the value stream marks a
        # jump to the next stored linear index; otherwise indices run
        # consecutively. NOTE(review): the '/' index arithmetic relies on
        # Python 2 integer division -- would need '//' under Python 3.
        for ir in range(cnt_r):

            if conc:
                if dmp_r[ir] * fact > 0:
                    n = dmp_i[ii]
                    ii = ii + 1
                    fact = fact * -1.
                else:
                    n = n + 1

                # unravel linear index n -> (ix, jy, kz)
                kz = n / (H.numxgrid * H.numygrid)
                jy = (n - kz * H.numxgrid * H.numygrid) / H.numxgrid
                ix = n - H.numxgrid * H.numygrid * kz - H.numxgrid * jy
                grd[ix, jy, kz - 1, k, nage] = abs(dmp_r[ir])

#
#                print "n  ==> ix,jy,kz,k,nage"
#                print "%s  ==> %s,%s,%s,%s,%s" % (n,ix,jy,kz,k,nage)
#                print grd.shape
#                print grd[0,0,0,0,0]

            else:
                if dmp_r[ir] * fact > 0:
                    n = dmp_i[ii]
                    ii = ii + 1
                    fact = fact * -1.
                else:
                    n = n + 1
                # 2-D (deposition) unravel: n -> (ix, jy)
                jy = n / H.numxgrid
                ix = n - H.numxgrid * jy
                grd[ix, jy, k, nage] = abs(dmp_r[ir])

        return grd # flipud(grd.transpose())

    # # Import pflexcy.so (cython compiled version of dumpgrid)
    try:

        from sdspflexcy import dumpdatagrid, dumpdepogrid
        print 'using pflexcy'
    except:
        # Fall back to the slow pure-Python decoder above.
        dumpdatagrid = _dumpgrid
        dumpdepogrid = _dumpgrid

    dat_cnt = 0
    # NOTE(review): only a single age class is read here (nage = 1).
    nage = 1
    # #datagrid=np.zeros((numxgrid,numygrid,numzgrid[nspec-1],nspec,nageclass),np.float)
    # NOTE(review): np.float was removed in NumPy 1.24; builtin float is the
    # drop-in replacement (same float64 dtype).
    wetgrid = np.zeros((H.numxgrid, H.numygrid, H.numpointspec, 1), np.float)
    drygrid = np.zeros((H.numxgrid, H.numygrid, H.numpointspec, 1), np.float)
    datagrid = np.zeros((H.numxgrid, H.numygrid, H.numzgrid, H.numpointspec, nage), np.float)
    # f = file(filename,'rb')
    # print filename
    f2 = BinaryFile(filename, order='fortran')
    # read data: leading 4 bytes assumed to be a record marker, then the
    # output timestamp.
    skip(4)
    itime = getbin('i')

    for na in range(nage):

        for ks in range(H.numpointspec):
            # Read Wet Depostion
            skip()
            cnt_i = getbin('i')
            dmp_i = getdump(cnt_i, 'i')
            skip()
            cnt_r = getbin('i')
            dmp_r = getdump(cnt_r)
            if dmp_r.any():
                # print dmp_r, dmp_i
                wetgrid = dumpdepogrid(dmp_i, cnt_r, dmp_r, wetgrid, ks, na, H.numxgrid, H.numygrid)

            # Read Dry Deposition
            skip()
            cnt_i = getbin('i')
            dmp_i = getdump(cnt_i, 'i')
            skip()
            cnt_r = getbin('i')
            dmp_r = getdump(cnt_r)
            if dmp_r.any():
                # print dmp_r, dmp_i
                drygrid = dumpdepogrid(dmp_i, cnt_r, dmp_r, drygrid, ks, na, H.numxgrid, H.numygrid)

            # Read Concentrations
            skip()
            cnt_i = getbin('i')
            dmp_i = getdump(cnt_i, 'i')
            skip()
            cnt_r = getbin('i')
            dmp_r = getdump(cnt_r)
            # print len(dmp_r),len(dmp_i)
            # print cnt_r,cnt_i
            # print dmp_i
            # print type(dmp_i),type(cnt_r),type(dmp_r),type(datagrid),type(ks),type(na)
            # print type(H.numxgrid),type(H.numygrid)
            datagrid = dumpdatagrid(dmp_i, cnt_r, dmp_r, datagrid, ks, na, H.numxgrid, H.numygrid)

    # G[H['ibtime']].append(concgrid)
    f2.close()

    return datagrid, wetgrid, drygrid, itime
1753 
1754 
1755 
def _read_headerFF(pathname, h=None,
        maxpoint=800000, maxspec=4, maxageclass=20,
        nxmax=722, nymax=362, nzmax=40, verbose=True):
    """
    DEPRECATED

    Called from read_header if readp_ff is True, uses FortFlex.readheader
    to read all releasepoints.

    This function is dependant on the FortFlex.so module
    see FortFlex.f and the f2py directory

    Returns the (possibly newly created) Structure ``h`` with every name in
    ``headervars`` attached as an attribute, unless already present.
    """
    # NOTE(review): if the import fails, only a message is printed and the
    # local name `readheader` stays unbound -- the call below then raises
    # NameError rather than a clear ImportError.
    try:
        from FortFlex import readheader
    except:
        print "Error with FortFlex.readheader, use read_header"

    # Names copied from the unpacked FortFlex tuple onto h below.
    headervars = ['numxgrid', 'numygrid', 'numzgrid', 'outlon0', 'outlat0', 'compoint', \
       'dxout', 'dyout', 'outheight', 'ibdate', 'ibtime', 'loutstep', \
       'nspec', 'nageclass', 'lage', 'ireleasestart', 'ireleaseend', \
       'numpoint', 'xpoint', 'ypoint', 'zpoint1', 'zpoint2', 'heightnn', 'area', \
       'maxpoint', 'maxspec', 'maxageclass', 'nxmax', 'nymax', 'nzmax', \
       'npart', 'kind', 'lage', 'loutaver', 'loutsample', 'yyyymmdd', 'hhmmss', 'method']
    if h == None:
        h = Structure()

    if verbose:
        print """Reading Header with:
        maxpoint : %s
        maxspec : %s
        maxageclass : %s
        nxmax : %s
        nymax : %s
        nzmax : %s
        """ % (maxpoint, maxspec, maxageclass, nxmax, nymax, nzmax)


    # Unpack the fixed-order tuple returned by the Fortran reader.
    numxgrid, numygrid, numzgrid, outlon0, outlat0, dxout, dyout, outheight, \
    ibdate, ibtime, loutstep, nspec, nageclass, lage, ireleasestart, ireleaseend, \
    numpoint, xpoint, ypoint, zpoint1, zpoint2, heightnn, area, compoint, species_f, \
    npart, kind, loutaver, loutsample, yyyymmdd, hhmmss, method = \
    readheader(pathname, maxpoint, maxspec, maxageclass, nxmax, nymax, nzmax)

    # Attach each header variable by name; exec maps the local of the same
    # name onto an attribute of h (only if not already set by the caller).
    for v in headervars:
        if v not in h.keys():
            exec("h.%s = %s" % (v, v))
    return h
1803 
def read_grid(H, **kwargs):
    """
    Dispatch to the version-specific grid reader for a header object.

    Accepts a header object as input and returns a dictionary of Grid
    values keyed by ``(species, datestring)`` tuples built from
    ``H['available_dates']``, where species is an integer. A grid instance
    from this dictionary may be passed to the get_slabs function to obtain
    a dictionary of slabs suitable for the plotting routines.

    **DEPENDENCY**
    Requires the FortFlex.so module compiled using f2py. See FortFlex.f
    for more details.

    Usage::

        > FLEXDATA = read_grid(H, **kwargs)

    Each ``FLEXDATA[(s, datestring)]`` entry provides::

        'grid', 'itime', 'shape', 'max', 'min', 'timestamp',
        'species', 'gridfile', 'rel_i', 'spec_i'

    Keyword arguments (most can be extracted from the header ``H``):

    ============= ========================================
    keyword       Description [default]
    ============= ========================================
    date          which yyyymmddhhmmss from available_dates
                  or use (time_ret)
    time_ret      index to time
    unit          'conc', 'pptv', ['time'], 'footprint'
    nspec_ret     numspecies
    pspec_ret     index to ???
    age_ret       index to ageclass
    nested        obtained from H['nested']
    BinaryFile    Use BinaryFile vs. FortFlex [False]
    getwet        True, [False]
    getdry        True, [False]
    scaledepo     A float value to scale deposition [1.0]
    scaleconc     A float value to scale conc [1.0]
    decayconstant A float for decay const. [9e6]
    calcfoot      Will cause footprint to be calculated
                  [False]
    verbose       more output
    version       A string 'V8' or 'V6', ['V8']
    ============= ========================================

    :raises IOError: if ``H.version`` matches none of the known readers.
    """
    # Select the reader lazily so that only the branch actually taken
    # needs its version-specific function to exist.
    version = H.version
    if version == 'V9':
        return readgridV9(H, **kwargs)
    elif version == 'V8':
        return readgridV8(H, **kwargs)
    elif version == 'V6':
        return readgridV6(H, **kwargs)
    raise IOError("No version attribute defined for Header.")
1875 
1876 
1877 
1878 def readgridV8(H, **kwargs):
1879 
1880  """
1881  Accepts a header object as input, and selects appropriate readgrid function
1882  to use for reading in data from the flexpart binary Fortran files.
1883 
1884  See the :func:`read_grid` for more information on keyword arguments
1885 
1886  This is the 'V8' version of the function.
1887 
1888  """
1889  # # OPS is the options Structure, sets defaults, then update w/ kwargs
1890  OPS = Structure()
1891  OPS.unit = H.unit
1892  OPS.getwet = False
1893  OPS.getdry = False
1894  OPS.nspec_ret = 0
1895  OPS.npspec_int = False # allows to select an index of npsec when calling readgrid
1896  OPS.pspec_ret = 0
1897  OPS.age_ret = 0
1898  OPS.time_ret = 0
1899  OPS.scaledepo = 1.0
1900  OPS.scaleconc = 1.0
1901  OPS.decayconstant = 9e6
1902  OPS.date = None
1903  OPS.calcfoot = False
1904  OPS.verbose = False
1905  OPS.BinaryFile = False
1906  OPS.version = 'V8'
1907  # # add keyword overides and options to header
1908  OPS.update(kwargs)
1909  # H.update(OPS)
1910 
1911  # # set up the return dictionary (FLEXDATA updates fd, fd is returned)
1912  FLEXDATA = {}
1913  fd = Structure()
1914  fd.options = Structure()
1915 
1916  # # What direction is the run?
1917  unit = OPS.unit
1918 
1919  if H['loutstep'] > 0:
1920  forward = True
1921  if unit == 'time':
1922  # # default forward unit
1923  unit = 'conc'
1924  OPS.unit = unit
1925  else:
1926  forward = False
1927 
1928  # # What species to return?
1929  nspec_ret = OPS.nspec_ret
1930  if isinstance(nspec_ret, int):
1931  nspec_ret = [nspec_ret]
1932  assert iter(nspec_ret), "nspec_ret must be iterable."
1933 
1934  # # get times to return
1935  get_dates = None
1936  if OPS.time_ret is not None:
1937  get_dates = []
1938  time_ret = OPS.time_ret
1939  if isinstance(time_ret, int) == True:
1940  time_ret = [time_ret]
1941 
1942  if time_ret[0] < 0:
1943  #if forward == False:
1944  # # get all dates for calculating footprint.
1945  time_ret = np.arange(len(H.available_dates))
1946  #else:
1947  # raise ValueError("Must enter a positive time_ret for forward runs")
1948 
1949  for t in time_ret:
1950  get_dates.append(H.available_dates[t])
1951 
1952 
1953  # # define what dates to extract if user has explicitly defined a 'date'
1954  if OPS.date != None:
1955  date = OPS.date
1956  if time_ret is not None:
1957  Warning("overwriting time_ret variable, date was requested")
1958  get_dates = []
1959  if not isinstance(date, list):
1960  date = date.strip().split(',')
1961  for d in date:
1962  try:
1963  get_dates.append(H.available_dates[H.available_dates.index(d)])
1964  time_ret = None
1965  except:
1966  _shout("Cannot find date: %s in H['available_dates']\n" % d)
1967 
1968  if get_dates is None:
1969  raise ValueError("Must provide either time_ret or date value.")
1970  else:
1971  # # assign grid dates for indexing fd
1972  fd.grid_dates = get_dates[:]
1973 
1974  #print 'getting grid for: ', get_dates
1975  # Some predifinitions
1976  fail = 0
1977  # set filename prefix
1978  prefix = ['grid_conc_', 'grid_pptv_', \
1979  'grid_time_', 'footprint_', 'footprint_total', \
1980  'grid_conc_nest_', 'grid_pptv_nest_', \
1981  'grid_time_nest_', 'footprint_nest_', 'footprint_total_nest'
1982  ]
1983 
1984  units = ['conc', 'pptv', 'time', 'footprint', 'footprint_total']
1985  unit_i = units.index(unit)
1986 
1987  # Determine what module to read, try to use FortFlex, then dumpgrid, lastly pure Python
1988  # import the FortFlex / Fortran module
1989  try:
1990  if OPS.version == 'V6':
1991  from FortFlex import readgrid_v6 as readgrid
1992  from FortFlex import sumgrid
1993  useFortFlex = True
1994  print 'using FortFlex VERSION 6'
1995  else:
1996  #print('Assumed V8 Flexpart')
1997  from FortFlex import readgrid, sumgrid
1998  useFortFlex = True
1999  except:
2000  # get the original module (no memory allocation)
2001  try:
2002  from nilu.pflexpart.FortFlex import readgrid, sumgrid
2003  useFortFlex = True
2004  #print 'using nilu.pflexpart FortFlex'
2005  except:
2006  useFortFlex = False
2007  #print('Cannot load FortFlex, reverting to BinaryFile.')
2008  if not useFortFlex:
2009  readgrid = _readgridBF
2010  OPS.BinaryFile = True
2011 
2012 
2013  # reserve output fields
2014  #print H.numxgrid, H.numygrid, H.numzgrid, OPS.nspec_ret, OPS.pspec_ret, OPS.age_ret, len(get_dates), H.numpoint
2015 
2016  # -------------------------------------------------
2017 
2018  # # add the requests to the fd object to be returned
2019  OPS.unit = unit
2020  fd.options.update(OPS)
2021 
2022  #--------------------------------------------------
2023  # Loop over all times, given in field H['available_dates']
2024  #--------------------------------------------------
2025 
2026  for date_i in range(len(get_dates)):
2027  datestring = get_dates[date_i]
2028  #print datestring
2029  for s in nspec_ret: # range(OPS.nspec_ret,OPS.nspec_ret+1):A
2030 
2031  FLEXDATA[(s, datestring)] = Structure()
2032  spec_fid = '_' + str(s + 1).zfill(3)
2033 
2034  if unit_i != 4:
2035  filename = os.path.join(H['pathname'], \
2036  prefix[(unit_i) + (H.nested * 5)] + datestring + spec_fid)
2037  H.zdims = H.numzgrid
2038 
2039  else:
2040  # grid total footprint
2041  print "Total footprint"
2042  filename = os.path.join(H['pathname'], \
2043  prefix[(unit_i) + (H.nested * 5)] + spec_fid)
2044  H.zdims = 1
2045 
2046  if os.path.exists(filename):
2047  H.filename = filename
2048  # print 'reading: ' + filename
2049  if OPS.verbose:
2050  print 'with values:'
2051  inputvars = ['filename', 'numxgrid', 'numygrid',
2052  'zdims', 'numpoint', 'nageclass', \
2053  'scaledepo', 'scaleconc',
2054  'decayconstant', 'numpointspec']
2055  for v in inputvars:
2056  print v, " ==> ", H[v]
2057 
2058 
2059  if OPS.BinaryFile:
2060  print("Reading {0} with BinaryFile".format(filename))
2061  gridT, wetgrid, drygrid, itime = _readgridBF(H, filename)
2062  else:
2063  # # Quick fix for Sabine's Ship releases, added nspec_int so that only one
2064  # # field of the nspec dimension is actually read
2065  if OPS.npspec_int is not False:
2066  npspec_int = OPS.npspec_int
2067  numpointspec = 1
2068  else:
2069  npspec_int = 0
2070  numpointspec = H.numpointspec
2071 
2072  gridT, wetgrid, drygrid, itime = readgrid(filename, \
2073  H.numxgrid, H.numygrid,
2074  H.zdims, numpointspec, H.nageclass, \
2075  OPS.scaledepo, OPS.scaleconc, H.decayconstant, npspec_int)
2076 
2077  if OPS.getwet:
2078  wet = wetgrid.squeeze()
2079  if OPS.getdry:
2080  dry = drygrid.squeeze()
2081  if forward:
2082  zplot = gridT[:, :, :, :, 0]
2083  else:
2084  zplot = gridT[:, :, :, :, 0]
2085 
2086  if OPS.calcfoot:
2087 
2088  zplot = sumgrid(zplot, gridT, \
2089  H.area, H.Heightnn)
2090 
2091 
2092  # # get the total column and prep the grid
2093  if H.direction == 'forward':
2094  # not trying to do anything here... must be done
2095  # after retrieving the grid
2096  # D = get_slabs(H,np.squeeze(zplot))
2097  rel_i = 0 #H.available_dates.index(datestring)
2098  D = zplot
2099 
2100  else:
2101  D = zplot
2102  rel_i = 'k'
2103 
2104  # # NOTE:
2105  # # If you're changing things here, you might want to change
2106  # # them in fill_backward as well, yes I know... something is
2107  # # poorly designed ;(
2108 
2109  FLEXDATA[(s, datestring)]['grid'] = D # zplot
2110 
2111  FLEXDATA[(s, datestring)]['itime'] = itime
2112 
2113  FLEXDATA[(s, datestring)]['shape'] = zplot.shape
2114 
2115  FLEXDATA[(s, datestring)]['max'] = zplot.max()
2116 
2117  FLEXDATA[(s, datestring)]['min'] = zplot.min()
2118  FLEXDATA[(s, datestring)]['timestamp'] = datetime.datetime.strptime(datestring, '%Y%m%d%H%M%S')
2119  FLEXDATA[(s, datestring)]['species'] = H['species'][s]
2120  FLEXDATA[(s, datestring)]['gridfile'] = filename
2121  FLEXDATA[(s, datestring)]['rel_i'] = rel_i
2122  FLEXDATA[(s, datestring)]['spec_i'] = s
2123  if OPS.getwet:
2124  FLEXDATA[(s, datestring)]['wet'] = wet
2125  else:
2126  FLEXDATA[(s, datestring)]['wet'] = None
2127 
2128  if OPS.getdry:
2129  FLEXDATA[(s, datestring)]['dry'] = dry
2130  else:
2131  FLEXDATA[(s, datestring)]['dry'] = None
2132 
2133  else:
2134  _shout('***ERROR: file %s not found! \n' % filename)
2135  fail = 1
2136 
2137  fd.set_with_dict(FLEXDATA)
2138  try:
2139  # just for testing, set the first available grid as a shortcut
2140  # this will be removed.
2141  qind = (nspec_ret[0], fd.grid_dates[0])
2142  fd.grid = fd[qind][fd[qind].keys()[0]].grid
2143  except:
2144  pass
2145 
2146 
2147  return fd
2148 
2149 def readgridV6(H, **kwargs):
2150 
2151  """
2152  Accepts a header object as input, and selects appropriate readgrid function
2153  to use for reading in data from the flexpart binary Fortran files.
2154 
2155  See the :func:`read_grid` for more information on keyword arguments
2156 
2157  This is the 'V6' version of the function.
2158 
2159  """
2160  # # OPS is the options Structure, sets defaults, then update w/ kwargs
2161  OPS = Structure()
2162  OPS.unit = 'time'
2163  OPS.nspec_ret = 0
2164  OPS.pspec_ret = 0
2165  OPS.age_ret = 0
2166  OPS.time_ret = 0
2167  OPS.scaledepo = 1.0
2168  OPS.scaleconc = 1.0
2169  OPS.decayconstant = 9e6
2170  OPS.date = None
2171  OPS.calcfoot = False
2172  OPS.verbose = False
2173  OPS.BinaryFile = False
2174  # # add keyword overides and options to header
2175  OPS.update(kwargs)
2176  # H.update(OPS)
2177 
2178  # # set up the return dictionary (FLEXDATA updates fd, fd is returned)
2179  FLEXDATA = {}
2180  fd = Structure()
2181  fd.options = Structure()
2182  # # add the requests to the fd object to be returned
2183  fd.options.update(OPS)
2184 
2185  # # What direction is the run?
2186  unit = OPS.unit
2187  if H['loutstep'] > 0:
2188  forward = True
2189  if unit == 'time':
2190  # # default forward unit
2191  unit = 'conc'
2192  else:
2193  forward = False
2194 
2195  # # What species to return?
2196  nspec_ret = OPS.nspec_ret
2197  if isinstance(nspec_ret, int):
2198  nspec_ret = [nspec_ret]
2199  assert iter(nspec_ret), "nspec_ret must be iterable."
2200 
2201  # # get times to return
2202  get_dates = None
2203  if OPS.time_ret is not None:
2204  get_dates = []
2205  time_ret = OPS.time_ret
2206  if isinstance(time_ret, int) == True:
2207  time_ret = [time_ret]
2208 
2209  if time_ret[0] < 0:
2210  if forward == False:
2211  # # get all dates for calculating footprint.
2212  time_ret = np.arange(len(H.available_dates))
2213  else:
2214  raise ValueError("Must enter a positive time_ret for forward runs")
2215 
2216  for t in time_ret:
2217  get_dates.append(H.available_dates[t])
2218 
2219 
2220  # # define what dates to extract if user has explicitly defined a 'date'
2221  if OPS.date != None:
2222  date = OPS.date
2223  if time_ret is not None:
2224  Warning("overwriting time_ret variable, date was requested")
2225  get_dates = []
2226  if not isinstance(date, list):
2227  date = date.strip().split(',')
2228  for d in date:
2229  try:
2230  get_dates.append(H.available_dates[H.available_dates.index(d)])
2231  time_ret = None
2232  except:
2233  _shout("Cannot find date: %s in H['available_dates']\n" % d)
2234 
2235  if get_dates is None:
2236  raise ValueError("Must provide either time_ret or date value.")
2237  else:
2238  # # assign grid dates for indexing fd
2239  fd.grid_dates = get_dates[:]
2240 
2241  #print 'getting grid for: ', get_dates
2242  # Some predifinitions
2243  fail = 0
2244  # set filename prefix
2245  prefix = ['grid_conc_', 'grid_pptv_', \
2246  'grid_time_', 'footprint_', 'footprint_total', \
2247  'grid_conc_nest_', 'grid_pptv_nest_', \
2248  'grid_time_nest_', 'footprint_nest_', 'footprint_total_nest'
2249  ]
2250 
2251  units = ['conc', 'pptv', 'time', 'footprint', 'footprint_total']
2252  unit_i = units.index(unit)
2253  # Determine what module to read, try to use FortFlex, then dumpgrid, lastly pure Python
2254  # import the FortFlex / Fortran module
2255  try:
2256  from FortFlex import readgrid_v6 as readgrid
2257  from FortFlex import sumgrid
2258  useFortFlex = True
2259  OPS.useFortFlex = useFortFlex
2260  print 'using FortFlex VERSION 6'
2261  except:
2262  # get the original module (no memory allocation)
2263  useFortFlex = False
2264  raise Warning('Cannot import FortFlex trying to use BinaryFile')
2265  if not useFortFlex:
2266  readgrid = _readgridBF
2267  OPS.BinaryFile = True
2268 
2269  # cheat: use key2var function to get values from header dict, H
2270  # need to change this to using H.xxxxx
2271  # headervars = ['nested','nspec','numxgrid','numygrid','numzgrid','nageclass',\
2272  # 'dates','pathname','decayconstant','numpoint','numpointspec',
2273  # 'area','Heightnn','lage']
2274  # for k in headervars:
2275  # key2var(H,k)
2276 
2277 
2278 
2279  # reserve output fields
2280  #print H.numxgrid, H.numygrid, H.numzgrid, OPS.nspec_ret, OPS.pspec_ret, OPS.age_ret, len(get_dates), H.numpoint
2281 
2282  # -------------------------------------------------
2283 
2284  # if forward:
2285  # numpointspec = 1
2286  # else:
2287  # numpointspec = numpoint
2288  # numpointspec = H['numpointspec']
2289  # if unit_i == 4:
2290  # grid=np.empty((numxgrid,numygrid,1,nspec_ret,pspec_ret,age_ret,len(get_dates)))
2291  # else:
2292  # grid=np.empty((numxgrid,numygrid,numzgrid,nspec_ret,pspec_ret,age_ret,len(get_dates)))
2293  # zplot=np.empty((numxgrid,numygrid,numzgrid,numpointspec))
2294 
2295  #--------------------------------------------------
2296  # Loop over all times, given in field H['available_dates']
2297  #--------------------------------------------------
2298 
2299  for date_i in range(len(get_dates)):
2300  datestring = get_dates[date_i]
2301  #print datestring
2302  FLEXDATA[datestring] = {}
2303  for s in nspec_ret: # range(OPS.nspec_ret,OPS.nspec_ret+1):
2304  total_footprint = False
2305  FLEXDATA[(s, datestring)] = Structure()
2306  # spec_fid = '_'+str(s+1).zfill(3)
2307 
2308  if unit_i != 4:
2309  filename = os.path.join(H['pathname'], \
2310  prefix[(unit_i) + (H.nested * 5)] + datestring)
2311  H.zdims = H.numzgrid
2312 
2313  else:
2314  # grid total footprint
2315  print "Total footprint"
2316  total_footprint = True
2317  filename = os.path.join(H['pathname'], \
2318  prefix[(unit_i) + (H.nested * 5)])
2319  H.zdims = 1
2320 
2321  if os.path.exists(filename):
2322  H.filename = filename
2323  # print 'reading: ' + filename
2324  if OPS.verbose:
2325  print 'with values:'
2326  inputvars = ['filename', 'numxgrid', 'numygrid',
2327  'zdims', 'numpoint', 'nageclass', \
2328  'scaledepo', 'scaleconc',
2329  'decayconstant', 'numpointspec']
2330  for v in inputvars:
2331  print v, " ==> ", H[v]
2332 
2333 
2334  if OPS.BinaryFile:
2335  # print 'Using BinaryFile'
2336  gridT, wetgrid, drygrid, itime = _readgridBF(H, filename)
2337  else:
2338  gridT, wetgrid, drygrid, itime = readgrid(filename, \
2339  H.numxgrid, H.numygrid,
2340  H.zdims, H.numpointspec, H.nageclass, \
2341  OPS.scaledepo, OPS.scaleconc, H.decayconstant)
2342 
2343 
2344  if forward:
2345  zplot = gridT[:, :, :, :, 0]
2346  else:
2347  zplot = gridT[:, :, :, :, 0]
2348 
2349  # if total_footprint:
2350  # zplot = np.squeeze(gridT)
2351 
2352  if OPS.calcfoot:
2353  zplot = sumgrid(zplot, gridT, \
2354  H.area, H.Heightnn)
2355 
2356 
2357  # # get the total column and prep the grid
2358  if H.direction == 'forward':
2359  # not trying to do anything here... must be done
2360  # after retrieving the grid
2361  # D = get_slabs(H,np.squeeze(zplot))
2362  # rel_i = H.available_dates.index(datestring)
2363  D = zplot
2364  rel_i = 'k'
2365  else:
2366  D = zplot
2367  rel_i = 'k'
2368 
2369  # # NOTE:
2370  # # If you're changing things here, you might want to change
2371  # # them in fill_backward as well, yes I know... something is
2372  # # poorly designed ;(
2373  FLEXDATA[(s, datestring)]['grid'] = D # zplot
2374  FLEXDATA[(s, datestring)]['itime'] = itime
2375  FLEXDATA[(s, datestring)]['shape'] = zplot.shape
2376  FLEXDATA[(s, datestring)]['max'] = zplot.max()
2377  FLEXDATA[(s, datestring)]['min'] = zplot.min()
2378  FLEXDATA[(s, datestring)]['timestamp'] = datetime.datetime.strptime(datestring, '%Y%m%d%H%M%S')
2379  FLEXDATA[(s, datestring)]['species'] = H['species'][s]
2380  FLEXDATA[(s, datestring)]['gridfile'] = filename
2381  FLEXDATA[(s, datestring)]['rel_i'] = rel_i
2382  FLEXDATA[(s, datestring)]['spec_i'] = s
2383 
2384 
2385  else:
2386  _shout('***ERROR: file %s not found! \n' % filename)
2387  fail = 1
2388  fd.set_with_dict(FLEXDATA)
2389 
2390  try:
2391  # just for testing, set the first available grid as a shortcut
2392  # this will be removed.
2393  qind = (0, fd.grid_dates[0])
2394  fd.grid = fd[qind][fd[qind].keys()[0]].grid
2395  except:
2396  pass
2397 
2398 
2399  return fd
2400 
2402 
2403 
2404  footprints = np.zeros((H.ny, H.nx, len(H.C.keys())))
2405  for i, key in enumerate(H.C):
2406  footprints[:,:,i] = H.C[key].slabs[0]
2407 
2408  #footprints = np.average(footprints, axis=2)
2409 
2410  return footprints
2411 
2412 
2413 
2414 
def fill_grids(H, nspec=0, FD=None, add_attributes=False):
    """ for backward runs, calculates the 20-day sensitivity at each release point.

    Usage::

        > C = fill_grids(H, nspec=(0))

    This will cycle through all available_dates and create the filled
    backward array for each k in H.numpointspec.

    Returns

      A dictionary keyed by a (species, k) tuple,
      OR C & FD attributes on H when add_attributes is True.

      Each element in the dictionary is a 3D array (x, y, z) for each
      species, k.

    .. note::
        USE with *caution*, this is **MEMORY** intensive!

    Arguments

      ============== ========================================
      keyword        Description [default]
      ============== ========================================
      nspec          the species ID or a tuple of species IDs
      FD             FD can be passed if it is already read
      add_attributes will add C and FD as attributes to H,
                     rather than returning just C
      ============== ========================================

    .. todo::
        There's a lot of redundancy in the storage of attributes, maybe
        there is a better way to handle this.

    """
    # # make sure nspec is iterable
    if isinstance(nspec, int):
        species = [nspec]
    else:
        species = nspec
    assert iter(species), 'nspec must be iterable, or you can pass an int'

    # # initialize variables
    if FD is None:
        # # then we need to read the grids (all dates)
        FD = read_grid(H, time_ret=-1, nspec_ret=species)

    C = Structure()

    if H.direction == 'backward':
        # pre-allocate one accumulator grid per (species, release point)
        for s, k in itertools.product(species, range(H.numpointspec)):
            C[(s, k)] = Structure()
            C[(s, k)].grid = np.zeros((H.numxgrid, H.numygrid, H.numzgrid))
            C[(s, k)]['itime'] = None
            C[(s, k)]['timestamp'] = H.releasetimes[k]
            C[(s, k)]['species'] = H['species'][s]
            C[(s, k)]['gridfile'] = 'multiple'
            C[(s, k)]['rel_i'] = k
            C[(s, k)]['spec_i'] = s

        # read data grids and attribute/sum sensitivity
        # (removed a leftover debug `print species` statement here)
        for s in species:
            for d in FD.grid_dates:
                # cycle through all the date grids (20 days back)
                for k in range(H.numpointspec):
                    # cycle through each release point
                    contribution = FD[(s, d)].grid[:, :, :, k]
                    C[(s, k)].grid = C[(s, k)].grid + contribution

    # added this, not sure if it makes sense to have this for 'forward'
    # however, adding 'C' does not add to memory, as it points to same
    # memory location as FD
    if H.direction == 'forward':
        for s in species:
            for k in range(len(FD.grid_dates)):
                d = FD.grid_dates[k]
                C[(s, k)] = FD[(s, d)]

    for s, k in C:
        # # add total column
        C[(s, k)].slabs = get_slabs(H, C[(s, k)].grid)
        # # shape, min, max based on total column
        if H.direction == 'backward':
            C[(s, k)]['shape'] = C[(s, k)].grid[0].shape
            C[(s, k)]['max'] = C[(s, k)].grid[0].max()
            C[(s, k)]['min'] = C[(s, k)].grid[0].min()

    if add_attributes:
        H.C = C
        H.FD = FD
    else:
        return C
2516 
2517 
2518 
def read_emissions(emissionsfile, E=None, maxemissions=1):
    """ Use reademissions.so module to read emissions file.

    Parameters
      emissionsfile : path to the emissions file.
      E             : optional pre-configured emissions Structure; when None
                      a new one is created with global 0.5-degree defaults.
      maxemissions  : number of emission fields to read [1].

    Returns
      E with `grid` (the emissions array) and `filename` attributes set.

    Raises
      ImportError if the FortFlex extension (reademissions) is unavailable.
    """
    try:
        from FortFlex import reademissions
    except ImportError:
        raise ImportError("Cannot find FortFlex module or missing reademissions")
    if not E:
        E = Structure()
        # set defaults for global 0.5 degree emissions
        defaults = {'nxmax': 720, 'nymax': 360, 'outlon0': -180, 'outlat0': -90,
                    'numxgrid': 720, 'numygrid': 360, 'dxout': 0.5, 'dyout': 0.5}
        for k, v in defaults.iteritems():
            # setattr replaces the previous exec()-based assignment,
            # which was both slower and unsafe
            setattr(E, k, v)

    emissions = reademissions(emissionsfile, maxemissions, E.nxmax, E.nymax,
                              E.outlon0, E.outlat0, E.numxgrid, E.numygrid,
                              E.dxout, E.dyout)
    E.grid = emissions
    E.filename = emissionsfile
    return E
2538 
2539 
def get_slabs(H, G, index=None, normAreaHeight=True, scale=1.0):
    """ Preps grid from readgridV8 for plotting.

    Accepts a 3D or 4D GRID from readgrid with optional index.
    shape ~= (360, 180, 3, 88, 1)

    Usage::

        plotgrid = get_slabs(H, G)

    Inputs

      H = A Header instance from a FLEXPART run
      G = A grid from the FLEXPARTDATA or FD dictionary returned from
          read_grid

    Returns

      slabs : a dictionary of rank-2 arrays corresponding to vertical
              levels.

    **Note**: transposes the grid for plotting.
      Total Column as D[0]
      Level 1 as D[1]
      Level 2 as D[2]
      ...

    Arguments

      =============== ========================================
      keyword         Description [default]
      =============== ========================================
      index           a release point index (k) [None -> 0]
      normAreaHeight  [True], normalizes by the area/height
      scale           multiplicative factor applied to each
                      slab [1.0]
      =============== ========================================

    .. todo::
        Need to check the normalization and indexing.

    """
    Heightnn = H['Heightnn']
    grid_shape = G.shape

    # 4-D grid: last axis is the release point; pick one slice.
    # (NB: the former `is 4` / `H.direction is 'forward'` identity
    # comparisons were replaced with proper equality tests; both
    # direction branches selected index 0 anyway.)
    if len(grid_shape) == 4:
        if index is None:
            index = 0
        try:
            g = G[:, :, :, index]
        except Exception:
            raise IOError('######### ERROR: Which Release Point to get? ########## ')
    elif len(grid_shape) == 3:
        # a 3-D grid carries no release-point axis; index is ignored
        g = G
    else:
        raise ValueError('get_slabs expects a 3-D or 4-D grid, got shape %s'
                         % (grid_shape,))

    Slabs = Structure()
    # Total Column: sum over the vertical axis (hoisted out of the loop;
    # it is independent of the loop variable)
    TC = np.sum(g, axis=2).T * scale

    for i in range(g.shape[2]):
        if normAreaHeight:
            data = g[:, :, i] / Heightnn[:, :, i]
        else:
            data = g[:, :, i]
        # transpose for plotting, levels stored 1-based
        Slabs[i + 1] = data.T * scale

    Slabs[0] = TC
    return Slabs
2618 
2619 
2620 #### FLEXPART Plotting Functions ########
2621 
2623  try:
2624  del FIGURE.m
2625  plt.close('all')
2626  del m
2627  "m dropped"
2628  return 1
2629  except:
2630  print 'Cannot delete m, does not exist.'
2631  return 0
2632 
2633 
2634 
2635 
2636 def plot_trajchar(H, R, numrelease=1, varindex=[4, 15, 14], \
2637  FIGURE=None, map_region=None, projection=None, coords=None):
2638  """ plot FLEXPART trajectory (or set of trajectories) characteristics
2639  R is a dictionary returned from pflexible.read_trajectories
2640 
2641  varindex is a list of the variables to plot from the trajectories
2642  """
2643  if FIGURE == None:
2644  FIGURE = mp.get_FIGURE(getm=False)
2645 
2646  try:
2647  ax = FIGURE.ax
2648  fig = FIGURE.fig
2649  m = FIGURE.m
2650  except:
2651  print 'problem getting ax,m, or fig.'
2652 
2653  try:
2654  del fig.axes[0]
2655  except:
2656  print 'ax not removed'
2657 
2658 
2659  T = R['Trajectories']
2660  labels = R['labels']
2661  t = T[np.where(T[:, 0] == numrelease), :][0]
2662  numplot = len(varindex)
2663 
2664  for i in range(numplot):
2665  sp = fig.add_subplot(numplot, 1, i + 1)
2666  sp.plot(t[:, 1], t[:, varindex[i]])
2667  ax = plt.gca()
2668  if i + 1 != numplot: plt.setp(ax, xticklabels=[])
2669  plt.ylabel(labels[varindex[i]])
2670  plt.grid('on')
2671 
2672  return FIGURE
2673 
2674 
def plot_releases(R, FIGURE=None, threedim=False,
                  map_region=None, projection=None, coords=None,
                  overlay=True,
                  draw_circles=True,
                  draw_labels=True,
                  cbar2=True,
                  MapPar=None):
    """ plot the llon,llat,elv1 of the releases.

    Usage::
        >F = plot_releases(R)

    See the :func:`read_releases` function for information on how to
    generate "R".

    When `threedim` is True, a matplotlib 3-D figure is returned; otherwise
    the releases are drawn on a basemap and a FIGURE object is returned.
    """
    if threedim:
        try:
            from mpl_toolkits.mplot3d import Axes3D
        except ImportError:
            raise ImportError('mplot3d not available from mpl_toolkits!')

        fig = plt.figure()
        ax = Axes3D(fig)
        ax.plot(R.lllon, R.lllat, R.elv1)
        return fig

    # # 2-D case: set up the FIGURE
    if FIGURE is None:
        FIGURE = mp.get_FIGURE(map_region=map_region, projection=projection,
                               coords=coords, MapPar=MapPar)
    # # get fig info and make active
    fig = FIGURE.fig
    m = FIGURE.m
    ax = FIGURE.ax
    plt.figure(fig.number)
    plt.axes(ax)

    # # prepare the data
    lon = R.lllon
    lat = R.lllat
    zlevel = R.elv1
    zsize = np.ones(len(lon)) * 30
    marker = 'o'

    # # clear the previous track
    if 'circles' in FIGURE.keys():
        del FIGURE['circles']
    if overlay is False:
        del ax.collections[FIGURE.indices.collections:]
        del ax.texts[FIGURE.indices.texts:]

    # # plot the track
    cx, cy = m(lon, lat)
    if draw_circles:
        cmap = plt.get_cmap('gist_gray')
        circles = m.scatter(cx, cy, zsize, zlevel, cmap=cmap,
                            marker=marker, edgecolor=None,
                            zorder=10, alpha=0.85)
        # # m.scatter has no color bar,
        # # so create a ghost 'scatter' instance:
        pos = ax.get_position()
        l, b, w, h = getattr(pos, 'bounds', pos)
        jnkfig = plt.figure()
        jnkax = jnkfig.add_axes([l, b, w, h], frameon=False)
        jnkmap = jnkax.scatter(cx, cy, zsize, zlevel,
                               cmap=cmap, marker=marker,
                               edgecolor=None)

        try:
            FIGURE.circles = circles
        except Exception:
            pass
        # # make the figure active again
        plt.figure(fig.number)
        # # draw the legend and title
        # # check if a colorbar legend exists (this will cause
        # # problems for subplot routines!)
        if cbar2 is True:
            cax = plt.axes([l + w + 0.12, b, 0.02, h - 0.035])
        else:
            cax = plt.axes([l + w + 0.03, b, 0.025, h - 0.035])
        cb = fig.colorbar(jnkmap, cax=cax)  # draw colorbar
        p_leg = mpl.font_manager.FontProperties(size='6')
        cax.set_title('altitude\n(m)', fontproperties=p_leg)

        # # delete the ghost instance
        plt.close(jnkfig.number)
        del jnkax, jnkfig, jnkmap

    plt.axes(ax)
    plt.setp(ax, xticks=[], yticks=[])

    if draw_labels:
        # NOTE(review): only the font properties are created here; the
        # original never actually drew label text — confirm intent.
        p_cax = mpl.font_manager.FontProperties(size='8',
                                                style='italic',
                                                weight='bold',
                                                )

    FIGURE.fig = fig
    FIGURE.m = m
    FIGURE.ax = ax
    return FIGURE
2781 
2782 
2783 def plot_spectra(inspectra,
2784  plt_title='', fig_title='',
2785  y_label=None,
2786  spectra_label='bins',
2787  FIGURE=None, y_datarange=None,
2788  cum=False, labels=[],
2789  bars=False, debug=False):
2790  """ plot a spectra
2791 
2792  Usage::
2793 
2794  > FIG = plot_agespectra(spectra,*kwargs)
2795 
2796 
2797  Returns
2798  A "mapping.py" ``FIGURE`` object.
2799 
2800  Arguments
2801 
2802  .. tabularcolumns:: |l|L|
2803 
2804  ============= ========================================
2805  keyword Description [default]
2806  ============= ========================================
2807  runid an ID for the title
2808  yunits units for the y-axis [None]
2809  FIGURE A "FIGURE" object[None] (see mapping.py)
2810  data_range y-axis data range
2811  web_version Set to True if using the make_webpages
2812  function. see: :func:`read_agespectrum`
2813  continental Set to True if continental_spectrum
2814  cum Set to true if data should be cumulative
2815  bars Plot using stacked bars rather than a
2816  filled line plot
2817  ============= ========================================
2818 
2819  .. todo::
2820  There's a lot of redundancy in the storage of attributes, maybe there is a
2821  better way to handle this.
2822 
2823 
2824  """
2825  # # make tick lables smaller
2826  mpl.rcParams['xtick.labelsize'] = 6
2827  mpl.rcParams['ytick.labelsize'] = 6
2828 
2829  if FIGURE == None:
2830  FIGURE = Structure()
2831  fig = plt.figure(figsize=(8, 6))
2832  FIGURE.fig = fig
2833  ax = fig.add_subplot(111) # ,pos=[0.1,0.2,.8,.7])
2834  FIGURE.ax = ax
2835  else:
2836  fig = FIGURE.fig
2837  ax = FIGURE.ax
2838 
2839 
2840  try:
2841  numageclasses = inspectra.numageclass
2842  releasetimes = inspectra.releasetimes
2843  inspectra = inspectra[:]
2844  except:
2845  numageclasses = inspectra.shape[1] - 1
2846  releasetimes = inspectra[:, 0]
2847  inspectra = inspectra[:, 1:]
2848 
2849  if cum == 'norm':
2850  # # Normalize the data so it fills to 100%
2851  # spectra = np.zeros(inspectra.shape)
2852  spectra = (inspectra.transpose() / np.sum(inspectra, axis=1)).transpose()
2853  spectra = np.cumsum(spectra[:, :], axis=1)
2854  # sums = np.sum(inspectra,axis=1)
2855  # for i,elem in enumerate(inspectra):
2856  # spectra[i,:] = elem/sums[i]
2857  elif cum is True and cum != 'norm':
2858  spectra = np.cumsum(inspectra[:, :], axis=1)
2859  else:
2860  spectra = inspectra
2861 
2862  # Set up plotting environment colors
2863  Nc = np.array([float(i) / numageclasses for i in range(numageclasses)])
2864  norm = mpl.colors.normalize(Nc.min(), Nc.max())
2865  # jet = plt.cm.get_cmap('jet')
2866  jet = _gen_flexpart_colormap()
2867  plt.hold('on')
2868  facecolors = []
2869  # for i in range(0,H.numageclasses-1):
2870  for i in range(numageclasses):
2871  facecolors.append(jet(norm(Nc[i])))
2872  if labels:
2873  lbl = labels[i]
2874  else:
2875  lbl = i
2876  # ax.plot(ageclass[:,i])
2877  if i == 0:
2878  # create the baseline
2879  if bars:
2880  ax.bar(releasetimes, spectra[:, i], 0.03, color=facecolors[-1])
2881  else:
2882  ax.fill_between(releasetimes, np.zeros(len(spectra[:, i])), spectra[:, i],
2883  color=facecolors[-1], label='%s' % lbl)
2884  else:
2885  if bars:
2886  ax.bar(releasetimes, spectra[:, i], 0.03, color=facecolors[-1], bottom=spectra[:, i - 1])
2887  else:
2888  ax.fill_between(releasetimes, spectra[:, i - 1], spectra[:, i],
2889  color=facecolors[-1], label='%s' % (lbl))
2890  # facecolors.append(jet(norm(Nc[i+1])))
2891 
2892 
2893  # ax.set_yscale('log')
2894  if y_datarange:
2895  print 'setting data range'
2896  ax.set_ylim(y_datarange)
2897  else:
2898  ax.set_ylim((0, spectra.max()))
2899  ax.grid(True)
2900  if y_label:
2901  ax.set_ylabel('%s' % y_label, rotation=90, ha='center', size='small')
2902 
2903  if plt_title:
2904  plt.title('%s' % (plt_title), size='small')
2905  if fig_title:
2906  fig.suptitle('Emissions by %s' % (spectra_type), size='small')
2907 
2908  fig.autofmt_xdate()
2909  # for xl in ax.get_xticklabels():
2910  # plt.setp(xl,size='x-small')
2911  # # ListedColormap
2912  pos = ax.get_position()
2913  l, b, w, h = getattr(pos, 'bounds', pos)
2914  # polygons = ax.collections
2915  # for i,p in enumerate(polygons): p.set_color(facecolors[i])
2916  # BoundaryNorm, and extended ends to show the "over" and "under"
2917  # value co
2918  ax2 = fig.add_axes([l, .08, w, 0.03])
2919  cmap = mpl.colors.ListedColormap(facecolors)
2920  # cmap.set_over('0.25')
2921  # cmap.set_under('0.75')
2922 
2923  # If a ListedColormap is used, the length of the bounds array must be
2924  # one greater than the length of the color list. The bounds must be
2925  # monotonically increasing.
2926  bounds = range(numageclasses + 1)
2927  # norm = mpl.colors.BoundaryNorm(bounds, cmap.N)
2928  cb2 = mpl.colorbar.ColorbarBase(ax2,
2929  cmap=cmap,
2930  # norm=norm,
2931  # # to use 'extend', you must
2932  # # specify two extra boundaries:
2933  boundaries=bounds,
2934  # extend='both',
2935  # values=range(H.numageclasses+1),
2936  ticks=bounds, # optional
2937  spacing='proportional',
2938  orientation='horizontal')
2939  if spectra_label:
2940  cb2.set_label(spectra_label, size='x-small')
2941  if labels:
2942  ax2.set_xticklabels(labels, va='center', ha='left', rotation=0, size='xx-small')
2943 
2944  if debug:
2945  plt.show()
2946 
2947  # make main axes current
2948  fig.add_subplot(ax)
2949 
2950  FIGURE.ax = ax
2951  FIGURE.fig = fig
2952 
2953  return FIGURE
2954 
2955 
def plot_agespectra(H, agespectra,
                    plt_title='', y_label=None,
                    cbar_labels=None,
                    FIGURE=None, y_datarange=None,
                    web_version=False, continental=False, cum=False,
                    bars=False, debug=False):
    """ plot an agespectra

    Usage::

        > FIG = plot_agespectra(H, agespectra, **kwargs)

    This creates a filled in-between agespectra plot using information from
    the header "H".

    Returns
      A "mapping.py" ``FIGURE`` object.

    Arguments

    .. tabularcolumns::  |l|L|

      ============= ========================================
      keyword       Description [default]
      ============= ========================================
      plt_title     an ID for the title
      y_label       units for the y-axis [None]
      cbar_labels   explicit tick labels for the colorbar
      FIGURE        A "FIGURE" object [None] (see mapping.py)
      y_datarange   y-axis data range
      web_version   Set to True if using the make_webpages
                    function. see: :func:`read_agespectrum`
      continental   Set to True if continental_spectrum
      cum           Set to true if data should be cumulative
      bars          Plot using stacked bars rather than a
                    filled line plot
      debug         call plt.show() before returning [False]
      ============= ========================================

    .. note::
        Required attributes of 'H' if you want to make a dummy H. Or just
        set H to "None" and it will be taken from the agespectra input.
          H.numageclasses
          H.releasetimes

    """
    # # make tick labels smaller
    mpl.rcParams['xtick.labelsize'] = 6
    mpl.rcParams['ytick.labelsize'] = 6

    if FIGURE is None:
        FIGURE = Structure()
        fig = plt.figure()
        FIGURE.fig = fig
        ax = fig.add_subplot(111)
        FIGURE.ax = ax
    else:
        fig = FIGURE.fig
        ax = FIGURE.ax

    if web_version:
        inspectra = agespectra.agespectrum[:, 4:]
    else:
        inspectra = agespectra[:]

    if continental:
        from nilu.pflexpart import emissions as em
        spectra_label = "Continents"
        spectra_type = "Continent"
        conts = em.Continents()
        cbar_labels = [i[1] for i in conts.flexpart_continents]
    else:
        spectra_label = "Ageclasses (Days)"
        spectra_type = "Ageclass"

    if not H:
        # no header: derive class count and release times from the input
        H = Structure()
        try:
            numageclasses = inspectra.numageclass
            releasetimes = inspectra.agespectrum[:, 0]
        except AttributeError:
            numageclasses = inspectra.shape[1] - 1
            releasetimes = inspectra[:, 0]
            inspectra = inspectra[:, 1:]
    else:
        numageclasses = H.numageclasses
        releasetimes = H.releasetimes

    if cum is not None:
        spectra = _cum_spec(inspectra, cum=cum)
    else:
        # BUGFIX: previously `spectra` was left unbound when cum is None,
        # raising NameError below.
        spectra = inspectra

    # Set up plotting environment colors
    Nc = np.array([float(i) / numageclasses for i in range(numageclasses)])
    norm = mpl.colors.Normalize(Nc.min(), Nc.max())
    jet = _gen_flexpart_colormap()
    plt.hold('on')
    facecolors = []
    for i in range(numageclasses):
        facecolors.append(jet(norm(Nc[i])))
        if i == 0:
            # create the baseline
            if bars:
                ax.bar(releasetimes, spectra[:, i], 0.03, color=facecolors[-1])
            else:
                ax.fill_between(releasetimes, np.zeros(len(spectra[:, i])), spectra[:, i],
                                color=facecolors[-1], label='%s' % i)
        else:
            # stack on top of the previous age class
            if bars:
                ax.bar(releasetimes, spectra[:, i], 0.03, color=facecolors[-1],
                       bottom=spectra[:, i - 1])
            else:
                ax.fill_between(releasetimes, spectra[:, i - 1], spectra[:, i],
                                color=facecolors[-1], label='%s' % (i))

    if y_datarange:
        # BUGFIX: previously referenced undefined `y_range`; the keyword
        # argument is y_datarange (cf. plot_spectra).
        ax.set_ylim(y_datarange)
    else:
        ax.set_ylim((0, spectra.max()))
    ax.grid(True)
    if y_label:
        ax.set_ylabel('%s' % y_label, rotation=90, ha='center', size='small')

    plt.title('%s' % (plt_title), size='small')
    fig.suptitle('Emissions by %s' % (spectra_type), size='small')
    fig.autofmt_xdate()

    # # build a discrete colorbar (ListedColormap) below the axes
    pos = ax.get_position()
    l, b, w, h = getattr(pos, 'bounds', pos)
    ax2 = fig.add_axes([l, .08, w, 0.03])
    cmap = mpl.colors.ListedColormap(facecolors)

    # If a ListedColormap is used, the length of the bounds array must be
    # one greater than the length of the color list. The bounds must be
    # monotonically increasing.
    bounds = range(numageclasses + 1)
    cb2 = mpl.colorbar.ColorbarBase(ax2,
                                    cmap=cmap,
                                    boundaries=bounds,
                                    ticks=bounds,  # optional
                                    spacing='proportional',
                                    orientation='horizontal')
    cb2.set_label(spectra_label, size='x-small')
    if cbar_labels:
        cb2.set_ticklabels(cbar_labels)

    if debug:
        plt.show()

    return FIGURE
3140 
3141 
3142 def plot_clusters(H, T, rel_i=0,
3143  ncluster=0, sizescale=10000,
3144  map_region=None, projection=None, coords=None,
3145  FIGURE=None, overlay=False, opacity=0.7,
3146  draw_circles=True, draw_labels=True,
3147  MapPar=None):
3148  """ plots the clusters """
3149  trjs = T['Trajectories']
3150  labels = T['labels']
3151 
3152  # Set legend properties:
3153  p_legend = mpl.font_manager.FontProperties(size='8')
3154 
3155 
3156 
3157  # extract only releases of interest
3158  rel += 1 # account for zero indexing
3159  t = trjs[np.where(trjs[:, 0] == rel), :][0]
3160  if FIGURE == None:
3161  FIGURE = mp.get_FIGURE(map_region=map_region, projection=projection,
3162  coords=coords, MapPar=MapPar)
3163 
3164 
3165  try:
3166  ax = FIGURE.ax
3167  fig = FIGURE.fig
3168  m = FIGURE.m
3169  except:
3170  print 'problem getting ax,m, or fig.'
3171 
3172  # # Remove prior retroplume
3173  if not overlay:
3174  if len(ax.artists) > 1:
3175  try:
3176  art_ind = len(ax.artists)
3177  del ax.artists[:]
3178  # del previous texts, but not lat/lon labels
3179  del ax.texts[-art_ind:]
3180  except:
3181  'could not delete artists'
3182  pass
3183 
3184  # get cluster from data
3185  # set up cluster index, start column of T array
3186  cindx = [16, 20, 24, 28, 32, 36]
3187  if ncluster == 'all':
3188  # plot all clusters
3189  ncluster = range(5)
3190  elif isinstance(ncluster, int):
3191  ncluster = [ncluster]
3192 
3193  for nc in ncluster:
3194  # clstrindx = 16 + (5*(nc))
3195  clstrindx = cindx[nc]
3196  indx = [1] + range(clstrindx, clstrindx + 4)
3197  data = t[:, indx]
3198  # use ellipses
3199  ells = _genEllipse(data, m, sizescale=sizescale)
3200  texts = []
3201  for item in ells:
3202  e = item[0]
3203  xy = item[1]
3204  texts.append([xy, e.get_label()])
3205  e.set_clip_box(ax.bbox)
3206  e.set_alpha(opacity)
3207  if draw_circles:
3208  ax.add_artist(e)
3209  for txt in texts:
3210  x = txt[0][0]
3211  y = txt[0][1]
3212  # print x,y, t[1]
3213  if draw_labels:
3214  ax.text(x, y, txt[1])
3215  # plt.colorbar(ax)
3216  # ax.legend(ax.artists,(o.get_label() for o in ax.artists), prop=p_legend )
3217  FIGURE.ax = ax
3218  FIGURE.fig = fig
3219  FIGURE.m = m
3220 
3221  return FIGURE
3222 
3223 
3224 def plot_trajectory_ellipses(H, T, rel_i=0,
3225  ncluster=0, sizescale=10000,
3226  map_region=None, projection=None, coords=None,
3227  FIGURE=None, overlay=False, opacity=0.7,
3228  draw_circles=True, draw_labels=True,
3229  MapPar=None):
3230  """
3231  Plots trajectories from FLEXPART output.
3232 
3233  """
3234 
3235  trjs = T['Trajectories']
3236  labels = T['labels']
3237 
3238  # Set legend properties:
3239  p_legend = mpl.font_manager.FontProperties(size='8')
3240 
3241  # extract only releases of interest according to rel_i
3242  rel = rel_i + 1 # account for zero indexing
3243  t = trjs[np.where(trjs[:, 0] == rel), :][0]
3244  if FIGURE == None:
3245  FIGURE = mp.get_FIGURE(map_region=map_region, projection=projection,
3246  coords=coords, MapPar=MapPar)
3247 
3248  try:
3249  ax = FIGURE.ax
3250  fig = FIGURE.fig
3251  m = FIGURE.m
3252  except:
3253  print 'problem getting ax,m, or fig.'
3254 
3255  # # Remove prior retroplume
3256  if overlay:
3257  if len(ax.artists) > 1:
3258  try:
3259  art_ind = len(ax.artists)
3260  del ax.artists[:]
3261  # del previous texts, but not lat/lon labels
3262  del ax.texts[-art_ind:]
3263  except:
3264  'could not delete artists'
3265  pass
3266 
3267  # get trajectory from data
3268  indx = [1, 2, 3, 4] # return, j,x,y,z
3269  data = t[:, indx]
3270  # use ellipses
3271  ells = _genEllipse(data, m, sizescale=sizescale)
3272  texts = []
3273  for item in ells:
3274  e = item[0]
3275  xy = item[1]
3276  texts.append([xy, e.get_label()])
3277  e.set_clip_box(ax.bbox)
3278  e.set_alpha(opacity)
3279  if draw_circles:
3280  ax.add_artist(e)
3281  for txt in texts:
3282  x = txt[0][0]
3283  y = txt[0][1]
3284  # print x,y, t[1]
3285  if draw_labels:
3286  ax.text(x, y, txt[1])
3287  plt.draw()
3288  FIGURE.ax = ax
3289  FIGURE.fig = fig
3290  FIGURE.m = m
3291  FIGURE.circles = ells
3292 
3293  return FIGURE
3294 
def plot_markers(H, lon, lat, zsize=None, zlevel=None,
                 FIGURE=None,
                 map_region=None, projection=None, coords=None,
                 overlay=True,
                 draw_circles=True,
                 draw_labels=False,
                 cbar2=True,
                 cbar2_title=None,
                 MapPar=None,
                 color='blue', edgecolor='none',
                 zorder=10, alpha=0.85):

    """Plot a group of x,y pairs, with optional size and color information.

    Usage::

        > FIG = plot_markers(H,lon,lat,*kwargs)

    .. note::
        You can substitude "None" for the :class:`Header` instance if you haven't
        created one


    Returns
      A :mod:`mapping` "FIGURE" object.

    Arguments

      .. tabularcolumns::  |l|L|

      =============         =============================================
      keyword               Description [default]
      =============         =============================================
      zsize                 optional array of marker sizes [1]
      zlevel                optional array of marker color values [10]
      FIGURE                A "FIGURE" object[None] (see mapping.py)
      projection            A projection pre-defined in :mod:`mapping`
      coords                A set of lon,lat coords for autosetting
                            map map_region (not really working).
      overlay               [True] will overlay the markers
                            on top of another map instance.
      draw_circles          [True] will draw the markers.
                            If [False] nothing is drawn.
      draw_labels           [False] unimplemented
      cbar2                 [True] draws the scale bar as a second axis.
      cbar2_title           Optional argument to overide the cbar title.
      MapPar                A Structure of mapping parameters to pass
                            to the basemap instance if desired.
      =============         =============================================

    .. todo::
        Who knows?! Could probably handle the overlaying better.

    .. note::
        Just set H to "None" if you haven't already created a "Header" instance.


    """

    if H:
        pass

    # # Set up the FIGURE
    if FIGURE is None:
        FIGURE = mp.get_FIGURE(map_region=map_region, projection=projection,
                               coords=coords, MapPar=MapPar)
    # #Get fig info and make active
    fig = FIGURE.fig
    m = FIGURE.m
    ax = FIGURE.fig.axes[0]
    plt.figure(fig.number)
    plt.axes(ax)

    # # prepare the data
    cx, cy = m(lon, lat)
    if zlevel is None:
        zlevel = np.ones(len(lon)) * 10.
    if zsize is None:
        zsize = np.ones(len(lon)) * 1
    marker = 'o'


    # # clear the previous track
    if 'circles' in FIGURE.keys():
        del FIGURE['circles']
    if overlay is False:
        del ax.collections[FIGURE.indices.collections:]
        del ax.texts[FIGURE.indices.texts:]

    # # plot the track
    if draw_circles:
        cmap = plt.get_cmap('jet')
        circles = m.scatter(cx, cy, zsize, zlevel, cmap=cmap,
                            marker=marker, edgecolor=edgecolor,
                            zorder=zorder, alpha=alpha)

        try:
            FIGURE.circles = circles
        except:
            pass
    # # make the figure active again
    plt.figure(fig.number)
    # # draw the legend and title
    # # CREATE COLORBAR
    # # Get the current axes, and properties for use later
    ax0 = fig.axes[0]
    pos = ax0.get_position()
    l, b, w, h = pos.bounds
    # BUGFIX: the colorbar requires the scatter mappable; guard on
    # draw_circles, otherwise 'circles' is undefined and the except
    # branch crashes with a NameError.
    if cbar2 and draw_circles:
        try:
            cb2 = FIGURE.cb2
            cax2 = FIGURE.cax2
            cb2.update_normal(circles)
        except:
            cax2 = plt.axes([l + w + 0.08, b, 0.02, h])
            # # using im2, not im (hack to prevent colors from being
            # # too compressed at the low end on the colorbar - results
            # # from highly nonuniform colormap)
            cb2 = fig.colorbar(circles, cax=cax2)  # , format='%3.2g') # draw colorbar
            FIGURE.cax2 = cax2
            FIGURE.cb2 = cb2
        # # check if a colorbar legend exists (this will cause
        # # problems for subplot routines!)
        p_leg = mpl.font_manager.FontProperties(size='6')
        if cbar2_title:
            cax2.set_title(cbar2_title, fontproperties=p_leg)
        else:
            cax2.set_title('altitude\n(m)', fontproperties=p_leg)
    else:
        pass

    plt.axes(ax)
    plt.setp(ax, xticks=[], yticks=[])


    FIGURE.fig = fig
    FIGURE.m = m
    FIGURE.ax = ax
    try:
        FIGURE.cax2 = cax2
        FIGURE.cb2 = cb2
    except:
        pass
    return FIGURE
3445 
def plot_trajectory(H, T, rel_i, FIGURE=None,
                    map_region=None, projection=None, coords=None,
                    overlay=True,
                    draw_circles=True,
                    draw_labels=True, days_back=20,
                    cbar2=True,
                    cbar2_title=None,
                    MapPar=None):
    """Plot the center trajectory of the plume on the map

    Usage::

        > FIG = plot_trajectory(H,RT,rel_id,*kwargs)

    .. note::
        You can substitude "None" for the :class:`Header` instance if you haven't
        created one


    Returns
      A :mod:`mapping` "FIGURE" object.

    Arguments

      .. tabularcolumns::  |l|L|

      =============         =============================================
      keyword               Description [default]
      =============         =============================================
      rel_i                 **required** release index
      FIGURE                A "FIGURE" object[None] (see mapping.py)
      projection            A projection pre-defined in :mod:`mapping`
      coords                A set of lon,lat coords for autosetting
                            map map_region (not really working).
      overlay               [True] will overlay the trajectory
                            on top of another map instance.
      draw_circles          [True] will mark the trajectory with
                            circles. If [False] nothing is drawn.
      draw_labels           [True] will draw the 'day' numbers on the
                            trajectories.
      days_back             For how many days back should the labels be
                            shown? [20]
      cbar2                 [True] draws the scale bar as a second axis.
      cbar2_title           Optional argument to overide the cbar title.
      MapPar                A Structure of mapping parameters to pass
                            to the basemap instance if desired.
      =============         =============================================

    .. todo::
        Who knows?! Could probably handle the overlaying better.

    .. note::
        Just set H to "None" if you haven't already created a "Header" instance.


    """

    if H:
        pass

    # # Set up the FIGURE
    if FIGURE is None:
        FIGURE = mp.get_FIGURE(map_region=map_region, projection=projection,
                               coords=coords, MapPar=MapPar)
    # #Get fig info and make active
    fig = FIGURE.fig
    m = FIGURE.m
    ax = FIGURE.fig.axes[0]
    plt.figure(fig.number)
    plt.axes(ax)

    # # prepare the data
    trjs = T['Trajectories']
    rel = rel_i + 1  # # account for zero indexing

    # #extract only releases of interest
    t = trjs[np.where(trjs[:, 0] == rel), :][0]

    # # Get the data for the days_back we're interested in
    day_labels = _gen_daylabels(t[:days_back, 1])
    lon = t[:days_back, 2]
    lat = t[:days_back, 3]
    zlevel = t[:days_back, 4]
    zsize = np.ones(len(lon)) * 50
    marker = 'o'


    # # clear the previous track
    if 'circles' in FIGURE.keys():
        del FIGURE['circles']
    if overlay is False:
        del ax.collections[FIGURE.indices.collections:]
        del ax.texts[FIGURE.indices.texts:]

    # # plot the track
    cx, cy = m(lon, lat)
    if draw_circles:
        cmap = plt.get_cmap('gist_gray')
        circles = m.scatter(cx, cy, zsize, zlevel, cmap=cmap,
                            marker=marker, edgecolor=None,
                            zorder=10, alpha=0.85)

        try:
            FIGURE.circles = circles
        except:
            pass
    # # make the figure active again
    plt.figure(fig.number)
    # # draw the legend and title
    # # CREATE COLORBAR
    # # Get the current axes, and properties for use later
    ax0 = fig.axes[1]
    pos = ax0.get_position()
    l, b, w, h = pos.bounds
    if cbar2:
        try:
            cb2 = FIGURE.cb2
            cax2 = FIGURE.cax2
            cb2.update_normal(circles)
        except:
            cax2 = plt.axes([l + w + 0.08, b, 0.02, h])
            # # using im2, not im (hack to prevent colors from being
            # # too compressed at the low end on the colorbar - results
            # # from highly nonuniform colormap)
            cb2 = fig.colorbar(circles, cax=cax2)  # , format='%3.2g') # draw colorbar
            FIGURE.cax2 = cax2
            FIGURE.cb2 = cb2
        # # check if a colorbar legend exists (this will cause
        # # problems for subplot routines!)
        p_leg = mpl.font_manager.FontProperties(size='6')
        if cbar2_title:
            # BUGFIX: this previously used the undefined name 'cax'
            cax2.set_title(cbar2_title, fontproperties=p_leg)
        else:
            cax2.set_title('altitude\n(m)', fontproperties=p_leg)
    else:
        # BUGFIX: the old else-branch built a colorbar from an undefined
        # 'jnkmap' object (NameError); with cbar2 disabled we simply skip
        # colorbar creation.
        cax2 = None
        cb2 = None

    plt.axes(ax)
    plt.setp(ax, xticks=[], yticks=[])

    if draw_labels:
        p_cax = mpl.font_manager.FontProperties(size='10',
                                                style='italic',
                                                weight='bold',
                                                )



        # only label points that fall inside the current map window
        for i, (p, x, y) in enumerate(zip(day_labels, cx, cy)):
            if x > m.llcrnrx and x < m.urcrnrx:
                if y > m.llcrnry and y < m.urcrnry:
                    ax.text(x, y + y * .02, '{0}'.format(p), va='bottom', ha='left',
                            fontproperties=p_cax, zorder=11,
                            color='white', bbox=dict(facecolor='green', alpha=0.5)
                            )

    FIGURE.fig = fig
    FIGURE.m = m
    FIGURE.ax = ax
    FIGURE.cax2 = cax2
    FIGURE.cb2 = cb2
    return FIGURE
3614 
3615 
def plot_at_level(H, D, level=1, \
                  ID=' ', \
                  map_region=5, projection='lcc', \
                  overlay=False,
                  datainfo_str=None, log=True,
                  data_range=None, coords=None, FIGURE=None,
                  plot_title=None,
                  units=None,
                  **kwargs
                  ):
    """Plot a sensitivity field at a given vertical level.

    Thin wrapper around :func:`plot_sensitivity` that extracts the slab
    for ``level`` from the grid instance ``D`` and builds default title
    and scale-bar strings from the header ``H``.

    TODO:
    -make units a function of H['species']
    """
    if units is None:
        units = H.output_unit
    if D is None:
        D = H.D
    rel_i = D.rel_i
    species = D.species
    timestamp = D.timestamp
    data = D.slabs[level]

    # level 1 is the surface layer, conventionally labelled the footprint
    if level == 1:
        level_desc = 'Footprint'
    else:
        level_desc = H.outheight[level - 1]

    dmax = data.max()
    dmin = data.min()

    if data_range is None:
        data_range = [dmin, dmax]

    if H.direction == 'backward' and H.options['readp']:
        zp1 = H['zpoint1'][rel_i]
        zp2 = H['zpoint2'][rel_i]
        if datainfo_str is None:
            # need to find a way to set m.a.s.l. or hPa here
            datainfo_str = """ Max Value: %.2g %s\n Release Z1: %.2f, Z2: %.2f (%s)\n""" \
                           % (dmax, units, zp1, zp2, H.alt_unit)
        if plot_title is None:
            plot_title = """
        %s Sensitivity at %s %s: %s\n
        Release Start: %s, Release End: %s""" % \
                (ID, level_desc, H['alt_unit'], species, H['releasestart'][rel_i], H['releaseend'][rel_i])
    else:
        if datainfo_str is None:
            datainfo_str = """ Max Value: %.2g %s """ % (dmax, units)
        if plot_title is None:
            plot_title = """ %s Sensitivity at %s %s: %s \n %s """ % (ID, level_desc, H['alt_unit'], species, timestamp)

    FIGURE = plot_sensitivity(H, data, \
                              data_range=data_range, \
                              rel_i=rel_i, log=log,
                              map_region=map_region, projection=projection,
                              units=units, datainfo_str=datainfo_str,
                              overlay=overlay,
                              coords=coords, FIGURE=FIGURE, **kwargs)


    FIGURE.ax.set_title(plot_title, fontsize=10)
    return FIGURE

# plot_footprint is simply plot_at_level with the default (surface) level
plot_footprint = plot_at_level
3682 
def plot_totalcolumn(H, D=None, \
                     ID=' ', \
                     map_region=5, projection='lcc', \
                     data_range=None, coords=None,
                     FIGURE=None, overlay=False,
                     datainfo_str=None, **kwargs):
    """Plot the total-column sensitivity (slab 0) of a grid instance.

    Thin wrapper around :func:`plot_sensitivity`. If ``D`` is None the
    grid at key (0, 0) of ``H.D`` is used. An optional ``units`` keyword
    is honoured; the default is 'ns m kg-1'.
    """

    if D is None:
        D = H.D[(0, 0)]

    if 'units' in kwargs:
        units = kwargs.pop('units')
    else:
        units = 'ns m kg-1'

    rel_i = D.rel_i
    species = D.species
    timestamp = D.timestamp
    data = D.slabs[0]


    if data_range is None:
        dmax = data.max()
        dmin = data.min()
        data_range = [dmin, dmax]
    else:
        # BUGFIX: removed stray trailing comma in the unpacking target
        dmin, dmax = data_range


    if H.direction == 'backward':
        rel_i = D.rel_i
        zp1 = H['zpoint1'][rel_i]
        zp2 = H['zpoint2'][rel_i]
        if datainfo_str is None:
            datainfo_str = """ Max Value: %.2g %s\n Release Z1: %.2f, Z2: %.2f (%s)\n""" % \
                           (dmax, units, zp1, zp2, H.alt_unit)
        plot_title = """
        %s Total Column Sensitivity: %s\n
        Release Start: %s, Release End: %s""" % \
            (ID, species, H['releasestart'][rel_i], H['releaseend'][rel_i])
    else:
        if datainfo_str is None:
            datainfo_str = """ Max Value: %.2g %s""" % (dmax, units)
        plot_title = """
        %s Total Column Sensitivity: %s\n %s """ % (ID, species, timestamp)


    FIGURE = plot_sensitivity(H, data, \
                              data_range=data_range, \
                              rel_i=rel_i, map_region=map_region, \
                              projection=projection, units=units, \
                              datainfo_str=datainfo_str, coords=coords, \
                              FIGURE=FIGURE, overlay=overlay, **kwargs)

    FIGURE.ax.set_title(plot_title, fontsize=10)

    return FIGURE
3743 
3744 
def plot_sourcecontribution(H, D,
                            rel_i=None, s=None,
                            ID=' ',
                            map_region='POLARCAT',
                            data_range=None, coords=None,
                            datainfo_str=None,
                            units=None,
                            FIGURE=None,
                            overlay=False,
                            cax_title=None,
                            **kwargs):
    """ plot_sourcecontribution: a wrapper around :func:`plot_sensitivity` for
    plotting a source contribution field.

    .. warning:
        This function is still quite finicky, I'm working on resolving it.

    Usage::

        > FIG = plot_sourcecontribution(H,D,*kwargs)

    Inputs
        H = a :class:`Header` instance from a FLEXPART run
        D = a grid instance from a FLEXDATA dictionary. See :func:`readgridV8`

        .. note::
            I am trying to create this so D can be a number of different types
            from a 2-d array to the grid instance type.


    Returns
      A "mapping.py" ``FIGURE`` object.

    Arguments

      .. tabularcolumns::  |l|L|

      =============         ================================================
      keyword               Description [default]
      =============         ================================================
      ID                    Can be passed to prefix the title.
      data_range            range of data for scale bars, if None will
                            be taken from min/max of data
      datainfo_str          A string for labeling the scale bar.
      cax_title             A string to pass to plot_sensitivity for colorbar
                            title (units will be passed as format argument)
      k                     Release index to plot from the data array (aka rel_i)
      s                     Species index
      map_region            A map_region specified in mapping.py
      projection            [deprecated] use pre-defined map_regions.
      overlay               Force removal of previous figure elements.
      FIGURE                A FIGURE instance from mapping module get_FIGURE
      =============         ================================================

    .. todo::
        A lot!! There are some problems here and it is sensitive to options.

    .. note::
        k and rel_i are used throughout pflexible, should be more consistent in future.


    """
    # accept several input types for D: None, list, ndarray, or FLEXDATA dict
    if D is None:
        D = H.D[(0, 0)]
        data = D.slabs[1]
    elif isinstance(D, list):
        data = D[0]
    elif isinstance(D, np.ndarray):
        data = D
    else:
        try:
            D = D[(s, rel_i)]
            data = D.slabs[0]  # take total column
        except:
            raise IOError("Unrecognized 'grid' type passed")

    if s is None:
        try:
            species = D.species
        except:
            species = 'unknown'
    else:
        species = H.species[s]

    try:
        timestamp = D.timestamp
    except:
        if rel_i is None:
            try:
                timestamp = D.timestamp
                rel_i = D.rel_i
            except:
                raise IOError("Either a release index (rel_i) must be passed, \
                       or the grid must have a timestamp attribute")
        else:
            timestamp = H.releasetimes[rel_i]

    if units is None:
        units = 'emission units'

    dmax = data.max()
    dmin = data.min()

    if data_range is None:
        data_range = [dmin, dmax]

    if H.direction == 'backward':
        zp1 = H['zpoint1'][rel_i]
        zp2 = H['zpoint2'][rel_i]
        if datainfo_str is None:
            datainfo_str = """ Max Value: %.2g %s\n Release Z1: %.2f , Z2: %.2f (%s)\n""" % \
                           (dmax, units, zp1, zp2, H.alt_unit)
        plot_title = """
        %s Total Column Sensitivity: %s\n
        Release Start: %s, Release End: %s""" % \
            (ID, species, H['releasestart'][rel_i], H['releaseend'][rel_i])
    else:
        if datainfo_str is None:
            # BUGFIX: the forward branch previously referenced zp1/zp2,
            # which are only defined in the backward branch (NameError);
            # release heights are not meaningful here anyway.
            datainfo_str = """ Max Value: %.2g %s\n""" % (dmax, units)
        plot_title = """
        %s Total Column Sensitivity: %s\n %s""" % (ID, species, timestamp)

    FIGURE = plot_sensitivity(H, data, \
                              data_range=data_range, \
                              rel_i=rel_i, map_region=map_region, \
                              units=units, \
                              datainfo_str=datainfo_str, coords=coords, \
                              FIGURE=FIGURE,
                              cax_title=cax_title,
                              overlay=overlay, **kwargs)
    FIGURE.ax.set_title(plot_title, fontsize=10)
    return FIGURE
3878 
3879 def plot_sensitivity(H, data, \
3880  data_range=None, \
3881  units='ns m^2 / kg', \
3882  datainfo_str=None, \
3883  plottitle=None, \
3884  rel_i=None,
3885  map_region=None, projection=None,
3886  dropm=None, coords=None,
3887  overlay=False,
3888  transform=True,
3889  log=True,
3890  FIGURE=None,
3891  MapPar=None,
3892  FigPar=None,
3893  cax_title=None,
3894  autofit=False, method='contourf', lsmask=False):
3895  # autofit=False,method='imshow'):
3896  """ plot_sensitivity: core function for plotting FLEXPART output.
3897 
3898  Usage::
3899 
3900  > FIG = plot_sensitivity(H,data,*kwargs)
3901 
3902  This returns the FIGURE object, and plots the sensitivity from the data contained in the "D"
3903  array.
3904 
3905  Inputs
3906  H = a :class:`Header` instance for a FLEXPART run.
3907  data = a 2d data array containing the sensitivity values to plot, this can be extracted from a
3908  grid instance (see :func:`readgridV8` and :func:`get_slabs`)
3909 
3910  Returns
3911  A "mapping.py" ``FIGURE`` object.
3912 
3913  Arguments
3914 
3915  .. tabularcolumns:: |l|L|
3916 
3917  ============= ================================================
3918  keyword Description [default]
3919  ============= ================================================
3920  data_range range of data for scale bars, if None will
3921  be taken from min/max of data
3922  cax_title string to be passed as colorbar title (units will
3923  be passed to the format argument)
3924  units units for the scale bar
3925  datainfo_str A string for labeling the scale bar.
3926  plottitle Title for the plot.
3927  rel_i Release index to plot from the data array
3928  map_region A map_region specified in mapping.py
3929  projection [deprecated] use pre-defined map_regions.
3930  dropm Force creation of a new basemap instance
3931  coords Used with autofit option. An array of lat,lon
3932  values for the mapping module to autofit a
3933  basemap instance to.
3934  autofit Try to generate map_region automagically (flakey)
3935  overlay Force removal of previous figure elements.
3936  transform For use with imshow method, if your data is not
3937  in same coordinates as projection, try to transform
3938  the data to the basemap projection.
3939  log Create a logarithmic color scale.
3940  FIGURE A FIGURE instance from mapping module get_FIGURE
3941  MapPar A Structure of paramters to be passed to the
3942  basemap class when creating an instance.
3943  method The method to use for plotting array data. May be
3944  one of: [pcolormesh], imshow, or contourf
3945  lsmask set to True to draw a grey landseamask [False]
3946  ============= ================================================
3947 
3948  .. todo::
3949  A lot!! There are some problems here and it is sensitive to options.
3950  lsmask = True seems to only work with certain projections (POLARCAT)
3951 
3952  .. note::
3953  This is the primary main function for creating plots of flexpart output.
3954  Most the other routines are simply wrappers to this function, passing
3955  arguments in with some predefined settings. For more information on the
3956  mechanics of this function, see the mapping.py module and the matplotlib
3957  basemap toolkit.
3958 
3959 
3960  """
3961 
3962 
3963 
3964  methods = ['imshow', 'pcolormesh', 'contourf', 'contour', 'None']
3965  assert method in methods, "method keyword must be one of: %s" % methods
3966 
3967  if FIGURE is None:
3968  if map_region is None:
3969  if MapPar is None:
3970  if autofit:
3971  MapPar = _gen_MapPar_fromHeader(H)
3972 
3973  FIGURE = mp.get_FIGURE(map_region=map_region,
3974  projection=projection, coords=coords,
3975  MapPar=MapPar, FigPar=FigPar)
3976  else:
3977  if FIGURE.m is None:
3978  FIGURE = mp.get_FIGURE(fig=FIGURE.fig, ax=FIGURE.ax, map_region=map_region,
3979  projection=projection, coords=coords,
3980  MapPar=MapPar, FigPar=FigPar)
3981 
3982  if overlay is False:
3983  del FIGURE.ax.images[FIGURE.indices.images:]
3984  del FIGURE.ax.collections[FIGURE.indices.collections:]
3985  del FIGURE.ax.lines[FIGURE.indices.lines:]
3986 
3987  if dropm != None:
3988  try:
3989  del m
3990  plt.close('all')
3991  except:
3992  print 'could not drop m'
3993 
3994  # # make tick lables smaller
3995  mpl.rcParams['xtick.labelsize'] = 6
3996  mpl.rcParams['ytick.labelsize'] = 6
3997 
3998  fig = FIGURE.fig
3999  m = FIGURE.m
4000  ax = FIGURE.ax
4001 
4002  # # make the figure current
4003  plt.figure(fig.number)
4004  plt.axes(ax)
4005 
4006  # # set up transformations for the data array
4007  if method == 'imshow':
4008  if m.projection not in ['cyl', 'merc', 'mill']:
4009  lats = np.arange(H.outlat0, (H.outlat0 + (H.numygrid * H.dyout)), H.dyout)[:-1]
4010  lons = np.arange(H.outlon0, (H.outlon0 + (H.numxgrid * H.dxout)), H.dxout)[:-1]
4011  data = data[:-1, :-1]
4012  else:
4013  lats = np.arange(H.outlat0, (H.outlat0 + (H.numygrid * H.dyout)), H.dyout)
4014  lons = np.arange(H.outlon0, (H.outlon0 + (H.numxgrid * H.dxout)), H.dxout)
4015 
4016  # # transform to nx x ny regularly spaced native projection grid
4017  if transform:
4018  dx = 2.*np.pi * m.rmajor / len(lons)
4019  nx = int((m.xmax - m.xmin) / dx) + 1; ny = int((m.ymax - m.ymin) / dx) + 1
4020  if nx is 1:
4021  topodat = data
4022  else:
4023  topodat = m.transform_scalar(data, lons, lats, nx, ny)
4024  else:
4025  topodat = data
4026 
4027  if method != 'imshow':
4028  # # Check to see if a cyclic wraparound is required
4029 
4030  lons = np.arange(H.outlon0, H.outlon0 + (H.dxout * H.numxgrid), H.dxout)
4031  lats = np.arange(H.outlat0, H.outlat0 + (H.dyout * H.numygrid), H.dyout)
4032  # # if required add polar coordinates
4033  if 'npstere' in m.projection:
4034  if lats[-1] != 90.:
4035  npole = np.ones(len(lons)).T * data[0, :]
4036  npole = np.reshape(npole, (1, -1))
4037  data = np.vstack((npole, data))
4038  lats = np.hstack((lats, [lats[-1] + H.dyout]))
4039 
4040  if m.projection != 'merc':
4041  if lons[-1] - lons[0] < 360.:
4042  topodat, lons = addcyclic(data, lons)
4043 
4044  if m.projection == 'merc':
4045  topodat = data
4046 
4047  # # get min/max range
4048  if data_range != None:
4049  dat_min = data_range[0]
4050  dat_max = data_range[1]
4051  else:
4052  dat_min, dat_max = data_range(data)
4053 
4054 
4055  if log:
4056  clevs = _log_clevs(dat_min, dat_max)
4057 
4058  else:
4059  clevs = [i for i in np.arange(dat_min, dat_max, (dat_max - dat_min) / 100)]
4060 
4061  # # draw land sea mask
4062  # m.fillcontinents(zorder=0)
4063  if lsmask:
4064  m.drawlsmask(ocean_color='grey', zorder= -10)
4065 
4066  # # Plot Release Location if points were read
4067  if H.options['readp']:
4068  if rel_i:
4069  releaselocation = (H.xpoint[rel_i], H.ypoint[rel_i])
4070  xpt, ypt = m(releaselocation[0], releaselocation[1])
4071  # # Remove prior location point
4072  try:
4073  del ax.lines[-1]
4074  except:
4075  pass
4076  location, = m.plot([xpt], [ypt], 'bx', linewidth=6, markersize=20, zorder=1000)
4077 
4078  # # Plot the footprint
4079 
4080  # # Set up the IMAGE
4081  # # cmapnames = ['jet', 'hsv', 'gist_ncar', 'gist_rainbow', 'cool', 'spectral']
4082  # colmap = plt.get_cmap('jet')
4083  colmap = _gen_flexpart_colormap()
4084  colmap.set_over(color='k', alpha=0.8)
4085  # # Plotting METHODS (pcolormesh now default, imshow is smoother)
4086  # print topodat.max(), topodat.min(), topodat.shape
4087  if method == 'imshow':
4088  im = m.imshow(topodat, cmap=colmap, zorder= -1,
4089  norm=mpl.colors.LogNorm(vmin=clevs[0],
4090  vmax=clevs[-1]))
4091 
4092  if method == 'pcolormesh':
4093  nx, ny = m(*np.meshgrid(lons, lats))
4094  im = m.pcolormesh(nx, ny, topodat, cmap=colmap,
4095  norm=mpl.colors.LogNorm(vmin=clevs[0],
4096  vmax=clevs[-1]))
4097  if method == 'contourf':
4098  # # Trying some fancier scaling
4099  # cnts,bins = np.histogram(topodat,bins=100)
4100  # topodat = np.ma.masked_where(topodat< .05* np.average((0,bins[1])),topodat)
4101  nx, ny = m(*np.meshgrid(lons, lats))
4102  im = m.contourf(nx, ny, topodat, cmap=colmap, levels=clevs,
4103  norm=mpl.colors.LogNorm(vmin=clevs[0],
4104  vmax=clevs[-1]))
4105 
4106  if method == 'contour':
4107  nx, ny = m(*np.meshgrid(lons, lats))
4108  im = m.contour(nx, ny, topodat, cmap=colmap,
4109  norm=mpl.colors.LogNorm(vmin=clevs[0],
4110  vmax=clevs[-1]))
4111 
4112  # # Get the current axes, and properties for use later
4113  pos = ax.get_position()
4114  l, b, w, h = pos.bounds
4115 
4116  # # CREATE COLORBAR
4117  # # Note, with upgrades to matplotlib and basemap had to make some
4118  # # changes here... no more 'ghost' axes
4119  # # does a colorbar already exist?
4120  try:
4121  cb = FIGURE.cb
4122  cax = FIGURE.cax
4123  cb.update_normal(im)
4124  except:
4125  # # make a copy of the image object, change
4126  # # colormap to linear version of the precip colormap.
4127  # pdb.set_trace()
4128  # im2 = copy.copy(im)
4129  # im2.set_cmap(colmap)
4130  # # create new axis for colorbar.
4131  h = 0.5 * h
4132  l = l + w + .03
4133  b = 0.5 - (h / 2)
4134  w = 0.025
4135  cax = plt.axes([l, b, w, h])
4136  # # using im2, not im (hack to prevent colors from being
4137  # # too compressed at the low end on the colorbar - results
4138  # # from highly nonuniform colormap)
4139  cb = fig.colorbar(im, cax=cax) # , format='%3.2g') # draw colorbar
4140  FIGURE.cax = cax
4141  FIGURE.cb = cb
4142  # cb.update_normal(im2)
4143 
4144 
4145 
4146 
4147  # # set colorbar label and ticks
4148  # pdb.set_trace()
4149  p_cax = mpl.font_manager.FontProperties(size='6')
4150  clabels = list(clevs[::10]) # #clevs, by 10 steps
4151  clabels.append(clevs[-1]) # # add the last label
4152  # cax.set_yticks(np.linspace(clabels[0],clabels[-1],len(clabels)))
4153  cax.set_yticks(np.linspace(0, 1, len(clabels)))
4154  cax.set_yticklabels(['%3.2g' % cl for cl in clabels])
4155  # fontproperties=p_cax)
4156  if H.direction == 'forward':
4157  cax.set_title('%s' % units,
4158  fontproperties=p_cax)
4159  else:
4160  if cax_title:
4161  cax.set_title(cax_title.format(units), fontproperties=p_cax)
4162  else:
4163  cax.set_title('sensitivity\n({0})'.format(units),
4164  fontproperties=p_cax)
4165 
4166  # # make the original axes current again
4167  plt.axes(ax)
4168 
4169  # # write text information block on plot
4170  # # first try to remove prior text by accessing
4171  # # the last text element added to the axes from
4172  # # the prior iteration.
4173  # # This is tricky when using together with plot_clusters...
4174  # # need to figure out how to resolve the indexing
4175  # # of what texts, collections, etc to delete, when iterating.
4176  try:
4177  del ax.texts[FIGURE.indices.texts:]
4178  del ax.artists[FIGURE.indices.artists:]
4179  except:
4180  pass
4181  if datainfo_str:
4182  plt.text(l, b + 1000,
4183  datainfo_str,
4184  fontsize=10,
4185  bbox=dict(boxstyle="round",
4186  ec=(1., 0.5, 0.5),
4187  fc=(1., 0.8, 0.8),
4188  alpha=0.8
4189  )
4190  )
4191 
4192  FIGURE.ax = ax
4193  FIGURE.m = m
4194  FIGURE.fig = fig
4195 
4196  if plottitle != None:
4197  # plt.title(plottitle,fontproperties=p_cax)
4198  # plt = plt
4199  FIGURE.ax.set_title(plottitle, fontsize=10)
4200  return FIGURE
4201 
4202 
def plot_curtain(H, data,
                 nx=None,
                 ny=None,
                 data_range=None,
                 units='ppbv',
                 datainfo_str=None,
                 asl=True,
                 plottitle=None,
                 log=True,
                 FIGURE=None,
                 cax_title=None,
                 method='contourf',
                 figPar=None):
    """ plot_curtain: plot a vertical curtain of FLEXPART output.

    Usage::

        > FIG = plot_curtain(H,data,*kwargs)

    Returns the FIGURE object with the curtain plotted from the 2d "data"
    array.

    Inputs
      H = a :class:`Header` instance for a FLEXPART run.
      data = a 2d data array containing the values to plot.
      nx, ny = coordinate arrays for the pcolormesh/contourf/contour methods.

    Returns
      A ``FIGURE`` :class:`Structure`.

    Arguments

      .. tabularcolumns::  |l|L|

      =============         ================================================
      keyword               Description [default]
      =============         ================================================
      data_range            range of data for scale bars, if None will
                            be taken from min/max of data
      asl                   [True] plot the units in m.a.s.l (currently unused)
      cax_title             string to be passed as colorbar title (units will
                            be passed to the format argument)
      units                 units for the scale bar
      datainfo_str          A string for labeling the scale bar.
      plottitle             Title for the plot.
      log                   Create a logarithmic color scale.
      FIGURE                A FIGURE instance from mapping module get_FIGURE
      method                The method to use for plotting array data. May be
                            one of: [contourf], pcolormesh, imshow, contour
      figPar                dict of keyword arguments passed to plt.figure()
      =============         ================================================

    """
    methods = ['imshow', 'pcolormesh', 'contourf', 'contour', 'None']
    assert method in methods, "method keyword must be one of: %s" % methods

    if FIGURE is None:
        FIGURE = Structure()

    # BUGFIX: plt.figure(**None) raised TypeError when figPar was left at
    # its default; substitute an empty kwargs dict.
    fig = plt.figure(**(figPar or {}))
    ax = fig.add_subplot(111)

    FIGURE['fig'] = fig
    FIGURE['ax'] = ax

    fig = FIGURE.fig
    ax = FIGURE.ax

    # # make the figure current
    plt.figure(fig.number)
    plt.axes(ax)

    # # get min/max range
    if data_range is not None:
        dat_min = data_range[0]
        dat_max = data_range[1]
    else:
        # BUGFIX: the keyword `data_range` shadows the module-level
        # data_range() helper, so the original code called None(data) and
        # raised TypeError whenever the keyword was omitted.  Replicate the
        # helper's default behaviour (min = median of non-zero values,
        # max = nanmax) inline instead.
        dat_max = np.nanmax(data)
        if np.isnan(dat_max):
            dat_max = 1e5
        dat_min = np.median(data[data.nonzero()])
        if np.isnan(dat_min):
            dat_min = 1e-5

    if log:
        clevs = _log_clevs(dat_min, dat_max)
    else:
        clevs = [i for i in np.arange(dat_min, dat_max, (dat_max - dat_min) / 100)]

    # # Set up the IMAGE colormap
    colmap = _gen_flexpart_colormap()
    colmap.set_over(color='k', alpha=0.8)
    topodat = data

    if method == 'imshow':
        im = plt.imshow(np.flipud(topodat), cmap=colmap, zorder= -1,
                        norm=mpl.colors.LogNorm(vmin=clevs[0],
                                                vmax=clevs[-1]))

    if method == 'pcolormesh':
        im = plt.pcolormesh(nx, ny, topodat, cmap=colmap,
                            norm=mpl.colors.LogNorm(vmin=clevs[0],
                                                    vmax=clevs[-1]))
    if method == 'contourf':
        im = plt.contourf(nx, ny, topodat, cmap=colmap, levels=clevs,
                          norm=mpl.colors.LogNorm(vmin=clevs[0],
                                                  vmax=clevs[-1]))

    if method == 'contour':
        im = plt.contour(nx, ny, topodat, cmap=colmap,
                         norm=mpl.colors.LogNorm(vmin=clevs[0],
                                                 vmax=clevs[-1]))

    # # Get the current axes, and properties for use later
    pos = ax.get_position()
    l, b, w, h = pos.bounds

    # # CREATE COLORBAR
    # # reuse the colorbar on the FIGURE Structure if one exists.
    # # Structure.__getattr__ raises KeyError for missing keys, hence the
    # # (KeyError, AttributeError) pair instead of a bare except.
    try:
        cb = FIGURE.cb
        cax = FIGURE.cax
        cb.update_normal(im)
    except (KeyError, AttributeError):
        # # create new axis for colorbar to the right of the plot axes.
        h = 0.8 * h
        l = l + w + .02
        b = 0.5 - (h / 2)
        w = 0.025
        cax = plt.axes([l, b, w, h])
        cb = fig.colorbar(im, cax=cax)  # , format='%3.2g') # draw colorbar
        FIGURE.cax = cax
        FIGURE.cb = cb

    # # set colorbar label and ticks
    p_cax = mpl.font_manager.FontProperties(size='6')
    clabels = list(clevs[::10])  # #clevs, by 10 steps
    clabels.append(clevs[-1])  # # add the last label
    cax.set_yticks(np.linspace(0, 1, len(clabels)))
    cax.set_yticklabels(['%3.2g' % cl for cl in clabels])

    if cax_title:
        cax.set_title(cax_title.format(units), fontproperties=p_cax)
    else:
        cax.set_title('sensitivity\n({0})'.format(units),
                      fontproperties=p_cax)

    # # make the original axes current again
    plt.axes(ax)
    plt.grid(True)
    FIGURE.ax = ax
    FIGURE.fig = fig

    if plottitle is not None:
        FIGURE.ax.set_title(plottitle, fontsize=10)

    return FIGURE
4398 
4399 
def plot_METDATA(METDATA, FIGURE, date=None, level=None):
    """ plots met data returned from :module:`pflexible.mapping.get_OpenDAP`

    METDATA : Structure/dict with 'extracted_dates', 'prmslmsl', 'ugrdprs',
              'vgrdprs', 'longitudes', 'latitudes' keys.
    FIGURE  : FIGURE Structure carrying fig, m (basemap instance), and ax.
    date    : if given, plot the fields for that date; otherwise the first.
    level   : unused; reserved for future vertical-level selection.

    Returns the updated FIGURE Structure.
    """
    fig = FIGURE.fig
    m = FIGURE.m
    ax = FIGURE.ax

    # # make the figure current
    plt.figure(fig.number)
    plt.axes(ax)

    datelabels = METDATA['extracted_dates']
    slpin = METDATA['prmslmsl']['data'] * .01  # Pa -> hPa
    uin = METDATA['ugrdprs']['data']
    vin = METDATA['vgrdprs']['data']
    longitudes = METDATA['longitudes']
    latitudes = METDATA['latitudes']

    if date:
        cnt = datelabels.index(date)
    else:
        cnt = 0
    # # FOR OPENDAP OVERLAY: select the time slice
    slp_ = slpin[cnt, :, :]
    u_ = uin[cnt, :, :]
    v_ = vin[cnt, :, :]

    # plot wind vectors on projection grid (looks better).
    # first, shift grid so it goes from -180 to 180 (instead of 0 to 360
    # in longitude). Otherwise, interpolation is messed up.
    if longitudes[-1] - longitudes[0] < 360.:
        longarray = np.array(longitudes)
        slp, cyclon = addcyclic(slp_, longarray)
        u, cyclon = addcyclic(u_, longarray)
        v, cyclon = addcyclic(v_, longarray)
        ugrid, newlons = shiftgrid(180., u, cyclon, start=False)
        vgrid, newlons = shiftgrid(180., v, cyclon, start=False)
        slpgrid, newlons = shiftgrid(180., slp, cyclon, start=False)
    else:
        # BUGFIX: these names were undefined (NameError) whenever the grid
        # already spanned the full globe; use the raw fields directly.
        newlons = np.array(longitudes)
        ugrid, vgrid, slpgrid = u_, v_, slp_

    # transform vectors to projection grid.
    lons, lats = np.meshgrid(newlons, latitudes)
    # set desired contour levels (sea-level pressure, hPa).
    clevs = np.arange(960, 1061, 5)
    # compute native x,y coordinates of grid.
    x, y = m(lons, lats)

    CS = m.contour(x, y, slpgrid, clevs, linewidths=1, colors='k', animated=True)
    # plt.clabel(CS,inline=1,fontsize=10)
    # plot wind vectors over map on a 51x51 transformed grid.
    urot, vrot, xx, yy = m.transform_vector(ugrid, vgrid, newlons, latitudes, 51, 51,
                                            returnxy=True, masked=True)
    Q = m.quiver(xx, yy, urot, vrot, scale=500)
    # # make quiver key.
    qk = plt.quiverkey(Q, 0.1, 0.1, 20, '20 m/s', labelpos='W')

    FIGURE.ax = ax
    FIGURE.m = m
    FIGURE.fig = fig

    return FIGURE
4470 
4471 
4472 
4473 
4474 
4475 
4476 
4477 
4478 
4479 
4480 
4481 
4482 ########### HELPER FUNCTIONS ##########
4483 
class BinaryFile(object):

    """
    BinaryFile: A class for accessing data to/from large binary files.

    The data is meant to be read/written sequentially from/to a binary file.
    One can request to read a piece of data with a specific type and shape
    from it.  Also, it supports the notion of Fortran and C ordered data,
    so that the returned data is always well-behaved (C-contiguous and
    aligned).

    This class is seeking capable.

    :Author: Francesc Alted
    :Contact: faltet@pytables.org
    :Created: 2010-03-18
    :Acknowledgment: Funding for the development of this code is provided
    through the Norwegian Research Council VAUUAV project #184724, 2010

    """

    # Common types whose conversion can be accelerated via the struct
    # module (numpy dtype string -> struct format character).
    structtypes = {
        'i1': 'b', 'i2': 'h', 'i4': 'i',
        'f4': 'f', 'f8': 'd',
        }

    def __init__(self, filename, mode="r", order="fortran"):
        """Open the `filename` for write/read binary files.

        The `mode` can be 'r', 'w' or 'a' for reading (default),
        writing or appending.  The file will be created if it doesn't
        exist when opened for writing or appending; it will be
        truncated when opened for writing.  Add a '+' to the mode to
        allow simultaneous reading and writing.

        `order` specifies whether the file is written in 'fortran'
        (column-major) or 'c' (row-major) order.

        Raises ValueError for an unrecognized `order`.
        """
        # 'b' forces binary mode regardless of platform.
        self.mode = mode + "b"
        self.file = open(filename, mode=self.mode, buffering=1)
        """The file handler."""
        if order not in ['fortran', 'c']:
            raise ValueError, "order should be either 'fortran' or 'c'."
        self.order = order
        """The order for file ('c' or 'fortran')."""

    def read(self, dtype, shape=(1,)):
        """Read an array of `dtype` and `shape` from current position.

        `shape` must be any tuple made of integers or even () for scalars.

        The current position will be updated to point to the end of
        read data.

        Raises EOFError when fewer than the requested bytes remain.
        """
        if not isinstance(dtype, np.dtype):
            dtype = np.dtype(dtype)
        # Accept a bare int as shorthand for a 1-d shape.
        if type(shape) is int:
            shape = (shape,)
        if type(shape) is not tuple:
            raise ValueError, "shape must be a tuple"
        # Total byte count to read: itemsize times number of elements.
        length = dtype.itemsize
        rank = len(shape)
        if rank == 1:
            length *= shape[0]
        elif rank > 1:
            length *= np.array(shape).prod()

        # Correct the shape in case dtype is multi-dimensional
        if shape != (1,):
            shape = shape + dtype.shape
        else:
            shape = dtype.shape
        rank = len(shape)

        # Trivial (single-element) reads ignore the file's storage order.
        if shape in (1, (1,)):
            order = "c"
        else:
            order = self.order

        # Read the data from file
        data = self.file.read(length)
        if len(data) < length:
            raise EOFError, "Asking for more data than available in file."

        # Convert read string into a regular array, or scalar
        dts = dtype.base.str[1:]
        if rank == 0:
            # Scalars: strings pass through; simple numerics go via struct
            # for speed (see `structtypes` above).
            if dts[1] == "S":
                data = str(data)
            elif dts in self.structtypes:
                data = struct.unpack(self.structtypes[dts], data)[0]
        else:
            # Zero-copy view over the read buffer with the base dtype.
            data = np.ndarray(shape=shape, buffer=data, dtype=dtype.base)
            if rank == 0:
                # Retrieve the scalar out of the 0-dim array
                data = data[()]

        if rank > 1:
            # If original data file is in fortran mode, reverse the
            # shape first
            if order == "fortran":
                shape = [i for i in shape[::-1]]
            data = data.reshape(shape)
            # If original data file is in fortran mode, do a transpose.
            # As the shape was reversed previously, we get the original
            # shape again.
            if self.order == "fortran":
                data = data.transpose().copy()
            # Do an additional copy just in case the array is not
            # well-behaved (i.e., it is not aligned or not contiguous).
            elif not data.flags.behaved:
                data = data.copy()
        return data

    def write(self, arr):
        """Write an `arr` to current position.

        The current position will be updated to point to the end of
        written data.
        """
        # Transpose data in case the array layout and the file order differ.
        if (self.order == "fortran") != (arr.flags.fortran):
            arr = arr.transpose().copy()
        # Write the data to file
        self.file.write(arr.data)

    def seek(self, offset, whence=0):
        """Move to new file position.

        Argument offset is a byte count.  Optional argument whence
        defaults to 0 (offset from start of file, offset should be >=
        0); other values are 1 (move relative to current position,
        positive or negative), and 2 (move relative to end of file,
        usually negative, although many platforms allow seeking beyond
        the end of a file).  If the file is opened in text mode, only
        offsets returned by tell() are legal.  Use of other offsets
        causes undefined behavior.
        """
        self.file.seek(offset, whence)

    def tell(self):
        "Returns current file position, an integer (may be a long integer)."
        return self.file.tell()

    def flush(self):
        "Flush buffers to file."
        self.file.flush()

    def close(self):
        "End access to file."
        self.file.close()
4646 
4647 
def closest(num, numlist):
    """ returns the index of the *closest* value in a list """
    # Datetimes are converted to matplotlib date numbers before comparing;
    # mixing a datetime `num` with a non-datetime `numlist` is an error.
    if isinstance(num, datetime.datetime):
        num = date2num(num)
        assert isinstance(numlist[0], datetime.datetime), \
            "num is date, numlist must be a list of dates"
        numlist = date2num(numlist)

    distances = np.abs(numlist - num)
    return distances.argmin()
4661 
4662 
4663 
class Structure(dict, object):
    """ A 'fancy' dictionary that provides 'MatLab' structure-like
    referencing.

    .. warning::
        may be replaced with a pure dict in future release.

    """
    def __getattr__(self, attr):
        # Fake a __getstate__ method that returns None
        if attr == "__getstate__":
            return lambda: None
        # NOTE(review): a missing key raises KeyError here rather than the
        # AttributeError callers might expect; left as-is since existing
        # code (e.g. the colorbar try/except blocks) relies on it.
        return self[attr]

    def __setattr__(self, attr, value):
        # Attribute assignment stores into the underlying dict.
        self[attr] = value

    def __dir__(self):
        """ necessary for Ipython tab-completion """
        # BUGFIX: this method was defined twice; the duplicate (which lacked
        # the docstring) has been removed.
        return self.keys()

    def set_with_dict(self, D):
        """ set attributes with a dict """
        for k in D.keys():
            self.__setattr__(k, D[k])
4692 
class Header(Structure):
    """This is the primary starting point for processing FLEXPART output.
    The Header class ( :class:`Structure` ) behaves like a dictionary.
    It contains all the metadata from the simulation run as read from the
    "header" or "header_nest" binary files from the model output.

    This version is using the BinaryFile class rather than FortFlex.

    Usage::

        > H = pf.Header(inputpath)
        > H.keys() #provides a list of keys available

    Returns a dictionary

        H = dictionary like object with all the run metadata. TODO: Fill in keys.


    Arguments

      .. tabularcolumns::  |l|L|

      ==============        ========================================
      keyword               Description [default]
      ==============        ========================================
      path                  path to the run directory
      headerfile            name of the header file if non standard
      readheader_ops        optional dictionary to pass readheader
      ==============        ========================================

    Arguments for readheader_ops

      .. tabularcolumns::  |l|L|

      =============       ========================================
      keyword             Description [default]
      =============       ========================================
      pathname            FLEXPART run output directory
      readp               read release points 0=no, [1]=y
      nested              nested output True or [False]
      version             version of FLEXPART, default = 'V8'
      =============       ========================================


    .. note::
        **This function is in development**

        This function is being developed so that there is no dependence on
        using f2py to compile the FortFlex module. It is working using the
        :class:`BinaryFile`, but is notably slower than :class:`FortFlex`.
        Please report any bugs found.


    """

    def __init__(self, path=None, headerfile=None, version='V8', **readheader_ops):
        """Read the FLEXPART header from `path` and populate the Structure.

        Raises IOError when the header cannot be read.
        """
        try:
            h = read_header(path, **readheader_ops)
            self.set_with_dict(h)
            self.lonlat()
            # BUGFIX: previously hard-coded to 'V8', silently ignoring the
            # caller-supplied `version` argument.
            self.version = version
        except:
            traceback.print_exc()
            raise IOError('''
            Could not set header variables.
            Does the `header` file exist in path?\n{0}'''.format(path))

    def lonlat(self):
        """ Add longitude and latitude attributes using data from header """
        lons = np.arange(self.outlon0, self.outlon0 + (self.dxout * self.numxgrid), self.dxout)
        lats = np.arange(self.outlat0, self.outlat0 + (self.dyout * self.numygrid), self.dyout)
        self.longitude = lons
        self.latitude = lats

    def read_grid(self, **kwargs):
        """ see :func:`read_grid` """
        self.FD = read_grid(self, **kwargs)

    def fill_backward(self, **kwargs):
        """ see :func:`fill_backward` """
        fill_grids(self, add_attributes=True, **kwargs)

    def add_trajectory(self, **kwargs):
        """ see :func:`read_trajectories` """
        self.trajectory = read_trajectories(self)

    def add_fires(self, **kwargs):
        """ uses the :mod:`emissions` module to read the MODIS hotspot data and
        add it to the header class as a 'fires' attribute.

        **This function is only available within NILU.**

        """
        from nilu.pflexpart import emissions as em
        self.fires = None
        for day in self.available_dates_dt:
            firedata = em.MODIS_hotspot(day)
            daily = firedata.daily
            if daily is None:
                continue
            if self.fires is None:
                self.fires = daily
            else:
                # accumulate daily recarrays into one view
                self.fires = np.hstack((self.fires, daily)).view(np.recarray)

    def closest_dates(self, dateval, fmt=None, take_set=False):
        """ given an iterable of datetimes, finds the closest dates.
        if passed a list, assumes it is a list of datetimes

        if take_set=True, then a set of unique values will be returned.
        This can be used with H.read_grid as the value for time_ret to
        return only the grids matching the array of times.
        See (e.g. `extract_curtain`).
        """
        try:
            vals = [closest(d, self['available_dates_dt']) for d in dateval]
            if take_set:
                return list(set(vals))
            return vals
        # BUGFIX: a non-iterable dateval raises TypeError, which the
        # original IOError-only clause never caught.
        except (IOError, TypeError):
            print('If dateval is iterable, must contain datetimes')

    def closest_date(self, dateval, fmt=None):
        """ given a datestring or datetime, tries to find the closest date.
        if passed a list, assumes it is a list of datetimes

        """
        if isinstance(dateval, str):
            if not fmt:
                # BUGFIX: the original used two independent `if` tests, so a
                # valid 8-character datestring set fmt and then still hit the
                # second test's else-branch and raised; use if/elif/else.
                if len(dateval) == 8:
                    fmt = '%Y%m%d'
                elif len(dateval) == 14:
                    fmt = '%Y%m%d%H%M%S'
                else:
                    raise IOError("no format provided for datestring")
                print("Assuming date format: {0}".format(fmt))
            dateval = datetime.datetime.strptime(dateval, fmt)

        return closest(dateval, self['available_dates_dt'])
4850 
def data_range(data, min='median'):
    """
    return a data range for flexpart data

    optional keyword min = ['median', 'mean', 'min']
    """
    # Upper bound: largest finite value, with a fallback for all-NaN input.
    dmax = np.nanmax(data)
    if np.isnan(dmax):
        dmax = 1e5

    # Lower bound: a statistic of the non-zero values only.
    nonzero_vals = data[data.nonzero()]
    if min == 'mean':
        dmin = np.mean(nonzero_vals)
    elif min == 'median':
        dmin = np.median(nonzero_vals)
    else:
        dmin = np.nanmin(nonzero_vals)

    if np.isnan(dmin):
        dmin = 1e-5

    return [dmin, dmax]
4872 def _normdatetime(Y, M, D, h, m, s):
4873  return datetime.datetime(Y, M, D) + datetime.timedelta(hours=h, minutes=m, seconds=s)
4874 
4875 def _log_clevs(dat_min, dat_max):
4876  """
4877  ## create logorithmic color scale
4878  ## method uses strings to get the magnitude of variable
4879  #ss = "%1.2e" % (dat_max)
4880  #dmx = int('%s%s' % (ss[-3],int(ss[-2:])))
4881  #dmx=float(len(str(int(np.ceil(dat_max)))))
4882  #if data_range is None:
4883  # dmn = int(np.round(np.log((1e-10*(dat_max-dat_min)))))
4884  # ss = "%1.2e" % (1e-10*(dat_max-dat_min))
4885  #else:
4886  # ss = "%1.2e" % (dat_min)
4887  #dmn = int('%s%s' % (ss[-3],int(ss[-2:])))
4888  """
4889 
4890  if dat_max > 0:
4891  dmx = int(np.round(np.log10(dat_max))) + 1
4892  else:
4893  print 'dat_max not positive'
4894  print dat_max
4895  dmx = 1
4896 
4897  if dat_min > 0:
4898  dmn = int(np.round(np.log10(dat_min)))
4899  elif dat_min == 0. or np.isnan(dat_min):
4900  print 'dat_min not positive'
4901  print dat_min
4902  dmn = dmx - 3
4903 
4904 
4905  # # create equally spaced range
4906  if dmx == dmn:
4907  dmx = dmn + 1
4908  clevs = np.logspace(dmn, dmx, 100)
4909 
4910  return clevs
4911 
4912 
4913 
4914 
def add_nilu_logo(fig, xo=10, yo=520, origin='upper'):
    """ Adds NILU logo to background of plot """
    logo = image.imread('/xnilu_wrk/flex_wrk/WEB_APS/imgs/logo_nilu.png')
    # make the logo half-transparent via its alpha channel
    logo[:, :, -1] = 0.5
    fig.figimage(logo, xo, yo, origin=origin)
    return fig
4921 
4922 def _cum_spec(inspectra, cum=True):
4923  """ helper function for the plot_spectra routines """
4924 
4925  if cum == 'norm':
4926  # # Normalize the data so it fills to 100%
4927  # spectra = np.zeros(inspectra.shape)
4928  spectra = (inspectra.transpose() / np.sum(inspectra, axis=1)).transpose()
4929  spectra = np.cumsum(spectra[:, :], axis=1)
4930  # sums = np.sum(inspectra,axis=1)
4931  # for i,elem in enumerate(inspectra):
4932  # spectra[i,:] = elem/sums[i]
4933  elif cum is True and cum != 'norm':
4934  spectra = np.cumsum(inspectra[:, :], axis=1)
4935  else:
4936  spectra = inspectra
4937 
4938  return spectra
4939 
4940 def _getfile_lines(infile):
4941  """ returns all lines from a file or file string
4942  reverts to beginning of file."""
4943 
4944  if isinstance(infile, str):
4945  return file(infile, 'r').readlines()
4946  else:
4947  infile.seek(0)
4948  return infile.readlines()
4949 
4951  """
4952  Define some default map parameters from the Header File.
4953  """
4954 
4955  MapPar = Structure()
4956  MapPar.llcrnrlat = H.outlat0
4957  MapPar.llcrnrlon = H.outlon0
4958  MapPar.urcrnrlat = H.outlat0 + (H.dyout * H.numygrid)
4959  MapPar.urcrnrlon = H.outlon0 + (H.dxout * H.numxgrid - 1)
4960  for k in MapPar.keys():
4961  print k, MapPar[k]
4962 
4963  return MapPar
4964 
4965 
def _gen_altitude_color(p, altlim=(0, 5000), numcon=1, cmap_id='jet'):
    """
    Generates color based on altlim = (min,max)
    """
    # Map altitude p linearly onto [0,1], then look it up in the colormap.
    lo, hi = altlim[0], altlim[1]
    normalize = mpl.colors.Normalize(vmin=lo, vmax=hi)
    colormap = plt.cm.get_cmap(cmap_id)
    return colormap(normalize(p))
4976 
4977 
4978 def _gen_daylabels(P, H=None, dt=86400):
4979  """
4980  Uses H.loutstep to calculate 'days back' for plotting on clusters and trajectories.
4981  """
4982  if isinstance(P, int):
4983 
4984  if H:
4985  dt = abs(H.loutstep)
4986  return str(1 + int(abs(P)) / dt)
4987  else:
4988  return [str(1 + int(abs(p)) / dt) for p in P]
4989 
4990 
4991 
4992 def _datarange(H, G, index=None):
4993  """ returns max and min for footprint and total column
4994  for a given range of releases [default is all] """
4995  Heightnn = H['Heightnn']
4996  fpmax = -999.
4997  if index == None:
4998  seek = range(G.shape[-1])
4999  else:
5000  seek = [index]
5001 
5002  for i in seek:
5003  zpmax = np.max(G[:, :, 0, i] / Heightnn[:, :, 0])
5004  if zpmax > fpmax:
5005  fpmax = zpmax
5006  # print fpmax
5007  tcmax = np.max(G)
5008  return ((0, fpmax), (0, tfcmax))
5009 
5010 
5011 
def _genEllipse(data, m, sizescale=20000,
                altlim=None):
    """
    Generates ellipses based on input array 'data'. Requires basemap
    instance, 'm'. NOTE::

        data = [itime,x,y,z,[size]]

        r,c = data.shape

        if c == 5:
            size function of data[:,4]
        else:
            size = 1*sizescale

    uses functions:

        * _gen_altitude_color
        * _gen_daylabels

    for labeling/color of ellipses

    """
    nrows, ncols = data.shape
    if altlim == None:
        altlim = (np.min(data[:, 3]), np.max(data[:, 3]))

    has_size = (ncols == 5)
    ell = []
    for p in data:
        # per-point diameter: explicit size column if present, else fixed
        if has_size:
            diameter = p[4] * sizescale
        else:
            diameter = 1e4 * sizescale
        patch = Ellipse(xy=np.array(m(p[1], p[2])),
                        width=diameter, height=diameter,
                        angle=360,
                        facecolor=_gen_altitude_color(p[3], altlim=altlim, cmap_id='gray'),
                        label=_gen_daylabels(p[0]))
        ell.append((patch, np.array(m(p[1], p[2]))))

    return ell
5057 
5058 
5059 def _shout(string, verbose=True):
5060  """ write a string to stdout if 'verbose' flag given """
5061  w = sys.stdout.write
5062  if string[-1] != '\n':
5063  string = string + '\n'
5064  if verbose:
5065  w(string)
5066 
5067 
def _gen_flexpart_colormap(ctbfile=None, colors=None):
    """
    Generate the ast colormap for FLEXPART

    ctbfile : optional path to a whitespace-delimited RGB table loaded via
              np.loadtxt.
    colors  : optional explicit color sequence (flat RGB list or Nx3 array).

    Falls back to the built-in AST color set when neither is usable.
    """
    from matplotlib.colors import ListedColormap
    if ctbfile:
        try:
            colors = np.loadtxt(ctbfile)
        except Exception:
            print("WARNING: cannot load ctbfile. using colors")
    # BUGFIX: `if colors:` raised "truth value of an array is ambiguous"
    # whenever np.loadtxt succeeded (colors is then an ndarray); compare
    # against None instead.
    if colors is not None:
        name = 'user_colormap'
    else:
        # # AST Colorset for FLEXPART
        colors = [1.0000000e+00, 1.0000000e+00, 1.0000000e+00
                , 9.9607843e-01, 9.1372549e-01, 1.0000000e+00
                , 9.8431373e-01, 8.2352941e-01, 1.0000000e+00
                , 9.6470588e-01, 7.1764706e-01, 1.0000000e+00
                , 9.3333333e-01, 6.0000000e-01, 1.0000000e+00
                , 8.9019608e-01, 4.4705882e-01, 1.0000000e+00
                , 8.3137255e-01, 2.0000000e-01, 1.0000000e+00
                , 7.5686275e-01, 0.0000000e+00, 1.0000000e+00
                , 6.6274510e-01, 0.0000000e+00, 1.0000000e+00
                , 5.4901961e-01, 0.0000000e+00, 1.0000000e+00
                , 4.0784314e-01, 0.0000000e+00, 1.0000000e+00
                , 2.4705882e-01, 0.0000000e+00, 1.0000000e+00
                , 7.4509804e-02, 0.0000000e+00, 1.0000000e+00
                , 0.0000000e+00, 2.8235294e-01, 1.0000000e+00
                , 0.0000000e+00, 4.8627451e-01, 1.0000000e+00
                , 0.0000000e+00, 6.3137255e-01, 1.0000000e+00
                , 0.0000000e+00, 7.4509804e-01, 1.0000000e+00
                , 0.0000000e+00, 8.4705882e-01, 1.0000000e+00
                , 0.0000000e+00, 9.3725490e-01, 1.0000000e+00
                , 0.0000000e+00, 1.0000000e+00, 9.7647059e-01
                , 0.0000000e+00, 1.0000000e+00, 8.9411765e-01
                , 0.0000000e+00, 1.0000000e+00, 8.0000000e-01
                , 0.0000000e+00, 1.0000000e+00, 6.9019608e-01
                , 0.0000000e+00, 1.0000000e+00, 5.6470588e-01
                , 0.0000000e+00, 1.0000000e+00, 4.0000000e-01
                , 0.0000000e+00, 1.0000000e+00, 0.0000000e+00
                , 3.9607843e-01, 1.0000000e+00, 0.0000000e+00
                , 5.6470588e-01, 1.0000000e+00, 0.0000000e+00
                , 6.9019608e-01, 1.0000000e+00, 0.0000000e+00
                , 7.9607843e-01, 1.0000000e+00, 0.0000000e+00
                , 8.9411765e-01, 1.0000000e+00, 0.0000000e+00
                , 9.7647059e-01, 1.0000000e+00, 0.0000000e+00
                , 1.0000000e+00, 9.4509804e-01, 0.0000000e+00
                , 1.0000000e+00, 8.7450980e-01, 0.0000000e+00
                , 1.0000000e+00, 7.9215686e-01, 0.0000000e+00
                , 1.0000000e+00, 7.0588235e-01, 0.0000000e+00
                , 1.0000000e+00, 6.0392157e-01, 0.0000000e+00
                , 1.0000000e+00, 4.8235294e-01, 0.0000000e+00
                , 1.0000000e+00, 3.1372549e-01, 0.0000000e+00
                , 1.0000000e+00, 0.0000000e+00, 1.4901961e-01
                , 1.0000000e+00, 0.0000000e+00, 3.3333333e-01
                , 1.0000000e+00, 0.0000000e+00, 4.4705882e-01
                , 1.0000000e+00, 0.0000000e+00, 5.3725490e-01
                , 1.0000000e+00, 0.0000000e+00, 6.1176471e-01
                , 9.7647059e-01, 0.0000000e+00, 6.6666667e-01
                , 8.9411765e-01, 0.0000000e+00, 6.6666667e-01
                , 7.9607843e-01, 0.0000000e+00, 6.3921569e-01
                , 6.9019608e-01, 0.0000000e+00, 5.9215686e-01
                , 5.6470588e-01, 0.0000000e+00, 5.0980392e-01
                , 3.9607843e-01, 0.0000000e+00, 3.8039216e-01]
        colors = np.reshape(colors, (-1, 3))
        name = 'flexpart_cmap'
    cmap = ListedColormap(colors, name)
    return cmap
5136 
5137 
5138 
5139 #### Below for use from command line ##########################################
5140 
def main ():
    """Command-line driver: read a FLEXPART run, plot the first slab, save a PNG.

    Relies on the module-level `options` set by the optparse block below,
    and on the `mp` (mapping) module whose import is commented out at the
    top of the file.
    NOTE(review): `mp.plot_map` will raise NameError unless that
    `import mapping as mp` line is restored -- confirm before use.
    """
    global options, args
    here = os.path.curdir
    dataDir = options.pathname
    H = read_header(dataDir, **{'readp':1})  # readheader and release points
    G = readgridV8(H, **{'unit':'time'})
    D = get_slabs(H, G)  # list of 2d slabs extracted from the grid

    # NOTE(review): this local `plt` shadows the module-level pyplot alias.
    plt = mp.plot_map(D[0], H)
    plot_name = 'test'
    plt.title(plot_name, fontsize=10)
    plt.savefig(os.path.join(here, plot_name + '.png'))
    print plot_name
5156 
if __name__ == '__main__':
    # CLI entry: parse options, optionally configure LaTeX-style fonts,
    # run main(), and report wall-clock timing when --verbose is given.
    # NOTE: Python 2 `except E, e` syntax throughout this block.
    print 'testing'
    try:
        start_time = time.time()
        parser = optparse.OptionParser(
                formatter=optparse.TitledHelpFormatter(),
                usage=globals()['__doc__'],
                version='$Id: py.tpl 327 2008-10-05 23:38:32Z root $')
        parser.add_option ('-v', '--verbose', action='store_true',
                default=False, help='verbose output')
        parser.add_option ('-T', '--latex', action='store_true',
                default=False, help='latex usage flag')
        parser.add_option ('-d', '--directory', dest='pathname',
                default=False, help='pathname of directory containg FLEXPART Output')
        (options, args) = parser.parse_args()
        # if len(args) < 1:
        #    parser.error ('missing argument')
        if options.verbose: print time.asctime()

        # #### LateX: use a sans-serif TeX font setup #####
        if options.latex:
            mpl.rc('font', **{'family':'sans-serif',
                              'sans-serif':['Helvetica']}
                   )

            mpl.rc('text', usetex=True)

        exit_code = main()
        if exit_code is None:
            exit_code = 0
        if options.verbose: print time.asctime()
        if options.verbose: print 'TOTAL TIME IN MINUTES:',
        if options.verbose: print (time.time() - start_time) / 60.0
        sys.exit(exit_code)
    except KeyboardInterrupt, e:  # Ctrl-C: re-raise so the user sees it
        raise e
    except SystemExit, e:  # sys.exit(): propagate the chosen exit code
        raise e
    except Exception, e:
        # Anything else: report the traceback and hard-exit with failure.
        print 'ERROR, UNEXPECTED EXCEPTION'
        print str(e)
        traceback.print_exc()
        os._exit(1)
5200 
5201 # vim:set sr et ts=4 sw=4 ft=python fenc=utf-8: // See Vim, :help 'modeline'
5202 
def _plot_dropm
FLEXPART Plotting Functions ########.
Definition: pflexible.py:2622
def main
Below for use from command line ##########################################.
Definition: pflexible.py:5141
def _readgrid_noFF
Grid Reading Routines ###############.
Definition: pflexible.py:1336
def read_command
Functions for reading FLEXPART output #####.
Definition: pflexible.py:90
HELPER FUNCTIONS ##########.
Definition: pflexible.py:4484