7 pflexible [-h] [-v,--verbose] [--version]
12 pflexible: A python module for working with FLEXPART Output.
20 A lot! This is just a starting point. See the doc strings
21 for information about the various functions.
27 JFB: John F Burkhart <jfburkhart@gmail.com>
36 Many functions are adaptations of Fortran / NCARG programs (AST)
37 or Matlab functions (HSO/SEC).
42 This script follows creative commons usage.
59 from math
import pi, sqrt, cos
68 import matplotlib
as mpl
70 import matplotlib.pyplot
as plt
71 from matplotlib.dates
import date2num
72 import matplotlib.image
as image
73 from matplotlib.patches
import Ellipse
76 from mpl_toolkits.basemap
import shiftgrid, addcyclic
78 from matplotlib.toolkits.basemap
import shiftgrid, addcyclic
85 __path__ = os.path.abspath(os.curdir)
92 Reads a FLEXPART COMMAND file.
95 Assumes releases file has a header of 7 lines. Use
96 option "headerrows" to override this.
100 > A = read_command(filepath)
102 where filepath is either a file object or a path.
106 a dictionary with the following keys
108 ================ =================================================================
110 ================ =================================================================
111 SIM_DIR Simulation direction
112 SIM_START Text str of YYYYMMDD HHMMSS
113 SIM_END Text str of YYYYMMDD HHMMSS
114 AVG_CNC_INT Average concentrations are calculated every SSSSS seconds
115 AVG_CNC_TAVG The average concentrations are time averages of SSSSS sec
116 CNC_SAMP_TIME The concentrations are sampled every SSSSS seconds to
117 calculate the time average concentration.
118 T_PARTSPLIT Time constant for particle splitting.
119 SYNC All processes are synchronized with this time interval
121 IFINE IFINE=Reduction factor for time step used for vertical wind
122 IOUT IOUT determines how the output shall be made: concentration
123 (ng/m3, Bq/m3), mixing ratio (pptv), or both, or plume
124 trajectory mode, or concentration + plume trajectory mode.
125 IPOUT IPOUT determines whether particle positions are outputted
126 (in addition to the gridded concentrations or mixing ratios)
127 or not. 0=no output, 1 output every output interval, 2 only
128 at end of the simulation
129 LSUBGRID Switch on/off subgridscale terrain parameterization
130 (increase of mixing heights due to subgridscale orog. var
131 LCONVECTION Switch on/off the convection parameterization
132 LAGESPECTRA Switch on/off the calculation of age spectra: if yes, the
133 file AGECLASSES must be available
134 IPIN If IPIN=1, a file "partposit_end" from a previous run must
135 be available in the output directory. Particle positions
136 are read in and previous simulation is continued. If
137 IPIN=0, no particles from a previous run are used
138 IOFR Switch on/off writing out each release.
139 IFLUX If IFLUX is set to 1, fluxes of each species through each
140 of the output boxes are calculated. Six fluxes,
141 corresponding to northward, southward, eastward, westward,
142 upward and downward are calculated for each grid cell of
143 the output grid. The control surfaces are placed in the
144 middle of each output grid cell. If IFLUX is set to 0,
145 no fluxes are determined.
146 MDOMAINFILL If MDOMAINFILL is set to 1, the first box specified in file
147 RELEASES is used as the domain where domain-filling
148 trajectory calculations are to be done. Particles are
149 initialized uniformly distributed (according to the air mass
150 distribution) in that domain at the beginning of the
151 simulation, and are created at the boundaries throughout
152 the simulation period
153 IND_SOURCE IND_SOURCE switches between different units for
154 concentrations at the source. NOTE that in backward
155 simulations the release of computational particles takes
156 place at the "receptor" and the sampling of particles at
157 the "source". 1=mass units (for bwd-runs = concentration)
158 2=mass mixing ratio units
160 IND_RECEPTOR IND_RECEPTOR switches between different units for
161 concentrations at the receptor 1=mass units (concentrations)
162 2=mass mixing ratio units
164 MQUASILAG MQUASILAG indicates whether particles shall be numbered
165 consecutively (1) or with their release location number (0).
166 The first option allows tracking of individual particles
167 using the partposit output files
169 NESTED_OUTPUT NESTED_OUTPUT decides whether model output shall be made
170 also for a nested output field (normally with higher resolution)
171 LINIT_COND For Backward Runs, sets initial conditions:
172 [0]=No, 1=Mass Unit, 2=Mass Mixing
174 ================ =================================================================
177 lines = file(path,
'r').readlines()
178 command_vals = [i.strip() for i
in lines[headerrows:]]
196 'OUTPUTFOREACHRELEASE',
205 date_keys = [
'SIM_START',
'SIM_END']
209 for i, key
in enumerate(COMMAND_KEYS):
210 val = command_vals[i].split()
213 elif key
in float_keys:
226 Reads a FLEXPART releases file.
229 Assumes releases file has a header of 11 lines. Use
230 option "headerrows" to override this.
234 > A = read_releases(filepath)
236 where filepath is either a file object or a path.
240 a record array with fields:
242 ============ ==========================
244 ============ ==========================
245 start_time datetime start
246 end_time datetime end
247 lllon lower left longitude
248 llat lower left latitude
249 urlon upper right longitude
250 urlat upper right latitude
251 altunit 1=magl, 2=masl, 3=hPa
255 mass mass for each spec, so the
256 array will actually have
257 fields: mass0, mass1,..
258 id ID for each release
259 ============ ==========================
263 lines = [i.strip()
for i
in lines]
266 nspec = int(lines[headerrows])
267 blocklength = headerrows + nspec
269 for i
in range(nspec):
270 spec.append(int(lines[headerrows + 1 + i]))
271 indx = headerrows + 1 + (i + 2)
273 for i
in range(indx, len(lines), blocklength + 1):
274 blocks.append(lines[i:i + blocklength])
276 b[0] = datetime.datetime.strptime(b[0],
'%Y%m%d %H%M%S')
277 b[1] = datetime.datetime.strptime(b[1],
'%Y%m%d %H%M%S')
278 b[2:6] = [np.float(i)
for i
in b[2:6]]
283 for i
in range(nspec):
284 b[10 + i] = float(b[10 + i])
288 names = [
'start_time',
'end_time',
'lllon',
'lllat',
'urlon',
'urlat', \
289 'altunit',
'elv1',
'elv2',
'numpart']
292 for i
in range(nspec):
293 names.append(
'mass%s' % i)
300 return np.rec.fromrecords(blocks, names=names)
307 Reads the trajectories.txt file in a FLEXPART run output directory.
309 Based on output from plumetraj.f::
311 ********************************************************************************
313 * Determines a plume centroid trajectory for each release site, and manages *
314 * clustering of particle locations. Certain parameters (average PV, *
315 * tropopause height, etc., are provided along the plume trajectories. *
316 * At the end, output is written to file 'trajectories.txt'. *
323 * fclust fraction of particles belonging to each cluster *
324 * hmixcenter mean mixing height for all particles *
325 * ncluster number of clusters to be used *
326 * pvcenter mean PV for all particles *
327 * pvfract fraction of particles with PV<2pvu *
328 * rms total horizontal rms distance after clustering *
329 * rmsdist total horizontal rms distance before clustering *
330 * rmsclust horizontal rms distance for each individual cluster *
331 * topocenter mean topography underlying all particles *
332 * tropocenter mean tropopause height at the positions of particles *
333 * tropofract fraction of particles within the troposphere *
334 * zrms total vertical rms distance after clustering *
335 * zrmsdist total vertical rms distance before clustering *
336 * xclust,yclust, Cluster centroid positions *
339 ********************************************************************************
343 > T = read_trajectories(H_OR_path_to_directory, **kwargs)
346 The first argument is either a path to a trajectory file, or simply a :class:`Header`
349 Returns a dictionary of the trajectories for each release.
351 .. tabularcolumns:: |l|L|
353 ============= ========================================
355 ============= ========================================
356 Trajectories array_of_floats(j,it1,xi,yi,zi,topoi,hmixi,tropoi,pvi,
357 rmsdisti,rmsi,zrmsdisti,zrmsi,hfri,pvfri,trfri,
358 (xclusti(k),yclusti(k),zclusti(k),fclusti(k),rmsclusti(k),k=1,5))
359 RELEASE_ID 1array(i1,i2,xp1,yp1,xp2,yp2,zp1,zp2,k,npart)
360 numpspec number of species
361 numageclass number of ageclasses
362 ============= ========================================
367 .. tabularcolumns:: |l|L|
369 ============= ========================================
370 keyword Description [default]
371 ============= ========================================
372 trajfile over the name of the input
373 file ['trajectories.txt']
374 ncluster number of clusters [5]
375 ageclasses number of ageclasses [20]
376 ============= ========================================
380 if isinstance(H, str):
382 alltraj = file(H,
'r').readlines()
384 raise IOError(
'Could not open file: %s' % H)
387 alltraj = file(os.path.join(path, trajfile),
'r').readlines()
391 ibdate, ibtime, model, version = alltraj[0].strip().split()[:4]
393 ibdate, ibtime = alltraj[0].strip().split()[:2]
396 dt = datetime.datetime.strptime(ibdate + ibtime.zfill(6),
'%Y%m%d%H%M%S')
397 numpoint = int(alltraj[2].strip())
403 for i
in range(3, 3 + (numpoint * 2), 2):
404 i1, i2, xp1, yp1, xp2, yp2, zp1, zp2, k, npart , = \
405 tuple([float(j)
for j
in alltraj[i].strip().split()])
406 itimerel1 = dt + datetime.timedelta(seconds=i1)
407 itimerel2 = dt + datetime.timedelta(seconds=i2)
411 RelTraj[alltraj[i + 1].strip()] = np.array((itimerel1, itimerel2, Xp, Yp, Zp, k, npart))
413 for i
in range(3 + (numpoint * 2), len(alltraj)):
415 FMT = [0, 5, 8, 9, 9, 8, 8, 8, 8, 8, 8, 8, 8, 8, 6, 6, 6] + ncluster * [8, 8, 7, 6, 8]
417 data = [raw[sum(FMT[:ii]):sum(FMT[:ii + 1])]
for ii
in range(1, len(FMT) - 1)] + \
418 [raw[sum(FMT[:-1]):]]
421 data = [float(r.replace(
'********',
'NaN'))
for r
in data]
423 Trajectories.append(data)
425 data = np.array(Trajectories)
426 RelTraj[
'version'] = model +
' ' + version
428 RelTraj[
'Trajectories'] = data
429 RelTraj[
'labels'] = \
430 [
'release number',
'seconds prior to release',
'lon',
'lat',
'height',
'mean topography', \
431 'mean mixing height',
'mean tropopause height',
'mean PV index', \
432 'rms distance',
'rms',
'zrms distance',
'zrms', \
433 'fraction height??',
'fraction PV<2pvu',
'fraction in troposphere'] + \
434 ncluster * [
'xcluster',
'ycluster',
'zcluster',
'fcluster',
'rmscluster']
437 Returns a dictionary:
438 R['Trajectories'] = array_of_floats(
439 releasenum,it1,xi,yi,zi,topoi,hmixi,tropoi,pvi,
440 rmsdisti,rmsi,zrmsdisti,zrmsi,hfri,pvfri,trfri,
441 (xclusti(k),yclusti(k),zclusti(k),fclusti(k),rmsclusti(k),k=1,5))
443 R['RELEASE_ID'] = (dt_i1,dt_i2,xp1,yp1,xp2,yp2,zp1,zp2,k,npart)
444 R['info'] = this message
446 To plot a trajectory timeseries:
447 RT = read_trajectories(H)
448 T = RT['Trajectories']
450 t = T[np.where(T[:,0]==rel),:][0]
451 plt.plot(t[:,1],t[:,14])
452 plt.savefig('trajectories.png')
459 extracts curtain data along a given flight track
461 input: H (a pf.Header with a FD data object from H.read_grid)
462 if the read_grid method has not been called, we'll call
466 (a numpy array with datetime, lon, lat, elv values)
468 nspec: is optional for reference to the FD dict.
470 index: is optional in case numpointspec > 1
472 get_track: if True extracts the points along the flight
473 track rather than the curtain.
475 output: curtain (a 2-d array with shape (len(flighttrack), len(H.outheight)
478 add interpolation, extract a track, not curtain (e.g. z points)
483 if H.direction ==
'forward':
488 flighttimes = flighttrack[:, 0]
489 time_ret = H.closest_dates(flighttimes, take_set=
True)
490 H.read_grid(nspec_ret=nspec, time_ret=time_ret,
491 npspec_int=npspec_int)
493 elif H.direction ==
'backward':
500 curtain = np.zeros(len(flighttrack))
502 curtain = np.zeros((len(flighttrack), len(H.outheight)))
504 for i, (t, lon, lat, elv)
in enumerate(flighttrack):
509 idx = H.closest_date(t)
510 datestring = H.available_dates[idx]
511 if H.direction ==
'forward':
512 grid = H.FD[(nspec, datestring)][
'grid']
513 grid = grid[:, :, :, index]
515 elif H.direction ==
'backward':
516 grid = H.C[(0, idx)][
'grid']
523 curtain[i] = grid[I, J, K]
525 curtain[i] = grid[I, J, :]
531 extracts curtain data from a grid given pairs of lon, lat
533 input: H (a pf.Header with a FD data object from H.read_grid)
534 if the read_grid method has not been called, we'll call
538 (a numpy array with datetime, lon, lat, elv values)
540 nspec: is optional for reference to the FD dict.
542 index: is optional in case numpointspec > 1
544 get_track: if True extracts the points along the flight
545 track rather than the curtain.
547 output: curtain (a 2-d array with shape (len(flighttrack), len(H.outheight)
550 add interpolation, extract a track, not curtain (e.g. z points)
553 if len(grid.shape) == 4:
554 grid = grid[:, :, :, index]
558 curtain = np.zeros((len(coords), grid.shape[2]))
559 for i, (x, y)
in enumerate(coords):
563 curtain[i] = grid[I, J, :]
564 curtain = np.nan_to_num(curtain)
571 extracts ground level from H.heightnn along a track of lon, lat
573 input: H or H.heightnn (takes the lowest level)
575 X, Y ,= H.longitude, H.latitude
579 output: groundlevel (a 1-d array with shape (len(flighttrack)
584 hgt = H.Heightnn[:, :, 0]
589 hgt = hgt - hgt.min()
591 grndlvl = np.zeros(len(coords))
593 for i, (x, y)
in enumerate(coords):
597 grndlvl[i] = hgt[I, J]
598 grndlvl = np.nan_to_num(grndlvl)
600 return grndlvl - grndlvl.min()
604 """ converts the agl curtain to asl
606 adds .asl_axis attribute to H
611 H.asl_axis = np.linspace(0, H.outheight[-1])
613 xp = H.outheight - H.outheight[0]
615 casl = np.zeros((len(H.asl_axis), len(coords)))
617 for i
in xrange(len(coords)):
618 casl[:, i] = np.interp(H.asl_axis, xp + gl[i], \
619 curtain_agl[:, i], left=below_gl)
626 Reads the spectrum.txt files generated from the "make_webpages" scripts.
629 This functionality depends on having run the make_webpages scripts
630 in-house. It is not supported in the public API.
634 > A = read_agespectrum(filename)
636 Returns a dictionary containing ageclass information
638 .. tabularcolumns:: |l|L|
640 ============= ========================================
642 ============= ========================================
644 (dt, lat, lon, alt, tracer[:numageclass]
646 ageclasses 1d-array(ageclass ages)
647 numpspec number of species
648 numageclass number of ageclasses
649 part if it is from make webpages without the
651 ndays The number of ageclasses
652 ============= ========================================
656 f = file(filename,
'r').readlines()
658 line1 = f[0].strip().split()
661 ageclasses = np.arange(1, ndays * 1) * 60 * 60 * 24
664 numageclass = int(line1[0])
665 ageclasses = np.array([int(i)
for i
in line1[1:]])
666 numspec = int(f[1].strip().split()[0])
670 line = line.strip().split()
672 dt = datetime.datetime.strptime(line[0] + line[1].zfill(6),
'%Y%m%d%H%M%S')
673 data = [float(i)
for i
in line[2:]]
675 data = [data[1]] + [data[0]] + data[2:]
676 D.append([dt] + data)
679 A[
'numageclass'] = numageclass
680 A[
'ageclasses'] = ageclasses
681 A[
'numspec'] = numspec
682 A[
'filename'] = filename
685 A dictionary containing ageclass information:
687 agespectrum = 2d-array(dt, lon, lat, alt, tracer[:numageclass] for each release)
688 ageclasses = 1d-array(ageclass ages)
689 numpspec = number of species
690 numageclass = number of ageclasses
697 header=
'## AGECLASS File'):
698 """ Save an ageclass or continents spectrum to an outfile. """
700 if spectype ==
'agespec':
704 spectrum = agespectra
705 nClasses = spectrum.shape[1]
711 spectrum = agespectra
714 nClasses = agespectra.shape[1] - 1
715 spectrum = agespectra[:, 1:]
716 elif spectype ==
'contspec':
721 spectrum = agespectra
722 nClasses = spectrum.shape[1]
723 header =
'## Continental Spectrum File'
729 spectrum = agespectra
732 nClasses = agespectra.shape[1] - 1
733 spectrum = agespectra[:, 1:]
736 T = np.reshape(T, (len(T), 1))
737 xout = np.hstack((T, spectrum))
738 fmt =
'%s ' + nClasses *
'%10.5f '
739 outf.write(
"%s \n" % (header))
740 np.savetxt(outf, xout, fmt=fmt)
747 """returns an array of area corresponding to each nx,ny,nz
755 OUT = array area corresponding to nx,ny,nz
758 H = :class:`Header` object from readheader function.
765 cosfunc =
lambda y : cos(y * pih) * r_earth
769 outlat0 = H[
'outlat0']
772 area = np.zeros((nx, ny))
775 ylata = outlat0 + (float(iy) + 0.5) * dyout
776 ylatp = ylata + 0.5 * dyout
777 ylatm = ylata - 0.5 * dyout
778 if (ylatm < 0
and ylatp > 0): hzone = dyout * r_earth * pih
781 cosfactp = cosfunc(ylatp)
782 cosfactm = cosfunc(ylatm)
783 if cosfactp < cosfactm:
784 hzone = sqrt(r_earth ** 2 - cosfactp ** 2) - sqrt(r_earth ** 2 - cosfactm ** 2)
786 hzone = sqrt(r_earth ** 2 - cosfactm ** 2) - sqrt(r_earth ** 2 - cosfactp ** 2)
788 gridarea = 2.*pi * r_earth * hzone * dxout / 360.
790 area[ix, iy] = gridarea
796 Open and read the binary file (bf) header only to the point of
797 the Flexpart version string, return the string, seek to the
808 version = bf.read(
'13S')
815 The readheader function returns a special class (Structure) which behaves
816 like a dictionary. It contains all the metadata from the simulation which
817 is contained in the "header" or "header_nest" binary files from the model
821 It is recommended to use the :class:`Header` class: H = pf.Header(path)
823 This version is using the BinaryFile class rather than FortFlex.
827 > H = read_header(pathname) #Don't include header filename
831 H = dictionary like object with all the run metadata.
834 .. tabularcolumns:: |l|L|
836 ============= ========================================
837 keyword Description [default]
838 ============= ========================================
839 pathname FLEXPART run output directory
840 readp read release points 0=no, [1]=y
841 nested nested output [False] or True
842 headerfile provide custom name for header file
843 datefile provide a custom name for the date file
844 verbose print information while loading header
845 ============= ========================================
848 **This function is in development**
850 Please report any bugs found.
854 probably a lot of things, among which ...
856 [] choose skip/getbin or direct seek/read
857 [] define output keys in docstring
861 The user is no longer required to indicate which version of FLEXPART
862 the header is from. Checks are made, and the header is read accordingly.
863 Please report any problems...
872 OPS.headerfile =
None
876 for k
in kwargs.keys():
877 if k
not in OPS.keys():
878 print(
"WARNING: {0} not a valid input option.".format(k))
881 if 'nest' in kwargs.keys():
882 raise IOError(
"nest is no longer a valid keyword, see docs. \n Now use nested=True or nested=False")
885 if 'nested' in kwargs.keys():
886 if kwargs[
'nested']
is 1:
887 print(
"WARNING, use of nested=1, deprecated converting to nested=True")
888 kwargs[
'nested'] =
True
890 if 'nested' in kwargs.keys():
891 if kwargs[
'nested']
is 0:
892 print(
"WARNING, use of nested=0, deprecated converting to nested=False")
893 kwargs[
'nested'] =
False
898 print "Reading Header with:\n"
901 print "%s ==> %s" % (o, OPS[o])
904 skip =
lambda n = 8 : bf.seek(n, 1)
905 getbin =
lambda dtype, n = 1 : bf.read(dtype, (n,))
912 filename = os.path.join(pathname, OPS.headerfile);
914 elif OPS.nested
is True:
915 filename = os.path.join(pathname,
'header_nest')
918 filename = os.path.join(pathname,
'header')
922 if not os.path.exists(filename):
923 raise IOError(
"No such file: {0}".format(filename))
928 raise IOError(
"Error opening: {0} with BinaryFile class".format(filename))
932 datefile = os.path.join(pathname, OPS.datefile)
934 datefile = os.path.join(pathname,
'dates')
936 if not os.path.exists(datefile):
937 raise IOError(
"No DATEFILE: {0}".format(datefile))
940 fd = file(datefile,
'r').readlines()
942 raise IOError(
"Could not read datefile: {0}".format(datefile))
945 fd = sorted(list(set(fd)))
946 h[
'available_dates'] = [d.strip(
'\n')
for d
in fd]
959 h[
'ireleasestart'] = []
960 h[
'ireleaseend'] = []
964 I = {0:
'_0', 1:
'ibdate', 2:
'ibtime', 3:
'flexpart', \
965 4:
'_1', 5:
'loutstep', 6:
'loutaver', 7:
'loutsample', \
966 8:
'_2', 9:
'outlon0', 10:
'outlat0', 11:
'numxgrid', \
967 12:
'numygrid', 13:
'dxout', 14:
'dyout', 15:
'_3', 16:
'numzgrid', \
970 Dfmt = [
'i',
'i',
'i',
'13S',
'2i',
'i',
'i',
'i',
'2i',
'f',
'f',
'i',
'i',
'f',
'f',
'2i',
'i']
972 a = [bf.read(fmt)
for fmt
in Dfmt]
973 for j
in range(len(a)):
976 h[
'outheight'] = np.array([bf.read(
'f')
for i
in range(h[
'numzgrid'])])
977 junk.append(bf.read(
'2i'))
978 h[
'jjjjmmdd'] = bf.read(
'i')
979 h[
'hhmmss'] = bf.read(
'i')
980 junk.append(bf.read(
'2i'))
981 h[
'nspec'] = bf.read(
'i') / 3
982 h[
'numpointspec'] = bf.read(
'i')
983 junk.append(bf.read(
'2i'))
987 for i
in range(h[
'nspec']):
990 h[
'wetdep'].append(
''.join([bf.read(
'c')
for i
in range(10)]).strip())
991 junk.append(bf.read(
'2i'))
992 junk.append(bf.read(
'i'))
993 h[
'drydep'].append(
''.join([bf.read(
'c')
for i
in range(10)]).strip())
994 junk.append(bf.read(
'2i'))
995 h[
'nz_list'].append(getbin(
'i'))
996 h[
'species'].append(
''.join([getbin(
'c')
for i
in range(10)]).strip())
1000 junk.append(bf.read(
'i'))
1001 h[
'wetdep'].append(
''.join([bf.read(
'c')
for i
in range(10)]).strip())
1002 junk.append(bf.read(
'2i'))
1003 junk.append(bf.read(
'i'))
1004 h[
'drydep'].append(
''.join([bf.read(
'c')
for i
in range(10)]).strip())
1005 junk.append(bf.read(
'2i'))
1006 h[
'nz_list'].append(bf.read(
'i'))
1007 h[
'species'].append(
''.join([bf.read(
'c')
for i
in range(10)]).strip())
1008 junk.append(bf.read(
'2i'))
1013 h[
'numpoint'] = bf.read(
'i')
1019 before_readp = bf.tell()
1022 I = {2:
'kindz', 3:
'xp1', 4:
'yp1', 5:
'xp2', \
1023 6:
'yp2', 7:
'zpoint1', 8:
'zpoint2', 9:
'npart', 10:
'mpart'}
1025 for k, v
in I.iteritems():
1026 h[v] = np.zeros(h[
'numpoint'])
1028 h[
'xmass'] = np.zeros((h[
'numpoint'], h[
'nspec']))
1035 junk.append(bf.read(
'i'))
1036 for i
in range(h[
'numpoint']):
1037 junk.append(bf.read(
'i'))
1039 h[
'ireleasestart'].append(bf.read(
'i'))
1040 h[
'ireleaseend'].append(bf.read(
'i'))
1041 h[
'kindz'][i] = bf.read(
'h')
1042 junk.append(bf.read(
'2i'))
1044 h[
'xp1'][i] = bf.read(
'f')
1045 h[
'yp1'][i] = bf.read(
'f')
1046 h[
'xp2'][i] = bf.read(
'f')
1047 h[
'yp2'][i] = bf.read(
'f')
1048 h[
'zpoint1'][i] = bf.read(
'f')
1049 h[
'zpoint2'][i] = bf.read(
'f')
1051 junk.append(bf.read(
'2i'))
1052 h[
'npart'][i] = bf.read(
'i')
1053 h[
'mpart'][i] = bf.read(
'i')
1055 junk.append(bf.read(
'i'))
1061 while re.search(
"\w", bf.read(
'c')):
1063 sp = sp + bf.read(
'c')
1067 h[
'compoint'].append(sp)
1070 junk.append(bf.read(
'i'))
1072 for v
in range(h[
'nspec']):
1073 Dfmt = [
'i',
'f',
'2i',
'f',
'2i',
'f',
'i']
1074 a = [bf.read(fmt)
for fmt
in Dfmt]
1075 h[
'xmass'][i, v] = a[1]
1077 if OPS.readp
is False:
1079 We get the first set of release points here in order
1080 to get some information, but skip reading the rest
1082 bf.seek(before_readp)
1084 bf.seek(157 * h[
'numpoint'], 1);
1086 bf.seek(119 * h[
'numpoint'] + (h[
'nspec'] * 36) * h[
'numpoint'] + 4, 1)
1092 h[
'method'] = bf.read(
'i')
1095 junk.append(bf.read(
'i'))
1096 junk.append(bf.read(
'i'))
1098 h[
'lsubgrid'] = bf.read(
'i')
1099 h[
'lconvection'] = bf.read(
'i')
1100 h[
'ind_source'] = bf.read(
'i')
1101 h[
'ind_receptor'] = bf.read(
'i')
1106 junk.append(bf.read(
'2i'))
1108 h[
'nageclass'] = bf.read(
'i')
1110 Lage_fmt = [
'i'] * h.nageclass
1111 h[
'lage'] = [bf.read(fmt)
for fmt
in Lage_fmt]
1117 h[
'oro'] = np.zeros((nx, ny), np.float)
1118 junk.append(bf.read(
'2i'))
1120 for ix
in range(nx):
1121 h[
'oro'][ix] = bf.read(
'f', ny)
1135 h[
'pathname'] = pathname
1136 h[
'decayconstant'] = 0
1147 Heightnn = np.zeros((nx, ny, nz), np.float)
1148 for ix
in range(nx):
1150 Heightnn[ix, :, 0] = oro[ix, :]
1152 Heightnn[ix, :, 0] = np.zeros(ny)
1154 for iz
in range(nz):
1156 Heightnn[ix, :, iz] = [h[
'outheight'][iz] + oro[ix, y]
for y
in range(ny)]
1158 Heightnn[ix, :, iz] = h[
'outheight'][iz]
1161 h[
'Heightnn'] = Heightnn
1167 start_day = datetime.datetime.strptime(str(h[
'ibdate']),
'%Y%m%d')
1168 H, M, S , = [int(str(h[
'ibtime']).zfill(6)[i:i + 2])
for i
in range(0, 6, 2)]
1169 start_time = datetime.timedelta(hours=H, minutes=M, seconds=S)
1170 h[
'simulationstart'] = start_day + start_time
1173 releasestart, releaseend = [], []
1174 for i
in range(h.numpointspec):
1175 releasestart.append(h.simulationstart + \
1176 datetime.timedelta(seconds=int(h.ireleasestart[i])))
1177 releaseend.append(h.simulationstart + \
1178 datetime.timedelta(seconds=int(h.ireleaseend[i])))
1179 h.releasestart = releasestart
1180 h.releaseend = releaseend[:h.numpointspec]
1181 h.releasetimes = [b - ((b - a) / 2)
for a, b
in zip(h.releasestart, h.releaseend)]
1184 available_dates_dt = []
1185 for i
in h.available_dates:
1186 available_dates_dt.append(datetime.datetime(
1187 int(i[:4]), int(i[4:6]), int(i[6:8]), int(i[8:10]), int(i[10:12]), int(i[12:])))
1188 h.available_dates_dt = available_dates_dt
1189 h.first_date = available_dates_dt[0]
1190 h.last_date = available_dates_dt[-1]
1191 h.ageclasses = np.array([act - h.simulationstart
for act
in h.available_dates_dt])
1192 h.numageclasses = len(h.ageclasses)
1195 h.nxmax = h.numxgrid
1196 h.nymax = h.numygrid
1197 h.nzmax = h.numzgrid
1199 h.maxpoint = h.numpoint
1200 h.maxageclass = h.numageclasses
1209 if 'kindz' not in h.keys():
1211 h.alt_unit =
'unkn.'
1215 h.alt_unit =
'm.a.s.l.'
1217 h.alt_unit =
'm.a.g.l.'
1221 h.direction =
'forward'
1225 h.direction =
'backward'
1227 h.plot_unit =
'ns / kg'
1230 if h.direction ==
'forward':
1231 if h.ind_source == 1:
1232 if h.ind_receptor == 1:
1233 h.output_unit =
'ng m-3'
1234 if h.ind_receptor == 2:
1235 h.output_unit =
'pptm'
1236 if h.ind_source == 2:
1237 if h.ind_receptor == 1:
1238 h.output_unit =
'ng m-3'
1239 if h.ind_receptor == 2:
1240 h.output_unit =
'pptm'
1241 if h.direction ==
'backward':
1242 if h.ind_source == 1:
1243 if h.ind_receptor == 1:
1245 if h.ind_receptor == 2:
1246 h.output_unit =
's m^3 kg-1'
1247 if h.ind_source == 2:
1248 if h.ind_receptor == 1:
1249 h.output_unit =
's kg m-3'
1250 if h.ind_receptor == 2:
1256 layerthickness = [h.outheight[0]]
1257 for i, lh
in enumerate(h.outheight[1:]):
1258 layerthickness.append(lh - h.outheight[i])
1259 h.layerthickness = layerthickness
1262 h.fp_version = h.flexpart
1266 print(
'Read {0} Header: {1}'.format(h.fp_version, filename))
1271 readheader = read_header
1272 readheaderV8 = read_header
1273 readheaderV6 = read_header
1277 Version 6, 7X read routines...
1281 skip =
lambda n = 8 : bf.seek(n, 1)
1282 getbin =
lambda dtype, n = 1 : bf.read(dtype, (n,))
1287 for i
in range(h[
'numpoint']):
1293 h[
'ireleasestart'].append(i1)
1294 h[
'ireleaseend'].append(i2)
1295 h[
'kindz'][i] = getbin(
'i')
1298 h[
'xp1'][i] = getbin(
'f')
1299 h[
'yp1'][i] = getbin(
'f')
1300 h[
'xp2'][i] = getbin(
'f')
1301 h[
'yp2'][i] = getbin(
'f')
1302 h[
'zpoint1'][i] = getbin(
'f')
1303 h[
'zpoint2'][i] = getbin(
'f')
1306 h[
'npart'][i] = getbin(
'i')
1307 h[
'mpart'][i] = getbin(
'i')
1315 sp = sp.join(getbin(
'c', l))
1318 h[
'compoint'].append(sp)
1324 for v
in range(h[
'nspec']):
1325 Dfmt = [
'i',
'f',
'2i',
'f',
'2i',
'f',
'i']
1326 a = [bf.read(fmt)
for fmt
in Dfmt]
1327 h[
'xmass'][i, v] = a[1]
1337 """ accepts a header dictionary as input, returns dictionary of Grid values
1338 %===========================================
1340 %-------------------------------------------
1342 % - H: required header dictionary
1344 % optional (most is obtained from header dict)
1345 % - date: which yyyymmddhhmmss from dates
1346 % - unit: 'conc', 'pptv', ['time'], 'footprint'
1347 % - nspec_ret: numspecies
1351 % - nested: nested = True
1355 % - grid dictionary object
1356 % - fail indicator (-1=fail, 0=success)
1357 %-------------------------------------------
1358 % FLEXPART python import routines
1359 %-------------------------------------------
1360 % last changes: JFB, 10.10.2008
1361 %===========================================
1373 if 'date' in kwargs.keys():
1374 date = kwargs[
'date']
1378 if 'unit' in kwargs.keys():
1379 unitname = kwargs[
'unit']
1380 else: unitname =
'time'
1382 units = [
'conc',
'pptv',
'time',
'footprint']
1383 unit = units.index(unitname)
1385 if 'nspec_ret' in kwargs.keys():
1386 nspec_ret = kwargs[
'nspec_ret']
1389 if 'pspec_ret' in kwargs.keys():
1390 pspec_ret = kwargs[
'ppsec_ret']
1393 if 'age_ret' in kwargs.keys():
1394 age_ret = kwargs[
'age_ret']
1397 if 'nested' in kwargs.keys():
1398 nested = kwargs[
'nested']
1399 else: nested =
False
1401 if 'time_ret' in kwargs.keys():
1402 time_ret = kwargs[
'time_ret']
1404 time_ret = range(len(H[
'available_dates']))
1406 if 'scaledepo' in kwargs.keys():
1407 scaledepo = kwargs[
'scaledepo']
1411 if 'scaleconc' in kwargs.keys():
1412 scaleconc = kwargs[
'scaleconc']
1416 if 'decaycons' in kwargs.keys():
1417 decaycons = kwargs[
'decaycons']
1419 decaycons = 99999999990
1423 prefix = [
'grid_conc_',
'grid_pptv_', \
1424 'grid_time_',
'footprint_total', \
1425 'grid_conc_nest_',
'grid_pptv_nest_', \
1426 'grid_time_nest_',
'footprint_total_nest']
1433 skip =
lambda n = 8 : f2.seek(n, 1)
1434 getbin =
lambda dtype, n = 1 : f2.read(dtype, (n,))
1437 def getdump(n, fmt='f'):
1438 """ function to get the dump values for the sparse format """
1448 def key2var(D, key):
1449 cmd =
"global %s; %s = D['%s'];" % (key, key, key)
1452 def dumpgrid(dmp_i, cnt_r, dmp_r, grd, k, nage):
1453 """ function to dump sparse elements into grid """
1456 for ir
in range(cnt_r):
1457 if dmp_r[ir] * fact > 0:
1458 n = dmp_i[ii]; ii = ii + 1; fact = fact * -1.
1462 kz = n / (numxgrid * numygrid)
1463 jy = (n - kz * numxgrid * numygrid) / numxgrid
1464 ix = n - numxgrid * numygrid * kz - numxgrid * jy
1469 grd[ix, jy, kz - 1, k, nage] = abs(dmp_r[ir])
1474 H[
'nageclass'] = H[
'numageclasses']
1475 headervars = [
'nested',
'nspec',
'numxgrid',
'numygrid',
'numzgrid',
'nageclass', \
1476 'available_dates',
'pathname',
'decayconstant',
'numpoint',
'Area',
'Heightnn']
1478 for k
in headervars:
1482 datagrid = np.zeros((numxgrid, numygrid, numzgrid, numpoint), dtype=np.float32)
1483 zplot = np.empty((numxgrid, numygrid, numzgrid, numpoint))
1488 for ks
in range(nspec_ret, nspec_ret + 1):
1491 specs =
'_' + str(ks).zfill(3)
1493 if date ==
None and time_ret ==
None:
1494 get_dates = [available_dates[0]]
1495 elif time_ret ==
None:
1497 date = date.strip().split(
',')
1500 get_dates.append(available_dates.index(d))
1502 shout(
"Cannot find date: %s in H['dates']\n" % d)
1504 get_dates = available_dates
1508 for date_i
in range(len(get_dates)):
1511 datestring = H[
'available_dates'][date_i]
1513 filename = os.path.join(H[
'pathname'], \
1514 prefix[(unit) + (nested * 4)] + datestring + specs)
1516 filename = os.path.join(H[
'pathname'], \
1517 prefix[(unit) + (nested * 4)])
1521 if os.path.exists(filename):
1526 if useFortFlex == 1:
1544 concgrid = readgrid(filename, numxgrid, numygrid, numzgrid, \
1545 numpoint, nageclass, \
1546 scaleconc, decayconstant)
1549 print np.min(concgrid)
1550 print np.max(concgrid)
1553 zplot = sumgrid(zplot, concgrid, \
1562 datagrid = np.zeros((numxgrid, numygrid, numzgrid, 1, 1), np.float)
1567 G[
'itime'] = getbin(
'i');
1568 print H[
'available_dates'][date_i]
1573 dmp_i = getdump(cnt_i,
'i')
1576 dmp_r = getdump(cnt_r)
1581 dmp_i = getdump(cnt_i,
'i')
1584 dmp_r = getdump(cnt_r)
1590 dmp_i = getdump(cnt_i,
'i')
1593 dmp_r = getdump(cnt_r)
1595 concgrid = dumpgrid(dmp_i, cnt_r, dmp_r, datagrid, ks - 1, nage)
1598 G[H[
'ibtime']] = concgrid
1602 print "\n\n INPUT ERROR: Could not find file: %s" % filename
1603 raise IOError(
'No file: %s' % filename)
1605 if useFortFlex == 1: G = zplot
1608 readgridBF = _readgrid_noFF
1611 """ Read grid using BinaryFile class"""
1613 skip =
lambda n = 8 : f2.seek(n, 1)
1614 getbin =
lambda dtype, n = 1 : f2.read(dtype, (n,))
1617 def getdump(n, fmt='f'):
1618 """ function to get the dump values for the sparse format """
1631 def key2var(D, key):
1632 cmd =
"global %s; %s = D['%s'];" % (key, key, key)
1635 def _dumpgrid(dmp_i, cnt_r, dmp_r, grd, k, nage, nx, ny):
1636 """ function to dump sparse elements into grid, fall back method if
1637 pflexcy.so module (cython) not available -- it is SLOW """
1639 if len(grd.shape) == 5:
1645 print 'CNT_R: ' + str(cnt_r)
1646 print 'DMP_R: ' + str(dmp_r)
1647 print 'TYPE DMP_R: ' + str(type(dmp_r))
1649 for ir
in range(cnt_r):
1652 if dmp_r[ir] * fact > 0:
1659 kz = n / (H.numxgrid * H.numygrid)
1660 jy = (n - kz * H.numxgrid * H.numygrid) / H.numxgrid
1661 ix = n - H.numxgrid * H.numygrid * kz - H.numxgrid * jy
1662 grd[ix, jy, kz - 1, k, nage] = abs(dmp_r[ir])
1671 if dmp_r[ir] * fact > 0:
1678 ix = n - H.numxgrid * jy
1679 grd[ix, jy, k, nage] = abs(dmp_r[ir])
1686 from sdspflexcy
import dumpdatagrid, dumpdepogrid
1687 print 'using pflexcy'
1694 dumpdatagrid = _dumpgrid
1695 dumpdepogrid = _dumpgrid
1700 wetgrid = np.zeros((H.numxgrid, H.numygrid, H.numpointspec, 1), np.float)
1701 drygrid = np.zeros((H.numxgrid, H.numygrid, H.numpointspec, 1), np.float)
1702 datagrid = np.zeros((H.numxgrid, H.numygrid, H.numzgrid, H.numpointspec, nage), np.float)
1710 for na
in range(nage):
1712 for ks
in range(H.numpointspec):
1716 dmp_i = getdump(cnt_i,
'i')
1719 dmp_r = getdump(cnt_r)
1722 wetgrid = dumpdepogrid(dmp_i, cnt_r, dmp_r, wetgrid, ks, na, H.numxgrid, H.numygrid)
1727 dmp_i = getdump(cnt_i,
'i')
1730 dmp_r = getdump(cnt_r)
1733 drygrid = dumpdepogrid(dmp_i, cnt_r, dmp_r, drygrid, ks, na, H.numxgrid, H.numygrid)
1738 dmp_i = getdump(cnt_i,
'i')
1741 dmp_r = getdump(cnt_r)
1747 datagrid = dumpdatagrid(dmp_i, cnt_r, dmp_r, datagrid, ks, na, H.numxgrid, H.numygrid)
1752 return datagrid, wetgrid, drygrid, itime
1757 maxpoint=800000, maxspec=4, maxageclass=20,
1758 nxmax=722, nymax=362, nzmax=40, verbose=
True):
1762 Called from read_header if readp_ff is True, uses FortFlex.readheader
1763 to read all releasepoints.
1765 This function is dependent on the FortFlex.so module
1766 see FortFlex.f and the f2py directory
1769 from FortFlex
import readheader
1771 print "Error with FortFlex.readheader, use read_header"
1773 headervars = [
'numxgrid',
'numygrid',
'numzgrid',
'outlon0',
'outlat0',
'compoint', \
1774 'dxout',
'dyout',
'outheight',
'ibdate',
'ibtime',
'loutstep', \
1775 'nspec',
'nageclass',
'lage',
'ireleasestart',
'ireleaseend', \
1776 'numpoint',
'xpoint',
'ypoint',
'zpoint1',
'zpoint2',
'heightnn',
'area', \
1777 'maxpoint',
'maxspec',
'maxageclass',
'nxmax',
'nymax',
'nzmax', \
1778 'npart',
'kind',
'lage',
'loutaver',
'loutsample',
'yyyymmdd',
'hhmmss',
'method']
1783 print """Reading Header with:
1790 """ % (maxpoint, maxspec, maxageclass, nxmax, nymax, nzmax)
1793 numxgrid, numygrid, numzgrid, outlon0, outlat0, dxout, dyout, outheight, \
1794 ibdate, ibtime, loutstep, nspec, nageclass, lage, ireleasestart, ireleaseend, \
1795 numpoint, xpoint, ypoint, zpoint1, zpoint2, heightnn, area, compoint, species_f, \
1796 npart, kind, loutaver, loutsample, yyyymmdd, hhmmss, method = \
1797 readheader(pathname, maxpoint, maxspec, maxageclass, nxmax, nymax, nzmax)
1799 for v
in headervars:
1800 if v
not in h.keys():
1801 exec(
"h.%s = %s" % (v, v))
1806 Accepts a header object as input, returns dictionary of Grid values
1807 keyed by species and datestring from H['available_dates']. A grid instance from this
1808 dictionary may be passed to the get_slabs function to return a dictionary of slabs
1809 suitable for plotting with the plotting routines. See below for more information.
1812 Requires FortFlex.so module compiled using f2py. See FortFlex.f for more details.
1816 > FLEXDATA = read_grid(H,**kwargs)
1820 A grid dictionary keyed by tuples with (species,available_dates), where species is
1821 an integer. See grid.keys()
1823 FLEXDATA[(s,datestring)]['grid']
1824 FLEXDATA[(s,datestring)]['itime']
1825 FLEXDATA[(s,datestring)]['shape']
1826 FLEXDATA[(s,datestring)]['max']
1827 FLEXDATA[(s,datestring)]['min']
1828 FLEXDATA[(s,datestring)]['timestamp']
1829 FLEXDATA[(s,datestring)]['species']
1830 FLEXDATA[(s,datestring)]['gridfile']
1831 FLEXDATA[(s,datestring)]['rel_i']
1832 FLEXDATA[(s,datestring)]['spec_i']
1836 .. tabularcolumns:: |l|L|
1838 ============= ========================================
1839 keyword Description [default]
1840 ============= ========================================
1841 date which yyyymmddhhmmss from available_dates
1843 time_ret index to time
1844 unit 'conc', 'pptv', ['time'], 'footprint'
1845 nspec_ret numspecies
1846 pspec_ret index to ???
1847 age_ret index to ageclass
1848 nested obtained from H['nested']
1849 BinaryFile Use BinaryFile vs. FortFlex [False]
1850 getwet True, [False]
1851 getdry True, [False]
1852 scaledepo A float value to scale deposition [1.0]
1853 scaleconc A float value to scale conc [1.0]
1854 decayconstant A float for decay const. [9e6]
1855 calcfoot Will cause footprint to be calculated
1858 version A string 'V8' or 'V6', ['V8']
1859 ============= ========================================
1863 most arguments are able to be extracted from the header "H"
1867 if H.version ==
'V9':
1868 return readgridV9(H, **kwargs)
1869 if H.version ==
'V8':
1871 if H.version ==
'V6':
1874 raise IOError(
"No version attribute defined for Header.")
1881 Accepts a header object as input, and selects appropriate readgrid function
1882 to use for reading in data from the flexpart binary Fortran files.
1884 See the :func:`read_grid` for more information on keyword arguments
1886 This is the 'V8' version of the function.
1895 OPS.npspec_int =
False
1901 OPS.decayconstant = 9e6
1903 OPS.calcfoot =
False
1905 OPS.BinaryFile =
False
1919 if H[
'loutstep'] > 0:
1929 nspec_ret = OPS.nspec_ret
1930 if isinstance(nspec_ret, int):
1931 nspec_ret = [nspec_ret]
1932 assert iter(nspec_ret),
"nspec_ret must be iterable."
1936 if OPS.time_ret
is not None:
1938 time_ret = OPS.time_ret
1939 if isinstance(time_ret, int) ==
True:
1940 time_ret = [time_ret]
1945 time_ret = np.arange(len(H.available_dates))
1950 get_dates.append(H.available_dates[t])
1954 if OPS.date !=
None:
1956 if time_ret
is not None:
1957 Warning(
"overwriting time_ret variable, date was requested")
1959 if not isinstance(date, list):
1960 date = date.strip().split(
',')
1963 get_dates.append(H.available_dates[H.available_dates.index(d)])
1966 _shout(
"Cannot find date: %s in H['available_dates']\n" % d)
1968 if get_dates
is None:
1969 raise ValueError(
"Must provide either time_ret or date value.")
1972 fd.grid_dates = get_dates[:]
1978 prefix = [
'grid_conc_',
'grid_pptv_', \
1979 'grid_time_',
'footprint_',
'footprint_total', \
1980 'grid_conc_nest_',
'grid_pptv_nest_', \
1981 'grid_time_nest_',
'footprint_nest_',
'footprint_total_nest'
1984 units = [
'conc',
'pptv',
'time',
'footprint',
'footprint_total']
1985 unit_i = units.index(unit)
1990 if OPS.version ==
'V6':
1991 from FortFlex
import readgrid_v6
as readgrid
1992 from FortFlex
import sumgrid
1994 print 'using FortFlex VERSION 6'
1997 from FortFlex
import readgrid, sumgrid
2002 from nilu.pflexpart.FortFlex
import readgrid, sumgrid
2009 readgrid = _readgridBF
2010 OPS.BinaryFile =
True
2020 fd.options.update(OPS)
2026 for date_i
in range(len(get_dates)):
2027 datestring = get_dates[date_i]
2032 spec_fid =
'_' + str(s + 1).zfill(3)
2035 filename = os.path.join(H[
'pathname'], \
2036 prefix[(unit_i) + (H.nested * 5)] + datestring + spec_fid)
2037 H.zdims = H.numzgrid
2041 print "Total footprint"
2042 filename = os.path.join(H[
'pathname'], \
2043 prefix[(unit_i) + (H.nested * 5)] + spec_fid)
2046 if os.path.exists(filename):
2047 H.filename = filename
2050 print 'with values:'
2051 inputvars = [
'filename',
'numxgrid',
'numygrid',
2052 'zdims',
'numpoint',
'nageclass', \
2053 'scaledepo',
'scaleconc',
2054 'decayconstant',
'numpointspec']
2056 print v,
" ==> ", H[v]
2060 print(
"Reading {0} with BinaryFile".format(filename))
2061 gridT, wetgrid, drygrid, itime =
_readgridBF(H, filename)
2065 if OPS.npspec_int
is not False:
2066 npspec_int = OPS.npspec_int
2070 numpointspec = H.numpointspec
2072 gridT, wetgrid, drygrid, itime = readgrid(filename, \
2073 H.numxgrid, H.numygrid,
2074 H.zdims, numpointspec, H.nageclass, \
2075 OPS.scaledepo, OPS.scaleconc, H.decayconstant, npspec_int)
2078 wet = wetgrid.squeeze()
2080 dry = drygrid.squeeze()
2082 zplot = gridT[:, :, :, :, 0]
2084 zplot = gridT[:, :, :, :, 0]
2088 zplot = sumgrid(zplot, gridT, \
2093 if H.direction ==
'forward':
2109 FLEXDATA[(s, datestring)][
'grid'] = D
2111 FLEXDATA[(s, datestring)][
'itime'] = itime
2113 FLEXDATA[(s, datestring)][
'shape'] = zplot.shape
2115 FLEXDATA[(s, datestring)][
'max'] = zplot.max()
2117 FLEXDATA[(s, datestring)][
'min'] = zplot.min()
2118 FLEXDATA[(s, datestring)][
'timestamp'] = datetime.datetime.strptime(datestring,
'%Y%m%d%H%M%S')
2119 FLEXDATA[(s, datestring)][
'species'] = H[
'species'][s]
2120 FLEXDATA[(s, datestring)][
'gridfile'] = filename
2121 FLEXDATA[(s, datestring)][
'rel_i'] = rel_i
2122 FLEXDATA[(s, datestring)][
'spec_i'] = s
2124 FLEXDATA[(s, datestring)][
'wet'] = wet
2126 FLEXDATA[(s, datestring)][
'wet'] =
None
2129 FLEXDATA[(s, datestring)][
'dry'] = dry
2131 FLEXDATA[(s, datestring)][
'dry'] =
None
2134 _shout(
'***ERROR: file %s not found! \n' % filename)
2137 fd.set_with_dict(FLEXDATA)
2141 qind = (nspec_ret[0], fd.grid_dates[0])
2142 fd.grid = fd[qind][fd[qind].keys()[0]].grid
2152 Accepts a header object as input, and selects appropriate readgrid function
2153 to use for reading in data from the flexpart binary Fortran files.
2155 See the :func:`read_grid` for more information on keyword arguments
2157 This is the 'V6' version of the function.
2169 OPS.decayconstant = 9e6
2171 OPS.calcfoot =
False
2173 OPS.BinaryFile =
False
2183 fd.options.update(OPS)
2187 if H[
'loutstep'] > 0:
2196 nspec_ret = OPS.nspec_ret
2197 if isinstance(nspec_ret, int):
2198 nspec_ret = [nspec_ret]
2199 assert iter(nspec_ret),
"nspec_ret must be iterable."
2203 if OPS.time_ret
is not None:
2205 time_ret = OPS.time_ret
2206 if isinstance(time_ret, int) ==
True:
2207 time_ret = [time_ret]
2210 if forward ==
False:
2212 time_ret = np.arange(len(H.available_dates))
2214 raise ValueError(
"Must enter a positive time_ret for forward runs")
2217 get_dates.append(H.available_dates[t])
2221 if OPS.date !=
None:
2223 if time_ret
is not None:
2224 Warning(
"overwriting time_ret variable, date was requested")
2226 if not isinstance(date, list):
2227 date = date.strip().split(
',')
2230 get_dates.append(H.available_dates[H.available_dates.index(d)])
2233 _shout(
"Cannot find date: %s in H['available_dates']\n" % d)
2235 if get_dates
is None:
2236 raise ValueError(
"Must provide either time_ret or date value.")
2239 fd.grid_dates = get_dates[:]
2245 prefix = [
'grid_conc_',
'grid_pptv_', \
2246 'grid_time_',
'footprint_',
'footprint_total', \
2247 'grid_conc_nest_',
'grid_pptv_nest_', \
2248 'grid_time_nest_',
'footprint_nest_',
'footprint_total_nest'
2251 units = [
'conc',
'pptv',
'time',
'footprint',
'footprint_total']
2252 unit_i = units.index(unit)
2256 from FortFlex
import readgrid_v6
as readgrid
2257 from FortFlex
import sumgrid
2259 OPS.useFortFlex = useFortFlex
2260 print 'using FortFlex VERSION 6'
2264 raise Warning(
'Cannot import FortFlex trying to use BinaryFile')
2266 readgrid = _readgridBF
2267 OPS.BinaryFile =
True
2299 for date_i
in range(len(get_dates)):
2300 datestring = get_dates[date_i]
2302 FLEXDATA[datestring] = {}
2304 total_footprint =
False
2309 filename = os.path.join(H[
'pathname'], \
2310 prefix[(unit_i) + (H.nested * 5)] + datestring)
2311 H.zdims = H.numzgrid
2315 print "Total footprint"
2316 total_footprint =
True
2317 filename = os.path.join(H[
'pathname'], \
2318 prefix[(unit_i) + (H.nested * 5)])
2321 if os.path.exists(filename):
2322 H.filename = filename
2325 print 'with values:'
2326 inputvars = [
'filename',
'numxgrid',
'numygrid',
2327 'zdims',
'numpoint',
'nageclass', \
2328 'scaledepo',
'scaleconc',
2329 'decayconstant',
'numpointspec']
2331 print v,
" ==> ", H[v]
2336 gridT, wetgrid, drygrid, itime =
_readgridBF(H, filename)
2338 gridT, wetgrid, drygrid, itime = readgrid(filename, \
2339 H.numxgrid, H.numygrid,
2340 H.zdims, H.numpointspec, H.nageclass, \
2341 OPS.scaledepo, OPS.scaleconc, H.decayconstant)
2345 zplot = gridT[:, :, :, :, 0]
2347 zplot = gridT[:, :, :, :, 0]
2353 zplot = sumgrid(zplot, gridT, \
2358 if H.direction ==
'forward':
2373 FLEXDATA[(s, datestring)][
'grid'] = D
2374 FLEXDATA[(s, datestring)][
'itime'] = itime
2375 FLEXDATA[(s, datestring)][
'shape'] = zplot.shape
2376 FLEXDATA[(s, datestring)][
'max'] = zplot.max()
2377 FLEXDATA[(s, datestring)][
'min'] = zplot.min()
2378 FLEXDATA[(s, datestring)][
'timestamp'] = datetime.datetime.strptime(datestring,
'%Y%m%d%H%M%S')
2379 FLEXDATA[(s, datestring)][
'species'] = H[
'species'][s]
2380 FLEXDATA[(s, datestring)][
'gridfile'] = filename
2381 FLEXDATA[(s, datestring)][
'rel_i'] = rel_i
2382 FLEXDATA[(s, datestring)][
'spec_i'] = s
2386 _shout(
'***ERROR: file %s not found! \n' % filename)
2388 fd.set_with_dict(FLEXDATA)
2393 qind = (0, fd.grid_dates[0])
2394 fd.grid = fd[qind][fd[qind].keys()[0]].grid
2404 footprints = np.zeros((H.ny, H.nx, len(H.C.keys())))
2405 for i, key
in enumerate(H.C):
2406 footprints[:,:,i] = H.C[key].slabs[0]
2416 """ for backward runs, calculates the 20-day sensitivity at each release point.
2420 > C = fill_backward(H,nspec=(0))
2423 This will cycle through all available_dates and create the filled backward array
2424 for each k in H.numpointspec.
2428 A dictionary keyed by a (species,k) tuple.
2430 C & FD attributes on H
2432 Each element in the dictionary is a 3D array (x,y,z) for each species,k
2435 USE with *caution*, this is **MEMORY** intensive!
2440 ============== ========================================
2441 keyword Description [default]
2442 ============== ========================================
2443 nspec the species ID or a tuple of species IDs
2444 FD FD can be passed if it is already read
2445 add_attributes will add C and FD as attributes to H,
2446 rather than returning just C
2447 ============== ========================================
2450 There's a lot of redundancy in the storage of attributes, maybe there is a
2451 better way to handle this.
2457 if isinstance(nspec, int):
2461 assert iter(species),
'nspec must be iterable, or you can pass an int'
2465 FD =
read_grid(H, time_ret= -1, nspec_ret=species)
2469 if H.direction ==
'backward':
2471 for s, k
in itertools.product(species, range(H.numpointspec)):
2473 C[(s, k)].grid = np.zeros((H.numxgrid, H.numygrid, H.numzgrid))
2474 C[(s, k)][
'itime'] =
None
2475 C[(s, k)][
'timestamp'] = H.releasetimes[k]
2476 C[(s, k)][
'species'] = H[
'species'][s]
2477 C[(s, k)][
'gridfile'] =
'multiple'
2478 C[(s, k)][
'rel_i'] = k
2479 C[(s, k)][
'spec_i'] = s
2485 for d
in FD.grid_dates:
2487 for k
in range(H.numpointspec):
2489 contribution = FD[(s, d)].grid[:, :, :, k]
2490 C[(s, k)].grid = C[(s, k)].grid + contribution
2495 if H.direction ==
'forward':
2497 for k
in range(len(FD.grid_dates)):
2498 d = FD.grid_dates[k]
2499 C[(s, k)] = FD[(s, d)]
2503 C[(s, k)].slabs =
get_slabs(H, C[(s, k)].grid)
2505 if H.direction ==
'backward':
2506 C[(s, k)][
'shape'] = C[(s, k)].grid[0].shape
2507 C[(s, k)][
'max'] = C[(s, k)].grid[0].max()
2508 C[(s, k)][
'min'] = C[(s, k)].grid[0].min()
2520 """ Use reademissions.so module to read emissions file """
2522 from FortFlex
import reademissions
2524 raise ImportError(
"Cannot find FortFlex module or missing reademissions")
2528 defaults = {
'nxmax':720,
'nymax':360,
'outlon0':-180,
'outlat0':-90, \
2529 'numxgrid':720,
'numygrid':360,
'dxout':0.5,
'dyout':0.5}
2530 for k, v
in defaults.iteritems():
2531 exec(
"E.%s = %s" % (k, v))
2533 emissions = reademissions(emissionsfile, maxemissions, E.nxmax, E.nymax, \
2534 E.outlon0, E.outlat0, E.numxgrid, E.numygrid, E.dxout, E.dyout)
2536 E.filename = emissionsfile
2540 def get_slabs(H, G, index=None, normAreaHeight=True, scale=1.0):
2541 """ Preps grid from readgridV8 for plotting.
2543 Accepts a 3D or 4D GRID from readgrid with optional index.
2544 shape ~= (360, 180, 3, 88, 1)
2548 plotgrid = get_slabs(H,G)
2552 H = A Header instance from a FLEXPART run
2553 G = A grid from the FLEXPARTDATA or FD dictionary returned from read_grid
2557 slabs : a dictionary of rank-2 arrays corresponding to vertical levels.
2559 **Note**: transposes the grid for plotting.
2560 Total Column as D[0]
2567 .. tabularcolumns:: |l|L|
2569 =============== ========================================
2570 keyword Description [default]
2571 =============== ========================================
2572 index a release point index (k)
2573 normAreaHeight [True], normalizes by the area/height
2574 =============== ========================================
2577 Need to check the normalization and indexing.
2580 Heightnn = H[
'Heightnn']
2583 grid_shape = G.shape
2584 if len(grid_shape)
is 4:
2586 if H.direction
is 'forward':
2590 g = G[:, :, :, index]
2593 g = G[:, :, :, index]
2595 raise IOError(
'######### ERROR: Which Release Point to get? ########## ')
2597 if len(grid_shape)
is 3:
2604 for i
in range(g.shape[2]):
2607 TC = np.sum(g, axis=2).T
2610 data = g[:, :, i] / Heightnn[:, :, i]
2614 Slabs[i + 1] = data.T * scale
2630 print 'Cannot delete m, does not exist.'
2636 def plot_trajchar(H, R, numrelease=1, varindex=[4, 15, 14], \
2637 FIGURE=
None, map_region=
None, projection=
None, coords=
None):
2638 """ plot FLEXPART trajectory (or set of trajectories) characteristics
2639 R is a dictionary returned from pflexible.read_trajectories
2641 varindex is a list of the variables to plot from the trajectories
2644 FIGURE = mp.get_FIGURE(getm=
False)
2651 print 'problem getting ax,m, or fig.'
2656 print 'ax not removed'
2659 T = R[
'Trajectories']
2660 labels = R[
'labels']
2661 t = T[np.where(T[:, 0] == numrelease), :][0]
2662 numplot = len(varindex)
2664 for i
in range(numplot):
2665 sp = fig.add_subplot(numplot, 1, i + 1)
2666 sp.plot(t[:, 1], t[:, varindex[i]])
2668 if i + 1 != numplot: plt.setp(ax, xticklabels=[])
2669 plt.ylabel(labels[varindex[i]])
2676 map_region=
None, projection=
None, coords=
None,
2682 """ plot the llon,llat,elv1 of the releases.
2685 >F = plot_releases(R)
2687 See the :func:`read_releases` function for information on how to generate "R"
2692 from mpl_toolkits.mplot3d import Axes3D
2694 raise ImportError('mplot3d not available from mpl_toolkits!')
2699 ax.plot(R.lllon, R.lllat, R.elv1)
2703 # # Set up the FIGURE
2705 FIGURE = mp.get_FIGURE(map_region=map_region, projection=projection,
2706 coords=coords, MapPar=MapPar)
2707 # #Get fig info and make active
2711 plt.figure(fig.number)
2715 # # prepare the data
2719 zsize = np.ones(len(lon)) * 30
2723 # # clear the previous track
2724 if 'circles' in FIGURE.keys():
2725 del FIGURE['circles']
2726 if overlay is False:
2727 del ax.collections[FIGURE.indices.collections:]
2728 del ax.texts[FIGURE.indices.texts:]
2731 cx, cy = m(lon, lat)
2733 cmap = plt.get_cmap('gist_gray')
2734 circles = m.scatter(cx, cy, zsize, zlevel, cmap=cmap,
2735 marker=marker, edgecolor=None,
2736 zorder=10, alpha=0.85)
2737 # # m.scatter has no color bar,
2738 # # so create a ghost 'scatter' instance:
2739 pos = ax.get_position()
2740 l, b, w, h = getattr(pos, 'bounds', pos)
2741 jnkfig = plt.figure()
2742 jnkax = jnkfig.add_axes([l, b, w, h], frameon=False)
2743 jnkmap = jnkax.scatter(cx, cy, zsize, zlevel,
2744 cmap=cmap, marker=marker,
2748 FIGURE.circles = circles
2751 # # make the figure active again
2752 plt.figure(fig.number);
2753 # # draw the legend and title
2754 # # check if a colorbar legend exists (this will cause
2755 # # problems for subplot routines!)
2757 cax = plt.axes([l + w + 0.12, b, 0.02, h - 0.035])
2759 cax = plt.axes([l + w + 0.03, b, 0.025, h - 0.035])
2760 cb = fig.colorbar(jnkmap, cax=cax) # draw colorbar
2761 p_leg = mpl.font_manager.FontProperties(size='6')
2762 cax.set_title('altitude\n(m)', fontproperties=p_leg)
2764 # # delete the ghost instance
2765 plt.close(jnkfig.number)
2766 del jnkax, jnkfig, jnkmap
2769 plt.setp(ax, xticks=[], yticks=[])
2772 p_cax = mpl.font_manager.FontProperties(size='8',
2783 def plot_spectra(inspectra,
2784 plt_title='', fig_title='',
2786 spectra_label='bins',
2787 FIGURE=None, y_datarange=None,
2788 cum=False, labels=[],
2789 bars=False, debug=False):
2798 A
"mapping.py" ``FIGURE`` object.
2802 .. tabularcolumns:: |l|L|
2804 ============= ========================================
2805 keyword Description [default]
2806 ============= ========================================
2807 runid an ID
for the title
2808 yunits units
for the y-axis [
None]
2809 FIGURE A
"FIGURE" object[
None] (see mapping.py)
2810 data_range y-axis data range
2811 web_version Set to
True if using the make_webpages
2812 function. see: :func:`read_agespectrum`
2813 continental Set to
True if continental_spectrum
2814 cum Set to true
if data should be cumulative
2815 bars Plot using stacked bars rather than a
2817 ============= ========================================
2820 There
's a lot of redundancy in the storage of attributes, maybe there is a
2821 better way to handle this.
2825 # # make tick lables smaller
2826 mpl.rcParams['xtick.labelsize'] = 6
2827 mpl.rcParams['ytick.labelsize'] = 6
2830 FIGURE = Structure()
2831 fig = plt.figure(figsize=(8, 6))
2833 ax = fig.add_subplot(111) # ,pos=[0.1,0.2,.8,.7])
2841 numageclasses = inspectra.numageclass
2842 releasetimes = inspectra.releasetimes
2843 inspectra = inspectra[:]
2845 numageclasses = inspectra.shape[1] - 1
2846 releasetimes = inspectra[:, 0]
2847 inspectra = inspectra[:, 1:]
2850 # # Normalize the data so it fills to 100%
2851 # spectra = np.zeros(inspectra.shape)
2852 spectra = (inspectra.transpose() / np.sum(inspectra, axis=1)).transpose()
2853 spectra = np.cumsum(spectra[:, :], axis=1)
2854 # sums = np.sum(inspectra,axis=1)
2855 # for i,elem in enumerate(inspectra):
2856 # spectra[i,:] = elem/sums[i]
2857 elif cum is True and cum != 'norm':
2858 spectra = np.cumsum(inspectra[:, :], axis=1)
2862 # Set up plotting environment colors
2863 Nc = np.array([float(i) / numageclasses for i in range(numageclasses)])
2864 norm = mpl.colors.normalize(Nc.min(), Nc.max())
2865 # jet = plt.cm.get_cmap('jet')
2866 jet = _gen_flexpart_colormap()
2869 # for i in range(0,H.numageclasses-1):
2870 for i in range(numageclasses):
2871 facecolors.append(jet(norm(Nc[i])))
2876 # ax.plot(ageclass[:,i])
2878 # create the baseline
2880 ax.bar(releasetimes, spectra[:, i], 0.03, color=facecolors[-1])
2882 ax.fill_between(releasetimes, np.zeros(len(spectra[:, i])), spectra[:, i],
2883 color=facecolors[-1], label='%s' % lbl)
2886 ax.bar(releasetimes, spectra[:, i], 0.03, color=facecolors[-1], bottom=spectra[:, i - 1])
2888 ax.fill_between(releasetimes, spectra[:, i - 1], spectra[:, i],
2889 color=facecolors[-1], label='%s' % (lbl))
2890 # facecolors.append(jet(norm(Nc[i+1])))
2893 # ax.set_yscale('log')
2895 print 'setting data range'
2896 ax.set_ylim(y_datarange)
2898 ax.set_ylim((0, spectra.max()))
2901 ax.set_ylabel('%s' % y_label, rotation=90, ha='center', size='small')
2904 plt.title('%s' % (plt_title), size='small')
2906 fig.suptitle('Emissions by %s' % (spectra_type), size='small')
2909 # for xl in ax.get_xticklabels():
2910 # plt.setp(xl,size='x-small')
2912 pos = ax.get_position()
2913 l, b, w, h = getattr(pos, 'bounds', pos)
2914 # polygons = ax.collections
2915 # for i,p in enumerate(polygons): p.set_color(facecolors[i])
2916 # BoundaryNorm, and extended ends to show the "over" and "under"
2918 ax2 = fig.add_axes([l, .08, w, 0.03])
2919 cmap = mpl.colors.ListedColormap(facecolors)
2920 # cmap.set_over('0.25')
2921 # cmap.set_under('0.75')
2923 # If a ListedColormap is used, the length of the bounds array must be
2924 # one greater than the length of the color list. The bounds must be
2925 # monotonically increasing.
2926 bounds = range(numageclasses + 1)
2927 # norm = mpl.colors.BoundaryNorm(bounds, cmap.N)
2928 cb2 = mpl.colorbar.ColorbarBase(ax2,
2931 # # to use 'extend', you must
2932 # # specify two extra boundaries:
2935 # values=range(H.numageclasses+1),
2936 ticks=bounds, # optional
2937 spacing='proportional',
2938 orientation='horizontal')
2940 cb2.set_label(spectra_label, size='x-small')
2942 ax2.set_xticklabels(labels, va='center', ha='left', rotation=0, size='xx-small')
2947 # make main axes current
2956 def plot_agespectra(H, agespectra,
2957 plt_title='', y_label=None,
2959 FIGURE=None, y_datarange=None,
2960 web_version=False, continental=False, cum=False,
2961 bars=False, debug=False):
2962 """ plot an agespectra
2968 This creates a filled
in between agespectra plot using information
from
2972 A
"mapping.py" ``FIGURE`` object.
2976 .. tabularcolumns:: |l|L|
2978 ============= ========================================
2979 keyword Description [default]
2980 ============= ========================================
2981 runid an ID
for the title
2982 yunits units
for the y-axis [
None]
2983 FIGURE A
"FIGURE" object[
None] (see mapping.py)
2984 data_range y-axis data range
2985 web_version Set to
True if using the make_webpages
2986 function. see: :func:`read_agespectrum`
2987 continental Set to
True if continental_spectrum
2988 cum Set to true
if data should be cumulative
2989 bars Plot using stacked bars rather than a
2991 ============= ========================================
2994 There
's a lot of redundancy in the storage of attributes, maybe there is a
2995 better way to handle this.
2998 Required attributes of 'H' if you want to make a dummy H. Or just set
2999 H to
"None" and it will be taken
from the agespectra input.
3005 # # make tick lables smaller
3006 mpl.rcParams['xtick.labelsize'] = 6
3007 mpl.rcParams['ytick.labelsize'] = 6
3010 FIGURE = Structure()
3013 ax = fig.add_subplot(111)
3020 inspectra = agespectra.agespectrum[:, 4:]
3022 inspectra = agespectra[:]
3025 from nilu.pflexpart import emissions as em
3026 spectra_label = "Continents"
3027 spectra_type = "Continent"
3028 conts = em.Continents()
3029 cbar_labels = [i[1] for i in conts.flexpart_continents]
3031 spectra_label = "Ageclasses (Days)"
3032 spectra_type = "Ageclass"
3037 numageclasses = inspectra.numageclass
3038 releasetimes = inspectra.agespectrum[:, 0]
3040 numageclasses = inspectra.shape[1] - 1
3041 releasetimes = inspectra[:, 0]
3042 inspectra = inspectra[:, 1:]
3044 numageclasses = H.numageclasses
3045 releasetimes = H.releasetimes
3048 # ## Normalize the data so it fills to 100%
3049 # #spectra = np.zeros(inspectra.shape)
3050 # spectra = (inspectra.transpose()/np.sum(inspectra, axis=1)).transpose()
3051 # spectra = np.cumsum(spectra[:,:],axis=1)
3052 # #sums = np.sum(inspectra,axis=1)
3053 # #for i,elem in enumerate(inspectra):
3054 # # spectra[i,:] = elem/sums[i]
3055 # elif cum is True and cum != 'norm':
3056 # spectra = np.cumsum(inspectra[:,:],axis=1)
3058 # spectra = inspectra
3060 spectra = _cum_spec(inspectra, cum=cum)
3062 # Set up plotting environment colors
3063 Nc = np.array([float(i) / numageclasses for i in range(numageclasses)])
3064 norm = mpl.colors.normalize(Nc.min(), Nc.max())
3065 # jet = plt.cm.get_cmap('jet')
3066 jet = _gen_flexpart_colormap()
3069 # for i in range(0,H.numageclasses-1):
3070 for i in range(numageclasses):
3071 facecolors.append(jet(norm(Nc[i])))
3072 # ax.plot(ageclass[:,i])
3074 # create the baseline
3076 ax.bar(releasetimes, spectra[:, i], 0.03, color=facecolors[-1])
3078 ax.fill_between(releasetimes, np.zeros(len(spectra[:, i])), spectra[:, i],
3079 color=facecolors[-1], label='%s' % i)
3082 ax.bar(releasetimes, spectra[:, i], 0.03, color=facecolors[-1], bottom=spectra[:, i - 1])
3084 ax.fill_between(releasetimes, spectra[:, i - 1], spectra[:, i],
3085 color=facecolors[-1], label='%s' % (i))
3086 # facecolors.append(jet(norm(Nc[i+1])))
3089 # ax.set_yscale('log')
3091 # print 'setting data range'
3092 ax.set_ylim(y_range)
3094 ax.set_ylim((0, spectra.max()))
3097 ax.set_ylabel('%s' % y_label, rotation=90, ha='center', size='small')
3099 plt.title('%s' % (plt_title), size='small')
3100 fig.suptitle('Emissions by %s' % (spectra_type), size='small')
3102 # for xl in ax.get_xticklabels():
3103 # plt.setp(xl,size='x-small')
3105 pos = ax.get_position()
3106 l, b, w, h = getattr(pos, 'bounds', pos)
3107 # polygons = ax.collections
3108 # for i,p in enumerate(polygons): p.set_color(facecolors[i])
3109 # BoundaryNorm, and extended ends to show the "over" and "under"
3111 ax2 = fig.add_axes([l, .08, w, 0.03])
3112 cmap = mpl.colors.ListedColormap(facecolors)
3113 # cmap.set_over('0.25')
3114 # cmap.set_under('0.75')
3116 # If a ListedColormap is used, the length of the bounds array must be
3117 # one greater than the length of the color list. The bounds must be
3118 # monotonically increasing.
3119 bounds = range(numageclasses + 1)
3120 # norm = mpl.colors.BoundaryNorm(bounds, cmap.N)
3121 cb2 = mpl.colorbar.ColorbarBase(ax2,
3124 # # to use 'extend', you must
3125 # # specify two extra boundaries:
3128 # values=range(H.numageclasses+1),
3129 ticks=bounds, # optional
3130 spacing='proportional',
3131 orientation='horizontal')
3132 cb2.set_label(spectra_label, size='x-small')
3134 cb2.set_ticklabels(cbar_labels)
3142 def plot_clusters(H, T, rel_i=0,
3143 ncluster=0, sizescale=10000,
3144 map_region=None, projection=None, coords=None,
3145 FIGURE=None, overlay=False, opacity=0.7,
3146 draw_circles=True, draw_labels=True,
3148 """ plots the clusters
"""
3149 trjs = T['Trajectories']
3150 labels = T['labels']
3152 # Set legend properties:
3153 p_legend = mpl.font_manager.FontProperties(size='8')
3157 # extract only releases of interest
3158 rel += 1 # account for zero indexing
3159 t = trjs[np.where(trjs[:, 0] == rel), :][0]
3161 FIGURE = mp.get_FIGURE(map_region=map_region, projection=projection,
3162 coords=coords, MapPar=MapPar)
3170 print 'problem getting ax,m, or fig.'
3172 # # Remove prior retroplume
3174 if len(ax.artists) > 1:
3176 art_ind = len(ax.artists)
3178 # del previous texts, but not lat/lon labels
3179 del ax.texts[-art_ind:]
3181 'could not delete artists'
3184 # get cluster from data
3185 # set up cluster index, start column of T array
3186 cindx = [16, 20, 24, 28, 32, 36]
3187 if ncluster == 'all':
3190 elif isinstance(ncluster, int):
3191 ncluster = [ncluster]
3194 # clstrindx = 16 + (5*(nc))
3195 clstrindx = cindx[nc]
3196 indx = [1] + range(clstrindx, clstrindx + 4)
3199 ells = _genEllipse(data, m, sizescale=sizescale)
3204 texts.append([xy, e.get_label()])
3205 e.set_clip_box(ax.bbox)
3206 e.set_alpha(opacity)
3214 ax.text(x, y, txt[1])
3216 # ax.legend(ax.artists,(o.get_label() for o in ax.artists), prop=p_legend )
3224 def plot_trajectory_ellipses(H, T, rel_i=0,
3225 ncluster=0, sizescale=10000,
3226 map_region=None, projection=None, coords=None,
3227 FIGURE=None, overlay=False, opacity=0.7,
3228 draw_circles=True, draw_labels=True,
3231 Plots trajectories
from FLEXPART output.
3235 trjs = T['Trajectories']
3236 labels = T['labels']
3238 # Set legend properties:
3239 p_legend = mpl.font_manager.FontProperties(size='8')
3241 # extract only releases of interest according to rel_i
3242 rel = rel_i + 1 # account for zero indexing
3243 t = trjs[np.where(trjs[:, 0] == rel), :][0]
3245 FIGURE = mp.get_FIGURE(map_region=map_region, projection=projection,
3246 coords=coords, MapPar=MapPar)
3253 print 'problem getting ax,m, or fig.'
3255 # # Remove prior retroplume
3257 if len(ax.artists) > 1:
3259 art_ind = len(ax.artists)
3261 # del previous texts, but not lat/lon labels
3262 del ax.texts[-art_ind:]
3264 'could not delete artists'
3267 # get trajectory from data
3268 indx = [1, 2, 3, 4] # return, j,x,y,z
3271 ells = _genEllipse(data, m, sizescale=sizescale)
3276 texts.append([xy, e.get_label()])
3277 e.set_clip_box(ax.bbox)
3278 e.set_alpha(opacity)
3286 ax.text(x, y, txt[1])
3291 FIGURE.circles = ells
3295 def plot_markers(H, lon, lat, zsize=None, zlevel=None,
3297 map_region=None, projection=None, coords=None,
3304 color='blue', edgecolor='none',
3305 zorder=10, alpha=0.85):
3307 """Plot a group of x,y pairs, with optional size
and color information.
3314 You can substitude
"None" for the :
class:`Header` instance
if you haven
't
3319 A :mod:`mapping` "FIGURE" object.
3323 .. tabularcolumns:: |l|L|
3325 ============= =============================================
3326 keyword Description [default]
3327 ============= =============================================
3328 rel_i **required** release index
3329 FIGURE A
"FIGURE" object[
None] (see mapping.py)
3330 projection A projection pre-defined
in :mod:`mapping`
3331 coords A set of lon,lat coords
for autosetting
3332 map map_region (
not really working).
3333 overlay [
True] will overlay the trajectory
3334 on top of another map instance.
3335 draw_circles [
True] will mark the trajectory with
3336 circles. If [
False] nothing
is drawn.
3337 draw_labels [
False] unimplemented
3338 cbar2 [
True] draws the scale bar
as a second axis.
3339 cbar2_title Optional argument to overide the cbar title.
3340 MapPar A Structure of mapping parameters to
pass
3341 to the basemap instance
if desired.
3342 ============= =============================================
3345 Who knows?! Could probably handle the overlaying better.
3348 Just set H to
"None" if you haven
't already created a "Header" instance.
3356 # # Set up the FIGURE
3358 FIGURE = mp.get_FIGURE(map_region=map_region, projection=projection,
3359 coords=coords, MapPar=MapPar)
3360 # #Get fig info and make active
3363 ax = FIGURE.fig.axes[0]
3364 plt.figure(fig.number)
3367 # # prepare the data
3368 cx, cy = m(lon, lat)
3370 zlevel = np.ones(len(lon)) * 10.
3372 zsize = np.ones(len(lon)) * 1
3376 # # clear the previous track
3377 if 'circles' in FIGURE.keys():
3378 del FIGURE['circles']
3379 if overlay is False:
3380 del ax.collections[FIGURE.indices.collections:]
3381 del ax.texts[FIGURE.indices.texts:]
3385 cmap = plt.get_cmap('jet')
3386 circles = m.scatter(cx, cy, zsize, zlevel, cmap=cmap,
3387 marker=marker, edgecolor=edgecolor,
3388 zorder=zorder, alpha=alpha)
3391 FIGURE.circles = circles
3394 # # make the figure active again
3395 plt.figure(fig.number)
3396 # # draw the legend and title
3398 # # does a colorbar already exist?
3399 # # Get the current axes, and properties for use later
3401 pos = ax0.get_position()
3402 l, b, w, h = pos.bounds
3407 cb2.update_normal(circles)
3409 cax2 = plt.axes([l + w + 0.08, b, 0.02, h])
3410 # # using im2, not im (hack to prevent colors from being
3411 # # too compressed at the low end on the colorbar - results
3412 # # from highly nonuniform colormap)
3413 cb2 = fig.colorbar(circles, cax=cax2) # , format='%3.2g') # draw colorbar
3416 # # check if a colorbar legend exists (this will cause
3417 # # problems for subplot routines!)
3418 p_leg = mpl.font_manager.FontProperties(size='6')
3420 cax2.set_title(cbar2_title, fontproperties=p_leg)
3422 cax2.set_title('altitude\n(m)', fontproperties=p_leg)
3425 # cax2 = plt.axes([l+w+0.03, b, 0.025, h-0.2])
3426 # cb2 = fig.colorbar(jnkmap,cax=cax2) # draw colorbar
3428 # # delete the ghost instance
3429 # plt.close(jnkfig.number)
3430 # del jnkax, jnkfig, jnkmap
3433 plt.setp(ax, xticks=[], yticks=[])
3446 def plot_trajectory(H, T, rel_i, FIGURE=None,
3447 map_region=None, projection=None, coords=None,
3450 draw_labels=True, days_back=20,
3454 """Plot the center trajectory of the plume on the map
3461 You can substitude
"None" for the :
class:`Header` instance
if you haven
't
3466 A :mod:`mapping` "FIGURE" object.
3470 .. tabularcolumns:: |l|L|
3472 ============= =============================================
3473 keyword Description [default]
3474 ============= =============================================
3475 rel_i **required** release index
3476 FIGURE A
"FIGURE" object[
None] (see mapping.py)
3477 projection A projection pre-defined
in :mod:`mapping`
3478 coords A set of lon,lat coords
for autosetting
3479 map map_region (
not really working).
3480 overlay [
True] will overlay the trajectory
3481 on top of another map instance.
3482 draw_circles [
True] will mark the trajectory with
3483 circles. If [
False] nothing
is drawn.
3484 draw_labels [
True] will draw the
'day' numbers on the
3486 days_back For how many days back should the labels be
3488 cbar2 [
True] draws the scale bar
as a second axis.
3489 cbar2_title Optional argument to overide the cbar title.
3490 MapPar A Structure of mapping parameters to
pass
3491 to the basemap instance
if desired.
3492 ============= =============================================
3495 Who knows?! Could probably handle the overlaying better.
3498 Just set H to
"None" if you haven
't already created a "Header" instance.
3506 # # Set up the FIGURE
3508 FIGURE = mp.get_FIGURE(map_region=map_region, projection=projection,
3509 coords=coords, MapPar=MapPar)
3510 # #Get fig info and make active
3513 ax = FIGURE.fig.axes[0]
3514 plt.figure(fig.number)
3517 # # prepare the data
3518 trjs = T['Trajectories']
3519 rel = rel_i + 1 # # account for zero indexing
3521 # #extract only releases of interest
3522 t = trjs[np.where(trjs[:, 0] == rel), :][0]
3524 # # Get the data for the days_back we're interested in
3525 day_labels = _gen_daylabels(t[:days_back, 1])
3526 lon = t[:days_back, 2]
3527 lat = t[:days_back, 3]
3528 zlevel = t[:days_back, 4]
3529 zsize = np.ones(len(lon)) * 50
3533 # # clear the previous track
3534 if 'circles' in FIGURE.keys():
3535 del FIGURE['circles']
3536 if overlay is False:
3537 del ax.collections[FIGURE.indices.collections:]
3538 del ax.texts[FIGURE.indices.texts:]
3541 cx, cy = m(lon, lat)
3543 cmap = plt.get_cmap('gist_gray')
3544 circles = m.scatter(cx, cy, zsize, zlevel, cmap=cmap,
3545 marker=marker, edgecolor=None,
3546 zorder=10, alpha=0.85)
3549 FIGURE.circles = circles
3552 # # make the figure active again
3553 plt.figure(fig.number)
3554 # # draw the legend and title
3556 # # does a colorbar already exist?
3557 # # Get the current axes, and properties for use later
3559 pos = ax0.get_position()
3560 l, b, w, h = pos.bounds
3565 cb2.update_normal(circles)
3567 cax2 = plt.axes([l + w + 0.08, b, 0.02, h])
3568 # # using im2, not im (hack to prevent colors from being
3569 # # too compressed at the low end on the colorbar - results
3570 # # from highly nonuniform colormap)
3571 cb2 = fig.colorbar(circles, cax=cax2) # , format='%3.2g') # draw colorbar
3574 # # check if a colorbar legend exists (this will cause
3575 # # problems for subplot routines!)
3577 cax2 = plt.axes([l + w + 0.03, b, 0.025, h - 0.2])
3578 cb2 = fig.colorbar(jnkmap, cax=cax2) # draw colorbar
3580 p_leg = mpl.font_manager.FontProperties(size='6')
3582 cax.set_title(cbar2_title, fontproperties=p_leg)
3584 cax2.set_title('altitude\n(m)', fontproperties=p_leg)
3585 # # delete the ghost instance
3586 # plt.close(jnkfig.number)
3587 # del jnkax, jnkfig, jnkmap
3590 plt.setp(ax, xticks=[], yticks=[])
3593 p_cax = mpl.font_manager.FontProperties(size='10',
3600 for i, (p, x, y) in enumerate(zip(day_labels, cx, cy)):
3601 if x > m.llcrnrx and x < m.urcrnrx:
3602 if y > m.llcrnry and y < m.urcrnry:
3603 ax.text(x, y + y * .02, '{0}'.format(p), va='bottom', ha='left',
3604 fontproperties=p_cax, zorder=11,
3605 color='white', bbox=dict(facecolor='green', alpha=0.5)
3616 def plot_at_level(H, D, level=1, \
3618 map_region=5, projection='lcc', \
3620 datainfo_str=None, log=True,
3621 data_range=None, coords=None, FIGURE=None,
3628 -make units a function of H[
'species']
3631 units = H.output_unit
3636 timestamp = D.timestamp
3637 data = D.slabs[level]
3640 level_desc = 'Footprint'
3642 level_desc = H.outheight[level - 1]
3648 if data_range == None:
3649 data_range = [dmin, dmax]
3651 if H.direction == 'backward' and H.options['readp']:
3652 zp1 = H['zpoint1'][rel_i]
3653 zp2 = H['zpoint2'][rel_i]
3654 if datainfo_str is None:
3655 # need to find a way to set m.a.s.l. or hPa here
3656 datainfo_str = """ Max Value: %.2g %s\n Release Z1: %.2f, Z2: %.2f (%s)\n
""" \
3657 % (dmax, units, zp1, zp2, H.alt_unit)
3658 if plot_title is None:
3660 %s Sensitivity at %s %s: %s\n
3661 Release Start: %s, Release End: %s
""" % \
3662 (ID, level_desc, H['alt_unit'], species, H['releasestart'][rel_i], H['releaseend'][rel_i])
3664 if datainfo_str is None:
3665 datainfo_str = """ Max Value: %.2g %s
""" % (dmax, units)
3666 if plot_title is None:
3667 plot_title = """ %s Sensitivity at %s %s: %s \n %s
""" % (ID, level_desc, H['alt_unit'], species, timestamp)
3669 FIGURE = plot_sensitivity(H, data, \
3670 data_range=data_range, \
3671 rel_i=rel_i, log=log,
3672 map_region=map_region, projection=projection,
3673 units=units, datainfo_str=datainfo_str,
3675 coords=coords, FIGURE=FIGURE, **kwargs)
3678 FIGURE.ax.set_title(plot_title, fontsize=10)
3681 plot_footprint = plot_at_level
3683 def plot_totalcolumn(H, D=None, \
3685 map_region=5, projection='lcc', \
3686 data_range=None, coords=None,
3687 FIGURE=None, overlay=False,
3688 datainfo_str=None, **kwargs):
3695 if 'units' in kwargs:
3696 units = kwargs.pop('units')
3702 timestamp = D.timestamp
3706 if data_range == None:
3709 data_range = [dmin, dmax]
3711 dmin, dmax , = data_range
3715 if H.direction == 'backward':
3717 zp1 = H['zpoint1'][rel_i]
3718 zp2 = H['zpoint2'][rel_i]
3719 if datainfo_str is None:
3720 datainfo_str = """ Max Value: %.2g %s\n Release Z1: %.2f, Z2: %.2f (%s)\n
""" % \
3721 (dmax, units, zp1, zp2, H.alt_unit)
3723 %s Total Column Sensitivity: %s\n
3724 Release Start: %s, Release End: %s
""" % \
3725 (ID, species, H['releasestart'][rel_i], H['releaseend'][rel_i])
3727 if datainfo_str is None:
3728 datainfo_str = """ Max Value: %.2g %s
""" % (dmax, units)
3730 %s Total Column Sensitivity: %s\n %s
""" % (ID, species, timestamp)
3733 FIGURE = plot_sensitivity(H, data, \
3734 data_range=data_range, \
3735 rel_i=rel_i, map_region=map_region, \
3736 projection=projection, units=units, \
3737 datainfo_str=datainfo_str, coords=coords, \
3738 FIGURE=FIGURE, overlay=overlay, **kwargs)
3740 FIGURE.ax.set_title(plot_title, fontsize=10)
3745 def plot_sourcecontribution(H, D,
3748 map_region='POLARCAT',
3749 data_range=None, coords=None,
3756 """ plot_sourcecontribution: a wrapper around :func:`plot_sensitivity`
for
3757 plotting a source contribution field.
3760 This function
is still quite finicky, I
'm working on resolving it.
3767 H = a :class:`Header` instance
from a FLEXPART run
3768 D = a grid instance
from a FLEXDATA dictionary. See :func:`readgridV8`
3771 I am trying to create this so D can be a number of different types
3772 from a 2-d array to the grid instance type.
3776 A
"mapping.py" ``FIGURE`` object.
3780 .. tabularcolumns:: |l|L|
3782 ============= ================================================
3783 keyword Description [default]
3784 ============= ================================================
3785 ID Can be passed to prefix the title.
3786 data_range range of data
for scale bars,
if None will
3787 be taken
from min/max of data
3788 datainfo_str A string
for labeling the scale bar.
3789 cax_title A string to
pass to plot_sensitivity
for colorbar
3790 title (units will be passed
as format argument)
3791 k Release index to plot
from the data array (aka rel_i)
3793 map_region A map_region specified
in mapping.py
3794 projection [deprecated] use pre-defined map_regions.
3795 overlay Force removal of previous figure elements.
3796 FIGURE A FIGURE instance
from mapping module get_FIGURE
3797 ============= ================================================
3800 A lot!! There are some problems here
and it
is sensitive to options.
3803 k
and rel_i are used throughout pflexible, should be more consistent
in future.
3810 elif isinstance(D, list):
3812 elif isinstance(D, np.ndarray):
3817 data = D.slabs[0] # take total column
3819 raise IOError("Unrecognized 'grid' type passed")
3827 species = H.species[s]
3830 timestamp = D.timestamp
3834 timestamp = D.timestamp
3837 raise IOError("Either a release index (rel_i) must be passed, \
3838 or the grid must have a timestamp attribute")
3840 timestamp = H.releasetimes[rel_i]
3843 units = 'emission units'
3848 if data_range == None:
3849 data_range = [dmin, dmax]
3851 if H.direction == 'backward':
3852 zp1 = H['zpoint1'][rel_i]
3853 zp2 = H['zpoint2'][rel_i]
3854 if datainfo_str is None:
3855 datainfo_str = """ Max Value: %.2g %s\n Release Z1: %.2f , Z2: %.2f (%s)\n
""" % \
3856 (dmax, units, zp1, zp2, H.alt_unit)
3858 %s Total Column Sensitivity: %s\n
3859 Release Start: %s, Release End: %s
""" % \
3860 (ID, species, H['releasestart'][rel_i], H['releaseend'][rel_i])
3862 if datainfo_str is None:
3863 datainfo_str = """ Max Value: %.2g %s\n Release Z1: %.2f , Z2: %.2f (%s)\n
""" % \
3864 (dmax, units, zp1, zp2, H.alt_unit)
3866 %s Total Column Sensitivity: %s\n %s
""" % (ID, species, timestamp)
3868 FIGURE = plot_sensitivity(H, data, \
3869 data_range=data_range, \
3870 rel_i=rel_i, map_region=map_region, \
3872 datainfo_str=datainfo_str, coords=coords, \
3874 cax_title=cax_title,
3875 overlay=overlay, **kwargs)
3876 FIGURE.ax.set_title(plot_title, fontsize=10)
3879 def plot_sensitivity(H, data, \
3881 units='ns m^2 / kg', \
3882 datainfo_str=None, \
3885 map_region=None, projection=None,
3886 dropm=None, coords=None,
3894 autofit=False, method='contourf', lsmask=False):
3895 # autofit=False,method='imshow'):
3896 """ plot_sensitivity: core function
for plotting FLEXPART output.
3902 This returns the FIGURE object,
and plots the sensitivity
from the data contained
in the
"D"
3906 H = a :
class:`Header` instance
for a FLEXPART run.
3907 data = a 2d data array containing the sensitivity values to plot, this can be extracted
from a
3908 grid instance (see :func:`readgridV8`
and :func:`get_slabs`)
3911 A
"mapping.py" ``FIGURE`` object.
3915 .. tabularcolumns:: |l|L|
3917 ============= ================================================
3918 keyword Description [default]
3919 ============= ================================================
3920 data_range range of data
for scale bars,
if None will
3921 be taken
from min/max of data
3922 cax_title string to be passed
as colorbar title (units will
3923 be passed to the format argument)
3924 units units
for the scale bar
3925 datainfo_str A string
for labeling the scale bar.
3926 plottitle Title
for the plot.
3927 rel_i Release index to plot
from the data array
3928 map_region A map_region specified
in mapping.py
3929 projection [deprecated] use pre-defined map_regions.
3930 dropm Force creation of a new basemap instance
3931 coords Used with autofit option. An array of lat,lon
3932 values
for the mapping module to autofit a
3933 basemap instance to.
3934 autofit Try to generate map_region automagically (flakey)
3935 overlay Force removal of previous figure elements.
3936 transform For use with imshow method,
if your data
is not
3937 in same coordinates
as projection,
try to transform
3938 the data to the basemap projection.
3939 log Create a logarithmic color scale.
3940 FIGURE A FIGURE instance
from mapping module get_FIGURE
3941 MapPar A Structure of paramters to be passed to the
3942 basemap
class when creating an instance.
3943 method The method to use
for plotting array data. May be
3944 one of: [pcolormesh], imshow,
or contourf
3945 lsmask set to
True to draw a grey landseamask [
False]
3946 ============= ================================================
3949 A lot!! There are some problems here
and it
is sensitive to options.
3950 lsmask =
True seems to only work with certain projections (POLARCAT)
3953 This
is the primary main function
for creating plots of flexpart output.
3954 Most the other routines are simply wrappers to this function, passing
3955 arguments
in with some predefined settings. For more information on the
3956 mechanics of this function, see the mapping.py module
and the matplotlib
3964 methods = ['imshow', 'pcolormesh', 'contourf', 'contour', 'None']
3965 assert method in methods, "method keyword must be one of: %s" % methods
3968 if map_region is None:
3971 MapPar = _gen_MapPar_fromHeader(H)
3973 FIGURE = mp.get_FIGURE(map_region=map_region,
3974 projection=projection, coords=coords,
3975 MapPar=MapPar, FigPar=FigPar)
3977 if FIGURE.m is None:
3978 FIGURE = mp.get_FIGURE(fig=FIGURE.fig, ax=FIGURE.ax, map_region=map_region,
3979 projection=projection, coords=coords,
3980 MapPar=MapPar, FigPar=FigPar)
3982 if overlay is False:
3983 del FIGURE.ax.images[FIGURE.indices.images:]
3984 del FIGURE.ax.collections[FIGURE.indices.collections:]
3985 del FIGURE.ax.lines[FIGURE.indices.lines:]
3992 print 'could not drop m'
3994 # # make tick lables smaller
3995 mpl.rcParams['xtick.labelsize'] = 6
3996 mpl.rcParams['ytick.labelsize'] = 6
4002 # # make the figure current
4003 plt.figure(fig.number)
4006 # # set up transformations for the data array
4007 if method == 'imshow':
4008 if m.projection not in ['cyl', 'merc', 'mill']:
4009 lats = np.arange(H.outlat0, (H.outlat0 + (H.numygrid * H.dyout)), H.dyout)[:-1]
4010 lons = np.arange(H.outlon0, (H.outlon0 + (H.numxgrid * H.dxout)), H.dxout)[:-1]
4011 data = data[:-1, :-1]
4013 lats = np.arange(H.outlat0, (H.outlat0 + (H.numygrid * H.dyout)), H.dyout)
4014 lons = np.arange(H.outlon0, (H.outlon0 + (H.numxgrid * H.dxout)), H.dxout)
4016 # # transform to nx x ny regularly spaced native projection grid
4018 dx = 2.*np.pi * m.rmajor / len(lons)
4019 nx = int((m.xmax - m.xmin) / dx) + 1; ny = int((m.ymax - m.ymin) / dx) + 1
4023 topodat = m.transform_scalar(data, lons, lats, nx, ny)
4027 if method != 'imshow':
4028 # # Check to see if a cyclic wraparound is required
4030 lons = np.arange(H.outlon0, H.outlon0 + (H.dxout * H.numxgrid), H.dxout)
4031 lats = np.arange(H.outlat0, H.outlat0 + (H.dyout * H.numygrid), H.dyout)
4032 # # if required add polar coordinates
4033 if 'npstere' in m.projection:
4035 npole = np.ones(len(lons)).T * data[0, :]
4036 npole = np.reshape(npole, (1, -1))
4037 data = np.vstack((npole, data))
4038 lats = np.hstack((lats, [lats[-1] + H.dyout]))
4040 if m.projection != 'merc':
4041 if lons[-1] - lons[0] < 360.:
4042 topodat, lons = addcyclic(data, lons)
4044 if m.projection == 'merc':
4047 # # get min/max range
4048 if data_range != None:
4049 dat_min = data_range[0]
4050 dat_max = data_range[1]
4052 dat_min, dat_max = data_range(data)
4056 clevs = _log_clevs(dat_min, dat_max)
4059 clevs = [i for i in np.arange(dat_min, dat_max, (dat_max - dat_min) / 100)]
4061 # # draw land sea mask
4062 # m.fillcontinents(zorder=0)
4064 m.drawlsmask(ocean_color='grey', zorder= -10)
4066 # # Plot Release Location if points were read
4067 if H.options['readp']:
4069 releaselocation = (H.xpoint[rel_i], H.ypoint[rel_i])
4070 xpt, ypt = m(releaselocation[0], releaselocation[1])
4071 # # Remove prior location point
4076 location, = m.plot([xpt], [ypt], 'bx', linewidth=6, markersize=20, zorder=1000)
4078 # # Plot the footprint
4080 # # Set up the IMAGE
4081 # # cmapnames = ['jet', 'hsv', 'gist_ncar', 'gist_rainbow', 'cool', 'spectral']
4082 # colmap = plt.get_cmap('jet')
4083 colmap = _gen_flexpart_colormap()
4084 colmap.set_over(color='k', alpha=0.8)
4085 # # Plotting METHODS (pcolormesh now default, imshow is smoother)
4086 # print topodat.max(), topodat.min(), topodat.shape
4087 if method == 'imshow':
4088 im = m.imshow(topodat, cmap=colmap, zorder= -1,
4089 norm=mpl.colors.LogNorm(vmin=clevs[0],
4092 if method == 'pcolormesh':
4093 nx, ny = m(*np.meshgrid(lons, lats))
4094 im = m.pcolormesh(nx, ny, topodat, cmap=colmap,
4095 norm=mpl.colors.LogNorm(vmin=clevs[0],
4097 if method == 'contourf':
4098 # # Trying some fancier scaling
4099 # cnts,bins = np.histogram(topodat,bins=100)
4100 # topodat = np.ma.masked_where(topodat< .05* np.average((0,bins[1])),topodat)
4101 nx, ny = m(*np.meshgrid(lons, lats))
4102 im = m.contourf(nx, ny, topodat, cmap=colmap, levels=clevs,
4103 norm=mpl.colors.LogNorm(vmin=clevs[0],
4106 if method == 'contour':
4107 nx, ny = m(*np.meshgrid(lons, lats))
4108 im = m.contour(nx, ny, topodat, cmap=colmap,
4109 norm=mpl.colors.LogNorm(vmin=clevs[0],
4112 # # Get the current axes, and properties for use later
4113 pos = ax.get_position()
4114 l, b, w, h = pos.bounds
4117 # # Note, with upgrades to matplotlib and basemap had to make some
4118 # # changes here... no more 'ghost' axes
4119 # # does a colorbar already exist?
4123 cb.update_normal(im)
4125 # # make a copy of the image object, change
4126 # # colormap to linear version of the precip colormap.
4128 # im2 = copy.copy(im)
4129 # im2.set_cmap(colmap)
4130 # # create new axis for colorbar.
4135 cax = plt.axes([l, b, w, h])
4136 # # using im2, not im (hack to prevent colors from being
4137 # # too compressed at the low end on the colorbar - results
4138 # # from highly nonuniform colormap)
4139 cb = fig.colorbar(im, cax=cax) # , format='%3.2g') # draw colorbar
4142 # cb.update_normal(im2)
4147 # # set colorbar label and ticks
4149 p_cax = mpl.font_manager.FontProperties(size='6')
4150 clabels = list(clevs[::10]) # #clevs, by 10 steps
4151 clabels.append(clevs[-1]) # # add the last label
4152 # cax.set_yticks(np.linspace(clabels[0],clabels[-1],len(clabels)))
4153 cax.set_yticks(np.linspace(0, 1, len(clabels)))
4154 cax.set_yticklabels(['%3.2g' % cl for cl in clabels])
4155 # fontproperties=p_cax)
4156 if H.direction == 'forward':
4157 cax.set_title('%s' % units,
4158 fontproperties=p_cax)
4161 cax.set_title(cax_title.format(units), fontproperties=p_cax)
4163 cax.set_title('sensitivity\n({0})'.format(units),
4164 fontproperties=p_cax)
4166 # # make the original axes current again
4169 # # write text information block on plot
4170 # # first try to remove prior text by accessing
4171 # # the last text element added to the axes from
4172 # # the prior iteration.
4173 # # This is tricky when using together with plot_clusters...
4174 # # need to figure out how to resolve the indexing
4175 # # of what texts, collections, etc to delete, when iterating.
4177 del ax.texts[FIGURE.indices.texts:]
4178 del ax.artists[FIGURE.indices.artists:]
4182 plt.text(l, b + 1000,
4185 bbox=dict(boxstyle="round",
4196 if plottitle != None:
4197 # plt.title(plottitle,fontproperties=p_cax)
4199 FIGURE.ax.set_title(plottitle, fontsize=10)
4203 def plot_curtain(H, data, \
4208 datainfo_str=None, \
4216 """ plot_sensitivity: core function
for plotting FLEXPART output.
4222 This returns the FIGURE object,
and plots the sensitivity
from the data contained
in the
"D"
4226 H = a :
class:`Header` instance
for a FLEXPART run.
4227 data = a 2d data array containing the sensitivity values to plot, this can be extracted
from a
4228 grid instance (see :func:`readgridV8`
and :func:`get_slabs`)
4231 A
"mapping.py" ``FIGURE`` object.
4235 .. tabularcolumns:: |l|L|
4237 ============= ================================================
4238 keyword Description [default]
4239 ============= ================================================
4240 data_range range of data
for scale bars,
if None will
4241 be taken
from min/max of data
4242 asl [
True] plot the units
in m.a.s.l
4243 cax_title string to be passed
as colorbar title (units will
4244 be passed to the format argument)
4245 units units
for the scale bar
4246 datainfo_str A string
for labeling the scale bar.
4247 plottitle Title
for the plot.
4248 rel_i Release index to plot
from the data array
4249 map_region A map_region specified
in mapping.py
4250 projection [deprecated] use pre-defined map_regions.
4251 dropm Force creation of a new basemap instance
4252 coords Used with autofit option. An array of lat,lon
4253 values
for the mapping module to autofit a
4254 basemap instance to.
4255 autofit Try to generate map_region automagically (flakey)
4256 overlay Force removal of previous figure elements.
4257 transform For use with imshow method,
if your data
is not
4258 in same coordinates
as projection,
try to transform
4259 the data to the basemap projection.
4260 log Create a logarithmic color scale.
4261 FIGURE A FIGURE instance
from mapping module get_FIGURE
4262 MapPar A Structure of paramters to be passed to the
4263 basemap
class when creating an instance.
4264 method The method to use
for plotting array data. May be
4265 one of: [pcolormesh], imshow,
or contourf
4266 lsmask set to
True to draw a grey landseamask [
False]
4267 ============= ================================================
4270 A lot!! There are some problems here
and it
is sensitive to options.
4271 lsmask =
True seems to only work with certain projections (POLARCAT)
4274 This
is the primary main function
for creating plots of flexpart output.
4275 Most the other routines are simply wrappers to this function, passing
4276 arguments
in with some predefined settings. For more information on the
4277 mechanics of this function, see the mapping.py module
and the matplotlib
4285 methods = ['imshow', 'pcolormesh', 'contourf', 'contour', 'None']
4286 assert method in methods, "method keyword must be one of: %s" % methods
4289 FIGURE = Structure()
4292 fig = plt.figure(**figPar)
4293 ax = fig.add_subplot(111)
4302 # # make the figure current
4303 plt.figure(fig.number)
4306 # # get min/max range
4307 if data_range != None:
4308 dat_min = data_range[0]
4309 dat_max = data_range[1]
4311 dat_min, dat_max = data_range(data)
4314 clevs = _log_clevs(dat_min, dat_max)
4316 clevs = [i for i in np.arange(dat_min, dat_max, (dat_max - dat_min) / 100)]
4318 # # Set up the IMAGE
4319 # # cmapnames = ['jet', 'hsv', 'gist_ncar', 'gist_rainbow', 'cool', 'spectral']
4320 colmap = _gen_flexpart_colormap()
4321 colmap.set_over(color='k', alpha=0.8)
4325 if method == 'imshow':
4326 im = plt.imshow(np.flipud(topodat), cmap=colmap, zorder= -1,
4327 norm=mpl.colors.LogNorm(vmin=clevs[0],
4330 if method == 'pcolormesh':
4331 im = plt.pcolormesh(nx, ny, topodat, cmap=colmap,
4332 norm=mpl.colors.LogNorm(vmin=clevs[0],
4334 if method == 'contourf':
4335 im = plt.contourf(nx, ny, topodat, cmap=colmap, levels=clevs,
4336 norm=mpl.colors.LogNorm(vmin=clevs[0],
4339 if method == 'contour':
4340 im = plt.contour(nx, ny, topodat, cmap=colmap,
4341 norm=mpl.colors.LogNorm(vmin=clevs[0],
4344 # # Get the current axes, and properties for use later
4345 pos = ax.get_position()
4346 l, b, w, h = pos.bounds
4349 # # Note, with upgrades to matplotlib and basemap had to make some
4350 # # changes here... no more 'ghost' axes
4351 # # does a colorbar already exist?
4355 cb.update_normal(im)
4357 # # make a copy of the image object, change
4358 # # colormap to linear version of the precip colormap.
4359 # # create new axis for colorbar.
4364 cax = plt.axes([l, b, w, h])
4365 # # using im2, not im (hack to prevent colors from being
4366 # # too compressed at the low end on the colorbar - results
4367 # # from highly nonuniform colormap)
4368 cb = fig.colorbar(im, cax=cax) # , format='%3.2g') # draw colorbar
4373 # # set colorbar label and ticks
4374 p_cax = mpl.font_manager.FontProperties(size='6')
4375 clabels = list(clevs[::10]) # #clevs, by 10 steps
4376 clabels.append(clevs[-1]) # # add the last label
4377 # cax.set_yticks(np.linspace(clabels[0],clabels[-1],len(clabels)))
4378 cax.set_yticks(np.linspace(0, 1, len(clabels)))
4379 cax.set_yticklabels(['%3.2g' % cl for cl in clabels])
4380 # fontproperties=p_cax)
4383 cax.set_title(cax_title.format(units), fontproperties=p_cax)
4385 cax.set_title('sensitivity\n({0})'.format(units),
4386 fontproperties=p_cax)
4388 # # make the original axes current again
4394 if plottitle != None:
4395 FIGURE.ax.set_title(plottitle, fontsize=10)
4400 def plot_METDATA(METDATA, FIGURE, date=None, level=None):
4401 """ plots met data returned
from :module:`pflexible.mapping.get_OpenDAP`
4410 # # make the figure current
4411 plt.figure(fig.number)
4414 datelabels = METDATA['extracted_dates']
4415 slpin = METDATA['prmslmsl']['data'] * .01
4416 uin = METDATA['ugrdprs']['data']
4417 vin = METDATA['vgrdprs']['data']
4418 longitudes = METDATA['longitudes']
4419 latitudes = METDATA['latitudes']
4422 cnt = datelabels.index(date)
4425 # # FOR OPENDAP OVERLAY
4426 slp_ = slpin[cnt, :, :]
4430 # plot wind vectors on projection grid (looks better).
4431 # first, shift grid so it goes from -180 to 180 (instead of 0 to 360
4432 # in longitude). Otherwise, interpolation is messed uplt.
4434 if longitudes[-1] - longitudes[0] < 360.:
4435 longarray = np.array(longitudes)
4436 slp, cyclon = addcyclic(slp_, longarray)
4437 u, cyclon = addcyclic(u_, longarray)
4438 v, cyclon = addcyclic(v_, longarray)
4439 ugrid, newlons = shiftgrid(180., u, cyclon, start=False)
4440 vgrid, newlons = shiftgrid(180., v, cyclon, start=False)
4441 slpgrid, newlons = shiftgrid(180., slp, cyclon, start=False)
4442 # transform vectors to projection grid.
4444 lons, lats = np.meshgrid(newlons, latitudes)
4445 # set desired contour levels.
4446 clevs = np.arange(960, 1061, 5)
4447 # compute native x,y coordinates of grid.
4448 x, y = m(lons, lats)
4449 # pos = FIG.ax.get_position()
4450 # l, b, w, h = pos.bounds
4452 CS = m.contour(x, y, slpgrid, clevs, linewidths=1, colors='k', animated=True)
4453 # CS = FIG.m.contourf(x,y,slpgrid,clevs,cmap=plt.cm.RdBu_r,animated=True,alpha=0.7)
4454 # CS = FIG.m.contour(x,y,slp[0,:,:],clevs,linewidths=0.5,colors='k',animated=True)
4455 # CS = FIG.m.contourf(x,y,slp[0,:,:],clevs,cmap=plt.cm.RdBu_r,animated=True,alpha=0.7)
4456 # plt.clabel(CS,inline=1,fontsize=10)
4457 # plot wind vectors over maplt.
4458 urot, vrot, xx, yy = m.transform_vector(ugrid, vgrid, newlons, latitudes, 51, 51,
4459 returnxy=True, masked=True)
4460 Q = m.quiver(xx, yy, urot, vrot, scale=500)
4461 # # make quiver key.
4462 qk = plt.quiverkey(Q, 0.1, 0.1, 20, '20 m/s', labelpos='W')
4482 ########### HELPER FUNCTIONS ##########
class BinaryFile(object):
    """
    BinaryFile: a class for accessing data to/from large binary files.

    The data is meant to be read/written sequentially from/to a binary
    file.  One can request to read a piece of data with a specific type
    and shape from it.  It also supports the notion of Fortran- and
    C-ordered data, so that the returned data is always well-behaved
    (C-contiguous and aligned).

    This class is seeking capable.

    :Author: Francesc Alted
    :Contact: faltet@pytables.org
    :Created: 2010-03-18
    :Acknowledgment: Funding for the development of this code is provided
        through the Norwegian Research Council VAUUAV project
    """

    # Common scalar types whose conversion can be accelerated via the
    # struct module (NumPy dtype string -> struct format character).
    structtypes = {
        'i1': 'b', 'i2': 'h', 'i4': 'i',
        'f4': 'f', 'f8': 'd',
    }

    def __init__(self, filename, mode="r", order="fortran"):
        """Open `filename` for reading/writing binary data.

        The `mode` can be 'r', 'w' or 'a' for reading (default),
        writing or appending.  The file will be created if it doesn't
        exist when opened for writing or appending; it will be
        truncated when opened for writing.  Add a '+' to the mode to
        allow simultaneous reading and writing.

        `order` specifies whether the file is written in 'fortran' or
        'c' order.
        """
        self.mode = mode + "b"
        # The file handler.  NOTE: the original requested buffering=1
        # (line buffering), which is meaningless for a binary stream and
        # warns on Python 3; default buffering is used instead.
        self.file = open(filename, mode=self.mode)
        if order not in ['fortran', 'c']:
            # Call-style raise works on both Python 2 and Python 3
            # (the original `raise ValueError, "..."` is Py2-only).
            raise ValueError("order should be either 'fortran' or 'c'.")
        # The order of the data in the file ('c' or 'fortran').
        self.order = order

    def read(self, dtype, shape=(1,)):
        """Read an array of `dtype` and `shape` from the current position.

        `shape` must be a tuple of integers, a single int, or () for
        scalars.  A shape of () or (1,) (the default) returns a scalar.
        The current position is advanced past the read data.

        Raises EOFError when fewer bytes than requested remain.
        """
        if not isinstance(dtype, np.dtype):
            dtype = np.dtype(dtype)
        if isinstance(shape, int):
            shape = (shape,)
        if not isinstance(shape, tuple):
            raise ValueError("shape must be a tuple")
        nbytes = dtype.itemsize
        if shape != ():  # value comparison; `is not ()` compares identity
            nbytes *= int(np.array(shape).prod())

        # Fold a multi-dimensional dtype's own shape into the array shape.
        shape = shape + dtype.shape

        # Read the raw bytes from file.
        raw = self.file.read(nbytes)
        if len(raw) < nbytes:
            raise EOFError("Asking for more data than available in file.")

        # Convert the raw bytes into a regular array, or a scalar.
        dts = dtype.base.str[1:]
        if shape in ((), (1,)):
            # Scalar request: unpack via struct when possible (fast path),
            # otherwise retrieve the scalar out of a 0-dim array.
            if dts in self.structtypes:
                return struct.unpack(self.structtypes[dts], raw)[0]
            return np.ndarray(shape=(), buffer=raw, dtype=dtype.base)[()]

        if self.order == "fortran":
            # The file stores Fortran-ordered data: interpret the buffer
            # with the reversed shape, then transpose so the result has
            # the requested shape and is C-contiguous.
            data = np.ndarray(shape=shape[::-1], buffer=raw, dtype=dtype.base)
            return data.transpose().copy()
        data = np.ndarray(shape=shape, buffer=raw, dtype=dtype.base)
        if not data.flags.behaved:
            # Copy just in case the array is not aligned or contiguous.
            data = data.copy()
        return data

    def write(self, arr):
        """Write array `arr` at the current position.

        The array is transposed first when its memory order differs
        from the file's order.  The position is advanced past the
        written data.
        """
        # Transpose the data if we need to.
        if (self.order == "fortran") != bool(arr.flags.fortran):
            arr = arr.transpose().copy()
        # Write the data to file.
        self.file.write(arr.data)

    def seek(self, offset, whence=0):
        """Move to a new file position.

        `offset` is a byte count.  `whence` defaults to 0 (offset from
        start of file, offset should be >= 0); other values are 1 (move
        relative to current position, positive or negative), and 2
        (move relative to end of file, usually negative).
        """
        self.file.seek(offset, whence)

    def tell(self):
        """Return the current file position as an integer."""
        return self.file.tell()

    def flush(self):
        """Flush buffers to file."""
        self.file.flush()

    def close(self):
        """End access to the file."""
        self.file.close()
def closest(num, numlist):
    """Return the index of the entry in `numlist` closest to `num`.

    If `num` is a datetime, `numlist` must be a sequence of datetimes;
    both are converted with matplotlib's date2num before comparison.
    """
    # check if we're using datetimes
    if isinstance(num, datetime.datetime):
        if not isinstance(numlist[0], datetime.datetime):
            # was a bare assert, which vanishes under `python -O`
            raise ValueError("num is date, numlist must be a list of dates")
        num = date2num(num)
        numlist = date2num(numlist)
    # asarray lets plain Python lists be passed too (a raw list would
    # fail on the `numlist - num` broadcast)
    return int((np.abs(np.asarray(numlist) - num)).argmin())
class Structure(dict, object):
    """A 'fancy' dictionary that provides 'MatLab' structure-like
    referencing (attribute access to the dictionary keys).

    .. warning::
        may be replaced with a pure dict in future release.

    """

    def __getattr__(self, attr):
        # Fake a __getstate__ method that returns None
        if attr == "__getstate__":
            return lambda: None
        try:
            return self[attr]
        except KeyError:
            # Raise AttributeError (not KeyError) for missing keys so
            # hasattr(), getattr() defaults and the copy/pickle
            # protocols behave correctly.
            raise AttributeError(attr)

    def __setattr__(self, attr, value):
        # Attributes are stored in the dict itself.
        self[attr] = value

    def __dir__(self):
        """ necessary for Ipython tab-completion """
        return list(self.keys())

    def set_with_dict(self, D):
        """ set attributes with a dict """
        for k in D.keys():
            self.__setattr__(k, D[k])
class Header(Structure):
    """This is the primary starting point for processing FLEXPART output.
    The Header class ( :class:`Structure` ) behaves like a dictionary.
    It contains all the metadata from the simulation run as read from the
    "header" or "header_nest" binary files from the model output.

    This version is using the BinaryFile class rather than FortFlex.

    Usage::

        > H = pf.Header(inputpath)
        > H.keys() #provides a list of keys available

    Returns a dictionary

        H = dictionary like object with all the run metadata. TODO: Fill in keys.

    Arguments

      .. tabularcolumns::  |l|L|

      ==============        ========================================
      keyword               Description [default]
      ==============        ========================================
      path                  path to the run directory
      headerfile            name of the header file if non standard
      readheader_ops        optional dictionary to pass readheader
      ==============        ========================================

    Arguments for readheader_ops

      .. tabularcolumns::  |l|L|

      =============       ========================================
      keyword             Description [default]
      =============       ========================================
      pathname            FLEXPART run output directory
      readp               read release points 0=no, [1]=y
      nested              nested output True or [False]
      version             version of FLEXPART, default = 'V8'
      =============       ========================================

    .. note::
        **This function is in development**

        This function is being developed so that there is no dependence on
        using f2py to compile the FortFlex module. It is working using the
        :class:`BinaryFile`, but is notably slower than :class:`FortFlex`.
        Please report any bugs found.

    """

    def __init__(self, path=None, headerfile=None, version='V8', **readheader_ops):
        # Delegate parsing to the module-level read_header() and copy
        # every key of the returned dict onto this instance.
        # NOTE(review): the enclosing try/except lines are missing from
        # this extract; print_exc() and the message fragment below belong
        # to the exception branch reporting an unreadable header file.
        h = read_header(path, **readheader_ops)
        self.set_with_dict(h)
        traceback.print_exc()
        Could not set header variables.
        Does the `header` file exist in path?\n{0}'''.format(path))

        # NOTE(review): the `def` line of the method below is missing
        # from this extract; the body derives the output-grid coordinate
        # axes from the header origin (outlon0/outlat0) and grid step
        # (dxout/dyout).
        """ Add longitude and latitude attributes using data from header """
        lons = np.arange(self.outlon0, self.outlon0 + (self.dxout * self.numxgrid), self.dxout)
        lats = np.arange(self.outlat0, self.outlat0 + (self.dyout * self.numygrid), self.dyout)
        self.longitude = lons
        self.latitude = lats

    def read_grid(self, **kwargs):
        """ see :func:`read_grid` """
        # Cache the flexpart data grid on the instance.
        self.FD = read_grid(self, **kwargs)

    def fill_backward(self, **kwargs):
        """ see :func:`fill_backward` """
        fill_grids(self, add_attributes=True, **kwargs)

    def add_trajectory(self, **kwargs):
        """ see :func:`read_trajectories` """
        self.trajectory = read_trajectories(self)

    def add_fires(self, **kwargs):
        """ uses the :mod:`emissions` module to read the MODIS hotspot data and
        add it to the header class as a 'fires' attribute.

        **This function is only available within NILU.**
        """
        # NILU-internal dependency; any ImportError handling is not
        # visible in this extract.
        from nilu.pflexpart import emissions as em
        for day in self.available_dates_dt:
            firedata = em.MODIS_hotspot(day)
            daily = firedata.daily
            # NOTE(review): the branch initialising self.fires on the
            # first iteration is missing from this extract.
            if self.fires == None:
            self.fires = np.hstack((self.fires, daily)).view(np.recarray)

    def closest_dates(self, dateval, fmt=None, take_set=False):
        """ given an iterable of datetimes, finds the closest dates.
        if passed a list, assumes it is a list of datetimes

        if take_set=True, then a set of unique values will be returned.
        This can be used with H.read_grid as the value for time_ret to
        return only the grids matching the array of times.
        See (e.g. `extract_curtain`).
        """
        # NOTE(review): the try/except wrapper and the non-take_set
        # return path are missing from this extract.
        vals = [closest(d, self['available_dates_dt']) for d in dateval]
        return list(set(vals))
        print('If dateval is iterable, must contain datetimes')

    def closest_date(self, dateval, fmt=None):
        """ given a datestring or datetime, tries to find the closest date.
        if passed a list, assumes it is a list of datetimes
        """
        # Accept a datestring: guess the format from its length when no
        # explicit fmt is given (8 chars -> %Y%m%d, 14 -> %Y%m%d%H%M%S).
        # NOTE(review): some branch lines (e.g. the 8-char fmt
        # assignment and else clauses) are missing from this extract.
        if isinstance(dateval, str):
            if len(dateval) == 8:
            if len(dateval) == 14:
                fmt = '%Y%m%d%H%M%S'
            raise IOError("no format provided for datestring")
            print("Assuming date format: {0}".format(fmt))
            dateval = datetime.datetime.strptime(dateval, fmt)
        return closest(dateval, self['available_dates_dt'])
def data_range(data, min='median'):
    """
    Return a (dmin, dmax) data range for flexpart data.

    optional keyword min = ['median', 'mean', 'min'] selects how the
    lower bound is derived from the *nonzero* values of `data`.
    """
    data = np.asarray(data)
    dmax = np.nanmax(data)
    if np.isnan(dmax):
        # all-NaN input: fall back to an arbitrary positive ceiling
        dmax = 1e5

    nonzero = data[data.nonzero()]
    if nonzero.size == 0:
        # avoid mean/median of an empty selection (nan + RuntimeWarning)
        dmin = np.nan
    elif min == 'mean':
        dmin = np.mean(nonzero)
    elif min == 'median':
        dmin = np.median(nonzero)
    else:
        dmin = np.nanmin(nonzero)

    if np.isnan(dmin):
        # no usable lower bound: arbitrary small positive floor
        dmin = 1e-5

    return (dmin, dmax)
4872 def _normdatetime(Y, M, D, h, m, s):
4873 return datetime.datetime(Y, M, D) + datetime.timedelta(hours=h, minutes=m, seconds=s)
def _log_clevs(dat_min, dat_max):
    """Create ~100 logarithmically spaced contour levels spanning the
    decades of the data range [dat_min, dat_max].

    NOTE(review): several lines of this function (the if/else branch
    headers and the fallback exponent assignments) are missing from
    this extract, so the visible statements appear detached.
    """
    ## create logorithmic color scale
    ## method uses strings to get the magnitude of variable
    #ss = "%1.2e" % (dat_max)
    #dmx = int('%s%s' % (ss[-3],int(ss[-2:])))
    #dmx=float(len(str(int(np.ceil(dat_max)))))
    #if data_range is None:
    #     dmn = int(np.round(np.log((1e-10*(dat_max-dat_min)))))
    #     ss = "%1.2e" % (1e-10*(dat_max-dat_min))
    #     ss = "%1.2e" % (dat_min)
    #dmn = int('%s%s' % (ss[-3],int(ss[-2:])))

    # upper decade: one above the rounded magnitude of dat_max
    dmx = int(np.round(np.log10(dat_max))) + 1
    print 'dat_max not positive'
    # lower decade taken from dat_min when it is positive
    dmn = int(np.round(np.log10(dat_min)))
    elif dat_min == 0. or np.isnan(dat_min):
    print 'dat_min not positive'

    # # create equally spaced range
    clevs = np.logspace(dmn, dmx, 100)
def add_nilu_logo(fig, xo=10, yo=520, origin='upper'):
    """Stamp the NILU logo onto the figure background.

    `xo`/`yo` are pixel offsets passed through to fig.figimage;
    `origin` selects which image corner anchors the placement.
    """
    logo = image.imread('/xnilu_wrk/flex_wrk/WEB_APS/imgs/logo_nilu.png')
    fig.figimage(logo, xo, yo, origin=origin)
4922 def _cum_spec(inspectra, cum=True):
4923 """ helper function for the plot_spectra routines """
4926 # # Normalize the data so it fills to 100%
4927 # spectra = np.zeros(inspectra.shape)
4928 spectra = (inspectra.transpose() / np.sum(inspectra, axis=1)).transpose()
4929 spectra = np.cumsum(spectra[:, :], axis=1)
4930 # sums = np.sum(inspectra,axis=1)
4931 # for i,elem in enumerate(inspectra):
4932 # spectra[i,:] = elem/sums[i]
4933 elif cum is True and cum != 'norm':
4934 spectra = np.cumsum(inspectra[:, :], axis=1)
4940 def _getfile_lines(infile):
4941 """ returns all lines from a file or file string
4942 reverts to beginning of file."""
4944 if isinstance(infile, str):
4945 return file(infile, '
r').readlines()
4948 return infile.readlines()
4952 Define some default map parameters from the Header File.
4956 MapPar.llcrnrlat = H.outlat0
4957 MapPar.llcrnrlon = H.outlon0
4958 MapPar.urcrnrlat = H.outlat0 + (H.dyout * H.numygrid)
4959 MapPar.urcrnrlon = H.outlon0 + (H.dxout * H.numxgrid - 1)
4960 for k
in MapPar.keys():
4968 Generates color based on altlim = (min,max)
4970 norm = mpl.colors.Normalize(vmin=altlim[0], vmax=altlim[1])
4971 cmap = plt.cm.get_cmap(cmap_id)
4980 Uses H.loutstep to calculate 'days back' for plotting on clusters and trajectories.
4982 if isinstance(P, int):
4985 dt = abs(H.loutstep)
4986 return str(1 + int(abs(P)) / dt)
4988 return [str(1 + int(abs(p)) / dt)
for p
in P]
4993 """ returns max and min for footprint and total column
4994 for a given range of releases [default is all] """
4995 Heightnn = H[
'Heightnn']
4998 seek = range(G.shape[-1])
5003 zpmax = np.max(G[:, :, 0, i] / Heightnn[:, :, 0])
5008 return ((0, fpmax), (0, tfcmax))
5016 Generates ellipses based on input array 'data'. Requires basemap instance, 'm'. NOTE::
5018 data = [itime,x,y,z,[size]]
5023 size function of data[:,4]
5029 * _gen_altitude_color
5032 for labeling/color of ellipses
5037 altlim = (np.min(data[:, 3]), np.max(data[:, 3]))
5041 ell = [(Ellipse(xy=np.array(m(p[1], p[2])),
5042 width=p[4] * sizescale, height=p[4] * sizescale,
5046 np.array(m(p[1], p[2])))
for p
in data]
5049 ell = [(Ellipse(xy=np.array(m(p[1], p[2])),
5050 width=1e4 * sizescale, height=1e4 * sizescale,
5054 np.array(m(p[1], p[2])))
for p
in data]
5060 """ write a string to stdout if 'verbose' flag given """
5061 w = sys.stdout.write
5062 if string[-1] !=
'\n':
5063 string = string +
'\n'
5070 Generate the ast colormap for FLEXPART
5072 from matplotlib.colors
import ListedColormap
5075 colors = np.loadtxt(ctbfile)
5077 print "WARNING: cannot load ctbfile. using colors"
5079 name =
'user_colormap'
5082 colors = [1.0000000e+00, 1.0000000e+00, 1.0000000e+00
5083 , 9.9607843e-01, 9.1372549e-01, 1.0000000e+00
5084 , 9.8431373e-01, 8.2352941e-01, 1.0000000e+00
5085 , 9.6470588e-01, 7.1764706e-01, 1.0000000e+00
5086 , 9.3333333e-01, 6.0000000e-01, 1.0000000e+00
5087 , 8.9019608e-01, 4.4705882e-01, 1.0000000e+00
5088 , 8.3137255e-01, 2.0000000e-01, 1.0000000e+00
5089 , 7.5686275e-01, 0.0000000e+00, 1.0000000e+00
5090 , 6.6274510e-01, 0.0000000e+00, 1.0000000e+00
5091 , 5.4901961e-01, 0.0000000e+00, 1.0000000e+00
5092 , 4.0784314e-01, 0.0000000e+00, 1.0000000e+00
5093 , 2.4705882e-01, 0.0000000e+00, 1.0000000e+00
5094 , 7.4509804e-02, 0.0000000e+00, 1.0000000e+00
5095 , 0.0000000e+00, 2.8235294e-01, 1.0000000e+00
5096 , 0.0000000e+00, 4.8627451e-01, 1.0000000e+00
5097 , 0.0000000e+00, 6.3137255e-01, 1.0000000e+00
5098 , 0.0000000e+00, 7.4509804e-01, 1.0000000e+00
5099 , 0.0000000e+00, 8.4705882e-01, 1.0000000e+00
5100 , 0.0000000e+00, 9.3725490e-01, 1.0000000e+00
5101 , 0.0000000e+00, 1.0000000e+00, 9.7647059e-01
5102 , 0.0000000e+00, 1.0000000e+00, 8.9411765e-01
5103 , 0.0000000e+00, 1.0000000e+00, 8.0000000e-01
5104 , 0.0000000e+00, 1.0000000e+00, 6.9019608e-01
5105 , 0.0000000e+00, 1.0000000e+00, 5.6470588e-01
5106 , 0.0000000e+00, 1.0000000e+00, 4.0000000e-01
5107 , 0.0000000e+00, 1.0000000e+00, 0.0000000e+00
5108 , 3.9607843e-01, 1.0000000e+00, 0.0000000e+00
5109 , 5.6470588e-01, 1.0000000e+00, 0.0000000e+00
5110 , 6.9019608e-01, 1.0000000e+00, 0.0000000e+00
5111 , 7.9607843e-01, 1.0000000e+00, 0.0000000e+00
5112 , 8.9411765e-01, 1.0000000e+00, 0.0000000e+00
5113 , 9.7647059e-01, 1.0000000e+00, 0.0000000e+00
5114 , 1.0000000e+00, 9.4509804e-01, 0.0000000e+00
5115 , 1.0000000e+00, 8.7450980e-01, 0.0000000e+00
5116 , 1.0000000e+00, 7.9215686e-01, 0.0000000e+00
5117 , 1.0000000e+00, 7.0588235e-01, 0.0000000e+00
5118 , 1.0000000e+00, 6.0392157e-01, 0.0000000e+00
5119 , 1.0000000e+00, 4.8235294e-01, 0.0000000e+00
5120 , 1.0000000e+00, 3.1372549e-01, 0.0000000e+00
5121 , 1.0000000e+00, 0.0000000e+00, 1.4901961e-01
5122 , 1.0000000e+00, 0.0000000e+00, 3.3333333e-01
5123 , 1.0000000e+00, 0.0000000e+00, 4.4705882e-01
5124 , 1.0000000e+00, 0.0000000e+00, 5.3725490e-01
5125 , 1.0000000e+00, 0.0000000e+00, 6.1176471e-01
5126 , 9.7647059e-01, 0.0000000e+00, 6.6666667e-01
5127 , 8.9411765e-01, 0.0000000e+00, 6.6666667e-01
5128 , 7.9607843e-01, 0.0000000e+00, 6.3921569e-01
5129 , 6.9019608e-01, 0.0000000e+00, 5.9215686e-01
5130 , 5.6470588e-01, 0.0000000e+00, 5.0980392e-01
5131 , 3.9607843e-01, 0.0000000e+00, 3.8039216e-01]
5132 colors = np.reshape(colors, (-1, 3))
5133 name =
'flexpart_cmap'
5134 cmap = ListedColormap(colors, name)
5143 global options, args
5144 here = os.path.curdir
5145 dataDir = options.pathname
5150 plt = mp.plot_map(D[0], H)
5152 plt.title(plot_name, fontsize=10)
5153 plt.savefig(os.path.join(here, plot_name +
'.png'))
if __name__ == '__main__':
    # Command-line entry point (Python 2 syntax: print statements and
    # `except E, e`).  NOTE(review): several lines are missing from this
    # extract — the `try:` opener, the call into main(), sys.exit, the
    # closing paren of the first mpl.rc(...) call and the except-clause
    # bodies — so some statements below appear detached.
    start_time = time.time()
    parser = optparse.OptionParser(
        formatter=optparse.TitledHelpFormatter(),
        # the module docstring doubles as the usage message
        usage=globals()['__doc__'],
        version='$Id: py.tpl 327 2008-10-05 23:38:32Z root $')
    parser.add_option ('-v', '--verbose', action='store_true',
                       default=False, help='verbose output')
    parser.add_option ('-T', '--latex', action='store_true',
                       default=False, help='latex usage flag')
    parser.add_option ('-d', '--directory', dest='pathname',
                       default=False, help='pathname of directory containg FLEXPART Output')
    (options, args) = parser.parse_args()
    if options.verbose: print time.asctime()
    # configure matplotlib: Helvetica sans-serif, optional LaTeX text
    mpl.rc('font', **{'family': 'sans-serif',
                      'sans-serif': ['Helvetica']}
    mpl.rc('text', usetex=True)
    if exit_code is None:
    if options.verbose: print time.asctime()
    if options.verbose: print 'TOTAL TIME IN MINUTES:',
    if options.verbose: print (time.time() - start_time) / 60.0
    except KeyboardInterrupt, e:
    except SystemExit, e:
    except Exception, e:
        print 'ERROR, UNEXPECTED EXCEPTION'
        traceback.print_exc()
def _plot_dropm
FLEXPART Plotting Functions ########.
def main
Below for use from command line ##########################################.
def curtain_for_flightrack
def plot_sourcecontribution
def _readgrid_noFF
Grid Reading Routines ###############.
def _gen_MapPar_fromHeader
def _gen_flexpart_colormap
def read_command
Functions for reading FLEXPART output #####.
HELPER FUNCTIONS ##########.