Remove scripts except for makeitso and preproc shell scripts

This commit is contained in:
wiltbemj
2024-08-13 07:38:34 -06:00
parent dbd8f2b04b
commit 4ab705b756
57 changed files with 0 additions and 12354 deletions

View File

@@ -1,233 +0,0 @@
#! /usr/bin/env python
import numpy as np
import os,sys,glob
from scipy import interpolate
import time
import h5py
import matplotlib.pyplot as plt
import kaipy.gamhelio.wsa2gamera.params as params
import kaipy.gamhelio.lib.wsa as wsa
import kaipy.gamera.gamGrids as gg
#----------- PARSE ARGUMENTS ---------#
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('ConfigFileName',help='The name of the configuration file to use',default='startup.config')
args = parser.parse_args()
#----------- PARSE ARGUMENTS ---------#
# Read params from config file
prm = params.params(args.ConfigFileName)
Ng=prm.NO2
gamma = prm.gamma
# constants
mp = 1.67e-24
kb = 1.38e-16
#grid parameters
tMin = prm.tMin
tMax = prm.tMax
Rin = prm.Rin
Rout = prm.Rout
Ni = prm.Ni
Nj = prm.Nj
Nk = prm.Nk
#normalization in IH
B0 = prm.B0
n0 = prm.n0
V0 = B0/np.sqrt(4*np.pi*mp*n0)
T0 = B0*B0/4/np.pi/n0/kb #in K p = nkT
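#V0 is the Alfven speed and T0 the temperature implied by the code units B0 [G] and n0 [cm^-3]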
print ("inner helio normalization")
print (B0, n0, V0, T0)
#normalization in OH
B0OH = 5.e-5 # [Gs] 5 nT = 5.e-5 Gs
n0OH = 10 # [cm-3]
V0OH = B0OH/np.sqrt(4*np.pi*mp*n0OH) #Alfven speed at 1 AU 34.5 [km/s]
T0OH = B0OH*B0OH/4/np.pi/n0OH/kb #in K p = nkT
print ("outer helio units")
print (B0OH, n0OH, V0OH, T0OH)
#----------GENERATE HELIO GRID------
print("Generating gamera-Ohelio grid ...")
X3,Y3,Z3 = gg.GenKSph(Ni=Ni,Nj=Nj,Nk=Nk,Rin=Rin,Rout=Rout,tMin=tMin,tMax=tMax)
#to generate a non-uniform grid for a GL CME (finer in the region 0.1-0.3 AU)
#X3,Y3,Z3 = gg.GenKSphNonUGL(Ni=Ni,Nj=Nj,Nk=Nk,Rin=Rin,Rout=Rout,tMin=tMin,tMax=tMax)
gg.WriteGrid(X3,Y3,Z3,fOut=os.path.join(prm.GridDir,prm.gameraGridFile))
print("Gamera-Ohelio grid ready!")
#----------GENERATE HELIO GRID------
############### READ GAMERA solution at 1 AU #####################
f = h5py.File(prm.wsaFile,'r')
#the latest Step saved in inner helio solution wsa.h5
step = 'Step#2'
f[step].attrs.keys()
Nphi, Nth, Nr = np.shape(f[step]['Vx'])
#coordinates of cell centers
x = 0.125*(f['X'][:-1,:-1,:-1]+f['X'][:-1,:-1,1:]+f['X'][:-1,1:,:-1]+f['X'][:-1,1:,1:]+
f['X'][1:,:-1,:-1]+f['X'][1:,:-1,1:]+f['X'][1:,1:,:-1]+f['X'][1:,1:,1:])
y = 0.125*(f['Y'][:-1,:-1,:-1]+f['Y'][:-1,:-1,1:]+f['Y'][:-1,1:,:-1]+f['Y'][:-1,1:,1:]+
f['Y'][1:,:-1,:-1]+f['Y'][1:,:-1,1:]+f['Y'][1:,1:,:-1]+f['Y'][1:,1:,1:])
z = 0.125*(f['Z'][:-1,:-1,:-1]+f['Z'][:-1,:-1,1:]+f['Z'][:-1,1:,:-1]+f['Z'][:-1,1:,1:]+
f['Z'][1:,:-1,:-1]+f['Z'][1:,:-1,1:]+f['Z'][1:,1:,:-1]+f['Z'][1:,1:,1:])
r = np.sqrt(x[:]**2 + y[:]**2 + z[:]**2)
rxy = np.sqrt(x[:]**2 + y[:]**2)
#phi and theta of centers
theta = np.arccos(z/r)
phi = np.arctan2(y[:], x[:])
phi[phi<0]=phi[phi<0]+2*np.pi
theta_wsa_c = theta[0,:,0]
phi_wsa_c = phi[:,0,0]
print ("grid dimensions from 1 AU input solution")
print (theta_wsa_c.shape, phi_wsa_c.shape)
#these are normalized according to inner helio normalization
Vr = (f[step]['Vx'][:]*x[:] + f[step]['Vy'][:]*y[:] + f[step]['Vz'][:]*z[:])/r[:]
#Br = f[step]['Br'][:]
Br = (f[step]['Bx'][:]*x[:] + f[step]['By'][:]*y[:] + f[step]['Bz'][:]*z[:])/r[:]
Rho = f[step]['D'][:]
T = f[step]['P'][:]/f[step]['D'][:]
#take solution from the last cell in i, already normalized
#use wsa variable names for now
bi_wsa = Br[:,:,Nr-1]
v_wsa = Vr[:,:,Nr-1]
n_wsa = Rho[:,:,Nr-1]
T_wsa = T[:,:,Nr-1]
print ("1AU arrays")
print (bi_wsa.shape, v_wsa.shape, n_wsa.shape, T_wsa.shape)
#renormalize inner helio solution
bi_wsa = bi_wsa * B0/B0OH
n_wsa = n_wsa * n0/n0OH
v_wsa = v_wsa * V0/V0OH
T_wsa = T_wsa * T0
# keep temperature in K
#######Interpolate to GAMERA grid###########
# GAMERA GRID
with h5py.File(os.path.join(prm.GridDir,prm.gameraGridFile),'r') as f:
x=f['X'][:]
y=f['Y'][:]
z=f['Z'][:]
xc = 0.125*(x[:-1,:-1,:-1]+x[:-1,1:,:-1]+x[:-1,:-1,1:]+x[:-1,1:,1:]
+x[1:,:-1,:-1]+x[1:,1:,:-1]+x[1:,:-1,1:]+x[1:,1:,1:])
yc = 0.125*(y[:-1,:-1,:-1]+y[:-1,1:,:-1]+y[:-1,:-1,1:]+y[:-1,1:,1:]
+y[1:,:-1,:-1]+y[1:,1:,:-1]+y[1:,:-1,1:]+y[1:,1:,1:])
zc = 0.125*(z[:-1,:-1,:-1]+z[:-1,1:,:-1]+z[:-1,:-1,1:]+z[:-1,1:,1:]
+z[1:,:-1,:-1]+z[1:,1:,:-1]+z[1:,:-1,1:]+z[1:,1:,1:])
# remove the ghosts from angular dimensions
R0 = np.sqrt(x[0,0,Ng]**2+y[0,0,Ng]**2+z[0,0,Ng]**2) # radius of the inner boundary
#cell corners including ghost cells
r = np.sqrt(x[:]**2+y[:]**2+z[:]**2)
#corners of physical cells
P = np.arctan2(y[Ng:-Ng,Ng:-Ng,:],x[Ng:-Ng,Ng:-Ng,:])
P[P<0] += 2*np.pi
#P = P % (2*np.pi) # sometimes the very first point may be a very
# small negative number, which the above call sets
# to 2*pi. This takes care of it.
T = np.arccos(z[Ng:-Ng,Ng:-Ng,:]/r[Ng:-Ng,Ng:-Ng,:])
#grid (corners) for output into innerbc.h5
P_out = P[:,:,0:Ng+1]
T_out = T[:,:,0:Ng+1]
R_out = r[Ng:-Ng,Ng:-Ng,0:Ng+1]
print ("shapes of output phi and theta ", P_out.shape, T_out.shape, R_out.shape)
#centers
Rc = np.sqrt(xc[Ng:-Ng,Ng:-Ng,:]**2+yc[Ng:-Ng,Ng:-Ng,:]**2+zc[Ng:-Ng,Ng:-Ng,:]**2)
Pc = np.arctan2(yc[Ng:-Ng,Ng:-Ng,:],xc[Ng:-Ng,Ng:-Ng,:])
Pc[Pc<0] += 2*np.pi
Tc = np.arccos(zc[Ng:-Ng,Ng:-Ng,:]/Rc)
# this is fast and better than griddata in that it nicely extrapolates boundaries:
fbi = interpolate.RectBivariateSpline(phi_wsa_c,theta_wsa_c,bi_wsa,kx=1,ky=1)
br = fbi(Pc[:,0,0],Tc[0,:,0])
############### SMOOTHING #####################
if prm.gaussSmoothWidth != 0:
from astropy.convolution import convolve,Gaussian2DKernel
gauss = Gaussian2DKernel(x_stddev=prm.gaussSmoothWidth) #x_stddev is the current astropy keyword for the kernel width
br = convolve(br,gauss,boundary='extend')
############### INTERPOLATE AND DUMP #####################
fv = interpolate.RectBivariateSpline(phi_wsa_c,theta_wsa_c,v_wsa,kx=1,ky=1)
vr = fv(Pc[:,0,0],Tc[0,:,0])
f = interpolate.RectBivariateSpline(phi_wsa_c,theta_wsa_c,n_wsa,kx=1,ky=1)
rho = f(Pc[:,0,0],Tc[0,:,0])
fT = interpolate.RectBivariateSpline(phi_wsa_c,theta_wsa_c,T_wsa,kx=1,ky=1)
temp = fT(Pc[:,0,0],Tc[0,:,0])
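#re-interpolate the cell-centered br and vr from cell-center angles (Pc,Tc) onto the corner phi locations (P) to get k-face values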
fbi = interpolate.RectBivariateSpline(Pc[:,0,0],Tc[0,:,0],br,kx=1,ky=1)
fv = interpolate.RectBivariateSpline(Pc[:,0,0],Tc[0,:,0],vr,kx=1,ky=1)
br_kface = fbi(P[:,0,0],Tc[0,:,0])
vr_kface = fv (P[:,0,0],Tc[0,:,0])
# Scale inside ghost region
(vr,vr_kface,rho,temp,br,br_kface) = [np.dstack(prm.NO2*[var]) for var in (vr,vr_kface,rho,temp,br,br_kface)]
rho *= (R0/Rc[0,0,:Ng])**2
br *= (R0/Rc[0,0,:Ng])**2
br_kface *= (R0/Rc[0,0,:Ng])**2
#FIX:
#For now we use wsa.h5, which does not have mjd inside,
#so it is hardcoded using the WSA fits value + 200*4637/60/60/24 [number of days]
mjd_c = 58005.83415
print ("writing out innerbc.h5...")
with h5py.File(os.path.join(prm.IbcDir,prm.gameraIbcFile),'w') as hf:
hf.attrs["MJD"] = mjd_c
hf.create_dataset("vr",data=vr)
hf.create_dataset("vr_kface",data=vr_kface)
hf.create_dataset("rho",data=rho)
hf.create_dataset("temp",data=temp)
hf.create_dataset("br",data=br)
hf.create_dataset("br_kface",data=br_kface)
hf.close()
#innerbc to plot in Paraview
with h5py.File(os.path.join(prm.IbcDir,'innerbc_OHighostgr.h5'),'w') as hfg:
hfg.create_dataset("X", data=P_out)
hfg.create_dataset("Y", data=T_out)
hfg.create_dataset("Z", data=R_out)
grname = "Step#0"
grp = hfg.create_group(grname)
grp.attrs.create("MJD", mjd_c)
#grp.attrs.create("time", time_sec)
grp.create_dataset("vr",data=vr)
grp.create_dataset("vr_kface",data=vr_kface)
grp.create_dataset("rho",data=rho)
grp.create_dataset("temp",data=temp)
grp.create_dataset("br",data=br)
grp.create_dataset("br_kface",data=br_kface)
hfg.close()

View File

@@ -1,343 +0,0 @@
#!/usr/bin/env python
"""Compare gamhelio results with spacecraft data.
Compare heliospheric model results from gamhelio with data measured by
spacecraft.
Note that the terms "ephemeris" and "trajectory" are used interchangeably.
Authors
-------
Eric Winter (eric.winter@jhuapl.edu)
Mike Wiltberger
"""
# Include standard modules.
import argparse
from argparse import RawTextHelpFormatter
import os
# Include 3rd-party modules.
import numpy as np
import spacepy.datamodel as dm
# Include project modules.
import kaipy.kaiH5 as kaiH5
import kaipy.kaiViz as kv
import kaipy.kaiTools as kaiTools
import kaipy.satcomp.scutils as scutils
# Program constants.
# Program description string.
description = """Extract satellite trajectory and observations for various
heliospheric spacecraft from CDAWeb. Produce comparisons between the
observations and corresponding gamhelio model results."""
# Default path to sctrack.x
default_cmd = os.path.join(
os.environ["KAIJUHOME"], "build", "bin", "sctrack.x"
)
# Default time interval for ephemeris data returned from CDAWeb (seconds).
default_deltaT = 3600.00 # 1 hour
# Default run ID string.
default_runid = "wsa"
# Default number of segments to process.
default_numSeg = 1
# Default path to model results directory.
default_path = os.getcwd()
# Path to file of heliospheric spacecraft metadata.
helio_sc_metadata_path = os.path.join(
os.environ["KAIJUHOME"], "kaipy", "satcomp", "sc_helio.json"
)
def create_command_line_parser():
"""Create the command-line argument parser.
Create the command-line parser.
Parameters
----------
None
Returns
-------
parser : argparse.ArgumentParser
Parser for command-line arguments.
"""
parser = argparse.ArgumentParser(
description=description, formatter_class=RawTextHelpFormatter
)
parser.add_argument(
"-c", "--cmd", type=str, metavar="command", default=default_cmd,
help="Full path to sctrack.x command (default: %(default)s)."
)
parser.add_argument(
"-d", "--debug", action="store_true", default=False,
help="Print debugging output (default: %(default)s)."
)
parser.add_argument(
"--deltaT", type=float, metavar="deltaT", default=default_deltaT,
help="Time interval (seconds) for ephemeris points from CDAWeb " +
"(default: %(default)s)."
)
parser.add_argument(
"-i", "--id", type=str, metavar="runid", default=default_runid,
help="ID string of the run (default: %(default)s)"
)
parser.add_argument(
"-k", "--keep", action="store_true", default=False,
help="Keep intermediate files (default: %(default)s).")
parser.add_argument(
"-n", "--numSeg", type=int, metavar="number_segments",default=default_numSeg,
help="Number of segments to simultaneously process (default: %(default)s).")
parser.add_argument(
"-p", "--path", type=str, metavar="path", default=default_path,
help="Path to directory containing gamhelio results (default: %(default)s)"
)
parser.add_argument(
"-s", "--satId", type=str, metavar="satellite_id", default=None,
help="Name of Satellite to compare"
)
parser.add_argument(
"-v", "--verbose", action="store_true", default=False,
help="Print verbose output (default: %(default)s)."
)
return parser
if __name__ == "__main__":
"""Begin main program."""
# Set up the command-line parser.
parser = create_command_line_parser()
# Parse the command-line arguments.
args = parser.parse_args()
cmd_sctrack = args.cmd
debug = args.debug
cdaweb_data_interval = args.deltaT
gh_run_id = args.id
keep = args.keep
num_segments = args.numSeg
gh_result_directory = args.path
sc_to_compare = args.satId
verbose = args.verbose
if debug:
print("args = %s" % args)
# Read the list of available spacecraft from the JSON configuration file.
if verbose:
print("Reading heliosphere spacecraft metadata from %s." %
helio_sc_metadata_path)
sc_metadata = scutils.getScIds(helio_sc_metadata_path, doPrint=verbose)
if debug:
print("sc_metadata = %s" % sc_metadata)
# Compute the path to the gamhelio output files to examine.
if verbose:
print("Looking for gamhelio results for run %s in %s." %
(gh_run_id, gh_result_directory))
(gh_result_path, is_MPI, Ri, Rj, Rk) = kaiTools.getRunInfo(
gh_result_directory, gh_run_id
)
if debug:
print("gh_result_path = %s" % gh_result_path)
print("is_MPI = %s" % is_MPI)
print("(Ri, Rj, Rk) = (%s, %s, %s)" % (Ri, Rj, Rk))
# Determine the number of timesteps in the gamhelio output files, and get
# a list of the timestep indices.
if verbose:
print("Counting timesteps in %s." % gh_result_path)
(gh_n_timesteps, gh_timestep_indices) = kaiH5.cntSteps(gh_result_path)
if verbose:
print(" Found %s timesteps." % gh_n_timesteps)
if debug:
print("gh_n_timesteps = %s" % gh_n_timesteps)
print("gh_timestep_indices = %s" % gh_timestep_indices)
# Pull the timestep information from the gamhelio output files. This
# fetches the "MJD" attribute from each of the top-level groups called
# "Step#[\d]+". These MJD values are floats, and are assumed to be in
# increasing order.
if verbose:
print("Reading timestep times from %s." % gh_result_path)
gh_timestep_MJDs = kaiH5.getTs(
gh_result_path, gh_timestep_indices, aID="MJD"
)
if debug:
print("gh_timestep_MJDs = %s" % gh_timestep_MJDs)
# Get the MJDc value for use in computing the gamhelio frame. This value
# was specified in the WSA file of initial conditions.
if verbose:
print("Reading MJDc value to use for constructing gamhelio coordinate frame.")
gh_MJDc = scutils.read_MJDc(gh_result_path)
if verbose:
print(" Found MJDc = %s" % gh_MJDc)
if debug:
print("gh_MJDc = %s" % gh_MJDc)
# Now get the "time" attribute from each step. For gamhelio, these elapsed
# times are in seconds since the start of the simulation, and are assumed
# to be in increasing order
if verbose:
print("Reading timestep elapsed seconds from %s." % gh_result_path)
gh_timestep_elapsed_seconds = kaiH5.getTs(
gh_result_path, gh_timestep_indices, aID="time"
)
if debug:
print("gh_timestep_elapsed_seconds = %s" %
gh_timestep_elapsed_seconds)
# Convert the MJD values to Universal Time datetime objects.
if verbose:
print("Converting timestep MJDs to UTC datetimes.")
gh_timestep_UT = kaiTools.MJD2UT(gh_timestep_MJDs)
if debug:
print("gh_timestep_UT = %s" % gh_timestep_UT)
# Use the first (positive) elapsed time as the initial time.
# N.B. THIS SKIPS THE FIRST SIMULATION STEP SINCE IT TYPICALLY HAS
# gh_timestep_elapsed_seconds[0] = 0.
# Use the last time as the last MJD.
if verbose:
print("Identifying first timestep with elapsed seconds > 0.")
gh_first_step_used = np.argwhere(gh_timestep_elapsed_seconds > 0.0)[0][0]
if verbose:
print(" Found first non-zero elapsed seconds at timestep %s." %
gh_first_step_used)
gh_t0 = gh_timestep_UT[gh_first_step_used]
gh_t1 = gh_timestep_UT[-1]
if verbose:
print("Using %s as the starting datetime for data comparison." % gh_t0)
print("Using %s as the ending datetime for data comparison." % gh_t1)
if debug:
print("gh_t0 = %s" % gh_t0)
print("gh_t1 = %s" % gh_t1)
# Construct the string versions of the first and last times.
datestr_start = gh_t0.strftime("%Y-%m-%dT%H:%M:%SZ")
datestr_end = gh_t1.strftime("%Y-%m-%dT%H:%M:%SZ")
if debug:
print("datestr_start = %s" % datestr_start)
print("datestr_end = %s" % datestr_end)
# Save the (float) MJD of the first used step.
gh_first_MJD = gh_timestep_MJDs[gh_first_step_used]
if verbose:
print("Using %s as the starting MJD for data comparison." % gh_first_MJD)
if debug:
print("gh_first_MJD = %s" % gh_first_MJD)
# Save the elapsed simulation time (seconds) of the first used step.
gh_first_elapsed_seconds = gh_timestep_elapsed_seconds[gh_first_step_used]
if verbose:
print("Using %s as the starting elapsed seconds for data comparison." %
gh_first_elapsed_seconds)
if debug:
print("gh_first_elapsed_seconds = %s" % gh_first_elapsed_seconds)
# Determine the list of IDs of spacecraft to fetch data from. If no
# spacecraft were specified on the command line, use all spacecraft
# listed in the heliosphere spacecraft metadata file.
if sc_to_compare:
sc_to_compare = sc_to_compare.split(",")
else:
sc_to_compare = list(sc_metadata.keys())
if verbose:
print("Comparing gamhelio results to data measured by:")
for sc_id in sc_to_compare:
print(" %s" % sc_id)
if debug:
print("sc_to_compare = %s" % sc_to_compare)
# Fetch the ephemeris and observed data for each spacecraft in the list.
for sc_id in sc_to_compare:
# Fetch the ephemeris and observed data for the current spacecraft.
if verbose:
print("Fetching ephemeris and instrument data for %s from CDAWeb." % sc_id)
sc_data = scutils.get_helio_cdaweb_data(
sc_id, sc_metadata[sc_id],
datestr_start, datestr_end, cdaweb_data_interval,
verbose=verbose, debug=debug
)
if debug:
print("sc_data = %s" % sc_data)
# If no data was found for the spacecraft, go to the next.
if sc_data is None:
print("No data found for %s." % sc_id)
continue
# At this point, the data object contains only the raw spacecraft
# ephemeris, and the raw spacecraft-measured data, as returned from
# CDAWeb.
# Ingest the CDAWeb data. This means convert the data as originally
# retrieved from CDAWeb to the units and coordinate systems used by
# gamhelio so that comparisons may be made. This will create new
# variables in the data object, with names analogous to the
# corresponding gamhelio variables.
if verbose:
print("Converting CDAWeb data for %s into gamhelio format." % sc_id)
scutils.ingest_helio_cdaweb_data(
sc_id, sc_data, sc_metadata[sc_id], gh_MJDc,
verbose=verbose, debug=debug
)
# Use the spacecraft trajectory to interpolate simulated observations
# from the gamhelio output.
if verbose:
print("Interpolating gamhelio results along %s trajectory." % sc_id)
scutils.interpolate_gamhelio_results_to_trajectory(
sc_data, sc_metadata[sc_id], sc_id,
gh_first_MJD, gh_first_elapsed_seconds,
gh_result_directory, gh_run_id,
cmd_sctrack, num_segments, keep, gh_MJDc
)
# Save the important measured and simulated data as a CDF file for
# comparison.
cdf_path = os.path.join(gh_result_directory, sc_id + ".comp.cdf")
if verbose:
print("Saving comparison data to %s." % cdf_path)
if os.path.exists(cdf_path):
if verbose:
print("Deleting existing CDF comparison file %s" % cdf_path)
os.system("rm %s" % cdf_path)
if verbose:
print("Creating CDF file %s with %s and GAMERA data" % (cdf_path, sc_id))
dm.toCDF(cdf_path, sc_data)
# Compute the errors in the simulated data relative to the measured
# data and save in a file.
error_file_path = os.path.join(gh_result_directory, sc_id + "-error.txt")
if verbose:
print("Computing gamhelio-%s errors and saving to %s." %
(sc_id, error_file_path))
scutils.write_helio_error_report(error_file_path, sc_id, sc_data)
# Make a comparison plot of the measured and simulated results.
plot_file_path = os.path.join(gh_result_directory, sc_id + ".png")
if verbose:
print("Saving gamhelio-%s comparison plots in %s." %
(sc_id, plot_file_path))
kv.helioCompPlot_new(plot_file_path, sc_id, sc_data)
# Plot the spacecraft trajectory.
# plot_file_path = os.path.join(gh_result_directory, sc_id + "-traj.png")
# if verbose:
# print("Plotting %s trajectory in spacecraft frame to %s." % (sc_id, plot_file_path))
# kv.helioTrajPlot(plot_file_path, sc_id, sc_data)

View File

@@ -1,113 +0,0 @@
#!/usr/bin/env python
#standard python
import sys
import os
import datetime
import subprocess
from xml.dom import minidom
import argparse
from argparse import RawTextHelpFormatter
#import spacepy and cdasws
import spacepy
from spacepy.coordinates import Coords
from spacepy.time import Ticktock
import spacepy.datamodel as dm
from cdasws import CdasWs
#import numpy and matplotlib
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
#Kaipy and related
from astropy.time import Time
import h5py
import kaipy.kaiH5 as kaiH5
import kaipy.kaiViz as kv
import kaipy.kaiTools as kaiTools
import kaipy.kaijson as kj
import kaipy.satcomp.scutils as scutils
if __name__ == '__main__':
MainS = """Extracts information from satellite trajectory for various
spacecraft. Space craft data is pulled from CDAWeb. Output CDF files
contain data pulled from CDAWeb along with data extracted from GAMERA.
Image files of satellite comparisons are also produced.
"""
parser = argparse.ArgumentParser(description=MainS,
formatter_class=RawTextHelpFormatter)
parser.add_argument('-id',type=str,metavar='runid',default='msphere',
help='RunID of data (default: %(default)s)')
parser.add_argument('-path',type=str,metavar='path',default='.',
help='Path to directory containing REMIX files (default: %(default)s)')
parser.add_argument('-cmd',type=str,metavar='command',default=None,
help='Full path to sctrack.x command')
parser.add_argument('-numSeg',type=int,metavar='Number of segments',
default=1,help='Number of segments to simultaneously process')
parser.add_argument('--keep',action='store_true',
help='Keep intermediate files')
args = parser.parse_args()
fdir = args.path
ftag = args.id
cmd = args.cmd
keep = args.keep
numSegments = args.numSeg
if fdir == '.':
fdir = os.getcwd()
if cmd is None:
my_env = os.environ.copy()
cmd = os.path.join(os.getenv('KAIJUDIR'),'build','bin','sctrack.x')
if not (os.path.isfile(cmd) and os.access(cmd, os.X_OK)):
print(cmd,'either not found or not executable')
sys.exit()
satCmd = os.path.join(os.getenv('KAIJUDIR'),'scripts',
'msphSatComp.py')
if not (os.path.isfile(satCmd) and os.access(satCmd, os.X_OK)):
print(satCmd,'either not found or not executable')
sys.exit()
scIds = scutils.getScIds()
process = []
logs = []
errs = []
# Launch one satellite-comparison subprocess per spacecraft, logging stdout/stderr to per-spacecraft files
for scId in scIds:
logfile = open(os.path.join(fdir,'log.'+scId+'.txt'),'w')
errfile = open(os.path.join(fdir,'err.'+scId+'.txt'),'w')
cmdList = [satCmd,'-id',ftag,'-path',fdir,'-cmd',cmd,'-satId',scId]
if keep:
cmdList.append('--keep')
if numSegments != 1:
cmdList.append('-numSeg')
cmdList.append(str(numSegments))
print(cmdList)
process.append(subprocess.Popen(cmdList,
stdout=logfile, stderr=errfile))
logs.append(logfile)
errs.append(errfile)
for proc in process:
proc.communicate()
for log in logs:
log.close()
for err in errs:
err.close()
if not keep:
#the 'rm log.*.txt' wildcard is not expanded without a shell, so remove the per-spacecraft logs explicitly
for scId in scIds:
os.remove(os.path.join(fdir,'log.'+scId+'.txt'))
os.remove(os.path.join(fdir,'err.'+scId+'.txt'))
print('All done!')

View File

@@ -1,181 +0,0 @@
#!/usr/bin/env python
#standard python
import sys
import os
import glob
import argparse
import subprocess
import time
from argparse import RawTextHelpFormatter
#numpy
import numpy as np
#Kaipy and related
from astropy.time import Time
import h5py
import kaipy.kaiH5 as kaiH5
import kaipy.kaiViz as kv
import kaipy.kaiTools as kaiTools
import kaipy.kaijson as kj
import kaipy.satcomp.scutils as scutils
import spacepy.datamodel as dm
if __name__ == '__main__':
MainS = """Checks the run for satellites with data avilable and then
sets up PBS job scripts for running interpolation in parallel."""
parser = argparse.ArgumentParser(description=MainS,
formatter_class=RawTextHelpFormatter)
parser.add_argument('-id',type=str,metavar='runid',default='msphere',
help='RunID of data (default: %(default)s)')
parser.add_argument('-path',type=str,metavar='path',default='.',
help='Path to directory containing files (default: %(default)s)')
parser.add_argument('-satId',type=str,metavar='Satellite Id',
default=None,help='Name of Satellite to compare')
parser.add_argument('-cmd',type=str,metavar='command',default=None,
help='Full path to sctrack.x command')
parser.add_argument('-acct',type=str,metavar='acct',default=None,
help='Account number to use in pbs script')
parser.add_argument('--keep',action='store_true',
help='Keep intermediate files')
args = parser.parse_args()
fdir = args.path
ftag = args.id
cmd = args.cmd
scRequested = args.satId
keep = args.keep
acct = args.acct
if acct is None:
acct = os.getenv('DAV_PROJECT')
if acct is None:
print('Must input a valid account to charge, use -acct flag')
sys.exit()
if fdir == '.':
fdir = os.getcwd()
if cmd is None:
my_env = os.environ.copy()
cmd = os.path.join(os.getenv('KAIJUDIR'),'build','bin','sctrack.x')
if not (os.path.isfile(cmd) and os.access(cmd, os.X_OK)):
print(cmd,'either not found or not executable')
sys.exit()
scIds = scutils.getScIds()
#print(cmddir)
#print('Extracting GAMERA data along',scId, 'trajectory')
#cmd = "/Users/wiltbemj/src/kaiju/build/bin/sctrack.x"
#Pull the timestep information from the magnetosphere files
(fname,isMPI,Ri,Rj,Rk) = kaiTools.getRunInfo(fdir,ftag)
nsteps,sIds=kaiH5.cntSteps(fname)
gamMJD=kaiH5.getTs(fname,sIds,aID='MJD')
gamT=kaiH5.getTs(fname,sIds,aID='time')
gamUT = kaiTools.MJD2UT(gamMJD)
## Deal with startup by using the first non-zero time as the initial
## MJD
loc = np.argwhere(gamT > 0.0)[0][0]
t0 = gamUT[loc]
t1 = gamUT[-1]
deltaT = np.round(gamT[loc+1]-gamT[loc])
mjdFileStart = gamMJD[loc]
secFileStart = gamT[loc]
numPer = 3
numSegments=int(np.floor(((t1-t0).total_seconds()/deltaT)/numPer))
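#split the comparison interval into PBS array segments of roughly numPer output steps each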
if scRequested is None:
scToDo = scIds
else:
scToDo = []
scToDo.append(scRequested)
for scId in scToDo:
print('Getting spacecraft data for', scId)
status,data = scutils.getSatData(scIds[scId],
t0.strftime("%Y-%m-%dT%H:%M:%SZ"),
t1.strftime("%Y-%m-%dT%H:%M:%SZ"),deltaT)
if status['http']['status_code'] != 200 or data is None:
print('No data available for', scId)
else:
(scTrackName,xmlFileName,toRe) = scutils.createInputFiles(data,
scIds[scId],scId,mjdFileStart,secFileStart,
fdir,ftag,numSegments)
lockCmdName = os.path.join(fdir,'makeLock.sh')
fLock = open(lockCmdName,'w')
fLock.write("#!/bin/bash\ntouch $1")
fLock.close()
os.chmod(lockCmdName,0o775)
pbsName = scutils.genSatCompPbsScript(scId,fdir,cmd,account=acct)
pbsCmd = ['qsub','-J','1-'+str(numSegments),pbsName]
results = subprocess.run(pbsCmd,capture_output=True)
jobId = results.stdout.decode('utf-8').split('.')
pbsLockName = scutils.genSatCompLockScript(scId,fdir,account=acct)
pbsLockCmd = ['qsub','-W','depend=afterok:'+jobId[0],pbsLockName]
results = subprocess.run(pbsLockCmd,capture_output=True)
lockId = results.stdout.decode('utf-8').split('.')
lockFileName = os.path.join(fdir,scId+'.lock')
success = False
for check in np.arange(5):
if os.path.exists(lockFileName):
success = True
break
else:
time.sleep(60)
if success:
h5name = scutils.mergeFiles(scId,fdir,numSegments)
scutils.addGAMERA(data,scIds[scId],h5name)
scutils.matchUnits(data)
cdfname = os.path.join(fdir, scId + '.comp.cdf')
if os.path.exists(cdfname):
print('Deleting %s' % cdfname)
os.system('rm %s' % cdfname)
print('Creating CDF file',cdfname,'with',scId,'and GAMERA data')
dm.toCDF(cdfname,data)
plotname = os.path.join(fdir,scId+'.png')
print('Plotting results to',plotname)
kv.compPlot(plotname,scId,data)
print('Computing Errors')
errname = os.path.join(fdir,scId+'-error.txt')
scutils.errorReport(errname,scId,data)
plotname = os.path.join(fdir,scId+'-traj.png')
print('Plotting trajectory to',plotname)
kv.trajPlot(plotname,scId,data,toRe)
if not keep:
h5parts = glob.glob(os.path.join(fdir,scId)+'.*.sc.h5')
for file in h5parts:
os.remove(file)
jobParts = glob.glob(os.path.join(fdir,scId)+
'.o'+jobId[0].split('[')[0]+'.*')
for file in jobParts:
os.remove(file)
pbsScripts = glob.glob(os.path.join(fdir,scId)+'*pbs')
for file in pbsScripts:
os.remove(file)
lockLog = os.path.join(fdir,scId+'.o'+lockId[0])
os.remove(lockLog)
lockFile = os.path.join(fdir,scId+'.lock')
os.remove(lockFile)
lockFile = os.path.join(fdir,scId+'.xml')
os.remove(lockFile)
else:
failedJobs = []
pbsLogFiles = glob.glob(os.path.join(fdir,scId)+
'.o'+jobId[0].split('[')[0]+'.*')
for logFile in pbsLogFiles:
with open(logFile) as f:
for line in f:
if 'job killed' in line:
failedJobs.append(logFile)
print('The following jobs failed:')
print(*failedJobs,sep='\n')

View File

@@ -1,120 +0,0 @@
#!/usr/bin/env python
#standard python
import sys
import os
import argparse
from argparse import RawTextHelpFormatter
#numpy
import numpy as np
#Kaipy and related
from astropy.time import Time
import h5py
import kaipy.kaiH5 as kaiH5
import kaipy.kaiViz as kv
import kaipy.kaiTools as kaiTools
import kaipy.kaijson as kj
import kaipy.satcomp.scutils as scutils
import spacepy.datamodel as dm
if __name__ == '__main__':
MainS = """Extracts information from satellite trajectory for various
spacecraft. Space craft data is pulled from CDAWeb. Output CDF files
contain data pulled from CDAWeb along with data extracted from GAMERA.
Image files of satellite comparisons are also produced.
"""
parser = argparse.ArgumentParser(description=MainS,
formatter_class=RawTextHelpFormatter)
parser.add_argument('-id',type=str,metavar='runid',default='msphere',
help='RunID of data (default: %(default)s)')
parser.add_argument('-path',type=str,metavar='path',default='.',
help='Path to directory containing REMIX files (default: %(default)s)')
parser.add_argument('-cmd',type=str,metavar='command',default=None,
help='Full path to sctrack.x command')
parser.add_argument('-satId',type=str,metavar='Satellite Id',
default=None,help='Name of Satellite to compare')
parser.add_argument('-numSeg',type=int,metavar='Number of segments',
default=1,help='Number of segments to simultaneously process')
parser.add_argument('--keep',action='store_true',
help='Keep intermediate files')
args = parser.parse_args()
fdir = args.path
ftag = args.id
cmd = args.cmd
scRequested = args.satId
numSegments = args.numSeg
keep = args.keep
if fdir == '.':
fdir = os.getcwd()
if cmd is None:
my_env = os.environ.copy()
cmd = os.path.join(os.getenv('KAIJUDIR'),'build','bin','sctrack.x')
if not (os.path.isfile(cmd) and os.access(cmd, os.X_OK)):
print(cmd,'either not found or not executable')
sys.exit()
scIds = scutils.getScIds()
#print(cmddir)
#print('Extracting GAMERA data along',scId, 'trajectory')
#cmd = "/Users/wiltbemj/src/kaiju/build/bin/sctrack.x"
#Pull the timestep information from the magnetosphere files
(fname,isMPI,Ri,Rj,Rk) = kaiTools.getRunInfo(fdir,ftag)
nsteps,sIds=kaiH5.cntSteps(fname)
gamMJD=kaiH5.getTs(fname,sIds,aID='MJD')
gamT=kaiH5.getTs(fname,sIds,aID='time')
gamUT = kaiTools.MJD2UT(gamMJD)
## Deal with startup by using the first non-zero time as the initial
## MJD
loc = np.argwhere(gamT > 0.0)[0][0]
t0 = gamUT[loc]
t1 = gamUT[-1]
deltaT = np.round(gamT[loc+1]-gamT[loc])
mjdFileStart = gamMJD[loc]
secFileStart = gamT[loc]
#scToDo =['CLUSTER1']
if scRequested is None:
scToDo = scIds
else:
scToDo = []
scToDo.append(scRequested)
for scId in scToDo:
print('Getting spacecraft data for', scId)
status,data = scutils.getSatData(scIds[scId],
t0.strftime("%Y-%m-%dT%H:%M:%SZ"),
t1.strftime("%Y-%m-%dT%H:%M:%SZ"),deltaT)
if status['http']['status_code'] != 200 or data is None:
print('No data available for', scId)
else:
print('Extracting GAMERA data')
toRe = scutils.extractGAMERA(data,scIds[scId],scId,
mjdFileStart,secFileStart,fdir,
ftag,cmd,numSegments,keep)
scutils.matchUnits(data)
cdfname = os.path.join(fdir, scId + '.comp.cdf')
if os.path.exists(cdfname):
print('Deleting %s' % cdfname)
os.system('rm %s' % cdfname)
print('Creating CDF file',cdfname,'with',scId,'and GAMERA data')
dm.toCDF(cdfname,data)
plotname = os.path.join(fdir,scId+'.png')
print('Plotting results to',plotname)
kv.compPlot(plotname,scId,data)
print('Computing Errors')
errname = os.path.join(fdir,scId+'-error.txt')
scutils.errorReport(errname,scId,data)
plotname = os.path.join(fdir,scId+'-traj.png')
print('Plotting trajectory to',plotname)
kv.trajPlot(plotname,scId,data,toRe)

View File

@@ -1,317 +0,0 @@
#!/usr/bin/env python
#Pulls RBSP data from cdasws and compares to model output
import argparse
from argparse import RawTextHelpFormatter
#import spacepy and cdasws
import spacepy
from spacepy.coordinates import Coords
from spacepy.time import Ticktock
import spacepy.datamodel as dm
import spacepy.plot as splot
from cdasws import CdasWs
#import standard python stuff
import json
import datetime
import os
import errno
#import numpy and matplotlib
import numpy as np
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.colors as colors
import matplotlib.gridspec as gridspec
from matplotlib import dates
from astropy.time import Time
import scipy.interpolate
#Kaipy and related
import kaipy.kaiViz as kv
import kaipy.kaiTools as kaiTools
import kaipy.chimp.kCyl as kc
import kaipy.kaiH5 as kh5
# import kaipy.gamera.gampp as gampp
TINY = 1.0e-8
jScl = 1 # do we need a factor of 4*np.pi somewhere??
def TWin(Ik,Sig):
import scipy.ndimage as ndimage
IkS = ndimage.gaussian_filter(Ik,sigma=Sig,mode='nearest')
return IkS
#Given a sin^n(alpha) pitch-angle dependence of the intensity, calculate the fraction accessible at the local field strength
def getJScl(Bmag,Beq,en=2.0):
Na = 360
A = np.linspace(0,0.5*np.pi,Na)
da = A[1]-A[0]
Ia = np.sin(A)**en
Ic = np.zeros(Ia.shape)
Nt = len(Bmag)
I0 = Ia.sum()
It = np.zeros(Nt)
for n in range(Nt):
if (Bmag[n]<TINY):
It[n] = 0.0
else:
Ac = np.arcsin(np.sqrt(Beq[n]/Bmag[n]))
Ic[:] = Ia[:]
Icut = (A>Ac)
Ic[Icut] = 0.0
It[n] = Ic.sum()/I0
return It
if __name__ == "__main__":
#Defaults
fdir = os.getcwd()
ftag = "eRBpsd.ps.h5"
trtag = "RBSP-A_MAGNETOMETER_1SEC-GSM_EMFISIS-L3.sc.h5"
sctag = 'A'
Ks = 1000
R0 = 2.0
MainS = """Pulls RBSP data and compares it to synthetic RBSP intensity measurementsfrom the simulation,
calculated from extracted RBSP trajectory and PSD files.
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-d',type=str,metavar="directory",default=fdir,help="Directory to read from (default: %(default)s)")
parser.add_argument('-id',type=str,metavar="runid",default=ftag,help="RunID of model data (default: %(default)s)")
parser.add_argument('-k' ,type=float,metavar="energy" ,default=Ks,help="Energy to compare in 1D profile [keV] (default: %(default)s)")
parser.add_argument('-trj',type=str,metavar="scTrk",default=trtag,help="spacecraft trajectory file (default: %(default)s)")
parser.add_argument('-sc',type=str,metavar="spacecraft",default=sctag,help="RBSP s/c to plot 'A' or 'B' (default: %(default)s)")
parser.add_argument('-r0',type=float,metavar="R0",default=R0,help="radius w/in which to mask observations (default: %(default)s)")
#Finalize parsing
args = parser.parse_args()
fdir = args.d
ftag = args.id
trtag = args.trj
sctag = args.sc
Ks = args.k
R0 = args.r0
#======
#Init data
fIn = fdir+'/'+ftag
kh5.CheckOrDie(fIn)
fTrk = fdir+'/'+trtag
kh5.CheckOrDie(fTrk)
isotfmt = '%Y-%m-%dT%H:%M:%S.%f'
utfmt='%H:%M \n%Y-%m-%d'
#======
#Get track data
#======
xeq = kh5.PullVar(fTrk,"xeq")
yeq = kh5.PullVar(fTrk,"yeq")
scT = kh5.PullVar(fTrk,"T")
scMJDs = kh5.PullVar(fTrk,"MJDs")
#Get information for mirror ratio
Bx = kh5.PullVar(fTrk,"Bx")
By = kh5.PullVar(fTrk,"By")
Bz = kh5.PullVar(fTrk,"Bz")
Bmag = np.sqrt(Bx**2.0 + By**2.0 + Bz**2.0)
Beq = kh5.PullVar(fTrk,"Beq")
J0 = getJScl(Bmag,Beq)
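#J0: fraction of the equatorial sin^2(alpha) intensity accessible at the local field strength along the track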
Req = np.sqrt(xeq**2.0 + yeq**2.0)
Peq = np.arctan2(yeq,xeq)
Peq[Peq<0] = Peq[Peq<0] + 2*np.pi
Nsc = len(scT)
scTi = np.linspace(scT.min(),scT.max(),Nsc+1)
scMJDi = np.linspace(scMJDs.min(),scMJDs.max(),Nsc+1)
UTi = Time(scMJDi,format='mjd').isot
uti = [datetime.datetime.strptime(UTi[n],isotfmt) for n in range(len(UTi))]
UT = Time(scMJDs,format='mjd').isot
ut = [datetime.datetime.strptime(UT[n],isotfmt) for n in range(len(UT))]
#======
#Get RBSP data
#======
t0r = uti[0].strftime("%Y-%m-%dT%H:%M:%SZ")
t1r = uti[-1].strftime("%Y-%m-%dT%H:%M:%SZ")
if (sctag == 'A' or sctag == 'B'):
scStr = "RBSP%s_REL03_ECT-MAGEIS-L2"%(sctag)
ephStr = "RBSP-%s_MAGNETOMETER_1SEC-GSM_EMFISIS-L3"%(sctag)
else:
print("Unable to find s/c: %s, please set to 'A' or 'B';"%(sctag))
print("!!Exiting!!")
quit()
cdas = CdasWs()
Qstr = 'FESA'
status,data = cdas.get_data(scStr,[Qstr],t0r,t1r)
stat,ephdata = cdas.get_data(ephStr,['coordinates'],t0r,t1r)
#Get radius (ephemeris data at different cadence)
Rscr = np.sqrt( ephdata['coordinates'][:,0]**2.0 + ephdata['coordinates'][:,1]**2.0 + ephdata['coordinates'][:,2]**2.0)
Rscr = Rscr/6380.0
Tscr = ephdata['Epoch']
TINY = 1.0e-8
Estr = 'FEDU_Energy'
T = data['Epoch']
E = np.asarray(data[Estr])
Q = np.transpose(np.asarray(data[Qstr]))
Q[Q<0] = TINY
E = E[0,:]
k0 = (E>0).argmax()
#Chop out leading non-positive energy channels
E = E[k0:]
Q = Q[k0:,:]
for n in range(len(T)):
n0 = np.abs(T[n]-Tscr).argmin() #Closest time in ephemeris
R = Rscr[n0]
if (R<=R0):
Q[:,n] = 0.0
# get indices for 1D comparison
k0sc = (np.abs(E-Ks)).argmin()
Q0 = Q[k0sc,:]
Q0[Q0<=TINY] = np.nan # dont plot bad data
#======
#Get PSD data
#======
xx,yy,Ki,Kc = kc.getGrid(ftag)
Nk = len(Kc)
J = np.zeros((Nsc,Nk))
Nig,Njg = xx.shape
Ni = Nig-1; Nj = Njg-1
Nk = len(Kc)
Nt,sIDs = kh5.cntSteps(ftag)
psT = kh5.getTs(ftag,sIDs,aID="time")
psMJDs = kh5.getTs(ftag,sIDs,aID="MJD")
Ri = xx[:,0] #L interfaces
Rc = 0.5*(Ri[1:] + Ri[0:-1])
Pi = np.linspace(0,2*np.pi,Nj+1)
Pc = 0.5*(Pi[1:] + Pi[0:-1])
s0 = sIDs.min()
sE = sIDs.max()
psTmin = psT.min()
psTmax = psT.max()
psMJDMax = psMJDs.max()
psMJDMin = psMJDs.min()
psSteps = np.arange(s0,sE)
Nt = len(psSteps)
Jrpt = np.zeros((Ni,Nj,Nk,Nt))
for n in range(Nt):
nStp = psSteps[n]
Jrpt[:,:,:,n] = kh5.PullVar(ftag,"jPSD",nStp)
Lmin = Ri[0]
Lmax = Ri[-1]
dphi = 2*np.pi/Nj
aD = 0.25*0.25
aF = 0.50*0.25
aC = 0.50*0.50
#Create interpolator
Dims = (Rc,Pc,Kc,psMJDs[0:-1])
JrptkI = scipy.interpolate.RegularGridInterpolator(Dims,Jrpt,method='linear',bounds_error=False,fill_value=0.0)
for n in range(Nsc):
isBad = (Req[n]<=Lmin) or (Req[n]>=Lmax) or (scMJDs[n]<psMJDMin) or (scMJDs[n]>psMJDMax)
if (isBad):
J[n,:] = 0.0
else:
#TODO: Should be interpolating here
i0 = (Ri>=Req[n]).argmax() - 1
j0 = int(np.floor(Peq[n]//dphi))
t0 = np.abs(psMJDs-scMJDs[n]).argmin()
for i in range(Nk):
J[n,i] = JrptkI( [Req[n],Peq[n],Kc[i],scMJDs[n]] )
J[n,:] = J0[n]*J[n,:]
k0f = (np.abs(Kc-Ks)).argmin()
fntSz = "small"
cmapName = 'gnuplot2'
vMin=1
vMax=1.0e6
norm = kv.genNorm(vMin,vMax,doLog=True)
fig = plt.figure(figsize=(12,8.5))
gs = gridspec.GridSpec(2,2,width_ratios=[10,0.25],wspace=0.025,hspace=0.05)
Ax10 = fig.add_subplot(gs[0,0])
Ax11 = fig.add_subplot(gs[1,0])
AxC1 = fig.add_subplot(gs[:,1])
xBds = [uti[0], uti[-1]]
jBds = [np.nanmin(Q0),2*np.nanmax(Q0)] #nan-aware since bad data was set to NaN above
kBds = [min(E),max(E)]
kv.genCB(AxC1,norm,r'Intensity [$cm^{-2} sr^{-1} s^{-1} keV^{-1}$]',cM=cmapName,doVert=True)
Ax10.pcolormesh(T,E,Q,norm=norm,cmap=cmapName)
Ax10.set_facecolor('w')
Ax10.xaxis.set_ticklabels([])
Ax10.set_ylabel('RBSP-%s\nEnergy [keV]'%(sctag),fontsize=fntSz,family="monospace")
Ax10.set_yscale('log')
Ax10.set_ylim(kBds)
Ax10.set_xlim(xBds)
Ax11.pcolormesh(uti,Ki,jScl*J[:,:].T,norm=norm,cmap=cmapName)
Ax11.set_facecolor('w')
xfmt = dates.DateFormatter(utfmt)
Ax11.xaxis.set_major_formatter(xfmt)
Ax11.set_ylabel('Simulation \nEnergy [keV]',fontsize=fntSz,family="monospace")
Ax11.set_yscale('log')
Ax11.set_ylim(kBds)
Ax11.set_xlim(xBds)
kv.SetAxDate(Ax11)
kv.savePic('JktComp.png')
fig2 = plt.figure(figsize=(15,5))
gs2 = gridspec.GridSpec(1,1,hspace=0.125)
Ax20 = fig2.add_subplot(gs2[0,0])
kStr = "%d [keV]"%(Ks)
cLW = 2
clrs = ['black','#d95f02','#1b9e77','#7570b3'] # colorblind-safe palette from colorbrewer2.org
Ax20.plot(T,Q0,color=clrs[0],label='RBSP-%s'%(sctag),linewidth=cLW)
Ax20.plot(ut,jScl*J[:,k0f],color=clrs[1],label="Simulation",linewidth=cLW)
Ax20.set_facecolor('w')
xfmt = dates.DateFormatter(utfmt)
Ax20.xaxis.set_major_formatter(xfmt)
Ax20.set_ylabel('Intensity\n'+ r'[$cm^{-2} sr^{-1} s^{-1} keV^{-1}$]',fontsize=fntSz,family="monospace")
Ax20.set_yscale('log')
Ax20.set_ylim(jBds)
Ax20.set_xlim(xBds)
Ax20.legend(loc='upper right',fontsize=fntSz)
Ax20.text(0.01,0.95,kStr,color="black",fontsize=fntSz,transform=Ax20.transAxes,family="monospace")
kv.SetAxDate(Ax20)
kv.savePic('1dJComp.png')

View File

@@ -1,271 +0,0 @@
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from matplotlib import dates
from astropy.time import Time
import datetime
import os, sys
import progressbar
import argparse
from argparse import RawTextHelpFormatter
import numpy as np
import kaipy.kaiH5 as kh5
import kaipy.kaiViz as kv
import kaipy.kaiTools as kT
import kaipy.satcomp.scutils as scutils
import kaipy.satcomp.scRCM as scRCM
def fmtTKL(AxTKL):
AxTKL.set_yscale('log')
AxTKL.tick_params(axis='y', pad=-1)
AxTKL.yaxis.labelpad = -1
AxTKL.tick_params(axis='x', pad=-1)
AxTKL.xaxis.labelpad = -1
AxTKL.title.set_text(str(ut_tkl[0]))
if __name__=="__main__":
fdir = os.getcwd()
ftag = "msphere"
trtag = "RBSP-%s_MAGNETOMETER_1SEC-GSM_EMFISIS-L3.sc.h5" # Spacecraft trajectory and values along track
vTag = "H-PAP_RBSPICE"
tStart = -1
tEnd = -1
tStride = 10
vidOut = "vid_rcm-rbsp-comp"
tklV_choices = ['press', 'odf']
jdir = "jstore"
pIDs = ['cdas','times','track','tkl']
MainS = """Pulls RBSP data and compares it to synthetic RBSP intensity measurementsfrom the simulation,
calculated from extracted RBSP trajectory and PSD files.
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-d',type=str,metavar="directory",default=fdir,help="Directory to read from (default: %(default)s)")
parser.add_argument('-id',type=str,metavar="runid",default=ftag,help="RunID of model data (default: %(default)s)")
parser.add_argument('-trj',type=str,metavar="scTrk",default=trtag,help="spacecraft trajectory file (default: %(default)s)")
parser.add_argument('-jdir',type=str,metavar="directory",default=jdir,help="Directory to store and find json files (default: %(default)s)")
parser.add_argument('-scId', type=str,choices=scRCM.supportedSats[:],default="RBSPB",help="Sat id (default: %(default)s)")
parser.add_argument('-v', type=str,choices=scRCM.supportedDsets[:],default="Hydrogen_omniflux_RBSPICE",help="Dataset (default: %(default)s)")
parser.add_argument('-tStart',type=int, default=tStart,help="Starting time step for L vs. E calculation (default: First step in RCM data)")
parser.add_argument('-tEnd',type=int, default=tEnd,help="Ending time step for L vs. E calculation (default: Last step in RCM data)")
parser.add_argument('-tStride',type=int, default=tStride,help="Time step stride for L vs. E calculation (default: %(default)s)")
parser.add_argument('-plotTag',type=str,default="",help="Extra tag for each plot")
parser.add_argument('-vidOut',type=str,default=vidOut,help="Output directory (relative to -d) for video images (default: %(default)s)")
parser.add_argument('-tklv', type=str,choices=tklV_choices,default=tklV_choices[0],help="Variable to plot in Lvsk panel (default: %(default)s)")
parser.add_argument('-forceCalc',type=str,metavar=pIDs,default="",help="Comma-separated process IDs to force recalculation for given process")
parser.add_argument('-HOPESPICE',action='store_true',help="Combine HOPE and RBSPICE hydrogen data")
#Finalize parsing
args = parser.parse_args()
fdir = args.d
ftag = args.id
trtag = args.trj
jdir = args.jdir
scId = args.scId
vTag = args.v
tStart = args.tStart
tEnd = args.tEnd
tStride = args.tStride
plotTag = args.plotTag
vidOut = args.vidOut
tklv = args.tklv
fcStr = args.forceCalc
doHopeSpice = args.HOPESPICE
#Extract RBSP identifier (A or B)
scTag = trtag.split('RBSP')[1][:2]
if '-' in scTag:
scTag = trtag.split('-')[1][0]
else:
scTag = scTag[0]
if fcStr == "":
fcIDs = []
elif "all" in fcStr:
fcIDs = pIDs
else:
fcIDs = fcStr.split(',')
#======
#Init data
#======
mhdrcm_fname = os.path.join(fdir, ftag+'.mhdrcm.h5')
rcm_fname = os.path.join(fdir, ftag+'.rcm.h5' )
trackf5 = os.path.join(fdir, trtag )
kh5.CheckOrDie(mhdrcm_fname)
kh5.CheckOrDie(rcm_fname)
kh5.CheckOrDie(trackf5)
kh5.CheckDirOrMake(jdir)
#Sort out start and end times
rcmNt, rcmSIDs = kh5.cntSteps(rcm_fname)
rcmSIDs = np.sort(rcmSIDs)
if tStart == -1:
tStart = rcmSIDs[0]
elif tStart < rcmSIDs[0]:
print("Step '{}' not in RCM times, starting from {}".format(tStart, rcmSIDs[0]))
tStart = rcmSIDs[0]
if tEnd == -1:
tEnd = rcmSIDs[-1]
elif tEnd > rcmSIDs[-1]:
print("Step '{}' not in RCM times, ending at {}".format(tEnd, rcmSIDs[-1]))
tEnd = rcmSIDs[-1]
#Get start and end times from sctrack file
isotfmt = '%Y-%m-%dT%H:%M:%S.%f'
scMJDs = kh5.PullVar(trackf5, 'MJDs')
#ut = scutils.mjd_to_ut(scMJDs)
ut = kT.MJD2UT(scMJDs)
t0r = ut[0].strftime("%Y-%m-%dT%H:%M:%SZ")
t1r = ut[-1].strftime("%Y-%m-%dT%H:%M:%SZ")
print("Retrieving RBSPICE dataset")
if doHopeSpice:
ephData, scData = scRCM.getSCOmniDiffFlux(scId, "Hydrogen_omniflux_RBSPICE", t0r, t1r, jdir=jdir,forceCalc=('cdas' in fcIDs))
hope_ephData, hope_scData = scRCM.getSCOmniDiffFlux(scId, "Hydrogen_PAFlux_HOPE", t0r, t1r, jdir=jdir,forceCalc=('cdas' in fcIDs))
else:
ephData, scData = scRCM.getSCOmniDiffFlux(scId, vTag, t0r, t1r, jdir=jdir,forceCalc=('cdas' in fcIDs))
print("\n\nGrabbing RCM time")
rcmTimes = scRCM.getRCMtimes(rcm_fname,mhdrcm_fname,jdir=jdir,forceCalc=('times' in fcIDs))
print("\n\nExtracting RCM values along sc trajectory")
rcmTrack = scRCM.getRCM_scTrack(trackf5, rcm_fname, rcmTimes, jdir=jdir, forceCalc=('track' in fcIDs), scName="RBSP-B")
print("\n\nConsolidating grids")
if doHopeSpice:
scEGrid = scData['energies']
hope_scEGrid = hope_scData['energies']
species = scData['species']
rcmEGrid = rcmTrack[species]['energies']
eMax = np.max([scEGrid.max(), hope_scEGrid.max(), rcmEGrid.max()])
eMin = np.min([scEGrid[scEGrid>0].min(), hope_scEGrid[hope_scEGrid>0].min(), rcmEGrid[rcmEGrid>0].min()])
eGrid = np.logspace(np.log10(eMin), np.log10(eMax), 200, endpoint=True)
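#common log-spaced energy grid spanning the RBSPICE, HOPE, and RCM channels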
consolData = scRCM.consolidateODFs(scData, rcmTrack, eGrid=eGrid, doPlot=False)
hope_consolData = scRCM.consolidateODFs(hope_scData, rcmTrack, eGrid=eGrid)
else:
eGrid = np.logspace(np.log10(40), np.log10(6E2), 200, endpoint=True)
consolData = scRCM.consolidateODFs(scData, rcmTrack, eGrid=eGrid)
print("\n\nCalculating tkl vars(var wedge)")
#tkldata = scRCM.getIntensitiesVsL('msphere.rcm.h5','msphere.mhdrcm.h5',tStart, tEnd, tStride, jdir=jdir, forceCalc=('tkl' in fcIDs))
tkldata = scRCM.getVarWedge(rcm_fname, mhdrcm_fname, tStart, tEnd, tStride, 5, jdir=jdir, eGrid=eGrid*1E3, forceCalc=('tkl' in fcIDs)) # This function expects eGrid to be in eV
#Works but very verbose
#print("\n\nTesting RCM eqlatlon grab")
rcm_eqlatlon = scRCM.getRCM_eqlatlon(mhdrcm_fname, rcmTimes, tStart, tEnd, tStride)
print('\n\nPlotting')
fig = plt.figure(figsize=(20,9))
gs = gridspec.GridSpec(8,16, wspace=0.8, hspace=0.6)
cmap_odf = "CMRmap"
cmap_press = 'viridis'
cmap_parpress = 'gnuplot2'
cmap_rcm = "CMRmap"
AxCB_odf = fig.add_subplot(gs[:,0])
AxSC = fig.add_subplot(gs[0:4,1:8])
AxRCM = fig.add_subplot(gs[4:8,1:8])
AxTL = fig.add_subplot(gs[0:2, 8:12])
AxTKL = fig.add_subplot(gs[2:7,8:12])
AxRCMLatLon = fig.add_subplot(gs[:4, 12:16], projection='polar')
AxRCMEq = fig.add_subplot(gs[4:7, 12:16])
#If tkl plot will match Diff Flux colorbar, let it use it and make pressure cbar span tkl and eqlatlon plots
if tklv == 'odf':
AxCB_press = fig.add_subplot(gs[7,8:16])
elif tklv == 'press':
#TKL will actually show partial pressure, which needs its own cbar
AxCB_parpress = fig.add_subplot(gs[7,8:12])
#Original colorbar only spans eqlatlon plots
AxCB_press = fig.add_subplot(gs[7,12:16])
odfnorm = kv.genNorm(1E4, 5E6, doLog=True)
ut_tkl = kT.MJD2UT(tkldata['MJD'])
pressnorm = kv.genNorm(1E-2, 75, doLog=False)
parpressnorm = kv.genNorm(1e-3, 5, doLog=True)
#Movie time
outdir = os.path.join(fdir, vidOut)
kh5.CheckDirOrMake(outdir)
n_pad = int(np.log10((len(tkldata['MJD'])))) + 1
ticker = 0
#Run first iteration manually
pltmjd = tkldata['MJD'][0]
if doHopeSpice:
scRCM.plt_ODF_Comp(AxSC, AxRCM, AxCB_odf, hope_consolData, mjd=pltmjd, norm=odfnorm, cmapName=cmap_odf)
scRCM.plt_ODF_Comp(AxSC, AxRCM, AxCB_odf, consolData, mjd=pltmjd, norm=odfnorm, cmapName=cmap_odf, forcePop=True)
else:
scRCM.plt_ODF_Comp(AxSC, AxRCM, AxCB_odf, consolData, mjd=pltmjd, norm=odfnorm, cmapName=cmap_odf)
AxSC.title.set_text('RBSP RCM Comparison')
AxCB_odf.yaxis.set_ticks_position('left')
AxCB_odf.yaxis.set_label_position('left')
AxSC.tick_params(axis='y', pad=-1)
AxSC.yaxis.labelpad = -3
AxRCM.tick_params(axis='y', pad=-1)
AxRCM.yaxis.labelpad = -3
scRCM.plt_tl(AxTL, tkldata, AxCB=AxCB_press, mjd=pltmjd, norm=pressnorm, cmapName=cmap_press)
if tklv == 'odf':
scRCM.plt_tkl(AxTKL, tkldata, vName=tklv, mjd=pltmjd, norm=odfnorm, cmapName=cmap_odf)
elif tklv == 'press':
#Actually partial pressure
scRCM.plt_tkl(AxTKL, tkldata, vName=tklv, AxCB=AxCB_parpress, mjd=pltmjd, norm=parpressnorm, cmapName=cmap_parpress)
AxTL.xaxis.set_ticks_position('top')
AxTL.xaxis.set_label_position('top')
AxTL.tick_params(axis='y', pad=-1)
AxTL.yaxis.labelpad = -1
fmtTKL(AxTKL)
AxCB_press.xaxis.labelpad = -1
scRCM.plt_rcm_eqlatlon(AxRCMLatLon, AxRCMEq, rcm_eqlatlon, rcmTrack, mjd=pltmjd, norm=pressnorm, cmapName=cmap_press)
if plotTag != "":
plt.suptitle(plotTag, fontsize=20)
ticker += 1
filename = "{}.{:0>{n}d}.png".format("vid", ticker, n=n_pad)
ofname = os.path.join(outdir, filename)
kv.savePic(ofname)
#Now just things that need to update
bar = progressbar.ProgressBar(max_value=len(tkldata['MJD']))
for n in range(1,len(tkldata['MJD'])):
bar.update(n)
pltmjd = tkldata['MJD'][n]
scRCM.plt_ODF_Comp(AxSC, AxRCM, AxCB_odf, consolData, mjd=pltmjd, norm=odfnorm, cmapName=cmap_odf)
scRCM.plt_tl(AxTL, tkldata, AxCB=AxCB_press, mjd=pltmjd, norm=pressnorm, cmapName=cmap_press)
if tklv == 'odf':
scRCM.plt_tkl(AxTKL, tkldata, vName=tklv, mjd=pltmjd, norm=odfnorm, cmapName=cmap_odf, satTrackData=rcmTrack)
elif tklv == 'press':
#Actually partial pressure
scRCM.plt_tkl(AxTKL, tkldata, vName=tklv, AxCB=AxCB_parpress, mjd=pltmjd, norm=parpressnorm, cmapName=cmap_parpress, satTrackData=rcmTrack)
fmtTKL(AxTKL)
AxTKL.title.set_text(str(ut_tkl[n]))
scRCM.plt_rcm_eqlatlon(AxRCMLatLon, AxRCMEq, rcm_eqlatlon, rcmTrack, mjd=pltmjd, norm=pressnorm, cmapName=cmap_press)
#Doesn't make labels until show is called (i think)
#tickLabels = [s.get_text() for s in AxTL.get_xticklabels()]
#AxTL.set_xticklabels(tickLabels)
ticker += 1
filename = "{}.{:0>{n}d}.png".format("vid", ticker, n=n_pad)
ofname = os.path.join(outdir, filename)
kv.savePic(ofname)
plt.show()

View File

@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a442a301447dc28f4f41d6f88accb9bd3a5ac6f3340dcf4298832117ec05ec8b
size 7775

View File

@@ -1,115 +0,0 @@
#!/usr/bin/env python
import h5py
import numpy as np
import sys
TW = 1.4493e+5 #Default temperature, K => 0.01 nPa
nW = 5 #Default density, #/cc
VxW = 400.0 #Default wind, km/s
f107val = 100.0 #Default f10.7 flux
tilt = 0.0 #Default dipole tilt, radians
mjd0 = 58767.0 #Default MJD, set for 2019-10-11 00:00:00
Bx0 = 0.0 #Default Bx offset for planar front, keep at zero
ByC = 0.0 #Default By coefficient used to calculate Bx; set nonzero for a tilted field
BzC = 0.0 #Default Bz coefficient used to calculate Bx; set nonzero for a tilted field
fOut = "bcwind_2.h5"
#Time bounds [hours]
tMin = 0.0
tMax = 16.0
dt = 15.0 #Cadence [s]
SimT = (tMax-tMin)*60.0*60.0
NumT = int( np.ceil(SimT/dt)+1 )
print("Generating %d slices, T=[%5.2f,%5.2f]"%(NumT,tMin,tMax))
T = np.linspace(tMin,tMax,NumT)
D = np.zeros(NumT)
Temp = np.zeros(NumT)
Vx = np.zeros(NumT)
Vy = np.zeros(NumT)
Vz = np.zeros(NumT)
Bx = np.zeros(NumT)
By = np.zeros(NumT)
Bz = np.zeros(NumT)
f107 = np.zeros(NumT)
ThT = np.zeros(NumT)
mjd = np.zeros(NumT)
symh = np.zeros(NumT)
tWin = 1.0 #Window times [hr]
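#Piecewise solar-wind scenario (tWin = 1 hr): quiet for 0-1 h, Bz=+5 nT for 1-3.5 h, Bz=-5 nT for 3.5-6 h, Bz=+5 nT for 6-8 h, then Bz=-15 nT with doubled density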
for i in range(NumT):
t = T[i] #Time in hours
if (t <= tWin):
D[i] = nW
Vx[i] = -VxW
Temp[i] = TW
f107[i] = f107val
ThT[i] = tilt
mjd[i] = mjd0 + T[i]/24.0
elif (t <= 3.5*tWin):
D[i] = nW
Vx[i] = -VxW
Temp[i] = TW
Bz[i] = 5.0
f107[i] = f107val
ThT[i] = tilt
mjd[i] = mjd0 + T[i]/24.0
elif (t <= 6.0*tWin):
D[i] = nW
Vx[i] = -VxW
Temp[i] = TW
Bz[i] = -5.0
f107[i] = f107val
ThT[i] = tilt
mjd[i] = mjd0 + T[i]/24.0
elif (t <= 8.0*tWin):
D[i] = nW
Vx[i] = -VxW
Temp[i] = TW
Bz[i] = 5.0
f107[i] = f107val
ThT[i] = tilt
mjd[i] = mjd0 + T[i]/24.0
else:
D[i] = 2.0*nW
Vx[i] = -VxW
Temp[i] = TW
Bz[i] = -15.0
f107[i] = f107val
ThT[i] = tilt
mjd[i] = mjd0 + T[i]/24.0
#Write solar wind
#t,D,V,Temp,B = [s],[#/cm3],[m/s],[K],[nT]
oTScl = (60*60.0) #hr->s
oDScl = 1.0
oVScl = 1.0e+3 #km/s->m/s
oTempScl = 1.0
oBScl = 1.0
with h5py.File(fOut,'w') as hf:
hf.create_dataset("T" ,data=oTScl*T)
hf.create_dataset("symh" ,data=symh)
hf.create_dataset("D" ,data=oDScl*D)
hf.create_dataset("Temp" ,data=oTempScl*Temp)
hf.create_dataset("Vx",data=oVScl*Vx)
hf.create_dataset("Vy",data=oVScl*Vy)
hf.create_dataset("Vz",data=oVScl*Vz)
hf.create_dataset("Bx",data=oBScl*Bx)
hf.create_dataset("By",data=oBScl*By)
hf.create_dataset("Bz",data=oBScl*Bz)
hf.create_dataset("tilt",data=ThT)
hf.create_dataset("f10.7",data=f107)
hf.create_dataset("MJD",data=mjd)
hf.create_dataset("Bx0",data=Bx0)
hf.create_dataset("ByC",data=ByC)
hf.create_dataset("BzC",data=BzC)

View File

@@ -1,52 +0,0 @@
#!/usr/bin/env python
import sys
import argparse
from argparse import RawTextHelpFormatter
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from astropy.time import Time
import pickle
import kaipy.kaiH5 as kaiH5
import kaipy.remix.remix as remix
remixFile = "sigmaH.2430.mix.h5"
step = 0
#x = np.zeros((89,720,1))
#y = np.zeros((1,1,64800))
#z = x+y
#print(z.shape)
#sys.exit(0)
# Initialize the remix class
#ion = remix.remix(remixFile,args.n)
# just temporary
phiStart = float(sys.argv[1])
phiEnd = float(sys.argv[2])
altitude = float(sys.argv[3])
phiStep = 0.5 # 0.5 degree resolution in phi
ion = remix.remix(remixFile,step)
p = np.arange(phiStart,phiEnd,phiStep)*np.pi/180.
t = (np.arange(0,45,0.5)+0.5)*np.pi/180. # 0.5 degree resolution in theta down to 45 colat, omitting pole
R = (6380.+altitude)/6500. # altitude in units of Rion. Note the definitions of Re and Ri
phi,theta = np.meshgrid(p,t)
x = R*np.sin(theta)*np.cos(phi)
y = R*np.sin(theta)*np.sin(phi)
z = R*np.cos(theta)
xyz = np.array([x.ravel(),y.ravel(),z.ravel()]).T
dBr,dBtheta,dBphi = ion.dB(xyz,hallOnly=False)
print("Done computing. Saving pickles.")
#print(dBr,dBtheta,dBphi)
pickle.dump([p,t,R,dBr,dBtheta,dBphi],open('db_%03d_%03d_%03d.pkl'%(phiStart,phiEnd,altitude),'wb'))

View File

@@ -1,140 +0,0 @@
#!/usr/bin/env python
#Generates HDF-5 grid for Gamera
#
#Grid types (gID)
#0: LFM
#1: Egg
#2: Spherical
#3: Warp egg
#4: Fat egg
import argparse
import kaipy.gamera.gamGrids as gg
from argparse import RawTextHelpFormatter
import numpy as np
if __name__ == "__main__":
#Defaults
en = 1
gID = 0
doEpsY = True #Whether to set pole y values
Rin = 2.5
Rout = 27.5
xtail = 250.0
TINY = 1.0e-8 #Default tiny value
rngtol = 1.2 #Default ring tolerance
djwarp = -1.5
#Rout = 29.5 #LFM to include box
Ni0 = 48
Nj0 = 24
Nk0 = 32
fOut = "Grid.h5"
fIn = "lfm.hdf"
MainS = """Generates HDF5 grid for Gamera
(Ni,Nj,Nk) = en x (Ni0,Nj0,Nk0)
Grid types (gid)
0: LFM
1: Egg
2: Spherical
3: Warp egg
4: Fat egg"""
#3: Ellipse"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-gid',type=int,metavar="gid",default=gID,help="Grid type ID (default: %(default)s)")
parser.add_argument('-ni0',type=int,metavar="Ni0",default=Ni0,help="Number of i0 cells (default: %(default)s)")
parser.add_argument('-nj0',type=int,metavar="Nj0",default=Nj0,help="Number of j0 cells (default: %(default)s)")
parser.add_argument('-nk0',type=int,metavar="Nk0",default=Nk0,help="Number of k0 cells (default: %(default)s)")
parser.add_argument('-en' ,type=int,metavar="en" ,default=en ,help="Cell multiplication factor (default: %(default)s)")
parser.add_argument('-o',type=str,metavar="file",default=fOut,help="File to write output grid to (default: %(default)s)")
parser.add_argument('-i',type=str,metavar="file",default=fIn ,help="Input LFM HDF4 file (default: %(default)s)")
parser.add_argument('-Rin',type=float,metavar="Rin",default=Rin ,help="Inner radius (default: %(default)s)")
parser.add_argument('-Rout',type=float,metavar="Rout",default=Rout ,help="Sunward outer radius (default: %(default)s)")
parser.add_argument('-xtail',type=float,metavar="xtail",default=xtail ,help="Tailward outer radius (default: %(default)s)")
parser.add_argument('-eps',type=float,metavar="eps",default=TINY ,help="Tiny number (default: %(default)s)")
parser.add_argument('-viz', action='store_true', default=False,help="Show 2D figure of grid (default: %(default)s)")
parser.add_argument('-chimp', action='store_true', default=False,help="Store grid in CHIMP format (default: %(default)s)")
parser.add_argument('-rngtol',type=float,metavar="rngtol",default=rngtol ,help="Ring-avg tolerance (default: %(default)s)")
parser.add_argument('-djwarp',type=float,metavar="djwarp",default=djwarp ,help="Theta-stretching for warped egg (default: %(default)s)")
parser.add_argument('-v', action='store_true', default=False,help="Verbose output (default: %(default)s)")
#Finalize parsing
args = parser.parse_args()
Ni0 = args.ni0
Nj0 = args.nj0
Nk0 = args.nk0
en = args.en
gID = args.gid
fOut = args.o
fIn = args.i
Rin = args.Rin
Rout = args.Rout
TINY = args.eps
doViz = args.viz
doChimp = args.chimp
rngtol = args.rngtol
djwarp = args.djwarp
doVerb = args.v
xtail = np.abs(args.xtail)
#---------------------
#Do work
Ni = Ni0*en
Nj = Nj0*en
Nk = Nk0*en
#fO = "Grid%d.h5"%(Nk)
if (gID == 0):
print("Generating LFM grid ...")
#print("\tReading from %s"%fIn)
XX,YY = gg.genLFM(Ni=Ni,Nj=Nj,Rin=Rin,Rout=Rout,fIn=fIn,TINY=TINY)
if (gID == 1):
print("Generating Egg grid ...")
XX,YY = gg.genEgg(Ni=Ni,Nj=Nj,Rin=Rin,Rout=Rout,xtail=xtail,TINY=TINY,A=0.0)
if (gID == 2):
print("Generating Spherical grid ...")
#Rin = 7.5
#Rout = 80.0
XX,YY = gg.genSph(Ni=Ni,Nj=Nj,Rin=Rin,Rout=Rout,TINY=TINY)
if (gID == 3):
print("Generating Stretched Egg grid ...")
XX,YY = gg.genEgg(Ni=Ni,Nj=Nj,Rin=Rin,Rout=Rout,xtail=xtail,TINY=TINY,A=djwarp)
if (gID == 4):
print("Generating Fat Egg grid ...")
XX,YY = gg.genFatEgg(Ni=Ni,Nj=Nj,Rin=Rin,Rout=Rout,xtail=xtail,TINY=TINY,A=djwarp)
#Calculate real outer radii, sunward/anti
rOutS = np.sqrt(XX[-1,0]**2.0 + YY[-1,0]**2.0)
rOutAS = np.sqrt(XX[-1,-1]**2.0 + YY[-1,-1]**2.0)
print("\tOutput: %s"%fOut)
print("\tSize: (%d,%d,%d)"%(Ni,Nj,Nk))
print("\tInner Radius: %f"%Rin)
print("\tSunward Outer Radius: %f"%rOutS)
print("\tTail Outer Radius: %f"%rOutAS)
llBC = np.arcsin(np.sqrt(1.0/Rin))*180.0/np.pi
print("\tLow-lat BC: %f"%(llBC))
print("\nWriting to %s"%(fOut))
xxG,yyG = gg.Aug2D(XX,YY,doEps=doEpsY)
X3,Y3,Z3 = gg.Aug3D(xxG,yyG,Nk=Nk,TINY=TINY)
if (doChimp):
gg.WriteChimp(X3,Y3,Z3,fOut=fOut)
else:
#Do ring checking
gg.genRing(XX,YY,Nk=Nk,Tol=rngtol,doVerb=doVerb)
gg.WriteGrid(X3,Y3,Z3,fOut=fOut)
if (doViz):
gg.VizGrid(XX,YY,xxG,yyG,fOut=fOut)
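For reference, the egg-type grid can also be produced directly from the kaipy functions used above; a minimal sketch, assuming kaipy is importable and reusing the keyword arguments exactly as they appear in the calls above (sizes and file name are illustrative):

import kaipy.gamera.gamGrids as gg

Ni, Nj, Nk = 96, 48, 64                     # illustrative: en=2 of the 48x24x32 base
XX, YY = gg.genEgg(Ni=Ni, Nj=Nj, Rin=2.5, Rout=27.5, xtail=250.0, TINY=1.0e-8, A=0.0)
xxG, yyG = gg.Aug2D(XX, YY, doEps=True)              # augment the 2D slice (pole eps values)
X3, Y3, Z3 = gg.Aug3D(xxG, yyG, Nk=Nk, TINY=1.0e-8)  # revolve into 3D corner arrays
gg.genRing(XX, YY, Nk=Nk, Tol=1.2, doVerb=False)     # ring-average check, as above
gg.WriteGrid(X3, Y3, Z3, fOut="Grid.h5")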

View File

@@ -1,77 +0,0 @@
#!/usr/bin/env python
#Joins MPI restart into serial one
import argparse
import kaipy.gamera.magsphereRescale as upscl
from argparse import RawTextHelpFormatter
import numpy as np
import h5py
import os
import kaipy.kaiH5 as kh5
if __name__ == "__main__":
dIn = os.getcwd()
Ri = 3
Rj = 6
Rk = 1
inid = "mpimsphere"
nres = "0"
outid = "msphere"
grid = "lfmQ.h5"
MainS = """Joins MPI-decomposed Gamera restart file into serial file
(Ri,Rj,Rk) : Input MPI decomposition
inid/nres : Run ID string and restart number, i.e. input file = inid.MPISTUFF.Res.#nres.h5
outid : Output Run ID
grid : Filename of grid corners file (with ghosts) generated by genLFM/genGrid
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-i',metavar='inid',default=inid,help="Input Run ID string (default: %(default)s)")
parser.add_argument('-n',type=int,metavar="nres",default=0,help="Restart number (default: %(default)s)")
parser.add_argument('-Ri',type=int,metavar="Ri",default=Ri,help="i-Ranks (default: %(default)s)")
parser.add_argument('-Rj',type=int,metavar="Rj",default=Rj,help="j-Ranks (default: %(default)s)")
parser.add_argument('-Rk',type=int,metavar="Rk",default=Rk,help="k-Ranks (default: %(default)s)")
parser.add_argument('-o',type=str,metavar="outid",default=outid,help="Output run ID (default: %(default)s)")
parser.add_argument('-grid',type=str,metavar="grid",default=grid,help="Grid file to read from (default: %(default)s)")
parser.add_argument('-d',type=str,metavar="directory",default=dIn,help="Directory to read from (default: %(default)s)")
#Finalize parsing
args = parser.parse_args()
bStr = args.i
nRes = args.n
outid = args.o
Ri = args.Ri
Rj = args.Rj
Rk = args.Rk
grid = args.grid
dIn = args.d
#Open output file
fOut = outid + ".Res.%05d.h5"%(nRes)
oH5 = h5py.File(fOut,'w')
G,M,G0 = upscl.PullRestartMPI(bStr,nRes,Ri,Rj,Rk,dIn,oH5)
#Write main data
print("Writing plasma and field data ...")
oH5.create_dataset("Gas",data=G)
if (G0 is not None):
print("Writing Gas0")
oH5.create_dataset("Gas0",data=G0)
oH5.create_dataset("magFlux",data=M)
gVals = ['X','Y','Z']
fGrid = dIn + "/" + grid
print("Reading grid from %s ..."%(fGrid))
iH5 = h5py.File(fGrid,'r')
for g in gVals:
oH5.create_dataset(g,data=iH5[g])
iH5.close()
oH5.close()

View File

@@ -1,87 +0,0 @@
#!/usr/bin/env python
#Turns collection of LFM HDF-4 data to Chimp-style (fake Gamera)
import argparse
import sys
import numpy as np
import os
import kaipy.gamera.gamGrids as gg
import kaipy.lfm2kaiju as l2k
import h5py
from glob import glob
if __name__ == "__main__":
#Defaults
fOut = "ebLFM.h5"
parser = argparse.ArgumentParser(description="Converts collection of LFM HDF-4 data to Chimp-style (fake Gamera)")
#Get required info
parser.add_argument('-o',type=str,metavar="Output file name",default=fOut,help="File to write output grid to (default: %(default)s)")
#Do only EB
parser.add_argument('--mhd',dest='doMHD',action='store_true',default=False,help="Do all MHD variables (default: %(default)s)")
#Do Jupiter
parser.add_argument('--jupiter',dest='doJupiter',action='store_true',default=False,help="Pull from Jovian grid (default: %(default)s)")
#Files to interpolate from
parser.add_argument('hdfs',nargs='+',metavar='lfm.hdf',help="List of files to convert")
#Finished getting arguments, parse and move on
args = parser.parse_args()
hdfs = list(args.hdfs)
lfmfile = hdfs[0]
fOut = args.o
doMHD = args.doMHD
doJupiter = args.doJupiter
print("Writing out to %s"%fOut)
#Get grid from LFM file and write to Gamera-style H5
if (doJupiter):
l2k.lfm2gg(lfmfile,fOut,doEarth=False,doJupiter=True)
else:
l2k.lfm2gg(lfmfile,fOut,doEarth=True,doJupiter=False)
#Get time slice information
Ts = l2k.lfmTimes(hdfs)
T0 = Ts.min() #Smallest time
#Loop through sorted timeslices
lfmSlcs = sorted(zip(Ts-T0,hdfs))
n = 0
oH5 = h5py.File(fOut,'r+')
for lS in lfmSlcs:
fIn = lS[1]
#print("Reading %s"%(fIn))
gID = "Step#%d"%(n)
#Create group
oH5.create_group(gID)
#Copy time
oH5[gID].attrs.create("time",lS[0])
#Get LFM V/B fields
Vx,Vy,Vz,Bx,By,Bz = l2k.lfmFields(fIn)
oH5[gID].create_dataset("Bx",data=np.single(Bx))
oH5[gID].create_dataset("By",data=np.single(By))
oH5[gID].create_dataset("Bz",data=np.single(Bz))
oH5[gID].create_dataset("Vx",data=np.single(Vx))
oH5[gID].create_dataset("Vy",data=np.single(Vy))
oH5[gID].create_dataset("Vz",data=np.single(Vz))
#Do other MHD variables if requested
if (doMHD):
D,P = l2k.lfmFlow(fIn)
oH5[gID].create_dataset("D",data=np.single(D))
oH5[gID].create_dataset("P",data=np.single(P))
n = n+1
oH5.close()
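A short sketch of reading the converted file back, assuming only h5py and the layout written above (Step#n groups carrying a 'time' attribute and single-precision field datasets):

import h5py

with h5py.File("ebLFM.h5", "r") as f5:
    steps = sorted((k for k in f5.keys() if k.startswith("Step#")),
                   key=lambda s: int(s.split("#")[1]))
    for s in steps:
        t  = f5[s].attrs["time"]    # time relative to the earliest LFM slice
        Bx = f5[s]["Bx"][:]         # one of the field datasets written above
        print(s, t, Bx.shape)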

View File

@@ -1,90 +0,0 @@
#!/usr/bin/env python
#Up/Down-scales Gamera magnetosphere restart
import argparse
import kaipy.gamera.gamGrids as gg
import kaipy.gamera.magsphereRescale as upscl
from argparse import RawTextHelpFormatter
import numpy as np
import h5py
if __name__ == "__main__":
rStrs = ['U','D']
fIn = "msphere.00000.h5"
fOut = "msphere2x.00000.h5"
MainS = """Up/Down-scales Gamera magnetosphere restart
Run types (rx)
U: Upscale, 2x in each dimension
D: Downscale, 1/2x in each dimension
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-rx',type=str,default="U",choices=rStrs,help="Scaling Specifier (default: %(default)s)")
parser.add_argument('-i',type=str,metavar="file",default=fIn ,help="Input Restart HDF5 (default: %(default)s)")
parser.add_argument('-o',type=str,metavar="file",default=fOut,help="Output Restart HDF5 (default: %(default)s)")
#Finalize parsing
args = parser.parse_args()
rX = args.rx
fIn = args.i
fOut = args.o
#Now do work
print("Rescaling %s to %s"%(fIn,fOut))
#Open output
oH5 = h5py.File(fOut,'w')
#Open input
print("Reading from %s"%(fIn))
iH5 = h5py.File(fIn,'r')
Ns,Nv,Nk,Nj,Ni = iH5['Gas'].shape
G = np.zeros((Ns,Nv,Nk,Nj,Ni))
M = np.zeros((3,Nk+1,Nj+1,Ni+1))
G[:,:,:,:,:] = iH5['Gas'][:]
M[ :,:,:,:] = iH5['magFlux'][:]
X = iH5['X'][:]
Y = iH5['Y'][:]
Z = iH5['Z'][:]
doGas0 = ('Gas0' in iH5.keys()) #Whether there's a Gas0 array
if (doGas0):
G0 = np.zeros((Ns,Nv,Nk,Nj,Ni))
G0[:,:,:,:,:] = iH5['Gas0'][:]
#Transfer attributes to output
for k in iH5.attrs.keys():
aStr = str(k)
oH5.attrs.create(k,iH5.attrs[aStr])
#Close input
iH5.close()
if (rX == "U"):
print("Upscaling ...")
Xr,Yr,Zr = upscl.upGrid(X,Y,Z)
Gr = upscl.upGas(X,Y,Z,G,Xr.T,Yr.T,Zr.T)
FluxR = upscl.upFlux(X,Y,Z,M,Xr,Yr,Zr)
if (doGas0):
G0r = upscl.upGas(X,Y,Z,G0,Xr.T,Yr.T,Zr.T)
else:
print("Downscaling ...")
Xr,Yr,Zr = upscl.downGrid(X,Y,Z)
Gr = upscl.downGas(X,Y,Z,G,Xr.T,Yr.T,Zr.T)
FluxR = upscl.downFlux(X,Y,Z,M,Xr,Yr,Zr)
if (doGas0):
G0r = upscl.downGas(X,Y,Z,G0,Xr.T,Yr.T,Zr.T)
print("Writing to %s"%(fOut))
#Write out grid to restart
oH5.create_dataset("X",data=Xr.T)
oH5.create_dataset("Y",data=Yr.T)
oH5.create_dataset("Z",data=Zr.T)
#Write out gas/flux variables
oH5.create_dataset("Gas",data=Gr)
oH5.create_dataset("magFlux",data=FluxR)
if (doGas0):
oH5.create_dataset("Gas0",data=G0r)
#Close output
oH5.close()
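A hedged sketch of the expected array shapes for an upscale (-rx U), assuming the "2x in each dimension" applies to the cell counts, so corner-based arrays such as magFlux go from N+1 to 2N+1 points per direction (all sizes are illustrative):

# Illustrative shapes only; not read from any real restart
Ns, Nv, Nk, Nj, Ni = 1, 8, 64, 48, 32
gas_in   = (Ns, Nv, Nk,     Nj,     Ni)
gas_out  = (Ns, Nv, 2*Nk,   2*Nj,   2*Ni)      # assumed 2x cell counts
flux_in  = (3, Nk + 1,   Nj + 1,   Ni + 1)
flux_out = (3, 2*Nk + 1, 2*Nj + 1, 2*Ni + 1)   # corners follow the doubled cells
print(gas_in,  "->", gas_out)
print(flux_in, "->", flux_out)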

View File

@@ -1,60 +0,0 @@
#!/usr/bin/env python
#Splits serial restart into MPI decomposition
import argparse
import kaipy.gamera.magsphereRescale as upscl
from argparse import RawTextHelpFormatter
import numpy as np
import h5py
import kaipy.kaiH5 as kh5
if __name__ == "__main__":
Ri = 3
Rj = 6
Rk = 1
runid = "msphere"
nres = "0"
outid = "mpimsphere"
MainS = """Splits serial Gamera restart file into MPI-decomposed restart file
(Ri,Rj,Rk) : Output MPI decomposition
runid/nres : Run ID string and restart number, i.e. input file = runid.Res.#nres.h5
outid : Output Run ID
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('runid',metavar='runid',help="Run ID string")
parser.add_argument('-n',type=int,metavar="nres",default=0,help="Restart number (default: %(default)s)")
parser.add_argument('-Ri',type=int,metavar="Ri",default=Ri,help="i-Ranks (default: %(default)s)")
parser.add_argument('-Rj',type=int,metavar="Rj",default=Rj,help="j-Ranks (default: %(default)s)")
parser.add_argument('-Rk',type=int,metavar="Rk",default=Rk,help="k-Ranks (default: %(default)s)")
parser.add_argument('-o',type=str,metavar="outid",default=outid,help="Output run ID (default: %(default)s)")
#Finalize parsing
args = parser.parse_args()
bStr = args.runid
nRes = args.n
outid = args.o
Ri = args.Ri
Rj = args.Rj
Rk = args.Rk
#Open input file and get data
fIn = bStr + ".Res.%05d.h5"%(nRes)
print("Reading from %s"%(fIn))
iH5 = h5py.File(fIn,'r')
Ns,Nv,Nk,Nj,Ni = iH5['Gas'].shape
G = np.zeros((Ns,Nv,Nk,Nj,Ni))
M = np.zeros((3,Nk+1,Nj+1,Ni+1))
G[:,:,:,:,:] = iH5['Gas'][:]
M[ :,:,:,:] = iH5['magFlux'][:]
X = iH5['X'][:]
Y = iH5['Y'][:]
Z = iH5['Z'][:]
#Close input file
iH5.close()
upscl.PushRestartMPI(outid,nRes,Ri,Rj,Rk,X,Y,Z,G,M,fIn)

View File

@@ -1,230 +0,0 @@
#!/usr/bin/env python
# Make XMF files from an MPI-decomposed gamera run
import argparse
from argparse import RawTextHelpFormatter
import numpy as np
import kaipy.gamera.block_gampp as gampp
import xml.etree.ElementTree as et
import xml.dom.minidom
import kaipy.kaiH5 as kh5
import os
# Add a vector attribute (JOIN of three scalar components) to the current grid element
# Note: uses the module-level Grid and cDims set in the main loop below
def VectorOutput(Vname,Vecs,VDims,n,h5F):
vAtt = et.SubElement(Grid, "Attribute")
vAtt.set("Name", Vname)
vAtt.set("AttributeType", "Vector")
vAtt.set("Center", "Cell")
FDI = et.SubElement(vAtt, "DataItem")
FDI.set("Dimensions", VDims)
FDI.set("Function", "JOIN( $0, $1, $2)" )
FDI.set("ItemType", "Function")
XDI = et.SubElement(FDI, "DataItem")
XDI.set("Dimensions", cDims)
XDI.set("NumberType", "Float")
XDI.set("Precision", "4")
XDI.set("Format", "HDF")
XDI.text = "%s:/Step#%d/%s" % (h5F, n, Vecs[0])
YDI = et.SubElement(FDI, "DataItem")
YDI.set("Dimensions", cDims)
YDI.set("NumberType", "Float")
YDI.set("Precision", "4")
YDI.set("Format", "HDF")
YDI.text = "%s:/Step#%d/%s" % (h5F, n, Vecs[1])
ZDI = et.SubElement(FDI, "DataItem")
ZDI.set("Dimensions", cDims)
ZDI.set("NumberType", "Float")
ZDI.set("Precision", "4")
ZDI.set("Format", "HDF")
ZDI.text = "%s:/Step#%d/%s" % (h5F, n, Vecs[2])
# Build the per-rank gamera output filename for tile (i,j,k) in an (Ri,Rj,Rk) decomposition
def genName(bStr, i, j, k, Ri, Rj, Rk,isOld):
n = j + i*Rj + k*Ri*Rj
if (isOld):
fID = bStr + \
"_%04d_%04d_%04d_%04d_%04d_%04d_%012d.h5" % (Ri, Rj, Rk, i, j, k,n)
else:
fID = bStr + \
"_%04d_%04d_%04d_%04d_%04d_%04d.gam.h5" % (Ri, Rj, Rk, i, j, k)
return fID
if __name__ == "__main__":
# Defaults
fdir = os.getcwd()
ftag = "msphere"
outid = "sim"
MainS = """Creates series of XMF files from MPI-decomposed Gamera run
"""
parser = argparse.ArgumentParser(
description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-d', type=str, metavar="directory", default=fdir,
help="Directory to read from (default: %(default)s)")
parser.add_argument('-outd', type=str, metavar="directory", default=fdir,
help="Directory to output (default: %(default)s)")
parser.add_argument('-id', type=str, metavar="runid",
default=ftag, help="RunID of data (default: %(default)s)")
parser.add_argument('-outid', type=str, metavar="outid", default=outid,
help="RunID of output XMF files (default: %(default)s)")
# Finalize parsing
args = parser.parse_args()
fdir = args.d
fodir = args.outd
ftag = args.id
outid = args.outid
# ---------------------
# Init data
print(fdir,"ftag",ftag)
gamData = gampp.GameraPipe(fdir, ftag)
print("isOld = ", gamData.isOld)
print("number of variable",gamData.Nv)
print("Vars ",gamData.vIDs)
# ---------------------
# Do work
Ri = gamData.Ri
Rj = gamData.Rj
Rk = gamData.Rk
tOut = 0
topoStr = "3DSMesh"
geoStr = "X_Y_Z"
# vIDs = ["D", "Vx", "Vy", "Vz", "P", "Bx", "By", "Bz"] # ,"Jx","Jy","Jz"]
vIDs = ["D", "P"]
VVec =["Vx", "Vy", "Vz"]
BVec =["Bx", "By", "Bz"]
JVec =["Jx", "Jy", "Jz"]
EVec =["Ex", "Ey", "Ez"]
haveV = False
haveB = False
haveJ = False
haveE = False
for i in gamData.vIDs:
if ( i == "Vx" ): haveV = True
if ( i == "Bx" ): haveB = True
if ( i == "Jx" ): haveJ = True
if ( i == "Ex" ): haveE = True
Nv = len(vIDs)
for n in range(gamData.s0, gamData.sFin+1):
nslc = n-gamData.s0
print(n,gamData.T[nslc])
# Create XMF tree
Xdmf = et.Element("Xdmf")
Xdmf.set("Version", "2.0")
Dom = et.SubElement(Xdmf, "Domain")
# Spatial collection
meshName = "g%02d_mesh"%(nslc)
gCol = et.SubElement(Dom, "Grid")
gCol.set("Name", meshName)
gCol.set("GridType", "Collection")
gCol.set("CollectionType", "Spatial")
Time = et.SubElement(gCol, "Time")
Time.set("Value", "%s" % (str(gamData.T[nslc])))
Cyc = et.SubElement(gCol, "Cycle")
Cyc.set("Value", "%d" % (n))
# Now loop over MPI decomposition
for i in range(Ri):
for j in range(Rj):
for k in range(Rk):
nMPI = j + i*Rj + k*Ri*Rj
h5F = fdir + '/' + genName(ftag, i, j, k, Ri, Rj, Rk,gamData.isOld)
Ni = gamData.dNi[i]
Nj = gamData.dNj[j]
Nk = gamData.dNk[k]
Ndim = 3
iDims = "%d %d %d" % (Nk+1, Nj+1, Ni+1)
VDims = "%d %d %d %d" % (Nk+0, Nj+0, Ni+0, Ndim )
cDims = "%d %d %d" % (Nk+0, Nj+0, Ni+0)
iDimA = "%d %d %d" % (Nk*Rk+1, Nj*Rj+1, Ni*Ri+1)
# Create new subgrid
gName = meshName+"%d" % (nMPI)
Grid = et.SubElement(gCol, "Grid")
Grid.set("GridType", "Uniform")
Grid.set("Name", gName)
print(Grid)
# Time = et.SubElement(Grid,"Time")
# Time.set("TimeType","Single")
# Time.set("Value","%s"%(str(gamData.T[nslc])))
Topo = et.SubElement(Grid, "Topology")
Topo.set("TopologyType", topoStr)
Topo.set("NumberOfElements", iDims)
Geom = et.SubElement(Grid, "Geometry")
Geom.set("GeometryType", geoStr)
xC = et.SubElement(Geom, "DataItem")
xC.set("Dimensions", iDims)
xC.set("NumberType", "Float")
xC.set("Precision", "4")
xC.set("Format", "HDF")
xC.text = "%s:/X" % (h5F)
yC = et.SubElement(Geom, "DataItem")
yC.set("Dimensions", iDims)
yC.set("NumberType", "Float")
yC.set("Precision", "4")
yC.set("Format", "HDF")
yC.text = "%s:/Y" % (h5F)
zC = et.SubElement(Geom, "DataItem")
zC.set("Dimensions", iDims)
zC.set("NumberType", "Float")
zC.set("Precision", "4")
zC.set("Format", "HDF")
zC.text = "%s:/Z" % (h5F)
# Create variables
for v in range(Nv):
vID = vIDs[v]
vAtt = et.SubElement(Grid, "Attribute")
vAtt.set("Name", vID)
vAtt.set("AttributeType", "Scalar")
vAtt.set("Center", "Cell")
aDI = et.SubElement(vAtt, "DataItem")
aDI.set("Dimensions", cDims)
aDI.set("NumberType", "Float")
aDI.set("Precision", "4")
aDI.set("Format", "HDF")
aDI.text = "%s:/Step#%d/%s" % ( h5F, n, vID)
# create vectors
if (haveV): VectorOutput("V",VVec,VDims,n,h5F)
if (haveB): VectorOutput("B",BVec,VDims,n,h5F)
if (haveJ): VectorOutput("J",JVec,VDims,n,h5F)
if (haveE): VectorOutput("E",EVec,VDims,n,h5F)
# Write output
fOut = "%s/%s.%06d.xmf" % (fodir, outid, tOut)
print("Writing %s" % (fOut))
# xTree = et.ElementTree(Xdmf)
# xTree.write(fOut, pretty_print=True,
# xml_declaration=True, encoding='UTF-8')
xmlStr = xml.dom.minidom.parseString(et.tostring(Xdmf)).toprettyxml(indent=" ")
with open(fOut,"w") as f:
f.write(xmlStr)
# Prep for next step
tOut = tOut+1
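For reference, the file names produced by genName above follow a fixed pattern; a small self-contained check with illustrative rank counts and tile indices:

Ri, Rj, Rk = 2, 3, 1          # illustrative decomposition
i, j, k    = 0, 1, 0          # one tile
n = j + i*Rj + k*Ri*Rj        # flat rank index used by the old naming scheme (= 1)
print("msphere" + "_%04d_%04d_%04d_%04d_%04d_%04d.gam.h5" % (Ri, Rj, Rk, i, j, k))
# msphere_0002_0003_0001_0000_0001_0000.gam.h5
print("msphere" + "_%04d_%04d_%04d_%04d_%04d_%04d_%012d.h5" % (Ri, Rj, Rk, i, j, k, n))
# msphere_0002_0003_0001_0000_0001_0000_000000000001.h5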

View File

@@ -1,9 +0,0 @@
<?xml version="1.0"?>
<Kaiju>
<Chimp>
<sim runid="RUNID"/>
<time T0="0.0" dt="60.0" tFin="3600.0"/>
<fields ebfile="EBFILE" grType="LFM" doJ="T" isMPI="ISMPI"/>
<parallel Ri="RI" Rj="RJ" Rk="RK"/>
</Chimp>
</Kaiju>
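The uppercase values (RUNID, EBFILE, ISMPI, RI/RJ/RK) are placeholders meant to be substituted before the file is handed to CHIMP; a minimal sketch using plain string replacement, assuming this is the calcdb template referenced elsewhere in this commit (file names and values are illustrative):

subs = {"RUNID": "msphere", "EBFILE": "msphere.eb.h5", "ISMPI": "T",
        "RI": "4", "RJ": "4", "RK": "1"}
with open("calcdb.xml.template") as f:
    xml = f.read()
for key, val in subs.items():
    xml = xml.replace(key, val)       # simple textual substitution of the tokens
with open("calcdb_msphere.xml", "w") as f:
    f.write(xml)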

View File

@@ -1,312 +0,0 @@
#!/usr/bin/env python
#Takes one MPI-decomposed restart and spits out an upscaled MPI restart
import argparse
import kaipy.gamera.magsphereRescale as upscl
from argparse import RawTextHelpFormatter
import numpy as np
import h5py
import os
import kaipy.kaiH5 as kh5
if __name__ == "__main__":
dIn = os.getcwd()
#Input tiling
iRi = 4
iRj = 4
iRk = 1
#Output tiling
oRi = 8
oRj = 8
oRk = 1
inid = "msphere"
outid = "msphere"
nRes = "0"
grid = "lfmQ.h5"
MainS = """Upscales and retiles a Gamera MPI resart
(iRi,iRj,iRk) : Input MPI decomposition
(oRi,oRj,oRk) : Output MPI decomposition
inid/nres : Run ID string and restart number, i.e. input file = inid.MPISTUFF.Res.#nres.h5
outid : Output Run ID
grid : Filename of input grid corners file (with ghosts) generated by genLFM/genGrid
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-i',metavar='inid',default=inid,help="Input Run ID string (default: %(default)s)")
parser.add_argument('-n',type=int,metavar="nres",default=0,help="Restart number (default: %(default)s)")
parser.add_argument('-iRi',type=int,metavar="iRi",default=iRi,help="Input i-Ranks (default: %(default)s)")
parser.add_argument('-iRj',type=int,metavar="iRj",default=iRj,help="Input j-Ranks (default: %(default)s)")
parser.add_argument('-iRk',type=int,metavar="iRk",default=iRk,help="Input k-Ranks (default: %(default)s)")
parser.add_argument('-o',type=str,metavar="outid",default=outid,help="Output run ID (default: %(default)s)")
parser.add_argument('-oRi',type=int,metavar="oRi",default=oRi,help="Output i-Ranks (default: %(default)s)")
parser.add_argument('-oRj',type=int,metavar="oRj",default=oRj,help="Output j-Ranks (default: %(default)s)")
parser.add_argument('-oRk',type=int,metavar="oRk",default=oRk,help="Output k-Ranks (default: %(default)s)")
parser.add_argument('-grid',type=str,metavar="grid",default=grid,help="Input grid file to read from (default: %(default)s)")
parser.add_argument('--keep',action='store_true',default=False,help='Keep intermediate files (default: %(default)s)')
parser.add_argument('--norescale',action='store_true',default=False,help='Do not rescale (up or down) (default: %(default)s)')
parser.add_argument('--down',action='store_true',default=False,help='Downscale instead of upscale (default: %(default)s)')
#Finalize parsing
args = parser.parse_args()
bStr = args.i
nRes = args.n
outid = args.o
iRi = args.iRi
iRj = args.iRj
iRk = args.iRk
oRi = args.oRi
oRj = args.oRj
oRk = args.oRk
grid = args.grid
doKeep = args.keep
doUp = not args.down
doRescale = not args.norescale
#Pull tiled restart, write to temp file
#Stupidly writing temp restart to reuse old code
fTmp = "tempRes.31337.h5" # temporarily written at the run directory.
oH5 = h5py.File(fTmp,'w') # fTmp needs to include all necessary variables. Use oH5.create_dataset below.
G,M,G0,oG,oM = upscl.PullRestartMPI(bStr,nRes,iRi,iRj,iRk,dIn=None,oH5=oH5)
#Write main data
print("Writing plasma and field data to temp file...")
oH5.create_dataset("Gas",data=G)
if (G0 is not None):
doGas0 = True
print("Writing Gas0")
oH5.create_dataset("Gas0",data=G0)
else:
doGas0 = False
oH5.create_dataset("magFlux",data=M)
oH5.create_dataset("oGas",data=oG)
oH5.create_dataset("omagFlux",data=oM)
gVals = ['X','Y','Z']
fGrid = grid
print("Reading grid from %s ..."%(fGrid))
iH5 = h5py.File(fGrid,'r')
for g in gVals:
oH5.create_dataset(g,data=iH5[g])
oH5.close()
#Upscale from temp file
# taking an mpi-decomposed restart, lazily stitching it together and writing it to one serial restart
fTmp2X = "tempRes.31337.2x.h5"
#Open input and output
oH5 = h5py.File(fTmp2X,'w')
iH5 = h5py.File(fTmp,'r') # iH5 is now the object for fTmp.
Ns,Nv,Nk,Nj,Ni = iH5['Gas'].shape
G = np.zeros((Ns,Nv,Nk,Nj,Ni))
M = np.zeros((3,Nk+1,Nj+1,Ni+1))
G[:,:,:,:,:] = iH5['Gas'][:]
M[ :,:,:,:] = iH5['magFlux'][:]
oG = np.zeros((Ns,Nv,Nk,Nj,Ni))
oM = np.zeros((3,Nk+1,Nj+1,Ni+1))
oG[:,:,:,:,:] = iH5['oGas'][:]
oM[ :,:,:,:] = iH5['omagFlux'][:]
if (doGas0):
G0[:,:,:,:,:] = iH5['Gas0'][:]
X = iH5['X'][:]
Y = iH5['Y'][:]
Z = iH5['Z'][:]
#Transfer attributes to output
for k in iH5.attrs.keys():
aStr = str(k)
oH5.attrs.create(k,iH5.attrs[aStr])
#Close input
iH5.close()
if (doRescale):
if (doUp):
print("Upscaling data ...")
#Do upscaling
Xr,Yr,Zr = upscl.upGrid(X,Y,Z)
Gr = upscl.upGas(X,Y,Z,G,Xr.T,Yr.T,Zr.T)
FluxR = upscl.upFlux(X,Y,Z,M,Xr,Yr,Zr)
oGr = upscl.upGas(X,Y,Z,oG,Xr.T,Yr.T,Zr.T)
oFluxR = upscl.upFlux(X,Y,Z,oM,Xr,Yr,Zr)
if (doGas0):
G0r = upscl.upGas(X,Y,Z,G0,Xr.T,Yr.T,Zr.T)
else:
print("Downscaling data ...")
Xr,Yr,Zr = upscl.downGrid(X,Y,Z)
Gr = upscl.downGas(X,Y,Z,G,Xr.T,Yr.T,Zr.T)
FluxR = upscl.downFlux(X,Y,Z,M,Xr,Yr,Zr)
oGr = upscl.downGas(X,Y,Z,oG,Xr.T,Yr.T,Zr.T)
oFluxR = upscl.downFlux(X,Y,Z,oM,Xr,Yr,Zr)
if (doGas0):
G0r = upscl.downGas(X,Y,Z,G0,Xr.T,Yr.T,Zr.T)
else:
#No rescale, just set variables
Xr = X.T #Adding transpose to be consistent w/ rescaling code
Yr = Y.T
Zr = Z.T
Gr = G
FluxR = M
oGr = oG
oFluxR = oM
if (doGas0):
G0r = G0
#Write out grid to restart
oH5.create_dataset("X",data=Xr.T)
oH5.create_dataset("Y",data=Yr.T)
oH5.create_dataset("Z",data=Zr.T)
#Write out gas/flux variables
oH5.create_dataset("Gas",data=Gr)
oH5.create_dataset("magFlux",data=FluxR)
oH5.create_dataset("oGas",data=oGr)
oH5.create_dataset("omagFlux",data=oFluxR)
if (doGas0):
oH5.create_dataset("Gas0",data=G0r)
#Close output
oH5.close()
#Split up upscaled file
if (doGas0):
upscl.PushRestartMPI(outid,nRes,oRi,oRj,oRk,Xr.T,Yr.T,Zr.T,Gr,FluxR,oGr,oFluxR,fTmp2X,G0r)
else:
upscl.PushRestartMPI(outid,nRes,oRi,oRj,oRk,Xr.T,Yr.T,Zr.T,Gr,FluxR,oGr,oFluxR,fTmp2X)
#Delete temp files
if (not doKeep):
os.remove(fTmp)
os.remove(fTmp2X)
# #!/usr/bin/env python
# #Takes one MPI-decomposed restart and spits out an upscaled MPI restart
# import argparse
# import kaipy.embiggenUtils as upscl
# from argparse import RawTextHelpFormatter
# import numpy as np
# import h5py
# import os
# import kaipy.kaiH5 as kh5
# if __name__ == "__main__":
# #Input tiling
# iRi = 4
# iRj = 4
# iRk = 1
# #Output tiling
# oRi = 8
# oRj = 8
# oRk = 1
# doFast = True
# doTest = False
# inid = "msphere"
# outid = "msphere"
# nRes = "0"
# MainS = """Upscales and retiles a Gamera MPI resart
# (iRi,iRj,iRk) : Input MPI decomposition
# (oRi,oRj,oRk) : Output MPI decomposition
# inid/nres : Run ID string and restart number, i.e. input file = inid.MPISTUFF.Res.#nres.h5
# outid : Output Run ID
# """
# parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
# parser.add_argument('-i',metavar='inid',default=inid,help="Input Run ID string (default: %(default)s)")
# parser.add_argument('-n',type=int,metavar="nres",default=0,help="Restart number (default: %(default)s)")
# parser.add_argument('-iRi',type=int,metavar="iRi",default=iRi,help="Input i-Ranks (default: %(default)s)")
# parser.add_argument('-iRj',type=int,metavar="iRj",default=iRj,help="Input j-Ranks (default: %(default)s)")
# parser.add_argument('-iRk',type=int,metavar="iRk",default=iRk,help="Input k-Ranks (default: %(default)s)")
# parser.add_argument('-o',type=str,metavar="outid",default=outid,help="Output run ID (default: %(default)s)")
# parser.add_argument('-oRi',type=int,metavar="oRi",default=oRi,help="Input i-Ranks (default: %(default)s)")
# parser.add_argument('-oRj',type=int,metavar="oRj",default=oRj,help="Input j-Ranks (default: %(default)s)")
# parser.add_argument('-oRk',type=int,metavar="oRk",default=oRk,help="Input k-Ranks (default: %(default)s)")
# parser.add_argument('--down',action='store_true',default=False,help='Downscale instead of upscale (default: %(default)s)')
# #Finalize parsing
# args = parser.parse_args()
# bStr = args.i
# nRes = args.n
# outid = args.o
# iRi = args.iRi
# iRj = args.iRj
# iRk = args.iRk
# oRi = args.oRi
# oRj = args.oRj
# oRk = args.oRk
# doUp = not args.down
# #Start by pulling tiled restart into one brick w/ halos
# X,Y,Z,nG,nM,nB,oG,oM,oB,G0,fIn = upscl.PullRestartMPI(bStr,nRes,iRi,iRj,iRk)
# if (doTest):
# rX = X
# rY = Y
# rZ = Z
# nrG = nG
# nrM = nM
# nrB = nB
# orG = oG
# orM = oM
# orB = oB
# rG0 = G0
# upscl.PushRestartMPI(outid,nRes,oRi,oRj,oRk,rX,rY,rZ,nrG,nrM,nrB,orG,orM,orB,rG0,fIn,dtScl=1.0)
# #Toy check
# upscl.CompRestarts(bStr,outid,nRes,iRi,iRj,iRk)
# else:
# #Do upscaling on all variables
# #Chop out last 2 cells on each side, then upscale
# #Start w/ grid
# rX,rY,rZ = upscl.upGrid(X,Y,Z)
# dVr = upscl.Volume(rX,rY,rZ)
# dV0 = upscl.Volume( X, Y, Z)
# #Face-centered fluxes
# nrM = upscl.upFlux(nM)
# #Now ready to do cell-centered variables
# nrG = upscl.upGas(nG,dV0,dVr,"Gas")
# nrB = upscl.upCCMag(nB,dV0,dVr,"Bxyz")
# if (G0 is not None):
# rG0 = upscl.upGas(G0,dV0,dVr,"Gas0")
# if (doFast):
# #Just replicate for oState
# orM = nrM
# orB = nrB
# orG = nrG
# else:
# orM = upscl.upFlux (oM)
# orB = upscl.upCCMag(oB,dV0,dVr,"Bxyz")
# orG = upscl.upGas (oG,dV0,dVr,"Gas")
# #Push back out to arbitrary decomposition
# #Update dt0 by x1/2
# upscl.PushRestartMPI(outid,nRes,oRi,oRj,oRk,rX,rY,rZ,nrG,nrM,nrB,orG,orM,orB,rG0,fIn,dtScl=0.5)

View File

@@ -1,70 +0,0 @@
#!/usr/bin/env python
#Takes MIX restart and up/down-scales it
import argparse
import kaipy.gamera.magsphereRescale as upscl
from argparse import RawTextHelpFormatter
import numpy as np
import h5py
import os
import kaipy.kaiH5 as kh5
if __name__ == "__main__":
dIn = os.getcwd()
inid = "msphere"
outid = "msphereX"
nRes = "0"
MainS = """Up/down-scales a ReMIX restart
inid/nres : Run ID string and restart number, i.e. input file = inid.MPISTUFF.Res.#nres.h5
outid : Output Run ID
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-i',metavar='inid',default=inid,help="Input Run ID string (default: %(default)s)")
parser.add_argument('-n',type=int,metavar="nres",default=0,help="Restart number (default: %(default)s)")
parser.add_argument('-o',type=str,metavar="outid",default=outid,help="Output run ID (default: %(default)s)")
parser.add_argument('--down',action='store_true',default=False,help='Downscale instead of upscale (default: %(default)s)')
#Finalize parsing
args = parser.parse_args()
bStr = args.i
nRes = args.n
outid = args.o
doUp = not args.down
fIn = bStr + ".mix.Res.%05d.h5"%(nRes)
fOut = outid + ".mix.Res.%05d.h5"%(nRes)
print("Reading from %s and writing to %s"%(fIn,fOut))
vIDs = kh5.getRootVars(fIn)
#Open input and output
iH5 = h5py.File(fIn ,'r')
oH5 = h5py.File(fOut,'w')
#Start by scraping attributes
for k in iH5.attrs.keys():
aStr = str(k)
print(aStr)
oH5.attrs.create(k,iH5.attrs[aStr])
for vID in vIDs:
print(vID)
Q = iH5[vID][:]
if (doUp):
Qr = upscl.upMIX(Q)
else:
Qr = upscl.downMIX(Q)
oH5.create_dataset(vID,data=Qr)
print(Q.shape)
print(Qr.shape)
#Close files
iH5.close()
oH5.close()

View File

@@ -1,111 +0,0 @@
#!/usr/bin/env python
#Takes RCM restart and up/down-scales it
import argparse
import kaipy.gamera.magsphereRescale as upscl
from argparse import RawTextHelpFormatter
import numpy as np
import h5py
import os
import kaipy.kaiH5 as kh5
if __name__ == "__main__":
dIn = os.getcwd()
inid = "msphere"
outid = "msphereX"
nRes = "0"
MainS = """Up/down-scales a RCM restart (kinda)
inid/nres : Run ID string and restart number, i.e. input file = inid.MPISTUFF.Res.#nres.h5
outid : Output Run ID
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-i',metavar='inid',default=inid,help="Input Run ID string (default: %(default)s)")
parser.add_argument('-n',type=int,metavar="nres",default=0,help="Restart number (default: %(default)s)")
parser.add_argument('-o',type=str,metavar="outid",default=outid,help="Output run ID (default: %(default)s)")
parser.add_argument('--down',action='store_true',default=False,help='Downscale instead of upscale (default: %(default)s)')
#Finalize parsing
args = parser.parse_args()
bStr = args.i
nRes = args.n
outid = args.o
doUp = not args.down
#Start w/ mhd2imag restart
fIn = bStr + ".mhd2imag.Res.%05d.h5"%(nRes)
fOut = outid + ".mhd2imag.Res.%05d.h5"%(nRes)
print("Reading from %s and writing to %s"%(fIn,fOut))
vIDs = kh5.getRootVars(fIn)
imNi,imNj = kh5.getDims(fIn,"Bmin") #Array size of coupler
#Open input and output
iH5 = h5py.File(fIn ,'r')
oH5 = h5py.File(fOut,'w')
#Start by scraping attributes
for k in iH5.attrs.keys():
aStr = str(k)
#print(aStr)
oH5.attrs.create(k,iH5.attrs[aStr])
for vID in vIDs:
Q = iH5[vID][:]
if (doUp):
Qr = upscl.upRCMCpl(Q,N=imNj)
else:
#Qr = upscl.downMIX(Q)
#Downscaling not implemented; bail out rather than write undefined data
print("Downscaling not implemented ...")
quit()
oH5.create_dataset(vID,data=Qr)
print("\t%s, Dims: %s => %s"%(vID,Q.shape,Qr.shape))
#Close files
iH5.close()
oH5.close()
#Now do RCM restart
fIn = bStr + ".RCM.Res.%05d.h5"%(nRes)
fOut = outid + ".RCM.Res.%05d.h5"%(nRes)
print("\n\nReading from %s and writing to %s"%(fIn,fOut))
vIDs = kh5.getRootVars(fIn)
Ni,Nj,Nk = kh5.getDims(fIn,"rcmeeta")
#Open input and output
iH5 = h5py.File(fIn ,'r')
oH5 = h5py.File(fOut,'w')
#Start by scraping attributes
for k in iH5.attrs.keys():
aStr = str(k)
#print(aStr)
oH5.attrs.create(k,iH5.attrs[aStr])
for vID in vIDs:
Q = iH5[vID][:]
if (doUp):
Qr = upscl.upRCM(Q,Ni=Ni,Nj=Nj,Nk=Nk)
else:
#Qr = upscl.downMIX(Q)
#Downscaling not implemented; bail out rather than write undefined data
print("Downscaling not implemented ...")
quit()
print("\t%s, Dims: %s => %s"%(vID,Q.shape,Qr.shape))
oH5.create_dataset(vID,data=Qr)
#Now finish
iH5.close()
oH5.close()
Nri = Ni
Nrj = (Nj-2)*2+2
print("\n\nFinished rescaling, new RCM config must be:")
print("\tRCMSIZEI = %d"%(Nri))
print("\tRCMSIZEJ = %d"%(Nrj))
print("\tRCMSIZEK = %d"%(Nk))

View File

@@ -1,70 +0,0 @@
#!/usr/bin/env python
#Takes VOLT restart and up/down-scales it
import argparse
import kaipy.gamera.magsphereRescale as upscl
from argparse import RawTextHelpFormatter
import numpy as np
import h5py
import os
import kaipy.kaiH5 as kh5
if __name__ == "__main__":
dIn = os.getcwd()
inid = "msphere"
outid = "msphereX"
nRes = "0"
MainS = """Up/down-scales a Volt restart
inid/nres : Run ID string and restart number, i.e. input file = inid.MPISTUFF.Res.#nres.h5
outid : Output Run ID
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-i',metavar='inid',default=inid,help="Input Run ID string (default: %(default)s)")
parser.add_argument('-n',type=int,metavar="nres",default=0,help="Restart number (default: %(default)s)")
parser.add_argument('-o',type=str,metavar="outid",default=outid,help="Output run ID (default: %(default)s)")
parser.add_argument('--down',action='store_true',default=False,help='Downscale instead of upscale (default: %(default)s)')
#Finalize parsing
args = parser.parse_args()
bStr = args.i
nRes = args.n
outid = args.o
doUp = not args.down
fIn = bStr + ".volt.Res.%05d.h5"%(nRes)
fOut = outid + ".volt.Res.%05d.h5"%(nRes)
print("Reading from %s and writing to %s"%(fIn,fOut))
vIDs = kh5.getRootVars(fIn)
#Open input and output
iH5 = h5py.File(fIn ,'r')
oH5 = h5py.File(fOut,'w')
#Start by scraping attributes
for k in iH5.attrs.keys():
aStr = str(k)
print(aStr)
oH5.attrs.create(k,iH5.attrs[aStr])
for vID in vIDs:
print(vID)
Q = iH5[vID][:]
if (doUp):
Qr = upscl.upVolt(Q)
else:
Qr = upscl.downVolt(Q)
oH5.create_dataset(vID,data=Qr)
print(Q.shape)
print(Qr.shape)
#Close files
iH5.close()
oH5.close()

View File

@@ -1,298 +0,0 @@
#!/usr/bin/env python
import argparse
import os
import h5py as h5
import kaipy.kaiH5 as kh5
import kaipy.kaixdmf as kxmf
import xml.etree.ElementTree as et
import xml.dom.minidom
import numpy as np
presets = {"gam", "mhdrcm_eq", "mhdrcm_bmin"}
def getDimInfo(h5fname,s0IDstr,preset):
result = {}
if preset == "mhdrcm_eq":
gridVars = ['xMin', 'yMin']
with h5.File(h5fname, 'r') as h5f:
gDims = np.asarray(h5f[s0IDstr][gridVars[0]].shape)
result['gridVars'] = gridVars
result['gDims'] = gDims
result['vDims'] = gDims
result['Nd'] = len(gDims)
result['geoStr'] = "X_Y"
result['topoStr'] = "2DSMesh"
result['doAppendStep'] = True
elif preset == "mhdrcm_bmin":
gridVars = ['xMin','yMin','zMin']
with h5.File(h5fname, 'r') as h5f:
gDims = np.asarray(h5f[s0IDstr][gridVars[0]].shape)
#gDims = np.append(gDims, 1)
result['gridVars'] = gridVars
result['gDims'] = gDims
result['vDims'] = gDims
result['Nd'] = len(gDims)
result['geoStr'] = "X_Y_Z"
result['topoStr'] = "3DSMesh"
result['doAppendStep'] = True
elif preset=="rcm3D":
gridVars = ["rcmxmin_kji", "rcmymin_kji", "rcmalamc_kji"]
with h5.File(h5fname, 'r') as h5f:
gDims = np.asarray(h5f[s0IDstr][gridVars[0]].shape)
result['gridVars'] = gridVars
result['gDims'] = gDims
result['vDims'] = gDims
result['Nd'] = len(gDims)
result['geoStr'] = "X_Y_Z"
result['topoStr'] = "3DSMesh"
result['doAppendStep'] = True
else: # gam, mhdrcm_iono, etc.
#Get root-level XY(Z) dimensions
#First check to see if they exist
with h5.File(h5fname,'r') as f5:
if 'X' not in f5.keys():
print("No X(YZ) in root vars. Maybe try a preset")
quit()
gDims = kh5.getDims(h5fname,doFlip=False) #KJI ordering
Nd = len(gDims)
if Nd == 2:
gridVars = ['X', 'Y']
topoStr = "2DSMesh"
elif Nd == 3:
gridVars = ['X', 'Y', 'Z']
topoStr = "3DSMesh"
result['gridVars'] = gridVars
result['gDims'] = gDims
result['vDims'] = gDims - 1
result['Nd'] = len(gDims)
result['geoStr'] = '_'.join(gridVars)
result['topoStr'] = topoStr
result['doAppendStep'] = False
return result
def addRCMVars(Grid, dimInfo, rcmInfo, sID):
sIDstr = "Step#" + str(sID)
mr_vDims = dimInfo['vDims'] # mhdrcm var dims
mr_vDimStr = ' '.join([str(v) for v in mr_vDims])
mr_nDims = len(mr_vDims)
rcmh5fname = rcmInfo['rcmh5fname']
rcmVars = rcmInfo['rcmVars'] # List of rcm.h5 variables we want in mhdrcm.xmf
rcmKs = rcmInfo['rcmKs'] # List of rcm.h5 k values for 3D rcm.h5 vars
rcm5 = h5.File(rcmh5fname,'r')
if 'Nk' not in rcmInfo.keys():
rcmInfo['Nj'], rcmInfo['Ni'] = rcm5[sIDstr]['aloct'].shape
rcmInfo['Nk'] = rcm5[sIDstr]['alamc'].shape[0]
#print("Adding Nk to rcmInfo")
Ni = rcmInfo['Ni']
Nj = rcmInfo['Nj']
Nk = rcmInfo['Nk']
for vName in rcmVars:
doHyperslab = False
r_var = rcm5[sIDstr][vName]
r_vShape = r_var.shape
r_vDimStr = " ".join([str(d) for d in r_vShape])
r_nDims = len(r_vShape)
dimTrim = 0
if (r_nDims == 2 and mr_vDims[0] < Nj) or (r_nDims == 3 and mr_vDims[1] < Nj):
doHyperslab = True
dimTrim = (Nj - mr_vDims[0]) if mr_nDims == 2 else (Nj - mr_vDims[1])
if r_nDims == 2 and doHyperslab == False: # Easy add
#print("Adding " + vName)
kxmf.AddData(Grid,rcmh5fname, vName,"Cell",mr_vDimStr,sID)
continue
#Add data as a hyperslab
if doHyperslab:
#Do 2D stuff. If 3D needed, will be added in a sec
dimStr = "3 2"
startStr = "{} 0".format(dimTrim)
strideStr = "1 1"
numStr = "{} {}".format(Nj-dimTrim, Ni)
text = "{}:/{}/{}".format(rcmh5fname,sIDstr,vName)
if r_nDims == 2:
kxmf.addHyperslab(Grid,vName,mr_vDimStr,dimStr,startStr,strideStr,numStr,r_vDimStr,text)
continue
elif r_nDims == 3:
dimStr = "3 3"
strideStr = str(Nk+1) + " 1 1"
numStr = "1 {} {}".format(Nj-dimTrim,Ni)
for k in rcmKs:
startStr = "{} {} 0".format(k,dimTrim)
vName_k = vName + "_k{}".format(k)
kxmf.addHyperslab(Grid,vName_k,mr_vDimStr,dimStr,startStr,strideStr,numStr,r_vDimStr,text)
if __name__ == "__main__":
outfname = ''
MainS = """Generates XDMF file from non-MPI HDF5 output
"""
parser = argparse.ArgumentParser(description="Generates XDMF file from Gamera HDF5 output")
parser.add_argument('h5F',type=str,metavar='model.h5',help="Filename of Kaiju HDF5 Output")
parser.add_argument('-outname',type=str,default=outfname,help="Name of generated XMF file")
parser.add_argument('-preset', type=str,choices=presets,help="Tell the script what the file is (in case not derivable from name)")
parser.add_argument('-rcmf',type=str,default="msphere.rcm.h5",help="rcm.h5 file to use with '-rcmv' and '-rcmk' args (default: %(default)s)")
parser.add_argument('-rcmv',type=str,help="Comma-separated rcm.h5 vars to include in an mhdrcm preset (ex: rcmvm, rcmeeta)")
parser.add_argument('-rcmk',type=str,help="Comma-separated RCM k values to pull from 3D vars specified with '-rcmv'")
args = parser.parse_args()
h5fname = args.h5F
outfname = args.outname
preset = args.preset
rcmh5fname = args.rcmf
rcmVars = args.rcmv
rcmKs = args.rcmk
pre,ext = os.path.splitext(h5fname)
if outfname is None or outfname == "":
fOutXML = pre + ".xmf"
else:
fOutXML = outfname
#Scrape necessary data from H5 file
nSteps,sIDs = kh5.cntSteps(h5fname)
sIDs = np.sort(sIDs)
sIDstrs = ['Step#'+str(s) for s in sIDs]
s0 = sIDs[0]
s0str = sIDstrs[0]
#Determine grid and dimensionality
if preset is None: preset = ""
dimInfo = getDimInfo(h5fname, s0str, preset)
gridVars = dimInfo['gridVars']
gDims = dimInfo['gDims']
vDims = dimInfo['vDims']
Nd = dimInfo['Nd']
geoStr = dimInfo['geoStr']
topoStr = dimInfo['topoStr']
doAppendStep = dimInfo['doAppendStep']
gDimStr = ' '.join([str(v) for v in gDims])
vDimStr = ' '.join([str(v) for v in vDims])
doAddRCMVars = False
#Prep to include some rcmh5 vars in mhdrcm.xmf file
if 'mhdrcm' in preset and rcmVars is not None:
doAddRCMVars = True
rcmVars = rcmVars.split(',')
rcmKs = [int(k) for k in rcmKs.split(',')] if rcmKs is not None else []
rcmInfo = {}
rcmInfo['rcmh5fname'] = rcmh5fname
rcmInfo['rcmVars'] = rcmVars
rcmInfo['rcmKs'] = rcmKs
#Get variable information
print("Getting variable information")
Nt = len(sIDstrs)
T = np.zeros(Nt)
# Also get file info, in case any of the steps are ExternalLinks to other files
# Assume this is done at the step level
fNames_link = [""]*Nt
steps_link = [""]*Nt
with h5.File(h5fname,'r') as f5:
for i,sIDstr in enumerate(sIDstrs):
if 'time' in f5[sIDstr].attrs.keys():
T[i] = f5[sIDstr].attrs['time']
else:
T[i] = int(sIDstr.split("#")[1])
fNames_link[i] = f5[sIDstr].file.filename.split('/')[-1] # !!NOTE: This means the xdmf file must live in the same directory as the data files
steps_link[i] = int(f5[sIDstr].name.split('#')[1])
#steps = np.array([k for k in f5.keys() if "Step" in k])
print("Getting Vars and RootVars")
vIds ,vLocs = kxmf.getVars(h5fname,s0str,gDims)
rvIds,rvLocs = kxmf.getRootVars(h5fname,gDims)
Nv = len(vIds)
Nrv = len(rvIds)
print("Generating XDMF from %s"%(h5fname))
print("Writing to %s"%(fOutXML))
print("\t%d Time Slices / %d Variables"%(nSteps,len(vIds)))
print("\tGrid: %s"%str(gDims))
print("\tSlices: %d -> %d"%(sIDs.min(),sIDs.max()))
print("\tTime: %3.3f -> %3.3f"%(T.min(),T.max()))
#Construct XDMF XML file
#-----------------------
Xdmf = et.Element("Xdmf")
Xdmf.set("Version","2.0")
Dom = et.SubElement(Xdmf,"Domain")
TGrid = et.SubElement(Dom,"Grid")
TGrid.set("Name","tMesh")
TGrid.set("GridType","Collection")
TGrid.set("CollectionType","Temporal")
#Loop over time slices
print("Writing info for each step")
for n in range(Nt):
nStp = sIDs[n]
Grid = et.SubElement(TGrid,"Grid")
mStr = "gMesh"#+str(nStp)
Grid.set("Name",mStr)
Grid.set("GridType","Uniform")
Topo = et.SubElement(Grid,"Topology")
Topo.set("TopologyType",topoStr)
Topo.set("NumberOfElements",gDimStr)
Geom = et.SubElement(Grid,"Geometry")
Geom.set("GeometryType",geoStr)
#Add grid info to each step
if doAppendStep:
stepStr = sIDstrs[n]
sgVars = [os.path.join(stepStr, v) for v in gridVars]
kxmf.AddGrid(fNames_link[n],Geom,gDimStr,sgVars)
else:
kxmf.AddGrid(fNames_link[n],Geom,gDimStr,gridVars)
Time = et.SubElement(Grid,"Time")
Time.set("Value","%f"%T[n])
if preset=="rcm3D":
with h5.File(h5fname,'r') as f5:
other = et.SubElement(Grid, "dtCpl")
other.set("Value","%f"%f5[sIDstrs[n]].attrs['dtCpl'])
#--------------------------------
#Step variables
for v in range(Nv):
kxmf.AddData(Grid,fNames_link[n],vIds[v],vLocs[v],vDimStr,steps_link[n])
#--------------------------------
#Base grid variables
for v in range(Nrv):
kxmf.AddData(Grid,fNames_link[n],rvIds[v],rvLocs[v],vDimStr)
if doAddRCMVars:
addRCMVars(Grid, dimInfo, rcmInfo, sIDs[n])
#--------------------------------
#Add some extra aliases
if preset=="gam":
kxmf.AddVectors(Grid,h5fname,vIds,vDimStr,vDims,Nd,nStp)
#Finished creating XML tree, now write
xmlStr = xml.dom.minidom.parseString(et.tostring(Xdmf)).toprettyxml(indent=" ")
print("Saving as {}".format(fOutXML))
with open(fOutXML,"w") as f:
f.write(xmlStr)
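A worked example of the hyperslab strings assembled in addRCMVars above, with illustrative sizes (an rcm.h5 grid of Nj x Ni = 96 x 180 trimmed to an 80 x 180 coupler grid, Nk = 90, one k-plane at k = 5); the numbers simply mirror the format strings in that function:

Ni, Nj, Nk = 180, 96, 90
mr_vDims = (80, 180)                 # mhdrcm variable dims (j already trimmed)
dimTrim = Nj - mr_vDims[0]           # 16 rows dropped from the low-j side

# 2D variable: origin / stride / count rows of the hyperslab selection
startStr  = "{} 0".format(dimTrim)              # '16 0'
strideStr = "1 1"
numStr    = "{} {}".format(Nj - dimTrim, Ni)    # '80 180'

# 3D variable, single k-plane
k = 5
startStr3  = "{} {} 0".format(k, dimTrim)       # '5 16 0'
strideStr3 = str(Nk + 1) + " 1 1"               # '91 1 1'
numStr3    = "1 {} {}".format(Nj - dimTrim, Ni) # '1 80 180'
print(startStr, strideStr, numStr)
print(startStr3, strideStr3, numStr3)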

View File

@@ -1,188 +0,0 @@
#!/usr/bin/env python
import argparse
import os
import h5py
import kaipy.kaiH5 as kh5
#import lxml.etree as et
import xml.etree.ElementTree as et
import xml.dom.minidom
import numpy as np
def cntX(fname,gID=None,StrX="/Step#"):
with h5py.File(fname,'r') as hf:
if (gID is not None):
grps = hf[gID].values()
else:
grps = hf.values()
grpNames = [str(grp.name) for grp in grps]
#Steps = [stp if "/Step#" in stp for stp in grpNames]
Steps = [stp for stp in grpNames if StrX in stp]
nSteps = len(Steps)
sIds = np.array([str.split(s,"#")[-1] for s in Steps],dtype=int)
return nSteps,sIds
def getVars(fname,smin):
#Get variable names from Step#0/Line#0
with h5py.File(fname,'r') as hf:
gID = "/Step#%d/Line#0"%(smin)
vIDs = []
for k in hf[gID].keys():
vIDs.append(str(k))
#Remove coordinate vars
xyzS = ["xyz","LCon"]
for s in xyzS:
if s in vIDs:
vIDs.remove(s)
Nv = len(vIDs)
return Nv,vIDs
def getAtts(fIn,n,m):
#Get attribute names from Step#0/Line#0
with h5py.File(fIn,'r') as hf:
gId = "Step#%d"%(n)
lId = "Line#%d"%(m)
Atts = hf[gId][lId].attrs.keys()
aNull = ["Np","n0"]
aIDs = [x for x in Atts if x not in aNull]
aVs = []
Na = len(aIDs)
for a in range(Na):
aVs.append(float(hf[gId][lId].attrs[aIDs[a]]))
aVs.append(float(m))
aIDs.append("ID")
return aIDs,aVs
def getNum(fIn,n,m):
with h5py.File(fIn,'r') as hf:
gId = "Step#%d"%(n)
lId = "Line#%d"%(m)
Np = hf[gId][lId].attrs["Np"]
return Np
if __name__ == "__main__":
#Set defaults
parser = argparse.ArgumentParser(description="Generates XDMF file from CHIMP tracer HDF5 output")
parser.add_argument('h5F',nargs=1,type=str,metavar='tracer.h5',help="Filename of CHIMP tracer HDF5 Output")
parser.add_argument('-noatts', action='store_true', default=False,help="Don't add XDMF scalars (default: %(default)s)")
#Finished getting arguments, parse and move on
args = parser.parse_args()
fIn = args.h5F[0]
doAtts = not args.noatts
#Create XML filename
pre,ext = os.path.splitext(fIn)
fOutXML = pre + ".xmf"
print("Reading from %s"%(fIn))
kh5.CheckOrDie(fIn)
#Count steps and lines
Nstp,sIds = cntX(fIn)
s0 = sIds.min()
if (s0 == 0):
s0 = s0+1
Nstp = Nstp-1
gID = "Step#%d"%(s0)
Nl,lIds = cntX(fIn,gID=gID,StrX="Line#")
Nv,vIds = getVars(fIn,s0)
print("\tFound %d steps"%(Nstp))
print("\tFound %d lines/step"%(Nl))
print("\tFound %d vars/line"%(Nv))
#Get times
T = kh5.getTs(fIn,sIds)
#Now build XDMF file
#-----------------------
Xdmf = et.Element("Xdmf")
Xdmf.set("Version","2.0")
Dom = et.SubElement(Xdmf,"Domain")
TGrid = et.SubElement(Dom,"Grid")
TGrid.set("Name","tlMesh")
TGrid.set("GridType","Collection")
TGrid.set("CollectionType","Temporal")
#Loop over time slices
for n in range(Nstp):
lGrid = et.SubElement(TGrid,"Grid")
lGrid.set("Name","tLines")
lGrid.set("GridType","Collection")
#Add time
tLab = et.SubElement(lGrid,"Time")
tLab.set("Value","%f"%T[n])
nStp = n + s0
#Loop over individual lines
for m in range(Nl):
#Get number of points for this step/line
Np = getNum(fIn,nStp,m)
#Create main grid structure
l0G = et.SubElement(lGrid,"Grid")
l0G.set("GridType","Uniform")
l0G.set("Name","Line#%d"%(m))
#Add topology/connectivity
Topo = et.SubElement(l0G,"Topology")
Topo.set("TopologyType","Polyline")
Topo.set("NumberOfElements","1")
Topo.set("NodesPerElement",str(Np-1))
Geom = et.SubElement(l0G,"Geometry")
Geom.set("GeometryType","XYZ")
xC = et.SubElement(Geom,"DataItem")
xC.set("Dimensions","%d 3"%(Np))
xC.set("NumberType","Float")
xC.set("Precision","4")
xC.set("Format","HDF")
xC.text = "%s:/Step#%d/Line#%d/xyz"%(fIn,nStp,m)
#Now loop over variables
for v in range(Nv):
vAtt = et.SubElement(l0G,"Attribute")
vAtt.set("Name",vIds[v])
vAtt.set("AttributeType","Scalar")
vAtt.set("Center","Node")
vDI = et.SubElement(vAtt,"DataItem")
vDI.set("Dimensions",str(Np))
vDI.set("NumberType","Float")
vDI.set("Precision","4")
vDI.set("Format","HDF")
vDI.text = "%s:/Step#%d/Line#%d/%s"%(fIn,nStp,m,vIds[v])
if (doAtts):
#Add scalar attributes in lazy XDMF way
aIDs,aVs = getAtts(fIn,nStp,m)
Na = len(aIDs)
for a in range(Na):
#Main variable
vAtt = et.SubElement(l0G,"Attribute")
vAtt.set("Name",aIDs[a])
vAtt.set("AttributeType","Scalar")
vAtt.set("Center","Node")
#Function setup
vDI = et.SubElement(vAtt,"DataItem")
vDI.set("Dimensions",str(Np))
vDI.set("Function","%e*$0/$0"%(float(aVs[a])))
vDI.set("ItemType","Function")
#Argument: any per-point dataset works here; it only supplies the shape for the constant field
vNull = et.SubElement(vDI,"DataItem")
vNull.set("Dimensions",str(Np))
vNull.set("NumberType","Float")
vNull.set("Precision","4")
vNull.set("Format","HDF")
vNull.text = "%s:/Step#%d/Line#%d/%s"%(fIn,nStp,m,vIds[v])
#Finished creating XML, now write
xmlStr = xml.dom.minidom.parseString(et.tostring(Xdmf)).toprettyxml(indent=" ")
with open(fOutXML,"w") as f:
f.write(xmlStr)
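The "lazy XDMF" attributes above broadcast a per-line scalar to every node by multiplying the constant into a dataset divided by itself; the Function string is built like this (the value is illustrative):

val = 3.5
funcStr = "%e*$0/$0" % (val)
print(funcStr)      # 3.500000e+00*$0/$0  -> every node evaluates to 3.5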

View File

@@ -1,172 +0,0 @@
#!/usr/bin/env python
#Make XMF files from an MPI-decomposed gamera run
import argparse
from argparse import RawTextHelpFormatter
import numpy as np
import kaipy.gamera.gampp as gampp
import kaipy.kaixdmf as kxmf
import xml.etree.ElementTree as et
import xml.dom.minidom
import kaipy.kaiH5 as kh5
import os
if __name__ == "__main__":
#Defaults
fdir = os.getcwd()
ftag = "msphere"
outid = "sim"
sStride = 1
MainS = """Creates series of XMF files from MPI-decomposed Gamera run
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-d',type=str,metavar="directory",default=fdir,help="Directory to read from (default: %(default)s)")
parser.add_argument('-id',type=str,metavar="runid",default=ftag,help="RunID of data (default: %(default)s)")
parser.add_argument('-outid',type=str,metavar="outid",default=outid,help="RunID of output XMF files (default: %(default)s)")
parser.add_argument('-sS',type=int,metavar="stride",default=sStride,help="Output cadence (default: %(default)s)")
#Finalize parsing
args = parser.parse_args()
fdir = args.d
ftag = args.id
outid = args.outid
sStride = args.sS
#---------------------
#Init data
gamData = gampp.GameraPipe(fdir,ftag)
#---------------------
#Do work
Ri = gamData.Ri
Rj = gamData.Rj
Rk = gamData.Rk
Ni = gamData.dNi
Nj = gamData.dNj
Nk = gamData.dNk
iDims = "%d %d %d"%(Nk+1,Nj+1,Ni+1)
cDims = "%d %d %d"%(Nk+0,Nj+0,Ni+0)
iDimA = "%d %d %d"%(Nk*Rk+1,Nj*Rj+1,Ni*Ri+1)
tOut = 0
topoStr = "3DSMesh"
geoStr = "X_Y_Z"
#for n in range(gamData.s0,gamData.sFin+1,sStride):
for n in gamData.sids:
if (n-gamData.s0)%sStride != 0: continue
nslc = n-gamData.s0
#print(n,gamData.T[nslc])
#Create XMF tree
Xdmf = et.Element("Xdmf")
Xdmf.set("Version","2.0")
Dom = et.SubElement(Xdmf,"Domain")
#Spatial collection
gCol = et.SubElement(Dom,"Grid")
gCol.set("Name","gMesh")
gCol.set("GridType","Collection")
gCol.set("CollectionType","Spatial")
Time = et.SubElement(gCol,"Time")
#Time.set("Value","%s"%(str(gamData.T[np.where(gamData.sids == n)][0])))
Time.set("Value","%s"%(gamData.T[nslc]))
Cyc = et.SubElement(gCol,"Cycle")
Cyc.set("Value","%d"%(n))
#Now loop over MPI decomposition
for i in range(Ri):
for j in range(Rj):
for k in range(Rk):
nMPI = j + i*Rj + k*Ri*Rj
h5F = kh5.genName(ftag,i,j,k,Ri,Rj,Rk)
h5F = os.path.join(fdir, h5F)
#Get variable info
gDims = np.array([gamData.dNk+1,gamData.dNj+1,gamData.dNi+1])
vDims = np.array([gamData.dNk,gamData.dNj,gamData.dNi])
vDimStr = ' '.join([str(v) for v in vDims])
if nMPI == 0 and tOut == 0: # Only do this the first time
vIds ,vLocs = kxmf.getVars(h5F,'Step#'+str(n),gDims)
rvIds,rvLocs = kxmf.getRootVars(h5F,gDims)
Nv = len(vIds)
Nrv = len(rvIds)
#Create new subgrid
gName = "gMesh%d"%(nMPI)
Grid = et.SubElement(gCol,"Grid")
Grid.set("GridType","Uniform")
Grid.set("Name",gName)
# Time = et.SubElement(Grid,"Time")
# Time.set("TimeType","Single")
# Time.set("Value","%s"%(str(gamData.T[nslc])))
Topo = et.SubElement(Grid,"Topology")
Topo.set("TopologyType",topoStr)
Topo.set("NumberOfElements",iDims)
Geom = et.SubElement(Grid,"Geometry")
Geom.set("GeometryType",geoStr)
xC = et.SubElement(Geom,"DataItem")
xC.set("Dimensions",iDims)
xC.set("NumberType","Float")
xC.set("Precision","4")
xC.set("Format","HDF")
xC.text = "%s:/X"%(h5F)
yC = et.SubElement(Geom,"DataItem")
yC.set("Dimensions",iDims)
yC.set("NumberType","Float")
yC.set("Precision","4")
yC.set("Format","HDF")
yC.text = "%s:/Y"%(h5F)
zC = et.SubElement(Geom,"DataItem")
zC.set("Dimensions",iDims)
zC.set("NumberType","Float")
zC.set("Precision","4")
zC.set("Format","HDF")
zC.text = "%s:/Z"%(h5F)
#Create variables
"""
for v in range(Nv):
vID = vIDs[v]
vAtt = et.SubElement(Grid,"Attribute")
vAtt.set("Name",vID)
vAtt.set("AttributeType","Scalar")
vAtt.set("Center","Cell")
aDI = et.SubElement(vAtt,"DataItem")
aDI.set("Dimensions",cDims)
aDI.set("NumberType","Float")
aDI.set("Precision","4")
aDI.set("Format","HDF")
aDI.text = "%s:/Step#%d/%s"%(h5F,n,vID)
"""
for v in range(Nv):
kxmf.AddData(Grid,h5F,vIds[v],vLocs[v],vDimStr,n)
for rv in range(Nrv):
kxmf.AddData(Grid,h5F,rvIds[rv],rvLocs[rv],vDimStr)
#Write output
fOut = "%s/%s.%06d.xmf"%(fdir,outid,tOut)
print("Writing %s"%(fOut))
#xTree = et.ElementTree(Xdmf)
#xTree.write(fOut,pretty_print=True,xml_declaration=True,encoding='UTF-8')
xmlStr = xml.dom.minidom.parseString(et.tostring(Xdmf)).toprettyxml(indent=" ")
with open(fOut,"w") as f:
f.write(xmlStr)
#Prep for next step
tOut = tOut+1

View File

@@ -1,50 +0,0 @@
#!/usr/bin/env python
#Simple script to spit out the step information of a Kaiju H5 file
import argparse
from argparse import RawTextHelpFormatter
import os
import kaipy.kaiH5 as kh5
import numpy as np
def MJD2Str(m0):
from astropy.time import Time
dtObj = Time(m0,format='mjd').datetime
tStr = dtObj.strftime("%H:%M:%S") + " " + dtObj.strftime("%m/%d/%Y")
return tStr
if __name__ == "__main__":
#Defaults
MainS = """Identifies the domain (in steps and time) of a Kaiju HDF-5 file"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('h5F',nargs='+',metavar='Gamera.h5',help="Filename of Gamera HDF5 Output")
#Finished getting arguments, parse and move on
args = parser.parse_args()
#h5F = args.h5F
for idx, h5F in enumerate(args.h5F):
print("Reading %s"%(h5F))
nSteps,sIds = kh5.cntSteps(h5F)
s0 = sIds.min()
sE = sIds.max()
print("\tFound %d steps"%(nSteps))
print("\tSteps = [%d,%d]"%(s0,sE))
tMin = kh5.getTs(h5F,np.array([s0]))
tMax = kh5.getTs(h5F,np.array([sE]))
print("\tTime = [%f,%f]"%(tMin,tMax))
MJDs = kh5.getTs(h5F,sIds,"MJD",-np.inf)
if (MJDs.max()>0):
MJDMin = MJDs.min()
MJDMax = MJDs.max()
print("\tMJD = [%f,%f]"%(MJDMin,MJDMax))
tS1 = MJD2Str(MJDMin)
tS2 = MJD2Str(MJDMax)
print("\t\tStart: %s"%(tS1))
print("\t\tStop : %s"%(tS2))
hStr = kh5.GetHash(h5F)
bStr = kh5.GetBranch(h5F)
print("\tGit: Hash = %s / Branch = %s"%(hStr,bStr))
#---------------------

View File

@@ -1,159 +0,0 @@
#!/usr/bin/env python
# Joins decomposed eb files generated using "Parallel In Time" into a single file
# Example: bit.ly/3OQg71F
import argparse
from argparse import RawTextHelpFormatter
import numpy as np
import h5py
import os
import kaipy.kaiH5 as kh5
import glob
import kaipy.kdefs as kd
tEps = 1.0e-3 #Small time
#Create new file w/ same root vars/attributes as old
def createfile(fIn,fOut,doLink=False):
print('Creating new output file:',fOut)
iH5 = h5py.File(fIn,'r')
oH5 = h5py.File(fOut,'w')
#Start by scraping all variables from root
#Copy root attributes
print("Copying root attributes ...")
for k in iH5.attrs.keys():
aStr = str(k)
oH5.attrs.create(k,iH5.attrs[aStr])
print("\t%s"%(aStr))
#Copy root groups
print("Copying root variables ...")
for Q in iH5.keys():
sQ = str(Q)
#Skip cache, we add it later
if kd.grpTimeCache in sQ:
continue
#Don't include stuff that starts with "Step"
if "Step" not in sQ:
if doLink:
oH5[sQ] = h5py.ExternalLink(fIn, sQ)
else:
oH5.create_dataset(sQ,data=iH5[sQ])
print("\t%s"%(sQ))
iH5.close()
return oH5
if __name__ == "__main__":
dIn = os.getcwd()
runid = "msphere"
typeid = "deltab"
MainS = """Joins blocks created by calcdb.x (or similar CHIMP routines) into single file
runid : Run ID
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-runid',type=str,metavar="runid",default=runid,help="Input run ID (default: %(default)s)")
parser.add_argument('-typeid',type=str,metavar="typeid",default=typeid,help="Input type ID (default: %(default)s)")
parser.add_argument('--link',action='store_true',help="Create links to existing files rather than copy data (default: %(default)s)")
#Finalize parsing
args = parser.parse_args()
runid = args.runid
typeid = args.typeid
doLink = args.link
globStr = '%s.????.%s.h5'%(runid,typeid)
dbIns = glob.glob(globStr)
dbIns.sort()
if doLink:
fOut = "%s.%s.link.h5"%(runid,typeid)
else:
fOut = "%s.%s.h5"%(runid,typeid)
N = len(dbIns)
print("Found %d files, writing output to %s"%(N,fOut))
if (N == 0):
print("No files found, exiting")
exit()
#Create file w/ attributes and root variables as first file
oH5 = createfile(dbIns[0],fOut, doLink)
# Store and concat timeAttributeCache to add at the very end
timeCacheVars = {}
with h5py.File(dbIns[0], 'r') as tacF:
for k in tacF[kd.grpTimeCache].keys():
timeCacheVars[k] = np.array([], dtype=tacF[kd.grpTimeCache][k].dtype)
s0 = 0 #Current step
nowTime = 0.0
oldTime = -np.inf
#Now loop over files
for n in range(N):
fIn = dbIns[n]
Ns,sIDs = kh5.cntSteps(fIn)
nS = sIDs.min()
nE = sIDs.max()
dN = nE-nS+1
print("Reading steps %d to %d from %s"%(nS,nE,fIn))
print("\tWriting to %d to %d"%(s0,s0+dN-1))
iH5 = h5py.File(fIn,'r')
# Grow timeAttributeCache
for k in iH5[kd.grpTimeCache].keys():
data = iH5[kd.grpTimeCache][k][:]
if k == 'step':
data += s0 # Cache for merged h5 file needs to remap original steps to their position in merged file
timeCacheVars[k] = np.append(timeCacheVars[k], data, axis=0)
#Loop over steps in the input file
for s in range(nS,nE+1):
#Input
igStr = "Step#%d"%(s)
ogStr = "Step#%d"%(s0)
#Check if this is too close to last value
nowTime = kh5.tStep(fIn,s)
#print(nowTime,oldTime)
if ( np.abs(nowTime-oldTime)<=tEps):
print("\tSkipping step %d"%(s))
continue
else:
#Good value, update old time
oldTime = nowTime
if doLink:
oH5[ogStr] = h5py.ExternalLink(fIn, igStr)
else:
oH5.create_group(ogStr)
print("Copying %s to %s"%(igStr,ogStr))
#Group atts
for k in iH5[igStr].attrs.keys():
aStr = str(k)
oH5[ogStr].attrs.create(k,iH5[igStr].attrs[aStr])
#print(aStr)
#Group vars
for Q in iH5[igStr].keys():
sQ = str(Q)
oH5[ogStr].create_dataset(sQ,data=iH5[igStr][sQ])
#Update s0
s0 = s0 + 1
iH5.close()
# Write timeAttributeCache to output file
print("Writing " + kd.grpTimeCache)
tag = oH5.create_group(kd.grpTimeCache)
for k in timeCacheVars:
tag.create_dataset(k, data=timeCacheVars[k], dtype=timeCacheVars[k].dtype)
#Done
oH5.close()
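A small sketch of the de-duplication rule used above: a step is skipped when its time is within tEps of the previously kept one (times and tolerance are illustrative):

import numpy as np

tEps = 1.0e-3
times = [0.0, 60.0, 120.0, 120.0 + 5.0e-4, 180.0]   # two nearly identical slices
kept, oldTime = [], -np.inf
for t in times:
    if np.abs(t - oldTime) <= tEps:
        continue                 # near-duplicate, skip it
    kept.append(t)
    oldTime = t
print(kept)                      # [0.0, 60.0, 120.0, 180.0]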

View File

@@ -1,104 +0,0 @@
#!/usr/bin/env python3
"""
Overengineered script to print the time of each restart file by parsing h5dump output
Uses only packages available on Cheyenne
"""
import subprocess
import glob
import argparse
from argparse import RawTextHelpFormatter
def sortFn(elem): #Used to sort final list in order of nRes
return int(elem['nRes'])
def getAttrKeyValue(lineList):
for line in lineList:
if 'ATTRIBUTE' in line:
key = line.split('"')[1]
if '(0)' in line:
value = line.split('(0):')[1]
return key, value
#Return dictionary of attrs for a single restart file
def getKVsFromFile(fName):
spOutput = subprocess.check_output(['h5dump','-a','nRes','-a','t','-a','DATETIME',fName])
output = spOutput.decode('utf-8').split('\n')
#Parse attributes (this could be better)
attrLocs = []
for i in range(len(output)):
if 'ATTRIBUTE' in output[i]:
attrLocs.append(i)
attrs = {}
for i in range(len(attrLocs)):
if i == len(attrLocs)-1:
k,v = getAttrKeyValue(output[attrLocs[i]:])
else:
k,v = getAttrKeyValue(output[attrLocs[i]:attrLocs[i+1]])
attrs[k] = v
return attrs
if __name__=='__main__':
idStr_noMPI = ".gam.Res.*.h5"
idStrMPI = "_0*_0*_0*_0000_0000_0000.gam.Res.*.h5"
ftag = "msphere"
timeFmt = "m"
MainS = """Overengineered script to print the time of each restart file by parsing h5dump output
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-id',type=str,metavar="runid",default=ftag,help="RunID of data (default: %(default)s)")
parser.add_argument('-f',type=str,metavar="timeFmt",default=timeFmt,help="Time format [s,m,h] (default: %(default)s)")
parser.add_argument('-dt',action='store_true',default=False,help="Print the datetime instead of simulation time (default: %(default)s)")
args = parser.parse_args()
ftag = args.id
timeFormat = args.f
doDatetime = args.dt
if timeFormat not in ['s','m','h']:
print('Unrecognized value "%s" for time format. Using "%s"'%(timeFormat, timeFmt))
timeFormat = timeFmt
if timeFormat == 's':
timeFormat = 'sec'
timeMult = 60
elif timeFormat == 'h':
timeFormat = 'hr'
timeMult = 1./60
else:
timeFormat = 'min'
timeMult = 1
#Get list of files
globStr = ftag + idStr_noMPI
print("Looking for nonMPI restarts...",end='')
fileList = glob.glob(globStr)
if len(fileList) == 0:
globStr = ftag + idStrMPI
print("Not found\nLooking for MPI restarts...",end='')
fileList = glob.glob(globStr)
if len(fileList) == 0:
print("Not found\nCheck id (globStr = %s)"%(globStr))
quit()
print("Found")
attrList = []
#Build list of attr dicts from list of files
for fStr in fileList:
if 'XXXXX' in fStr:
continue
fAttrs = getKVsFromFile(fStr)
fAttrs['fname'] = fStr
attrList.append(fAttrs)
#Print list (time from Gam restarts only, for now)
attrList.sort(key=sortFn)
for entry in attrList:
if doDatetime:
formatString = " {}: {}".format(entry['fname'], entry['DATETIME'])
else:
formatString = " {}: {:4.2f} [{}]".format(entry['fname'], float(entry['t'])*timeMult, timeFormat)
print(formatString)
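# Hedged usage sketch (the script and restart file names here are hypothetical):
#   ./restartTimes.py -id msphere -f h
# would list each restart with its simulation time, e.g. lines shaped like
#   msphere.gam.Res.00010.h5: 2.50 [hr]
# and with -dt it prints the DATETIME attribute instead of the simulation time.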

View File

@@ -1,321 +0,0 @@
#!/usr/bin/env python
"""Run a SuperMag comparison for a MAGE magnetosphere run.
Perform a comparison of ground magnetic field perturbations computed for a
MAGE magnetosphere simulation with measured data from SuperMag.
Author
------
Eric Winter (eric.winter@jhuapl.edu)
"""
# Import standard modules.
import argparse
import os
import subprocess
from xml.etree import ElementTree
# Import 3rd-party modules.
import matplotlib as mpl
import matplotlib.pyplot as plt
# Import project-specific modules.
import kaipy.supermage as sm
# Program constants and defaults
# Program description.
DESCRIPTION = "Compare MAGE ground delta-B to SuperMag measurements."
# Default SuperMag user name for queries.
DEFAULT_SUPERMAG_USER = "ewinter"
# Location of template XML file.
XML_TEMPLATE = os.path.join(
os.environ["KAIJUHOME"], "scripts", "postproc", "calcdb.xml.template"
)
# Name of XML file read by calcdb.x.
XML_FILENAME_TEMPLATE = "calcdb_RUNID.xml"
# Number of microseconds in a second.
MICROSECONDS_PER_SECOND = 1e6
# Number of seconds in a day.
SECONDS_PER_DAY = 86400
# Location of SuperMag cache folder.
SUPERMAG_CACHE_FOLDER = os.path.join(os.environ["HOME"], "supermag")
def create_command_line_parser():
"""Create the command-line argument parser.
Create the parser for command-line arguments.
Parameters
----------
None
Returns
-------
parser : argparse.ArgumentParser
Command-line argument parser for this script.
"""
parser = argparse.ArgumentParser(description=DESCRIPTION)
parser.add_argument(
"-d", "--debug", action="store_true", default=False,
help="Print debugging output (default: %(default)s)."
)
parser.add_argument(
"--mpixml", type=str, default=None,
help="If results from an MPI run, provide XML filename for run "
"(default: %(default)s)."
)
parser.add_argument(
"--smuser", type=str, default=DEFAULT_SUPERMAG_USER,
help="SuperMag user ID to use for SuperMag queries "
"(default: %(default)s)."
)
parser.add_argument(
"-v", "--verbose", action="store_true", default=False,
help="Print verbose output (default: %(default)s)."
)
parser.add_argument(
"mage_results_path",
help="Path to a result file for a MAGE magnetosphere run."
)
return parser
def get_mpi_decomposition(filename):
"""Determine the MPI decomposition for the current MPI results.
Determine the MPI decomposition for the current MPI results.
The MPI decomposition is the triplet of values of the "N" attribute of
the elements (iPdir, jPdir, kPdir) in the <Gamera> element of the XML
file for the MAGE run.
Parameters
----------
filename : str
Name of XML file.
Returns
-------
iPdir, jPdir, kPdir : int
Values of "N" attribute of elements (iPdir, jPdir, kPdir).
"""
# Parse the XML file.
iPdir = jPdir = kPdir = None
tree = ElementTree.parse(filename)
Kaiju_el = tree.getroot()
iPdir = int(Kaiju_el.findall("Gamera/iPdir")[0].attrib["N"])
jPdir = int(Kaiju_el.findall("Gamera/jPdir")[0].attrib["N"])
kPdir = int(Kaiju_el.findall("Gamera/kPdir")[0].attrib["N"])
return iPdir, jPdir, kPdir
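# Hedged illustrative note: the XML fragment this expects looks something like
#   <Kaiju>
#     <Gamera>
#       <iPdir N="4"/> <jPdir N="4"/> <kPdir N="1"/>
#     </Gamera>
#   </Kaiju>
# (the N="..." values shown are hypothetical); findall("Gamera/iPdir") etc. pull
# the "N" attribute from each direction element.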
def filename_to_runid(filename):
"""Parse the runid from a MAGE results file name.
Parse the runid from a MAGE results file name.
    The runid is all text before the first period in the name.
Parameters
----------
filename : str
Name of MAGE results file.
Returns
-------
runid : str
The MAGE runid for the file.
"""
parts = filename.split(".")
# parts = parts[0].split("_")
runid = parts[0]
return runid
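# For example, filename_to_runid("msphere.deltab.h5") returns "msphere";
# note that only the first "." is used as the delimiter here.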
def create_xml_file(runid, mpixml=None):
"""Create the XML input file for calcdb.x from a template.
Create the XML input file for calcdb.x from a template.
Parameters
----------
runid : str
runid for MAGE results file.
mpixml : str, default None
If results are from an MPI run of MAGE, name of XML run file in results
directory.
Returns
-------
xml_file : str
Name of XML file.
"""
# Read the template file.
with open(XML_TEMPLATE) as t:
lines = t.readlines()
# Process the template here.
# <HACK>
# This should be done with a proper templating package.
lines[3] = lines[3].replace("RUNID", runid)
lines[5] = lines[5].replace("EBFILE", runid)
lines[5] = lines[5].replace("ISMPI", "true")
if mpixml is not None:
iPdir, jPdir, kPdir = get_mpi_decomposition(mpixml)
lines[6] = lines[6].replace("RI", str(iPdir))
lines[6] = lines[6].replace("RJ", str(jPdir))
lines[6] = lines[6].replace("RK", str(kPdir))
else:
lines[6] = "\n"
# </HACK>
# Write out the processed XML.
xml_file = XML_FILENAME_TEMPLATE.replace("RUNID", runid)
with open(xml_file, "w") as f:
f.writelines(lines)
return xml_file
def compute_ground_delta_B(runid, mpixml=None):
"""Compute ground delta B values for a MAGE run.
Compute ground delta B values for a MAGE run. The computation is done with
the program calcdb.x.
Parameters
----------
runid : str
runid for MAGE results file.
mpixml : str, default None
If results are from an MPI run of MAGE, name of XML run file in results
directory.
Returns
-------
delta_B_file : str
Name of file containing calcdb.x results.
"""
# Create the XML file for calcdb.x from the template.
xml_file = create_xml_file(runid, mpixml)
# Run the command to compute ground delta B values.
cmd = "calcdb.x"
args = [xml_file]
subprocess.run([cmd] + args)
# Compute the name of the file containing the delta B values.
delta_B_file = runid + ".deltab.h5"
return delta_B_file
if __name__ == "__main__":
"""Begin main program."""
# Set up the command-line parser.
parser = create_command_line_parser()
# Parse the command-line arguments.
args = parser.parse_args()
debug = args.debug
mpixml = args.mpixml
smuser = args.smuser
verbose = args.verbose
mage_results_path = args.mage_results_path
if debug:
print("args = %s" % args)
# Split the MAGE results path into a directory and a file.
(mage_results_dir, mage_results_file) = os.path.split(mage_results_path)
if debug:
print("mage_results_dir = %s" % mage_results_dir)
print("mage_results_file = %s" % mage_results_file)
# Compute the runid from the file name.
runid = filename_to_runid(mage_results_file)
if debug:
print("runid = %s" % runid)
# Move to the results directory.
if verbose:
print("Moving to results directory %s." % mage_results_dir)
os.chdir(mage_results_dir)
# Compute the ground delta B values for this run.
if verbose:
print("Computing ground delta B values.")
delta_B_file = compute_ground_delta_B(runid, mpixml)
if debug:
print("delta_B_file = %s" % delta_B_file)
# Read the delta B values.
SIM = sm.ReadSimData(delta_B_file)
if debug:
print("SIM = %s" % SIM)
# Fetch the SuperMag indices for the desired time range.
# Fetch the start time (as a datetime object) of simulation data.
start = SIM["td"][0]
if debug:
print("start = %s" % start)
# Compute the duration of the simulated data, in seconds, then days.
duration = SIM["td"][-1] - SIM["td"][0]
duration_seconds = duration.days*SECONDS_PER_DAY + duration.seconds + duration.microseconds/MICROSECONDS_PER_SECOND
numofdays = duration_seconds/SECONDS_PER_DAY
if debug:
print("duration = %s" % duration)
print("duration_seconds = %s" % duration_seconds)
print("numofdays = %s" % numofdays)
# Fetch the SuperMag indices for this time period.
if verbose:
print("Fetching SuperMag indices.")
SMI = sm.FetchSMIndices(smuser, start, numofdays)
if debug:
print("SMI = %s" % SMI)
# Fetch the SuperMag data for this time period.
if verbose:
print("Fetching SuperMag data.")
SM = sm.FetchSMData(smuser, start, numofdays,
savefolder=SUPERMAG_CACHE_FOLDER)
if debug:
print("SM = %s" % SM)
# Interpolate the simulated delta B to the measurement times from SuperMag.
if verbose:
print("Interpolating simulated data to SuperMag times.")
SMinterp = sm.InterpolateSimData(SIM, SM)
if debug:
print("SMinterp = %s" % SMinterp)
# Create the plots in memory.
mpl.use("Agg")
# Make the indices plot.
if verbose:
print("Creating indices comparison plot.")
sm.MakeIndicesPlot(SMI, SMinterp, fignumber=1)
comparison_plot_file = runid + "_indices.png"
plt.savefig(comparison_plot_file)
# Make the contour plots.
if verbose:
print("Creating contour plots.")
sm.MakeContourPlots(SM, SMinterp, maxx = 1000, fignumber=2)
contour_plot_file = runid + "_contours.png"
plt.savefig(contour_plot_file)

View File

@@ -1,121 +0,0 @@
#!/usr/bin/env python
#Takes H5 field line file and slims it down
import argparse
import os
import kaipy.kaiH5 as kh5
import h5py
import numpy as np
#Create new file w/ same root vars/attributes as old
def createfile(iH5,fOut):
print('Creating new output file:',fOut)
oH5 = h5py.File(fOut,'w')
#Start by scraping all variables from root
#Copy root attributes
for k in iH5.attrs.keys():
aStr = str(k)
oH5.attrs.create(k,iH5.attrs[aStr])
#Copy root groups
for Q in iH5.keys():
sQ = str(Q)
#Don't include stuff that starts with "Step"
if "Step" not in sQ:
oH5.create_dataset(sQ,data=iH5[sQ])
return oH5
if __name__ == "__main__":
#Set defaults
ns = 0
ne = -1 #Proxy for last entry
parser = argparse.ArgumentParser(description="Slims down a FL file")
parser.add_argument('inH5',metavar='Fat.h5',help="Filename of input fat HDF5 file")
parser.add_argument('outH5',metavar='Slim.h5',help="Filename of slimmed HDF5 file")
parser.add_argument( '-pskip',metavar='pskip',default=1,help="Stride over points on field line")
parser.add_argument('-flskip',metavar='flskip',default=1,help="Stride over field lines")
#Finalize parsing
args = parser.parse_args()
fIn = args.inH5
fOut = args.outH5
pSk = int(args.pskip)
lSk = int(args.flskip)
kh5.CheckOrDie(fIn)
N,sIDs = kh5.cntSteps(fIn)
nS = sIDs.min()
nE = sIDs.max()
#For now assuming same number of lines per step
gID = "Step#%d"%(nS)
NumL,FLidS = kh5.cntX(fIn,gID,"Line#")
sFL = FLidS.min()
eFL = FLidS.max()
LID = "Line#%d"%(sFL)
print(sFL,eFL)
#Now open both files and get to work
#Open both files, get to work
iH5 = h5py.File(fIn,'r')
oH5=createfile(iH5,fOut)
#Get variables from first line
vIDs = [str(k) for k in iH5[gID][LID].keys()]
#Remove variables that need special handling below (point stride and connectivity)
vIDs.remove("xyz")
vIDs.remove("LCon")
for nStp in range(nS,nE+1):
print(nStp)
gStr = "Step#%d"%(nStp)
#Start by copying attributes from old to new
oH5.create_group(gStr)
#Root atts
for k in iH5[gStr].attrs.keys():
aStr = str(k)
oH5[gStr].attrs.create(k,iH5[gStr].attrs[aStr])
#Now loop over field lines
nOut = 0
for nFL in range(sFL,eFL+1,lSk):
#print(nFL)
iLine = "Line#%d"%(nFL)
oLine = "Line#%d"%(nOut)
#Copy iLine => oLine w/ new stride
oH5[gStr].create_group(oLine)
for vID in vIDs:
Q = iH5[gStr][iLine][vID]
oH5[gStr][oLine].create_dataset(vID,data=Q[::pSk])
#Get points on line
xyz0 = iH5[gStr][iLine]["xyz"]
xyzN = xyz0[::pSk,:]
oH5[gStr][oLine].create_dataset("xyz",data=xyzN)
#Now create connectivity
NumP = np.int32(xyzN.shape[0])
LCon = np.zeros((NumP-1,2),dtype=np.int32)
LCon[:,0] = np.arange(0,NumP-1)
LCon[:,1] = np.arange(1,NumP)
oH5[gStr][oLine].create_dataset("LCon",data=LCon)
#Finish up w/ attributes
aIDs = [str(k) for k in iH5[gStr][iLine].attrs.keys()]
aIDs.remove("Np")
aIDs.remove("n0")
for aID in aIDs:
oH5[gStr][oLine].attrs.create(aID,iH5[gStr][iLine].attrs[aID])
oH5[gStr][oLine].attrs.create("Np",NumP)
nOut = nOut+1
#Close up
iH5.close()
oH5.close()

View File

@@ -1,176 +0,0 @@
#!/usr/bin/env python
#Takes Gamera/Chimp/xxx file and slims it down based on start:stop:stride
import argparse
import os
import h5py
import numpy as np
cacheName = "timeAttributeCache"
def genMPIStr(di,dj,dk,i,j,k,n_pad=4):
inpList = [di, dj, dk, i, j, k]
sList = ["{:0>{n}d}".format(s, n=n_pad) for s in inpList]
mpiStr = '_'.join(sList)
return mpiStr
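#Illustrative example: genMPIStr(4,4,1,0,0,0) -> "0004_0004_0001_0000_0000_0000",
#matching the per-rank suffix used in MPI output file names.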
def cntSteps(fname):
with h5py.File(fname,'r') as hf:
"""
grps = hf.values()
grpNames = [str(grp.name) for grp in grps]
#Steps = [stp if "/Step#" in stp for stp in grpNames]
Steps = [stp for stp in grpNames if "/Step#" in stp]
nSteps = len(Steps)
"""
#sIds = np.array([str.split(s,"#")[-1] for s in Steps],dtype=int)
if(cacheName in hf.keys() and 'step' in hf[cacheName].keys()):
sIds = np.asarray(hf[cacheName]['step'])
nSteps = sIds.size
else:
sIds = np.array([str.split(s,"#")[-1] for s in hf.keys() if "Step#" in s],dtype=int)
nSteps = len(sIds)
return nSteps,sIds
def createfile(iH5,fOut):
print('Creating new output file:',fOut)
oH5 = h5py.File(fOut,'w')
#Start by scraping all variables from root
#Copy root attributes
for k in iH5.attrs.keys():
aStr = str(k)
oH5.attrs.create(k,iH5.attrs[aStr])
#Copy root groups
for Q in iH5.keys():
sQ = str(Q)
#Don't include stuff that starts with "Step"
if "Step" not in sQ and cacheName not in sQ:
oH5.create_dataset(sQ,data=iH5[sQ])
if cacheName in sQ:
oH5.create_group(sQ)
return oH5
if __name__ == "__main__":
#Set defaults
ns = -1
ne = -1 #Proxy for last entry
doMPI = False
parser = argparse.ArgumentParser(description="Slims down an HDF output file")
parser.add_argument('inH5',metavar='Fat.h5',help="Filename of input fat HDF5 file")
parser.add_argument('outH5',metavar='Slim.h5',help="Filename of slimmed HDF5 file")
parser.add_argument('-s',type=int,metavar="start",default=ns,help="Starting slice")
parser.add_argument('-e',type=int,metavar="end",default=-1,help="Ending slice (default: N)")
parser.add_argument('-sk',type=int,metavar="nsk",default=1,help="Stride (default: %(default)s)")
parser.add_argument('-sf',type=int,metavar="nsf",default=250,help="File write stride (default: %(default)s)")
parser.add_argument('-mpi',type=str,metavar="ijk",default="", help="Comma-separated mpi dimensions (example: '4,4,1', default: noMPI)")
parser.add_argument('--p',choices=('True','False'))
#Finalize parsing
args = parser.parse_args()
Ns = args.s
Ne = args.e
Nsk = args.sk
Nsf = args.sf
fIn = args.inH5
outTag = args.outH5
p = args.p == 'True'
mpiIn = args.mpi
N,sIds = cntSteps(fIn)
N0 = np.sort(sIds)[0]
if (Ns == -1):
Ns = np.sort(sIds)[0]
if (Ne == -1):
Ne = N+np.sort(sIds)[0]
if Nsf == -1:
Nsf = Ne
#Designed for 3-dim gamera mpi decomp
if mpiIn != "":
doMPI = True
spl = [int(x) for x in mpiIn.split(',')]
if len(spl) != 3:
print("Need 3 dimensions for MPI decomp, try again")
quit()
mi, mj, mk = spl
runTag = fIn.split('_')[0]
endTag = '.'.join(fIn.split('.')[1:]) #Exclude anything before the first '.'
inFiles = []
outFiles = []
for i in range(mi):
for j in range(mj):
for k in range(mk):
mpiStr = genMPIStr(mi,mj,mk,i,j,k)
fName = runTag+"_"+mpiStr+'.'+endTag
if os.path.exists(fName):
inFiles.append(fName)
outFiles.append("{}-{}_{}_{}.{}".format(Ns,Nsf,runTag,mpiStr,outTag))
else:
inFiles = [fIn]
outFiles = [str(Ns)+'-'+str(Nsf)+outTag]
for i in range(len(inFiles)):
fOut = str(Ns)+'-'+str(Nsf)+outTag
#Open both files, get to work
iH5 = h5py.File(inFiles[i],'r')
oH5=createfile(iH5,outFiles[i])
#Now loop through steps and do same thing
nOut = 0
for n in range(Ns,Ne,Nsk):
if(p):nOut = n
gIn = "Step#%d"%(n)
gOut = "Step#%d"%(nOut)
if(not p): nOut = nOut+1 # use the same group numbers as originally in file - frt
print("Copying %s to %s"%(gIn,gOut))
oH5.create_group(gOut)
#Root atts
for k in iH5[gIn].attrs.keys():
aStr = str(k)
oH5[gOut].attrs.create(k,iH5[gIn].attrs[aStr])
#Root vars
for Q in iH5[gIn].keys():
sQ = str(Q)
#print("\tCopying %s"%(sQ))
oH5[gOut].create_dataset(sQ,data=iH5[gIn][sQ])
for k in iH5[gIn][sQ].attrs.keys():
aStr = str(k)
oH5[gOut][sQ].attrs.create(k,iH5[gIn][sQ].attrs[aStr])
#If cache present
#Add the cache after steps, select the same steps for the cache that are contained in the
#Ns:Ne:Nsk start,end,stride
if cacheName in iH5.keys():
for Q in iH5[cacheName].keys():
sQ = str(Q)
nOffset = 0 if p else Ns
if(sQ == "step"):
oH5[cacheName].create_dataset(sQ, data=iH5[cacheName][sQ][Ns-N0:Ne-N0:Nsk]-nOffset)
else:
oH5[cacheName].create_dataset(sQ, data=iH5[cacheName][sQ][Ns-N0:Ne-N0:Nsk])
for k in iH5[cacheName][sQ].attrs.keys():
aStr = str(k)
oH5[cacheName][sQ].attrs.create(k,iH5[cacheName][sQ].attrs[aStr])
# make a new file every Nsf steps
if(n%Nsf==0 and n != 0):
oH5.close()
if not doMPI:
fOut = str(n)+'-'+str(Nsf+n)+args.outH5
else:
fOut = "{}-{}_{}_{}.{}".format(n,Nsf+n,runTag,mpiStr,outTag)
oH5=createfile(iH5,fOut)
#Close up
iH5.close()
oH5.close()

View File

@@ -1,171 +0,0 @@
#!/usr/bin/env python
#Takes Gamera/Chimp/xxx file and slims it down based on start:stop:stride
#Removes all that janky tecplot stuff that got into the regular slim script
import argparse
import os
import h5py
import numpy as np
cacheName = "timeAttributeCache"
def genMPIStr(di,dj,dk,i,j,k,n_pad=4):
inpList = [di, dj, dk, i, j, k]
sList = ["{:0>{n}d}".format(s, n=n_pad) for s in inpList]
mpiStr = '_'.join(sList)
return mpiStr
def cntSteps(fname):
with h5py.File(fname,'r') as hf:
"""
grps = hf.values()
grpNames = [str(grp.name) for grp in grps]
#Steps = [stp if "/Step#" in stp for stp in grpNames]
Steps = [stp for stp in grpNames if "/Step#" in stp]
nSteps = len(Steps)
"""
if(cacheName in hf.keys() and 'step' in hf[cacheName].keys()):
sIds = np.asarray(hf[cacheName]['step'])
nSteps = sIds.size
else:
#sIds = np.array([str.split(s,"#")[-1] for s in Steps],dtype=int)
sIds = np.array([str.split(s,"#")[-1] for s in hf.keys() if "Step#" in s],dtype=int)
nSteps = len(sIds)
return nSteps,sIds
def createfile(iH5,fOut):
print('Creating new output file:',fOut)
oH5 = h5py.File(fOut,'w')
#Start by scraping all variables from root
#Copy root attributes
for k in iH5.attrs.keys():
aStr = str(k)
oH5.attrs.create(k,iH5.attrs[aStr])
#Copy root groups
for Q in iH5.keys():
sQ = str(Q)
#Don't include stuff that starts with "Step"
if "Step" not in sQ and cacheName not in sQ:
oH5.create_dataset(sQ,data=iH5[sQ])
if cacheName in sQ:
oH5.create_group(sQ)
return oH5
if __name__ == "__main__":
#Set defaults
ns = -1
ne = -1 #Proxy for last entry
doMPI = False
parser = argparse.ArgumentParser(description="Slims down an HDF output file")
parser.add_argument('-intag' ,metavar='intag' ,default="msphere",help="Run ID of input fat HDF file (default: %(default)s)")
parser.add_argument('-outtag',metavar='outtag',default="slim",help="Run ID of output slimmed HDF file (default: %(default)s)")
parser.add_argument('-type' ,metavar='type',default="gam",help="Model type (default: %(default)s)")
parser.add_argument('-s',type=int,metavar="start",default=ns,help="Starting slice")
parser.add_argument('-e',type=int,metavar="end",default=-1,help="Ending slice (default: N)")
parser.add_argument('-sk',type=int,metavar="nsk",default=1,help="Stride (default: %(default)s)")
parser.add_argument('-mpi',type=str,metavar="ijk",default="", help="Comma-separated mpi dimensions (example: '4,4,1', default: noMPI)")
#Finalize parsing
args = parser.parse_args()
Ns = args.s
Ne = args.e
Nsk = args.sk
inTag = args.intag
outTag = args.outtag
mType = args.type
mpiIn = args.mpi
fIn = "%s.volt.h5"%(inTag)
N,sIds = cntSteps(fIn)
N0 = np.sort(sIds)[0]
if ( (Ns == -1) or (Ne == -1) ):
if (Ns == -1):
Ns = np.sort(sIds)[0]
if (Ne == -1):
Ne = N+np.sort(sIds)[0]
#Designed for 3-dim gamera mpi decomp
if mpiIn != "":
doMPI = True
spl = [int(x) for x in mpiIn.split(',')]
if len(spl) != 3:
print("Need 3 dimensions for MPI decomp, try again")
quit()
mi, mj, mk = spl
inFiles = []
outFiles = []
for i in range(mi):
for j in range(mj):
for k in range(mk):
mpiStr = genMPIStr(mi,mj,mk,i,j,k)
#fName = runTag+"_"+mpiStr+'.'+endTag
fIn = inTag + "_" + mpiStr + ".%s.h5"%(mType)
fOut = outTag + "_" + mpiStr + ".%s.h5"%(mType)
print("%s to %s"%(fIn,fOut))
if os.path.exists(fIn):
inFiles.append(fIn)
outFiles.append(fOut)
else:
inFiles = [inTag + '.' + str(mType) + '.h5']
outFiles = [outTag + '.' + str(mType) + '.h5']
for i in range(len(inFiles)):
fOut = outFiles[i]
print(fOut)
#Open both files, get to work
iH5 = h5py.File(inFiles[i],'r')
oH5=createfile(iH5,outFiles[i])
#Now loop through steps and do same thing
nOut = 0
for n in range(Ns,Ne,Nsk):
gIn = "Step#%d"%(n)
gOut = "Step#%d"%(nOut)
nOut = nOut + 1
print("Copying %s to %s"%(gIn,gOut))
oH5.create_group(gOut)
#Root atts
for k in iH5[gIn].attrs.keys():
aStr = str(k)
oH5[gOut].attrs.create(k,iH5[gIn].attrs[aStr])
#Root vars
for Q in iH5[gIn].keys():
sQ = str(Q)
#print("\tCopying %s"%(sQ))
oH5[gOut].create_dataset(sQ,data=iH5[gIn][sQ])
for k in iH5[gIn][sQ].attrs.keys():
aStr = str(k)
#print("\t\tCopying %s"%(aStr))
oH5[gOut][sQ].attrs.create(k,iH5[gIn][sQ].attrs[aStr])
#If cache present
#Add the cache after steps, select the same steps for the cache that are contained in the
#Ns:Ne:Nsk start,end,stride
if cacheName in iH5.keys():
for Q in iH5[cacheName].keys():
sQ = str(Q)
oH5[cacheName].create_dataset(sQ, data=iH5[cacheName][sQ][Ns-N0:Ne-N0:Nsk])
for k in iH5[cacheName][sQ].attrs.keys():
aStr = str(k)
oH5[cacheName][sQ].attrs.create(k,iH5[cacheName][sQ].attrs[aStr])
#Close up
iH5.close()
oH5.close()

View File

@@ -1,28 +0,0 @@
#!/usr/bin/env python
import argparse
from argparse import RawTextHelpFormatter
import datetime
from astropy.time import Time
fmt='%m/%d/%Y, %H:%M:%S'
if __name__ == "__main__":
t0="2010-01-01T00:00:00"
fmt='%Y-%m-%dT%H:%M:%S'
MainS = """ Returns MJD (modified Julian date) from a given UT
UT: UT string, yyyy-mm-ddThh:mm:ss format
ut2mjd.py 2010-02-05T5:00:00
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('UT',type=str,metavar="UT",default=t0,help='UT string to convert (default: %(default)s)')
#Finalize parsing
args = parser.parse_args()
utStr = args.UT
ut = datetime.datetime.strptime(utStr,fmt)
mjd = Time(ut).mjd
print("%s (UT) => %f (MJD)"%(utStr,mjd))

View File

@@ -1,170 +0,0 @@
#! /usr/bin/env python
'''#############################################################################
Converts a XML configuration file for CGS models into INI configuration file(s).
Brent Smith (February 2022), JHU APL
#############################################################################'''
# imports (std lib)
import os
import sys
import shutil
import pprint
import argparse
import logging
import configparser
import xml.etree.ElementTree as ET
# imports (3rd party)
# imports (local)
# module-based configurations
logging.basicConfig()
logger = logging.getLogger(__name__)
#logger.setLevel(logging.INFO)
#===============================================================================
def parse_args(args=None):
'''=========================================================================
Command-Line Argument Parser
========================================================================='''
parser = argparse.ArgumentParser()
required = parser.add_argument_group('required positional arguments')
required.add_argument('input', help='Input XML file.')
required.add_argument('output', help='Output INI file.')
parser.add_argument(
'-v', '--verbose', action='store_true',
help='Activate verbose execution mode. (default: %(default)s)'
)
try:
if len(args) == 0:
parser.print_help()
sys.exit(2)
args = parser.parse_args(args)
except Exception as e:
logging.getLogger(__name__).error(e)
parser.print_help()
sys.exit(2)
return args
def get_xml_children(tree_element):
'''=========================================================================
Obtains recursively all children tags and attributes from the parent tree
element
========================================================================='''
if tree_element:
children = []
if len(list(tree_element)) > 1:
children = {tree_element.tag:{}}
for child in tree_element:
child_dict = get_xml_children(child)
for tag, attrib in child_dict.items():
children[tree_element.tag][tag] = attrib
return children
else:
for child in tree_element:
return {tree_element.tag: get_xml_children(child)}
else:
return {tree_element.tag: tree_element.attrib}
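# Hedged illustrative example (element and attribute names are hypothetical):
# parsing <Kaiju><Gamera><time tFin="100.0"/><grid Ni="64"/></Gamera></Kaiju>
# yields {'Kaiju': {'Gamera': {'time': {'tFin': '100.0'}, 'grid': {'Ni': '64'}}}},
# i.e. tags become nested dictionary keys and leaf attributes become the values.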
def set_ini_config(config, element, section=None):
'''=========================================================================
Uses configparser to assign section headings, section heading comments, and
configuration parameters with values from an XML element tree.
========================================================================='''
if element:
# at least one child
num_children = len(element)
if num_children > 1:
# more than 1 child
for key, value in element.items():
config.add_section(key)
for _key, _value in value.items():
config.set(key, _key, _value)
else:
# one child {key:{key:value}} or {key:value}
try:
# values can be another dictionary or string
for key, value in element.items():
set_ini_config(config, value, section=key)
except:
# it's a single dictionary {key:value}
config[section] = element
else:
# no children of element
print('...no children...')
print(element)
return config
def main(arguments=None):
'''=========================================================================
Main driver that converts an XML data file to an INI data file.
========================================================================='''
# parse command-line arguments
args = parse_args(arguments)
if args.verbose:
logging.getLogger(__name__).setLevel(logging.INFO)
# we do not have a template INI since they are basically YAML (single-level)
# Read XML file
with open(args.input) as f:
xml_data_str = f.read() # this contains all content
root = ET.fromstring(xml_data_str) # this ignores top-level comments
xml_data = get_xml_children(root)
logging.getLogger(__name__).info('Retrieved XML data...')
if args.verbose:
pprint.pprint(xml_data)
# remove kaiju level - case insensitive
xml_data_keys = [key.lower() for key in xml_data.keys()]
if 'kaiju' in xml_data_keys:
index = xml_data_keys.index('kaiju')
xml_data = xml_data[list(xml_data.keys())[index]]
# now the keys of the dictionary are the models
models = xml_data.keys()
# Create INI configuration for each model
for model in models:
# write INI file (can't append to an existing INI file, so we replace anew)
config = configparser.ConfigParser(allow_no_value=True)
config.optionxform = str # preserve case of strings
# create INI configuration
logging.getLogger(__name__).info('Creating {}-based configuration...'.format(str(model)))
config = set_ini_config(config, xml_data[model])
# output to temp file named after model
logging.getLogger(__name__).info('Writing temp file...')
with open(model, 'w') as f:
config.write(f)
# append heading to configuration
with open(model, 'r') as f:
contents = f.read()
contents = '## ' + model.upper() + ' ##\n' + contents
logging.getLogger(__name__).info('Writing INI data...')
with open(model, 'w') as f:
f.write(contents)
# concatenate all model-named temp files
logging.getLogger(__name__).info('Removing all temp files...')
with open(args.output, 'w') as f:
for model in models:
with open(model, 'r') as fi:
shutil.copyfileobj(fi, f)
os.remove(model)
logging.getLogger(__name__).info('Complete')
return 0
if __name__ == '__main__':
# put main stuff in the MAIN function
sys.exit(main(sys.argv[1:]))

View File

@@ -1 +0,0 @@
Scripts for preprocessing GAMERA (and/or MAGE) runs.

View File

@@ -1,375 +0,0 @@
#!/usr/bin/python3
import argparse
import configparser
import os
from string import ascii_letters
import subprocess
import xml.etree.ElementTree as ET
from xml.etree.ElementTree import Element
def convertUnits(myLine):
# Check if delete
if ("DEL!" in myLine):
# Get the setting
settingString = myLine.split("=")[0]
return (settingString + "= DEL!")
# Check if just a comment
elif ("[" not in myLine):
return myLine.split("#")[0]
# Get units
unitString = myLine.split("#")[1]
# Set multiplier for units
multiplier = 0
if ("[sec]" in unitString):
multiplier = 1
elif ("[min]" in unitString):
multiplier = 60
elif ("[hrs]" in unitString):
multiplier = 3600
else:
# Find what is in the unit string and tell the user it is incorrect
incorrectUnit = unitString.split("[")[1]
incorrectUnit = incorrectUnit.split("]")[0]
print("ERROR: Incorrect unit type for conversion: " + incorrectUnit)
exit()
# Get the setting
settingString = myLine.split("=")[0]
# Get the number
numberList = []
for t in myLine.split():
try:
numberList.append(float(t))
except:
pass
# Multiply
actualNumber = numberList[0] * multiplier
# Set this to the number string
numberString = str(actualNumber)
# Put the whole thing back together and return
return (settingString + "= " + numberString)
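# Illustrative example: convertUnits("tFin = 2.0 # [hrs]") returns "tFin = 7200.0",
# i.e. the unit tag is stripped and the value is rescaled to seconds.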
def initialize(settingsFile):
"""Initialize using the settings file."""
# Set up whitelist for section names (letters only)
whiteList = set(ascii_letters)
# Open up settings file
settings = settingsFile # First argument is input file name
# Create a hidden sub-folder named ".Settings" to hold the parsed per-section files
os.system("mkdir .Settings")
# Go through the settings file and check for section flags
with open(settings, 'r') as file:
content = file.read()
# Move to the subfolder
os.chdir(".Settings")
# Put everything between section flags in its own subfile in the created directory.
# The name should correspond to the section.
contentSplit = content.splitlines()
# Create a temporary string to hold the temporary settings files
temporary = ""
name = ''.join(l for l in contentSplit[0] if l in whiteList)
contentSplit.pop(0)
pos = -1
# Loop through the split file
for line in contentSplit:
pos += 1
if (len(line) < 1):
temporary = temporary + "\n"
# If there is a # at the beginning of the string
elif (line[0] == '#'):
# Then write file, change name, and reset temporary
tempFile = open(name + ".ini", "w")
tempFile.write(temporary)
tempFile.close()
name = ''.join(l for l in line if l in whiteList)
temporary = ""
elif (pos == (len(contentSplit) - 1)):
# Last one, add line then write everything out!
# Check for converting!
if ("#" in line):
temporary = temporary + convertUnits(line) + "\n"
else:
temporary = temporary + line + "\n"
tempFile = open(name + ".ini", "w")
tempFile.write(temporary)
tempFile.close()
# Check for conversion
elif ("#" in line):
temporary = temporary + convertUnits(line) + "\n"
else:
# Add line to temporary
temporary = temporary + line + "\n"
# Found on Stack Overflow.
# This indents everything in the elem node properly since apparently etree doesn't do that on its own...
def indent(elem, level=0):
i = "\n" + level*" "
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
indent(elem, level+1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
def create_from_xml_template(ini_file, xml_file, template):
"""Convert the .ini file to a .xml file using a template."""
# Get settings file name
settings = ini_file
# Get output file name
output = xml_file
# Run Initialization for Settings
initialize(settings)
os.chdir('..')
# Check if initialize failed/didn't produce output
settingsFolder = os.listdir(".Settings")
if (len(settingsFolder) == 0):
print("Initialization failed. Aborting.")
# Clean up the .Settings folder
subprocess.Popen("rm -rf .Settings/", shell=True)
exit(1)
# Read in default settings for this template
tree = ET.parse(template)
templateRoot = tree.getroot()
# Make a Parser
user = configparser.RawConfigParser()
# Try to turn on case sensitivity
user.optionxform = lambda option: option
os.chdir(".Settings")
inputDicts = {}
# Iterate through each file there and make the root node the key for the resultant tree in a dictionary
for filename in os.listdir():
user = configparser.RawConfigParser()
user.optionxform = lambda option: option
user.read(filename)
temp = filename.split('.')
newFileName = temp[0]
inputDicts[newFileName] = user
#print(inputDicts)
# Make bigger ETree by adding all roots as sub elements
top = Element('Kaiju')
for key in inputDicts.keys():
temp = Element(key)
# ET.dump(temp)
for section in inputDicts[key].sections():
deeperTemp = Element(section)
for option in inputDicts[key].options(section):
deeperTemp.set(option, inputDicts[key].get(section, option))
ET.SubElement(temp, deeperTemp.tag, deeperTemp.attrib)
top.append(temp)
# ET.dump(top)
# Go through the new settings and see if they match elements in the default
for child in top:
#print(child.tag)
# Try to find that child tag in the default tree
if (templateRoot.find(child.tag) is not None):
# If it exists, go one level down and iterate through those nodes
firstLevel = templateRoot.find(child.tag)
for lower in child:
# Find the corresponding tag in the default
nextLevel = firstLevel.find(lower.tag)
# Check if that tag exists. If not, just add it
if (nextLevel is not None):
for item in lower.keys():
# Check for the delete flag
if ("DEL!" in lower.get(item)):
# Check if that option exists.
if (nextLevel.get(item) is None):
# Just print Debug statements
#print(nextLevel)
#print(item)
#print(lower.get(item))
#print(nextLevel.attrib)
continue
else:
del nextLevel.attrib[item]
else:
nextLevel.set(item, lower.get(item))
else:
# Check for the delete flag
if ("DEL!" in lower.attrib):
# print(lower.tag)
# print(lower.attrib)
# Don't add anything
continue
else:
ET.SubElement(firstLevel, lower.tag, lower.attrib)
else:
# Else, just add that element to the root
templateRoot.insert(0, child)
# for key in child.keys():
# # If tag appears, check sub-entries
# if (templateRoot.find(key) is not None):
# subelement = templateRoot.find(key)
# print("I found " + key + " in the default tree")
# # For each option in element, add that to the ETree element
# for item in key:
# subelement.set(item[0], item[1])
# # If tag does not appear, append new one to ETree
# else:
# print("I did not find " + key + " in the default tree")
# tempElement = ET.Element(key)
# # Go through the options and add them to a new element
# for item in child:
# tempElement.set(item[0],item[1])
#
# # Insert this new element at the end of the current section
# templateRoot.insert(len(list(templateRoot)),tempElement)
# Run root through the indentation function
indent(templateRoot)
os.chdir('..')
# Write the XML file
tree.write(output)
# Clean up the .Settings folder
subprocess.Popen("rm -rf .Settings/", shell=True)
print("\n\nXML generation complete!\n\n")
def create_xml_template(ini_file, xml_file):
"""Convert the .ini file to a template .xml file."""
# Get settings file name
settings = ini_file
# Get output file name
output = xml_file
# Run Initialization for Settings
initialize(settings)
# Make a Parser
user = configparser.RawConfigParser()
# Try to turn on case sensitivity
user.optionxform = lambda option: option
#print(os.getcwd())
inputDicts = {}
# Iterate through each file there and make the root node the key for the resultant tree in a dictionary
for filename in os.listdir():
user = configparser.RawConfigParser()
user.optionxform = lambda option: option
user.read(filename)
temp = filename.split('.')
newFileName = temp[0]
inputDicts[newFileName] = user
# Make bigger ETree by adding all roots as sub elements
top = Element('Kaiju')
for key in inputDicts.keys():
temp = Element(key)
# ET.dump(temp)
for section in inputDicts[key].sections():
deeperTemp = Element(section)
for option in inputDicts[key].options(section):
if ("DEL!" in inputDicts[key].get(section, option)):
continue
else:
deeperTemp.set(option, inputDicts[key].get(section, option))
ET.SubElement(temp, deeperTemp.tag, deeperTemp.attrib)
top.append(temp)
# ET.dump(top)
# Run root through the indentation function
indent(top)
os.chdir('..')
# Create Etree with the root
myTree = ET.ElementTree(top)
# Write the XML file
myTree.write(output)
# Clean up the .Settings folder
subprocess.Popen("rm -rf .Settings/", shell=True)
print("\n\nTemplate creation complete!\n\n")
if __name__ == "__main__":
"""Convert a .ini file to a .xml file."""
# Create the command-line argument parser.
parser = argparse.ArgumentParser(description="Convert Kaiju .ini files to XML configuration or XML configuration template files.")
parser.add_argument("-t", dest="template", help="Path to .xml file as source template")
parser.add_argument("ini_file", help="Path to .ini file to convert")
parser.add_argument("xml_file", help="Path to .xml file output")
parser.add_argument("-d", "--debug", help="Activate debug mode", action="store_true")
parser.add_argument("-v", "--verbose", help="Activate verbose execution mode", action="store_true")
# Parse the command-line arguments.
args = parser.parse_args()
# If a template file was specified, use it. Otherwise, convert directly
# from .ini format to .xml format.
if args.template:
if args.verbose:
print("Converting %s to %s using template %s." % (args.ini_file, args.xml_file, args.template))
create_from_xml_template(args.ini_file, args.xml_file, args.template)
else:
if args.verbose:
print("Converting %s to XML output %s." % (args.ini_file, args.xml_file))
create_xml_template(args.ini_file, args.xml_file)

View File

@@ -1,568 +0,0 @@
#!/usr/bin/env python
#Converts OMNI output data to Gamera solar wind file to be used as boundary conditions
#Reads from ASCII file
#Time(min) Density (AMU/cm^-3) Vx(km/s) Vy(km/s) Vz(km/s) Cs(km/s) Bx(nT) By(nT) Bz(nT) B(nT) tilt(rad)
#Writes to HDF5 Gamera wind file
#t,D,V,P,B = [s],[#/cm3],[m/s],[nPa],[nT]
#Utilizes cdasws and geopack, make sure to install modules before running. For more info go to https://bitbucket.org/aplkaiju/kaiju/wiki/Gamerasphere
Mp = 1.67e-27 #Proton mass [kg]
gamma = 5/3.0
import argparse
from argparse import RawTextHelpFormatter
import numpy as np
import h5py
import matplotlib.pyplot as plt
import os
import kaipy.solarWind
from kaipy.solarWind import swBCplots
from kaipy.solarWind.OMNI import OMNI
from kaipy.solarWind.WIND import WIND
from kaipy.solarWind.SWPC import DSCOVRNC
from kaipy.solarWind.gfz_api import getGFZ
import datetime
from astropy.time import Time
from cdasws import CdasWs
import sys
# ANSI color codes for color output to terminal
class Color:
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
CYAN = '\033[96m'
DARKCYAN = '\033[36m'
BOLD = '\033[1m'
END = '\033[0m'
def bxFit(sw, fileType, filename):
def bxFitPlot(bxFit_array):
kaipy.solarWind.swBCplots.BasicPlot(sw.data, 'time_doy', 'bx', color='k')
plt.plot(sw.data.getData('time_doy'), bxFit_array, 'g')
plt.title('Bx Fit Coefficients ('+fileType+'):\n$Bx_{fit}(0)$=%f $By_{coef}$=%f $Bz_{coef}$=%f' % (coef[0], coef[1], coef[2]) )
plt.legend(('$Bx$','$Bx_{fit}$'))
coef = sw.bxFit()
print('Bx Fit Coefficients are ', coef)
by = sw.data.getData('by')
bz = sw.data.getData('bz')
bxFit = coef[0] + coef[1] * by + coef[2] * bz
# Save plot
bxFitPlot(bxFit)
bxPlotFilename = os.path.basename(filename) + '_bxFit.png'
print('Saving "%s"' % bxPlotFilename)
plt.savefig(bxPlotFilename)
return coef
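# Hedged note on bxFit(): coef holds (Bx0, ByC, BzC) for the linear model
# Bx ~ Bx0 + ByC*by + BzC*bz (see the bxFit array above); these coefficients are
# later written to the bcwind file as the "Bx0", "ByC" and "BzC" datasets, from
# which Gamera reconstructs Bx at the boundary.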
def ChkTimes(starttime,endtime):
time_difference = endtime - starttime
hours_difference = time_difference.total_seconds()/3600.0
if (starttime > endtime) or (hours_difference < 2.0):
tsStr = starttime.strftime("%Y-%m-%dT%H:%M:%S")
teStr = endtime.strftime("%Y-%m-%dT%H:%M:%S")
sys.exit("Error! Start time (%s) must be al least 2 hours before the end time (%s)"%(tsStr,teStr))
def printErrMsg(errStr):
print(Color.BOLD+Color.YELLOW+'!!!!!!!!!! ERROR: %s'%(errStr)+ Color.END)
print(Color.BOLD+Color.YELLOW+'!!!!!!!!!! Not writing bcWind.h5 file'+ Color.END)
print(Color.BOLD+Color.YELLOW+'!!!!!!!!!! Contact model developers to proceed'+ Color.END)
sys.exit()
def getPrevDayF107(t0):
tm1 = t0-datetime.timedelta(days=1)
tm1 = tm1.replace(hour=0, minute=0, second=0, microsecond=0)
te1 = tm1.replace(hour=23, minute=59, second=59, microsecond=9999)
tm1r = tm1.strftime("%Y-%m-%dT%H:%M:%SZ")
te1r = te1.strftime("%Y-%m-%dT%H:%M:%SZ")
status,data = cdas.get_data('OMNI2_H0_MRG1HR', ['F10_INDEX1800'], tm1r,te1r)
#daily values so just return first value
prevF107 = data['F10_INDEX1800'][0]
return prevF107
if __name__ == "__main__":
fOut = "bcwind.h5"
mod = "LFM"
t0Str="2010-01-01T00:00:00"
t1Str="2010-01-01T02:00:00"
Ts = 0.0
sigma = 3.0
tOffset = 0.0
obs="OMNI"
filename=None
doBs = True
doEps = False
dfile = None
#Usually f10.7 above 300 is not reliable; the daily value can be distorted by flare emissions even though a flare may last only a short time during the day.
maxf107 = 300.0
minMfast = 1.5
MainS = """ This script does several things:
1. Fetch OMNI data from CDAWeb between the specified times (must be at least 2 hours in length)
2. Generate standard plots of solar wind data
3. Write output in a model file format.
- "LFM" format will:
a. Generate coefficients for Bx Fit
b. Save a bcwind.h5 file
- "TIEGCM" format will:
a. Compute 15-minute boxcar average lagged by 5 minutes
b. Sub-sample at 5-minutes
c. Write NetCDF IMF data file
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-t0',type=str,metavar="TStart",default=t0Str,help="Start time in 'YYYY-MM-DDThh:mm:ss' (default: %(default)s)")
parser.add_argument('-t1',type=str,metavar="TStop",default=t1Str,help="End time in 'YYYY-MM-DDThh:mm:ss' (default: %(default)s)")
parser.add_argument('-obs',type=str,metavar="OMNI",default=obs,help="Select spacecraft to obtain observations from (default: %(default)s)")
parser.add_argument('-offset',type=float,metavar="tOffset",default=tOffset,help="Minutes to offset spacecraft observation and simulation t0 (default: %(default)s)")
parser.add_argument('-o',type=str,metavar="wind.h5",default=fOut,help="Output Gamera wind file (default: %(default)s)")
parser.add_argument('-m',type=str,metavar="LFM",default=mod,help="Format to write. Options are LFM or TIEGCM (default: %(default)s)")
parser.add_argument('-TsG',type=float,metavar="GAMERA_TStart",default=Ts,help="Gamera start time [min] (default: %(default)s)")
parser.add_argument('-TsL',type=float,metavar="LFM_TStart",default=Ts,help="LFM start time [min] (default: %(default)s)")
parser.add_argument('-bx', action='store_true',default=False,help="Include Bx through ByC and BzC fit coefficients (default: %(default)s)")
parser.add_argument('-bs', action='store_false',default=True,help="Include Bowshock location (default: %(default)s)")
parser.add_argument('-interp', action='store_true',default=False,help="Include shaded region on plots where data is interpolated (default: %(default)s)")
parser.add_argument('-filter', action='store_true',default=False,help="Include additional filtering of data to remove outlier points (default: %(default)s)")
parser.add_argument('-sig',type=float,metavar="sigma",default=sigma,help="N used in N*sigma used for filtering threshold above which will be thrown out (default: %(default)s)")
parser.add_argument('-eps', action="store_true",default=False,help="Output eps figure. (default: %(default)s)")
parser.add_argument('-fn', type=str,metavar="filename",default=filename,help="Name of Wind file. Only used if obs is WINDF. (default: %(default)s)")
parser.add_argument('-f107', type=float,default=None,help="Set f10.7 value to use in bcwind file. Only used if no data available. (default: %(default)s)")
parser.add_argument('-kp', type=float,default=None,help="Set Kp value to use in bcwind file. Only used if no data available. (default: %(default)s)")
parser.add_argument('-safe', action='store_true',default=False,help="Run in SAFE mode. Does not create the h5 file if certain conditions are not met (default: %(default)s)")
#Finalize parsing
args = parser.parse_args()
fOut = args.o
mod = args.m
TsG = args.TsG
TsL = args.TsL
includeBx = args.bx
doBs = args.bs
plotInterped = args.interp
doCoarseFilter = args.filter
sigma = args.sig
doEps = args.eps
obs = args.obs
f107Def = args.f107
kpDef = args.kp
inSafeMode = args.safe
if (obs == 'OMNIW' and args.fn is None): raise Exception('Error: OMNIW requires -fn to specify a WIND file')
t0Str = args.t0
t1Str = args.t1
tOffset = args.offset
fmt='%Y-%m-%dT%H:%M:%S'
t0 = datetime.datetime.strptime(t0Str,fmt)
t1 = datetime.datetime.strptime(t1Str,fmt)
t0r = t0.strftime("%Y-%m-%dT%H:%M:%SZ")
t1r = t1.strftime("%Y-%m-%dT%H:%M:%SZ")
ChkTimes(t0,t1)
cdas = CdasWs()
# calculating average F10.7 over specified time period, can be converted into a timeseries
# pulling data from CDAWeb database
print('Retrieving f10.7 data from CDAWeb')
try:
statusf107,data = cdas.get_data('OMNI2_H0_MRG1HR', ['F10_INDEX1800','KP1800'], t0r,t1r)
totalMin = (t1-t0).days*24.0*60.0+(t1-t0).seconds/60
tmin = np.arange(totalMin)
t107 = data['Epoch']
t107min = np.zeros(len(t107))
for i in range(len(t107)):
t107min[i]=(t107[i]-t0).days*24.0*60.0+(t107[i]-t0).seconds/60
f107=data['F10_INDEX1800']
if (np.all(f107 > maxf107)): #bad values set to 999.9 by cdas
if inSafeMode:
printErrMsg('No valid f10.7 data')
print(Color.GREEN+'!!!!!!!!!! Warning: No valid f10.7 data, Attempting to take value from previous day !!!!!!!!!!'+Color.END)
prevF107 = getPrevDayF107(t0)
if(prevF107<=maxf107):
print(Color.GREEN+'\tSuccessful. Setting f10.7 to %f'%(prevF107)+Color.END)
f107[:] = prevF107
else:
if f107Def is not None:
print(Color.GREEN+'!!!!!!!!!! Warning: No valid f10.7 data on previous day either. Setting f10.7 to %f!!!!!!!!!!'%(f107Def)+Color.END)
f107[:] = f107Def
else:
sys.exit(Color.YELLOW+'!!!!!!!!!! Error: No valid f10.7 data on previous day either. Set f10.7 to use with -f107 flag !!!!!!!!!!'+Color.END)
elif (f107[0] > maxf107):
indF = np.where(f107<maxf107)[0][0]
F107start = f107[indF]
f107[0] = F107start
print(Color.GREEN+'!!!!!!!!!! Warning: f10.7 starts with a bad value (>%d), setting initial value to first good value: %f !!!!!!!!!!'%(maxf107,F107start)+Color.END)
#Linearly interpolating and converting hourly cadence to minutes
f107min = np.interp(tmin, t107min[f107 < maxf107], f107[f107 < maxf107] )
kp = data['KP1800']
if (np.all(kp == 99)):
try:
(time,index,status) = getGFZ(t0Str+"Z",t1Str+"Z",'Kp')
tkp = np.zeros(len(time))
for i in range(len(time)):
tkp[i] = (datetime.datetime.strptime(time[i],fmt+"Z") - t0).days*24.0*60.0 + (datetime.datetime.strptime(time[i],fmt+"Z") - t0).seconds/60
kpmin = np.interp(tmin,tkp,index)
except:
if inSafeMode:
printErrMsg('No valid Kp data')
if kpDef is not None:
print(Color.BLUE+"!!!!!!!!!! Warning: No valid Kp data, setting all values in array to %d!!!!!!!!!!"%(kpDef)+Color.END)
kp[:] = kpDef
kpmin = np.interp(tmin, t107min, kp) # if no good values, setting all to bad values
else:
sys.exit(Color.YELLOW+'!!!!!!!!!! Error: No valid Kp data. Set Kp to use with -kp flag !!!!!!!!!!'+Color.END)
else:
if (kp[0] == 99):
indF = np.where(kp!=99)[0][0]
KpStart = kp[indF]
kp[0] = KpStart
print(Color.BLUE+'!!!!!!!!!! Warning: Kp starts with a bad value, setting to first good value: %d !!!!!!!!!!'%(KpStart)+Color.END)
kpmin = np.interp(tmin, t107min[kp != 99], kp[kp!=99]/10.0)
except Exception as e:
if isinstance(e, SystemExit):
raise # Re-raise SystemExit exception
else:
if inSafeMode:
printErrMsg('Issue pulling f10.7 and kp data from OMNI, need to be set manually.')
print(Color.DARKCYAN+"+'!!!!!!!!!! Issue pulling f10.7 and kp data from OMNI, setting manually"+Color.END)
totalMin = (t1-t0).days*24.0*60.0+(t1-t0).seconds/60
tmin = np.arange(totalMin)
totalMin = totalMin-1
if f107Def is None:
sys.exit(Color.YELLOW+'!!!!!!!!!! Error: Default f10.7 is not set. Update using -f107 flag at execution !!!!!!!!!!'+Color.END)
else:
print(Color.DARKCYAN+'\tSetting f10.7 to: %f !!!!!' %(f107Def)+Color.END)
f107min = np.ones(int(totalMin))*f107Def
try:
(time,index,status) = getGFZ(t0Str+"Z",t1Str+"Z",'Kp')
tkp = np.zeros(len(time))
for i in range(len(time)):
tkp[i] = (datetime.datetime.strptime(time[i],fmt+"Z") - t0).days*24.0*60.0 + (datetime.datetime.strptime(time[i],fmt+"Z") - t0).seconds/60
kpmin = np.interp(tmin,tkp,index)
except:
if kpDef is None:
sys.exit(Color.YELLOW+'!!!!!!!!!! Error: Default Kp is not set. Update using -kp flag at execution !!!!!!!!!!'+Color.END)
else:
print(Color.DARKCYAN+'Setting kp to: %f (can be changed with -kp flag at execution) !!!!!' %(kpDef)+Color.END)
kpmin = np.ones(int(totalMin))*kpDef
if (obs == 'OMNI'):
fileType = 'OMNI'
filename = 'OMNI_HRO_1MIN.txt'
#obtain 1 minute resolution observations from OMNI dataset
print('Retrieving solar wind data from CDAWeb')
status,fIn = cdas.get_data(
'OMNI_HRO_1MIN',
['BX_GSE','BY_GSE','BZ_GSE',
'Vx','Vy','Vz',
'proton_density','T',
'AE_INDEX','AL_INDEX','AU_INDEX','SYM_H',
'BSN_x','BSN_y','BSN_z'],
t0r,t1r)
# Read the solar wind data into 'sw' object and interpolate over the bad data.
if (doCoarseFilter): print(f"Using Coarse Filtering, removing values {sigma} sigma from the mean")
sw = eval('kaipy.solarWind.'+fileType+'.'+fileType)(fIn,doFilter=doCoarseFilter,sigmaVal=sigma)
elif (obs == 'WIND'):
# CDAS tips.
# use CDAweb to get the name of the spacecraft variables you want, such as "C4_CP_FGM_SPIN"
# then use cdas.get_variables('sp_phys','C4_CP_FGM_SPIN') to get a list of variables
# variable names do not exactly match the cdaweb outputs, so check to make sure the variable names are correct
fileType = 'WIND'
filename = 'WIND'
tBuffer = 100 # Extra padding for propagation
t0rb = (t0 - datetime.timedelta(minutes=tBuffer)).strftime("%Y-%m-%dT%H:%M:%SZ")
t1rb = (t1 + datetime.timedelta(minutes=tBuffer)).strftime("%Y-%m-%dT%H:%M:%SZ")
status,fMFI = cdas.get_data(
'WI_K0_MFI',
['BGSEc'],
t0rb,
t1rb
)
if status['http']['status_code'] != 200:
printErrMsg('No valid WIND MFI data during this period')
status,fSWE = cdas.get_data(
'WI_K0_SWE',
['SC_pos_gse','QF_V', 'QF_Np', 'V_GSE','THERMAL_SPD', 'Np'],
t0rb,
t1rb
)
if status['http']['status_code'] != 200:
printErrMsg('No valid WIND SWE data during this period')
sw = eval('kaipy.solarWind.'+fileType+'.'+fileType)(fSWE,fMFI,t0,t1)
elif (obs == 'OMNIW'):
fileType = 'OMNI'
fileType2 = 'OMNIW'
filename = args.fn
doBs = True
print("Working with OMNIW algorithm")
# Read the solar wind data into 'sw' object and interpolate over the bad data.
sw = eval('kaipy.solarWind.'+fileType+'.'+fileType2)(filename)
filename = 'OMNIW_'+filename
elif (obs == 'DSCOVRNC'):
fileType = 'SWPC'
fileType2 = 'DSCOVRNC'
doBs = False
sw = eval('kaipy.solarWind.'+fileType+'.'+fileType2)(t0,t1)
filename = fileType2
else:
raise Exception('Error: Not able to obtain dataset from spacecraft. Please select another mission.')
# Do output format-specific tasks:
if (mod == 'TIEGCM'):
# Write TIEGCM IMF solar wind file
#FIXME: need to update when want to include, example code in pyLTR.SolarWind.Writer.TIEGCM
raise Exception('Error: Cannot currently produce TIEGCM output.')
elif (mod == 'LFM'):
if (includeBx):
print("\tUsing Bx fields")
# Bx Fit
bCoef=bxFit( sw, fileType, filename)
# Setting Bx0 to zero to enforce a planar front with no Bx offset
bCoef[0] = 0.0
else:
print("\tNot using Bx fields")
bCoef = [0.0, 0.0, 0.0]
# Interpolate to one minute:
time_1minute = range(int(sw.data.getData('time_min').min()),
int(sw.data.getData('time_min').max()) )
n = np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('n'))
tp = np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('t'))
vx = np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('vx'))
vy = np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('vy'))
vz = np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('vz'))
cs = np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('cs'))
va = np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('va'))
bx = np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('bx'))
by = np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('by'))
bz = np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('bz'))
b = np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('b'))
try:
ae = np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('ae'))
except:
ae = np.zeros(len(time_1minute))
try:
al = np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('al'))
except:
al = np.zeros(len(time_1minute))
try:
au = np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('au'))
except:
au = np.zeros(len(time_1minute))
try:
symh = np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('symh'))
except:
symh = np.zeros(len(time_1minute))
if doBs:
bsx = np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('xBS'))
bsy = np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('yBS'))
bsz = np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('zBS'))
#grab info on where data is interpolated to include on plots if wanted
interped = np.zeros((11,len(symh)))
interped[0,:] =np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('isBxInterped'))
interped[1,:] =np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('isByInterped'))
interped[2,:] =np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('isBzInterped'))
interped[3,:] =np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('isVxInterped'))
interped[4,:] =np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('isVyInterped'))
interped[5,:] =np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('isVzInterped'))
interped[6,:] =np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('isNInterped'))
try:
interped[7,:] =np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('isCsInterped'))
except:
interped[7,:] =np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('isTInterped'))
if doBs:
interped[8,:] =np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('isxBSInterped'))
interped[9,:] =np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('isyBSInterped'))
interped[10,:] =np.interp(time_1minute, sw.data.getData('time_min'), sw.data.getData('iszBSInterped'))
#finding locations where any variable is interpolated
isInterp=np.any(interped,axis=0)
pltInterp = np.zeros(len(isInterp),dtype=bool)
if (plotInterped):
pltInterp = isInterp
# calculating fast magnetosonic mach number
mfast = np.sqrt((vx**2+vy**2+vz**2)/(cs**2+va**2))
#initialize matrix to hold solar wind data
if doBs:
lfmD = np.zeros((n.shape[0],21))
else:
lfmD = np.zeros((n.shape[0],18))
date = sw.data.getData('meta')['Start date']
nSub = 0
vxSub = []
for i,time in enumerate(time_1minute):
# Convert relevant quantities to SM Coordinates
v_sm = sw._gsm2sm(date+datetime.timedelta(minutes=time), vx[i],vy[i],vz[i])
b_sm = sw._gsm2sm(date+datetime.timedelta(minutes=time), bx[i],by[i],bz[i])
if doBs:
bs_sm = sw._gsm2sm(date+datetime.timedelta(minutes=time), bsx[i],bsy[i],bsz[i])
tilt = sw._getTiltAngle(date+datetime.timedelta(minutes=time))
if doBs:
lfmD[i] = [time,n[i],v_sm[0],v_sm[1],v_sm[2],cs[i],b_sm[0],b_sm[1],b_sm[2],b[i],tilt,ae[i],al[i],au[i],symh[i],tp[i],va[i],mfast[i],bs_sm[0],bs_sm[1],bs_sm[2]]
else:
lfmD[i] = [time,n[i],v_sm[0],v_sm[1],v_sm[2],cs[i],b_sm[0],b_sm[1],b_sm[2],b[i],tilt,ae[i],al[i],au[i],symh[i],tp[i],va[i],mfast[i]]
if mfast[i] < minMfast:
nSub += 1
vxSub.append(v_sm[0])
if nSub > 0:
import kaipy.gamera.gamGrids as gg
#Pull default LFM grid
gIn = "./lfmG"
Nc0 = 8 #Number of outer i cells to cut out from LFM grid (OCT)
xx0,yy0 = gg.LoadTabG(gIn,Nc0)
#Calculate Rout in sunward direction from grid
Rout = np.sqrt(xx0[-1,0]**2.0 + yy0[-1,0]**2.0) #[Re]
Re_km = 6378.1
maxVsub = abs(max(vxSub))
nSubCrit = (Rout*Re_km)/maxVsub/60.0 # mins
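# Note (added for clarity): nSubCrit is roughly the time, in minutes, for solar wind
# moving at the slowest low-Mach |Vx| found above to cross the sunward grid extent,
# i.e. t ~ Rout[Re]*Re_km / |Vx|[km/s] / 60.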
if inSafeMode and (nSub > nSubCrit):
printErrMsg("Low Mach number solar wind persists for too long (%d minutes)"%(nSub))
print()
print(Color.CYAN+"!!!!!!!!!! WARNING LOW MACH NUMBER: Mfast < %.3f for %d minutes, may want to extend grid !!!!!!!!!!"%(minMfast,nSub)+Color.END)
print()
print("Converting to Gamera solar wind file")
Nt,Nv = lfmD.shape
print("\tFound %d variables and %d lines"%(Nv,Nt))
#Convert LFM time to seconds and reset to start at 0
print("\tOffsetting from LFM start (%5.2f min) to Gamera start (%5.2f min)"%(TsL,TsG))
T0 = lfmD[:,0].min()
T = (lfmD[:,0]-TsL+TsG)*60
#Calculating time in UT
UT = []
[UT.append(np.string_(date+datetime.timedelta(seconds=i)).strip()) for i in T]
#Calculating time in MJD
MJD = []
mjdRef=Time(date).mjd
[MJD.append(mjdRef+i/86400.0) for i in T]
#Density, temperature, magnetic field, and tilt don't require scaling
D = lfmD[:,1]
ThT = lfmD[:,10]
Bx = lfmD[:,6] # overwritten by Gamera using the coefficients
By = lfmD[:,7]
Bz = lfmD[:,8]
#Activity indices do not require scaling
AE = lfmD[:,11]
AL = lfmD[:,12]
AU = lfmD[:,13]
SYMH = lfmD[:,14]
# scaling Temperature from kK->K
Temp = lfmD[:,15]*1.0e+3
#Velocity
vScl = 1.0e+3 #km/s->m/s
Vx = vScl*lfmD[:,2]
Vy = vScl*lfmD[:,3]
Vz = vScl*lfmD[:,4]
Cs = vScl*lfmD[:,5] #km/s->m/s
Va = vScl*lfmD[:,16]
Mfast = lfmD[:,17]
#Bowshock position
if doBs:
xBS = lfmD[:,18]
yBS = lfmD[:,19]
zBS = lfmD[:,20]
# Save a plot of the solar wind data.
if doEps:
swPlotFilename = os.path.basename(filename) + '.eps'
else:
swPlotFilename = os.path.basename(filename) + '.png'
print('Saving "%s"' % swPlotFilename)
if doBs:
kaipy.solarWind.swBCplots.swQuickPlot(UT,D,Temp,Vx,Vy,Vz,Bx,By,Bz,SYMH,pltInterp,swPlotFilename,xBS,yBS,zBS,doEps=doEps)
else:
kaipy.solarWind.swBCplots.swQuickPlot(UT,D,Temp,Vx,Vy,Vz,Bx,By,Bz,SYMH,pltInterp,swPlotFilename,doEps=doEps)
print("Writing Gamera solar wind to %s"%(fOut))
with h5py.File(fOut,'w') as hf:
hf.create_dataset("T" ,data=T)
hf.create_dataset("UT",data=UT)
hf.create_dataset("MJD",data=MJD)
hf.create_dataset("D" ,data=D)
hf.create_dataset("Temp" ,data=Temp)
hf.create_dataset("Vx",data=Vx)
hf.create_dataset("Vy",data=Vy)
hf.create_dataset("Vz",data=Vz)
hf.create_dataset("Bx",data=Bx)
hf.create_dataset("By",data=By)
hf.create_dataset("Bz",data=Bz)
hf.create_dataset("tilt",data=ThT)
hf.create_dataset("ae",data=AE)
hf.create_dataset("al",data=AL)
hf.create_dataset("au",data=AU)
hf.create_dataset("symh",data=SYMH)
hf.create_dataset("Interped",data=1*isInterp)
hf.create_dataset("f10.7",data=f107min)
hf.create_dataset("Kp",data=kpmin)
hf.create_dataset("Bx0",data=bCoef[0])
hf.create_dataset("ByC",data=bCoef[1])
hf.create_dataset("BzC",data=bCoef[2])
hf.create_dataset("Va",data=Va)
hf.create_dataset("Cs",data=Cs)
if doBs:
hf.create_dataset("xBS",data=xBS)
hf.create_dataset("yBS",data=yBS)
hf.create_dataset("zBS",data=zBS)
hf.create_dataset("Magnetosonic Mach",data=Mfast)
else:
raise Exception('Error: Misunderstood output file format.')

View File

@@ -1,107 +0,0 @@
#!/usr/bin/env python
#Generates LFM-style HDF-5 grid for Gamera
import argparse
import kaipy.gamera.gamGrids as gg
from argparse import RawTextHelpFormatter
import numpy as np
#Ring params
rParams = {
"D": '<ring gid="lfm" doRing="T" Nr="4" Nc1="8" Nc2="16" Nc3="32" Nc4="32"/>',
"Q": '<ring gid="lfm" doRing="T" Nr="8" Nc1="8" Nc2="16" Nc3="32" Nc4="32" Nc5="64" Nc6="64" Nc7="64" Nc8="64"/>',
"O": '<ring gid="lfm" doRing="T" Nr="12" Nc1="8" Nc2="16" Nc3="32" Nc4="32" Nc5="64" Nc6="64" Nc7="64" Nc8="64" Nc9="128" Nc10="128" Nc11="128" Nc12="128"/>',
"H": '<ring gid="lfm" doRing="T" Nr="10" Nc1="16" Nc2="32" Nc3="64" Nc4="64" Nc5="128" Nc6="128" Nc7="128" Nc8="256" Nc9="256" Nc10="256"/>'
}
#"H": '<ring gid="lfm" doRing="T" Nr="16" Nc1="8" Nc2="16" Nc3="32" Nc4="32" Nc5="64" Nc6="64" Nc7="64" Nc8="64" Nc9="128" Nc10="128" Nc11="128" Nc12="128" Nc13="256" Nc14="256" Nc15="256" Nc16="256"/>'
if __name__ == "__main__":
#Arg parsing
Nc0 = 8 #Number of outer i cells to cut out from LFM grid (OCT)
fIn = "./lfmG"
doEpsY = True
TINY = 1.0e-8
Rin = 2.0
Rout = 0.0
#List of grids
gStrs = ['D','Q','O','H']
gLabs = ["Double","Quad","Oct","Hex"]
Nij0 = 48
Nk0 = 64
MainS = """Generates LFM-style HDF5 grid for Gamera
Grid types (gid)
D: Double ( 48, 48, 64)
Q: Quad ( 96, 96,128)
O: Oct (192,192,256)
H: Hex (384,384,512)
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-gid',type=str,default="D",choices=gStrs,help="Grid Resolution Specifier (default: %(default)s)")
parser.add_argument('-viz', action='store_true', default=False,help="Show 2D figure of grid (default: %(default)s)")
parser.add_argument('-chimp', action='store_true', default=False,help="Store grid in CHIMP format (default: %(default)s)")
parser.add_argument('-Rin',type=float,metavar="Rin",default=Rin ,help="Inner radius (default: %(default)s)")
parser.add_argument('-Rout',type=float,metavar="Rout",default=Rout ,help="Sunward outer radius (default: %(default)s)")
parser.add_argument('-vizG', action='store_true', default=False,help="Show 2D figure w/ ghosts (default: %(default)s)")
#Finalize parsing
args = parser.parse_args()
gid = args.gid
doChimp = args.chimp
doViz = args.viz
doVizG = args.vizG
Rin = args.Rin
Rout = args.Rout
if (doVizG):
doViz = True
n0 = gStrs.index(gid)
en = 2**(n0)
Nij = Nij0*en
Nk = Nk0 *en
Ni = Nij
Nj = Nij
print("Generating %s LFM-style grid ...\n"%(gLabs[n0]))
fOut = "lfm%s.h5"%(gStrs[n0])
#Read tab data of LFM grid
xx0,yy0 = gg.LoadTabG(fIn,Nc0)
#Regrid to new dimensions
XX,YY = gg.regrid(xx0,yy0,Nij,Nij,Rin=Rin,Rout=Rout)
Rin = XX[0,0]
#Calculate real outer radii, sunward/anti
rOutS = np.sqrt(XX[-1,0]**2.0 + YY[-1,0]**2.0)
rOutAS = np.sqrt(XX[-1,-1]**2.0 + YY[-1,-1]**2.0)
llBC = np.arcsin(np.sqrt(1.0/Rin))*180.0/np.pi
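# Dipole mapping: a field line crossing the equator at r = Rin satisfies r = Rin*sin^2(theta),
# so llBC = arcsin(sqrt(1/Rin)) (in degrees) is the polar angle at which that field line
# reaches r = 1, reported below as the low-latitude BC angle.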
#Do full grid
xxG,yyG = gg.Aug2D(XX,YY,doEps=doEpsY,TINY=TINY)
X3,Y3,Z3 = gg.Aug3D(xxG,yyG,Nk=Nk,TINY=TINY)
#Write grid
if (doChimp):
gg.WriteChimp(X3,Y3,Z3,fOut=fOut)
else:
gg.WriteGrid(X3,Y3,Z3,fOut=fOut)
print("Output: %s"%fOut)
print("Size: (%d,%d,%d)"%(Ni,Nj,Nk))
print("Inner Radius: %f"%Rin)
print("Sunward Outer Radius: %f"%rOutS)
print("Tail Outer Radius: %f"%rOutAS)
print("Low-lat BC: %f"%(llBC))
if (not doChimp):
print("Ring params: \n%s"%(rParams[gid]))
print("\nWriting to %s"%(fOut))
if (doViz):
gg.VizGrid(XX,YY,xxG,yyG,fOut=fOut,doGhost=doVizG)
#gg.genRing(XX,YY,Nk=Nk,Tol=1.0,doVerb=True)
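# Example invocation (hypothetical script name; flags as defined above):
#   python genLFMgrid.py -gid Q -Rin 2.5 -viz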

View File

@@ -1,134 +0,0 @@
#!/usr/bin/env python
#Generates RCM config data
import numpy as np
import argparse
from argparse import RawTextHelpFormatter
import kaipy.kaiTools as kT
import kaipy.rcm.lambdautils.AlamData as aD
import kaipy.rcm.lambdautils.AlamParams as aP
import kaipy.rcm.lambdautils.DistTypes as dT
import kaipy.rcm.lambdautils.genAlam as genAlam
from kaipy.rcm.wmutils.wmData import wmParams
import kaipy.rcm.wmutils.genWM as genWM
import kaipy.rcm.lambdautils.fileIO as fileIO
import kaipy.rcm.lambdautils.plotter as plotter
EFLAV = 1
PFLAV = 2
EFUDGE = 1./3.
PFUDGE = 0.0
if __name__ == "__main__":
#Arg parsing
fOut = "rcmconfig.h5"
num_e = 39
num_p = 120
eminp = 1 # [eV]
emine = 1 # [eV]
emaxp = 100 # [keV]
emaxe = 25 # [keV] , 1/4 of 100 keV
L_kt = 10
wolfP1 = 3
wolfP2 = 1
maxKp = 6
plotChoices = ['none', 'spec', 'vs']
MainS = """Generates RCM configuration data
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-o',type=str,default=fOut,metavar="fOut",help="Output file name (default: %(default)s)")
parser.add_argument('-ne', type=int,default=num_e, help="Number of electron channels (default: %(default)s)")
parser.add_argument('-np', type=int,default=num_p, help="Number of proton channels (default: %(default)s)")
parser.add_argument('-mine', type=float,default=emine, help="Min. energy [eV] for electrons at L_kt (default: %(default)s)")
parser.add_argument('-minp', type=float,default=eminp, help="Min. energy [eV] for protons at L_kt (default: %(default)s)")
parser.add_argument('-maxe', type=float,default=emaxe, help="Max. energy [keV] for electrons at L_kt (default: %(default)s)")
parser.add_argument('-maxp', type=float,default=emaxp, help="Max. energy [keV] for protons at L_kt (default: %(default)s)")
parser.add_argument('-L', type=float,default=L_kt, help="L shell [R_e] at which kt should be resolved (default: %(default)s [R_e])")
parser.add_argument('-p1', type=float,default=wolfP1, help="Wolf low-energy p* (default: %(default)s)")
parser.add_argument('-p2', type=float,default=wolfP2, help="Wolf high-energy p* (default: %(default)s)")
parser.add_argument('-plotType', choices=plotChoices,default=plotChoices[0], help="Plot mode (default: %(default)s)")
parser.add_argument('--nop',action='store_true',default=False,help="Do not add zero loss first channel (default: %(default)s)")
parser.add_argument('--noWaveModel',action='store_true',default=False, help="Don't use wave models in the electron/ion loss (default: %(default)s)")
parser.add_argument('--addWM', action='store_true',default=False, help="Add wave models to an existing rcmconfig file, input file needed to be presented (default: %(default)s)")
parser.add_argument('-maxKp', type=int,default=maxKp, help="Max. Kp index allowed in the electron wave model, integer only (default: %(default)s)")
parser.add_argument('-i', type=str,default=fOut,metavar="fIn", help="Input file name when addWM is true (default: %(default)s)")
#Finalize parsing
args = parser.parse_args()
fOut = args.o
num_e = args.ne
num_p = args.np
emine = args.mine
eminp = args.minp
emaxe = args.maxe*1e3 # [keV -> eV]
emaxp = args.maxp*1e3 # [keV -> eV]
L_kt = float(args.L)
wolfP1 = args.p1
wolfP2 = args.p2
addWM = args.addWM
noWaveModel = args.noWaveModel
maxKp = args.maxKp
fIn = args.i
plotType = args.plotType
if maxKp >= 7:
print ("Maximum Kp allowed is 6. Please re-enter a valid number.")
exit()
if addWM:
tauParams = wmParams(dim = 4, nKp = maxKp, nMLT = 97, nL = 41, nEk = 155)
genWM.genh5(fIn,fOut,tauParams)
else:
# Determine proton channel limits based on resolving a certain (proton) temperature at given L
bVol = kT.L_to_bVol(L_kt)
vm = bVol**(-2/3)
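# RCM lambda channels use the energy invariant lambda = E*V^(2/3) (V = flux-tube volume),
# so the channel limits below are alam = E/vm with vm = V^(-2/3); electron lambdas are
# negative by convention (hence the -1 factors).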
alamMin_p = eminp/vm
alamMax_p = emaxp/vm
alamMin_e = -1*emine/vm
alamMax_e = -1*emaxe/vm
dtWolf = dT.DT_Wolf(p1=wolfP1,p2=wolfP2) # Lambda channels will have a (slightly modified) Wolf distribution type
sPe = aP.SpecParams(num_e, alamMin_e, alamMax_e, dtWolf, EFLAV, EFUDGE, name='Electrons') # Parameters to create electron channels
sPp = aP.SpecParams(num_p, alamMin_p, alamMax_p, dtWolf, PFLAV, PFUDGE, name='Protons' ) # Parameters to create proton channels
alamParams = aP.AlamParams(True,[sPe, sPp]) # (doUsePsphere, List[SpecParams])
alamParams.emine = emine
alamParams.eminp = eminp
alamParams.emaxe = emaxe
alamParams.emaxp = emaxp
alamParams.L_kt = L_kt
alamData = genAlam.genAlamDataFromParams(alamParams) # Use AlamParams to generate all of the lambda distributions
# Save
fileIO.saveRCMConfig(alamData,params=alamParams,fname=fOut)
# Add data needed for wavemodel
if not noWaveModel:
tauParams = wmParams(dim = 4, nKp = maxKp, nMLT = 97, nL = 41, nEk = 155)
genWM.genh5(fOut,fOut,tauParams)
print("Wrote RCM configuration to %s"%(fOut))
# Plotting
if plotType == 'spec': # 1 figure per species
plotter.plotLambdasBySpec(alamData.specs,yscale='log',L=L_kt)
elif plotType == 'vs': # 2 figures (value and spacing), group all species
plotter.plotLambdas_Val_Spac(alamData.specs,yscale='log',L=L_kt)
if plotType != 'none': # Show energy range covered (assuming dipole field)
plotter.plotEnergyRange(alamData.specs, rInner=1.5, rOuter=15, rRes=100)

View File

@@ -1,491 +0,0 @@
#!/usr/bin/env python
#python wsa2TDgamera.py
import os,sys,glob
import scipy
from scipy import interpolate
from scipy.optimize import newton_krylov,anderson
import h5py
import numpy as np
import matplotlib.pyplot as plt
import time
import kaipy.gamhelio.wsa2TDgamera.params as params
import kaipy.gamhelio.lib.wsa as wsa
import kaipy.gamhelio.lib.poisson as poisson
import kaipy.gamera.gamGrids as gg
#plotting function for debug
def plot(wsa_file, var_wsa, var_wsa_rolled):
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
#fig=plt.figure(figsize=(16,12))
fig=plt.figure(figsize=(10,6.))
gs = gridspec.GridSpec(2,1,height_ratios=[20,1])
ax1 = fig.add_subplot(gs[0,0])
axc = fig.add_subplot(gs[1,0])
p1=ax1.pcolormesh(var_wsa_rolled[::-1,:]*1.e5,cmap='RdBu_r',vmin=-150.,vmax=150.)
ax1.contour(var_wsa_rolled[::-1,:],[0.],colors='white')
plt.colorbar(p1,cax=axc, orientation = 'horizontal').set_label('Br [nT]')
ax1.set_xlim((0,var_wsa.shape[1]))
ax1.set_ylim((0,var_wsa.shape[0]))
ax1.set_aspect("equal")
##in rotating system of coordinates
#ax2 = plt.subplot(212,sharex=ax1)
##p2=ax2.pcolormesh(var_wsa_rolled)
#p2=ax2.pcolormesh(var_wsa_rolled[::-1,:],cmap='RdBu_r',vmin=var_wsa_rolled.min(),vmax=-var_wsa_rolled.min())
#plt.colorbar(p2,ax=ax2).set_label('V')
#ax2.set_xlim((0,var_wsa_rolled.shape[1]))
#ax2.set_ylim((0,var_wsa_rolled.shape[0]))
fig.suptitle(wsaFile)
plt.savefig(wsaFile[:-4]+'png')
# [EP] function to plot boundary conditions in rotating frame to make a movie
def plotBc(wsa_file, phi, theta, var1, var2, var3, var4):
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
fig=plt.figure(figsize=(12,9))
gs = gridspec.GridSpec(2,2,wspace=0.2, hspace =0.1)
phi = phi*180./np.pi
theta = (np.pi/2.-theta)*180./np.pi
var4 = var4/1.e6 #temp in MK
ax1 = plt.subplot(gs[0,0], aspect='equal')
p1=ax1.pcolormesh(phi, theta, var1.T, shading = 'auto', cmap = 'rainbow', vmin = 300, vmax = 850)
plt.colorbar(p1,ax=ax1,aspect = 15, orientation = 'horizontal').set_label(r'$V_r$, km/s')
ax2 = plt.subplot(gs[0,1],sharex=ax1, aspect='equal')
p2=ax2.pcolormesh(phi, theta,var2.T, shading = 'auto', cmap = 'RdBu_r', vmin = -150, vmax = 150)
plt.colorbar(p2,ax=ax2,aspect = 15, orientation = 'horizontal').set_label(r'$B_r, nT$')
ax3 = plt.subplot(gs[1,0],sharex=ax1, aspect='equal')
p3=ax3.pcolormesh(phi, theta,var3.T, shading = 'auto', cmap = 'copper_r', vmin = 300, vmax = 1200)
plt.colorbar(p3,ax=ax3,aspect = 15, orientation = 'horizontal').set_label(r'$Rho, cm^{-3}$')
ax4 = plt.subplot(gs[1,1],sharex=ax1, aspect='equal')
p4=ax4.pcolormesh(phi, theta, var4.T,shading = 'auto', cmap = 'copper', vmin = 0.5, vmax = 2.5)
plt.colorbar(p4,ax=ax4,aspect = 15, orientation = 'horizontal').set_label('Temperature, MK')
date = wsa_file.split('/')[-1][4:12]
year = date[0:4]
month = date[4:6]
day = date[6:8]
plt.suptitle(year + ':' + month + ':' + day, y=0.85)
plt.savefig(wsaFile[:-5]+'_bc.png', bbox_inches='tight')
#----------- PARSE ARGUMENTS ---------#
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('ConfigFileName',help='The name of the configuration file to use',default='startup.config')
args = parser.parse_args()
#----------- PARSE ARGUMENTS ---------#
# Read params from config file
prm = params.params(args.ConfigFileName)
(ni,nj,nk) = (prm.Ni,prm.Nj,prm.Nk)
Ng = prm.NO2
#grid parameters
tMin = prm.tMin
tMax = prm.tMax
Rin = prm.Rin
Rout = prm.Rout
Ni = prm.Ni
Nj = prm.Nj
Nk = prm.Nk
#----------GENERATE HELIO GRID------
print("Generating gamera-helio grid ...")
X3,Y3,Z3 = gg.GenKSph(Ni=Ni,Nj=Nj,Nk=Nk,Rin=Rin,Rout=Rout,tMin=tMin,tMax=tMax)
#to generate non-uniform grid for GL cme (more fine in region 0.1-0.3 AU)
#X3,Y3,Z3 = gg.GenKSphNonUGL(Ni=Ni,Nj=Nj,Nk=Nk,Rin=Rin,Rout=Rout,tMin=tMin,tMax=tMax)
gg.WriteGrid(X3,Y3,Z3,fOut=os.path.join(prm.GridDir,prm.gameraGridFile))
print("Gamera-helio grid ready!")
#----------GENERATE HELIO GRID------
# [EP] sorted list of WSA files
wsaFiles = sorted(glob.glob(os.path.join(prm.adaptdir,prm.adaptWildCard)))
print(wsaFiles)
# [EP] electric fields on edges
#+2 in j directions, two ghost cells at start and end
et_save = np.zeros( (nj+2,nk+1) )
ep_save = np.zeros( (nj+1+2,nk) )
#Normalization
Vnorm = 1.e5 #cm/s => km/s
Bnorm = 1.e-5 #Gs => nT
mp = 1.67e-24
kblts = 1.38e-16
#open innerbcTD.h5 for output
with h5py.File(os.path.join(prm.IbcDir,prm.gameraIbcFile),'w') as hf:
#[EP] going through the list of WSA files
for (fcount,wsaFile) in enumerate(wsaFiles):
#print(fcount)
############### WSA STUFF #####################
isFirstFile = (wsaFile == wsaFiles[0])
#[EP] reading WSA file
jd_c,phi_wsa_v,theta_wsa_v,phi_wsa_c,theta_wsa_c,bi_wsa,v_wsa,n_wsa,T_wsa = wsa.read(wsaFile,prm.densTempInfile,prm.normalized, verbose = isFirstFile)
#bi_wsa in Gs CGS units
#v_wsa in cm/s
#n_wsa in g/cm-3
#T_wsa in K
#convert julian date from wsa fits into modified julian date
mjd_c = jd_c - 2400000.5
if isFirstFile:
#take JD from the first wsa file
jd0 = jd_c
# GAMERA GRID
# read GAMERA grid from innerbc.h5
print ('reading heliogrid.h5 ...')
f = h5py.File(os.path.join(prm.GridDir,prm.gameraGridFile), 'r')
#Nphi, Nth, Nr = np.shape(f['X'])
#corners
x = f['X'][:]
y = f['Y'][:]
z = f['Z'][:]
#centers
xc = 0.125*(f['X'][:-1,:-1,:-1]+f['X'][:-1,:-1,1:]+f['X'][:-1,1:,:-1]+f['X'][:-1,1:,1:]+
f['X'][1:,:-1,:-1]+f['X'][1:,:-1,1:]+f['X'][1:,1:,:-1]+f['X'][1:,1:,1:])
yc = 0.125*(f['Y'][:-1,:-1,:-1]+f['Y'][:-1,:-1,1:]+f['Y'][:-1,1:,:-1]+f['Y'][:-1,1:,1:]+
f['Y'][1:,:-1,:-1]+f['Y'][1:,:-1,1:]+f['Y'][1:,1:,:-1]+f['Y'][1:,1:,1:])
zc = 0.125*(f['Z'][:-1,:-1,:-1]+f['Z'][:-1,:-1,1:]+f['Z'][:-1,1:,:-1]+f['Z'][:-1,1:,1:]+
f['Z'][1:,:-1,:-1]+f['Z'][1:,:-1,1:]+f['Z'][1:,1:,:-1]+f['Z'][1:,1:,1:])
#radius of inner boundary. Index order [k,j,i]
R0 = np.sqrt(x[0,0,Ng]**2+y[0,0,Ng]**2+z[0,0,Ng]**2)
#[EP for testing]
#cell corners including ghost cells
r = np.sqrt(x[:]**2+y[:]**2+z[:]**2)
rxy = np.sqrt(x[:]**2+y[:]**2)
# remove the ghosts from angular dimensions (corners)
P = np.arctan2(y[Ng:-Ng,Ng:-Ng,:],x[Ng:-Ng,Ng:-Ng,:])
P [ P < 0] += 2*np.pi
T = np.arccos(z[Ng:-Ng,Ng:-Ng,:]/r[Ng:-Ng,Ng:-Ng,:])
#grid for output into innerbc.h5
P_out = P[:,:,0:Ng+1]
T_out = T[:,:,0:Ng+1]
R_out = r[Ng:-Ng,Ng:-Ng,0:Ng+1]
print ("shapes of output phi and theta ", P_out.shape, T_out.shape, R_out.shape)
#centers spherical grid excluding ghosts in angular directions
#Rc = np.sqrt(xc[Ng:-Ng, Ng:-Ng,:]**2 + yc[Ng:-Ng, Ng:-Ng,:]**2 + zc[Ng:-Ng, Ng:-Ng,:]**2)
#Pc = np.arctan2(yc[Ng:-Ng, Ng:-Ng,:], xc[Ng:-Ng, Ng:-Ng,:])
#Tc = np.arccos(zc[Ng:-Ng,Ng:-Ng,:]/Rc)
#include one extra cell in j direction at start and end
Pg = Ng-1
Rc = np.sqrt(xc[Ng:-Ng, Pg:-Pg,:]**2 + yc[Ng:-Ng, Pg:-Pg,:]**2 + zc[Ng:-Ng, Pg:-Pg,:]**2)
Pc = np.arctan2(yc[Ng:-Ng, Pg:-Pg,:], xc[Ng:-Ng, Pg:-Pg,:])
Tc = np.arccos(zc[Ng:-Ng,Pg:-Pg,:]/Rc)
Pc [Pc < 0] += 2*np.pi
#GAMERA grid centers at the inner boundary, 1D array
phi = Pc[:,0,0]
theta = Tc[0,:,0]
#debug
#print (phi)
#print (theta)
# Build a Poisson solver on the inner-boundary (theta,phi) sphere; it is used below to
# solve Laplacian(Psi) = dB_r/dt for the electric-field potential Psi
pois = poisson.poisson(theta,phi)
#time from the 1st wsa map in seconds
time_sec = (jd_c - jd0)*24.*60.*60.
omega=2*np.pi/prm.Tsolar*(25.38/27.27)
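# Solar rotation rate used to de-rotate successive WSA maps; the 25.38/27.27 factor
# converts between the sidereal and synodic Carrington rotation periods (~25.38 and ~27.27 days)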
#shift of wsa maps needed if wsa solutions are provided in inertial frame (folder UPDATED)
#if wsa solutions are provided in rotating carrington frame (folder CARR), no need to shift.
#shift phi coordinate in wsa data according to the shift of the wsa map relative to the first one
#wsa maps move to the right with cadence 1 day
phi_prime=(phi_wsa_c-omega*prm.adaptCadence*fcount)%(2*np.pi)
#looking for index of shift
if np.where(np.ediff1d(phi_prime)<0)[0].size!=0: #for the first map size =0, for other maps size=1
ind0=np.where(np.ediff1d(phi_prime)<0)[0][0]+1
#print 'ind = ', ind0
else:
ind0=0 # this is for the first map
#shifting phi_prime to the left
phi_prime=np.roll(phi_prime,-ind0)
bi_wsa_rolled=np.roll(bi_wsa,-ind0,axis=1)
v_wsa_rolled=np.roll(v_wsa,-ind0,axis=1)
n_wsa_rolled=np.roll(n_wsa,-ind0,axis=1)
T_wsa_rolled=np.roll(T_wsa,-ind0,axis=1)
#plot br from the original wsa map (top plot) and the map shifted to the origin (bottom plot)
#changes in time in the bottom plot are purely due to time-dependent variations of B_r (rotation is eliminated)
plot(wsaFile, bi_wsa, bi_wsa_rolled)
##plot(wsaFile, v_wsa, v_wsa_rolled)
###INTERPOLATION OF ROLLED WSA MAPS TO GAMERA GRID phi-theta####
# bivariate spline approximation over a rectangular mesh
fbi = interpolate.RectBivariateSpline(phi_wsa_c,theta_wsa_c,bi_wsa_rolled.T,kx=1,ky=1)
# interpolation to Gamera grid
br = fbi(phi,theta)
#Next Slava used SMOOTHING for br for the paper, we do not need it for now
fv = interpolate.RectBivariateSpline(phi_wsa_c,theta_wsa_c,v_wsa_rolled.T,kx=1,ky=1)
vr = fv(phi,theta)
f = interpolate.RectBivariateSpline(phi_wsa_c,theta_wsa_c,n_wsa_rolled.T,kx=1,ky=1)
rho = f(phi,theta)
#not interpolating temperature; calculating it from pressure balance (the sound-speed version is commented out below)
#assuming uniform total pressure: Rho_max*k*T0 = p + Br^2/8pi
#TODO: Check Temp calculation
T0 = 0.9e6
Rho0 = 1100.*mp #density in the HCS
#cs = np.sqrt(prm.gamma/rho*(rho.max()*1.38e-16*T0/1.67e-24-br**2/8/np.pi))
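# Note: strict total-pressure balance n*k*T + Br^2/(8*pi) = (Rho0/mp)*k*T0 would give
# Temp = mp/rho/kblts*(Rho0*kblts*T0/mp - br**2/8./np.pi); the trailing "/2." below halves
# the magnetic-pressure term (see TODO above)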
Temp = mp/rho/kblts*(Rho0*kblts*T0/mp-br**2/8./np.pi/2.)
# Poisson solver after interpolation onto GAMERA grid
if fcount>0:
print ('fcount = ', fcount)
#right-hand side of laplacian equation
pois.setRHS( (br-br_save).T) #after transposing it becomes (nj,nk)
guess=np.zeros_like(br.T)
#electric field potential psi: (Laplacian(Psi) = dB_r/dt)
Psi = newton_krylov(pois.residual,guess, method='lgmres',verbose=True,iter=100)#,f_rtol=1.e-6) #iter=100
print('Residual: %g' % abs(pois.residual(Psi)).max())
print ('Psi.shape = ', Psi.shape) # (nj, nk) =(128, 256)
#Psi is defined in cell centers
#calculate electric field components
#suffix _a denotes that this is adapt field
#E_theta = dPsi/dphi/sin(theta)
#E_phi = -dPsi/dtheta/r
et_a = np.zeros( (Psi.shape[0],Psi.shape[1]+1) ) #(128, 257)
et_a[:,1:-1] = np.diff(Psi,axis=1)/np.diff(phi) #except first and last cells in k
et_a[:,0] = (Psi[:,0] - Psi[:,-1])/(phi[0]-phi[-1]+2*np.pi) #k=0
et_a[:,-1]=et_a[:,0] #k=Nk+1
et_a /= np.sin(theta[:,None])
print ('E_theta.shape = ', et_a.shape)
"""
note, here we assume theta constant along phi and same theta on the
boundary and in the center of the GAMERA cell - Elena: we should probably fix that
"""
ep_a=np.zeros((Psi.shape[0]+1,Psi.shape[1])) #(129, 256)
ep_a[1:-1,:] = -np.diff(Psi,axis=0)/np.diff(theta)[:,None] #except first and last cells in j
#for j=0 and j=N_j we set nearby values
ep_a[0,:]=ep_a[1,:] # used to set these to zero, but more appropriate to repeat from next theta, since Ephi does not depend on theta at the pole.
ep_a[-1,:]=ep_a[-2,:]
print ('E_phi.shape = ', ep_a.shape)
#[EP]: two lines above are from LFM where theta went from pole to pole
# I do not understand all that business with interpolation of electric fields in time (see adapt2lfm.py)
# Convert to CGS. FIX ME!!! UNITS HARD CODED
et_a*= prm.Rin*prm.scale/prm.adaptCadence/24./3600.
ep_a*= prm.Rin*prm.scale/prm.adaptCadence/24./3600.
et_save = et_a
ep_save = ep_a
#[EP] for debug
dbr = br - br_save
#et_save and ep_save are defined at times of adapt and on cell edges
br_save = br
"""
After we obtained et_save ep_save at cell edges we calculate B_theta and B_phi
at cell centers and faces
"""
vrt = vr.T #(nj,nk) in cell centers
bp_a = np.zeros_like(vrt) #B_phi in cell centers
bt_a = np.zeros_like(vrt) #B_theta in cell centers
bp_kface_a = np.zeros( (vrt.shape[0],vrt.shape[1]+1) ) #(nj,nk+1)
bt_jface_a = np.zeros( (vrt.shape[0]+1,vrt.shape[1]) ) #(nj+1,nk)
vrt_kface = np.zeros( (vrt.shape[0],vrt.shape[1]+1) )
vrt_jface = np.zeros( (vrt.shape[0]+1,vrt.shape[1]) )
if fcount >0:
# B_phi and B_theta defined at cell centers
bp_a = 0.5*(et_save[:,:-1]+et_save[:,1:])/vrt
bt_a = -0.5*(ep_save[:-1,:]+ep_save[1:,:])/vrt
# the above are at cell centers, also need at the
# corresponding faces, see below
# First interpolate velocity to faces
vrt_kface[:,1:-1] = 0.5*(vrt[:,:-1]+vrt[:,1:]); vrt_kface[:,0] = 0.5*(vrt[:,-1]+vrt[:,0]); vrt_kface[:,-1] = vrt_kface[:,0]
vrt_jface[1:-1,:] = 0.5*(vrt[1:,:]+vrt[:-1,:]) ; vrt_jface[0,:]=vrt[1,:].mean(); vrt_jface[-1,:]=vrt[-2,:].mean();
#B_phi and B_theta at faces
bp_kface_a = et_save/vrt_kface
bt_jface_a = -ep_save/vrt_jface
#transpose again to agree with GAMERA indexing nk,nj,ni
# Note, these are defined at cell centers on the boundary (at rmin)
bp_a = bp_a.T
bt_a = bt_a.T
#in kaiju we do not need to save B-components at cell centers, so we do not need bp_a and bt_a
# at faces; change shapes to match order in gamera nk, nj
bt_jface_a = bt_jface_a.T
bp_kface_a = bp_kface_a.T
et_save = et_save.T
ep_save = ep_save.T
# Scale inside ghost region
#print(rho.shape)
(vr,rho,Temp,br,bp_kface_a,bt_jface_a,et_save,ep_save) = [np.dstack(prm.NO2*[var]) for var in (vr,rho,Temp,br,bp_kface_a,bt_jface_a,et_save,ep_save)]
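# Scale the copies stacked into the ghost cells with radius: rho and Br by (R0/r)^2
# (mass-flux and magnetic-flux conservation for constant radial speed), and Temp, the
# k-face B_phi and E_theta by (R0/r) (a Parker-spiral-like falloff of the tangential field)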
rho*=(R0/Rc[0,0,:Ng])**2
Temp*=(R0/Rc[0,0,:Ng])
br*=(R0/Rc[0,0,:Ng])**2
bp_kface_a*=(R0/Rc[0,0,:Ng])
et_save*=(R0/Rc[0,0,:Ng])
#tangential velocities are set to zero
#vp = zeros_like(vr)
#vt = zeros_like(vr)
#print vr.shape, rho.shape, cs.shape, br.shape, bt_jface_a.shape, bp_kface_a.shape
#print et_save.shape, ep_save.shape
#Agreement. For innerbcTD.h5 the output units are V[km/s], Rho[cm-3], T[K], B[nT]
#v_wsa /= Vnorm
#n_wsa /= mp
#bi_wsa /= Bnorm
print (wsaFile)
#removing two bounding cells in theta and normalizing
vrp = vr[:,1:-1,:]/Vnorm
vp = np.zeros_like(vrp)
vt = np.zeros_like(vrp)
rhop = rho[:,1:-1,:]/mp
Tempp = Temp[:,1:-1,:]
brp = br[:,1:-1,:]/Bnorm
bt_jface_a_p = bt_jface_a[:,1:-1,:]/Bnorm
bp_kface_a_p = bp_kface_a[:,1:-1,:]/Bnorm
et_save_p = et_save[:,1:-1,:]
ep_save_p = ep_save[:,1:-1,:]
#print vrp.shape, rhop.shape, csp.shape, brp.shape, bt_jface_a_p.shape, bp_kface_a_p.shape
#print et_save_p.shape, ep_save_p.shape
#V in cm/s B in Gs n in gcm-3
print (fcount, time_sec, mjd_c)
if prm.dumpBC:
if fcount == 0:
#write out phi and th coords of corners at inner boundary grid
hf.create_dataset("X", data=P_out)
hf.create_dataset("Y", data=T_out)
hf.create_dataset("Z", data=R_out)
grname = "Step#"+str(fcount)
grp = hf.create_group(grname)
grp.attrs.create("time", time_sec)
grp.attrs.create("MJD", mjd_c)
grp.create_dataset("vr",data=vrp) #cc
grp.create_dataset("vp",data=vp) #cc !zeros
grp.create_dataset("vt",data=vt) #cc !zeros
#hf.create_dataset("vr_kface",data=vr_kface) #kface
grp.create_dataset("rho",data=rhop) #cc
grp.create_dataset("T",data=Tempp) #cc
grp.create_dataset("br",data=brp) #cc
#hf.create_dataset("br_kface",data=br_kface) #kface
#hf.create_dataset("bp",data=bp_a) #cc
#hf.create_dataset("bt",data=bt_a) #cc
grp.create_dataset("bt_jface",data=bt_jface_a_p) #jface
grp.create_dataset("bp_kface",data=bp_kface_a_p) #kface
grp.create_dataset("et",data=et_save_p) #k-edges
grp.create_dataset("ep",data=ep_save_p) #j-edges
plotBc(wsaFile,phi, theta[1:-1], vrp[:,:,Ng-1], brp[:,:,Ng-1], rhop[:,:,Ng-1], Tempp[:,:,Ng-1])
# [EP] test if calculated tangential electric fields give Br from wsa
if fcount > 30:
print ('Elena debug')
print (fcount)
dphi = phi[2]-phi[1]
dtheta = theta[2]-theta[1]
dbrp = dbr[:,1:-1]
#cell edges dphi and dtheta at inner boundary face
dlp = dphi*rxy[Ng:-Ng-1,Ng:-Ng,Ng] #(256,129) dlp change with nj
#dlp = dphi*r[Ng:-Ng-1,Ng:-Ng,Ng]*sin(T[:,:])
dlt = dtheta*R0 #dlt is same for all cells
et_use = et_save_p.T #(257,128)
ep_use = ep_save_p.T #(256,129)
#rotE of the cell face
circE = np.zeros((nk,nj))
#circE1 = - (ep_use[:,:-1]*dlp + et_use[1:,:]*dlt - ep_use[:,1:]*dlp - et_use[:-1,:]*dlt)
for k in range(nk):
for j in range(nj):
circE[k,j] = - (ep_use[k,j+1]*dlp[k,j+1] - ep_use[k,j]*dlp[k,j] + et_use[k,j]*dlt - et_use[k+1,j]*dlt)
dt = prm.adaptCadence*24.*3600.
dbrdt = dlp[:,:-1]*dlt*prm.scale*dbrp/dt
resid = dbrdt - circE
fig1 = plt.figure(); plt.pcolormesh(np.log10(np.abs(resid.T))); plt.colorbar()
fig1.suptitle(wsaFile)
plt.savefig(wsaFile[:-5]+'_testFL.png')
#if fcount==2:
# sys.exit("passed two wsa files")

View File

@@ -1,184 +0,0 @@
#! /usr/bin/env python
import numpy as np
import os,sys,glob
from scipy import interpolate
import time
import h5py
import matplotlib.pyplot as plt
import kaipy.gamhelio.wsa2gamera.params as params
import kaipy.gamhelio.lib.wsa as wsa
from kaipy.kdefs import *
import kaipy.gamera.gamGrids as gg
# Parse arguments
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('ConfigFileName',help='The name of the configuration file to use',default='startup.config')
args = parser.parse_args()
# Read params from config file
prm = params.params(args.ConfigFileName)
Ng = prm.Nghost
gamma = prm.gamma
TCS = prm.TCS # Temperature in the current sheet for pressure balance calculation
nCS = prm.nCS # Density in the current sheet for pressure balance calculation
# Grid parameters
tMin = prm.tMin
tMax = prm.tMax
Rin = prm.Rin
Rout = prm.Rout
Ni = prm.Ni
Nj = prm.Nj
Nk = prm.Nk
#conversions from wsa to gamera units
cms2kms = 1.e-5 # cm/s => km/s
Gs2nT = 1.e5 # Gs => nT
# Conversion for E field 1 statV/cm = 3.e7 mV/m
eScl = 3.e7
ffits = prm.wsaFile
# Generate spherical helio grid
print("Generating gamera-helio grid Ni = %d, Nj = %d, Nk = %d " % (Ni, Nj, Nk))
X3,Y3,Z3 = gg.GenKSph(Ni=Ni,Nj=Nj,Nk=Nk,Rin=Rin,Rout=Rout,tMin=tMin,tMax=tMax)
gg.WriteGrid(X3,Y3,Z3,fOut=os.path.join(prm.GridDir,prm.gameraGridFile))
if os.path.exists(os.path.join(prm.GridDir,prm.gameraGridFile)):
print("Grid file %s is ready!" % prm.gameraGridFile)
# Read WSA
jd_c,phi_wsa_v,theta_wsa_v,phi_wsa_c,theta_wsa_c,bi_wsa,v_wsa,n_wsa,T_wsa = wsa.read(ffits,prm.densTempInfile,prm.normalized)
# Units of WSA input
# bi_wsa in [Gs]
# v_wsa in [cm/s]
# n_wsa in [g cm-3]
# T_wsa in [K]
# convert julian date in the center of the WSA map into modified julian date
mjd_c = jd_c - JD2MJD
# Get GAMERA grid for further interpolation
with h5py.File(os.path.join(prm.GridDir,prm.gameraGridFile),'r') as f:
x=f['X'][:]
y=f['Y'][:]
z=f['Z'][:]
# Cell centers, note order of indexes [k,j,i]
xc = 0.125*(x[:-1,:-1,:-1]+x[:-1,1:,:-1]+x[:-1,:-1,1:]+x[:-1,1:,1:]
+x[1:,:-1,:-1]+x[1:,1:,:-1]+x[1:,:-1,1:]+x[1:,1:,1:])
yc = 0.125*(y[:-1,:-1,:-1]+y[:-1,1:,:-1]+y[:-1,:-1,1:]+y[:-1,1:,1:]
+y[1:,:-1,:-1]+y[1:,1:,:-1]+y[1:,:-1,1:]+y[1:,1:,1:])
zc = 0.125*(z[:-1,:-1,:-1]+z[:-1,1:,:-1]+z[:-1,:-1,1:]+z[:-1,1:,1:]
+z[1:,:-1,:-1]+z[1:,1:,:-1]+z[1:,:-1,1:]+z[1:,1:,1:])
# radius of the inner boundary
R0 = np.sqrt(x[0,0,Ng]**2+y[0,0,Ng]**2+z[0,0,Ng]**2)
r = np.sqrt(x**2+y**2+z**2)
# Calculate phi and theta in physical domain (excluding ghost cells)
P = np.arctan2(y[Ng:-Ng,Ng:-Ng,:],x[Ng:-Ng,Ng:-Ng,:])
P[P<0]=P[P<0]+2*np.pi
#P = P % (2*np.pi) # sometimes the very first point may be a very
# small negative number, which the above call sets
# to 2*pi. This takes care of it.
T = np.arccos(z[Ng:-Ng,Ng:-Ng,:]/r[Ng:-Ng,Ng:-Ng,:])
#grid for inner i-ghost region; output to innerbc.h5
P_out = P[:,:,0:Ng+1]
T_out = T[:,:,0:Ng+1]
R_out = r[Ng:-Ng,Ng:-Ng,0:Ng+1]
# Calculate r, phi and theta coordinates of cell centers in physical domain (excluding ghost cells)
Rc = np.sqrt(xc[Ng:-Ng,Ng:-Ng,:]**2+yc[Ng:-Ng,Ng:-Ng,:]**2+zc[Ng:-Ng,Ng:-Ng,:]**2)
Pc = np.arctan2(yc[Ng:-Ng,Ng:-Ng,:],xc[Ng:-Ng,Ng:-Ng,:])
Pc[Pc<0]=Pc[Pc<0]+2*np.pi
Tc = np.arccos(zc[Ng:-Ng,Ng:-Ng,:]/Rc)
# this is fast and better than griddata in that it nicely extrapolates boundaries:
fbi = interpolate.RectBivariateSpline(phi_wsa_c,theta_wsa_c,bi_wsa.T,kx=1,ky=1)
br = fbi(Pc[:,0,0],Tc[0,:,0])
# Smoothing
if not prm.gaussSmoothWidth==0:
import astropy
from astropy.convolution import convolve,Gaussian2DKernel
gauss=Gaussian2DKernel(prm.gaussSmoothWidth) # kernel standard deviation in pixels (first positional argument)
br =astropy.convolution.convolve(br,gauss,boundary='extend')
# Interpolate to Gamera grid
fv = interpolate.RectBivariateSpline(phi_wsa_c,theta_wsa_c,v_wsa.T,kx=1,ky=1)
vr = fv(Pc[:,0,0],Tc[0,:,0])
f = interpolate.RectBivariateSpline(phi_wsa_c,theta_wsa_c,n_wsa.T,kx=1,ky=1)
rho = f(Pc[:,0,0],Tc[0,:,0])
# Not interpolating temperature, but calculating from the total pressure balance
# AFTER interpolating br and rho to the gamera grid
# n_CS*k*T_CS = n*k*T + Br^2/8pi
temp = (nCS*kbltz*TCS - br**2/8./np.pi)*Mp_cgs/rho/kbltz
#temperature in [K]
#check
#print ("Max and min of temperature in MK")
#print (np.amax(temp)*1.e-6, np.amin(temp)*1.e-6)
# note, redefining interpolation functions we could also
# interpolate from bi_wsa as above, but then we would have to
# smooth bk, if necessary. The way we're doing it here, bk will be
# smoothed or not, dependent on whether br has been smoothed.
# note also, this has to extrapolate
fbi = interpolate.RectBivariateSpline(Pc[:,0,0],Tc[0,:,0],br,kx=1,ky=1)
fv = interpolate.RectBivariateSpline(Pc[:,0,0],Tc[0,:,0],vr,kx=1,ky=1)
br_kface = fbi(P[:-1,0,0],Tc[0,:,0]) #(Nk,Nj)
vr_kface = fv (P[:-1,0,0],Tc[0,:,0]) #(Nk,Nj)
# before applying scaling inside ghost region
# get br values to the left of an edge for E_theta calculation
br_kedge = np.roll(br,1, axis=1)
# Scale inside ghost region
(vr,vr_kface,rho,temp,br,br_kface) = [np.dstack(Ng*[var]) for var in (vr,vr_kface,rho,temp,br,br_kface)]
rho*=(R0/Rc[0,0,:Ng])**2
br*=(R0/Rc[0,0,:Ng])**2
br_kface*=(R0/Rc[0,0,:Ng])**2
# Calculating E-field component on k_edges in [mV/m]
# E_theta = B_phi*Vr/c = - Omega*R*sin(theta)/Vr*Br * Vr/c = - Omega*R*sin(theta)*Br/c
omega = 2*np.pi/(Tsolar*Day2s) # [1/s]
# Theta at centers of k-faces (== theta at kedges)
Tcf = 0.25*(T[:,:-1,:-1] + T[:,1:,1:] + T[:,:-1,1:] + T[:,1:,:-1])
et_kedge = - omega*R0*Rsolar*np.sin(Tcf[:-1,:,Ng-1])*br_kedge/vc_cgs #[statV/cm]
# Unit conversion agreement. Input to GAMERA innerbc.h5 has units V[km/s], Rho[cm-3], T[K], B[nT], E[mV/m]
vr *= cms2kms
vr_kface *= cms2kms
rho /= Mp_cgs
br *= Gs2nT
br_kface *= Gs2nT
et_kedge *= eScl
with h5py.File(os.path.join(prm.IbcDir,prm.gameraIbcFile),'w') as hf:
hf.create_dataset("X", data=P_out)
hf.create_dataset("Y", data=T_out)
hf.create_dataset("Z", data=R_out)
grname = "Step#0"
grp = hf.create_group(grname)
grp.attrs.create("MJD", mjd_c)
grp.create_dataset("vr",data=vr)
grp.create_dataset("vr_kface",data=vr_kface) # size (Nk,Nj,Ng)
grp.create_dataset("rho",data=rho)
grp.create_dataset("temp",data=temp)
grp.create_dataset("br",data=br)
grp.create_dataset("br_kface",data=br_kface) # size (Nk,Nj,Ng)
grp.create_dataset("et_kedge",data=et_kedge) # size (Nk, Nj)

View File

@@ -1,151 +0,0 @@
#!/usr/bin/env python
#Make video of Gamera magnetosphere run
import argparse
from argparse import RawTextHelpFormatter
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import kaipy.kaiViz as kv
import kaipy.kaiTools as ktools
import matplotlib.gridspec as gridspec
import numpy as np
import kaipy.gamera.gampp as gampp
import kaipy.kaiH5 as kh5
import kaipy.cmaps.kaimaps as kmaps
import kaipy.gamera.deltabViz as dbViz
import sys
import os
import errno
import cartopy.crs as ccrs
if __name__ == "__main__":
rad2deg = 180.0/np.pi
bMag = dbViz.dbMag
bLin = dbViz.dbLin
#Defaults
fdir = os.getcwd()
ftag = "msphere"
oDir = "vid2D"
k0 = 0 #Vertical slice to use
nS = 0
nE = -1
Nblk = 1 #Number of blocks
nID = 1 #Block ID of this job
MainS = """Creates visualization of ground dB
NOTE: Assumes ground dB has been calculated using calcdb.x on simulation data.
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-d',type=str,metavar="directory",default=fdir,help="Directory to read from (default: %(default)s)")
parser.add_argument('-o',type=str,metavar="directory",default=oDir,help="Subdirectory to write to (default: %(default)s)")
parser.add_argument('-id',type=str,metavar="runid",default=ftag,help="RunID of data (default: %(default)s)")
parser.add_argument('-nS' ,type=int,metavar="Step-Start",default=nS,help="Starting step (default: %(default)s)")
parser.add_argument('-nE' ,type=int,metavar="Step-End" ,default=nE,help="Ending step (default: %(default)s)")
parser.add_argument('-k0',type=int,metavar="layer" ,default=k0,help="Vertical layer to plot (default: %(default)s)")
parser.add_argument('-Nblk' ,type=int,metavar="Nblk",default=Nblk,help="Number of job blocks (default: %(default)s)")
parser.add_argument('-nID' ,type=int,metavar="nID" ,default=nID,help="Block ID of this job [1-Nblk] (default: %(default)s)")
#Finalize parsing
args = parser.parse_args()
fdir = args.d
ftag = args.id + ".deltab"
nS = args.nS
nE = args.nE
k0 = args.k0
oSub = args.o
Nblk = args.Nblk
nID = args.nID
#======
#Init data
fname = fdir + "/" + ftag + ".h5"
dbdata = gampp.GameraPipe(fdir,ftag)
print("---")
#Get coordinates
CoordID,Re = dbViz.GetCoords(fname)
print("Found %s coordinate data ..."%(CoordID))
#Check vertical level
Z0 = dbViz.CheckLevel(dbdata,k0,Re)
#Set step bounds
if (nE<0):
nE = dbdata.sFin
if (nS<dbdata.s0):
nS = dbdata.s0
#Setup parallel in time stuff
vO = np.arange(nS,nE+1)
Nt = len(vO)
print("Writing %d outputs between minutes %d and %d"%(Nt,nS,nE))
if (Nblk>1):
#Figure out work bounds
dI = (Nt//Nblk)
i0 = (nID-1)*dI
i1 = i0+dI
if (nID == Nblk):
i1 = Nt #Make sure we get last bit
print("\tBlock #%d: %d to %d"%(nID,i0,i1))
else:
i0 = 0
i1 = Nt
#Setup output directory
oDir = fdir + "/" + oSub
print("Writing output to %s"%(oDir))
#Check/create directory if necessary
if (not os.path.exists(oDir)):
try:
print("Creating directory %s"%(oDir))
os.makedirs(oDir)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(oDir):
pass
else:
raise
#=====
#Do cartopy stuff
crs = ccrs.PlateCarree()
LatI,LonI,LatC,LonC = dbViz.GenUniformLL(dbdata,k0)
#=====
#Do figure stuff
cmap = kmaps.cmDiv
vQ = kv.genNorm(bMag,doSymLog=True,linP=bLin)
cbStr = r"$\Delta B_N$ [nT]"
figSz = (12,6)
fig = plt.figure(figsize=figSz)
gs = gridspec.GridSpec(3,1,height_ratios=[20,1.0,1.0],hspace=0.025)
AxM = fig.add_subplot(gs [0,0],projection=crs)
AxCB = fig.add_subplot(gs[-1,0])
kv.genCB(AxCB,vQ,cbStr,cM=cmap)
#Loop over sub-range
for i in range(i0,i1):
nStp = vO[i]
AxM.clear()
Q = dbdata.GetVar("dBn",nStp,doVerb=False)[:,:,k0]
#Get MJD to UT
MJD = kh5.tStep(fname,nStp,aID="MJD")
utS = ktools.MJD2UT([MJD])
utDT= utS[0]
#Do plot
AxM.pcolormesh(LonI,LatI,Q,norm=vQ,cmap=cmap)
#Add decoration
tStr = dbViz.GenTStr(AxM,fname,nStp)
dbViz.DecorateDBAxis(AxM,crs,utDT)
#Save
npl = vO[i]-nS
fOut = oDir+"/vid.%04d.png"%(npl)
kv.savePic(fOut,bLenX=45)

View File

@@ -1,285 +0,0 @@
#!/usr/bin/env python
"""Plot the ground magnetic field perturbations from a magnetosphere run.
Plot the ground magnetic field perturbations from a magnetosphere run.
Author
------
Kareem Sorathia (kareem.sorathia@jhuapl.edu)
Eric Winter (eric.winter@jhuapl.edu)
"""
# Import standard modules.
import argparse
import datetime
import os
# Import 3rd-party modules.
import cartopy.crs as ccrs
import matplotlib as mpl
import matplotlib.gridspec as gridspec
import matplotlib.pyplot as plt
# Import project-specific modules.
import kaipy.cdaweb_utils as cdaweb_utils
import kaipy.cmaps.kaimaps as kmaps
import kaipy.gamera.deltabViz as dbViz
import kaipy.gamera.gampp as gampp
import kaipy.kaiH5 as kh5
import kaipy.kaiTools as ktools
import kaipy.kaiViz as kv
# Program constants and defaults
# Program description.
description = "Plot the ground magnetic field perturbations for a MAGE magnetosphere run."
# Default identifier for results to read.
default_runid = "msphere"
# Plot the last step by default.
default_step = -1
# Default vertical layer to plot.
default_k0 = 0
# Do not show anomalous currents by default.
default_Jr = False
# Default output filename.
default_output_filename = "qkdbpic.png"
# Size of figure in inches (width x height).
figSz = (12, 6)
# Color to use for magnetic footprint positions.
FOOTPRINT_COLOR = 'red'
def create_command_line_parser():
"""Create the command-line argument parser.
Create the parser for command-line arguments.
Parameters
----------
None
Returns
-------
parser : argparse.ArgumentParser
Command-line argument parser for this script.
"""
parser = argparse.ArgumentParser(description=description)
parser.add_argument(
"--debug", action="store_true", default=False,
help="Print debugging output (default: %(default)s)."
)
parser.add_argument(
"-d", type=str, metavar="directory", default=os.getcwd(),
help="Directory containing data to read (default: %(default)s)"
)
parser.add_argument(
"-n", type=int, metavar="step", default=default_step,
help="Time slice to plot (default: %(default)s)"
)
parser.add_argument(
"-id", type=str, metavar="runid", default=default_runid,
help="Run ID of data (default: %(default)s)"
)
parser.add_argument(
"-Jr", action="store_true", default=default_Jr,
help="Show radial component of anomalous current (default: %(default)s)."
)
parser.add_argument(
'-k0', type=int, metavar="layer", default=default_k0,
help="Vertical layer to plot (default: %(default)s)")
parser.add_argument(
"--spacecraft", type=str, metavar="spacecraft", default=None,
help="Names of spacecraft to plot magnetic footprints, separated by commas (default: %(default)s)"
)
parser.add_argument(
"-v", "--verbose", action="store_true", default=False,
help="Print verbose output (default: %(default)s)."
)
return parser
if __name__ == "__main__":
"""Plot the ground magnetic field perturbations."""
# Set up the command-line parser.
parser = create_command_line_parser()
# Parse the command-line arguments.
args = parser.parse_args()
debug = args.debug
fdir = args.d
runid = args.id
nStp = args.n
k0 = args.k0
doJr = args.Jr
spacecraft = args.spacecraft
verbose = args.verbose
if debug:
print("args = %s" % args)
# Fetch constants.
bLin = dbViz.dbLin
bMag = dbViz.dbMag
jMag = dbViz.jMag
# Compute the name of the file containing the ground magnetic field perturbations.
ftag = runid + ".deltab"
if debug:
print("ftag = %s" % ftag)
# Read the ground magnetic field perturbations.
fname = os.path.join(fdir, ftag + ".h5")
if debug:
print("fname = %s" % fname)
dbdata = gampp.GameraPipe(fdir, ftag)
if debug:
print("dbdata = %s" % dbdata)
print("---")
# Get the ID of the coordinate system, and the Earth radius.
CoordID, Re = dbViz.GetCoords(fname)
print("Found %s coordinate data ..." % CoordID)
if debug:
print("CoordID = %s" % CoordID)
print("Re = %s" % Re)
# If the last simulation step was requested, get the step number.
if nStp < 0:
nStp = dbdata.sFin
print("Using Step %d" % nStp)
# Check the vertical level.
Z0 = dbViz.CheckLevel(dbdata, k0, Re)
if debug:
print("Z0 = %s" % Z0)
# If currents were requested, read them. Otherwise, read the ground
# magnetic field perturbations.
if (doJr):
print("Reading Jr ...")
Jr = dbdata.GetVar("dbJ", nStp, doVerb=False)[:, :, k0]
Q = Jr
else:
dBn = dbdata.GetVar("dBn", nStp, doVerb=True)[:, :, k0]
Q = dBn
# Convert MJD to UT.
MJD = kh5.tStep(fname, nStp, aID="MJD")
if debug:
print("MJD = %s" % MJD)
utS = ktools.MJD2UT([MJD])
if debug:
print("utS = %s" % utS)
utDT= utS[0]
if debug:
print("utDT = %s" % utDT)
# Create the mapping grid.
crs = ccrs.PlateCarree()
if debug:
print("ccrs = %s" % ccrs)
LatI, LonI, LatC, LonC = dbViz.GenUniformLL(dbdata, k0)
if debug:
print("LatI = %s" % LatI)
print("LonI = %s" % LonI)
print("LatC = %s" % LatC)
print("LonC = %s" % LonC)
# Fetch the color map.
cmap = kmaps.cmDiv
if debug:
print("cmap = %s" % cmap)
# Determine color bar settings.
if (doJr):
vQ = kv.genNorm(jMag)
cbStr = "Anomalous current"
else:
vQ = kv.genNorm(bMag, doSymLog=True, linP=bLin)
cbStr = r"$\Delta B_N$ [nT]"
if debug:
print("vQ = %s" % vQ)
print("cbStr = %s" % cbStr)
# Create plot in memory.
mpl.use("Agg")
# Create the figure to hold the plot.
fig = plt.figure(figsize=figSz)
# Specify the grid for the subplots.
gs = gridspec.GridSpec(3, 1, height_ratios=[20, 1.0, 1.0], hspace=0.025)
# Create the subplots.
AxM = fig.add_subplot(gs[0, 0], projection=crs)
AxCB = fig.add_subplot(gs[-1, 0])
# Make the plot.
AxM.pcolormesh(LonI, LatI, Q, norm=vQ, cmap=cmap)
# If requested, overlay the spacecraft magnetic footprints.
if spacecraft:
print("Overplotting magnetic footprints of %s." % spacecraft)
# Split the list into individual spacecraft names.
spacecraft = spacecraft.split(',')
# Fetch the position of each footprint pair from CDAWeb.
for sc in spacecraft:
# Fetch the northern footprint position.
fp_nlat, fp_nlon = cdaweb_utils.fetch_satellite_magnetic_northern_footprint_position(
sc, utS[0]
)
if debug:
print("fp_nlat, fp_nlon = %s, %s" % (fp_nlat, fp_nlon))
# Fetch the southern footprint position.
fp_slat, fp_slon = cdaweb_utils.fetch_satellite_magnetic_southern_footprint_position(
sc, utS[0]
)
if debug:
print("fp_slat, fp_slon = %s, %s" % (fp_slat, fp_slon))
# Plot a labelled dot at the location of each footprint.
# Skip if no footprint position found.
if fp_nlon is not None:
AxM.plot(fp_nlon, fp_nlat, 'o', c=FOOTPRINT_COLOR)
lon_nudge = 2.0
lat_nudge = 2.0
AxM.text(fp_nlon + lon_nudge, fp_nlat + lat_nudge, sc + ' (N)')
else:
print("No northern footprint found for spacecraft %s." % sc)
if fp_slon is not None:
AxM.plot(fp_slon, fp_slat, 'o', c=FOOTPRINT_COLOR)
lon_nudge = 2.0
lat_nudge = 2.0
AxM.text(fp_slon + lon_nudge, fp_slat + lat_nudge, sc + ' (S)')
else:
print("No southern footprint found for spacecraft %s." % sc)
# Make the colorbar.
kv.genCB(AxCB, vQ, cbStr, cM=cmap)
# Add labels and other decorations.
tStr = dbViz.GenTStr(AxM, fname, nStp)
if debug:
print("tStr = %s" % tStr)
dbViz.DecorateDBAxis(AxM, crs, utDT)
# Save the figure.
fOut = default_output_filename
if debug:
print("fOut = %s" % fOut)
kv.savePic(fOut)
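# Example invocation (hypothetical script name; flags as defined above):
#   python qkdbpic.py -d ./run -id msphere -n 120 -Jr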

View File

@@ -1,112 +0,0 @@
#!/usr/bin/env python
#Make a plot of Dst from Gamera-RCM
import argparse
from argparse import RawTextHelpFormatter
import matplotlib as mpl
mpl.use('Agg')
import h5py
import matplotlib.pyplot as plt
import kaipy.gamera.magsphere as msph
import kaipy.kaiViz as kv
import numpy as np
import kaipy.kaiTools as kt
import datetime
from matplotlib import dates
import matplotlib.gridspec as gridspec
from astropy.time import Time
import os
import kaipy.kaiH5 as kaiH5
if __name__ == "__main__":
#Defaults
fdir = os.getcwd()
ftag = "msphere"
swfname = "bcwind.h5"
tpad = 8 #Number of hours beyond MHD to plot
iMax = -1
doDPS = False
MainS = """Creates simple plot comparing SYM-H from OMNI dataset to Gamera-RCM.
Run in, or point to, a directory that has the bcwind and msphere volt files of interest
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-d',type=str,metavar="directory",default=fdir,help="Directory to read from (default: %(default)s)")
parser.add_argument('-id',type=str,metavar="runid",default=ftag,help="RunID of data (default: %(default)s)")
parser.add_argument('-tpad',type=float,metavar="time padding",default=tpad,help="Time beyond MHD data (in hours) to plot (default: %(default)s)")
parser.add_argument('-swfile',type=str,metavar='filename',default=swfname,help="Solar wind file name (default: %(default)s)")
parser.add_argument('--dps',action='store_true',help="Also plot the DPS Dst (default: %(default)s)")
#Finalizing parsing
args = parser.parse_args()
fdir = args.d
tpad = args.tpad
swfname = args.swfile
doDPS = args.dps
ftag = args.id
#UT formats for plotting
isotfmt = '%Y-%m-%dT%H:%M:%S.%f'
t0fmt = '%Y-%m-%d %H:%M:%S'
utfmt = '%H:%M \n%Y-%m-%d'
fBC = os.path.join(fdir, swfname)
kaiH5.CheckOrDie(fBC)
ut_symh,tD,dstD = kt.GetSymH(fBC)
fvolt = os.path.join(fdir,ftag+".volt.h5")
BSDst = kaiH5.getTs(fvolt,sIds=None,aID="BSDst")
MJD = kaiH5.getTs(fvolt,sIds=None,aID="MJD")
if doDPS:
DPSDst = kaiH5.getTs(fvolt,sIds=None,aID="DPSDst")
I = np.isinf(MJD)
MJD0 = MJD[~I].min()-1
MJD[I] = MJD0
tScl = 1.0/(60.0*60)
UT = Time(MJD,format='mjd').isot
ut = [datetime.datetime.strptime(UT[n],isotfmt) for n in range(len(UT))]
if iMax != -1:
iMax = min(len(ut)-1,iMax)
else:
iMax = len(ut)-1
# Remove Restart Step. Tends to cause weird artifacts
deldt = []
for it in range(iMax,1,-1):
dt = ut[it] - ut[it-1]
dt = dt.total_seconds()
if dt < 2.:
deldt.append(it)
BSDst = np.delete( BSDst,deldt )
ut = np.delete( ut,deldt )
if doDPS:
DPSDst = np.delete( DPSDst,deldt )
LW = 0.75
fSz = (14,7)
fig = plt.figure(figsize=fSz)
gs = gridspec.GridSpec(1,1,hspace=0.05,wspace=0.05)
ax=fig.add_subplot(gs[0,0])
ax.plot(ut_symh,dstD,label="SYM-H",linewidth=2*LW)
ax.plot(ut,BSDst,label="Biot-Savart Dst",linewidth=LW)
if doDPS:
ax.plot(ut,DPSDst,label="Dessler-Parker-Sckopke Dst",linewidth=LW)
ax.legend(loc='upper right',fontsize="small",ncol=2)
ax.axhline(color='magenta',linewidth=0.5*LW)
ax.xaxis_date()
xfmt = dates.DateFormatter(utfmt)
ax.set_ylabel("Dst [nT]")
ax.xaxis.set_major_formatter(xfmt)
xMinD = np.array(ut_symh).min()
xMaxD = np.array(ut_symh).max()
xMinS = np.array(ut).min()
xMaxS = np.array(ut).max()
if (xMaxD>xMaxS):
xMax = min(xMaxS+datetime.timedelta(hours=tpad),xMaxD)
else:
xMax = xMaxS
xMin = xMinD
ax.set_xlim(xMin,xMax)
kv.savePic("qkdstpic.png")

View File

@@ -1,255 +0,0 @@
#!/usr/bin/env python
#Make video of error between two Gamera cases
import argparse
from argparse import RawTextHelpFormatter
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import kaipy.kaiViz as kv
import matplotlib.gridspec as gridspec
import numpy as np
import kaipy.gamera.msphViz as mviz
import kaipy.gamera.magsphere as msph
import kaipy.gamera.rcmpp as rcmpp
from alive_progress import alive_bar
import kaipy.kdefs as kdefs
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
import os
import errno
import subprocess
import shutil
import concurrent.futures
import multiprocessing
import traceback
cLW = 0.25
relColor = "tab:blue"
absColor = "tab:orange"
def makeMovie(frame_dir,movie_name):
frame_pattern = frame_dir + "/vid.%04d.png"
movie_file = os.getcwd() + "/" + movie_name + ".mp4"
ffmpegExe = "ffmpeg"
if shutil.which(ffmpegExe) is None:
ffmpegExe = "ffmpeg4"
if shutil.which(ffmpegExe) is None:
print("Could not find any ffmpeg executable. Video will not be generated.")
return
cmd = [
ffmpegExe, "-nostdin", "-i", frame_pattern,
"-vcodec", "libx264", "-crf", "14", "-profile:v", "high", "-pix_fmt", "yuv420p",
movie_file,"-y"
]
subprocess.run(cmd, check=True)
# the errTimes, errListRel, errListAbs arguments are shared (manager) lists passed by reference, so makeImage can append to them in place
def makeImage(i,gsph1,gsph2,tOut,doVerb,xyBds,fnList,oDir,errTimes,errListRel,errListAbs,cv,dataCounter):
if doVerb:
print("Making image %d"%(i))
#Convert time (in seconds) to Step #
nStp = np.abs(gsph1.T-tOut[i]).argmin()+gsph1.s0
if doVerb:
print("Minute = %5.2f / Step = %d"%(tOut[i]/60.0,nStp))
npl = vO[i]
#======
#Setup figure
fig = plt.figure(figsize=figSz)
gs = gridspec.GridSpec(5,2,height_ratios=[20,5,1,5,9],hspace=0.025)
AxTL = fig.add_subplot(gs[0,0])
AxTR = fig.add_subplot(gs[0,1])
AxB = fig.add_subplot(gs[-1,0:2])
AxB2 = AxB.twinx() # second plot on bottom axis
AxCT = fig.add_subplot(gs[2,0:2])
AxTL.clear()
AxTR.clear()
AxB.clear()
AxB2.clear()
#plot upper left msph error
mviz.PlotEqErrRel(gsph1,gsph2,nStp,xyBds,AxTL,fnList,AxCB=AxCT,doVerb=doVerb)
AxTL.set_title("Equatorial Slice of Relative Error")
#plot upper right k-axis error
mviz.PlotLogicalErrRel(gsph1,gsph2,nStp,AxTR,fnList,2,doVerb=doVerb)
AxTR.set_title("Per-Cell Relative Error along K-Axis")
if (not noMPI):
#plot I-MPI decomp on logical plot
if(gsph2.Ri > 1):
for im in range(gsph2.Ri):
i0 = im*gsph2.dNi
AxTR.plot([i0, i0],[0, gsph2.Nj],"deepskyblue",linewidth=0.25,alpha=0.5)
#plot J-MPI decomp on logical plot
if (gsph2.Rj>1):
for jm in range(1,gsph2.Rj):
j0 = jm*gsph2.dNj
AxTR.plot([0, gsph2.Ni],[j0, j0],"deepskyblue",linewidth=0.25,alpha=0.5)
#plot bottom line plot
etval = tOut[i]/60.0
erval = mviz.CalcTotalErrRel(gsph1,gsph2,nStp,fnList,doVerb=doVerb)
eaval = mviz.CalcTotalErrAbs(gsph1,gsph2,nStp,fnList,doVerb=doVerb)
# this section of the code must be performed sequentially so that data are added to the line plots one at a time
with cv:
while not dataCounter.value == i:
cv.wait()
errTimes.append(etval)
errListRel.append(erval)
errListAbs.append(eaval)
if noLog:
AxB.plot(errTimes, errListRel,color=relColor)
AxB2.plot(errTimes, errListAbs,color=absColor)
else:
AxB.semilogy(errTimes, errListRel,color=relColor)
AxB2.semilogy(errTimes, errListAbs,color=absColor)
dataCounter.value = i+1
cv.notify_all()
# end of sequential region
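# The manager's Condition plus the shared dataCounter serialize only the appends to the
# error time series (and the line-plot updates): frames still render in parallel across
# processes, but the running error curves are always built in time order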
AxB.set_xlabel('Time (min)')
AxB.set_ylabel('Per-Cell Mean Relative Error',color=relColor)
AxB.tick_params(axis='y',which='both',colors=relColor,left=True,right=True,labelleft=True,labelright=False)
AxB2.set_ylabel('Per-Cell Mean Absolute Error',color=absColor)
#AxB2.yaxis.tick_right()
AxB2.tick_params(axis='y',which='both',colors=absColor,left=True,right=True,labelleft=False,labelright=True)
AxB.set_title("'" + fieldNames + "' Per-Cell Error Over Time")
gsph1.AddTime(nStp,AxTL,xy=[0.025,0.84],fs="x-large")
#Add MPI decomp
if (not noMPI):
mviz.PlotMPI(gsph2,AxTL)
fOut = oDir+"/vid.%04d.png"%(npl)
kv.savePic(fOut,bLenX=45,saveFigure=fig,doClose=True)
if __name__ == "__main__":
#Defaults
fdir1 = os.getcwd()
ftag1 = "msphere"
fdir2 = os.getcwd()
ftag2 = "msphere"
oDir = "vid2D"
ts = 0 #[min]
te = 200 #[min]
dt = 0.0 #[sec] 0 default means every timestep
Nth = 1 #Number of threads
noMPI = False # Don't add MPI tiling
noLog = False
fieldNames = "Bx, By, Bz"
doVerb = False
skipMovie = False
MainS = """Creates simple multi-panel figure for Gamera magnetosphere run
Left Panel - Residual vertical magnetic field
Right Panel - Pressure (or density) and hemispherical insets
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-d1',type=str,metavar="directory",default=fdir1,help="Directory to read first dataset from (default: %(default)s)")
parser.add_argument('-id1',type=str,metavar="runid",default=ftag1,help="RunID of first dataset (default: %(default)s)")
parser.add_argument('-d2',type=str,metavar="directory",default=fdir2,help="Directory to read second dataset from (default: %(default)s)")
parser.add_argument('-id2',type=str,metavar="runid",default=ftag2,help="RunID of second dataset (default: %(default)s)")
parser.add_argument('-o',type=str,metavar="directory",default=oDir,help="Subdirectory to write to (default: %(default)s)")
parser.add_argument('-ts' ,type=int,metavar="tStart",default=ts,help="Starting time [min] (default: %(default)s)")
parser.add_argument('-te' ,type=int,metavar="tEnd" ,default=te,help="Ending time [min] (default: %(default)s)")
parser.add_argument('-dt' ,type=int,metavar="dt" ,default=dt,help="Cadence [sec] (default: %(default)s)")
parser.add_argument('-Nth' ,type=int,metavar="Nth",default=Nth,help="Number of threads to use (default: %(default)s)")
parser.add_argument('-f',type=str,metavar="fieldnames",default=fieldNames,help="Comma-separated fields to plot (default: %(default)s)")
parser.add_argument('-linear',action='store_true', default=noLog,help="Plot linear line plot instead of logarithmic (default: %(default)s)")
parser.add_argument('-v',action='store_true', default=doVerb,help="Do verbose output (default: %(default)s)")
parser.add_argument('-skipMovie',action='store_true', default=skipMovie,help="Skip automatic movie generation afterwards (default: %(default)s)")
#parser.add_argument('-nompi', action='store_true', default=noMPI,help="Don't show MPI boundaries (default: %(default)s)")
mviz.AddSizeArgs(parser)
#Finalize parsing
args = parser.parse_args()
fdir1 = args.d1
ftag1 = args.id1
fdir2 = args.d2
ftag2 = args.id2
ts = args.ts
te = args.te
dt = args.dt
oSub = args.o
Nth = args.Nth
fieldNames = args.f
noLog = args.linear
doVerb = args.v
#noMPI = args.noMPI
fnList = [item.strip() for item in fieldNames.split(',')]
#Setup output directory
oDir = os.getcwd() + "/" + oSub
print("Writing output to %s"%(oDir))
#Check/create directory if necessary
if (not os.path.exists(oDir)):
try:
print("Creating directory %s"%(oDir))
os.makedirs(oDir)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(oDir):
pass
else:
raise
#Get domain size
xyBds = mviz.GetSizeBds(args)
#---------
#Figure parameters
figSz = (12,7.5)
#======
#Init data
gsph1 = msph.GamsphPipe(fdir1,ftag1)
gsph2 = msph.GamsphPipe(fdir2,ftag2)
#Setup timing info
if(dt > 0):
tOut = np.arange(ts*60.0,te*60.0,dt)
else:
tOut = [t for t in gsph1.T if t > ts*60.0 and t < te*60.0]
Nt = len(tOut)
vO = np.arange(0,Nt)
print("Writing %d outputs between minutes %d and %d"%(Nt,ts,te))
print("Using %d threads"%(Nth))
errTimes = []
errListRel = []
errListAbs = []
#Loop over sub-range
titstr = "Comparing '%s' to '%s'"%(fdir1,fdir2)
with alive_bar(Nt,title=titstr.ljust(kdefs.barLab),length=kdefs.barLen,disable=doVerb) as bar:
#with concurrent.futures.ThreadPoolExecutor(max_workers=Nth) as executor:
with concurrent.futures.ProcessPoolExecutor(max_workers=Nth) as executor:
m = multiprocessing.Manager()
cv = m.Condition()
dataCounter = m.Value('i',0)
met = m.list(errTimes)
melr = m.list(errListRel)
mela = m.list(errListAbs)
#imageFutures = {executor.submit(makeImage,i,gsph1,gsph2,tOut,doVerb,xyBds,fnList,oDir,errTimes,errListRel,errListAbs,cv): i for i in range(0,Nt)}
imageFutures = {executor.submit(makeImage,i,gsph1,gsph2,tOut,doVerb,xyBds,fnList,oDir,met,melr,mela,cv,dataCounter): i for i in range(0,Nt)}
for future in concurrent.futures.as_completed(imageFutures):
try:
retVal = future.result()
except Exception as e:
print("Exception")
print(e)
traceback.print_exc()
exit()
bar()
makeMovie(oDir,oSub)

View File

@@ -1,99 +0,0 @@
#!/usr/bin/env python
#Make a plot of the error between two Gamera cases at a single step
import argparse
from argparse import RawTextHelpFormatter
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import kaipy.kaiViz as kv
import matplotlib.gridspec as gridspec
import numpy as np
import kaipy.gamera.msphViz as mviz
import kaipy.gamera.magsphere as msph
import kaipy.gamera.rcmpp as rcmpp
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
import os
import errno
cLW = 0.25
if __name__ == "__main__":
#Defaults
fdir1 = os.getcwd()
ftag1 = "msphere"
fdir2 = os.getcwd()
ftag2 = "msphere"
nStp=1
fieldNames = "Bx, By, Bz"
doMPI = False #[Add MPI tiling]
noMPI = False
MainS = """Creates simple multi-panel figure for Gamera magnetosphere run
Left Panel - Residual vertical magnetic field
Right Panel - Pressure (or density) and hemispherical insets
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-d1',type=str,metavar="directory1",default=fdir1,help="Directory to read first dataset from (default: %(default)s)")
parser.add_argument('-id1',type=str,metavar="runid1",default=ftag1,help="RunID of first dataset (default: %(default)s)")
parser.add_argument('-d2',type=str,metavar="directory2",default=fdir2,help="Directory to read second dataset from (default: %(default)s)")
parser.add_argument('-id2',type=str,metavar="runid2",default=ftag2,help="RunID of second dataset (default: %(default)s)")
parser.add_argument('-n',type=int,metavar="nStp",default=nStp,help="Step number to plot (default: %(default)s)")
parser.add_argument('-f',type=str,metavar="fieldnames",default=fieldNames,help="Comma-separated fields to plot (default: %(default)s)")
#parser.add_argument('-nompi', action='store_true', default=noMPI,help="Don't show MPI boundaries (default: %(default)s)")
mviz.AddSizeArgs(parser)
#Finalize parsing
args = parser.parse_args()
fdir1 = args.d1
ftag1 = args.id1
fdir2 = args.d2
ftag2 = args.id2
nStp = args.n
fieldNames = args.f
oName = "gamErrPic.png"
#Get domain size
xyBds = mviz.GetSizeBds(args)
#---------
#Figure parameters
figSz = (12,7.5)
#======
#Init data
gsph1 = msph.GamsphPipe(fdir1,ftag1)
gsph2 = msph.GamsphPipe(fdir2,ftag2)
#======
#Setup figure
fig = plt.figure(figsize=figSz)
gs = gridspec.GridSpec(2,2,height_ratios=[20,1],hspace=0.025)
AxL = fig.add_subplot(gs[0,0])
AxR = fig.add_subplot(gs[0,1])
AxCL = fig.add_subplot(gs[-1,0])
AxCR = fig.add_subplot(gs[-1,1])
fnList = [item.strip() for item in fieldNames.split(',')]
AxL.clear()
AxR.clear()
mviz.PlotEqErrRel(gsph1,gsph2,nStp,xyBds,AxL,fnList,AxCB=AxCL)
mviz.PlotEqErrAbs(gsph1,gsph2,nStp,xyBds,AxR,fnList,AxCB=AxCR)
gsph1.AddTime(nStp,AxL,xy=[0.025,0.89],fs="x-large")
#Add MPI decomp
if (doMPI):
mviz.PlotMPI(gsph2,AxL)
mviz.PlotMPI(gsph2,AxR)
kv.savePic(oName,bLenX=45)

View File

@@ -1,195 +0,0 @@
#!/usr/bin/env python
#Make video of Gamera magnetosphere run
import argparse
from argparse import RawTextHelpFormatter
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import kaipy.kaiViz as kv
import matplotlib.gridspec as gridspec
import numpy as np
import kaipy.gamera.msphViz as mviz
import kaipy.remix.remix as remix
import kaipy.gamera.magsphere as msph
import kaipy.gamera.gampp as gampp
import kaipy.gamera.rcmpp as rcmpp
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
import os
import errno
cLW = 0.25
if __name__ == "__main__":
#Defaults
fdir = os.getcwd()
ftag = "msphere"
oDir = "vid2D"
doDen = False
ts = 0 #[min]
te = 200 #[min]
dt = 60.0 #[sec]
doBig = False #[Use big window]
noIon = False
noRCM = False
doMPI = False #[Add MPI tiling]
Nblk = 1 #Number of blocks
nID = 1 #Block ID of this job
noMPI = False
doJy = False
doBz = False
doBigRCM = False
MainS = """Creates simple multi-panel figure for Gamera magnetosphere run
Left Panel - Residual vertical magnetic field
Right Panel - Pressure (or density) and hemispherical insets
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-d',type=str,metavar="directory",default=fdir,help="Directory to read from (default: %(default)s)")
parser.add_argument('-id',type=str,metavar="runid",default=ftag,help="RunID of data (default: %(default)s)")
parser.add_argument('-o',type=str,metavar="directory",default=oDir,help="Subdirectory to write to (default: %(default)s)")
parser.add_argument('-ts' ,type=int,metavar="tStart",default=ts,help="Starting time [min] (default: %(default)s)")
parser.add_argument('-te' ,type=int,metavar="tEnd" ,default=te,help="Ending time [min] (default: %(default)s)")
parser.add_argument('-dt' ,type=int,metavar="dt" ,default=dt,help="Cadence [sec] (default: %(default)s)")
parser.add_argument('-Nblk' ,type=int,metavar="Nblk",default=Nblk,help="Number of job blocks (default: %(default)s)")
parser.add_argument('-nID' ,type=int,metavar="nID" ,default=nID,help="Block ID of this job [1-Nblk] (default: %(default)s)")
#parser.add_argument('-nompi', action='store_true', default=noMPI,help="Don't show MPI boundaries (default: %(default)s)")
parser.add_argument('-bz' , action='store_true', default=doBz ,help="Show Bz instead of dBz (default: %(default)s)")
parser.add_argument('-jy' , action='store_true', default=doJy ,help="Show Jy instead of pressure (default: %(default)s)")
parser.add_argument('-bigrcm', action='store_true',default=doBigRCM,help="Show entire RCM domain (default: %(default)s)")
parser.add_argument('-noion', action='store_true', default=noIon,help="Don't show ReMIX data (default: %(default)s)")
parser.add_argument('-norcm', action='store_true', default=noRCM,help="Don't show RCM data (default: %(default)s)")
mviz.AddSizeArgs(parser)
#Finalize parsing
args = parser.parse_args()
fdir = args.d
ftag = args.id
ts = args.ts
te = args.te
dt = args.dt
oSub = args.o
Nblk = args.Nblk
nID = args.nID
doJy = args.jy
doBz = args.bz
doBigRCM = args.bigrcm
#Setup timing info
tOut = np.arange(ts*60.0,te*60.0,dt)
Nt = len(tOut)
vO = np.arange(0,Nt)
print("Writing %d outputs between minutes %d and %d"%(Nt,ts,te))
if (Nblk>1):
#Figure out work bounds
dI = (Nt//Nblk)
i0 = (nID-1)*dI
i1 = i0+dI
if (nID == Nblk):
i1 = Nt #Make sure we get last bit
print("\tBlock #%d: %d to %d"%(nID,i0,i1))
else:
i0 = 0
i1 = Nt
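#Illustrative example of the block split above: with Nt=100 outputs and Nblk=3,
#block 1 covers indices 0-32, block 2 covers 33-65, and block 3 covers 66-99
#(the final block absorbs the remainder).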
#Setup output directory
oDir = fdir + "/" + oSub
print("Writing output to %s"%(oDir))
#Check/create directory if necessary
if (not os.path.exists(oDir)):
try:
print("Creating directory %s"%(oDir))
os.makedirs(oDir)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(oDir):
pass
else:
raise
#Get domain size
xyBds = mviz.GetSizeBds(args)
#---------
#Figure parameters
figSz = (12,7.5)
#======
#Init data
gsph = msph.GamsphPipe(fdir,ftag)
#Check for remix
rcmChk = fdir + "/%s.mhdrcm.h5"%(ftag)
rmxChk = fdir + "/%s.mix.h5"%(ftag)
doRCM = os.path.exists(rcmChk)
doMIX = os.path.exists(rmxChk)
if (doRCM and (not args.norcm)):
print("Found RCM data")
rcmdata = gampp.GameraPipe(fdir,ftag+".mhdrcm")
mviz.vP = kv.genNorm(1.0e-2,100.0,doLog=True)
rcmpp.doEll = not doBigRCM
if (doMIX and (not args.noion)):
print("Found ReMIX data")
#======
#Setup figure
fig = plt.figure(figsize=figSz)
gs = gridspec.GridSpec(3,6,height_ratios=[20,1,1],hspace=0.025)
AxL = fig.add_subplot(gs[0,0:3])
AxR = fig.add_subplot(gs[0,3:])
AxC1 = fig.add_subplot(gs[-1,0:2])
AxC2 = fig.add_subplot(gs[-1,2:4])
AxC3 = fig.add_subplot(gs[-1,4:6])
cbM = kv.genCB(AxC2,kv.genNorm(remix.facMax),"FAC",cM=remix.facCM,Ntk=4)
AxC2.xaxis.set_ticks_position('top')
#Loop over sub-range
for i in range(i0,i1):
#Convert time (in seconds) to Step #
nStp = np.abs(gsph.T-tOut[i]).argmin()+gsph.s0
print("Minute = %5.2f / Step = %d"%(tOut[i]/60.0,nStp))
npl = vO[i]
AxL.clear()
AxR.clear()
Bz = mviz.PlotEqB(gsph,nStp,xyBds,AxL,AxC1,doBz=doBz)
if (doJy):
mviz.PlotJyXZ(gsph,nStp,xyBds,AxR,AxC3)
else:
mviz.PlotMerid(gsph,nStp,xyBds,AxR,doDen,doRCM,AxC3)
gsph.AddTime(nStp,AxL,xy=[0.025,0.89],fs="x-large")
gsph.AddSW(nStp,AxL,xy=[0.625,0.025],fs="small")
#Add inset RCM plot
if (doRCM and (not args.norcm)):
AxRCM = inset_axes(AxL,width="30%",height="30%",loc=3)
rcmpp.RCMInset(AxRCM,rcmdata,nStp,mviz.vP)
AxRCM.contour(kv.reWrap(gsph.xxc),kv.reWrap(gsph.yyc),kv.reWrap(Bz),[0.0],colors=mviz.bz0Col,linewidths=mviz.cLW)
rcmpp.AddRCMBox(AxL)
if (doMIX and (not args.noion)):
ion = remix.remix(rmxChk,nStp)
gsph.AddCPCP(nStp,AxR,xy=[0.610,0.925])
mviz.AddIonBoxes(gs[0,3:],ion)
#Add MPI decomp
if (doMPI):
mviz.PlotMPI(gsph,AxL)
mviz.PlotMPI(gsph,AxR)
fOut = oDir+"/vid.%04d.png"%(npl)
kv.savePic(fOut,bLenX=45)

File diff suppressed because it is too large

View File

@@ -1,657 +0,0 @@
#!/usr/bin/env python
"""Make a quick-look plot for a gamhelio run.
Make a quick-look plot for a gamhelio run.
Several different sets of plots are supported, and are distinguished by the
value of the "pic" argument.
pic1 (default): A 4-panel display showing pcolormesh plots in the z = 0
(equatorial) plane of the gamhelio frame used in the simulation. The plots
are (r0 is the inner radius of the grid, which should be 21.5 Rsun):
Upper left: Solar wind speed (km/s)
Upper right: Solar wind number density scaled by (r/r0)**2 (cm**-3)
Lower left: Solar wind temperature scaled by r/r0 (MK)
Lower right: Solar wind radial magnetic field scaled by (r/r0)**2 (nT)
pic2: A 4-panel display showing pcolormesh plots in the y = 0 (meridional,
containing Earth) plane of the gamhelio frame used in the simulation. The
plots are (r0 is the inner radius of the grid, which should be 21.5 Rsun):
Upper left: Solar wind speed (km/s)
Upper right: Solar wind number density scaled by (r/r0)**2 (cm**-3)
Lower left: Solar wind temperature scaled by r/r0 (MK)
Lower right: Solar wind radial magnetic field scaled by (r/r0)**2 (nT)
pic3: A 4-panel display showing pcolormesh plots in the r = 1 AU slice of the
gamhelio frame used in the simulation. The plots are:
Upper left: Solar wind speed (km/s)
Upper right: Solar wind number density (cm**-3)
Lower left: Solar wind temperature (MK)
Lower right: Solar wind radial magnetic field (nT)
pic4: A pcolormesh plot in the innermost radial slice (r = 22 Rsun) of the
gamhelio frame used in the simulation. The plot shows the radial magnetic
field in nT, in a coordinate frame rotating with the Sun.
pic5: A 3-panel display showing solar wind variables as a function of radius,
22 Rsun <= r <= 220 Rsun. The plots are:
Upper left: Solar wind number density (cm**-3)
Upper right: Solar wind speed (km/s)
Lower left: Solar wind radial momentum flux (km**2/s**2/cm**3)
pic6: A 4-panel display showing components of the solar wind magnetic field
in the solar equatorial plane (z=0), for -200 Rsun <= X, Y <= +200 Rsun.
Upper left: Radial component of magnetic field (nT)
Upper right: x-component of magnetic field (nT)
Lower left: y-component of magnetic field (nT)
Lower right: z-component of magnetic field (nT)
pic7: A 4-panel display showing pcolormesh plots in a j-slice. A j-slice is
a slice through the gamhelio data cube at a fixed colatitude. j = 0 corresponds
to the YZ plane of the gamhelio frame used in the simulation. The j = Nj/2-1
slice corresponds to the equatorial plane. The plots are:
Upper left: Solar wind speed (km/s)
Upper right: Solar wind number density scaled by (r/r0)**2 (cm**-3)
Lower left: Solar wind temperature scaled by r/r0 (MK)
Lower right: Solar wind radial magnetic field scaled by r/r0 (nT)
Authors
-------
Elena Provornikova (elena.provornikova@jhuapl.edu)
Andrew McCubbin (andrew.mccubbin@jhuapl.edu)
Eric Winter (eric.winter@jhuapl.edu)
"""
# Import standard modules.
import argparse
import os
import time
# Import supplemental modules.
import astropy
from astropy.coordinates import SkyCoord
import astropy.units as u
import matplotlib as mpl
from matplotlib import gridspec
import matplotlib.pyplot as plt
import numpy as np
import spacepy.datamodel as dm
from sunpy.coordinates import frames
# Import project-specific modules.
from kaipy import cdaweb_utils
import kaipy.gamhelio.helioViz as hviz
import kaipy.gamhelio.heliosphere as hsph
import kaipy.kaiH5 as kh5
import kaipy.kaiTools as ktools
import kaipy.kaiViz as kv
from kaipy.satcomp import scutils
# Program constants and defaults
# Program description.
DESCRIPTION = "Make a quicklook plot for a gamhelio run."
# Default identifier for results to read.
DEFAULT_RUNID = "wsa"
# List of steps
DEFAULT_STEPS = "1"
# Default slices
DEFAULT_SLICE = None
# Code for default picture type.
DEFAULT_PICTYPE = "pic1"
# Colors to use for spacecraft position symbols.
SPACECRAFT_COLORS = list(mpl.colors.TABLEAU_COLORS.keys())
def create_command_line_parser():
"""Create the command-line argument parser.
Create the parser for command-line arguments.
Parameters
----------
None
Returns
-------
parser : argparse.ArgumentParser
Command-line argument parser for this script.
Raises
------
None
"""
parser = argparse.ArgumentParser(description=DESCRIPTION)
parser.add_argument(
"--debug", action="store_true",
help="Print debugging output (default: %(default)s)."
)
parser.add_argument(
"--directory", "-d", type=str, metavar="directory",
default=os.getcwd(),
help="Directory containing data to read (default: %(default)s)"
)
parser.add_argument(
"--hgsplot", action="store_true",
help="Plot in the Heliographic Stonyhurst frame corresponding to the "
"date of the plot (default: %(default)s)."
)
parser.add_argument(
"-id", type=str, metavar="runid", default=DEFAULT_RUNID,
help="Run ID of data (default: %(default)s)"
)
parser.add_argument(
"-jslice", type=int, metavar="jSlice", default=None,
help="Index of j-slice for pic7 (default: Nj/2-1)"
)
parser.add_argument(
"-lon", type=float, metavar="lon", default=0.0,
help="Longitude of meridian slice (pic2) (default: %(default)s)"
)
parser.add_argument(
"--nlist", type=lambda n: [int(item) for item in n.split(',')],
metavar="list of steps", default=DEFAULT_STEPS,
help="List of time slice(s) n1,n2,... to plot (default: %(default)s)"
)
parser.add_argument(
"--nslice", type=lambda n: [int(item) for item in n.split(':')],
metavar="step slice", default=DEFAULT_SLICE,
help="Slice for range of time slice(s) n1:n2 to plot "
"(default: %(default)s)"
)
parser.add_argument(
"--nworkers", "-nw", type=int, metavar="nworkers", default=4,
help="Number of parallel workers (default: %(default)s)"
)
parser.add_argument(
"--parallel", "-p", action="store_true",
help="Read from HDF5 in parallel (default: %(default)s)."
)
parser.add_argument(
"-pic", type=str, metavar="pictype",
default=DEFAULT_PICTYPE,
help="Code for plot type (pic1-pic7) (default: %(default)s)"
)
parser.add_argument(
"--spacecraft", type=str, metavar="spacecraft", default=None,
help="Names of spacecraft to plot positions, separated by commas "
"(default: %(default)s)"
)
parser.add_argument(
"--verbose", "-v", action="store_true",
help="Print verbose output (default: %(default)s)."
)
parser.add_argument(
"--inner", action="store_true",
help="Plot inner i-slice for pic3 (default: %(default)s)."
)
# Return the parser.
return parser
def initFig(pic):
"""Create the matplotlib figure for a plot.
Determine figure size (width, height) (inches) based on the pic type.
Parameters
----------
pic : str
String representing picture type.
Returns
-------
fig : mpl.Figure
Matplotlib Figure to use for plots.
Raises
------
None
"""
# Figure dimensions are in inches.
if pic == "pic1" or pic == "pic2" or pic == "pic7":
figSz = (10, 12.5)
elif pic == "pic3":
figSz = (10, 6.5)
elif pic == "pic4":
figSz = (10, 6)
elif pic == "pic5":
figSz = (12, 12)
elif pic == "pic6":
figSz = (10, 12.5)
# Create the figure.
fig = plt.figure(figsize=figSz, layout="constrained")
return fig
def fOut(runid, pic, nStp, hgsplot):
"""Compute the name of the output file.
Compute the name of the output file.
Parameters
----------
runid : str
ID string for run
pic : str
String representing picture type.
nStp : int
Simulation step number used in plot.
hgsplot : bool
True if plot is in HGS frame at the date of the plot
Returns
-------
s : str
Name of file to receive the plot.
Raises
------
None
"""
if hgsplot:
s = f"qkpic_{runid}_{pic}_HGS_n{nStp}.png"
else:
s = f"qkpic_{runid}_{pic}_n{nStp}.png"
return s
def GHtoHGS(mjd_gh, x_gh, y_gh, z_gh, mjd_hgs):
"""Convert Cartesin GH coordinates to HGS.
Convert Cartesian coordinates in the gamhelio frame at time mjdc to
the Heliographic Sonyhurst frame at time mjd.
NOTE: The gamhelio frame at time t is related to the Heliographic
Stonyhurst frame at time t by the reflection of the x- and y-axes:
x_gh(t) = -x_hgs(t)
y_gh(t) = -y_hgs(t)
z_gh(t) = z_hgs(t)
Since HGS is a time-dependent frame, a time must be provided for each set
of coordinates.
Parameters
----------
mjd_gh : float
MJD of source gamhelio frame
x_gh, y_gh, z_gh : np.array of float (any shape) or scalar float
Cartesian coordinates in GH(mjdc) frame. All three arrays x, y, z must
have identical shapes.
mjd_hgs : float
MJD of target HGS frame
Returns
-------
x_hgs, y_hgs, z_hgs : np.array of float (same shape as x_gh, y_gh, z_gh)
Cartesian coordinates converted to HGS(mjd) frame.
Raises
------
None
"""
# Load the source coordinates (originally in the GH(mjd_gh) frame) into
# the equivalent HGS(mjd_gh) frame.
c_gh = SkyCoord(
-x_gh*u.Rsun, -y_gh*u.Rsun, z_gh*u.Rsun,
frame=frames.HeliographicStonyhurst,
obstime=ktools.MJD2UT(mjd_gh),
representation_type="cartesian"
)
# Create the target Heliographic Stonyhurst frame.
hgs_frame = frames.HeliographicStonyhurst(
obstime=ktools.MJD2UT(mjd_hgs)
)
# Convert the coordinates from GH(mjd_gh) to HGS(mjd_hgs).
c_hgs = c_gh.transform_to(hgs_frame)
# Extract and return the converted coordinates.
x_hgs = dm.dmarray(c_hgs.cartesian.x)
y_hgs = dm.dmarray(c_hgs.cartesian.y)
z_hgs = dm.dmarray(c_hgs.cartesian.z)
return x_hgs, y_hgs, z_hgs
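# Hedged usage sketch (values are illustrative only): convert a point 215 Rsun
# down the +x axis of the gamhelio frame defined at MJD 59000.0 into the
# Heliographic Stonyhurst frame at MJD 59000.5:
#   x_hgs, y_hgs, z_hgs = GHtoHGS(59000.0, 215.0, 0.0, 0.0, 59000.5)
# Both MJD values here are made up; any pair of valid MJDs works the same way.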
def main():
"""Make a quick-look plot for a gamhelio run."""
# Set up the command-line parser.
parser = create_command_line_parser()
# Parse the command-line arguments.
args = parser.parse_args()
if args.debug:
print(f"args = {args}")
debug = args.debug
fdir = args.directory
hgsplot = args.hgsplot
ftag = args.id
jslice = args.jslice
pic2lon = args.lon
steps = args.nlist
slices = args.nslice
nWorkers = args.nworkers
doParallel = args.parallel
pic = args.pic
spacecraft = args.spacecraft
verbose = args.verbose
inner = args.inner
if slices:
print(f"Slice selected {slice(slices[0], slices[1], slices[2])}")
# Fetch the plot domain based on the picture type.
tic = time.perf_counter()
xyBds = hviz.GetSizeBds(pic)
toc = time.perf_counter()
print(xyBds)
print(f"Get bounds took {toc - tic} s")
# Do work?
doFast = False
# Open a pipe to the results data.
tic = time.perf_counter()
gsph = hsph.GamsphPipe(fdir, ftag, doFast=doFast, doParallel=doParallel,
nWorkers=nWorkers)
toc = time.perf_counter()
print(f"Open pipe took {toc-tic} s")
# Compute the range of time steps to use.
if slices and steps[0] == 1:
steps = range(gsph.s0,gsph.sFin + 1)[slice(slices[0], slices[1], slices[2])]
print(f"steps = {steps}")
# Get the MJDc value for use in computing the gamhelio frame.
fname = gsph.f0
MJDc = scutils.read_MJDc(fname)
# Split the list into individual spacecraft names.
if spacecraft:
spacecraft = spacecraft.split(',')
# Create figures in a memory buffer.
mpl.use("Agg")
# Make a plot for each time step in the list of time steps.
for nStp in steps:
if debug:
print(f"nStp = {nStp}")
tic = time.perf_counter()
print(f"Generating {pic} for time step {nStp}.")
fig = initFig(pic)
# Extract the MJD for the step.
if any(gsph.MJDs):
mjd = gsph.MJDs[nStp-gsph.s0]
time_stamp = ktools.MJD2UT(mjd)
else:
mjd = gsph.T[nStp-gsph.s0]/(3600./gsph.tScl)
time_stamp = f"{mjd:0.2f} [hr]"
if debug:
print(f"mjd = {mjd}")
# Lay out the subplots.
if pic in ["pic1", "pic2", "pic3", "pic6", "pic7"]:
gs = gridspec.GridSpec(4, 6, height_ratios=[20, 1, 20, 1], figure=fig)
# Axes for plots.
AxL0 = fig.add_subplot(gs[0, 0:3])
AxR0 = fig.add_subplot(gs[0, 3:])
AxL1 = fig.add_subplot(gs[2, 0:3])
AxR1 = fig.add_subplot(gs[2, 3:])
# Axes for colorbars.
AxC1_0 = fig.add_subplot(gs[1, 0:3])
AxC2_0 = fig.add_subplot(gs[1, 3:])
AxC1_1 = fig.add_subplot(gs[3, 0:3])
AxC2_1 = fig.add_subplot(gs[3, 3:])
elif pic == "pic4":
gs = gridspec.GridSpec(2, 1, height_ratios=[20, 1], figure=fig)
Ax = fig.add_subplot(gs[0, 0])
AxC = fig.add_subplot(gs[1, 0])
elif pic == "pic5":
gs = gridspec.GridSpec(2, 2, figure=fig)
Ax = fig.add_subplot(gs[0, 0])
AxC = fig.add_subplot(gs[0, 1])
AxC1 = fig.add_subplot(gs[1, 0])
else:
raise TypeError(f"Invalid figure type: {pic}!")
# If the step is -1, use the last step.
if nStp < 0:
nStp = gsph.sFin
print(f"Using Step {nStp}")
# Now create the actual plots.
if pic == "pic1":
# Equatorial plots in the XY plane of the modified HGS frame used
# by gamhelio. If hgsplot is True, then the plot frame is the true
# HGS frame at the time of the plot.
hviz.PlotEqMagV(gsph, nStp, xyBds, AxL0, AxC1_0,
hgsplot=hgsplot, MJDc=MJDc, MJD_plot=mjd)
hviz.PlotEqD(gsph, nStp, xyBds, AxR0, AxC2_0,
hgsplot=hgsplot, MJDc=MJDc, MJD_plot=mjd)
hviz.PlotEqTemp(gsph, nStp, xyBds, AxL1, AxC1_1,
hgsplot=hgsplot, MJDc=MJDc, MJD_plot=mjd)
hviz.PlotEqBr(gsph, nStp, xyBds, AxR1, AxC2_1,
hgsplot=hgsplot, MJDc=MJDc, MJD_plot=mjd)
if hgsplot:
fig.suptitle("Heliographic Stonyhurst frame for "
f"{time_stamp}")
else:
fig.suptitle(f"GAMERA-Helio frame for {time_stamp}")
elif pic == "pic2":
# Meridional plots in the XZ plane of the modified HGS frame used
# by gamhelio. If hgsplot is True, then the plot frame is the true
# HGS frame at the time of the plot.
hviz.PlotMerMagV(gsph, nStp, xyBds, AxL0, AxC1_0,
indx=(None, pic2lon),
hgsplot=hgsplot, MJDc=MJDc, MJD_plot=mjd)
hviz.PlotMerDNorm(gsph, nStp, xyBds, AxR0, AxC2_0,
indx=(None, pic2lon),
hgsplot=hgsplot, MJDc=MJDc, MJD_plot=mjd)
hviz.PlotMerTemp(gsph, nStp, xyBds, AxL1, AxC1_1,
indx=(None, pic2lon),
hgsplot=hgsplot, MJDc=MJDc, MJD_plot=mjd)
hviz.PlotMerBrNorm(gsph, nStp, xyBds, AxR1, AxC2_1,
indx=(None, pic2lon),
hgsplot=hgsplot, MJDc=MJDc, MJD_plot=mjd)
if hgsplot:
fig.suptitle("Heliographic Stonyhurst frame for "
f"{time_stamp}")
else:
fig.suptitle(f"GAMERA-Helio frame for {time_stamp}")
elif pic == "pic3":
# Lat/lon plot at 1 AU (the outer edge of the gamhelio grid), in
# the modified HGS frame rotating with the Sun.
AU_RSUN = 215.0
radius = AU_RSUN
I_RSUN = 21.5
if inner : radius = I_RSUN
hviz.PlotiSlMagV(gsph, nStp, xyBds, AxL0, AxC1_0, idx=radius,
idx_is_radius=True,
hgsplot=hgsplot, MJDc=MJDc, MJD_plot=mjd)
hviz.PlotiSlD(gsph, nStp, xyBds, AxR0, AxC2_0, idx=radius,
idx_is_radius=True,
hgsplot=hgsplot, MJDc=MJDc, MJD_plot=mjd,
use_outer_range=(not inner) )
hviz.PlotiSlTemp(gsph, nStp, xyBds, AxL1, AxC1_1, idx=radius,
idx_is_radius=True,
hgsplot=hgsplot, MJDc=MJDc, MJD_plot=mjd,
use_outer_range=(not inner))
hviz.PlotiSlBr(gsph, nStp, xyBds, AxR1, AxC2_1, idx=radius,
idx_is_radius=True,
hgsplot=hgsplot, MJDc=MJDc, MJD_plot=mjd,
use_outer_range=(not inner))
if hgsplot:
fig.suptitle(f"Heliographic Stonyhurst frame at {radius} [RS] for {time_stamp}")
else:
fig.suptitle(f"GAMERA-Helio frame at {radius} [RS] for {time_stamp}")
elif pic == "pic4":
# Plot at 1 AU in frame rotating with Sun.
hviz.PlotiSlBrRotatingFrame(gsph, nStp, xyBds, Ax, AxC)
elif pic == "pic5":
hviz.PlotDensityProf(gsph, nStp, xyBds, Ax)
hviz.PlotSpeedProf(gsph, nStp, xyBds, AxC)
hviz.PlotFluxProf(gsph, nStp, xyBds, AxC1)
elif pic == "pic6":
hviz.PlotEqBr(gsph, nStp, xyBds, AxL0, AxC1_0, hgsplot=hgsplot, MJDc=MJDc, MJD_plot=mjd)
hviz.PlotEqBx(gsph, nStp, xyBds, AxR0, AxC2_0, hgsplot=hgsplot, MJDc=MJDc, MJD_plot=mjd)
hviz.PlotEqBy(gsph, nStp, xyBds, AxL1, AxC1_1, hgsplot=hgsplot, MJDc=MJDc, MJD_plot=mjd)
hviz.PlotEqBz(gsph, nStp, xyBds, AxR1, AxC2_1, hgsplot=hgsplot, MJDc=MJDc, MJD_plot=mjd)
fig.suptitle("GAMERA-Helio frame for "
f"{time_stamp}")
elif pic == "pic7":
if jslice is None:
jidx = gsph.Nj//2 - 1
else:
jidx = jslice
hviz.PlotjMagV(gsph, nStp, xyBds, AxL0, AxC1_0, jidx=jidx, hgsplot=hgsplot, MJDc=MJDc, MJD_plot=mjd)
hviz.PlotjD(gsph, nStp, xyBds, AxR0, AxC2_0, jidx=jidx, hgsplot=hgsplot, MJDc=MJDc, MJD_plot=mjd)
hviz.PlotjTemp(gsph, nStp, xyBds, AxL1, AxC1_1, jidx=jidx, hgsplot=hgsplot, MJDc=MJDc, MJD_plot=mjd)
hviz.PlotjBr(gsph, nStp, xyBds, AxR1, AxC2_1, jidx=jidx, hgsplot=hgsplot, MJDc=MJDc, MJD_plot=mjd)
fig.suptitle("GAMERA-Helio frame for "
f"{time_stamp}")
else:
raise TypeError(f"Invalid figure type: {pic}!")
# Add time in the upper left (if not in figure title).
# if pic == "pic1" or pic == "pic2" or pic == "pic3" or pic == "pic6":
if pic == "pic4" or pic == "pic5":
gsph.AddTime(nStp, Ax, xy=[0.015, 0.92], fs="small")
# Overlay the spacecraft positions.
if spacecraft:
# Fetch the MJD at start and end of the model results.
MJD_start = kh5.tStep(fname, gsph.s0, aID="MJD")
MJD_end = kh5.tStep(fname, gsph.sFin, aID="MJD")
# Convert the start and stop MJD to a datetime object in UT.
ut_start = ktools.MJD2UT(MJD_start)
ut_end = ktools.MJD2UT(MJD_end)
# Fetch the trajectory of each spacecraft from CDAWeb. Then
# interpolate the position at the time of the plot, and plot the
# spacecraft at the interpolated position.
for (i_sc, sc_id) in enumerate(spacecraft):
if verbose:
print(f"Fetching trajectory for {sc_id}.")
# Fetch the spacecraft trajectory in whatever frame is
# available from CDAWeb.
sc_data = cdaweb_utils.fetch_helio_spacecraft_trajectory(
sc_id, ut_start, ut_end
)
if sc_data is None:
print(f"No trajectory found for {sc_id}.")
continue
# Ingest the trajectory by converting it to the GH(MJDc) frame.
if verbose:
print(f"Converting ephemeris for {sc_id} to the gamhelio "
f"frame at MJD {MJDc}.")
t_strings = np.array([str(t) for t in sc_data["Epoch"]])
t = astropy.time.Time(t_strings, scale='utc').mjd
x, y, z = cdaweb_utils.ingest_helio_spacecraft_trajectory(
sc_id, sc_data, MJDc)
# Interpolate the spacecraft position at the time for the plot.
t_sc = mjd
x_sc = np.interp(t_sc, t, x)
y_sc = np.interp(t_sc, t, y)
z_sc = np.interp(t_sc, t, z)
# If needed, convert the position to HGS(mjd).
if hgsplot:
x_sc, y_sc, z_sc = GHtoHGS(MJDc, x_sc, y_sc, z_sc, mjd)
# If needed, compute heliocentric spherical coordinates
# for the interpolated spacecraft position. Longitude is in
# the -180 to +180 range. Convert to 0-360 if not using
# hgsplot.
if pic == "pic3" or pic == "pic4":
rxy = np.sqrt(x_sc**2 + y_sc**2)
theta = np.arctan2(rxy, z_sc)
phi = np.arctan2(y_sc, x_sc)
lat = np.degrees(np.pi/2 - theta)
lon = np.degrees(phi)
lat_sc = lat
lon_sc = lon
if not hgsplot:
if lon_sc < 0:
lon_sc += 360
# Plot the position of the spacecraft at the plot time. Each
# spacecraft is plotted as a colored dot with a black outline.
color = SPACECRAFT_COLORS[i_sc % len(SPACECRAFT_COLORS)]
x_nudge = 0.0
y_nudge = 8.0
if pic == "pic1":
for ax in (AxL0, AxR0, AxL1, AxR1):
ax.plot(x_sc, y_sc, 'o', c=color)
ax.plot(x_sc, y_sc, 'o', c="black", fillstyle="none")
ax.text(x_sc + x_nudge, y_sc + y_nudge, sc_id,
c="black", horizontalalignment="center")
elif pic == "pic2":
for ax in (AxL0, AxR0, AxL1, AxR1):
ax.plot(x_sc, z_sc, 'o', c=color)
ax.plot(x_sc, z_sc, 'o', c="black", fillstyle="none")
ax.text(x_sc + x_nudge, z_sc + y_nudge, sc_id,
c="black", horizontalalignment="center")
elif pic == "pic3":
for ax in (AxL0, AxR0, AxL1, AxR1):
ax.plot(lon_sc, lat_sc, 'o', c=color)
ax.plot(lon_sc, lat_sc, 'o', c="black",
fillstyle="none")
ax.text(lon_sc + x_nudge, lat_sc + y_nudge, sc_id,
c="black", horizontalalignment="center")
elif pic == "pic4":
ax = Ax
ax.plot(lon_sc, lat_sc, 'o', c=color)
ax.plot(lon_sc, lat_sc, 'o', c="black", fillstyle="none")
ax.text(lon_sc + x_nudge, lat_sc + y_nudge, sc_id,
c="black", horizontalalignment="center")
elif pic == "pic5":
pass
elif pic == "pic6":
for ax in (AxL0, AxR0, AxL1, AxR1):
ax.plot(x_sc, y_sc, 'o', c=color)
ax.plot(x_sc, y_sc, 'o', c="black", fillstyle="none")
ax.text(x_sc + x_nudge, y_sc + y_nudge, sc_id,
c="black", horizontalalignment="center")
elif pic == "pic7":
raise TypeError("Spacecraft not supported for pic7!")
else:
raise TypeError(f"Invalid plot code: {pic}!")
# Save the figure to a file.
path = os.path.join(fdir, fOut(ftag, pic, nStp, hgsplot))
kv.savePic(path, bLenX=40)
plt.close()
toc = time.perf_counter()
print(f"Step {nStp} took {toc-tic} s")
if __name__ == "__main__":
main()

View File

@@ -1,389 +0,0 @@
#!/usr/bin/env python
"""Create north and south REMIX plots.
Create north and south REMIX plots.
Authors
-------
Kareem Sorathia (kareem.sorathia@jhuapl.edu)
Eric Winter (eric.winter@jhuapl.edu)
Example PBS script to use multiprocessing to make a video on Lou Data Analysis Nodes:
#!/bin/bash
#PBS -N MixVid
#PBS -j oe
#PBS -lselect=1:ncpus=36:mem=750GB
#PBS -lwalltime=2:00:00
#PBS -q ldan
#PBS -W group_list=s2521
export RUNID="geospace"
# load modules and set python environment
source ~/.bashrc
setPy
cd $PBS_O_WORKDIR
mixpic.py -id $RUNID --vid -ncpus 36
"""
# Import standard modules.
import argparse
import sys
import os
# Import supplemental modules.
from astropy.time import Time
# import numpy as np
import matplotlib as mpl
import matplotlib.gridspec as gridspec
import matplotlib.pyplot as plt
import numpy as np
import h5py as h5
from alive_progress import alive_it
from multiprocessing import Pool
from psutil import cpu_count
# Import project-specific modules.
import kaipy.cdaweb_utils as cdaweb_utils
import kaipy.kaiH5 as kaiH5
import kaipy.kaiTools as ktools
import kaipy.remix.remix as remix
import kaipy.kaiViz as kv
import kaipy.kdefs as kd
# Program constants and defaults
# Program description.
description = """Creates simple multi-panel REMIX figure for a GAMERA magnetosphere run.
Top Row - FAC (with potential contours overplotted), Pedersen and Hall Conductances
Bottom Row - Joule heating rate, particle energy and energy flux
"""
# Default identifier for results to read.
default_runid = "msphere"
# Plot the last step by default.
default_step = -1
# Color to use for magnetic footprint positions.
FOOTPRINT_COLOR = 'red'
# Coordinate system for plotting
default_coord = 'SM'
def create_command_line_parser():
"""Create the command-line argument parser.
Create the parser for command-line arguments.
Parameters
----------
None
Returns
-------
parser : argparse.ArgumentParser
Command-line argument parser for this script.
"""
parser = argparse.ArgumentParser(
description=description,
formatter_class=argparse.RawTextHelpFormatter
)
parser.add_argument(
"--debug", action="store_true", default=False,
help="Print debugging output (default: %(default)s)."
)
parser.add_argument(
"-id", type=str, metavar="runid", default=default_runid,
help="Run ID of data (default: %(default)s)"
)
parser.add_argument(
"-n", type=int, metavar="step", default=default_step,
help="Time slice to plot (default: %(default)s)"
)
parser.add_argument(
'-nflux', action='store_true', default=False,
help="Show number flux instead of energy flux (default: %(default)s)"
)
parser.add_argument(
'-print', action='store_true', default=False,
help="Print list of all steps and time labels, then exit (default: %(default)s)"
)
parser.add_argument(
"--spacecraft", type=str, metavar="spacecraft", default=None,
help="Names of spacecraft to plot magnetic footprints, separated by commas (default: %(default)s)"
)
parser.add_argument(
"-v", "--verbose", action="store_true", default=False,
help="Print verbose output (default: %(default)s)."
)
parser.add_argument(
'-GTYPE', action='store_true', default=False,
help="Show RCM grid type in the eflx plot (default: %(default)s)"
)
parser.add_argument(
'-PP', action='store_true', default=False,
help="Show plasmapause (10/cc) in the eflx/nflx plot (default: %(default)s)"
)
parser.add_argument(
'-vid', action='store_true', default=False,
help="Make a video and store in mixVid directory (default: %(default)s)"
)
parser.add_argument(
'-overwrite', action='store_true', default=False,
help="Overwrite existing vid files (default: %(default)s)"
)
parser.add_argument(
'--ncpus', type=int, metavar="ncpus", default=1,
help="Number of threads to use with --vid (default: %(default)s)"
)
parser.add_argument(
'-nohash', action='store_true', default=False,
help="Don't display branch/hash info (default: %(default)s)"
)
parser.add_argument(
'--coord', type=str, metavar="coord", default=default_coord,
help="Coordinate system to use (default: %(default)s)"
)
return parser
def makePlot(i, remixFile, nStp):
with h5.File(remixFile, 'r') as f5:
foundT = f5['Step#'+str(nStp)].attrs['MJD']
#foundT = T[idxStp]
if debug:
print("foundT = %s" % foundT)
print('Found time:', Time(foundT, format='mjd').iso)
utS = ktools.MJD2UT(foundT)
if debug:
print("utS = %s" % utS)
# If both N and S files exist, skip it
if do_vid:
filenameN = "{}.{:0>{n}d}.png".format("remix_n", i, n=n_pad)
outPathN = os.path.join(outDir, filenameN)
filenameS = "{}.{:0>{n}d}.png".format("remix_s", i, n=n_pad)
outPathS = os.path.join(outDir, filenameS)
if not do_overwrite and os.path.exists(outPathN) and os.path.exists(outPathS):
return
# Read the data into the remix object.
ion = remix.remix(remixFile, nStp)
if debug:
print("ion = %s" % ion)
for h in ['NORTH','SOUTH']:
if h == 'NORTH':
hemi = 'n'
if h == 'SOUTH':
hemi = 's'
if do_vid:
filename = "{}.{:0>{n}d}.png".format("remix_"+hemi, i, n=n_pad)
outPath = os.path.join(outDir, filename)
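# Zero-padded frame numbers keep the per-step images lexically ordered, which
# simplifies assembling them into a video afterwards.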
else:
outPath = "remix_"+hemi+".png"
# Skip this file if it already exists and we're not supposed to overwrite
if not do_overwrite and os.path.exists(outPath) and do_vid:
continue
plt.clf()
# Add a label.
plt.figtext(
0.5, 0.94, 'MIX (' + h + ')\n' + Time(foundT, format='mjd').iso,
fontsize=12, multialignment='center', horizontalalignment='center'
)
# Initialize the remix object based on the current hemisphere.
ion.init_vars(h)
# Create the plot layout for the current hemisphere.
gs = gridspec.GridSpec(
2, 3, figure=fig, left=0.03, right=0.97, top=0.9, bottom=0.03
)
# Create the individual plots for the current hemisphere.
axs = [None]*6
axs[0] = ion.plot('current', gs=gs[0, 0])
axs[1] = ion.plot('sigmap', gs=gs[0, 1])
axs[2] = ion.plot('sigmah', gs=gs[0, 2])
axs[3] = ion.plot('joule', gs=gs[1, 0])
axs[4] = ion.plot('energy', gs=gs[1, 1])
if do_nflux:
axs[5] = ion.plot('flux', gs=gs[1, 2],doGTYPE=do_GTYPE,doPP=do_PP)
else:
axs[5] = ion.plot('eflux', gs=gs[1, 2],doGTYPE=do_GTYPE,doPP=do_PP)
# If requested, plot the magnetic footprints for the specified
# spacecraft.
if spacecraft:
for sc in spacecraft:
if verbose:
print("Overplotting %s magnetic footprint for %s." % (h, sc))
# Fetch the footprint position for this hemisphere.
if h.lower() == 'north':
fp_lat, fp_lon = cdaweb_utils.fetch_satellite_magnetic_northern_footprint_position(
sc, utS
)
else:
fp_lat, fp_lon = cdaweb_utils.fetch_satellite_magnetic_southern_footprint_position(
sc, utS
)
if debug:
print("fp_lat, fp_lon = %s, %s" % (fp_lat, fp_lon))
# Skip if no footprint found.
if fp_lat is None:
print("No %s footprint found for spacecraft %s." % (h, sc))
continue
# The footprint locations are in geographic (GEO) coordinates.
# They must be converted to Solar Magnetic (SM) coordinates
# for plotting.
# Convert the footprint position to the coordinate system used
# by these plots, which show contours at the surface of the
# ionosphere, about 122 km above the surface of the Earth.
# Note that this adjustment assumes the field lines impinging
# on the magnetic footprint descend vertically at the
# footprint point, which is not technically accurate.
fp_lat_rad = np.radians(fp_lat)
fp_lon_rad = np.radians(fp_lon)
fp_x = np.cos(fp_lat_rad)*np.cos(fp_lon_rad)
fp_y = np.cos(fp_lat_rad)*np.sin(fp_lon_rad)
fp_theta = np.arctan2(fp_y, fp_x) # [-pi, pi]
fp_r = np.sqrt(fp_x**2 + fp_y**2)
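# With this mapping the plot azimuth is the footprint longitude and the plot
# radius is cos(latitude) (roughly the sine of colatitude), so the pole lands
# at the origin of the polar axes and the equator at unit radius.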
# Plot a labelled dot at the location of each footprint.
# Skip if no footprint position found.
for ax in axs:
ax.plot(fp_theta, fp_r, 'o', c=FOOTPRINT_COLOR)
theta_nudge = 0.0
r_nudge = 0.0
ax.text(fp_theta + theta_nudge, fp_r + r_nudge, sc)
# Add Branch and Hash info
if do_hash:
fig.text(0.1,0.95,f"branch/commit: {branch}/{githash}", fontsize=6)
# Save to file
kv.savePic(outPath, dpiQ=300)
#plt.close(fig)
if __name__ == "__main__":
"""Plot remix data, either a single time step or as a movie"""
# Set up the command-line parser.
parser = create_command_line_parser()
# Parse the command-line arguments.
args = parser.parse_args()
debug = args.debug
runid = args.id
nStp = args.n
do_nflux = args.nflux
do_print = args.print
spacecraft = args.spacecraft
verbose = args.verbose
do_GTYPE = args.GTYPE
do_PP = args.PP
do_vid = args.vid
do_overwrite = args.overwrite
do_hash = not args.nohash
ncpus = args.ncpus
if debug:
print("args = %s" % args)
# Construct the name of the REMIX results file.
remixFile = runid + '.mix.h5'
if debug:
print("remixFile = %s" % remixFile)
# Get branch/hash info
if do_hash:
branch = kaiH5.GetBranch(remixFile)
githash = kaiH5.GetHash(remixFile)
if debug:
print(f'branch/commit: {branch}/{githash}')
# Split the original string into a list of spacecraft IDs.
if spacecraft:
spacecraft = spacecraft.split(',')
if debug:
print("spacecraft = %s" % spacecraft)
# Enumerate the steps in the results file.
nsteps, sIds = kaiH5.cntSteps(remixFile)
sIds = sorted(sIds)
if debug:
print("nsteps = %s" % nsteps)
print("sIds = %s" % sIds)
# Check that the requested step exists.
if nStp >= 0 and nStp not in sIds: # Any nStp<0 gets the last step.
raise TypeError(f"Step #{nStp} not found in {remixFile}!")
# Get the times from the result file.
if do_print:
T = kaiH5.getTs(remixFile, sIds, aID='MJD')
if debug:
print("T = %s" % T)
for i, tt in enumerate(T):
print('Step#%06d: ' % sorted(sIds)[i], Time(tt, format='mjd').iso)
sys.exit(0)
# Create the plots in a memory buffer.
mpl.use('Agg')
# Set global plot font options.
mpl.rc('mathtext', fontset='stixsans', default='regular')
mpl.rc('font', size=10)
# Init figure
fig = plt.figure(figsize=(12, 7.5))
if not do_vid: # Then we are making a single image, keep original functionality
# Find the time for the specified step.
if nStp == -1:
# Take the last step.
nStp = sorted(sIds)[-1]
if debug:
print("nStp = %s" % nStp)
makePlot(nStp, remixFile, nStp)
else: # Then we make a video, i.e. series of images saved to mixVid
outDir = 'mixVid'
kaiH5.CheckDirOrMake(outDir)
# How many 0's do we need for filenames?
n_pad = int(np.log10((len(sIds)))) + 1
if ncpus == 1:
for i, nStp in enumerate(alive_it(sIds,length=kd.barLen,bar=kd.barDef)):
makePlot(i,remixFile, nStp)
else:
# Make list of parallel arguments
ag = ((i,remixFile,nStp) for i, nStp in enumerate(sIds) )
# Check we're not exceeding cpu_count on computer
ncpus = min(int(ncpus),cpu_count(logical=False))
print('Using multiprocessing with',ncpus,'worker processes')
# Do parallel job
with Pool(processes=ncpus) as pl:
pl.starmap(makePlot,ag)
print("Done making all the images. Go to mixVid folder")

View File

@@ -1,448 +0,0 @@
#!/usr/bin/env python
"""Make a quick figure of a Gamera magnetosphere run.
Make a quick figure of a Gamera magnetosphere run.
Authors
-------
Kareem Sorathia (kareem.sorathia@jhuapl.edu)
Eric Winter (eric.winter@jhuapl.edu)
"""
# Import standard modules.
import argparse
import os
# Import supplemental modules.
import matplotlib as mpl
import matplotlib.gridspec as gridspec
import matplotlib.pyplot as plt
import numpy as np
import warnings
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
from multiprocessing import Pool
from psutil import cpu_count
# Import project-specific modules.
from kaipy import cdaweb_utils
import kaipy.gamera.gampp as gampp
import kaipy.gamera.magsphere as msph
import kaipy.gamera.msphViz as mviz
import kaipy.gamera.rcmpp as rcmpp
import kaipy.kaiH5 as kh5
import kaipy.kaiViz as kv
import kaipy.kaiTools as ktools
import kaipy.kdefs as kdefs
import kaipy.remix.remix as remix
# Program constants and defaults
# Program description.
description = """Creates simple multi-panel figure for Gamera magnetosphere run
Top Panel - Residual vertical magnetic field
Bottom Panel - Pressure (or density) and hemispherical insets
NOTE: There is an optional -size argument for domain bounds options
(default: std), which is passed to kaiViz functions.
"""
# Default identifier for results to read.
default_runid = "msphere"
# Plot the last step by default.
default_step = -1
# Color to use for spacecraft position symbols.
SPACECRAFT_COLOR = 'red'
def create_command_line_parser():
"""Create the command-line argument parser.
Create the parser for command-line arguments.
Parameters
----------
None
Returns
-------
parser : argparse.ArgumentParser
Command-line argument parser for this script.
"""
parser = argparse.ArgumentParser(
description=description,
formatter_class=argparse.RawTextHelpFormatter
)
parser.add_argument(
"--debug", action="store_true", default=False,
help="Print debugging output (default: %(default)s)."
)
parser.add_argument(
"-d", type=str, metavar="directory", default=os.getcwd(),
help="Directory containing data to read (default: %(default)s)"
)
parser.add_argument(
"-id", type=str, metavar="runid", default=default_runid,
help="Run ID of data (default: %(default)s)"
)
parser.add_argument(
"-n", type=int, metavar="step", default=default_step,
help="Time slice to plot (default: %(default)s)"
)
parser.add_argument(
"-bz", action="store_true", default=False,
help="Show Bz instead of dBz (default: %(default)s)."
)
parser.add_argument(
"-den", action="store_true", default=False,
help="Show density instead of pressure (default: %(default)s)."
)
parser.add_argument(
"-jy", action="store_true", default=False,
help="Show Jy instead of pressure (default: %(default)s)."
)
parser.add_argument(
"-ephi", action="store_true", default=False,
help="Show Ephi instead of pressure (default: %(default)s)."
)
parser.add_argument(
"-noion", action="store_true", default=False,
help="Don't show ReMIX data (default: %(default)s)."
)
parser.add_argument(
"-nompi", action="store_true", default=False,
help="Don't show MPI boundaries (default: %(default)s)."
)
parser.add_argument(
"-norcm", action="store_true", default=False,
help="Don't show RCM data (default: %(default)s)."
)
parser.add_argument(
"-bigrcm", action="store_true", default=False,
help="Show entire RCM domain (default: %(default)s)."
)
parser.add_argument(
"-src", action="store_true", default=False,
help="Show source term (default: %(default)s)."
)
parser.add_argument(
"-v", "--verbose", action="store_true", default=False,
help="Print verbose output (default: %(default)s)."
)
parser.add_argument(
"--spacecraft", type=str, metavar="spacecraft", default=None,
help="Names of spacecraft to plot positions, separated by commas (default: %(default)s)"
)
parser.add_argument(
'-vid', action='store_true', default=False,
help="Make a video and store in mixVid directory (default: %(default)s)"
)
parser.add_argument(
'-overwrite', action='store_true', default=False,
help="Overwrite existing vid files (default: %(default)s)"
)
parser.add_argument(
'--ncpus', type=int, metavar="ncpus", default=1,
help="Number of threads to use with --vid (default: %(default)s)"
)
parser.add_argument(
'-nohash', action='store_true', default=False,
help="Don't display branch/hash info (default: %(default)s)"
)
# Add an option for plot domain size.
mviz.AddSizeArgs(parser)
return parser
def makePlot(i,spacecraft,nStp):
# Disable some warning spam if not debug
if not debug:
warnings.filterwarnings("ignore", message="The input coordinates to pcolor are interpreted as cell centers.*")
# Name of plot output file.
if do_vid:
fOut = "{}.{:0>{n}d}.png".format("msphpic", i, n=n_pad)
outPath = os.path.join(outDir, fOut)
else:
# Name of plot output file.
fOut = "qkmsphpic.png"
outPath = fOut
# Skip this file if it already exists and we're not supposed to overwrite
if not do_overwrite and os.path.exists(outPath) and do_vid:
return
# Open remix data if available.
if doMIX:
print("Found ReMIX data")
ion = remix.remix(rmxChk, nStp)
if debug:
print("ion = %s" % ion)
gs = gridspec.GridSpec(3, 6, height_ratios=[20, 1, 1], hspace=0.025)
if debug:
print("fig = %s" % fig)
print("gs = %s" % gs)
# Create the plotting Axes objects.
AxL = fig.add_subplot(gs[0, 0:3])
AxR = fig.add_subplot(gs[0, 3:])
AxC1 = fig.add_subplot(gs[-1, 0:2])
AxC2 = fig.add_subplot(gs[-1, 2:4])
AxC3 = fig.add_subplot(gs[-1, 4:6])
if debug:
print("AxL = %s" % AxL)
print("AxR = %s" % AxR)
print("AxC1 = %s" % AxC1)
print("AxC2 = %s" % AxC2)
print("AxC3 = %s" % AxC3)
# Create the field-aligned current colorbar on Axes #2.
cbM = kv.genCB(
AxC2, kv.genNorm(remix.facMax), "FAC", cM=remix.facCM, Ntk=4
)
AxC2.xaxis.set_ticks_position('top')
if debug:
print("cbM = %s" % cbM)
# On the left, plot the z-component of the residual magnetic field.
Bz = mviz.PlotEqB(gsph, nStp, xyBds, AxL, AxC1, doBz=doBz)
# Make any requested optional plots, or just pressure.
if doJy:
mviz.PlotJyXZ(gsph, nStp, xyBds, AxR, AxC3)
elif doEphi:
mviz.PlotEqEphi(gsph, nStp, xyBds, AxR, AxC3)
else:
mviz.PlotMerid(gsph, nStp, xyBds, AxR, doDen, doRCM, AxC3, doSrc=doSrc)
# Add the date and time for the plot.
gsph.AddTime(nStp, AxL, xy=[0.025, 0.89], fs="x-large")
# Add the solar wind description text.
gsph.AddSW(nStp, AxL, xy=[0.625, 0.025], fs="small")
# If available, add the inset RCM plot.
if not noRCM:
AxRCM = inset_axes(AxL, width="30%", height="30%", loc=3)
rcmpp.RCMInset(AxRCM, rcmdata, nStp, mviz.vP)
# Add dBz contours.
AxRCM.contour(
kv.reWrap(gsph.xxc), kv.reWrap(gsph.yyc), kv.reWrap(Bz), [0.0],
colors=mviz.bz0Col, linewidths=mviz.cLW
)
# Show the RCM region as a box.
rcmpp.AddRCMBox(AxL)
# Plot the REMIX data, if requested.
if doIon:
gsph.AddCPCP(nStp, AxR, xy=[0.610, 0.925])
mviz.AddIonBoxes(gs[0, 3:], ion)
# Show the MPI decomposition, if requested.
if doMPI:
mviz.PlotMPI(gsph, AxL)
mviz.PlotMPI(gsph, AxR)
# If requested, overlay the spacecraft locations.
if spacecraft:
print("Overplotting spacecraft trajectories of %s." % spacecraft)
# Split the list into individual spacecraft names.
spacecraft = spacecraft.split(',')
if debug:
print("spacecraft = %s" % spacecraft)
# Fetch the MJD start and end time of the model results.
fname = gsph.f0
if debug:
print("fname = %s" % fname)
MJD_start = kh5.tStep(fname, gsph.s0, aID="MJD")
if debug:
print("MJD_start = %s" % MJD_start)
MJD_end = kh5.tStep(fname, gsph.sFin, aID="MJD")
if debug:
print("MJD_end = %s" % MJD_end)
# Convert the start and stop MJD to datetime objects in UT.
ut_start = ktools.MJD2UT(MJD_start)
if debug:
print("ut_start = %s" % ut_start)
ut_end = ktools.MJD2UT(MJD_end)
if debug:
print("ut_end = %s" % ut_end)
# Fetch and plot the trajectory of each spacecraft from CDAWeb.
for (i_sc, sc) in enumerate(spacecraft):
# Fetch the spacecraft trajectory in Solar Magnetic (SM)
# Cartesian coordinates between the start and end times.
sc_x, sc_y, sc_z = cdaweb_utils.fetch_spacecraft_SM_trajectory(
sc, ut_start, ut_end
)
if debug:
print("sc_x, sc_y, sc_z = %s, %s, %s" % (sc_x, sc_y, sc_z))
# Skip if no trajectory found.
if sc_x is None:
print("No trajectory found for spacecraft %s." % sc)
continue
# Convert coordinates to units of Earth radius.
CM_TO_KM = 1e-5 # Centimeters to kilometers
Re_km = kdefs.Re_cgs*CM_TO_KM # Earth radius in kilometers
sc_x_Re = sc_x/Re_km
sc_y_Re = sc_y/Re_km
sc_z_Re = sc_z/Re_km
if debug:
print("sc_x_Re, sc_y_Re, sc_z_Re = %s, %s, %s" %
(sc_x_Re, sc_y_Re, sc_z_Re))
# Plot a labelled trajectory of the spacecraft. Also plot a larger
# dot at the last point in the trajectory.
# Left plot
SPACECRAFT_COLORS = list(mpl.colors.TABLEAU_COLORS.keys())
color = SPACECRAFT_COLORS[i_sc % len(SPACECRAFT_COLORS)]
AxL.plot(sc_x_Re, sc_y_Re, marker=None, linewidth=1, c=color)
AxL.plot(sc_x_Re[-1], sc_y_Re[-1], 'o', c=color)
x_nudge = 1.0
y_nudge = 1.0
AxL.text(sc_x_Re[-1] + x_nudge, sc_y_Re[-1] + y_nudge, sc, c=color)
# Right plot
AxR.plot(sc_x_Re, sc_z_Re, marker=None, linewidth=1, c=color)
AxR.plot(sc_x_Re[-1], sc_z_Re[-1], 'o', c=color)
x_nudge = 1.0
z_nudge = 1.0
AxR.text(sc_x_Re[-1] + x_nudge, sc_z_Re[-1] + z_nudge, sc, c=color)
# Add Branch and Hash info
if do_hash:
fig.text(0.1,0.87,f"branch/commit: {branch}/{githash}", fontsize=6)
# Save the plot to a file.
kv.savePic(outPath, bLenX=45)
if __name__ == "__main__":
"""Make a quick figure of a Gamera magnetosphere run."""
# Set up the command-line parser.
parser = create_command_line_parser()
# Parse the command-line arguments.
args = parser.parse_args()
debug = args.debug
verbose = args.verbose
fdir = args.d
ftag = args.id
nStp = args.n
doDen = args.den
noIon = args.noion
noMPI = args.nompi
doMPI = not noMPI
doJy = args.jy
doEphi = args.ephi
doSrc = args.src
doBz = args.bz
noRCM = args.norcm
doBigRCM = args.bigrcm
do_vid = args.vid
do_overwrite = args.overwrite
do_hash = not args.nohash
ncpus = args.ncpus
spacecraft = args.spacecraft
if debug:
print("args = %s" % args)
# Get the domain size in Re.
xyBds = mviz.GetSizeBds(args)
if debug:
print("xyBds = %s" % xyBds)
# Set figure parameters.
doFast = False
doIon = not noIon
figSz = (12, 7.5)
# Open the gamera results pipe.
gsph = msph.GamsphPipe(fdir, ftag, doFast=doFast)
# Check for the presence of RCM results.
rcmChk = os.path.join(fdir, "%s.mhdrcm.h5" % ftag)
doRCM = os.path.exists(rcmChk)
if debug:
print("rcmChk = %s" % rcmChk)
print("doRCM = %s" % doRCM)
# Check for the presence of remix results.
rmxChk = os.path.join(fdir, "%s.mix.h5" % ftag)
doMIX = os.path.exists(rmxChk)
if debug:
print("rmxChk = %s" % rmxChk)
print("doMIX = %s" % doMIX)
# Get branch/hash info
if doMIX:
branch = kh5.GetBranch(rmxChk)
githash = kh5.GetHash(rmxChk)
if debug:
print(f'branch/commit: {branch}/{githash}')
# Open RCM data if available, and initialize visualization.
if doRCM:
print("Found RCM data")
rcmdata = gampp.GameraPipe(fdir, ftag + ".mhdrcm")
mviz.vP = kv.genNorm(1.0e-2, 100.0, doLog=True)
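# kv.genNorm with doLog=True builds a logarithmic color normalization spanning
# 0.01 to 100; it is stored on mviz.vP and passed to the RCM inset plot below.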
rcmpp.doEll = not doBigRCM
if debug:
print("rcmdata = %s" % rcmdata)
else:
rcmdata = None
# Setup the figure.
mpl.use('Agg') # Plot in memory buffer.
# Set global plot font options.
mpl.rc('mathtext', fontset='stixsans', default='regular')
mpl.rc('font', size=10)
# Init figure
fig = plt.figure(figsize=figSz)
if not do_vid: # If we are making a single image, keep original functionality
# If needed, fetch the number of the last step.
if nStp < 0:
nStp = gsph.sFin
print("Using Step %d" % nStp)
makePlot(nStp,spacecraft,nStp)
else: # then we make a video, i.e. series of images saved to msphVid
# Get video loop parameters
s0 = max(gsph.s0,1) # Skip Step#0
sFin = gsph.sFin
nsteps = sFin - s0
sIds = np.array(range(s0,sFin))
outDir = 'msphVid'
kh5.CheckDirOrMake(outDir)
# How many 0's do we need for filenames?
n_pad = int(np.log10(nsteps)) + 1
if ncpus == 1:
for i, nStp in enumerate(sIds):
makePlot(i, spacecraft, nStp)
else:
# Make list of parallel arguments
ag = ((i,spacecraft,nStp) for i, nStp in enumerate(sIds) )
# Check we're not exceeding cpu_count on computer
ncpus = min(int(ncpus),cpu_count(logical=False))
print('Using multiprocessing with',ncpus,'worker processes')
# Do parallel job
with Pool(processes=ncpus) as pl:
pl.starmap(makePlot,ag)
print("Done making all the images. Go to mixVid folder")

View File

@@ -1,199 +0,0 @@
#!/usr/bin/env python
"""
Probes rcm.h5 data file and returns graph of variable vs. channel energy
"""
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import kaipy.kaiViz as kv
import matplotlib.gridspec as gridspec
import numpy as np
import kaipy.kaiH5 as kh5
from matplotlib import rcParams, cycler
import argparse
from argparse import RawTextHelpFormatter
#Variable options
VOPT_SF = "specFlux"
VOPT_PSF = "precipSpecFlux"
VOPT_D = "den"
VOPT_P = "press"
# conversion factors
massi = 1.67e-27 # mass of ions in kg
ev = 1.602e-19 # 1 eV in J
nt = 1.0e-9 # nt
re = 6.380e6 # radius of earth in m
nT2T = 1.e-9
m2cm = 1.e2
pressure_factor = 2./3.*ev/re*nt
density_factor = nt/re
specFlux_factor = 1/np.pi/np.sqrt(8)*np.sqrt(ev/massi)*nt/re/1.0e1 # [units/cm^2/keV/str]
precipSpecFlux_factor = 1/nT2T/(m2cm**2)/(4*np.pi)
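# These factors convert the RCM eeta/vm arrays read from the h5 file into the
# physical units noted above; they are applied per variable in the time loop
# below.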
if __name__ == "__main__":
varChoices = [VOPT_SF, VOPT_PSF, VOPT_D, VOPT_P]
ftag = "msphere"
cmap = plt.cm.plasma
timeStr = ""
locStr = "-5,0,0"
numSamples = 6
varStr = "specFlux"
MainS = """Creates a plot of the differential flux for RCM ions or electrons in units of cm^-2 keV^-1 str^-1
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-id',type=str,metavar="runid",default=ftag,help="RunID of data (default: %(default)s)")
parser.add_argument('-n',type=int,metavar="numSamples",default=numSamples,help="Number of evenly-spaced time samples (default: %(default)s)")
parser.add_argument('-t',type=str,metavar="times",default=timeStr,help="Comma-separated times (in hours) to plot (example: 1,2,4,6). Ignores numSamples")
parser.add_argument('-l',type=str,metavar="loc",default=locStr,help="Comma-separated x,y,z values for equatorial location (default: %(default)s)")
parser.add_argument('-e',action='store_true',default=False,help="Flag to plot electrons instead of ions (default: %(default)s)")
parser.add_argument('-v',choices=varChoices,default=varStr,help="Variable to plot (default: %(default)s)")
args = parser.parse_args()
ftag = args.id
numSamples = args.n
timeStr = args.t
locStr = args.l
doElectrons = args.e
varStr = args.v
fIn = ftag+".rcm.h5"
x0,y0,z0 = [float(x) for x in locStr.split(',')]
#--Time stuff--
#Get rcm data timesteps
nDataSteps,sIds = kh5.cntSteps(fIn)
dataTimes = kh5.getTs(fIn,sIds,"time")/3600 # [Hrs]
if timeStr == "": # If user didn't specify the times they wanted, do evenly spaced samples using numSamples
tStart = np.amin(np.abs(dataTimes))
tEnd = np.amax(dataTimes)
nHrs = np.linspace(tStart, tEnd, num=numSamples, endpoint=True)
else: # Otherwise use user-specified times
nHrs = [float(t) for t in timeStr.split(',')]
NumStps = len(nHrs)
#--------------
rcParams['axes.prop_cycle'] = cycler(color=cmap(np.linspace(0, 1, NumStps)))
fSz = (14,7)
fig = plt.figure(figsize=fSz)
Legs = []
eos = False #Flag for reaching end of steps
for n in range(NumStps):
nStpIdx = np.abs(dataTimes - nHrs[n]).argmin()
nStp = nStpIdx + sIds.min()
legStr = "T = +%2.1f Hours"%(dataTimes[nStpIdx])
if nStpIdx == nDataSteps - 1: # We are about to plot the last available timestep, so nothing later needs to be plotted
eos=True
elif nHrs[n] > dataTimes[-1]:
if not eos: # If this is the first time we've exceeded the end of the available timesteps, plot the last timestep anyways
print("%2.1f [hrs] out of time range (%2.1f, %2.1f), using last step time (%2.1f)"%(nHrs[n],dataTimes[0],dataTimes[-1],dataTimes[nStpIdx]))
eos = True
else:
print("%2.1f [hrs] out of time range (%2.1f, %2.1f), skipping."%(nHrs[n],dataTimes[0],dataTimes[-1]))
continue
#Pull 3D data
eeta = kh5.PullVar(fIn,"rcmeeta",nStp)
vm = kh5.PullVar(fIn,"rcmvm" ,nStp)
lamc = kh5.PullVar(fIn,"alamc" ,nStp)
xeq = kh5.PullVar(fIn,"rcmxmin" ,nStp)
yeq = kh5.PullVar(fIn,"rcmymin" ,nStp)
zeq = kh5.PullVar(fIn,"rcmzmin" ,nStp)
if varStr == VOPT_PSF:
try:
deleeta = kh5.PullVar(fIn,"rcmdeleeta",nStp)
bir = kh5.PullVar(fIn,"bir" ,nStp)
sini = kh5.PullVar(fIn,"sini" ,nStp)
dtCpl = kh5.tStep(fIn,nStp,aID="dtCpl",aDef=15.0)
except Exception:
print("Variables needed for precipSpecFlux not found in this h5 file. Sorry")
quit()
#Do grid calculations on first time
if (n==0):
#Set interfaces
Nlat,Nlon,Nk = eeta.shape
kion = (lamc>0).argmax()
if doElectrons:
kStart=1 # Skip plasmasphere channel
kEnd=kion
else:
kStart=kion
kEnd=Nk
Nks = kEnd-kStart
ilamc = lamc[kStart:kEnd] # Cell center
ilami = np.zeros(Nks+1) # Cell interfaces
for m in range(0,Nks-1):
nk = m+kStart
ilami[m+1] = 0.5*(lamc[nk]+lamc[nk+1])
ilami[Nks] = lamc[-1] + 0.5*(lamc[-1]-lamc[-2])
#Ensure positive energies in case of electrons
ilami = np.abs(ilami)
ilamc = np.abs(ilamc)
lamscl = np.diff(ilami)*np.sqrt(ilamc) # Used in specFlux
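#ilami holds the energy-channel interfaces: interior interfaces are midpoints
#of adjacent channel centers (lamc), the outermost interface is extrapolated by
#half of the last channel spacing, and the first is left at zero.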
#Find nearest point (everytime)
dR = np.sqrt( (xeq-x0)**2.0 + (yeq-y0)**2.0 + (zeq-z0)**2.0 )
i0,j0 = np.unravel_index(dR.argmin(), dR.shape)
ekscl = vm[i0,j0]*np.diff(ilami) # Used in precipSpecFlux
#Get energy bins in keV
Ki = vm[i0,j0]*ilami*1.0e-3
Kc = vm[i0,j0]*ilamc*1.0e-3
if varStr == VOPT_SF: ijEta = specFlux_factor*Kc*eeta[i0,j0,kStart:kEnd]/lamscl * 1.0e3 # 1E3 for [kev -> eV]
elif varStr == VOPT_D: ijEta = density_factor*eeta[i0,j0,kStart:kEnd]*vm[i0,j0]**1.5 * 1E-6 # [1/m^3 -> 1/cc]
elif varStr == VOPT_P: ijEta = pressure_factor*np.abs(ilamc)*eeta[i0,j0,kStart:kEnd]*vm[i0,j0]**2.5 * 1E9 # [Pa -> nPa]
elif varStr == VOPT_PSF: ijEta = precipSpecFlux_factor*deleeta[i0,j0,kStart:kEnd]*bir[i0,j0]*sini[i0,j0]/ekscl/dtCpl
Legs.append(legStr)
print(legStr)
print("\tMin/Mean/Max K = %f / %f / %f"%(Kc.min(),Kc.mean(),Kc.max()))
k0 =ijEta.argmax()
print("\tMax @ K = %f"%(Kc[k0]))
print("\tVM = %f"%(vm[i0,j0]))
plt.loglog(Kc,ijEta)
sName = "Electrons" if doElectrons else "Ions"
if varStr == VOPT_SF:
ylabelStr = "Differential energy flux /cm^2/keV/str/s"
filetag = "Spec"
elif varStr == VOPT_D:
ylabelStr = "Density [1/cc]"
filetag = "Den"
elif varStr == VOPT_P:
ylabelStr = "Pressure [nPa]"
filetag = "Press"
elif varStr == VOPT_PSF:
ylabelStr = "Precip. diff. energy flux /cm^2/eV/str/s"
filetag = "PrecipSpec"
titStr = "%s @ (x,y,z) = (%5.2f,%5.2f,%5.2f)"%(sName,x0,y0,z0)
Ax = plt.gca()
Ax.legend(Legs,prop={'family': 'monospace'},loc='lower left')
Ax.set_xlabel("Energy [keV]")
Ax.set_ylabel(ylabelStr)
Ax.set_title(titStr)
Ax.grid()
#Ax.set_ylim(1.0e+10,1.0e+19)
sTag = "e" if doElectrons else "i"
kv.savePic("qkrcm%s_%s.png"%(filetag, sTag))

View File

@@ -1,157 +0,0 @@
#!/usr/bin/env python
"""
this code computes number flux vs energy distribution of RCM diffuse precipitation in units of cm^-2 eV^-1 str^-1 s^-1
"""
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import kaipy.kaiViz as kv
import matplotlib.gridspec as gridspec
import numpy as np
import kaipy.kaiH5 as kh5
from matplotlib import rcParams, cycler
import argparse
from argparse import RawTextHelpFormatter
if __name__ == "__main__":
ftag = "msphere"
cmap = plt.cm.plasma
timeStr = ""
locStr = "-5,0,0"
numSamples = 6
MainS = """Creates a plot of the differential flux for RCM ions or electrons in units of cm^-2 keV^-1 str^-1
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-id',type=str,metavar="runid",default=ftag,help="RunID of data (default: %(default)s)")
parser.add_argument('-n',type=int,metavar="numSamples",default=numSamples,help="Number of evenly-spaced time samples (default: %(default)s)")
parser.add_argument('-t',type=str,metavar="times",default=timeStr,help="Comma-separated times (in hours) to plot (example: 1,2,4,6). Ignores numSamples")
parser.add_argument('-l',type=str,metavar="loc",default=locStr,help="Comma-separated x,y,z values for equatorial location (default: %(default)s)")
parser.add_argument('-i',action='store_true',default=False,help="Flag to plot ions instead of electrons (default: %(default)s)")
args = parser.parse_args()
ftag = args.id
numSamples = args.n
timeStr = args.t
locStr = args.l
doIons = args.i
fIn = ftag+".rcm.h5"
x0,y0,z0 = [float(x) for x in locStr.split(',')]
#--Time stuff--
#Get rcm data timesteps
nDataSteps,sIds = kh5.cntSteps(fIn)
dataTimes = kh5.getTs(fIn,sIds,"time")/3600 # [Hrs]
if timeStr == "": # If user didn't specify the times they wanted, do evenly spaced samples using numSamples
tStart = np.amin(np.abs(dataTimes))
tEnd = np.amax(dataTimes)
nHrs = np.linspace(tStart, tEnd, num=numSamples, endpoint=True)
else: # Otherwise use user-specified times
nHrs = [float(t) for t in timeStr.split(',')]
NumStps = len(nHrs)
rcParams['axes.prop_cycle'] = cycler(color=cmap(np.linspace(0, 1, NumStps)))
fSz = (14,7)
fig = plt.figure(figsize=fSz)
# conversion factor
massi = 1.67e-27 # mass of ions in kg
ev = 1.602e-19 # 1 eV in J
nT2T = 1.e-9
m2cm = 1.e2
re = 6.380e6 # radius of earth in m
Legs = []
eos = False #Flag for reaching end of steps
for n in range(NumStps):
nStpIdx = np.abs(dataTimes - nHrs[n]).argmin()
nStp = nStpIdx + sIds.min()
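# Step# groups in the file need not start at zero, so offset the array index by the
# smallest step ID to recover the actual Step# number.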
legStr = "T = +%2.1f Hours"%(dataTimes[nStpIdx])
if nStpIdx == nDataSteps-1 and nHrs[n] <= dataTimes[-1]: # The last timestep is about to be plotted, so nothing more needs plotting afterwards
eos=True
elif nHrs[n] > dataTimes[-1]:
if not eos: # If this is the first time we've exceeded the end of the available timesteps, plot the last timestep anyways
print("%2.1f [hrs] out of time range (%2.1f, %2.1f), using last step time (%2.1f)"%(nHrs[n],dataTimes[0],dataTimes[-1],dataTimes[nStpIdx]))
eos = True
else:
print("%2.1f [hrs] out of time range (%2.1f, %2.1f), skipping."%(nHrs[n],dataTimes[0],dataTimes[-1]))
continue
#Pull 3D data
deleeta = kh5.PullVar(fIn,"rcmdeleeta",nStp)
vm = kh5.PullVar(fIn,"rcmvm" ,nStp)
lamc = kh5.PullVar(fIn,"alamc" ,nStp)
bir = kh5.PullVar(fIn,"bir" ,nStp)
sini = kh5.PullVar(fIn,"sini" ,nStp)
xeq = kh5.PullVar(fIn,"rcmxmin" ,nStp)
yeq = kh5.PullVar(fIn,"rcmymin" ,nStp)
zeq = kh5.PullVar(fIn,"rcmzmin" ,nStp)
#Pull dt in s
dtCpl = kh5.tStep(fIn,nStp,aID="dtCpl",aDef=15.0)
#Do grid calculations on first time
if (n==0):
#Set interfaces
Nlat,Nlon,Nk = deleeta.shape
kion = (lamc>0).argmax()
if doIons:
kStart=kion
kEnd=Nk
else:
kStart=1 # Skip plasmasphere channel
kEnd=kion
Nks = kEnd-kStart
ilamc = lamc[kStart:kEnd]
ilami = np.zeros(Nks+1)
for m in range(0,Nks-1):
nk = m+kStart
ilami[m+1] = 0.5*(lamc[nk]+lamc[nk+1])
ilami[Nks] = lamc[-1] + 0.5*(lamc[-1]-lamc[-2])
#Ensure positive energies in case of electrons
ilami = np.abs(ilami)
ilamc = np.abs(ilamc)
#Find nearest point (everytime)
dR = np.sqrt( (xeq-x0)**2.0 + (yeq-y0)**2.0 + (zeq-z0)**2.0 )
i0,j0 = np.unravel_index(dR.argmin(), dR.shape)
#Get energy bins in eV
Ki = vm[i0,j0]*ilami
Kc = vm[i0,j0]*ilamc
#Bin interval in eV
ekscl = vm[i0,j0]*np.diff(ilami)
#Get differential flux in 1/(eV cm^2 str s)
ijEta = deleeta[i0,j0,kStart:kEnd]*bir[i0,j0]*sini[i0,j0]/ekscl/dtCpl*nT2T/(m2cm**2)/(4*np.pi)
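# deleeta is the flux-tube content removed by precipitation over one coupling
# interval; scaling by bir*sini (field strength and dip angle at the foot point),
# dividing by the energy-bin width ekscl, the coupling time dtCpl and 4*pi sr,
# and applying the nT and cm^2 factors gives the flux per cm^2 per eV per sr per s.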
Legs.append(legStr)
print(legStr)
print("\tMin/Mean/Max K = %f / %f / %f"%(Kc.min(),Kc.mean(),Kc.max()))
k0 =ijEta.argmax()
print("\tMax @ K = %f"%(Kc[k0]))
print("\tVM = %f"%(vm[i0,j0]))
plt.loglog(Kc,ijEta)
sName = "Ions" if doIons else "Electrons"
titStr = "%s @ (x,y,z) = (%5.2f,%5.2f,%5.2f)"%(sName,x0,y0,z0)
Ax = plt.gca()
Ax.legend(Legs,prop={'family': 'monospace'},loc='lower left')
Ax.set_xlabel("Energy [eV]")
Ax.set_ylabel("differential energy flux /cm^2/eV/str/s")
Ax.set_title(titStr)
Ax.grid()
#Ax.set_ylim(1.0e+10,1.0e+19)
sTag = "_i" if doIons else "_e"
kv.savePic("qkRcmPrecipSpec%s.png"%sTag)
#plt.show()
#
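# A hypothetical invocation (the script file name is assumed, not taken from this commit):
#   python qkRcmPrecipSpec.py -id msphere -t 2,4,8 -l -6,0,0 -i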

View File

@@ -1,157 +0,0 @@
#!/usr/bin/env python
"""
this code computes differential energy flux in units of cm^-2 keV^-1 str^-1
"""
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import kaipy.kaiViz as kv
import matplotlib.gridspec as gridspec
import numpy as np
import kaipy.kaiH5 as kh5
from matplotlib import rcParams, cycler
import argparse
from argparse import RawTextHelpFormatter
if __name__ == "__main__":
ftag = "msphere"
cmap = plt.cm.plasma
timeStr = ""
locStr = "-5,0,0"
numSamples = 6
MainS = """Creates a plot of the differential flux for RCM ions or electrons in units of cm^-2 keV^-1 str^-1
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-id',type=str,metavar="runid",default=ftag,help="RunID of data (default: %(default)s)")
parser.add_argument('-n',type=int,metavar="numSamples",default=numSamples,help="Number of evenly-spaced time samples (default: %(default)s)")
parser.add_argument('-t',type=str,metavar="times",default=timeStr,help="Comma-separated times (in hours) to plot (example: 1,2,4,6). Ignores numSamples")
parser.add_argument('-l',type=str,metavar="loc",default=locStr,help="Comma-separated x,y,z values for equatorial location (default: %(default)s)")
parser.add_argument('-e',action='store_true',default=False,help="Flag to plot electrons instead of ions (default: %(default)s)")
args = parser.parse_args()
ftag = args.id
numSamples = args.n
timeStr = args.t
locStr = args.l
doElectrons = args.e
fIn = ftag+".rcm.h5"
x0,y0,z0 = [float(x) for x in locStr.split(',')]
#--Time stuff--
#Get rcm data timesteps
nDataSteps,sIds = kh5.cntSteps(fIn)
dataTimes = kh5.getTs(fIn,sIds,"time")/3600 # [Hrs]
if timeStr == "": # If user didn't specify the times they wanted, do evenly spaced samples using numSamples
tStart = np.amin(np.abs(dataTimes))
tEnd = np.amax(dataTimes)
nHrs = np.linspace(tStart, tEnd, num=numSamples, endpoint=True)
else: # Otherwise use user-specified times
nHrs = [float(t) for t in timeStr.split(',')]
NumStps = len(nHrs)
rcParams['axes.prop_cycle'] = cycler(color=cmap(np.linspace(0, 1, NumStps)))
fSz = (14,7)
fig = plt.figure(figsize=fSz)
# conversion factor
massi = 1.67e-27 # mass of ions in kg
ev = 1.602e-19 # 1 eV in J
nt = 1.0e-9 # nT in T
re = 6.380e6 # radius of earth in m
# this converts to units/cm^2/keV/str
conversion_factor = 1/np.pi/np.sqrt(8)*np.sqrt(ev/massi)*nt/re/1.0e1
print('conversion factor=',conversion_factor)
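# conversion_factor bundles the physical constants defined above (eV in J, ion mass,
# nT, Re); multiplied by Kc*eeta/lamscl below it is intended to give the differential
# flux in the units quoted in the module docstring.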
Legs = []
eos = False #Flag for reaching end of steps
for n in range(NumStps):
nStpIdx = np.abs(dataTimes - nHrs[n]).argmin()
nStp = nStpIdx + sIds.min()
legStr = "T = +%2.1f Hours"%(dataTimes[nStpIdx])
if nStpIdx == nDataSteps-1 and nHrs[n] <= dataTimes[-1]: # The last timestep is about to be plotted, so nothing more needs plotting afterwards
eos=True
elif nHrs[n] > dataTimes[-1]:
if not eos: # If this is the first time we've exceeded the end of the available timesteps, plot the last timestep anyways
print("%2.1f [hrs] out of time range (%2.1f, %2.1f), using last step time (%2.1f)"%(nHrs[n],dataTimes[0],dataTimes[-1],dataTimes[nStpIdx]))
eos = True
else:
print("%2.1f [hrs] out of time range (%2.1f, %2.1f), skipping."%(nHrs[n],dataTimes[0],dataTimes[-1]))
continue
#Pull 3D data
eeta = kh5.PullVar(fIn,"rcmeeta",nStp)
vm = kh5.PullVar(fIn,"rcmvm" ,nStp)
lamc = kh5.PullVar(fIn,"alamc" ,nStp)
xeq = kh5.PullVar(fIn,"rcmxmin" ,nStp)
yeq = kh5.PullVar(fIn,"rcmymin" ,nStp)
zeq = kh5.PullVar(fIn,"rcmzmin" ,nStp)
#Do grid calculations on first time
if (n==0):
#Set interfaces
Nlat,Nlon,Nk = eeta.shape
kion = (lamc>0).argmax()
if doElectrons:
kStart=1 # Skip plasmasphere channel
kEnd=kion
else:
kStart=kion
kEnd=Nk
Nks = kEnd-kStart
ilamc = lamc[kStart:kEnd]
ilami = np.zeros(Nks+1)
for m in range(0,Nks-1):
nk = m+kStart
ilami[m+1] = 0.5*(lamc[nk]+lamc[nk+1])
ilami[Nks] = lamc[-1] + 0.5*(lamc[-1]-lamc[-2])
#Ensure positive energies in case of electrons
ilami = np.abs(ilami)
ilamc = np.abs(ilamc)
lamscl = np.diff(ilami)*np.sqrt(ilamc)
#Find nearest point (everytime)
dR = np.sqrt( (xeq-x0)**2.0 + (yeq-y0)**2.0 + (zeq-z0)**2.0 )
i0,j0 = np.unravel_index(dR.argmin(), dR.shape)
#Get energy bins in keV
Ki = vm[i0,j0]*ilami*1.0e-3
Kc = vm[i0,j0]*ilamc*1.0e-3
#ijEta = eeta[i0,j0,kion:]/lamscl
# 1e3 to convert back to eV
ijEta = conversion_factor*1.0e3*Kc*eeta[i0,j0,kStart:kEnd]/lamscl
Legs.append(legStr)
print(legStr)
print("\tMin/Mean/Max K = %f / %f / %f"%(Kc.min(),Kc.mean(),Kc.max()))
k0 =ijEta.argmax()
print("\tMax @ K = %f"%(Kc[k0]))
print("\tVM = %f"%(vm[i0,j0]))
plt.loglog(Kc,ijEta)
sName = "Electrons" if doElectrons else "Ions"
titStr = "%s @ (x,y,z) = (%5.2f,%5.2f,%5.2f)"%(sName,x0,y0,z0)
Ax = plt.gca()
Ax.legend(Legs,prop={'family': 'monospace'},loc='lower left')
Ax.set_xlabel("Energy [keV]")
Ax.set_ylabel("differential energy flux /cm^2/keV/str")
Ax.set_title(titStr)
Ax.grid()
#Ax.set_ylim(1.0e+10,1.0e+19)
sTag = "_e" if doElectrons else "_i"
kv.savePic("qkrcmspec%s.png"%sTag)
#plt.show()
#

View File

@@ -1,605 +0,0 @@
#!/usr/bin/env python
"""Make a quick figure of a Gamera magnetosphere run.
Author
------
Kareem Sorathia (kareem.sorathia@jhuapl.edu)
Eric Winter (eric.winter@jhuapl.edu)
"""
# Import standard modules.
import argparse
import os
# Import supplemental modules.
import matplotlib as mpl
mpl.use('Agg') # Create figures in memory.
import matplotlib.gridspec as gridspec
import matplotlib.pyplot as plt
import numpy as np
import sys
from multiprocessing import Pool
from psutil import cpu_count
import warnings
# Import project-specific modules.
import kaipy.cdaweb_utils as cdaweb_utils
import kaipy.gamera.gampp as gampp
import kaipy.gamera.rcmpp as rcmpp
import kaipy.kaiH5 as kh5
import kaipy.kaiTools as ktools
import kaipy.kaiViz as kv
import kaipy.kdefs as kdefs
# Program constants and defaults
# Program description.
description = """Creates simple multi-panel figure for RCM magnetosphere run
Top Panel - XXX
Bottom Panel - XXX
"""
# Default identifier for results to read.
default_runid = "msphere"
# Plot the last step by default.
default_step = -1
def create_command_line_parser():
"""Create the command-line argument parser.
Create the parser for command-line arguments.
Parameters
----------
None
Returns
-------
parser : argparse.ArgumentParser
Command-line argument parser for this script.
"""
parser = argparse.ArgumentParser(
description=description,
formatter_class=argparse.RawTextHelpFormatter
)
parser.add_argument(
'-beta', action='store_true', default=False,
help="Show beta instead of FTE (default: %(default)s)"
)
parser.add_argument(
"-big", action="store_true", default=False,
help="Show entire RCM grid (default: %(default)s)."
)
parser.add_argument(
'-bmin', action='store_true', default=False,
help="Show B-min (default: %(default)s)"
)
parser.add_argument(
"--debug", action="store_true", default=False,
help="Print debugging output (default: %(default)s)."
)
parser.add_argument(
"-d", type=str, metavar="directory", default=os.getcwd(),
help="Directory containing data to read (default: %(default)s)"
)
parser.add_argument(
'-elec', action='store_true', default=False,
help="Show electron pressure (default: %(default)s)"
)
parser.add_argument(
'-fac', action='store_true', default=False,
help="Show FAC (default: %(default)s)"
)
parser.add_argument(
"-id", type=str, metavar="runid", default=default_runid,
help="Run ID of data (default: %(default)s)"
)
parser.add_argument(
'-kt' , action='store_true', default=False,
help="Show temperature instead of FTE (default: %(default)s)"
)
parser.add_argument(
"-n", type=int, metavar="step", default=default_step,
help="Time slice to plot (default: %(default)s)"
)
parser.add_argument(
"--spacecraft", type=str, metavar="spacecraft", default=None,
help="Names of spacecraft to plot trajectories, separated by commas (default: %(default)s)"
)
parser.add_argument(
'-tbnc', action='store_true', default=False,
help="Show Tb instead of FTE (default: %(default)s)"
)
parser.add_argument(
"-v", "--verbose", action="store_true", default=False,
help="Print verbose output (default: %(default)s)."
)
parser.add_argument(
'-vol', action='store_true', default=False,
help="Show FTV instead of FTE (default: %(default)s)"
)
parser.add_argument(
'-wgt', action='store_true', default=False,
help="Show wRCM instead of FTE (default: %(default)s)"
)
parser.add_argument(
'-vid', action='store_true', default=False,
help="Make a video and store in mixVid directory (default: %(default)s)"
)
parser.add_argument(
'-overwrite', action='store_true', default=False,
help="Overwrite existing vid files (default: %(default)s)"
)
parser.add_argument(
'--ncpus', type=int, metavar="ncpus", default=1,
help="Number of threads to use with --vid (default: %(default)s)"
)
parser.add_argument(
'-nohash', action='store_true', default=False,
help="Don't display branch/hash info (default: %(default)s)"
)
return parser
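# Hypothetical invocations (the installed script name is assumed, not taken from this commit):
#   python qkrcmpic.py -id msphere -n 120 -vol        # one frame, right panel shows flux-tube volume
#   python qkrcmpic.py -id msphere -vid --ncpus 8     # render every step into rcmVid/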
def makePlot(i,rcmdata,nStp):
if not debug:
# Suppress the warning
warnings.filterwarnings("ignore", category=UserWarning, module="matplotlib")
warnings.filterwarnings("ignore", message="The input coordinates to pcolor are interpreted as cell centers.*")
warnings.filterwarnings("ignore", message="Log scale: values of z <= 0 have been masked.*")
warnings.filterwarnings("ignore", message="No contour levels were found within the data range.*")
# Name of plot output file.
if do_vid:
fOut = "{}.{:0>{n}d}.png".format("rcmpic", i, n=n_pad)
outPath = os.path.join(outDir, fOut)
else:
outPath = "qkrcmpic.png"
# Skip this file if it already exists and we're not supposed to overwrite
if not do_overwrite and os.path.exists(outPath) and do_vid:
return
plt.clf()
# Create the grid for laying out the subplots.
gs = gridspec.GridSpec(2, 3, height_ratios=[20, 1.0], hspace=0.025)
# Create the Axes objects for the individual plots.
AxL = fig.add_subplot(gs[0, 0])
AxM = fig.add_subplot(gs[0, 1])
AxR = fig.add_subplot(gs[0, -1])
# Create the Colorbar Axes.
AxC1 = fig.add_subplot(gs[-1, 0])
AxC2 = fig.add_subplot(gs[-1, 1])
AxC3 = fig.add_subplot(gs[-1, -1])
# Adjust the positions of the individual subplots.
AxL.set_position([0.05, 0.1, 0.25, 1.0])
AxM.set_position([0.35, 0.1, 0.25, 1.0])
AxR.set_position([0.65, 0.1, 0.25, 1.0])
# Update the subplot parameters for visibility.
AxL.tick_params(axis='both', which='both', bottom=True, top=False, left=True, right=False, labelbottom=True, labelleft=True)
AxM.tick_params(axis='both', which='both', bottom=True, top=False, left=True, right=False, labelbottom=True, labelleft=True)
AxR.tick_params(axis='both', which='both', bottom=True, top=False, left=True, right=False, labelbottom=True, labelleft=True)
# Create the colorbars.
kv.genCB(AxC1, vP, "Pressure [nPa]", cM=pCMap)
kv.genCB(AxC2, vD, "Density [#/cc]", cM=dCMap)
if doWgt:
kv.genCB(AxC3, vW, r"wRCM", cM=wCMap)
elif doVol:
kv.genCB(AxC3, vV, r"Flux-Tube Volume [Re/nT]", cM=vCMap)
elif doT:
kv.genCB(AxC3, vT, r"Temperature [keV]", cM=vCMap)
elif doBeta:
kv.genCB(AxC3, vB, r"Beta", cM=wCMap)
elif doTb:
kv.genCB(AxC3, vI, r"Tb", cM=sCMap)
elif doBMin:
kv.genCB(AxC3, vBM, r"B-Minimum [nT]", cM=sCMap)
elif doFAC:
kv.genCB(AxC3, vFAC, r"FAC [uA/m2]", cM=wCMap)
else:
kv.genCB(AxC3, vS, r"Flux-Tube Entropy [nPa (R$_{E}$/nT)$^{\gamma}$]", cM=sCMap)
# Clear the subplots (why is this needed?)
AxL.clear()
AxM.clear()
AxR.clear()
# Fetch the coordinates to plot.
try:
bmX, bmY = rcmpp.RCMEq(rcmdata, nStp, doMask=True)
except Exception:
print(f"Step #{nStp} does not exist!")
sys.exit(1)
I = rcmpp.GetMask(rcmdata, nStp)
Ni = (~I).sum()
if debug:
print("bmX = %s" % bmX)
print("bmY = %s" % bmY)
print("I = %s" % I)
print("Ni = %s" % Ni)
# Abort if a closed-field region does not exist.
if Ni == 0:
print("No closed field region in RCM, exiting ...")
exit()
# This is not working yet. A blank bar still shows up
doVerb = debug
doVerb = True
# Fetch the data to plot.
if doElec:
Prcm = rcmpp.GetVarMask(rcmdata, nStp, "Pe", I, doVerb=doVerb)
else:
Prcm = rcmpp.GetVarMask(rcmdata, nStp, "P", I, doVerb=doVerb)
Nrcm = rcmpp.GetVarMask(rcmdata, nStp, "N", I, doVerb=doVerb)
Pmhd = rcmpp.GetVarMask(rcmdata, nStp, "Pmhd", I, doVerb=doVerb)
Nmhd = rcmpp.GetVarMask(rcmdata, nStp, "Nmhd", I, doVerb=doVerb)
S = rcmpp.GetVarMask(rcmdata,nStp, "S", I, doVerb=doVerb)
toMHD = rcmpp.GetVarMask(rcmdata, nStp, "toMHD", I, doVerb=doVerb)
pot, pVals = rcmpp.GetPotential(rcmdata, nStp, I, doVerb=doVerb)
wRCM = None
if doWgt:
wRCM = rcmpp.GetVarMask(rcmdata, nStp, "wIMAG", I, doVerb=doVerb)
bVol = None
if doVol:
bVol = rcmpp.GetVarMask(rcmdata, nStp, "bVol", I, doVerb=doVerb)
beta = None
if doBeta:
beta = rcmpp.GetVarMask(rcmdata, nStp, "beta", I, doVerb=doVerb)
Tb = None
if doTb:
Tb = rcmpp.GetVarMask(rcmdata,nStp,"Tb", I, doVerb=doVerb)
Bmin = None
if doBMin:
Bmin = rcmpp.GetVarMask(rcmdata, nStp, "bMin", I, doVerb=doVerb)
toRCM = None
if doBig:
toRCM = rcmpp.GetVarMask(rcmdata, nStp, "IOpen", I, doVerb=doVerb)
jBirk = None
if doFAC:
jBirk = rcmpp.GetVarMask(rcmdata, nStp, "birk", I, doVerb=doVerb)
if debug:
print("Prcm = %s" % Prcm)
print("Nrcm = %s" % Nrcm)
print("Pmhd = %s" % Pmhd)
print("Nmhd = %s" % Nmhd)
print("S = %s" % S)
print("toMHD = %s" % toMHD)
print("pot = %s" % pot)
print("pVals = %s" % pVals)
print("wRCM = %s" % wRCM)
print("bVol = %s" % bVol)
print("beta = %s" % beta)
print("Tb = %s" % Tb)
print("Bmin = %s" % Bmin)
print("toRCM = %s" % toRCM)
print("jBirk = %s" % jBirk)
# Read the date of this step from the data file.
fStr = os.path.join(fdir, ftag + '.h5')
if debug or do_vid:
print("fStr = %s" % fStr)
MJD = kh5.tStep(fStr, nStp, aID="MJD")
if debug:
print("MJD = %s" % MJD)
utS = ktools.MJD2UT([MJD])
if debug:
print("utS = %s" % utS)
utDT = utS[0]
if debug:
print("utDT = %s" % utDT)
# Fetch the date of the earliest available step.
n_steps, step_numbers = kh5.cntSteps(fStr)
MJD = kh5.tStep(fStr, step_numbers[0], aID="MJD")
if debug:
print("MJD = %s" % MJD)
utS = ktools.MJD2UT([MJD])
if debug:
print("utS = %s" % utS)
ut0 = utS[0]
if debug:
print("ut0 = %s" % ut0)
# If needed, fetch the trajectory of each spacecraft from CDAWeb.
sc_X = []
sc_Y = []
sc_Z = []
if spacecraft:
spacecrafts = spacecraft.split(',')
for (i_sc, sc) in enumerate(spacecrafts):
if debug:
print("i_sc, sc = %s, %s" % (i_sc, sc))
# Fetch the spacecraft trajectory in Solar Magnetic (SM)
# Cartesian coordinates between the start and end times.
sc_x, sc_y, sc_z = cdaweb_utils.fetch_spacecraft_SM_trajectory(
sc, ut0, utDT
)
if debug:
print("sc_x, sc_y, sc_z = %s, %s, %s" % (sc_x, sc_y, sc_z))
sc_X.append(sc_x)
sc_Y.append(sc_y)
sc_Z.append(sc_z)
# Skip if no trajectory found.
if sc_x is None:
print("No trajectory found for spacecraft %s." % sc)
continue
# Convert coordinates to units of Earth radius.
CM_TO_KM = 1e-5 # Centimeters to kilometers
Re_km = kdefs.Re_cgs*CM_TO_KM # Earth radius in kilometers
sc_X[-1] = sc_x/Re_km
sc_Y[-1] = sc_y/Re_km
sc_Z[-1] = sc_z/Re_km
if debug:
print("sc_X, sc_Y, sc_Z = %s, %s, %s" % (sc_X, sc_Y, sc_Z))
# Assemble the left-hand plot.
AxL.set_title("RCM Pressure")
AxL.pcolor(bmX, bmY, Prcm, norm=vP, cmap=pCMap, shading='auto')
AxL.contour(bmX, bmY, pot, pVals, colors='grey', linewidths=cLW)
kv.addEarth2D(ax=AxL)
kv.SetAx(xyBds, AxL)
# Assemble the middle plot.
AxM.set_title("MHD Pressure")
AxM.pcolor(bmX, bmY, Pmhd, norm=vP, cmap=pCMap, shading='auto')
AxM.contour(bmX, bmY, Nmhd, cVals, norm=vD, cmap=dCMap, linewidths=cLW)
kv.addEarth2D(ax=AxM)
kv.SetAx(xyBds, AxM)
# Gather all Axes.
Axs = [AxL, AxM, AxR]
# Plot the full domain if needed.
if nStp > 0 and doBig:
for Ax in Axs:
CS1 = Ax.contour(bmX, bmY, toMHD, [0.5], colors=MHDCol, linewidths=MHDLW)
manloc = [(0.0, 8.0)]
fmt = {}
fmt[0.5] = 'MHD' # Key on a float is a bad idea.
Ax.clabel(
CS1, CS1.levels[::2], inline=True, fmt=fmt, fontsize=5,
inline_spacing=25, manual=manloc
)
CS2 = Ax.contour(
bmX, bmY, toRCM, [-0.5], colors=rcmpp.rcmCol,
linewidths=MHDLW, linestyles='solid'
)
# Assemble the right-hand plot.
if doWgt:
AxR.set_title("RCM Weight")
AxR.pcolor(bmX, bmY, wRCM, norm=vW, cmap=wCMap, shading='auto')
elif doVol:
AxR.set_title("Flux-tube Volume")
AxR.pcolor(bmX, bmY, bVol, norm=vV, cmap=vCMap, shading='auto')
elif doT:
kT = 6.25*Prcm/Nrcm
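# 6.25 converts P [nPa] / n [#/cc] to a temperature in keV via P = n*kB*T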
AxR.set_title("RCM Temperature")
AxR.pcolor(bmX, bmY, kT, norm=vT, cmap=vCMap, shading='auto')
elif doBeta:
AxR.set_title("Average Beta")
AxR.pcolor(bmX, bmY, beta, norm=vB, cmap=wCMap, shading='auto')
elif doTb:
AxR.set_title("Ingestion timescale")
AxR.pcolor(bmX, bmY, Tb, norm=vI, cmap=sCMap, shading='auto')
elif doBMin:
AxR.set_title("B Minimum")
AxR.pcolor(bmX, bmY, 1.0e+9*Bmin, norm=vBM, cmap=sCMap, shading='auto')
elif doFAC:
AxR.set_title("Vasyliunas FAC")
AxR.pcolor(bmX, bmY, jBirk, norm=vFAC, cmap=wCMap, shading='auto')
else:
AxR.set_title("Flux-Tube Entropy")
AxR.pcolor(bmX, bmY, S, norm=vS, cmap=sCMap, shading='auto')
AxR.plot(bmX, bmY, color=eCol, linewidth=eLW)
AxR.plot(bmX.T, bmY.T, color=eCol, linewidth=eLW)
kv.addEarth2D(ax=AxR)
kv.SetAx(xyBds, AxR)
# Overlay spacecraft trajectories if requested.
if spacecraft:
spacecrafts = spacecraft.split(',')
for (i_sc, sc) in enumerate(spacecrafts):
if debug:
print("i_sc, sc = %s, %s" % (i_sc, sc))
# Skip this spacecraft if no trajectory is available.
if sc_X[i_sc] is None:
continue
# Plot a labelled trajectory of the spacecraft. Also plot a larger
# dot at the last point in the trajectory.
# NOTE: Need to add a filter to not plot spacecraft positions
# outside of the plot limits. Many X values are > +10, which puts
# them off the right side of the plots.
# Left plot
SPACECRAFT_COLORS = list(mpl.colors.TABLEAU_COLORS.keys())
color = SPACECRAFT_COLORS[i_sc % len(SPACECRAFT_COLORS)]
AxL.plot(sc_X[i_sc], sc_Y[i_sc], marker=None, linewidth=1, c=color)
AxL.plot(sc_X[i_sc][-1], sc_Y[i_sc][-1], 'o', c=color)
x_nudge = 1.0
y_nudge = 1.0
AxL.text(sc_X[i_sc][-1] + x_nudge, sc_Y[i_sc][-1] + y_nudge, sc, c=color)
# Middle plot
AxM.plot(sc_X[i_sc], sc_Y[i_sc], marker=None, linewidth=1, c=color)
AxM.plot(sc_X[i_sc][-1], sc_Y[i_sc][-1], 'o', c=color)
x_nudge = 1.0
y_nudge = 1.0
AxM.text(sc_X[i_sc][-1] + x_nudge, sc_Y[i_sc][-1] + y_nudge, sc, c=color)
# Right plot
AxR.plot(sc_X[i_sc], sc_Y[i_sc], marker=None, linewidth=1, c=color)
AxR.plot(sc_X[i_sc][-1], sc_Y[i_sc][-1], 'o', c=color)
x_nudge = 1.0
y_nudge = 1.0
AxR.text(sc_X[i_sc][-1] + x_nudge, sc_Y[i_sc][-1] + y_nudge, sc, c=color)
# Set left labels for subplots.
ylabel = 'Y [R$_E$]'
AxL.set_ylabel(ylabel)
# Set bottom labels for subplots.
xlabel = 'X [R$_E$]'
AxL.set_xlabel(xlabel)
AxM.set_xlabel(xlabel)
AxR.set_xlabel(xlabel)
# Create the title for the complete figure.
tStr = "\n\n\n" + utDT.strftime("%m/%d/%Y, %H:%M:%S")
plt.suptitle(tStr, fontsize="x-large")
# Add Branch and Hash info
if do_hash:
fig.text(0.1,0.85,f"branch/commit: {branch}/{githash}", fontsize=4)
# Adjust layout to reduce white space
plt.subplots_adjust(top=0.9, bottom=0.1, hspace=0.025)
# Save the figure to a file.
kv.savePic(outPath, dpiQ=300)
if __name__ == "__main__":
"""Make a quick figure of a Gamera magnetosphere run."""
# Set up the command-line parser.
parser = create_command_line_parser()
# Parse the command-line arguments.
args = parser.parse_args()
doBeta = args.beta
doBig = args.big
doBMin = args.bmin
fdir = args.d
debug = args.debug
doElec = args.elec
doFAC = args.fac
ftag = args.id + ".mhdrcm"
doT = args.kt
nStp = args.n
spacecraft = args.spacecraft
doTb = args.tbnc
verbose = args.verbose
doVol = args.vol
doWgt = args.wgt
do_vid = args.vid
do_overwrite = args.overwrite
do_hash = not args.nohash
ncpus = args.ncpus
if debug:
print("args = %s" % args)
# Defaults from external modules.
MHDCol = rcmpp.MHDCol
MHDLW = rcmpp.MHDLW
rcmpp.doEll = not doBig
# Figure parameters
xTail = -20.0
xSun = 10.0
yMax = 15.0
xyBds = [xTail, xSun, -yMax, yMax]
figSz = (12, 6)
eCol = "slategrey"
eLW = 0.15
cLW = 0.5
vP = kv.genNorm(1.0e-1, 1.0e+2, doLog=True)
vS = kv.genNorm(0.0, 0.25)
vW = kv.genNorm(0, 1)
vV = kv.genNorm(1.0e-2, 1.0, doLog=True)
vT = kv.genNorm(0, 50)
vB = kv.genNorm(1.0e-2, 1.0e+2, doLog=True)
vI = kv.genNorm(0, 180)
vBM = kv.genNorm(0, 100)
vFAC = kv.genNorm(2.0)
Nc = 10
nMin = 1.0
nMax = 1.0e+3
vD = kv.genNorm(nMin, nMax, doLog=True)
cVals = np.logspace(1.0, 3.0, Nc)
pCMap = "viridis"
#sCMap = "terrain"
sCMap = "turbo"
dCMap = "cool"
wCMap = "bwr_r"
vCMap = "gnuplot2"
# Read the RCM results.
rcmdata = gampp.GameraPipe(fdir, ftag)
fnrcm = os.path.join(fdir, f'{ftag}.h5')
# Get branch/hash info
if do_hash:
branch = kh5.GetBranch(fnrcm)
githash = kh5.GetHash(fnrcm)
if debug:
print(f'branch/commit: {branch}/{githash}')
if debug:
print("rcmdata = %s" % rcmdata)
# Set up Figure Parameters
# Set global plot font options.
mpl.rc('mathtext', fontset='stixsans', default='regular')
mpl.rc('font', size=10)
# Init figure.
fig = plt.figure(figsize=figSz)
if not do_vid: # Then we are making a single image, keep original functionality
if nStp < 0: # ANY negative index gets the last step.
nStp = rcmdata.sFin
print("Using Step %d"%(nStp))
if debug:
print("nStp = %s" % nStp)
makePlot(nStp,rcmdata,nStp)
else: # Then we make a video, i.e. series of images saved to rcmVid
# Get video loop parameters
s0 = max(rcmdata.s0,1) # Skip Step#0
sFin = rcmdata.sFin
nsteps = sFin - s0
sIds = np.array(range(s0,sFin))
outDir = 'rcmVid'
kh5.CheckDirOrMake(outDir)
# How many 0's do we need for filenames?
n_pad = int(np.log10(nsteps)) + 1
if ncpus == 1:
for i, nStp in enumerate(sIds):
makePlot(i,rcmdata, nStp)
else:
# Make list of parallel arguments
ag = ((i,rcmdata,nStp) for i, nStp in enumerate(sIds) )
# Check we're not exceeding cpu_count on computer
ncpus = min(int(ncpus),cpu_count(logical=False))
print('Doing multithreading on ',ncpus,' threads')
# Do parallel job
with Pool(processes=ncpus) as pl:
pl.starmap(makePlot,ag)
print("Done making all the images. Go to mixVid folder")

View File

@@ -1,131 +0,0 @@
#!/usr/bin/env python
################ first figure out the time ################
#standard python
import sys
import os
import argparse
import pickle
from argparse import RawTextHelpFormatter
# Numpy and matplotlib
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
#Kaipy and related
import h5py
import kaipy.kaiH5 as kaiH5
import kaipy.remix.remix as remix
import kaipy.kaiViz as kv
from kaipy.kaiTools import MJD2UT
MainS = """Creates a summary plot of global ionospheric paramters including
CPCP, HP, FAC. Creates a PNG image file and python pickle file with the data.
"""
ftag = 'msphere'
ptag = '.'
parser = argparse.ArgumentParser(description=MainS,
formatter_class=RawTextHelpFormatter)
parser.add_argument('-id',type=str,metavar='runid',default=ftag,
help='RunID of data (default: %(default)s)')
parser.add_argument('-path',type=str,metavar='path',default=ptag,
help='Path to directory containing REMIX files (default: %(default)s)')
args = parser.parse_args()
#Open the file and read the time information
remixFile = os.path.join(args.path,args.id+'.mix.h5')
assert(os.path.isfile(remixFile))
nsteps,sIds=kaiH5.cntSteps(remixFile)
mjd = kaiH5.getTs(remixFile,sIds,aID='MJD')
utall = MJD2UT(mjd)
#Setup the dictionary data structures to handle the timeseries information
hemispheres = ('NORTH','SOUTH')
cpcp = {'NORTH':[],'SOUTH':[]}
hp = {'NORTH':[],'SOUTH':[]}
ipfac = {'NORTH':[],'SOUTH':[]}
#Read the data and calculate the integrated quantities
ri = 6500.0e3
for hemi in hemispheres:
ion = remix.remix(remixFile,sIds.min())
ion.init_vars(hemi)
x = ion.ion['X']
y = ion.ion['Y']
areaMixGrid = ion.calcFaceAreas(x,y)*ri*ri
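# The MIX grid is the same for every step, so the cell face areas are computed once
# per hemisphere from the first step and scaled by ri^2 to give areas in m^2.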
for Id in sorted(sIds):
ion = remix.remix(remixFile,Id)
ion.init_vars(hemi)
# Cross polar cap potential
cpcp[hemi].append(np.max(ion.variables['potential']['data'])-
np.min(ion.variables['potential']['data']))
# Integrated Positive FAC
fac = ion.variables['current']['data']
fac[fac < 0] = 0.0
pfac = areaMixGrid*fac[:,:]
ipfac[hemi].append(pfac.sum()/1.0e12)
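# fac is assumed to be in uA/m^2, so the area integral is in uA and 1.0e12 converts to MA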
# Hemispheric Power
flux = ion.variables['flux']['data']
energy = ion.variables['energy']['data']
hpcalc = areaMixGrid*energy[:,:]*flux[:,:]
# Convert energy flux [keV/cm^2/s] integrated over area [m^2] to GW
hp[hemi].append(hpcalc.sum()*1.6e-21)
cpcp['units']='kV'
cpcp['name']=r'$\Phi$'
hp['units']='GW'
hp['name']='HP'
ipfac['units']='MA'
ipfac['name']='FAC'
#Plot the figure
figsize = (10,10)
fig = plt.figure(figsize=figsize)
gs = fig.add_gridspec(3,1)
Ax1 = fig.add_subplot(gs[0,0])
Ax2 = fig.add_subplot(gs[1,0],sharex=Ax1)
Ax3 = fig.add_subplot(gs[2,0],sharex=Ax1)
Ax1.plot(utall[1:],cpcp['NORTH'][1:])
Ax1.plot(utall[1:],cpcp['SOUTH'][1:])
kv.SetAxLabs(Ax1,None,cpcp['name']+' ['+cpcp['units']+']')
Ax2.plot(utall[1:],ipfac['NORTH'][1:])
Ax2.plot(utall[1:],ipfac['SOUTH'][1:])
kv.SetAxLabs(Ax2,None,ipfac['name']+' ['+ipfac['units']+']',doLeft=False)
Ax3.plot(utall[1:],hp['NORTH'][1:])
Ax3.plot(utall[1:],hp['SOUTH'][1:])
kv.SetAxLabs(Ax3,"UT",hp['name']+' ['+hp['units']+']')
kv.SetAxDate(Ax3)
Ax1.legend(hemispheres,loc='best')
Ax1.set_title(remixFile)
plt.subplots_adjust(hspace=0)
fn = os.path.join(ptag,'remixTimeSeries.png')
kv.savePic(fn)
#Save the results to python pickle file
fn = os.path.join(ptag,'remixTimeSeries.pkl')
fh = open(fn,'wb')
pickle.dump([cpcp,ipfac,hp,utall],fh)
fh.close()
#As well as a HDF5 file
with h5py.File(os.path.join(ptag,'remixTimeSeries.h5'),'w') as f:
dset = f.create_dataset('MJD',data=mjd)
for hemi in hemispheres:
dset = f.create_dataset('cpcp'+hemi,data=cpcp[hemi])
dset.attrs['units'] = cpcp['units']
dset.attrs['name'] = cpcp['name']
dset = f.create_dataset('hp'+hemi,data=hp[hemi])
dset.attrs['units'] = hp['units']
dset.attrs['name'] = hp['name']
dset = f.create_dataset('ipfac'+hemi,data=ipfac[hemi])
dset.attrs['units'] = ipfac['units']
dset.attrs['name'] = ipfac['name']
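# A minimal read-back sketch for the files written above (paths assumed unchanged):
#   with open(os.path.join(ptag,'remixTimeSeries.pkl'),'rb') as fh:
#       cpcp, ipfac, hp, utall = pickle.load(fh)
#   with h5py.File(os.path.join(ptag,'remixTimeSeries.h5'),'r') as f:
#       mjd = f['MJD'][:]
#       cpcpNorth = f['cpcpNORTH'][:]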

View File

@@ -1,92 +0,0 @@
#!/usr/bin/env python
#Creates a summary plot of the solar wind conditions stored in a bcwind file
import argparse
from argparse import RawTextHelpFormatter
import kaipy.kaiH5 as kh5
import kaipy.solarWind.swBCplots as swBCplots
import os
import datetime
import sys
if __name__ == "__main__":
#Defaults
fdir = os.getcwd()
swtag = "bcwind.h5"
imgtype = 'png'
MainS = """Creates simple multi-panel figure for the
solar wind conditions within the bcwind file and saves it as an image named after the solar wind file
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('-d',type=str,metavar="directory",default=fdir,help="Directory to read from (default: %(default)s)")
parser.add_argument('-id',type=str,metavar="swid",default=swtag,help="Solar wind file used (default: %(default)s)")
parser.add_argument('-type',type=str,metavar="type",default=imgtype,help="Image type (default: %(default)s)")
#Finalize parsing
args = parser.parse_args()
fdir = args.d
swtag = args.id
imgtype = args.type
allowTypes = ['pdf','png','jpeg','jpg']
if not (imgtype in allowTypes):
print('Image type not supported, please try one of:',*allowTypes)
sys.exit()
if (imgtype == 'pdf'):
#For some reason trimming PDF files doesn't work
doTrim = False
else:
doTrim = True
swIn = os.path.join(fdir,swtag)
kh5.CheckOrDie(swIn)
# Name the output file the same as the solarwind file with the image extension
fOut = swtag.split('.')[0]+'.'+imgtype
# pulling UT variable for plotting
t0Fmts = ["%Y-%m-%d %H:%M:%S","%Y-%m-%dT%H:%M:%S.%f"]
utfmt='%H:%M \n%Y-%m-%d'
UTall = kh5.PullVar(swIn,"UT")
#Identify the correct time format
t0Fmt = None
for tfmt in t0Fmts:
try:
datetime.datetime.strptime(UTall[1].decode('utf-8'),tfmt)
t0Fmt = tfmt
break # datetime parse succeeded
except ValueError:
pass # datetime parse failed
if t0Fmt is None:
print("Time format in bcwind.h5 did not match any expected format.")
sys.exit()
utall = []
for n in range(len(UTall)):
utall.append(datetime.datetime.strptime(UTall[n].decode('utf-8'),t0Fmt))
# pulling the solar wind values from the table
varlist = kh5.getRootVars(swIn)
D = kh5.PullVar(swIn,"D")
Vx = kh5.PullVar(swIn,"Vx")
Vy = kh5.PullVar(swIn,"Vy")
Vz = kh5.PullVar(swIn,"Vz")
Bx = kh5.PullVar(swIn,"Bx")
By = kh5.PullVar(swIn,"By")
Bz = kh5.PullVar(swIn,"Bz")
Temp = kh5.PullVar(swIn,"Temp")
Tsec = kh5.PullVar(swIn,"T")
SYMH = kh5.PullVar(swIn,"symh")
if ('Interped' in varlist):
pltInterp = kh5.PullVar(swIn,"Interped")
else:
pltInterp = 0*D
doEps = False
swBCplots.swQuickPlot(UTall,D,Temp,Vx,Vy,Vz,Bx,By,Bz,SYMH,pltInterp,fOut,doEps=doEps,doTrim=doTrim,t0fmt=t0Fmt)

View File

@@ -1,95 +0,0 @@
#!/usr/bin/env python
#Make a quick figure of a CHIMP particle trajectory
import argparse
from argparse import RawTextHelpFormatter
import matplotlib as mpl
import matplotlib.pyplot as plt
import kaipy.kaiViz as kv
import kaipy.kaiH5 as kh5
import kaipy.chimp.chimph5p as ch5p
import numpy as np
from mpl_toolkits.mplot3d import axes3d, Axes3D
#Adds a simple Earth to 3d pics
def addEarth(ax,Re=1):
N = 100
T = 0.25 #Transparency
S = 4 #Stride
phi = np.linspace(0, 2 * np.pi, N)
theta = np.linspace(0, np.pi, N)
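# The outer products below build N x N meshes of x, y, z on a sphere of radius Re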
xm = Re * np.outer(np.cos(phi), np.sin(theta))
ym = Re * np.outer(np.sin(phi), np.sin(theta))
zm = Re * np.outer(np.ones(np.size(phi)), np.cos(theta))
ax.plot_surface(xm, ym, zm,rstride=S, cstride=S, color='b',alpha=T)
#Forces equal spacing on a 3d plot (surprisingly annoying to do)
def axEqual3d(ax):
x_limits = ax.get_xlim3d()
y_limits = ax.get_ylim3d()
z_limits = ax.get_zlim3d()
x_range = x_limits[1] - x_limits[0]; x_mean = np.mean(x_limits)
y_range = y_limits[1] - y_limits[0]; y_mean = np.mean(y_limits)
z_range = z_limits[1] - z_limits[0]; z_mean = np.mean(z_limits)
# The plot bounding box is a sphere in the sense of the infinity
# norm, hence I call half the max range the plot radius.
plot_radius = 0.5*max([x_range, y_range, z_range])
ax.set_xlim3d([x_mean - plot_radius, x_mean + plot_radius])
ax.set_ylim3d([y_mean - plot_radius, y_mean + plot_radius])
ax.set_zlim3d([z_mean - plot_radius, z_mean + plot_radius])
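# Note: recent Matplotlib (>= 3.6) also accepts ax.set_aspect('equal') on 3D axes,
# which can replace this helper.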
if __name__ == "__main__":
#Arg parsing
id0 = 0
LW = 0.5
fs = 16
MainS = """Plots trajectory of specified CHIMP particle in H5p file
ID: Particle ID
Color specified by energy [keV]
"""
parser = argparse.ArgumentParser(description=MainS, formatter_class=RawTextHelpFormatter)
parser.add_argument('h5p',metavar='input.h5part',help="Input H5Part file")
parser.add_argument('-id',type=int,metavar="id",default=id0,help="Display particle trajectory w/ given ID (default: %(default)s)")
args = parser.parse_args()
fIn = args.h5p
id0 = args.id
print("Reading from %s and looking for particle %d"%(fIn,id0))
kh5.CheckOrDie(fIn)
Ntp,nS,nE = ch5p.bndTPs(fIn)
print("\tFound %d TPs, IDs %d to %d"%(Ntp,nS,nE))
t,x = ch5p.getH5pid(fIn,"x",id0)
t,y = ch5p.getH5pid(fIn,"y",id0)
t,z = ch5p.getH5pid(fIn,"z",id0)
t,K = ch5p.getH5pid(fIn,"K",id0)
vMin = 0.0
vMax = K.max()
cbLab = "Energy [keV]"
fig = plt.figure()
ax = fig.add_subplot(projection='3d') # Axes3D(fig) no longer attaches itself to the figure in recent Matplotlib
addEarth(ax)
sct3d = ax.scatter(x,y,z,c=K,cmap=plt.get_cmap("cool"),s=20,vmin=vMin,vmax=vMax)
ax.plot(x,y,z,'k',linewidth=LW)
plt.colorbar(sct3d,label=cbLab,shrink=0.8)
ax.set_xlabel("X [Re]")
ax.set_ylabel("Y [Re]")
ax.set_zlabel("Z [Re]")
titS = "Particle trajectory, pID = %d"%(id0)
ax.set_title(titS,fontsize=fs)
axEqual3d(ax)
plt.show()