From f22d2f426773af19d597246fd7d20b9535db31f1 Mon Sep 17 00:00:00 2001
From: wiltbemj
Date: Thu, 24 Apr 2025 09:03:55 -0600
Subject: [PATCH] Remove HDF4 capabilities related to LFM

---
 docs/requirements.txt              |   1 -
 docs/source/kaipy/requirements.rst |   1 -
 kaipy/gamera/gamGrids.py           |  81 --------------
 kaipy/lfm2kaiju.py                 | 168 -----------------------------
 pyproject.toml                     |   1 -
 requirements.txt                   |   1 -
 setup.py                           |   1 -
 7 files changed, 254 deletions(-)
 delete mode 100644 kaipy/lfm2kaiju.py

diff --git a/docs/requirements.txt b/docs/requirements.txt
index e8eaa69..3909879 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -8,7 +8,6 @@ jupyterlab
 matplotlib
 pandas
 progressbar
-pyhdf
 pyspedas
 pytest
 slack_sdk
diff --git a/docs/source/kaipy/requirements.rst b/docs/source/kaipy/requirements.rst
index cf8d1fc..6942676 100644
--- a/docs/source/kaipy/requirements.rst
+++ b/docs/source/kaipy/requirements.rst
@@ -13,7 +13,6 @@ kaipy requires:
 - matplotlib
 - pandas
 - progressbar
-- pyhdf
 - pyspedas
 - pytest
 - slack_sdk
diff --git a/kaipy/gamera/gamGrids.py b/kaipy/gamera/gamGrids.py
index fed07c3..0959e21 100644
--- a/kaipy/gamera/gamGrids.py
+++ b/kaipy/gamera/gamGrids.py
@@ -4,8 +4,6 @@ from scipy import interpolate
 import sys
 import os
 from scipy.ndimage import gaussian_filter
-import pyhdf
-from pyhdf.SD import SD, SDC
 import matplotlib as mpl
 import matplotlib.cm as cm
 import matplotlib.pyplot as plt
@@ -22,29 +20,6 @@ Ni0 = 32
 Nj0 = 64
 Nk0 = 32
 
-#Gen LFM egg
-def genLFM(Ni=Ni0, Nj=Nj0, Rin=3.0, Rout=25.0, fIn="lfm.hdf", TINY=1.0e-8):
-    """
-    Generate a regridded LFM data.
-
-    Args:
-        Ni (int): Number of grid points in the x-direction.
-        Nj (int): Number of grid points in the y-direction.
-        Rin (float): Inner radius of the region of interest.
-        Rout (float): Outer radius of the region of interest.
-        fIn (str): Input file name for LFM data.
-        TINY (float): A small value used for numerical stability.
-
-    Returns:
-        XX (numpy.ndarray): Regridded x-coordinates.
-        YY (numpy.ndarray): Regridded y-coordinates.
-    """
-    # Get from LFM data
-    xx0, yy0 = getLFM(fIn=fIn, Rin=Rin, Rout=Rout)
-    XX, YY = regrid(xx0, yy0, Ni, Nj, TINY=TINY)
-
-    return XX, YY
-
 #Gen elliptical grid
 def genEllip(Ni=Ni0,Nj=Nj0,Rin=3.0,Rout=30,TINY=1.0e-8):
     """
@@ -936,62 +911,6 @@ def VizGrid(XX,YY,xxG=None,yyG=None,doGhost=False,doShow=True,xyBds=None,fOut="g
     if (doShow):
         plt.show()
 
-#Read in LFM grid, return upper half plane corners
-#Use Rin to cut out inner region, Rout to guarantee at last that much
-def getLFM(fIn, Rin=3.0, Rout=25.0):
-    """
-    Reads LFM grid data from an HDF file and returns the projected and scaled x and y arrays.
-
-    Args:
-        fIn (str): The path to the HDF file.
-        Rin (float): The inner radius for scaling the x and y arrays. Default is 3.0.
-        Rout (float): The outer radius for cutting out the outer regions of the x and y arrays. Default is 25.0.
-
-    Returns:
-        xxi (ndarray): The projected and scaled x array.
-        yyi (ndarray): The projected and scaled y array.
-    """
-    hdffile = pyhdf.SD.SD(fIn)
-    # Grab x/y/z arrays from HDF file. Scale by Re
-    # LFM is k,j,i ordering
-    iRe = 1.0/kdefs.Re_cgs
-    x3 = iRe * np.double(hdffile.select('X_grid').get())
-    y3 = iRe * np.double(hdffile.select('Y_grid').get())
-    z3 = iRe * np.double(hdffile.select('Z_grid').get())
-    lfmNc = x3.shape  # Number of corners (k,j,i)
-    nk = x3.shape[0] - 1
-    nj = x3.shape[1] - 1
-    ni = x3.shape[2] - 1
-
-    print("Reading LFM grid from %s, size (%d,%d,%d)" % (fIn, ni, nj, nk))
-
-    # Project to plane, transpose and cut
-    ks = 0  # Upper half x-y plane
-    xxi = x3[ks, :, :].squeeze().T
-    yyi = y3[ks, :, :].squeeze().T
-
-    # Scale so that inner is Rin
-    rr = np.sqrt(xxi ** 2.0 + yyi ** 2.0)
-    lfmIn = rr.min()
-    xyScl = Rin / lfmIn
-    xxi = xxi * xyScl
-    yyi = yyi * xyScl
-
-    rr = np.sqrt(xxi ** 2.0 + yyi ** 2.0)
-
-    # Get min/max radius per I shell
-    rMin = rr.min(axis=1)
-    rMax = rr.max(axis=1)
-    inCut = (rMin >= Rin).argmax()
-    outCut = (rMin >= Rout).argmax()
-
-    # Cut out outer regions to create egg
-    xxi = xxi[0:outCut + 1, :]
-    yyi = yyi[0:outCut + 1, :]
-
-    return xxi, yyi
-
-
 def LoadTabG(fIn="lfmG", Nc=0):
     """
     LoadTabG function loads tabular data from files LFM X and Y grid files.
diff --git a/kaipy/lfm2kaiju.py b/kaipy/lfm2kaiju.py
deleted file mode 100644
index fd68a32..0000000
--- a/kaipy/lfm2kaiju.py
+++ /dev/null
@@ -1,168 +0,0 @@
-import numpy as np
-import kaipy.gamera.gamGrids as gg
-from pyhdf.SD import SD, SDC
-import h5py
-
-clight = 2.9979e+10 #Speed of light [cm/s]
-Mp = 1.6726219e-24 #g
-gamma = (5.0/3)
-
-def lfm2gg(fIn, fOut, doEarth=True, doJupiter=False):
-    """
-    Convert LFM grid data to GG (Geospace General) format.
-
-    Args:
-        fIn (str): Input file path of the LFM grid data in HDF format.
-        fOut (str): Output file path for the converted GG format.
-        doEarth (bool, optional): Flag to indicate whether to use Earth scaling. Defaults to True.
-        doJupiter (bool, optional): Flag to indicate whether to use Jovian scaling. Defaults to False.
-
-    Returns:
-        Output file in GG format.
-    """
-    # Choose scaling
-    if doEarth:
-        xScl = gg.Re
-        print("Using Earth scaling ...")
-    elif doJupiter:
-        xScl = gg.Rj
-        print("Using Jovian scaling ...")
-    iScl = 1 / xScl
-    hdffile = SD(fIn)
-    # Grab x/y/z arrays from HDF file. Scale by Re/Rj/Rs
-    # LFM is k,j,i ordering
-    x3 = iScl * np.double(hdffile.select('X_grid').get())
-    y3 = iScl * np.double(hdffile.select('Y_grid').get())
-    z3 = iScl * np.double(hdffile.select('Z_grid').get())
-    lfmNc = x3.shape  # Number of corners (k,j,i)
-    nk = x3.shape[0] - 1
-    nj = x3.shape[1] - 1
-    ni = x3.shape[2] - 1
-
-    print("Reading LFM grid from %s, size (%d,%d,%d)" % (fIn, ni, nj, nk))
-
-    with h5py.File(fOut, 'w') as hf:
-        hf.create_dataset("X", data=x3)
-        hf.create_dataset("Y", data=y3)
-        hf.create_dataset("Z", data=z3)
-
-
-#Get LFM times
-def lfmTimes(hdfs):
-    """
-    Get the time attribute from a list of files.
-
-    Parameters:
-        hdfs (list): A list of file paths.
-
-    Returns:
-        Ts (numpy.ndarray): An array containing the time attributes from the input files.
-    """
-    Ts = [SD(fIn).attributes().get('time') for fIn in hdfs]
-    return np.array(Ts)
-#Get LFM fields
-def lfmFields(fIn):
-    """
-    Retrieves cell-centered fields from an HDF file.
-
-    Args:
-        fIn (str): The path to the HDF file.
-
-    Returns:
-        tuple: A tuple containing the following fields:
-            - Vx3cc (numpy.ndarray): X-component of the cell-centered velocity field.
-            - Vy3cc (numpy.ndarray): Y-component of the cell-centered velocity field.
-            - Vz3cc (numpy.ndarray): Z-component of the cell-centered velocity field.
-            - Bx3cc (numpy.ndarray): X-component of the cell-centered magnetic field.
-            - By3cc (numpy.ndarray): Y-component of the cell-centered magnetic field.
-            - Bz3cc (numpy.ndarray): Z-component of the cell-centered magnetic field.
-    """
-    hdffile = SD(fIn)
-    #Get cell-centered fields
-    Bx3cc, By3cc, Bz3cc = getHDFVec(hdffile, 'b')
-    Vx3cc, Vy3cc, Vz3cc = getHDFVec(hdffile, 'v')
-
-    return Vx3cc, Vy3cc, Vz3cc, Bx3cc, By3cc, Bz3cc
-
-#Get LFM MHD variables
-#Convert (D/Cs) -> (n,P)
-#Returns units (#/cm3) and (nPa)
-
-def lfmFlow(fIn):
-    """
-    Calculate the number density and pressure of a fluid flow.
-
-    Parameters:
-        fIn (str): The input file path.
-
-    Returns:
-        tuple (numpy.ndarray): A tuple containing the number density (n3) and pressure (P3) of the fluid flow.
-
-    """
-    hdffile = SD(fIn)
-
-    #Get soundspeed [km/s]
-    C3 = getHDFScl(hdffile,"c",Scl=1.0e-5)
-    #Get rho [g/cm3]
-    D3 = getHDFScl(hdffile,"rho")
-
-    #Conversion to MKS for P in Pascals
-    D_mks = (D3*1.0e-3)*( (1.0e+2)**3.0 ) #kg/m3
-    C_mks = C3*1.0e+3 #m/s
-
-    P3 = 1.0e+9*D_mks*C_mks*C_mks/gamma #nPa
-    n3 = D3/Mp #Number density, #/cm3
-
-    return n3,P3
-
-#Get data from HDF-4 file
-def getHDFVec(hdffile, qi, Scl=1.0):
-    """
-    Retrieves vector components from an HDF file.
-
-    Args:
-        hdffile (HDF file object): The HDF file object from which to retrieve the vector components.
-        qi (str): The base name of the vector components.
-        Scl (float, optional): Scaling factor for the vector components. Default is 1.0.
-
-    Returns:
-        tuple: A tuple containing the following fields:
-            Qx3cc (ndarray): The x-component of the vector, with corners removed.
-            Qy3cc (ndarray): The y-component of the vector, with corners removed.
-            Qz3cc (ndarray): The z-component of the vector, with corners removed.
-    """
-    qxi = qi + 'x_'
-    qyi = qi + 'y_'
-    qzi = qi + 'z_'
-
-    Qx3 = hdffile.select(qxi).get()
-    Qy3 = hdffile.select(qyi).get()
-    Qz3 = hdffile.select(qzi).get()
-
-    # These are too big, corner-sized but corners are poison
-    # Chop out corners
-    Qx3cc = Scl * Qx3[:-1, :-1, :-1]
-    Qy3cc = Scl * Qy3[:-1, :-1, :-1]
-    Qz3cc = Scl * Qz3[:-1, :-1, :-1]
-
-    return Qx3cc, Qy3cc, Qz3cc
-
-
-def getHDFScl(hdffile, q, Scl=1.0):
-    """
-    Get the HDFScl (HDF Scale) for a given hdffile and variable q.
-
-    Args:
-        hdffile (HDF file object): The HDF file containing the data.
-        q (str): The variable to select from the HDF file.
-        Scl (float, optional): The scaling factor to apply to the selected variable. Default is 1.0.
-
-    Returns:
-        ndarray: The scaled selected variable with corners chopped out.
-    """
-    qi = q + "_"
-    Q3 = np.double(hdffile.select(qi).get())
-    # These are too big, corner-sized but corners are poison
-    # Chop out corners
-    Q3cc = Scl * Q3[:-1, :-1, :-1]
-    return Q3cc
diff --git a/pyproject.toml b/pyproject.toml
index 05ea0d6..ce5da08 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -28,7 +28,6 @@ dependencies = [
     "matplotlib",
     "pandas",
     "progressbar",
-    "pyhdf",
     "pyspedas",
     "pytest",
     "spacepy",
diff --git a/requirements.txt b/requirements.txt
index 876a6e5..724ac50 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -8,7 +8,6 @@ jupyterlab
 matplotlib
 pandas
 progressbar
-pyhdf
 pyspedas
 pytest
 slack_sdk
diff --git a/setup.py b/setup.py
index 28ea62b..1d344da 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,6 @@ setup(
     'matplotlib',
     'pandas',
     'progressbar',
-    'pyhdf',
     'pyspedas',
     'pytest',
     'slack_sdk',