Merged in documentation (pull request #59)

Approved-by: Jeff
Approved-by: Slava Merkin
.gitignore (vendored): 122 changes
@@ -1,76 +1,134 @@
# User dependent stuff
cmake/user.cmake
build*/
# -----------------------------
# OS / Editors / IDEs
# -----------------------------
.DS_Store
*.DS_STORE
Thumbs.db
.vscode/
.idea/
kaiju.sublime-workspace
# Emacs backup files
*~

# Related to unit testing
# -----------------------------
# User / Local config
# -----------------------------
cmake/user.cmake
.env

# -----------------------------
# Build / Artifacts
# -----------------------------
build*/
dist/
target/
docs/_build/
docs/build/

# -----------------------------
# Dependency caches
# -----------------------------
node_modules/

# -----------------------------
# Unit testing (third-party deps)
# -----------------------------
external/FARGPARSE-*/
external/GFTL-*/
external/GFTL_SHARED-*/
external/PFUNIT-*/

# skip F90 files in the tests folder, except in specific subfolders
# -----------------------------
# Tests (Fortran)
# skip F90 files in tests except specific helpers
# -----------------------------
tests/*/*.F90
!tests/helperCode/*.F90
!tests/helperCode_mpi/*.F90

# any local automated tests that users have run
# Any local automated test runs
test_runs/

# Pre-compile generated files
# -----------------------------
# Generated / Precompiled sources
# -----------------------------
src/base/git_info.F90

# Compiled Object files
# -----------------------------
# Python
# -----------------------------
*.py[cod]

# -----------------------------
# Java
# -----------------------------
*.class
*.jar

# -----------------------------
# Logs
# -----------------------------
*.log
TEST*.xml

# -----------------------------
# Fortran / Compilers
# -----------------------------
# Object files
*.slo
*.lo
*.o
*.obj
*.pyc
*__genmod.f90

# Compiled Dynamic libraries
*.so
*.dylib
*.dll

# Fortran module files
# Modules
*.mod
*.smod

# Compiled Static libraries
# ifort cruft
*.i90

# Static libraries
*.lai
*.la
*.a
*.lib

# Dynamic libraries
*.so
*.dylib
*.dll

# Executables
*.exe
*.out
*.app
*.x
*.war

# Data files
# -----------------------------
# Data / Media (large files)
# -----------------------------
*.mp4
*.h5
*.xmf
*.hdf
*.png
*.h5part
*.tiff
*.avi
*.flv
*.mov
*.wmv

# Mac nonsense
*.DS_STORE

#Notebook checkpoints
# -----------------------------
# Jupyter / Notebooks
# -----------------------------
analysis/notebooks/Tutorial/.ipynb_checkpoints

# Visual Studio Code configuration directory
.vscode/

# Visual Studio Code environment files
.env

# ifort cruft
*.i90

# Emacs backup files
*~
# -----------------------------
# Ignore exceptions
# -----------------------------
# Unignore docs/source/building
!docs/source/building/
!docs/source/building/**
.readthedocs.yaml (new file): 13 lines
@@ -0,0 +1,13 @@
version: "2"

build:
  os: "ubuntu-22.04"
  tools:
    python: "3.10"

python:
  install:
    - requirements: docs/requirements.txt

sphinx:
  configuration: docs/source/conf.py
docs/Makefile (new file): 20 lines
@@ -0,0 +1,20 @@
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS    ?=
SPHINXBUILD   ?= sphinx-build
SOURCEDIR     = source
BUILDDIR      = build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
docs/make.bat (new file): 35 lines
@@ -0,0 +1,35 @@
@ECHO OFF

pushd %~dp0

REM Command file for Sphinx documentation

if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=source
set BUILDDIR=build

if "%1" == "" goto help

%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)

%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%

:end
popd
docs/requirements.txt (new file): 2 lines
@@ -0,0 +1,2 @@
sphinx==7.1.2
sphinx-rtd-theme==1.3.0rc1
docs/source/_obsolete/FAQ.rst (new file): 26 lines
@@ -0,0 +1,26 @@
Frequently Asked Questions
==========================

This page contains frequently asked questions about running the ``kaiju``
software.

Q1. I got Intel compiler warning #11021 (unresolved references in dynamic
libraries) when linking the Fortran executable ``gamera.x``. For example:

.. code-block:: bash

   $ make gamera.x
   ...
   ipo: warning #11021: unresolved files_mp_checkandkill_
           Referenced in libgamlib.a(output.F90.o)
           Referenced in libgamlib.a(gioH5.F90.o)
           Referenced in libgamlib.a(init.F90.o)

A1. Invalid directories may be included in ``$PATH`` and/or ``$PYTHONPATH``.

Q2. What's the code unit for current density from MHD output?

A2. ``gB0/gx0/mu0 = 4.58nT/6.38e6m/(4pi*1e-7) = 5.7139e-10 A/m^2``

Check the units of other variables in the HDF5 file with ``h5info`` in Matlab
or similar functions in ``python``.
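As a quick sanity check, the A2 conversion factor can be reproduced in a few
lines of Python. This is a minimal sketch; the variable names below are
illustrative and are not taken from the kaiju code.

.. code-block:: python

   import math

   gB0 = 4.58e-9                  # magnetic field code unit: 4.58 nT, in tesla
   gx0 = 6.38e6                   # length code unit: ~1 Earth radius, in meters
   mu0 = 4.0 * math.pi * 1.0e-7   # vacuum permeability [H/m]

   # Current density code unit: J0 = B0 / (x0 * mu0)
   J0 = gB0 / (gx0 * mu0)
   print(f"J0 = {J0:.4e} A/m^2")  # ~5.71e-10 A/m^2, matching the value above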
docs/source/_obsolete/chimp/chimpQuickStart.rst (new file): 155 lines
@@ -0,0 +1,155 @@
CHIMP Quick Start Guide
=======================

Introduction
------------

This page provides instructions to help the user get up and running quickly
with `CHIMP <https://cgs.jhuapl.edu/Models/chimp.php>`_. We provide
instructions for using the CHIMP tool ``push.x``, which computes the
trajectories of test particles within the domain of a MAGE model run.

These instructions assume you are running on the ``derecho`` supercomputer,
and that you have already made a successful ``kaiju`` model run in your
current working directory. The model run uses the run identifier ``geospace``.

Running ``push.x``
------------------

Create the XML file
^^^^^^^^^^^^^^^^^^^

Create an XML file describing the input parameters for ``push.x``. It should
look something like this on ``derecho``:

.. code-block:: xml

   <?xml version="1.0" ?>
   <Kaiju>
     <Chimp>
       <sim runid="geospace"/>
       <time T0="36000.0" dt="0.5" tFin="43200.0"/>
       <fields doMHD="F" ebfile="geospace" grType="LFM" isMPI="T"/>
       <parallel Ri="4" Rj="4" Rk="1"/>
       <pusher epsht="0.05" imeth="FO"/>
       <tps Np="2500" species="Op"/>
       <units uid="LFM"/>
       <output doEQProj="T" dtOut="5.0" tsOut="100"/>
       <radius max="2.25" min="2.05"/>
       <phi max="360.0" min="0.0"/>
       <alpha max="180.0" min="90.0"/>
       <height max="90.0" min="15.0" doOutflow="T"/>
       <energy doEBInit="F" doLog="F" max="10.0" min="0.001"/>
       <domain rmax="25.0" rmin="2.005"/>
       <tracer epsds="0.05"/>
       <stream doStream="F" max="39600.0" min="36000.0"/>
     </Chimp>
   </Kaiju>

``T0`` is the start time for the test particle trajectories, in seconds from
the start time of the simulation being used for the calculation.

``tFin`` is the stop time for the test particle trajectories, in seconds from
the start time of the simulation being used for the calculation.

For other parameters see the :doc:`Chimp XML <chimpXML>` page.
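If you are scripting parameter scans, the input file can also be edited
programmatically. Below is a minimal, hypothetical Python sketch (not part of
kaiju) that rewrites the ``T0``/``tFin`` attributes of an existing input file:

.. code-block:: python

   import xml.etree.ElementTree as ET

   # Hypothetical helper: adjust the time window of a push.x input file.
   tree = ET.parse("geospace.xml")
   time_el = tree.getroot().find("Chimp/time")  # root element is <Kaiju>
   time_el.set("T0", "36000.0")
   time_el.set("tFin", "43200.0")
   tree.write("geospace.xml", xml_declaration=True)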
Create the PBS file
^^^^^^^^^^^^^^^^^^^

The ``push.x`` tool is usually run in parallel (using a PBS "job array") to
improve performance. The PBS script ``RunCHIMP.pbs`` might look something like
this:

.. code-block:: bash

   #PBS -A P28100045
   #PBS -j oe
   #PBS -q regular
   #PBS -l walltime=12:00:00
   #PBS -l select=1:ncpus=128:ompthreads=128
   #PBS -m ae

   export RUNID=${PBS_JOBNAME}

   module list
   hostname
   date
   export OMP_NUM_THREADS=128
   export KMP_STACKSIZE=128M
   export JNUM=${PBS_ARRAY_INDEX:-0}
   EXE=./push.x
   echo "Running $EXE"
   ${EXE} ${RUNID}.xml ${JNUM} >& ${RUNID}.${JNUM}.out
   date

Run ``push.x``
^^^^^^^^^^^^^^

Copy the executable for ``push.x`` into your working directory. For example:

.. code-block:: bash

   cp $KAIJUDIR/build/bin/push.x .

Here is an example command to submit two parallel jobs:

.. code-block:: bash

   qsub -v -J 1-2 -N geospace RunCHIMP.pbs

``-J 1-2`` means the same ``push.x`` run is split into 2 jobs that run in
parallel on separate nodes.

``-N geospace`` gives the job array the same name as the run ID
(``geospace``). It also forces ``push.x`` to use the XML file called
``geospace.xml`` for its input.

The job index in the job array is used to seed the generation of test
particles within each run, which makes the runs reproducible. If you want to
run additional test particles with the same XML file, you need to make sure
the job numbers are not reused; i.e., if your first set of runs was submitted
with the command above, you can run two additional, unique jobs with the
command:

.. code-block:: bash

   qsub -c -J 3-4 -N geospace RunCHIMP.pbs

CHIMP output format
^^^^^^^^^^^^^^^^^^^

The output file from a CHIMP run will be named something like
``runid.000000.h5part`` and contains the variables:

``x``, ``y``, ``z``
   Particle position coordinates

``K``
   Particle energy [keV]

``isIn``
   ``0`` or ``1`` depending on whether the particle is alive or has left the
   simulation domain, and whether it has been "born" yet.

``id``
   Particle ID number

``xeq``, ``yeq``, ``Teq``
   Position where the particle last crossed the Z=0 plane and at what time,
   or an instantaneous projection, depending on ``doTrc`` in the XML file.

``Keq``/``ebKeq``
   Energy at the equator in the lab and ExB frames

``Aeq``
   Equatorial pitch angle

``OCb``
   Set to ``0``/``1``/``2`` based on the topology of the field line at the
   particle position: ``0`` = IMF, ``1`` = open, ``2`` = closed. This will be
   all zeroes if you didn't enable field-line tracing (computationally
   expensive) in the XML file.

``T0p``
   The "birthday" of the particle, i.e. when it was/will be "born".
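The ``.h5part`` output can also be inspected outside of VisIt/Paraview. Here
is a minimal sketch using ``h5py``, assuming the usual h5part layout of one
``Step#n`` group per output time; the file name is illustrative, and the
variable names are those listed above.

.. code-block:: python

   import h5py
   import numpy as np

   # Minimal sketch: read particle positions and energies from one output step.
   with h5py.File("geospace.000000.h5part", "r") as f:  # hypothetical file name
       step = f["Step#0"]                               # first output time
       x, y, z = (np.array(step[v]) for v in ("x", "y", "z"))
       K = np.array(step["K"])                          # energy [keV]
       isIn = np.array(step["isIn"])
       print(f"{int((isIn == 1).sum())} of {len(K)} particles alive at this step")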
docs/source/_obsolete/chimp/chimpXML.rst (new file): 233 lines
@@ -0,0 +1,233 @@
CHIMP XML
=========

``push.x``
----------

.. code-block:: xml

   <?xml version="1.0" ?>
   <Chimp>
     <sim runid="erc"/>
     <time T0="1290.0" dt="0.05" tFin="1310.0"/>
     <fields ebfile="ebBBFs.h5" grType="LFM"/>
     <pusher epsht="0.05" imeth="DYN"/>
     <tps Np="500000" species="e"/>
     <units uid="EARTH"/>
     <output T0Out="1300.0" doEQProj="T" dtOut="0.1" tsOut="100"/>
     <radius max="20.0" min="10.0"/>
     <phi max="150.0" min="105.0"/>
     <alpha max="180.0" min="0.0"/>
     <energy doLog="F" max="50.0" min="0.1"/>
     <tracer epsds="0.025"/>
     <stream doStream="T" max="1300.0" min="1290.0"/>
     <parallel Ri="4" Rj="4" Rk="1"/>
   </Chimp>

The XML elements are:

``<Chimp>``
   This element is required.

``<sim>``
   ``runid`` The name of the simulation.

``<time>``
   ``T0`` Start time for the particle trajectory (seconds).

   ``dt`` Time step for the particle trajectory (seconds).

   ``tFin`` End time for the particle trajectory (seconds).

``<fields>``
   ``ebfile`` The name of the file containing the electromagnetic fields.

   ``grType`` The type of grid used for the electromagnetic fields.

``<pusher>``
   ``epsht`` Small parameter for the integrator.

   ``imeth`` Integration method: FO (full orbit), GC (guiding center), or DYN
   (switch between both).

``<tps>``
   ``Np`` Number of test particles.

   ``species`` Species of the test particles.

``<units>``
   ``uid`` Which units to use for reading input data. The default value is
   "Earth", for use with GAMERA output of magnetosphere electromagnetic
   fields. CHIMP also works with LFM output if the uid is changed to "LFM".
   Using the wrong units will produce wildly incorrect results and very slow
   performance. See kaiju/src/chimp/chmpunits.F90 for details.

``<output>``
   ``T0Out`` When to start outputting.

   ``doEQProj`` Whether to project the test particle positions to the
   equatorial plane.

   ``dtOut`` Time interval for output files.

   ``tsOut`` Timestep interval for console output.

``<radius>``
   ``max`` Maximum radius for the test particles.

   ``min`` Minimum radius for the test particles.

``<phi>``
   ``max`` Maximum azimuth for the test particles.

   ``min`` Minimum azimuth for the test particles.

``<alpha>``
   ``max`` Maximum pitch angle for the test particles.

   ``min`` Minimum pitch angle for the test particles.

``<energy>``
   ``doLog`` Whether to use a logarithmic distribution for the test particle
   energies.

   ``max`` Maximum energy for the test particles.

   ``min`` Minimum energy for the test particles.

``<tracer>``
   ``epsds`` Small parameter for the field line tracer.

``<stream>``
   ``doStream`` Whether to continuously create test particles.

   ``max`` Maximum time for the test particle creation.

   ``min`` Minimum time for the test particle creation.

``<parallel>``
   ``Ri`` Number of processors in the i-direction.

   ``Rj`` Number of processors in the j-direction.

   ``Rk`` Number of processors in the k-direction.

Initialization of test particles
--------------------------------

Standard initialization creates a number of test particles (tps/Np) of species
(tps/species; see kaiju/src/chimp/chmpunits.F90@getSpecies) in the (Z=0)
plane. Radius (in Rx), phi (azimuth in degrees), alpha (pitch angle in
degrees), and energy (in keV) specify the bounds over which test particle
parameters are randomly chosen; see the sketch after this section.

By default all particles are created at T0; however, stream/doStream can
specify the continuous creation of test particles. If doStream is specified,
then each test particle is given a randomly assigned birthday between times
stream/min and stream/max.
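The initialization described above amounts to independent uniform sampling
within the configured bounds. A minimal, hypothetical Python sketch of that
scheme (this is not the kaiju implementation; bounds are taken from the
example XML above):

.. code-block:: python

   import numpy as np

   rng = np.random.default_rng(seed=1)  # per-job seed, as with the job index
   Np = 500000                          # tps/Np

   # Uniform draws between the min/max bounds (<radius>, <phi>, <alpha>, <energy>)
   radius = rng.uniform(10.0, 20.0, Np)    # [Rx]
   phi    = rng.uniform(105.0, 150.0, Np)  # azimuth [deg]
   alpha  = rng.uniform(0.0, 180.0, Np)    # pitch angle [deg]
   energy = rng.uniform(0.1, 50.0, Np)     # [keV]; log-spaced if doLog="T"

   # With doStream="T", birthdays are uniform in [stream/min, stream/max]
   T0p = rng.uniform(1290.0, 1300.0, Np)   # [s]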
Gamera output slicer (slice.x)
------------------------------

Example XML file:

.. code-block:: xml

   <?xml version="1.0" ?>
   <Chimp>
     <sim runid="ebXY"/>
     <time T0="36000.0" dt="5.0" tFin="43200.0"/>
     <fields doMHD="T" ebfile="msphere" grType="LFM" isMPI="T"/>
     <parallel Ri="6" Rj="12" Rk="1"/>
     <domain dtype="LFMCYL"/>
     <units uid="EARTH"/>
     <slice Npow="1" doXY="T" grType="LFM2D" xSun="25.0"/>
     <tracer epsds="0.05"/>
     <output doSlim="T" doTrc="T"/>
   </Chimp>

An example PBS script to submit a slice job:

.. code-block:: bash

   #!/bin/bash
   #PBS -A P28100045
   #PBS -N sliceXZ
   #PBS -j oe
   #PBS -q regular
   #PBS -l walltime=4:00:00
   #PBS -l select=1:ncpus=72:ompthreads=72

   #Example usage
   #qsub -v KAIJUEXE="./pusher.x" -J 1-5 -N RC_ep RunK.pbs
   #Module savelist "kaiju"
   #Currently Loaded Modules:
   #  1) git/2.9.5 (H)   4) impi/2018.4.274     7) python/2.7.16
   #  2) intel/18.0.5    5) ncarenv/1.3         8) cmake/3.14.4
   #  3) hdf5/1.10.5     6) ncarcompilers/0.5.0

   export EXE=${slice}
   export RUNID=${PBS_JOBNAME}

   ###source ~/.bashrc
   module restore kaiju

   module list
   hostname
   date
   export OMP_NUM_THREADS=72
   export KMP_STACKSIZE=128M
   echo "Running $EXE"
   ./slicer.x ${RUNID}.xml > ${RUNID}.out
   date

Gamera field line tracer (trace.x)
----------------------------------

Example XML file:

.. code-block:: xml

   <?xml version="1.0" ?>
   <KAIJU>
     <Chimp>
       <sim runid="IonFL"/>
       <time T0="36000.0" dt="5.0" tFin="43200.0"/>
       <fields doMHD="T" ebfile="msphere" grType="LFM" isMPI="T"/>
       <parallel Ri="6" Rj="12" Rk="1"/>
       <domain dtype="LFMCYL"/>
       <units uid="EARTH"/>
       <interp wgt="TSC"/>
       <tracer epsds="0.05"/>
       <output dtOut="5.0"/>
       <points Nx1="1" Nx2="72" Nx3="16" grType="SPHERICAL"/>
       <radius min="2.05" max="2.05"/>
       <phi min="180.0" max="360.0"/>
       <theta min="5.0" max="45.0"/>
     </Chimp>
   </KAIJU>

This will create an HDF5 file with a similar group structure for time slices
(Step#0, Step#1, ...) but with each time slice also containing a group for
each field line seed, i.e.

* Step#0

  * Line#0
  * Line#1

* Step#1

  * Line#0
  * Line#1

In the above example, the field line seed points are set in a spherical
coordinate system with Nx1, Nx2, Nx3 points in r, phi, theta.

The script kaiju/scripts/genXLine.py can be run on the output HDF5 data to
create an XDMF file that can be read by VisIt or Paraview.
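As an alternative to XDMF, the nested Step#/Line# layout can be walked
directly with ``h5py``. A minimal sketch, where the output file name is an
assumption based on the ``runid`` above:

.. code-block:: python

   import h5py

   # Minimal sketch: list the field lines stored in each time slice.
   with h5py.File("IonFL.h5", "r") as f:        # hypothetical output name
       steps = sorted(k for k in f.keys() if k.startswith("Step#"))
       for s in steps:
           lines = [k for k in f[s].keys() if k.startswith("Line#")]
           print(f"{s}: {len(lines)} field lines")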
docs/source/_obsolete/chimp/index.rst (new file): 16 lines
@@ -0,0 +1,16 @@
Particle Simulations and Analysis Tools with CHIMP
==================================================

Introduction
------------

This page provides links to documentation for CHIMP.

.. toctree::
   :maxdepth: 1

   chimpQuickStart
   chimpXML
   psd.x
   push.x
   trace.x
docs/source/_obsolete/chimp/psd.x.rst (new file): 333 lines
@@ -0,0 +1,333 @@
Calculate phase space density from test particle simulations (psd.x)
====================================================================

Introduction
------------

This executable takes output from an MHD and test particle simulation and
calculates the time evolution of the resulting phase space density. The test
particles are weighted assuming an initial phase space density distribution,
taken from either the MHD solution at the particle's location or from an
external HDF5 file provided by the user. The weight corresponds to the number
of real particles each test particle represents. For a detailed description of
how the calculations are performed, see appendix A2 of `Sorathia et al. (2018)
<https://agupubs.onlinelibrary.wiley.com/doi/full/10.1029/2018JA025506>`_.

Example XML file
----------------

.. code-block:: xml

   <?xml version="1.0" ?>
   <Kaiju>
     <Chimp>
       <sim runid="eRBpsdH5" doShape="T"/>
       <tps species="e"/>
       <time T0="36600.0" dt="60.0" tFin="36610.0"/>
       <fields doMHD="T" ebfile="msphere" grType="LFM" isMPI="T"/>
       <parallel Ri="8" Rj="8" Rk="1"/>
       <units uid="EARTH"/>
       <output dtOut="60.0" tsOut="100" doFat="T"/>
       <radius N="30" doLog="F" max="10" min="2.5"/>
       <phi N="24" max="360.0" min="0.0"/>
       <alpha N="9" max="90.0" min="0.0"/>
       <energy N="30" doLog="T" max="7000.0" min="50.0"/>
       <domain rmax="20.0" rmin="1.05"/>
       <tracer epsds="0.05"/>
       <population f0="HDF5IN" f0data="psdInit.h5" ns="1" ne="20" popid="eRB"/>
     </Chimp>
   </Kaiju>

Parameter Descriptions
----------------------

``<sim>`` (optional): Specify identifying information for this computation.

``runid`` (optional, default ``"Sim"``): String specifying an identifier for
this run of ``psd.x``. A best practice is to use the ``runid`` in the name of
the XML file.

``doShape`` (optional, default ``"true"``): Set to ``"true"`` to use a
shaping function when calculating phase space density.

``<tps>`` (optional): Options related to test particles.

``species`` (optional, default ``"X"``): Species simulated by push.x that
you would like to weight. For the full list of available species, see the
getSpecies() function in chmpunits.F90.

``<time>`` (optional): Specify time range and interval for the phase space
density calculation.

``T0`` (optional, default ``"0.0"``): Start time (simulated seconds) for the
calculation, relative to the start of the simulation results used as input.

``dt`` (optional, default ``"1.0"``): Time interval and output cadence
(simulated seconds) for the calculation.

``tFin`` (optional, default ``"60.0"``): Stop time (simulated seconds) for
the calculation, relative to the start of the simulation results used as
input.

``<fields>`` (required): Describes the input data from a MAGE model run.

``ebfile`` (optional, default ``"ebdata.h5"``): Path to HDF5 file containing
the electric and magnetic fields computed by a MAGE model run.

``grType`` (optional, default ``"EGG"``): String specifying grid type used
by the MAGE output file. Valid values are ``"EGG"``, ``"LFM"``, ``"SPH"``. If
the string is not one of the supported grid types, the default value
(``"EGG"``) is used, and a warning message is printed.

``doEBFix`` (optional, default ``"false"``): Set to ``"true"`` to "clean" the
electric field E so that the dot product of the electric and magnetic fields
is 0. See ``ebinterp.F90``.

``doMHD`` (optional, default ``"false"``): Set to ``"true"`` to pass the full
set of magnetohydrodynamic variables to CHIMP, rather than just electric and
magnetic fields. Includes the velocity vector, density, and pressure in the
output file. See ``ebtypes.F90``.

``isMPI`` (optional, default ``"false"``): Set to ``"true"`` if the MAGE
results file was generated with an MPI version of the model. See
``eblCstd.F90``.

``rho0`` (optional, default ``"1.0"``): Default density used if not using MHD
values to determine the distribution function used to weight particles.

``kT0`` (optional, default ``"1.0"``): Default temperature used if not using
MHD values to determine the distribution function used to weight particles.

``<domain>`` (optional): Options for the problem domain.

``dtype`` (optional, default ``"SPH"``): Domain over which to perform CHIMP
calculations, separate from the grid; enables the user to perform the
calculation on a subset of the grid to reduce computation where it is not
needed. See ``gridloc.F90``. Valid values are ``"SPH"``, ``"LFM"``,
``"LFMCYL"``, ``"MAGE"``, ``"EGG"``, ``"ELL"``.

``rClosed`` (optional, default set by choice of ``units/uid``): Radial value
a field line endpoint must reach for the line to be considered closed. See
``chmpunits.F90``.

``rmax`` (optional, default computed): Maximum radius of the Domain region.
See ``gridloc.F90``.

``rmin`` (optional, default computed): Minimum radius of the Domain region.
See ``gridloc.F90``.

``xSun`` (optional, default 20.0): If dType is "LFM" or "MAGE", the Domain
region includes all i-shells whose distance along the Earth-Sun line is less
than this value (in Re).

``xTail`` (optional, default -100.0): If dType is "LFM" or "MAGE", the Domain
region includes cells in the magnetotail up until this value (in Re).

``yzMax`` (optional, default 40.0): If dType is "LFM" or "MAGE", the Domain
region includes cells with Y and Z coordinates between +/- yzMax (in Re).

``<output>`` (optional): Options related to driver output.

``timer`` (optional, default ``"false"``): Set to ``"true"`` to turn timing
flags on. See ``starter.F90``.

``tsOut`` (optional, default ``"10"``): Cadence to output diagnostics to the
run-log file. See ``starter.F90``.

``doFat`` (optional, default ``"false"``): Set to ``"true"`` to include 4D
variable information in output files. See psdio.F90.

``<parallel>`` (optional): Options if ebfile was generated using an MPI
version of the code (read if fields/isMPI is set to ``"true"``; file names in
the form ebfile_Ri_Rj_Rk_i_j_k.gam.h5).

``Ri`` (optional, default ``"1"``): Number of ranks used in decomposition of
the ``"i"`` dimension. See iotable.F90.

``Rj`` (optional, default ``"1"``): Number of ranks used in decomposition of
the ``"j"`` dimension. See iotable.F90.

``Rk`` (optional, default ``"1"``): Number of ranks used in decomposition of
the ``"k"`` dimension. See iotable.F90.

``doOldNaming`` (optional, default ``"false"``): Allow backward compatibility
for MHD files generated with the now-deprecated naming convention. See
``chmpdefs.F90``.

``<tracer>`` (optional): Options related to field line tracing performed by
CHIMP.

``epsds`` (optional, default ``"1.0e-2"``): Tolerance for field line tracing
computations. See chmpdefs.F90.

``<units>`` (optional): Name of units system used in the model run.

``uID`` (optional, default ``"Earth"``): See chmpunits.F90. Valid values are
``"EARTH"``, ``"EARTHCODE"``, ``"JUPITER"``, ``"JUPITERCODE"``, ``"SATURN"``,
``"SATURNCODE"``, ``"HELIO"``, ``"LFM"``, ``"LFMJUPITER"``.

``<energy>`` (optional): Options for initialization of the phase space density
grid.

``min`` (optional, default ``"1.0"``): Minimum energy (in keV) of grid.

``max`` (optional, default ``"100.0"``): Maximum energy (in keV) of grid.

``doLog`` (optional, default ``"false"``): Default behavior is a uniform
distribution between min/max. Set to ``"true"`` to distribute cells uniformly
in log-space between the min/max values.

``N`` (optional, default ``"15"``): Number of cells to use in this dimension.

``<alpha>`` (optional): Options for initialization of the phase space density
grid.

``min`` (optional, default ``"0.0"``): Minimum pitch angle (in degrees) of
grid.

``max`` (optional, default ``"360.0"``): Maximum pitch angle (in degrees) of
grid.

``doLog`` (optional, default ``"false"``): Default behavior is a uniform
distribution between min/max. Set to ``"true"`` to distribute cells uniformly
in log-space between the min/max values.

``N`` (optional, default ``"10"``): Number of cells to use in this dimension.

``<radius>`` (optional): Options for initialization of the phase space density
grid.

``min`` (optional, default ``"5.0"``): Minimum radius of grid.

``max`` (optional, default ``"25.0"``): Maximum radius of grid.

``doLog`` (optional, default ``"false"``): Default behavior is a uniform
distribution between min/max. Set to ``"true"`` to distribute cells uniformly
in log-space between the min/max values.

``N`` (optional, default ``"20"``): Number of cells to use in this dimension.

``<phi>`` (optional): Options for initialization of the phase space density
grid.

``min`` (optional, default ``"0.0"``): Minimum longitude (in degrees) of grid.
A value of 0 corresponds to the +X direction.

``max`` (optional, default ``"360.0"``): Maximum longitude (in degrees) of
grid.

``doLog`` (optional, default ``"false"``): Default behavior is a uniform
distribution between min/max. Set to ``"true"`` to distribute cells uniformly
in log-space between the min/max values.

``N`` (optional, default ``"8"``): Number of cells to use in this dimension.

``<population>`` (optional): Options specific to the phase space density
calculation; see psdinit.F90 and pdfuns.F90.

``popid`` (optional, default ``"chimp"``): String specifying the runID of the
test particle files being read in for the PSD calculation.

``ns`` (optional, default ``"1"``): Starting job id/number of the test
particle files that should be included in the PSD calculation.

``ne`` (optional, default ``"4"``): Ending job id/number of the test particle
files that should be included in the PSD calculation.

``kTScl`` (optional, default ``"1.0"``): Factor used to scale the temperature
used in the calculation.

``f0`` (optional, default ``"Max"``): String specifying the type of
distribution function used to weight the test particles. Valid values are
``"MAX"`` (Maxwellian based on local MHD parameters), ``"KAP"`` (kappa based
on local MHD parameters), ``"RBSP"`` (function fit to Van Allen Probes data),
``"HDF5IN"`` (provided by an external HDF5 file).

``k0`` (optional, default ``"3.0"``): Kappa value to use if f0=``"KAP"``.

``f0data`` (optional, default ``"psd.h5"``): String specifying the name of
the HDF5 file containing the PSD distribution with which to weight the test
particles. Read if f0=``"HDF5IN"``.

``<stream>`` (optional): Options related to time-dependent initialization of
test particles.

``dShell`` (optional, default ``"1.0"``): Width of the streaming region, used
to rescale the PSD initial condition if test particles were injected over
time.

Run Script Example
------------------

An example PBS script on ``cheyenne``, ``RunPSD.pbs``, to submit a phase space
density run:

.. code-block:: bash

   #!/bin/bash
   #PBS -A P28100045
   #PBS -N eRBpsdH5
   #PBS -j oe
   #PBS -q regular
   #PBS -l walltime=12:00:00
   #PBS -l select=1:ncpus=72:ompthreads=72

   export EXE=${CHIMPEXE:-"psd.x"}
   export RUNID=${PBS_JOBNAME}

   #Replace this with your module set
   module purge
   module restore kaiju

   module list
   hostname
   date
   export OMP_NUM_THREADS=72
   export KMP_STACKSIZE=128M
   echo "Running $EXE"
   ${EXE} ${RUNID}.xml > ${RUNID}.out
   date

This job can be submitted with the command:

.. code-block:: bash

   qsub RunPSD.pbs

Description of Output
---------------------

The run outputs three files:

``runID.wgt.h5`` - contains all the weights of the test particles, in order
of their particle ID.

``runID.ps.h5`` - phase space density and intensity as a function of energy,
radius, and longitude, integrated over equatorial pitch angle:

   ``fPSD`` - phase space density [(keV s)^-3]

   ``jPSD`` - intensity [cm^-2 sr^-1 s^-1 keV^-1]

   ``Ntp`` - number of test particles in each cell

   ``dG`` - phase space density cell volume element

``runID.pseq.h5`` - the moments of the phase space density distribution
function in the equatorial plane.

A fourth file is output if the ``doFat`` flag is set to ``"true"`` in the XML
file:

``runID.ps4.h5`` - 4D phase space density (fPSD) as a function of energy,
equatorial pitch angle, radius, and longitude:

   ``fPSD`` - phase space density [(keV s)^-3]

   ``Ntp`` - number of test particles in each cell

   ``dG`` - phase space density cell volume element

   ``dGp`` - phase space density cell momentum volume element

   ``dGx`` - phase space density cell spatial volume element
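A quick way to check the contents of these outputs is with ``h5py``. The
sketch below is illustrative only: the dataset names are those listed above,
but the internal group layout (per-step groups) is an assumption, so start
with ``visit`` to see the actual structure.

.. code-block:: python

   import h5py
   import numpy as np

   # Minimal sketch: inspect the pitch-angle-integrated PSD output.
   with h5py.File("eRBpsdH5.ps.h5", "r") as f:
       f.visit(print)                       # list all groups/datasets
       fPSD = np.array(f["Step#0/fPSD"])    # hypothetical per-step path
       print("fPSD shape:", fPSD.shape, "max:", fPSD.max())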
docs/source/_obsolete/chimp/push.x.rst (new file): 435 lines
@@ -0,0 +1,435 @@
Test particle pusher (push.x)
=============================

Files needed in run directory
-----------------------------

``ebfile`` - MHD data (such as msphere.gam.h5) through which to integrate the
test particle trajectories

``eRB.xml`` - XML file detailing test particle initialization and simulation
parameters

``RunCHIMP.pbs`` - job script to submit the run

``push.x`` - compiled executable

No grid file is needed, as CHIMP pulls this information from the MHD data
file. CHIMP is not currently set up to run simultaneously with GAMERA;
therefore, MHD files are used as input to one-way driven test particle
simulations, with no feedback on the large-scale electromagnetic fields.

The code can be further parallelized by running multiple jobs (one job per
node) simultaneously. Each job uses a randomized seed based on the job number
to seed the test particles, so the test particles from each job can be
combined to increase statistics.

Initialization of test particles
--------------------------------

Standard initialization creates a number of test particles (tps/Np) of species
(tps/species; see kaiju/src/chimp/chmpunits.F90@getSpecies) in the (Z=0)
plane. Radius (in Rx), phi (azimuth in degrees), alpha (pitch angle in
degrees), and energy (in keV) specify the bounds over which test particle
parameters are randomly chosen.

By default all particles are created at ``T0``; however, stream/doStream can
specify the continuous creation of test particles. If doStream is specified,
then each test particle is given a randomly assigned birthday between times
stream/min and stream/max.

Example XML file
----------------

.. code-block:: xml

   <?xml version="1.0" ?>
   <Kaiju>
     <Chimp>
       <sim runid="eRB"/>
       <time T0="36000.0" dt="10.0" tFin="86400.5"/>
       <restart doRes="T" dtRes="1800.0" resID="eRB" nRes="-1"/>
       <fields doEBFix="T" doMHD="F" ebfile="msphere" grType="LFM" isMPI="T"/>
       <parallel Ri="8" Rj="8" Rk="1"/>
       <pusher epsht="0.05" imeth="GC"/>
       <tps Np="25000" species="e"/>
       <units uid="EARTH"/>
       <output doTrc="T" doEQProj="T" dtOut="60.0" tsOut="100"/>
       <radius max="8.0" min="2.5"/>
       <phi max="360.0" min="0.0"/>
       <alpha max="177.5" min="2.5"/>
       <energy doEBInit="F" doLog="T" max="5000.0" min="50.0"/>
       <domain dtype="SPH" gtype="LFM" rmax="20.0" rgap="1.6" rmin="1.05"/>
       <tracer epsds="0.05"/>
       <stream doStream="F" max="72000.0" min="71100.0"/>
     </Chimp>
   </Kaiju>

Parameter Descriptions
----------------------

``<sim>`` (optional): Specify identifying information for this computation.

``runid`` (optional, default ``"Sim"``): String specifying an identifier for
this run of ``push.x``. A best practice is to use the ``runid`` in the name of
the XML file.

``<time>`` (optional): Specify time range and interval for the test particle
calculation.

``T0`` (optional, default ``"0.0"``): Start time (simulated seconds) for the
calculation, relative to the start of the simulation results used as input.

``dt`` (optional, default ``"1.0"``): Sets the subcycling time of the test
particles, i.e., the frequency at which particle diagnostics are checked
(e.g., whether a particle is on an open or closed field line).

``tFin`` (optional, default ``"60.0"``): Stop time (simulated seconds) for
the calculation, relative to the start of the simulation results used as
input.

``<fields>`` (required): Describes the input data from a MAGE model run.

``ebfile`` (optional, default ``"ebdata.h5"``): Path to HDF5 file containing
the electric and magnetic fields computed by a MAGE model run.

``grType`` (optional, default ``"EGG"``): String specifying grid type used by
the MAGE output file. Valid values are ``"EGG"``, ``"LFM"``, ``"SPH"``. If the
string is not one of the supported grid types, the default value (``"EGG"``)
is used, and a warning message is printed.

``doEBFix`` (optional, default ``"false"``): Set to ``"true"`` to "clean" the
electric field E so that the dot product of the electric and magnetic fields
is 0. See ``ebinterp.F90``.

``doMHD`` (optional, default ``"false"``): Set to ``"true"`` to pass the full
set of magnetohydrodynamic variables to CHIMP, rather than just electric and
magnetic fields. See ``ebtypes.F90``.

``isMPI`` (optional, default ``"false"``): Set to ``"true"`` if the MAGE
results file was generated with an MPI version of the model. See
``eblCstd.F90``.

``doNumB0`` (optional, default ``"false"``): Set to ``"true"`` to numerically
compute the background magnetic field. See ``starter.F90``.

``doPureB0`` (optional, default ``"false"``): Set to ``"true"`` to use the
analytical form of the dipole field; sets the electric field to 0. See
``starter.F90``.

``doEBOut`` (optional, default ``"false"``): Set to ``"true"`` to output
slices of the electric and magnetic fields. See ``starter.F90``.

``<domain>`` (optional): Options for the problem domain.

``dtype`` (optional, default ``"SPH"``): Domain over which to perform CHIMP
calculations, separate from the grid; enables the user to perform the
calculation on a subset of the grid to reduce computation where it is not
needed. See ``gridloc.F90``, line 172. Valid values are ``"SPH"``, ``"LFM"``,
``"LFMCYL"``, ``"MAGE"``, ``"EGG"``, ``"ELL"``.

``gtype`` (optional, default ``"NONE"``): If set to "SPH" or "LFM", extends
the domain from the inner boundary of the MAGE file to the value set by rmin,
assuming a dipole magnetic field in this gap region. This is useful for test
particle runs: it allows particles to bounce in the gap region that would
otherwise be considered lost. See ``gridloc.F90``, line 207. Valid values are
``"NONE"``, ``"SPH"``, ``"LFM"``.

``rClosed`` (optional, default set by choice of ``units/uid``): Radial value
a field line endpoint must reach for the line to be considered closed. See
``chmpunits.F90``.

``rmax`` (optional, default computed): Maximum radius of the Domain region.
See ``gridloc.F90``.

``rmin`` (optional, default computed): Minimum radius of the Domain region.
See ``gridloc.F90``.

``rgap`` (optional, default computed): Assume a dipole field for
rmin < r < rgap; usually set to the inner boundary radius of the MAGE grid.
Used if gtype is ``"SPH"`` or ``"LFM"``. See ``gridloc.F90``.

``xSun`` (optional, default 20.0): If dType is "LFM" or "MAGE", the Domain
region includes all i-shells whose distance along the Earth-Sun line is less
than this value (in Re).

``xTail`` (optional, default -100.0): If dType is "LFM" or "MAGE", the Domain
region includes cells in the magnetotail up until this value (in Re).

``yzMax`` (optional, default 40.0): If dType is "LFM" or "MAGE", the Domain
region includes cells with Y and Z coordinates between +/- yzMax (in Re).

``<interp>`` (optional): Options related to interpolation.

``wgt`` (optional, default ``"TSC"``): Sets 1D interpolation type. Valid
values are ``"TSC"`` (1D triangular shaped cloud), ``"LIN"`` (linear),
``"QUAD"`` (parabolic). See ``starter.F90``.

``<gciter>`` (optional): Options for the guiding center computation.

``maxIter`` (optional, default ``"20"``): Maximum number of iterations for
the guiding center computation.

``TolGC`` (optional, default ``"1.0e-3"``): Tolerance for guiding center
computations.

``<output>`` (optional): Options related to driver output.

``T0Out`` (optional, default value of ``T0`` attribute of ``<time>`` element):
Time (in seconds from ``T0``) to begin output of data to h5 files. See
``starter.F90``.

``dtOut`` (optional, default ``"10.0"``): Output cadence.

``timer`` (optional, default ``"false"``): Set to ``"true"`` to turn timing
flags on. See ``starter.F90``, line 139.

``tsOut`` (optional, default ``"10"``): Cadence to output diagnostics to the
run-log file. See ``starter.F90``.

``doEQProj`` (optional, default ``"false"``): Set to ``"true"`` to include
equatorial variables, projected down to the magnetic equator along the field
line from the particle position (i.e., Xeq, Yeq, equatorial pitch angle,
etc.). See ``chmpdefs.F90``.

``doLL`` (optional, default ``"false"``): Set to ``"true"`` to include
latitude and longitude values of the particle position projected to the
northern hemisphere along the field line.

``doTrc`` (optional, default ``"false"``): Similar to doEQProj; used in
slice.x. See ``chmpdefs.F90``.

``<parallel>`` (optional): Options if ebfile was generated using an MPI
version of the code (read if fields/isMPI is set to ``"true"``; file names in
the form ebfile_Ri_Rj_Rk_i_j_k.gam.h5).

``Ri`` (optional, default ``"1"``): Number of ranks used in decomposition of
the ``"i"`` dimension. See iotable.F90.

``Rj`` (optional, default ``"1"``): Number of ranks used in decomposition of
the ``"j"`` dimension. See iotable.F90.

``Rk`` (optional, default ``"1"``): Number of ranks used in decomposition of
the ``"k"`` dimension. See iotable.F90.

``doOldNaming`` (optional, default ``"false"``): Allow backward compatibility
for MHD files generated with the now-deprecated naming convention. See
``chmpdefs.F90``.

``<plasmapause>`` (optional): Options for the calculation to determine the
plasmapause location in the MHD file.

``doPP`` (optional, default ``"false"``): Set to ``"true"`` to calculate the
plasmapause location in the equator and include it in the output file for
slice.x. See chmpfields.F90.

``Lin`` (optional, default ``"2.0"``): Minimum L-shell value to begin the
plasmapause calculation. See plasmaputils.F90.

``Lout`` (optional, default ``"6.0"``): Maximum L-shell value to end the
plasmapause calculation. See plasmaputils.F90.

``Nl`` (optional, default ``"80"``): Number of cells/steps in L-shell. See
plasmaputils.F90.

``Nphi`` (optional, default ``"72"``): Number of cells/steps in longitude.
See plasmaputils.F90.

``phi0`` (optional, default ``"0.0"``): Minimum longitude for the plasmapause
calculation (in radians, 0 is in the +X direction). See plasmaputils.F90.

``phi1`` (optional, default ``"2*PI"``): Maximum longitude for the
plasmapause calculation. See plasmaputils.F90.

``<pusher>`` (optional): Options related to the test particle integrator.

``imeth`` (optional, default ``"FO"``): Select the integrator used to evolve
test particles. Valid values are ``"FO"``, ``"GC"``, or ``"DYN"`` for full
orbit/relativistic Lorentz equations, guiding center, or dynamic switching
between the two when appropriate.

``epsht`` (optional, default ``"0.05"``): Parameter used in the timestep
calculation.

``epsgc`` (optional, default ``"0.05"``): Maximum value of the adiabaticity
parameter used to determine when to switch between GC and FO integration.

``<restart>`` (optional): Simulation restart options.

``doRes`` (optional, default ``"false"``): Set to ``"true"`` to read a
restart file.

``resID`` (optional, default value of ``runid`` attribute of ``<sim>``
element): String specifying the runID of the files to be used to restart the
test particle run. See particleio.F90.

``nRes`` (optional, default ``"-1"``): Restart file number to read in; a value
of -1 reads the symlinked file resID.Res.XXXXX.h5. See chmpio.F90.

``dtRes`` (optional, default ``"-1"``): Cadence to produce restart files.

``<tps>`` (optional): Options related to test particles.

``Np`` (optional, default ``"100"``): Number of test particles to simulate per
job.

``species`` (optional, default ``"X"``): Desired species to simulate. For the
full list of available species, see the getSpecies() function in chmpunits.F90.

``<energy>`` (optional): Options for initialization of test particle energies.

``min`` (optional, default ``"1.0"``): Minimum energy (in keV).

``max`` (optional, default ``"100.0"``): Maximum energy (in keV).

``doLog`` (optional, default ``"false"``): Default behavior is a uniform
distribution between min/max. Set to ``"true"`` to distribute particles
uniformly in log-space between the min/max values.

``doEBInit`` (optional, default ``"false"``): Set to ``"true"`` to specify
the energy in the ExB frame instead of the lab frame.

``<alpha>`` (optional): Options for initialization of test particle pitch
angles.

``min`` (optional, default ``"0.0"``): Minimum pitch angle (in degrees).

``max`` (optional, default ``"360.0"``): Maximum pitch angle (in degrees).

``doLog`` (optional, default ``"false"``): Default behavior is a uniform
distribution between min/max. Set to ``"true"`` to distribute particles
uniformly in log-space between the min/max values.

``<psi>`` (optional): Options for initialization of test particle gyrophase.

``min`` (optional, default ``"0.0"``): Minimum gyrophase angle (in degrees).

``max`` (optional, default ``"360.0"``): Maximum gyrophase angle (in degrees).

``doLog`` (optional, default ``"false"``): Default behavior is a uniform
distribution between min/max. Set to ``"true"`` to distribute particles
uniformly in log-space between the min/max values.

``<radius>`` (optional): Options for initialization of test particle
locations.

``min`` (optional, default ``"5.0"``): Minimum radius particles can be
initialized at.

``max`` (optional, default ``"25.0"``): Maximum radius particles can be
initialized at.

``doLog`` (optional, default ``"false"``): Default behavior is a uniform
distribution between min/max. Set to ``"true"`` to distribute particles
uniformly in log-space between the min/max values.

``<phi>`` (optional): Options for initialization of test particle locations.

``min`` (optional, default ``"0.0"``): Minimum longitude (in degrees)
particles can be initialized at. A value of 0 corresponds to the +X direction.

``max`` (optional, default ``"360.0"``): Maximum longitude (in degrees)
particles can be initialized at.

``doLog`` (optional, default ``"false"``): Default behavior is a uniform
distribution between min/max. Set to ``"true"`` to distribute particles
uniformly in log-space between the min/max values.

``<height>`` (optional): Options for initialization of test particle
locations.

``min`` (optional, default ``"0.0"``): Minimum value for the "height"
particles can be initialized at. The default is height above the Z=0 plane.

``max`` (optional, default ``"0.0"``): Maximum value for the "height"
particles can be initialized at. The default is height above the Z=0 plane.

``doLog`` (optional, default ``"false"``): Set to ``"true"`` to distribute
particles uniformly in log-space between the min/max values.

``doOutflow`` (optional, default ``"false"``): Set to ``"true"`` for height to
be treated as a latitude (in degrees); useful for tracing particle outflow
from the inner MHD boundary. See ``tpICstd.F90``.

``doWind`` (optional, default ``"false"``): Set to ``"true"`` to keep
particles between Z = +/- zWind. Only valid for full orbit particles. See
``tpICstd.F90``.

``zWind`` (optional, default ``"0.0"``): Value used to constrain test
particles between Z = +/- zWind. See ``tpICstd.F90``.

``<stream>`` (optional): Options related to time-dependent initialization of
test particles.

``doStream`` (optional, default ``"false"``): Set to ``"true"`` to inject test
particles over time instead of all being injected at ``T0``. See chmpdefs.F90.

``min`` (optional, default ``"0.0"``): Minimum time (in seconds from ``T0``)
to begin streaming particles.

``max`` (optional, default ``"0.0"``): Maximum time (in seconds from ``T0``)
to stop streaming particles. Particles are uniformly released between the
min/max times.

``<tracer>`` (optional): Options related to field line tracing performed by
CHIMP.

``epsds`` (optional, default ``"1.0e-2"``): Tolerance for field line tracing
computations. See chmpdefs.F90.

``<units>`` (optional): Name of units system used in the model run.

``uID`` (optional, default ``"Earth"``): See chmpunits.F90, line 148. Valid
values are ``"EARTH"``, ``"EARTHCODE"``, ``"JUPITER"``, ``"JUPITERCODE"``,
``"SATURN"``, ``"SATURNCODE"``, ``"HELIO"``, ``"LFM"``, ``"LFMJUPITER"``.

Run Script example
------------------

An example PBS script on ``cheyenne``, ``RunCHIMP.pbs``, to submit test
particle simulations:

.. code-block:: bash

   #!/bin/bash
   #PBS -A P28100045
   #PBS -N CHIMP
   #PBS -j oe
   #PBS -q economy
   #PBS -l walltime=12:00:00
   #PBS -l select=1:ncpus=72:ompthreads=72

   #Example usage
   #qsub -v CHIMPEXE="pusher.x" -J 1-5 -N RC_ep RunCHIMP.pbs

   export EXE=${CHIMPEXE:-"./push.x"}
   export RUNID=${PBS_JOBNAME}

   source ~/.bashrc
   module restore kaiju

   module list
   hostname
   date
   export OMP_NUM_THREADS=72
   export KMP_STACKSIZE=128M
   export JNUM=${PBS_ARRAY_INDEX:-0}
   echo "Running $EXE"
   ${EXE} ${RUNID}.xml ${JNUM} > ${RUNID}.${JNUM}.out
   date

This command submits a batch job of 20 test particle runs:

.. code-block:: bash

   qsub -J 1-20 -N eRB RunCHIMP.pbs

Output
------

Creates runID.job#.h5part files (one for each job, holding the particles it
simulated). These files can be read directly into VisIt or Paraview. It is
beneficial to also extract a 2D plane from the equator of the MHD data (see
the slice.x documentation) and visualize the particle locations and the MHD
solution together for context.
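For quick-look analysis outside VisIt/Paraview, the per-job ``.h5part`` files
can be combined with ``h5py``. A minimal sketch, where the file glob is
illustrative (following the runID.job#.h5part naming above) and the variable
names are those documented in the quick start guide:

.. code-block:: python

   import glob
   import h5py
   import numpy as np

   # Minimal sketch: gather alive particles from all job files at one step.
   xs, ys = [], []
   for fn in sorted(glob.glob("eRB.*.h5part")):   # hypothetical naming
       with h5py.File(fn, "r") as f:
           step = f["Step#0"]
           alive = np.array(step["isIn"]) == 1
           xs.append(np.array(step["x"])[alive])
           ys.append(np.array(step["y"])[alive])
   x, y = np.concatenate(xs), np.concatenate(ys)
   print(f"{x.size} alive particles at Step#0")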
docs/source/_obsolete/chimp/trace.x.rst (new file): 261 lines
@@ -0,0 +1,261 @@
|
||||
Executable to perform field line tracing through GAMERA output data (trace.x)
|
||||
=============================================================================
|
||||
|
||||
Introduction
|
||||
------------
|
||||
|
||||
This executable takes output from an MHD simulation and extracts magnetic
|
||||
field lines. Seed points are set uniformly in the in the equator (Z=0), within
|
||||
a region selected by the user. See below for more details.
|
||||
|
||||
Example XML file
|
||||
|
||||
.. code-block:: xml
|
||||
|
||||
<?xml version="1.0" ?>
|
||||
<KAIJU>
|
||||
<Chimp>
|
||||
<sim runid="IonFL"/>
|
||||
<time T0="36000.0" dt="5.0" tFin="43200.0"/>
|
||||
<fields doMHD="T" ebfile="msphere" grType="LFM" isMPI="T"/>
|
||||
<parallel Ri="6" Rj="12" Rk="1"/>
|
||||
<domain dtype="LFMCYL"/>
|
||||
<units uid="EARTH"/>
|
||||
<interp wgt="TSC"/>
|
||||
<tracer epsds="0.05"/>
|
||||
<points Nx1="1" Nx2="72" Nx3="16" grType="SPHERICAL"/>
|
||||
<radius min="2.05" max="2.05"/>
|
||||
<phi min="180.0" max="360.0"/>
|
||||
<theta min="5.0" max="45.0"/>
|
||||
</Chimp>
|
||||
</KAIJU>
|
||||
|
||||
Parameter Descriptions
----------------------

``<sim>`` (optional): Specify identifying information for this computation.

``runid`` (optional, default ``"Sim"``): String specifying an identifier for
this run of ``trace.x``. A best practice is to use the ``runid`` in the name
of the XML file.

``<time>`` (optional): Specify time range and interval for calculation.

``T0`` (optional, default ``"0.0"``): Start time (simulated seconds) relative
to start of simulation results used as input.

``dt`` (optional, default ``"1.0"``): Time interval and output cadence
(simulated seconds) for calculation.

``tFin`` (optional, default ``"60.0"``): Stop time (simulated seconds)
relative to start of simulation results used as input.

``<fields>`` (required): Describes the input data from a MAGE model run.

``ebfile`` (optional, default ``"ebdata.h5"``): Path to HDF5 file containing
the electric and magnetic fields computed by a MAGE model run.

``grType`` (optional, default ``"EGG"``): String specifying grid type used by
the MAGE output file. Valid values are ``"EGG"``, ``"LFM"``, ``"SPH"``. If the
string is not one of the supported grid types, the default value (``"EGG"``)
is used, and a warning message is printed.

``doEBFix`` (optional, default ``"false"``): Set to ``"true"`` to "clean" the
electric field E so that the dot product of the electric and magnetic fields
is 0. See ``ebinterp.F90`` and the sketch after this section.

``doMHD`` (optional, default ``"false"``): Set to ``"true"`` to pass the full
set of magnetohydrodynamic variables to CHIMP, rather than just the electric
and magnetic fields. Includes the velocity vector, density, and pressure in
the output file. See ``ebtypes.F90``.

``isMPI`` (optional, default ``"false"``): Set to ``"true"`` if the MAGE
results file was generated with an MPI version of the model. See
``eblCstd.F90``.

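For reference, a standard way to enforce the E dot B = 0 condition is to
subtract the component of the electric field parallel to the magnetic field.
This is a sketch of that projection, not necessarily the exact procedure in
``ebinterp.F90``:

.. math::

   \mathbf{E}' = \mathbf{E} -
   \frac{(\mathbf{E} \cdot \mathbf{B})}{|\mathbf{B}|^2}\,\mathbf{B}
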
``<domain>`` (optional): Options for the problem domain.

``dtype`` (optional, default ``"SPH"``): Domain over which to perform CHIMP
calculations. The domain is separate from the grid, which enables the user to
perform the calculation on a subset of the grid to reduce computation where
it is not needed. See ``gridloc.F90``. Valid values are ``"SPH"``, ``"LFM"``,
``"LFMCYL"``, ``"MAGE"``, ``"EGG"``, ``"ELL"``.

``rClosed`` (optional, default set by choice of ``units/uid``): Radial value
a field line endpoint must reach for the line to be considered closed. See
``chmpunits.F90``.

``rmax`` (optional, default computed): Maximum radius of the domain region.
See ``gridloc.F90``.

``rmin`` (optional, default computed): Minimum radius of the domain region.
See ``gridloc.F90``.

``xSun`` (optional, default ``"20.0"``): If ``dtype`` is ``"LFM"`` or
``"MAGE"``, the domain region includes all i-shells whose distances along the
Earth-Sun line are less than this value (in Re).

``xTail`` (optional, default ``"-100.0"``): If ``dtype`` is ``"LFM"`` or
``"MAGE"``, the domain region includes cells in the magnetotail up until this
value (in Re).

``yzMax`` (optional, default ``"40.0"``): If ``dtype`` is ``"LFM"`` or
``"MAGE"``, the domain region includes cells with Y and Z coordinates between
+/- ``yzMax`` (in Re).

``<output>`` (optional): Options related to driver output.

``timer`` (optional, default ``"false"``): Set to ``"true"`` to turn timing
flags on. See ``starter.F90``.

``tsOut`` (optional, default ``"10"``): Cadence for writing diagnostics to
the run-log file. See ``starter.F90``.

``<parallel>`` (optional): Options if ``ebfile`` was generated using an MPI
version of the code (read if ``fields/isMPI`` is set to ``"true"``; file
names take the form ``ebfile_Ri_Rj_Rk_i_j_k.gam.h5``).

``Ri`` (optional, default ``"1"``): Number of ranks used in decomposition of
the ``"i"`` dimension. See ``iotable.F90``.

``Rj`` (optional, default ``"1"``): Number of ranks used in decomposition of
the ``"j"`` dimension. See ``iotable.F90``.

``Rk`` (optional, default ``"1"``): Number of ranks used in decomposition of
the ``"k"`` dimension. See ``iotable.F90``.

``doOldNaming`` (optional, default ``"false"``): Allow backward compatibility
for MHD files generated with the now-deprecated naming convention. See
``chmpdefs.F90``.

``<units>`` (optional): Name of units system used in the model run.

``uID`` (optional, default ``"Earth"``): See ``chmpunits.F90``. Valid values
are ``"EARTH"``, ``"EARTHCODE"``, ``"JUPITER"``, ``"JUPITERCODE"``,
``"SATURN"``, ``"SATURNCODE"``, ``"HELIO"``, ``"LFM"``, ``"LFMJUPITER"``.

``<interp>`` (optional): Options related to interpolation.

``wgt`` (optional, default ``"TSC"``): Sets the 1D interpolation type. Valid
values are ``"TSC"`` (1D triangular-shaped cloud), ``"LIN"`` (linear), and
``"QUAD"`` (parabolic). See ``starter.F90``.

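For orientation, the textbook 1D triangular-shaped-cloud (TSC) kernel assigns
weights over three neighboring cells as a function of the normalized distance
x to a cell center; this is the standard form, shown here for reference only
(see ``starter.F90`` for the implementation actually used):

.. math::

   W(x) =
   \begin{cases}
   \tfrac{3}{4} - x^2, & |x| \le \tfrac{1}{2} \\
   \tfrac{1}{2}\left(\tfrac{3}{2} - |x|\right)^2, & \tfrac{1}{2} < |x| \le \tfrac{3}{2} \\
   0, & \text{otherwise}
   \end{cases}
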
The following parameters are specific to field line tracing.

``<tracer>`` (optional): Options related to field line tracing performed by
CHIMP.

``epsds`` (optional, default ``"1.0e-2"``): Tolerance for field line tracing
computations. See ``chmpdefs.F90``.

``<points>`` (optional): Options related to initialization of field line seed
points. All points are set uniformly between the min/max bounds. 2D grids are
seeded in the magnetic equator (Z=0).

``grType`` (optional, default ``"RP"``): Sets the seed point grid type. Valid
values are ``"RP"`` (2D polar grid), ``"XY"`` (2D Cartesian grid), and
``"RTP"`` or ``"SPHERICAL"`` (3D spherical grid). See ``tracex.F90``.

``Nx1`` (optional, default ``"100"``): Number of cells in X or R, depending
on the grid specified.

``Nx2`` (optional, default ``"100"``): Number of cells in Y or Phi, depending
on the grid specified.

``Nx3`` (optional, default ``"1"``): Number of cells in Theta; set only if
``grType`` is ``"RTP"`` or ``"SPHERICAL"``.

``x1Max`` (optional, default ``"10.0"``): Maximum value of the X dimension
used to initialize the seed points. Used if ``grType`` is ``"XY"``.

``x1Min`` (optional, default ``"-10.0"``): Minimum value of the X dimension
used to initialize the seed points. Used if ``grType`` is ``"XY"``.

``x2Max`` (optional, default ``"10.0"``): Maximum value of the Y dimension
used to initialize the seed points. Used if ``grType`` is ``"XY"``.

``x2Min`` (optional, default ``"-10.0"``): Minimum value of the Y dimension
used to initialize the seed points. Used if ``grType`` is ``"XY"``.

``<radius>`` (optional): Radial range for initialization of seed point
locations. Used if ``grType`` is ``"RP"``, ``"RTP"``, or ``"SPHERICAL"``.

``min`` (optional, default ``"5.0"``): Minimum radius seed points are
initialized at.

``max`` (optional, default ``"25.0"``): Maximum radius seed points are
initialized at.

``<phi>`` (optional): Azimuthal angle range for initialization of seed point
locations. Used if ``grType`` is ``"RP"``, ``"RTP"``, or ``"SPHERICAL"``.

``min`` (optional, default ``"0.0"``): Minimum longitude (in degrees) seed
points are initialized at. A value of 0 corresponds to the +X direction.

``max`` (optional, default ``"360.0"``): Maximum longitude (in degrees) seed
points are initialized at.

``<theta>`` (optional): Polar angle range for initialization of seed point
locations. Used if ``grType`` is ``"RTP"`` or ``"SPHERICAL"``.

``min`` (optional, default ``"0.0"``): Minimum polar angle (in degrees) seed
points are initialized at.

``max`` (optional, default ``"90.0"``): Maximum polar angle (in degrees) seed
points are initialized at.

Run Script example
------------------

An example PBS script on ``cheyenne``, ``RunTrace.pbs``, to submit a field
line tracing job:

.. code-block:: bash

   #!/bin/bash
   #PBS -A P28100045
   #PBS -N traceFL
   #PBS -j oe
   #PBS -q regular
   #PBS -l walltime=4:00:00
   #PBS -l select=1:ncpus=72:ompthreads=72
   export EXE="./trace.x"
   export RUNID=${PBS_JOBNAME}

   # Replace this with your module set
   module purge
   module restore kaiju

   module list
   hostname
   date
   export OMP_NUM_THREADS=72
   export KMP_STACKSIZE=128M
   echo "Running $EXE"
   ${EXE} ${RUNID}.xml > ${RUNID}.out
   date

This job can be submitted with the command

.. code-block:: bash

   qsub RunTrace.pbs

This will create an HDF5 file with a group structure for time slices similar
to other output (Step#0, Step#1, ...), but with each time slice also
containing a group for each field line seed, i.e.

.. code-block::

   Step#0
     Line#0
     Line#1
   Step#1
     Line#0
     Line#1

In the above example, the field line seed points are set in a spherical
coordinate system with Nx1, Nx2, Nx3 points in r, phi, theta.

The script ``kaiju/scripts/genXLine.py`` can be run on the output HDF5 data
to create an XDMF file that can be read by VisIt or ParaView.

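A quick way to confirm this layout is to list the output file recursively
with the standard HDF5 tools. This is a sketch only: the file name below
assumes the output follows the ``IonFL`` run ID from the example XML above.

.. code-block:: bash

   # Show the Step#n/Line#m group hierarchy of the trace output.
   h5ls -r IonFL.h5 | head
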
42
docs/source/_obsolete/codeInformation/codeOrg.rst
Normal file
@@ -0,0 +1,42 @@

Code Organization
=================

The Kaiju repository holds source code, pre- and post-processing scripts, and
run configurations for the various Kaiju elements (Gamera MHD, CHIMP
test-particle and interpolation, ReMIX ionospheric solver, etc.).

Directories under ``$KAIJUHOME``:

* src: Main Fortran source files

  * base: Common support libraries (IO, math, string parsing, timing, etc.)

    * base/kdefs.F90: Highest-level parameter definitions (precision,
      constants, etc.)

  * defs: Parameter definitions for various Kaiju elements (Gamera, CHIMP,
    etc.)
  * types: Type definitions for the main data structures of Kaiju elements
  * gamera/chimp/remix: Source files for the various Kaiju elements

    * ICs/: Problem files for specifying initial conditions; can be changed
      using CMake variables

  * voltron: Files for running coupled magnetosphere-ionosphere runs
  * drivers: Fortran program files, containing the main heartbeat loops for
    executables

* examples: Run parameters for various simple example runs
* places: Run scripts and configuration information for running on specific
  environments (e.g. Cheyenne)
* kaipy: Python modules to support pre- and post-processing scripts
* scripts: Command-line Python scripts for pre- and post-processing
* cmake: CMake defaults

  * user.cmake: Optional file (not included by default) to specify user
    overrides of CMake variables

* tests: pFUnit scripts and cases for unit testing

47
docs/source/_obsolete/codeInformation/dataCompression.rst
Normal file
@@ -0,0 +1,47 @@

Data compression in HDF5 files
==============================

Datasets in each output time step (HDF5 group) can be compressed using a
number of algorithms yielding varying levels of performance and precision
(lossless vs. lossy). The default algorithm is
`SZIP <https://support.hdfgroup.org/documentation/hdf5/latest/group___s_z_i_p.html>`_.
Other algorithms are available: `zlib <https://www.zlib.net/>`_,
`zstandard <https://facebook.github.io/zstd/>`_ and
`zfp <https://zfp.readthedocs.io/en/release1.0.0/>`_. In order to use
compression, the HDF5 library must have been built with SZIP or zlib, or one
may load third-party filter plugins.

Compression Performance
-----------------------

.. list-table::
   :header-rows: 1

   * - Algorithm
     - Settings
     - Compression Ratio
     - Built in to HDF5
   * - ZFP Lossy
     - Fixed-Rate: 4.0
     - 6.10
     - N
   * - ZFP Lossy
     - Fixed-Accuracy: 0.00001
     - 1.95
     - N
   * - ZFP Lossless
     - N/A
     - 1.94
     - N
   * - SZIP
     - ``pixels_per_block = 16``
     - 1.55
     - Y
   * - ZStandard
     - Level 22
     - 1.32
     - N
   * - zlib
     - Level 6
     - 1.21
     - Y

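Compression can also be applied or changed after the fact with the standard
``h5repack`` utility that ships with HDF5. The sketch below uses only the
built-in zlib filter; for ZFP or Zstandard you would additionally need the
third-party filter plugins available on ``HDF5_PLUGIN_PATH``, and the file
names here are placeholders, not output files produced by any specific run.

.. code-block:: bash

   # Recompress every dataset in a results file with zlib level 6.
   h5repack -f GZIP=6 msphere.gam.h5 msphere.gam.gz6.h5

   # Inspect which filters are applied to the datasets.
   h5dump -pH msphere.gam.gz6.h5 | grep -A 2 FILTERS
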
@@ -0,0 +1,29 @@

Derivation of Precipitation
===========================

.. image:: https://bitbucket.org/repo/kMoBzBp/images/1884634656-Screen%20Shot%202022-03-03%20at%201.17.04%20PM.png
   :target: https://bitbucket.org/repo/kMoBzBp/images/1884634656-Screen%20Shot%202022-03-03%20at%201.17.04%20PM.png
   :alt: Screen Shot 2022-03-03 at 1.17.04 PM.png

.. image:: https://bitbucket.org/repo/kMoBzBp/images/1328328394-Screen%20Shot%202022-03-03%20at%201.17.07%20PM.png
   :target: https://bitbucket.org/repo/kMoBzBp/images/1328328394-Screen%20Shot%202022-03-03%20at%201.17.07%20PM.png
   :alt: Screen Shot 2022-03-03 at 1.17.07 PM.png

.. image:: https://bitbucket.org/repo/kMoBzBp/images/408027230-Screen%20Shot%202022-03-03%20at%201.17.10%20PM.png
   :target: https://bitbucket.org/repo/kMoBzBp/images/408027230-Screen%20Shot%202022-03-03%20at%201.17.10%20PM.png
   :alt: Screen Shot 2022-03-03 at 1.17.10 PM.png

.. image:: https://bitbucket.org/repo/kMoBzBp/images/3238995540-Screen%20Shot%202022-03-03%20at%201.17.13%20PM.png
   :target: https://bitbucket.org/repo/kMoBzBp/images/3238995540-Screen%20Shot%202022-03-03%20at%201.17.13%20PM.png
   :alt: Screen Shot 2022-03-03 at 1.17.13 PM.png

.. image:: https://bitbucket.org/repo/kMoBzBp/images/3030692407-Screen%20Shot%202022-03-03%20at%201.17.16%20PM.png
   :target: https://bitbucket.org/repo/kMoBzBp/images/3030692407-Screen%20Shot%202022-03-03%20at%201.17.16%20PM.png
   :alt: Screen Shot 2022-03-03 at 1.17.16 PM.png

`A derivation of thermal number flux, energy flux, mean energy, and
connections to the code can be found here
<https://drive.google.com/file/d/1H_4phGITg53kplSW9nmvDbE-vdZ5Ht00/view?usp=sharing>`_.
Contact Dong Lin (ldong@ucar.edu) if you can't access it.

10
docs/source/_obsolete/codeInformation/index.rst
Normal file
@@ -0,0 +1,10 @@

Code Information
================

.. toctree::
   :maxdepth: 1

   codeOrg
   derivation_of_precipitation
   interpolationInMAGE
   dataCompression

@@ -0,0 +1,10 @@

Interpolation in MAGE
=====================

.. image:: https://bitbucket.org/repo/kMoBzBp/images/2723270503-Screen%20Shot%202020-09-17%20at%2010.49.32%20AM.png
   :target: https://bitbucket.org/repo/kMoBzBp/images/2723270503-Screen%20Shot%202020-09-17%20at%2010.49.32%20AM.png
   :alt: Screen Shot 2020-09-17 at 10.49.32 AM.png

It takes a lot of time to type the equations. Contact Dong Lin
(ldong@ucar.edu) if you need a Word or PDF version.

52
docs/source/_obsolete/heliosphere/compCostsHelio.rst
Normal file
@@ -0,0 +1,52 @@

Computational Costs
===================

GAMERA-HELIO+GL model
---------------------

Estimates of computational costs in CPU-hours (NCAR) or SBUs (NASA) for
different model resolutions, based on Helio runs.

On the Derecho supercomputer
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

AMD Epyc Milan (128 cores)
~~~~~~~~~~~~~~~~~~~~~~~~~~

* 256x128x256 (64 ranks on 8 nodes):
  160-hour spinup + 100 hours of CME propagation: ~1,000 CPU-hours
* 512x256x512 (256 ranks on 32 nodes):
  160-hour spinup + 50 hours of CME propagation: ~8,000-10,000 CPU-hours
* 1024x512x1024 (512 ranks on 64 nodes):
  160-hour spinup + 50 hours of CME propagation: ~85k CPU-hours
* 2048x1024x2048 (1024 ranks on 128 nodes):
  160-hour spinup: ~490k CPU-hours; 32 hours of CME propagation: ~125k
  CPU-hours

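As a rough consistency check on these numbers: the cost per simulated hour
scales with the number of cells (8x per resolution doubling) times the number
of CFL-limited time steps (up to another 2x), so each doubling should cost
roughly an order of magnitude more, which is about what the progression above
shows. This is a heuristic scaling argument, not a measured fit:

.. math::

   \text{cost} \;\propto\; N_i N_j N_k \times N_\text{steps}
   \;\propto\; N^3 \times N \;=\; N^4
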
On the Pleiades supercomputer
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Cascade Lake nodes (40 cores, 80 threads), using HT and AVX-512
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

* 256x128x256 (64 ranks on 8 nodes):
  160-hour spinup + 100 hours of CME propagation: ~200 SBUs
* 512x256x512 (256 ranks on 16 nodes):
  160-hour spinup + 100 hours of CME propagation: ~3,200 SBUs
* 1024x512x1024 (512 ranks on 32 nodes):
  160-hour spinup + 50 hours of CME propagation: ~52k SBUs

Broadwell nodes (28 cores, 28 threads)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

* 256x128x256 (64 ranks on 16 nodes):
  160-hour spinup + 100 hours of CME propagation: ~300 SBUs
* 512x256x512 (256 ranks on 64 nodes):
  160-hour spinup + 100 hours of CME propagation: ~4,000 SBUs

181
docs/source/_obsolete/heliosphere/create_gamhelio_ensemble.rst
Normal file
@@ -0,0 +1,181 @@

``create_gamhelio_ensemble.py``
===============================

The Python script ``create_gamhelio_ensemble.py`` was developed to simplify
the process of configuring and running an ensemble of heliosphere models with
the ``kaiju`` software. It reads a single description file that specifies all
of the parameters needed for an ensemble of model runs.

Running the ``create_gamhelio_ensemble.py`` script
--------------------------------------------------

The ``create_gamhelio_ensemble.py`` script is provided as part of the
``kaiju`` software. It is found at
``$KAIJUHOME/scripts/makeitso-gamhelio/create_gamhelio_ensemble.py``, where
``$KAIJUHOME`` is the location of your ``kaiju`` software tree. After
configuring your ``kaiju`` software, you can get help text for the script
like this:

.. code-block:: bash

   create_gamhelio_ensemble.py --help
   usage: create_gamhelio_ensemble.py [-h] [--clobber] [--debug] [--verbose] ensemble_description_path

   Create an ensemble of gamhelio runs.

   positional arguments:
     ensemble_description_path
                           Path to .ini file describing ensemble (default: None)

   optional arguments:
     -h, --help     show this help message and exit
     --clobber      Overwrite existing files and directories (default: False).
     --debug, -d    Print debugging output (default: False).
     --verbose, -v  Print verbose output (default: False).

The ``ensemble_description_path`` argument specifies an ``.ini``-format file
which provides the parameter ranges for the ensemble.

The ensemble description file
-----------------------------

The ensemble description file is an ``.ini``-format file which provides the
parameter ranges for the ensemble, as well as the system-specific information
required to run the ensemble. In this way, ``create_gamhelio_ensemble.py`` is
a stripped-down, non-interactive version of ``makeitso-gamhelio.py``.

The ensemble description file starts with a section called ``[glparams]``,
which defines the ranges for each parameter of the Gibson & Low CME model to
use in the ensemble. Next is the section ``[paths]``, which defines several
important directory and file locations. This is followed by a ``[norm]``
section, which defines a few normalization parameters for the model. Last is
the ``[pbs]`` section, which defines all of the parameters needed to create
the ensemble as a set of PBS jobs.

.. note:: This script works on both ``derecho`` and ``pleiades``, but does
   not contain any system-specific information. All system-specific
   information must be included in the ``[pbs]`` section of the ensemble
   description file.

An example of an ensemble description file is provided below.

.. code-block:: ini

   [glparams]
   crot = 2095
   den_cme = 1500.
   dores = T
   dtres = 10.
   gl_bpar = 0.001, 0.002
   gl_lat = 7.
   gl_legsang = 10.
   gl_lon = 45.
   gl_orientation = 0.
   gl_topmorph = 1.75
   gl_vel_fh = 400.
   nres = 00000
   resid = helio
   t_cme = 500000.

   [paths]
   rundir = /glade/u/home/ewinter/cgs/runs/test/create_gamhelio_ensemble
   execpath = /glade/u/home/ewinter/cgs/runs/test/create_gamhelio_ensemble/gamhelio_mpi.x
   gridpath = /glade/u/home/ewinter/cgs/runs/test/create_gamhelio_ensemble/heliogrid.h5
   innerbcpath = /glade/u/home/ewinter/cgs/runs/test/create_gamhelio_ensemble/innerbc.h5
   restartpath = /glade/u/home/ewinter/cgs/runs/test/create_gamhelio_ensemble/helio_0002_0002_0002_0000_0000_0000.gam.Res.00000.h5

   [norm]
   gn0 = 200.
   gB0 = 1.e-3
   x0 = 6.956e10

   [pbs]
   hpc_system = derecho
   account_name = P28100045
   queue = main
   job_priority = economy
   select = 4
   ncpus = 128
   mpiprocs = 2
   ompthreads = 64
   modules = ncarenv/23.06, craype/2.7.20, intel/2023.0.0, ncarcompilers/1.0.0, cray-mpich/8.1.25, hdf5/1.12.2
   kaiju_install_dir = /glade/u/home/ewinter/cgs/aplkaiju/kaiju-private/ewinter-gamhelio_ensembles/kaiju-private
   kaipy_install_dir = /glade/u/home/ewinter/cgs/aplkaiju/kaipy-private/development/kaipy-private

The most important of these parameters are:

:crot: Number of the Carrington rotation represented in the WSA FITS file.

:dores: Set to ``T`` to launch the simulation from a restart file.

:dtres: Cadence for creation of restart files (simulated hours).

:gl_bpar: Maximum magnitude (Gauss) of the magnetic field in the Gibson-Low
   CME model.

:gl_lat: Latitude (degrees) of the CME emergence in the initial WSA map at
   21.5 R\ :sub:`S`.

:gl_lon: Longitude (degrees) of the CME emergence in the initial WSA map at
   21.5 R\ :sub:`S`.

:gl_orientation: Orientation angle (degrees) of the flux rope in the
   Gibson-Low CME model. An orientation of 0° means that the flux rope is in
   the equatorial plane.

:gl_topmorph: Sets the topology parameter of the Gibson-Low CME model.

:gl_vel_fh: Radial velocity of the CME front (km/s).

:nres: Index of the restart file to use for a restart run.

:resid: Run ID of the restart file to use.

:t_cme: Time of CME emergence (simulated hours relative to start of
   simulation).

Creating the ensemble
---------------------

The ensemble definition file is passed to ``create_gamhelio_ensemble.py`` on
the command line:

.. code-block:: bash

   create_gamhelio_ensemble.py --verbose ensemble.ini

   Loading ensemble description from ensemble.ini.
   Creating ensemble parameter grid.
   Computing additional parameters.
   Creating run directories.
   Creating gamhelio XML files.
   Creating PBS files.
   Creating bash script for ensemble.

These messages illustrate the following steps in the ensemble creation
process:

* Read the ensemble description file.
* Create a grid of all possible parameter combinations. For example, with
  ``gl_bpar = 0.001, 0.002`` and all other parameters single-valued, the grid
  contains two members.
* Compute additional parameters from the ensemble parameters.
* Create a directory for each ensemble member simulation.
* Create the input XML files for each ensemble member for use by
  ``gamhelio_mpi.x``.
* Create the PBS script for each ensemble member.
* Create a bash script that submits each ensemble member as a separate PBS
  job.

Once this step is complete, you can submit the entire set of ensemble members
for execution by running the script:

.. code-block:: bash

   bash ensemble.sh

48
docs/source/_obsolete/heliosphere/gibson-low.rst
Normal file
@@ -0,0 +1,48 @@

Running the Gibson & Low Model
==============================

.. Compiling Standalone
.. --------------------

.. Load modules for serial runs.
.. In your build directory:

.. .. code-block:: bash

..    make gl.x

.. Model Configuration
.. -------------------

.. Specify model parameters in the ``<prob ...>`` XML config; the default parameters for the spheromak are set to Schmitt et al. (2011), Table 2.

.. .. code-block:: xml

..    <?xml version="1.0"?>
..    <Kaiju>
..      <CME>
..        <sim runid="sphere64" isLoud="F" isDebug="F" isTopomorph="F" isStandalone="T" isAtmosphere="T" model="GL"/>
..        <time Tstart_transient="0.0" />
..        <output doTimer="T" />
..        <prob StateCoord="Sphere" orientation="1.57079" cmer="-1.57079" alpha="0.0" tfin="72000.0" dt="7200.0" frontheight="1.35" bmax="1.0" legsang="45.0" vel_fh="50"/>
..        <idir min="1.0" max="21.0" N="64"/>
..        <jdir min="0.1" max="0.9" N="64"/>
..        <kdir min="0.0" max="1.0" N="64"/>
..      </CME>
..    </Kaiju>

.. The last three lines in the CME block define a 3D grid. Here, we set a spherical shell 1-21 R_Sun in radius, 0.1*pi to 0.9*pi in the polar direction (theta angle), and 0-360 degrees in the azimuthal direction (phi angle).

.. Input parameters have the following meanings. ``orientation`` is the orientation of a spheromak, e.g. ``0`` means the spheromak axis of asymmetry is perpendicular to the solar equatorial plane, and ``1.57`` means that the axis of asymmetry is perpendicular to the meridional slice. ``frontheight`` is the distance from the center of the Sun to the front of a spheromak. ``bmax`` is the maximum value of the magnetic field strength in a spheromak, set in [Gs]; the default value is 0.001. ``legsang`` is the angular width of a CME in degrees. ``vel_fh`` is the velocity at the CME front in [km/s]. ``tfin`` is the total simulation time in [s]. ``dt`` is the time interval in [s] between data outputs.

.. See more examples of XML files here:

.. .. code-block::

..    ~/kaiju/examples/gl

.. Postprocessing
.. --------------

.. You may then use the ``genXDMF.py`` script in order to process the standalone h5 output to visualize in ParaView. The "grid" is formatted such that it has coordinates X, Y, Z on a real sphere in units of R_Sun.

36
docs/source/_obsolete/heliosphere/helio-cme.rst
Normal file
@@ -0,0 +1,36 @@

Running Gamera Helio with an embedded CME
=========================================

.. Compile ``gamhelio.x`` or ``gamhelio_mpi.x`` as described in
   :doc:`Helio Quickstart </quickStart/helioQuickStart>` using the ``giblow``
   branch.

.. You will then need to add the ``<CME />`` element to the input XML file and fill out the appropriate CME model parameters. Currently, only the Gibson-Low flux rope model is implemented, and you may supply related model parameters in the ``<prob />`` element.

.. Some reasonable defaults are below, representing a slightly elongated, un-tethered spheromak launched centered on the equatorial plane at longitude = 2.95 [rad].

.. .. code-block:: xml

..    <?xml version="1.0"?>
..    <Kaiju>
..      <Gamera>
..        <sim runid="wsa256cmeLon295" doH5g="T" H5Grid="heliogrid.h5" icType="user" pdmb="1.0" rmeth="7UP"/>
..        <time tFin="72.0"/>
..        <spinup doSpin="T" tSpin="160.0" tIO="0.0"/>
..        <output dtOut="1.0" tsOut="100" doTimer="T"/>
..        <physics doMHD="T" gamma="1.5"/>
..        <prob doCME="T" rotateCME="T" isSpSymSW="F" DeltaT="0.0" model="monopole"/>
..        <helio Tsolar="25.38" vrin="385.0" vrkin="385.0" tin="4.d5" rhoin="800.0" brin="0.000" brkfin="0.000" />
..        <restart resId="wsa" nRes="00008" dtRes="6." doRes="F"/>
..        <!-- Next 3 lines only necessary for MPI runs, ignored for serial runs -->
..        <iPdir N="4" bcPeriodic="F"/>
..        <jPdir N="4" bcPeriodic="F"/>
..        <kPdir N="4" bcPeriodic="T"/>
..      </Gamera>
..      <CME>
..        <sim isLoud="T" isDebug="F" isTopomorph="F" isStandalone="F" model="GL" scaleBmax="T"/>
..        <time Tstart_transient="0.0" />
..        <output doTimer="T" />
..        <prob Den_CME="800.0" T_CME="1000000.0" orientation="1.57079" lat="0.0" lon="2.95" alpha="0.0" frontheight="21.5" legsang="60.0" apar="0.05" Bmax="0.003" vel_fh="900.0" />
..      </CME>
..    </Kaiju>

285
docs/source/_obsolete/heliosphere/helioQuickStart.rst
Normal file
@@ -0,0 +1,285 @@

Heliosphere Quick Start Guide
=============================

**Notes for Updating**: Replace this page with instructions on the steps to
follow to run your own event simulation. Basic points: generate the solar
wind file, decide on which components (Gamera, Mix, RCM, T*GCM) you'll be
using, decide on resolution, generate grid files, generate batch script
files, run the code, and refer readers to the post-processing steps.

**Note:** This quick start assumes you have completed the build instructions.

**Note:** Throughout the descriptions ``$KAIJUHOME`` refers to the
base directory of the `kaiju <https://bitbucket.org/aplkaiju/kaiju>`_
repository.

Initial Setup
-------------

Simple build instructions
~~~~~~~~~~~~~~~~~~~~~~~~~

To compile GAMERA Helio, do the following in your ``~/kaiju/build``
directory:

.. code-block:: bash

   cmake -DENABLE_MPI=ON ..
   make gamhelio_mpi.x

Using ``makeitso`` to create input and batch submission files
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Note: Supported HPC systems only - NCAR Derecho and NASA
Pleiades/Electra/Aitken (default: Pleiades Broadwell nodes).

To run ``gamhelio-makeitso.py`` you must first ensure you have a proper
Python environment with the prerequisite packages. Add the kaipy scripts to
your path by running:

.. code-block:: bash

   . ~/kaiju/scripts/setupEnvironment.sh

You may then run the ``gamhelio-makeitso.py`` script, which will guide you
through setting up the desired model and job parameters. This script operates
as a command-line input program, where basic parameter prompts will be
displayed with default settings, which may be modified.
``gamhelio-makeitso.py`` may be run in ``BASIC``, ``INTERMEDIATE``, or
``EXPERT`` mode, with varying levels of customization of run parameters. The
mode is passed as an argument to the script. Run with the ``--verbose``
option, the script will display additional status output while it runs. Upon
completion, the script will have generated the appropriate input files and
batch scripts, and it finally prints instructions on how to submit the
generated run to your system's batch scheduler.

.. code-block:: shell

   gamhelio-makeitso.py --verbose --mode=(BASIC|INTERMEDIATE|EXPERT)

Manual Run Setup
----------------

Set a grid and boundary conditions
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

To create files with a spherical grid and inner boundary conditions for the
inner heliosphere simulation, run

.. code-block:: bash

   wsa2gamera.py ~/kaiju/kaipy/gamhelio/ConfigScripts/startup.config

If needed, edit the config file
``~/kaiju/kaipy/gamhelio/ConfigScripts/startup.config``. ``Ni``, ``Nj``, and
``Nk`` set the number of cells in the r, theta, and phi directions,
respectively. ``tMin`` and ``tMax`` set the range in theta, counting from the
North (+Z) direction, which corresponds to theta=0. ``Rin`` and ``Rout`` set
the range in radius (the distance unit is solar radii).

.. code-block:: shell

   ;Comments and definitions:
   ;Modify if needed paths to a grid file, output innerbc file and WSA fits file
   ;tMin and tMax set a range for theta [tMin, tMax]*pi
   ;Rin and Rout are inner and outer boundaries in a radial direction
   ;Ni, Nj, Nk set number of cells in r, theta, phi directions
   ;Nghost is a number of ghost cells

   [Gamera]
   gameraGridFile = heliogrid.h5
   GridDir = ./
   gameraIbcFile = innerbc.h5
   IbcDir = ./

   [Grid]
   tMin = 0.1
   tMax = 0.9
   Rin = 21.5
   Rout = 215.
   Ni = 128
   Nj = 64
   Nk = 128

   [WSA]
   ;wsafile is the path to the WSA fits file relative to $KAIJUHOME
   ;Helio test uses WSA file for Carrington Rotation 2193
   wsafile = examples/helio/vel_201708132000R002_ahmi.fits
   density_temperature_infile = no
   gauss_smooth_width = 0 ; 8
   normalized = no

   [Constants]
   gamma = 1.5
   Nghost = 4
   Tsolar = 27.27

   [Normalization]
   B0 = 1.e-3 ; in [Gs] equal to 100 [nT]
   n0 = 200. ; in [cm-3]
   T0 = 1.e6 ; in [K]

By default, the spherical grid for the inner heliosphere simulation is
uniform. Other grid options are in ``$KAIJUHOME/kaipy/gamera/gamGrids.py``.
GenKSphNonU creates a non-uniform grid in the r-direction, changing smoothly
from a finer grid near the inner boundary to a coarser grid near the outer
boundary; GenKSphNonUG creates a custom grid for a CME simulation with a fine
uniform grid in the region 0.1-0.3 AU and a non-uniform coarser grid further
out to 1 AU. If needed, modify wsa2gamera.py to use any of these options or
create your own grid function in ``$KAIJUHOME/kaipy/gamera/gamGrids.py``.

Check that you successfully generated heliogrid.h5 with the grid and
innerbc.h5 with boundary conditions in the run directory.

.. code-block:: shell

   HDF5 "innerbc.h5" {
   FILE_CONTENTS {
    group /
    dataset /br
    dataset /br_kface
    dataset /et_kedge
    dataset /rho
    dataset /temp
    dataset /vr
    dataset /vr_kface
    }
   }

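The grid file can be checked the same way; it should contain the Cartesian
corner coordinates. The expected listing below is a sketch based on the grid
file produced by ``wsa2gamera.py``:

.. code-block:: shell

   h5dump -n heliogrid.h5
   HDF5 "heliogrid.h5" {
   FILE_CONTENTS {
    group /
    dataset /X
    dataset /Y
    dataset /Z
    }
   }
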

XML input file
~~~~~~~~~~~~~~

An example wsa.xml input file for an MPI gamera helio run is as follows (for
resolution NixNjxNk = 128x64x128 or 256x128x256):

.. code-block:: xml

   <?xml version="1.0"?>
   <Kaiju>
     <Gamera>
       <sim runid="wsa" doH5g="T" H5Grid="heliogrid.h5" icType="user" pdmb="1.0" rmeth="7UP"/>
       <time tFin="200."/>
       <output dtOut="50." tsOut="50" timer="F"/>
       <physics doMHD="T" gamma="1.5"/>
       <prob Tsolar = "25.38"/>
       <restart resFile = "wsa.Res.00008.h5" dtRes="1000." doRes="F"/>
       <iPdir N="4" bcPeriodic="F"/>
       <jPdir N="2" bcPeriodic="F"/>
       <kPdir N="4" bcPeriodic="T"/>
     </Gamera>
   </Kaiju>

For a high-resolution run (1024x512x1024) use the following decomposition:

.. code-block:: xml

   <iPdir N="8" bcPeriodic="F"/>
   <jPdir N="4" bcPeriodic="F"/>
   <kPdir N="8" bcPeriodic="T"/>

Have wsa.xml in the run directory.

PBS script
~~~~~~~~~~

Here is an example PBS script to run MPI gamera (for a 128x64x128 resolution
helio run):

.. code-block:: bash

   #!/bin/bash
   #PBS -A UJHB0015
   #PBS -N heliompi
   #PBS -j oe
   #PBS -q regular
   #PBS -l walltime=02:00:00
   #PBS -l select=16:ncpus=36:mpiprocs=2:ompthreads=36
   #PBS -m abe
   #PBS -M your_email_address

   export EXE="./gamhelio_mpi.x"
   export RUNID="wsa"

   source ~/.bashrc

   module list
   hostname
   date
   #export OMP_NUM_THREADS=36
   export KMP_STACKSIZE=128M
   export JNUM=${PBS_ARRAY_INDEX:-0}
   echo "Running $EXE"
   mpirun ${EXE} ${RUNID}.xml ${JNUM} > ${RUNID}.${JNUM}.out
   date

The example above uses 16 compute nodes (2 MPI ranks per node), creating 32
processes for the 32 MPI ranks (4x2x4 = 32 in the decomposition for the
low-resolution run above).

For a high-resolution run (1024x512x1024) we have 8x4x8 = 256 MPI ranks, so
we select 128 nodes (with 2 MPI ranks/node).

.. code-block:: shell

   #PBS -l walltime=11:59:00
   #PBS -l select=128:ncpus=36:mpiprocs=2:ompthreads=36:mem=109GB

See PBS job basics `here <https://arc.ucar.edu/docs>`_.

Submitting a run
----------------

Copy or link the gamhelio executable ``~/kaiju/build/bin/gamhelio_mpi.x``:

.. code-block:: bash

   ln -s ~/kaiju/build/bin/gamhelio_mpi.x gamhelio_mpi.x

Have in the run directory the grid file heliogrid.h5, the boundary conditions
file innerbc.h5, the PBS script gamera.pbs, and the input XML file wsa.xml.

.. code-block:: shell

   user@cheyenne5:/glade/work/user/helioRun> ls
   gamhelio_mpi.x gamera.pbs heliogrid.h5 innerbc.h5 wsa.xml

Run the job:

.. code-block:: bash

   qsub gamera.pbs

Check the status of your job in the queue:

.. code-block:: bash

   qstat -u username

Normalization in Gamera-Helio (!Move to Model Description!)
-----------------------------------------------------------

The three main normalization parameters are:

#. Length L = 1 R_S = 6.955e10 cm
#. Magnetic field magnitude B0 = 100 nT = 1.e-3 Gs
#. Number density n0 = 200 cm-3

Velocity is normalized to the Alfven velocity V0 = B0/sqrt(4 pi rho0)
~ 150 km/s. Time is normalized to t = L/V0 = 4637 s ~ 1.29 h ~ 1 hr 17 min.
Pressure is normalized to the magnetic pressure B0^2/(4*pi).

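Written out, assuming a pure proton plasma so that the mass density is
rho0 = n0 * m_p:

.. math::

   V_0 = \frac{B_0}{\sqrt{4\pi n_0 m_p}}
       = \frac{10^{-3}\,\mathrm{Gs}}
              {\sqrt{4\pi \times 200\,\mathrm{cm^{-3}} \times 1.67\times10^{-24}\,\mathrm{g}}}
       \approx 1.5\times10^{7}\,\mathrm{cm/s},
   \qquad
   t_0 = \frac{L}{V_0}
       = \frac{6.955\times10^{10}\,\mathrm{cm}}{1.5\times10^{7}\,\mathrm{cm/s}}
       \approx 4637\,\mathrm{s}
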
Helio Test Run
--------------

From Google Doc:

* Take 128x64x128 as the baseline resolution. It should run at
  50 min/CR on one Cheyenne node.
* Take CR2193 because it compares well (solar minimum). Ask Nick for
  permission to use this file as a test. Put this file somewhere where
  everything else binary is, and use Bitbucket LFS.
* Run the script that generates the grid and BC files.
* Make the xml from the ini (we provide the ini file for the test).
* Compile and run the code.
* Compare the results with the quick-look plots.

4
docs/source/_obsolete/heliosphere/helioVisualizing.rst
Normal file
@@ -0,0 +1,4 @@

Visualizing Heliosphere Results
===============================

.. Elena is going to add some great stuff here

14
docs/source/_obsolete/heliosphere/index.rst
Normal file
@@ -0,0 +1,14 @@

Heliosphere Simulations with MAGE
=================================

.. toctree::
   :maxdepth: 1

   helioQuickStart
   create_gamhelio_ensemble
   compCostsHelio
   gibson-low
   helio-cme
   helioVisualizing
   steadyStateRun
   xmlFiles

270
docs/source/_obsolete/heliosphere/steadyStateRun.rst
Normal file
@@ -0,0 +1,270 @@

Steady-State Run
================

.. These instructions are to build the MPI gamhelio on the Cheyenne supercomputer.

.. First, you must create a directory in which to compile the ``kaiju`` code for the gamhelio case. Typically, this is ``$KAIJUHOME/build_helio``, where ``KAIJUHOME`` is the path to the directory created when you cloned the kaiju repository. Now you can build the ``kaiju`` software:

.. .. code-block:: shell

..    cd $KAIJUHOME
..    mkdir build_helio
..    cd build_helio
..    cmake -DENABLE_MPI=ON ..
..    make -j4 gamhelio_mpi

.. When the build is complete, you will find the compiled executable in the bin subdirectory of your build directory:

.. .. code-block:: shell

..    bash-4.2$ ls bin
..    gamhelio_mpi.x

.. Creating Grid and Boundary Conditions
.. ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. The default grid for the inner heliosphere simulations is spherical, evenly spaced in the r, theta, and phi directions. For now, the south and north poles are cut out, so theta is within the range [0.1*pi, 0.9*pi]. Grid parameters such as the limits in the r and theta directions and the number of cells Ni, Nj, Nk are set in ``KAIJUHOME/kaipy/gamhelio/ConfigScripts/startup.config`` under ``[Grid]``. The default values for these parameters are below.

.. Other options for the inner helio spherical grid exist in ``KAIJUHOME/kaipy/gamera/gamGrids.py``. GenKSph creates the default uniform spherical grid. Other options are GenKSphNonU, which makes a non-uniform grid in the r-direction changing smoothly from a finer grid near the inner boundary to a coarser grid near the outer boundary; and GenKSphNonUG, which creates a custom grid for a CME project with a fine uniform grid in the region 0.1-0.3 AU and a non-uniform coarser grid further out to 1 AU. If needed, modify the preprocessing script wsa2gamera.py to use any of these options or create your own grid function in ``KAIJUHOME/kaipy/gamera/gamGrids.py``.

.. To generate h5 files with the grid and boundary conditions you need to have ready a config file ``KAIJUHOME/kaipy/gamhelio/ConfigScripts/startup.config``. If needed, modify the paths to the output helio grid file, the output file innerbc.h5, and the input WSA fits file.

.. .. code-block:: shell

..    [Gamera]
..    gameraGridFile = heliogrid.h5
..    GridDir = ./
..    gameraIbcFile = innerbc.h5
..    IbcDir = ./

..    [Grid]
..    tMin = 0.1
..    tMax = 0.9
..    Rin = 21.5
..    Rout = 220.
..    Ni = 128
..    Nj = 64
..    Nk = 128

..    [WSA]
..    ;wsafile is the path to the WSA fits file relative to $KAIJUHOME
..    ;Helio test uses WSA file for Carrington Rotation 2193, by default
..    wsafile = examples/helio/vel_201708132000R002_ahmi.fits
..    density_temperature_infile = no
..    gauss_smooth_width = 0 ; 8
..    normalized = no

..    [Constants]
..    gamma = 1.5
..    Nghost = 4
..    Tsolar = 25.38
..    nCS = 1100. ; in [cm-3]
..    TCS = 1.e6 ; in [K]

..    [Normalization]
..    B0 = 1.e-3 ; in [Gs] equal to 100 [nT]
..    n0 = 200. ; in [cm-3]

.. Now run

.. .. code-block:: shell

..    wsa2gamera.py ~/kaiju/kaipy/gamhelio/ConfigScripts/startup.config

.. Check that you successfully produced files heliogrid.h5 and innerbc.h5.

.. .. code-block:: shell

..    user@cheyenne5:/rundir/> h5dump -n innerbc.h5
..    HDF5 "innerbc.h5" {
..    FILE_CONTENTS {
..     group /
..     dataset /br
..     dataset /br_kface
..     dataset /rho
..     dataset /temp
..     dataset /vr
..     dataset /vr_kface
..     }
..    }

..    user@cheyenne5:/rundir> h5dump -n heliogrid.h5
..    HDF5 "heliogrid.h5" {
..    FILE_CONTENTS {
..     group /
..     dataset /X
..     dataset /Y
..     dataset /Z
..     }
..    }

.. XML input file
.. ^^^^^^^^^^^^^^

.. An example xml input file for an MPI gamera helio run is as follows:

.. .. code-block:: shell

..    <?xml version="1.0"?>
..    <Kaiju>
..      <Gamera>
..        <sim runid="wsa" doH5g="T" H5Grid="heliogrid.h5" icType="user" pdmb="1.0" rmeth="7UP"/>
..        <time tFin="850."/>
..        <output dtOut="12." tsOut="50" timer="F"/>
..        <physics doMHD="T" gamma="1.5"/>
..        <prob Tsolar = "25.38"/>
..        <restart resFile = "wsa.Res.00008.h5" dtRes="1000." doRes="F"/>
..        <iPdir N="4" bcPeriodic="F"/>
..        <jPdir N="2" bcPeriodic="F"/>
..        <kPdir N="4" bcPeriodic="T"/>
..      </Gamera>
..    </Kaiju>

.. For a high-resolution run (1024x512x1024) use this decomposition:

.. .. code-block:: shell

..    <iPdir N="8" bcPeriodic="F"/>
..    <jPdir N="4" bcPeriodic="F"/>
..    <kPdir N="8" bcPeriodic="T"/>

.. PBS script
.. ^^^^^^^^^^

.. Here is an example PBS script to run MPI gamera (for a low-resolution helio run, 256x128x256):

.. .. code-block:: shell

..    #!/bin/bash
..    #PBS -A ujhb0015
..    #PBS -N heliompi
..    #PBS -j oe
..    #PBS -q regular
..    #PBS -l walltime=00:20:00
..    #PBS -l select=16:ncpus=36:mpiprocs=2:ompthreads=18

..    export TMPDIR=/glade/scratch/$USER/temp
..    mkdir -p $TMPDIR

..    export EXE="./gamhelio_mpi.x"
..    export RUNID="wsa"

..    # Optional stuff to load an environment
..    source ~/.bash_profile

..    if [[ -z "$KAIJUHOME" ]]; then
..        # $KAIJUHOME environment variable is not set
..        echo "The KAIJUHOME environment variable is not set"
..        echo "You must either pass your environment with the -V option or"
..        echo " execute the kaiju/scripts/setupEnvironment script in your ~/.bashrc file"
..        exit
..    fi
..    if [[ ! -z "$MODULE_LIST" ]]; then
..        # user passed a list of modules to load as the environment variable MODULE_LIST
..        # call this with the flag '-v MODULE_LIST="<modules>"' to use this option
..        # where <modules> is a space-separated list of modules in quotes
..        # Example:
..        # qsub -v MODULE_LIST="intel/2021.2 ncarenv/1.3 ncarcompilers/0.5.0 mpt/2.22" RunMpi.pbs
..        module purge
..        module load $MODULE_LIST
..    elif [[ ! -z "$MODULE_SET" ]]; then
..        # user passed a module set name to load as the environment variable MODULE_SET
..        # call this with the flag '-v MODULE_SET=<set name>' to use this option
..        # where <set_name> is a saved set of modules, as printed by 'module savelist'
..        # Example:
..        # qsub -v MODULE_SET=kaiju21 RunMpi.pbs
..        module purge
..        module restore $MODULE_SET
..    else
..        # user did not pass a module set, load a default set
..        module purge
..        module restore mpikaiju
..    fi

..    if [[ ! -z "$MPT_VERSION" ]]; then
..        echo "USING MPIEXEC_MPT"
..        export MPI_TYPE_DEPTH=32
..        export OMP_NUM_THREADS=36
..        export MPI_IB_CONGESTED=0
..        export NODEFILE=$TMPDIR/nodefile.$PBS_JOBID
..        cp $PBS_NODEFILE $NODEFILE
..        export MPICOMMAND="mpiexec_mpt $KAIJUHOME/scripts/preproc/correctOMPenvironment.sh $NODEFILE omplace"
..    else
..        echo "USING MPIRUN"
..        export MPICOMMAND="mpirun"
..        export OMP_NUM_THREADS=18
..        export I_MPI_PIN_DOMAIN="omp"
..    fi

..    module list
..    hostname
..    date
..    export KMP_STACKSIZE=128M
..    export JNUM=${PBS_ARRAY_INDEX:-0}
..    echo "Running $EXE"
..    ${MPICOMMAND} ${EXE} ${RUNID}.xml ${JNUM} >> ${RUNID}.${JNUM}.out
..    date

.. The example above uses 16 compute nodes (2 MPI ranks per node), creating 32 processes for the 32 MPI ranks (4x2x4 = 32 in the decomposition for the low-resolution run above).

.. For a high-resolution run (1024x512x1024) we have 8x4x8 = 256 MPI ranks, so we select 128 nodes (with 2 MPI ranks/node).

.. .. code-block:: shell

..    #PBS -l walltime=11:59:00
..    #PBS -l select=128:ncpus=36:mpiprocs=2:ompthreads=36:mem=109GB

.. See PBS job basics `here <https://www2.cisl.ucar.edu/resources/computational-systems/cheyenne/running-jobs/submitting-jobs-pbs>`_ on Cheyenne.

.. Using MPT
.. ^^^^^^^^^

.. If you are running with the MPT MPI library, the submission script will require some additional modifications, described in a dedicated wiki page [[Running with MPT]].

.. Submitting a run
.. ^^^^^^^^^^^^^^^^

.. Create a run directory. Link in the files with the grid (heliogrid.h5), the inner boundary conditions (innerbc.h5), and the gamera executable gamera_mpi.x, for example

.. .. code-block:: shell

..    ln -s ~/kaiju/build/bin/gamera_mpi.x gamera_mpi.x

.. Place into the run directory the PBS script gamera.pbs and the input xml file wsa.xml. So here is the content of your run directory:

.. .. code-block:: shell

..    user@cheyenne5:/glade/work/user/helioRun> ls
..    gamera_mpi.x gamera.pbs heliogrid.h5 innerbc.h5 wsa.xml

.. Run the job:

.. .. code-block:: shell

..    qsub gamera.pbs

.. Check the status of your job in the queue:

.. .. code-block:: shell

..    qstat -u username

.. Normalization in Gamera-Helio
.. ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. The three main normalization parameters are:

.. #. Length L = 1 R_S = 6.955e10 cm
.. #. Magnetic field magnitude B0 = 100 nT = 1.e-3 Gs
.. #. Number density n0 = 200 cm-3

.. Velocity is normalized to the Alfven velocity V0 = B0/sqrt(4 pi rho0) ~ 154 km/s.
.. Time is normalized to t = L/V0 = 4637 s ~ 1.29 h.
.. Pressure is normalized to the magnetic pressure B0^2/(4*pi).

.. Output data
.. ^^^^^^^^^^^

4
docs/source/_obsolete/heliosphere/xmlFiles.rst
Normal file
@@ -0,0 +1,4 @@

XML Files
=========

.. TBD

18
docs/source/_obsolete/index.rst
Normal file
@@ -0,0 +1,18 @@

Internal ``kaiju`` documentation
================================

These are documents that are not ready or not intended for public consumption.

.. toctree::
   :maxdepth: 2

   chimp/index
   heliosphere/index
   magnetosphere/index
   rcm/index
   testing/index
   user_rules/index
   codeInformation/index
   tools/index
   FAQ

476
docs/source/_obsolete/magnetosphere/Gamerasphere.rst
Normal file
@@ -0,0 +1,476 @@

Magnetosphere Quick Start
=========================

This is a quick set of instructions to run a coupled Gamera-ReMIX
(magnetosphere-ionosphere) run on Cheyenne. It uses the executable
``voltron.x``.

OMP Magnetosphere
-----------------

Basic magnetosphere runs require five things:

* Grid file: Collection of grid corners in HDF5 format
* Solar wind: Solar wind time series in HDF5 format
* Input deck: Run configuration parameters in XML format
* Executable: Duh (voltron.x)
* Run script: Job script to set up the run for a single node on Cheyenne

Note: this assumes the main repo has been downloaded to a base directory
``kaiju``.

Grid file
---------

The simplest way of generating a grid file is to use
``kaiju/scripts/genLFM.py`` to create a standard LFM-style grid of resolution
D/Q/O/H for double/quad/oct/hex. Note, you'll likely have to do this on
Casper, the post-processing node.

.. code-block::

   [skareem@casper26:~/kaiju/scripts]$ genLFM.py -gid D
   Generating Double LFM-style grid ...

   Output: lfmD.h5
   Size: (48,48,64)
   Inner Radius: 2.000000
   Sunward Outer Radius: 28.000106
   Tail Outer Radius: 301.011944
   Low-lat BC: 45.000000
   Ring params:
           <ring gid="lfm" doRing="T" Nr="4" Nc1="8" Nc2="16" Nc3="32" Nc4="32"/>

   Writing to lfmD.h5

The grid generation prints parameters for the ring average, which will be
added to the XML input deck.

Solar wind
----------

Gamera reads solar wind data using HDF5. The simplest way to generate an
appropriate HDF5 file is to use the omni2wind script, which downloads data
from the CDAS database for the specified timeframe and converts it into the
correct format to be read in by Gamera. ``omni2wind.py`` utilizes CDASWS as
an interface to the CDAS datasets, as well as geopack for coordinate
transformations. These modules must be installed before the script can be
run. cdasws can be installed using the command:

.. code-block::

   pip install cdasws

The library has a dependency on SpacePy, which also needs to be installed
(https://spacepy.github.io/install.html). The instructions to install geopack
are located in the README file in ``$KAIJUDIR/external/geopack-2008``. You
will need to specify the compiler, and must configure it to use 8-byte reals.
Here is an example installation command for an Intel compiler:

.. code-block::

   python setup-geopack.py install config_fc --fcompiler=intelem --f77flags=-r8

To install the modules on Cheyenne, you will need to create your own Python
library environment by cloning the NPL library and then activating it with
the commands:

.. code-block::

   ncar_pylib -c [version of library to be cloned] [location to clone library into]
   source $CLONEDLIBRARY/bin/activate

The cdasws and geopack modules can then be installed into the new library and
will be included in your path when the cloned library is activated.
``omni2wind.py`` generates three files: ``bcwind.h5``, the HDF5 solar wind
input file for Gamera, as well as two images, the fit to Bx used and the
solar wind input variables. Example command line output can be seen below.
The time period selected must be longer than an hour and a half; otherwise it
will not produce a ``bcwind.h5`` file and will end in an error.

.. code-block::

   (NPL) bash-4.2$ omni2wind.py -t0 2010-02-25T12:00:00 -t1 2010-02-25T14:00:00
   Retrieving f10.7 data from CDAWeb
   100% [................................................................................] 4668 / 4668
   Average f10.7: 81.1
   Retrieving solar wind data from CDAWeb
   100% [..............................................................................] 27878 / 27878

   RECALC_08: RADIAL SOLAR WIND --> GSW SYSTEM IDENTICAL HERE
   TO STANDARD GSM (I.E., XGSW AXIS COINCIDES WITH EARTH-SUN LINE)

   Bx Fit Coefficients are [-3.3785629  0.60784241  0.11600477]
   Saving "OMNI_HRO_1MIN.txt_bxFit.png"
   /glade/u/home/adamm/dav_pylib/lib/python3.6/site-packages/matplotlib/cbook/deprecation.py:107: MatplotlibDeprecationWarning: Adding an axes using the same arguments as a previous axes currently reuses the earlier instance. In a future version, a new instance will always be created and returned. Meanwhile, this warning can be suppressed, and the future behavior ensured, by passing a unique label to each axes instance.
     warnings.warn(message, mplDeprecation, stacklevel=1)
   Saving "OMNI_HRO_1MIN.txt.png"
   Converting to Gamera solar wind file
   Found 15 variables and 120 lines
   Offsetting from LFM start ( 0.00 min) to Gamera start ( 0.00 min)
   Writing Gamera solar wind to bcwind.h5

Notes
|
||||
-----
|
||||
|
||||
An HDF5 file can also be created from an existing LFM-style solar wind input
|
||||
using the reWind2.py script. For an example see
|
||||
kaiju/examples/earthcmi/OMNI_HRO_1MIN_16772.txt_SW-SM-DAT.
|
||||
|
||||
Note that the GAMERA solar wind file requires more information than the
density, velocity, and IMF used by LFM. The extra variables include F10.7,
dipole tilt, MJD (modified Julian date of each record), and the fitting
coefficients of the IMF (Bx0, ByC, BzC). Sound speed is not used; the
temperature is needed instead.

The MJD is especially helpful when a different time step of solar wind input
is needed.
|
||||
|
||||
The default cadence of solar wind input is 1 min. In order to use higher-rate
solar wind input, we used to have to modify the init Fortran code in LFM. Here
in the GAMERA world, the solar wind file MJD is the only variable that needs
to be modified for a flexible input cadence.
|
||||
|
||||
.. code-block::
|
||||
|
||||
[skareem@casper26:~]$ reWind2.py OMNI_HRO_1MIN_16772.txt_SW-SM-DAT
|
||||
Reading LFM solar wind from OMNI_HRO_1MIN_16772.txt_SW-SM-DAT
|
||||
Found 11 variables and 1440 lines
|
||||
Offsetting from LFM start ( 0.00 min) to Gamera start ( 0.00 min)
|
||||
Writing Gamera solar wind to bcwind.h5
|
||||
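
For example, you can check the cadence of an existing bcwind.h5 directly with
h5py before modifying it. This is a minimal sketch, assuming only the ``T``
and ``MJD`` dataset names used throughout this page:

.. code-block:: python

   import h5py
   import numpy as np

   # Inspect the cadence of an existing solar wind file
   with h5py.File("bcwind.h5", "r") as hf:
       T = hf["T"][:]      # elapsed time [s]
       mjd = hf["MJD"][:]  # modified Julian date of each record

   dt = np.diff(T)
   print("%d records, cadence %.1f s (min %.1f, max %.1f)"
         % (len(T), dt.mean(), dt.min(), dt.max()))
   # MJD should advance consistently with T (86400 s per day)
   print("Max MJD drift: %.3e days"
         % np.max(np.abs((T - T[0])/86400.0 - (mjd - mjd[0]))))
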
|
||||
Input deck
|
||||
----------
|
||||
|
||||
The input deck is an XML file that specifies various parameters to be read at
runtime. An example for a double-resolution run is shown below. Note that
times used to be input in code units (one code unit is 63.8 seconds); the time
variables in the XML are now given in seconds.
|
||||
|
||||
.. code-block:: xml
|
||||
|
||||
<?xml version="1.0"?>
|
||||
<!-- Magnetosphere params, Voltron times in seconds -->
|
||||
<!-- MJD0 is modified Julian date of T=0 in solar wind file -->
|
||||
<VOLTRON>
|
||||
<time tFin="36000.0"/>
|
||||
<output dtOut="60.0" tsOut="100" doTimer="F"/>
|
||||
<restart dtRes="1800.0"/>
|
||||
<coupling dt="5.0"/>
|
||||
</VOLTRON>
|
||||
<Gamera>
|
||||
<sim runid="msphere" doH5g="T" H5Grid="lfmD.h5" icType="user" pdmb="1.0" pFloor="1.0e-8" dFloor="1.0e-6" rmeth="7UP"/>
|
||||
<restart doRes="F" resId="msphere" nRes="0"/>
|
||||
<physics doMHD="T" doBoris="T" Ca="10.0"/>
|
||||
<prob Rho0="0.2" P0="0.001"/>
|
||||
<ring gid="lfm" doRing="T" Nr="4" Nc1="8" Nc2="16" Nc3="32" Nc4="32"/>
|
||||
<wind tsfile="bcwind.h5"/>
|
||||
</Gamera>
|
||||
<!-- Remix params -->
|
||||
<REMIX>
|
||||
<grid Np="360" Nt="45" LowLatBoundary="45.0"/>
|
||||
<conductance pedmin="2.0" hallmin="1.0" sigma_ratio="3.0" const_sigma="True" ped0="5.0"/>
|
||||
</REMIX>
|
||||
|
||||
With these parameters the run will go for 10 hours (36000 s), outputting every
|
||||
1 minute (dtOut=60.0 s) and writing restarts every 30 minutes
|
||||
(dtRes=1800.0 s). The ReMIX-Gamera coupling is done every 5 seconds
|
||||
(coupling dt=5.0). Note the coupling dt is a required input for the
|
||||
ionospheric coupling.
|
||||
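
As a quick sanity check on these settings (simple arithmetic, not a Kaiju
utility):

.. code-block:: python

   tFin, dtOut, dtRes, dtCpl = 36000.0, 60.0, 1800.0, 5.0
   print(int(tFin // dtOut), "plot outputs")            # 600
   print(int(tFin // dtRes), "restart dumps")           # 20
   print(int(tFin // dtCpl), "ReMIX-Gamera couplings")  # 7200
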
|
||||
The runid gives the name of the output mhd/mix file. H5Grid is the name of the
|
||||
grid file that has to be present in the run directory. If the job to be
|
||||
submitted with this xml file is not restarting from a previously saved state,
|
||||
set doRes as False. Otherwise, to restart, set doRes as True and make sure
|
||||
the resFile is linked to the right restarting file. As of 23 April 2020
|
||||
restarts are specified with ID/# instead of filename. Instead of
|
||||
restart/resFile, specify restart/resID and restart/nRes. The restart file
|
||||
msphere.Res.00005.h5 would be:
|
||||
|
||||
``<restart resId="msphere" nRes="5"/>``
|
||||
|
||||
Specifying nRes="-1" will read the XXXXX symbolic link.
|
||||
|
||||
``<physics>`` domain:
|
||||
|
||||
``<prob>`` domain:
|
||||
|
||||
``<ring>`` domain: gid tells the model which type of grid is being
|
||||
used. It is supposed to be consistent with the input grid file. Ring average
|
||||
technique is implemented if doRing is True. The number of parameters for ring
|
||||
average is set in Nr, and each parameter is listed as Nc1, Nc2, ... Usually
|
||||
there are four parameters for double resolution grid and 8 parameters for quad
|
||||
resolution grid.
|
||||
|
||||
``<wind>`` domain: tsfile takes the name of the solar wind file to
|
||||
be used.
|
||||
|
||||
Under the REMIX block, Np and Nt give the number of grid cells in ReMIX
along longitude and latitude. The low-latitude boundary is set at 45 degrees
latitude.
|
||||
|
||||
This setup uses constant conductance; an example using the conductance
model instead would replace the conductance block with this:
|
||||
|
||||
.. code-block:: xml
|
||||
|
||||
<conductance F107="100.0" pedmin="2.0" hallmin="1.0" sigma_ratio="3.0" const_sigma="False" ped0="10.0"/>
|
||||
|
||||
Note that starting from the initial state (a pure dipole) with the conductance
model turned on can sometimes be erratic. It's better to spin up for a while
using constant conductance and then do a restart with the conductance model
turned on. But whatever, you do you.
|
||||
|
||||
Executable and run script
|
||||
-------------------------
|
||||
|
||||
Compile the target "voltron.x" and then just run it with a PBS script. You
|
||||
should probably request a full node and run with 72 threads. Below is the PBS
|
||||
script that I use, but you might need to make changes to it for your setup.
|
||||
This is assuming an XML file called "cmiD.xml", that you have a preset module
|
||||
savelist, and that voltron.x is in your path. Probably some other stuff too.
|
||||
I ran it with
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
qsub -v CHIMPEXE="voltron.x" -N cmiD RunCHIMP.pbs
|
||||
|
||||
and it worked.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
#!/bin/bash
|
||||
#PBS -A P28100045
|
||||
#PBS -N KAIJU
|
||||
#PBS -j oe
|
||||
#PBS -q regular
|
||||
#PBS -l walltime=12:00:00
|
||||
#PBS -l select=1:ncpus=72:ompthreads=72
|
||||
|
||||
#Example usage
|
||||
#qsub -v CHIMPEXE="slice.x" -N cmiD RunCHIMP.pbs
|
||||
#Module savelist "kaiju"
|
||||
#Currently Loaded Modules:
|
||||
# 1) git/2.9.5 (H) 4) impi/2018.4.274 7) python/2.7.16
|
||||
# 2) intel/18.0.5 5) ncarenv/1.3 8) cmake/3.14.4
|
||||
# 3) hdf5/1.10.5 6) ncarcompilers/0.5.0
|
||||
|
||||
export EXE=${CHIMPEXE:-"slice.x"}
|
||||
export RUNID=${PBS_JOBNAME}
|
||||
|
||||
source ~/.bashrc
|
||||
module restore kaiju
|
||||
|
||||
module list
|
||||
hostname
|
||||
date
|
||||
export OMP_NUM_THREADS=72
|
||||
export KMP_STACKSIZE=128M
|
||||
export JNUM=${PBS_ARRAY_INDEX:-0}
|
||||
echo "Running $EXE"
|
||||
${EXE} ${RUNID}.xml ${JNUM} > ${RUNID}.${JNUM}.out
|
||||
date
|
||||
|
||||
Generating solar wind file manually
|
||||
-----------------------------------
|
||||
|
||||
Solar wind data can be defined using an HDF-5 file. This is specified in the
|
||||
XML input deck under "Gamera/wind/tsfile" as seen in the example XML file
|
||||
above.
|
||||
|
||||
Solar wind HDF5 file input units
|
||||
|
||||
* Time [s]
|
||||
* Density [#/cc]
|
||||
* Velocity [m/s]
|
||||
* Pressure [nPa]
|
||||
* Magnetic field [nT]
|
||||
|
||||
Gamera will read the HDF file and convert to its internal code units (see
|
||||
normalization above). Below is an example of a Python script to generate a
|
||||
solar wind file.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
|
||||
import h5py
|
||||
import numpy as np
|
||||
import sys
|
||||
|
||||
TW = 1.0e+5 #Default temperature, K
|
||||
nW = 5 #Default density, #/cc
|
||||
VxW = 400.0 #Default wind, km/s
|
||||
f107val = 100.0 #Default f10.7 flux
|
||||
tilt = 0.0 #Default dipole tilt, radians
|
||||
mjd0 = 58767.0 #Default MJD, set for 2019-10-11 00:00:00
|
||||
|
||||
Bx0 = 0.0 #Default Bx offset for planar front, keep at zero
|
||||
ByC = 0.0 #Default By coefficient used to calculate Bx, include if want tilted field
|
||||
BzC = 0.0 #Default Bz coefficient used to calculate Bx, include if want tilted field
|
||||
|
||||
fOut = "bcwind.h5"
|
||||
|
||||
#Time bounds [hours]
|
||||
tMin = 0.0
|
||||
tMax = 6.0
|
||||
dt = 60.0 #Cadence [s]
|
||||
|
||||
SimT = (tMax-tMin)*60.0*60.0
|
||||
NumT = int( np.ceil(SimT/dt)+1 )
|
||||
|
||||
print("Generating %d slices, T=[%5.2f,%5.2f]"%(NumT,tMin,tMax))
|
||||
|
||||
T = np.linspace(tMin,tMax,NumT)
|
||||
D = np.zeros(NumT)
|
||||
Temp = np.zeros(NumT)
|
||||
Vx = np.zeros(NumT)
|
||||
Vy = np.zeros(NumT)
|
||||
Vz = np.zeros(NumT)
|
||||
Bx = np.zeros(NumT)
|
||||
By = np.zeros(NumT)
|
||||
Bz = np.zeros(NumT)
|
||||
f107 = np.zeros(NumT)
|
||||
ThT = np.zeros(NumT)
|
||||
mjd = np.zeros(NumT)
|
||||
symh = np.zeros(NumT)
|
||||
|
||||
tWin = 1.0 #Window times [hr]
|
||||
for i in range(NumT):
|
||||
t = T[i] #Time in hours
|
||||
if (t <= tWin):
|
||||
D[i] = nW
|
||||
Vx[i] = -VxW
|
||||
Temp[i] = TW
|
||||
f107[i] = f107val
|
||||
ThT[i] = tilt
|
||||
mjd[i] = mjd0 + T[i]/24.0
|
||||
elif (t <= 3*tWin):
|
||||
D[i] = nW
|
||||
Vx[i] = -VxW
|
||||
Temp[i] = TW
|
||||
Bz[i] = -5.0
|
||||
f107[i] = f107val
|
||||
ThT[i] = tilt
|
||||
mjd[i] = mjd0 + T[i]/24.0
|
||||
elif (t <= 6.0*tWin):
|
||||
D[i] = nW
|
||||
Vx[i] = -VxW
|
||||
Temp[i] = TW
|
||||
Bz[i] = +5.0
|
||||
f107[i] = f107val
|
||||
ThT[i] = tilt
|
||||
mjd[i] = mjd0 + T[i]/24.0
|
||||
else:
|
||||
D[i] = nW
|
||||
Vx[i] = -VxW
|
||||
Temp[i] = TW
|
||||
Bz[i] = -5.0
|
||||
f107[i] = f107val
|
||||
ThT[i] = tilt
|
||||
mjd[i] = mjd0 + T[i]/24.0
|
||||
|
||||
#Write solar wind
|
||||
#t,D,V,Temp,B = [s],[#/cm3],[m/s],[K],[nT]
|
||||
|
||||
oTScl = (60*60.0) #hr->s
|
||||
oDScl = 1.0
|
||||
oVScl = 1.0e+3 #km/s->m/s
|
||||
oTempScl = 1.0
|
||||
oBScl = 1.0
|
||||
|
||||
|
||||
with h5py.File(fOut,'w') as hf:
|
||||
hf.create_dataset("T" ,data=oTScl*T)
|
||||
hf.create_dataset("symh" ,data=symh)
|
||||
hf.create_dataset("D" ,data=oDScl*D)
|
||||
hf.create_dataset("Temp" ,data=oTempScl*Temp)
|
||||
hf.create_dataset("Vx",data=oVScl*Vx)
|
||||
hf.create_dataset("Vy",data=oVScl*Vy)
|
||||
hf.create_dataset("Vz",data=oVScl*Vz)
|
||||
hf.create_dataset("Bx",data=oBScl*Bx)
|
||||
hf.create_dataset("By",data=oBScl*By)
|
||||
hf.create_dataset("Bz",data=oBScl*Bz)
|
||||
hf.create_dataset("tilt",data=ThT)
|
||||
hf.create_dataset("f10.7",data=f107)
|
||||
hf.create_dataset("MJD",data=mjd)
|
||||
hf.create_dataset("Bx0",data=Bx0)
|
||||
hf.create_dataset("ByC",data=ByC)
|
||||
hf.create_dataset("BzC",data=BzC)
|
||||
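
To sanity-check the generated file before a run, you can read it back with
h5py; this short sketch assumes only the dataset names written by the script
above:

.. code-block:: python

   import h5py

   with h5py.File("bcwind.h5", "r") as hf:
       for k in ("T", "D", "Temp", "Vx", "Vy", "Vz",
                 "Bx", "By", "Bz", "tilt", "f10.7", "MJD"):
           print("%-6s shape=%s" % (k, hf[k].shape))
       # Scalar fitting coefficients
       print("Bx0=%g ByC=%g BzC=%g"
             % (hf["Bx0"][()], hf["ByC"][()], hf["BzC"][()]))
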
|
||||
Visualization
|
||||
-------------
|
||||
|
||||
Data can be read into VisIt or Paraview using the "kaiju/scripts/genXDMF.py"
|
||||
script, which will create an XDMF file that either of those viewers can
read natively.
|
||||
|
||||
Alternatively, you can try "kaiju/scripts/msphpic.py" script which uses the
|
||||
Python post-processing routines to generate a multi-panel plot of a
|
||||
magnetosphere run. It uses Python libraries that you may or may not have;
I don't know your life.
|
||||
|
||||
Gamera videos can be created in parallel via Slurm job submission and
gamsphVid.py:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
#!/bin/bash -l
|
||||
#SBATCH -J VidMHD
|
||||
#SBATCH --output=%x_%A_%a.out
|
||||
#SBATCH -t 24:00:00
|
||||
#SBATCH -A UJHB0010
|
||||
#SBATCH -p dav
|
||||
#SBATCH --array=1-36
|
||||
#Defaults
|
||||
export BASE="/glade/u/home/skareem/Work/gamercm/Data/"
|
||||
export RUNID="vapD9"
|
||||
|
||||
export TS="60"
|
||||
export TE="1480"
|
||||
export DT="60"
|
||||
|
||||
export ARG=""
|
||||
export ODIR="VidMHD"
|
||||
export JNUM=${SLURM_ARRAY_TASK_COUNT:-"1"}
|
||||
export JID=${SLURM_ARRAY_TASK_ID:-"1"}
|
||||
|
||||
echo "My JobID: " $SLURM_ARRAY_TASK_ID " of " $SLURM_ARRAY_TASK_COUNT
|
||||
export IDIR="${BASE}${RUNID}"
|
||||
source ~/.bashrc
|
||||
setdav
|
||||
gamsphVid.py -d $IDIR -ts $TS -te $TE -dt $DT -Nblk $JNUM -nID $JID -o $ODIR $ARG
|
||||
|
||||
Modify and launch the above script using "sbatch ABOVEFILE.s"
|
||||
|
||||
.. code-block::
|
||||
|
||||
#SBATCH --array=1-36
|
||||
|
||||
Specifies how many parallel jobs you want (36 here)
|
||||
|
||||
.. code-block::
|
||||
|
||||
export TS="60"
|
||||
export TE="1480"
|
||||
export DT="160"
|
||||
|
||||
set the time domain over which you want to create movies. TS is the start time
(in minutes) of your run starting from T=0. TE is the end time (in minutes). DT
is the time (in seconds) between slices. DT will attempt to find the closest
timestep, so if outputs are every 60 s but you input DT=15 s, then you get
duplicated frames.
|
||||
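
The duplication is easy to see with a toy calculation (illustrative only, not
the gamsphVid.py internals):

.. code-block:: python

   import numpy as np

   tOut = np.arange(0.0, 601.0, 60.0)  # slices written every 60 s
   tReq = np.arange(0.0, 601.0, 15.0)  # frames requested every 15 s
   # Snap each requested frame to the closest available slice
   idx = np.abs(tOut[None, :] - tReq[:, None]).argmin(axis=1)
   print(len(idx) - len(np.unique(idx)), "duplicated frames")  # 30
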
|
||||
.. code-block::
|
||||
|
||||
source ~/.bashrc
|
||||
setdav
|
||||
|
||||
Set up your environment variables. These are Kareem-specific, so modify them
to match your own Casper environment.
|
||||
|
||||
MPI Magnetosphere
|
||||
-----------------
|
||||
|
||||
That's still like a whole thing.
|
||||
BIN
docs/source/_obsolete/magnetosphere/analysisTools/contours.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 42 KiB
437
docs/source/_obsolete/magnetosphere/analysisTools/groundMag.rst
Normal file
@@ -0,0 +1,437 @@
|
||||
|
||||
SuperMAG: Ground Magnetometer Measurements
|
||||
==========================================
|
||||
|
||||
Introduction
|
||||
------------
|
||||
|
||||
Comparison of magnetosphere model results to ground magnetometer measurements
|
||||
is a common technique for validating simulations. In the MAGE software, the
|
||||
program ``calcdb.x`` is used to calculate magnetic field perturbations on a
|
||||
grid on the Earth's surface using the
|
||||
`Biot-Savart Law <https://en.wikipedia.org/wiki/Biot%E2%80%93Savart_law>`_,
|
||||
and the ionospheric, field-aligned, and magnetospheric current systems
|
||||
extracted from the MAGE simulation results.
|
||||
|
||||
This page provides an overview of how to perform these calculations using
|
||||
results from a MAGE simulation. The :doc:`superMAGE` page provides
|
||||
instructions for performing more detailed comparisons between these model
|
||||
results and data obtained from the `SuperMAG <https://supermag.jhuapl.edu/>`_
|
||||
collection of ground magnetometer data.
|
||||
|
||||
A simple example
|
||||
----------------
|
||||
|
||||
The program ``calcdb.x`` requires an XML file as input. The XML file provides
|
||||
details on the MAGE simulation results to be used in the calculation of the
|
||||
ground magnetic field perturbations, and the output requirements for the
|
||||
calculation. This XML file is passed to the ``calcdb.x`` program as a
|
||||
command-line argument.
|
||||
|
||||
Assume we have completed a simulation of the magnetosphere using the MPI
|
||||
version of the MAGE code. The run ID is ``geospace``. A 4x4x1 MPI
|
||||
decomposition was used. All results are in the current directory. The
|
||||
simulation results are in the HDF5 files ``geospace_*.gam.h5``. We want to
|
||||
compute the ground magnetic field perturbations corresponding to this model
|
||||
output. Use the following specifications for the calculation:
|
||||
|
||||
* Start the computation at 0 simulated seconds after the start of the
|
||||
simulation results, and end at 14400 simulated seconds (4 simulated hours)
|
||||
after the start of the simulation results, and provide ground magnetic field
|
||||
perturbation values at an interval of 60 simulated seconds (1 simulated
|
||||
minute).
|
||||
|
||||
* The spatial grid on the ground will have 360 latitude bins and 720 longitude
|
||||
bins (0.5 x 0.5 degree/grid cell).
|
||||
|
||||
Here is a sample XML input file that implements these requirements.
|
||||
|
||||
.. code-block:: XML
|
||||
|
||||
<?xml version="1.0"?>
|
||||
<Kaiju>
|
||||
<Chimp>
|
||||
<sim runid="geospace"/>
|
||||
<time T0="0.0" dt="60.0" tFin="14400.0"/>
|
||||
<fields ebfile="geospace" grType="LFM" doJ="T" isMPI="true"/>
|
||||
<parallel Ri="4" Rj="4" Rk="1"/>
|
||||
<grid Nlat="360" Nlon="720" Nz="1"/>
|
||||
</Chimp>
|
||||
</Kaiju>
|
||||
|
||||
These XML elements and their attributes are described below. Note that *all*
|
||||
XML attribute values are entered as strings, in ``"double quotes"``. Defaults
|
||||
are supplied in ``calcdb.x`` for all values not set in the XML file, so
|
||||
technically this file is optional. In practice, you will want to create your
|
||||
own input XML file for ``calcdb.x``, since the defaults for items like
|
||||
``ebfile`` are not usually appropriate for a user run.
|
||||
|
||||
Once created, this XML file (``calcdb-geospace.xml``) can be used to run the
|
||||
ground magnetic field perturbation computation as follows:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
calcdb.x calcdb-geospace.xml >& calcdb-geospace.out
|
||||
|
||||
Making the calculation go faster
|
||||
--------------------------------
|
||||
|
||||
The computation can take a long time, so submitting this computation as a PBS
|
||||
job array can be more time-efficient, by taking advantage of parallel
|
||||
execution. To split the work of ``calcdb.x`` into 4 equally-sized batches, an
|
||||
additional line is needed in the XML file:
|
||||
|
||||
.. code-block:: XML
|
||||
|
||||
<?xml version="1.0"?>
|
||||
<Kaiju>
|
||||
<Chimp>
|
||||
<sim runid="geospace"/>
|
||||
<time T0="0.0" dt="60.0" tFin="14400.0"/>
|
||||
<fields ebfile="geospace" grType="LFM" doJ="T" isMPI="true"/>
|
||||
<parallel Ri="4" Rj="4" Rk="1"/>
|
||||
<grid Nlat="360" Nlon="720" Nz="1"/>
|
||||
<parintime NumB="4"/>
|
||||
</Chimp>
|
||||
</Kaiju>
|
||||
|
||||
The associated PBS script should look something like this (actual modules and
|
||||
paths will be different for your installation):
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
#!/bin/bash
|
||||
|
||||
# This script runs calcdb.x to compute ground delta-B values from the
|
||||
# results of a MAGE simulation.
|
||||
|
||||
# This script was generated to run on derecho.
|
||||
|
||||
#PBS -N calcdb-geospace
|
||||
#PBS -A YOUR_ACCOUNT_HERE
|
||||
#PBS -q main
|
||||
#PBS -l job_priority=economy
|
||||
#PBS -l select=1:ncpus=128:ompthreads=128
|
||||
#PBS -l walltime=01:00:00
|
||||
#PBS -j oe
|
||||
#PBS -m abe
|
||||
|
||||
echo "Job ${PBS_JOBID} started at `date` on `hostname` in directory `pwd`."
|
||||
|
||||
source $HOME/.bashrc
|
||||
|
||||
echo 'Loading modules.'
|
||||
module --force purge
|
||||
module load ncarenv/23.06
|
||||
module load craype/2.7.20
|
||||
module load intel/2023.0.0
|
||||
module load ncarcompilers/1.0.0
|
||||
module load cray-mpich/8.1.25
|
||||
module load hdf5-mpi/1.12.2
|
||||
echo 'The currently loaded modules are:'
|
||||
module list
|
||||
|
||||
echo 'Loading python environment.'
|
||||
conda activate YOUR_CONDA_ENVIRONMENT
|
||||
echo "The current conda environment is ${CONDA_PREFIX}."
|
||||
|
||||
echo 'Setting up kaipy environment.'
|
||||
source YOUR_KAIPY_PATH/kaipy/scripts/setupEnvironment.sh
|
||||
echo "The kaipy software is located at ${KAIPYHOME}."
|
||||
|
||||
echo 'Setting up MAGE environment.'
|
||||
source YOUR_KAIJU_PATH/scripts/setupEnvironment.sh
|
||||
echo "The kaiju software is located at ${KAIJUHOME}."
|
||||
|
||||
echo 'Setting environment variables.'
|
||||
export OMP_NUM_THREADS=128
|
||||
export KMP_STACKSIZE=128M
|
||||
export JNUM=${PBS_ARRAY_INDEX:-0}
|
||||
echo 'The active environment variables are:'
|
||||
printenv
|
||||
|
||||
# Compute the ground delta B values.
|
||||
log_file="calcdb.out.${JNUM}"
|
||||
cmd="./calcdb.x calcdb-geospace.xml ${JNUM} >& ${log_file}"
|
||||
echo "calcdb.x run command is:"
|
||||
echo $cmd
|
||||
eval $cmd
|
||||
|
||||
echo "Job ${PBS_JOBID} ended at `date` on `hostname` in directory `pwd`."
|
||||
|
||||
This job array can be submitted with the command
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
qsub -J 1-4 calcdb-geospace.pbs
|
||||
|
||||
The ``-J 1-4`` argument indicates that a PBS *job array* will be used. Note
|
||||
that the value of the ``-J`` option must be ``1-NumB``, where ``NumB`` must
|
||||
match the value of the ``NumB`` attribute of the ``<parintime/>`` element from
|
||||
the XML file. When this job array completes, the result directory will contain
|
||||
files of the form ``calcdb.out.#`` which will contain the terminal output from
|
||||
``calcdb.x`` during the run for each batch ``#`` in the job array.
|
||||
|
||||
After your job array completes, you'll need to do one more step before you can
|
||||
use your results. The parallel processing results in multiple output HDF5
|
||||
files (``geospace.0001.deltab.h5``, ``geospace.0002.deltab.h5``, ...). The
|
||||
script ``$KAIPYHOME/kaipy/scripts/postproc/pitmerge.py`` will concatenate the
|
||||
individual HDF5 files into one file called ``geospace.deltab.h5``.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$KAIPYHOME/kaipy/scripts/postproc/pitmerge.py -runid geospace
|
||||
|
||||
You can now use ``geospace.deltab.h5`` for your analysis.
|
||||
|
||||
XML file syntax
|
||||
---------------
|
||||
|
||||
The elements and attributes of the XML file are described below.
|
||||
|
||||
* ``<?xml version="1.0"?>`` (required): Specifies XML version.
|
||||
|
||||
* ``<Kaiju>`` (required): Outer container for other elements.
|
||||
|
||||
* ``<Chimp>`` (required): Inner container for other elements.
|
||||
|
||||
* ``<sim>`` (optional): Specify identifying information for the MAGE run.
|
||||
|
||||
* ``runid`` (optional, default ``Sim``): String specifying the identifier
|
||||
for this run of ``calcdb.x``. A best practice is to use the ``runid`` in
|
||||
the name of the XML file.
|
||||
|
||||
* ``<time>`` (optional): Specify time range and interval for magnetic field
|
||||
calculation.
|
||||
|
||||
* ``T0`` (optional, default ``0.0``): Start time (simulated seconds) for
|
||||
ground magnetic field calculation, relative to start of simulation
|
||||
results used as input.
|
||||
* ``dt`` (optional, default ``1.0``): Time interval and output cadence
|
||||
(simulated seconds) for ground magnetic field calculation.
|
||||
* ``tFin`` (optional, default ``60.0``): Stop time (simulated seconds) for
|
||||
ground magnetic field calculation, relative to start of simulation
|
||||
results used as input.
|
||||
|
||||
* ``<fields>`` (required): Describes the MAGE model results to use.
|
||||
|
||||
* ``ebfile`` (optional, default ``ebdata``): Root name for HDF5 files
|
||||
containing the results produced by a MAGE model run.
|
||||
* ``grType`` (optional, default ``EGG``): String specifying grid type used
|
||||
by the MAGE output file. Valid values are ``EGG``, ``LFM``, ``SPH``. If
|
||||
the string is not one of the supported grid types, the default value
|
||||
(``EGG``) is used, and a warning message is printed.
|
||||
* ``doJ`` (required, must be ``T``): If ``T``, compute currents from the
|
||||
MAGE model results.
|
||||
* ``isMPI`` (optional): If ``true``, the MAGE results are from an MPI run.
|
||||
* ``doEBFix`` (optional, default ``false``): Set to ``true`` to "clean"
|
||||
the electric field E so that the dot product of the electric and
|
||||
magnetic fields is 0.
|
||||
* ``doMHD`` (optional, default ``false``): Set to ``true`` to pass the
|
||||
full set of magnetohydrodynamic variables to CHIMP, rather than just
|
||||
electric and magnetic fields.
|
||||
|
||||
* ``<parallel>`` (required): Describes the MPI decomposition of MAGE model
|
||||
run.
|
||||
|
||||
* ``Ri`` (optional, default ``1``): Number of ranks used in MPI
|
||||
decomposition of ``i`` dimension.
|
||||
* ``Rj`` (optional, default ``1``): Number of ranks used in MPI
|
||||
decomposition of ``j`` dimension.
|
||||
* ``Rk`` (optional, default ``1``): Number of ranks used in MPI
|
||||
decomposition of ``k`` dimension.
|
||||
* ``doOldNaming`` (optional, default ``false``): Allow for backward
|
||||
compatibility for MHD files generated with the now deprecated naming
|
||||
convention.
|
||||
|
||||
* ``<parintime>`` (optional): Options to run a PBS job array of ``calcdb.x``
|
||||
to increase calculation speed.
|
||||
|
||||
* ``NumB`` (optional, default ``0``): Number of batches into which the
|
||||
calculation will be split for parallel computation. Must equal the job
|
||||
array size ``NumB`` used in ``-J 1-NumB`` on the ``qsub`` command line.
|
||||
|
||||
* ``<grid>`` (optional): Options to specify the grid on the ground used in
|
||||
``calcdb.x``.
|
||||
|
||||
* ``doH5g`` (optional, default ``false``): Set to ``true`` to use a grid
|
||||
specified in an external HDF5 file, specified by ``H5Grid``. If
|
||||
``false``, will use a Cartesian grid in latitude, longitude, and
|
||||
altitude specified by ``Nlat``, ``Nlon``, and ``Nz``.
|
||||
* ``H5Grid`` (optional, default ``grid.h5``): String specifying the
|
||||
name of the HDF5 file where the grid is specified. Used if
|
||||
``doH5g="true"``.
|
||||
* ``Nlat`` (optional, default ``45``): Number of latitude cells.
|
||||
* ``Nlon`` (optional, default ``90``): Number of longitude cells.
|
||||
* ``Nz`` (optional, default ``2``): Number of altitude cells.
|
||||
* ``doGEO`` (optional, default ``false``): Set to ``true`` to use the
|
||||
geographic coordinate system on the ground. If set to ``false``, will
|
||||
use the SM coordinate system used in magnetosphere runs.
|
||||
* ``dzGG`` (optional, default ``10.0``): Height spacing (kilometers) of
|
||||
grid.
|
||||
* ``z0`` (optional, default ``0.0``): Starting height above ground
|
||||
(kilometers) for grid calculation.
|
||||
|
||||
* ``<calcdb>`` (optional): Optional settings for ``calcdb.x``.
|
||||
|
||||
* ``rMax`` (optional, default ``30``): Radius (as a multiple of the Earth
|
||||
radius) to perform integration over.
|
||||
* ``doCorot`` (optional, default ``false``): Set to ``true`` to use the
|
||||
corotation potential in the calculation.
|
||||
* ``doHall`` (optional, default ``true``): Set to ``true`` to include
|
||||
Hall currents in calculation.
|
||||
* ``doPed`` (optional, default ``true``): Set to ``true`` to include
|
||||
Pedersen currents in calculation.
|
||||
|
||||
* ``<interp>`` (optional): Options related to interpolation.
|
||||
|
||||
* ``wgt`` (optional, default ``TSC``): Sets 1D interpolation type. Valid
|
||||
values are ``TSC`` (1D triangular shaped cloud), ``LIN`` (linear),
|
||||
``QUAD`` (parabolic).
|
||||
|
||||
* ``<output>`` (optional): Options related to ``calcdb.x`` output.
|
||||
|
||||
* ``dtOut`` (optional, default ``10.0``): Output cadence (simulated
|
||||
seconds).
|
||||
* ``timer`` (optional, default ``false``): Set to ``true`` to turn time
|
||||
flags on.
|
||||
* ``tsOut`` (optional, default ``10``): Cadence to output diagnostics to
|
||||
run log file.
|
||||
* ``doFat`` (optional, default ``false``): Set to ``true`` to include
|
||||
spherical vector components of magnetic field perturbations and
|
||||
currents.
|
||||
|
||||
* ``<units>`` (optional): Describe units system used in the model run.
|
||||
|
||||
* ``uID`` (optional, default ``Earth``): Valid values (case-insensitive)
|
||||
are ``EARTH``, ``EARTHCODE``, ``JUPITER``, ``JUPITERCODE``, ``SATURN``,
|
||||
``SATURNCODE``, ``HELIO"``, ``LFM``, ``LFMJUPITER``.
|
||||
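
Since the input is plain XML, it can also be generated programmatically. Here
is a minimal sketch using only the Python standard library, reproducing the
example input deck from earlier on this page:

.. code-block:: python

   import xml.etree.ElementTree as ET

   kaiju = ET.Element("Kaiju")
   chimp = ET.SubElement(kaiju, "Chimp")
   ET.SubElement(chimp, "sim", runid="geospace")
   ET.SubElement(chimp, "time", T0="0.0", dt="60.0", tFin="14400.0")
   ET.SubElement(chimp, "fields", ebfile="geospace", grType="LFM",
                 doJ="T", isMPI="true")
   ET.SubElement(chimp, "parallel", Ri="4", Rj="4", Rk="1")
   ET.SubElement(chimp, "grid", Nlat="360", Nlon="720", Nz="1")

   ET.indent(kaiju)  # pretty-print (Python 3.9+)
   ET.ElementTree(kaiju).write("calcdb-geospace.xml",
                               encoding="UTF-8", xml_declaration=True)
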
|
||||
Plotting the ground magnetic field perturbations
|
||||
------------------------------------------------
|
||||
|
||||
Once you have your results in a single file, such as ``geospace.deltab.h5``,
|
||||
you can plot the results. A script (``supermag_comparison.py``) has been
|
||||
provided as part of the ``kaipy`` package to make standard plots of delta-B
|
||||
results and compare them to data in the SuperMAG database.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
usage: supermag_comparison.py [-h] [--debug] [--smuser SMUSER] [--verbose] calcdb_results_path
|
||||
|
||||
Create MAGE-SuperMag comparison plots.
|
||||
|
||||
positional arguments:
|
||||
calcdb_results_path Path to a (possibly merged) result file from calcdb.x.
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--debug, -d Print debugging output (default: False).
|
||||
--smuser SMUSER SuperMag user ID to use for SuperMag queries (default: ).
|
||||
--verbose, -v Print verbose output (default: False).
|
||||
|
||||
As an example, assuming your delta-B results are in the current directory as
|
||||
``geospace.deltab.h5``, you can generate the standard plots with the command:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$KAIPYHOME/kaipy/scripts/postproc/supermag_comparison.py --verbose --smuser=ewinter /PATH/TO/geospace.deltab.h5
|
||||
|
||||
The ``smuser`` argument is the name of the account (which must already exist)
|
||||
that you use to fetch data from SuperMAG. Note also that the local SuperMAG
|
||||
cache directory (usually ``$HOME/supermag``) must already exist. You should
|
||||
find in your current directory a pair of plots (``contours.png`` and
|
||||
``indices.png``) that compare various computed and measured geomagnetic
|
||||
indices. Sample plots are provided below.
|
||||
|
||||
``contours.png``
|
||||
|
||||
.. image:: contours.png
|
||||
|
||||
``indices.png``
|
||||
|
||||
.. image:: indices.png
|
||||
|
||||
Putting it all together
|
||||
-----------------------
|
||||
|
||||
The ``kaipy`` distribution provides a wrapper script
|
||||
(``$KAIPYHOME/kaipy/scripts/postproc/run_ground_deltaB_analysis.py``) which
|
||||
encapsulates all of these steps, including splitting the calculation across
|
||||
multiple PBS jobs.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
usage: run_ground_deltaB_analysis.py [-h] [--calcdb CALCDB] [--debug] [--dt DT] [--hpc {derecho,pleiades}] [--parintime PARINTIME] [--pbs_account PBS_ACCOUNT] [--smuser SMUSER] [--verbose] mage_results_path
|
||||
|
||||
Compare MAGE ground delta-B to SuperMag measurements, and create SuperMAGE analysis maps.
|
||||
|
||||
positional arguments:
|
||||
mage_results_path Path to a result file for a MAGE magnetosphere run.
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--calcdb CALCDB Path to calcdb.x binary (default: calcdb.x).
|
||||
--debug, -d Print debugging output (default: False).
|
||||
--dt DT Time interval for delta-B computation (seconds) (default: 60.0).
|
||||
--hpc {derecho,pleiades}
|
||||
HPC system to run analysis (default: pleiades).
|
||||
--parintime PARINTIME
|
||||
Split the calculation into this many parallel chunks of MAGE simulation steps, one chunk per node (default: 1).
|
||||
--pbs_account PBS_ACCOUNT
|
||||
PBS account to use for job accounting (default: None).
|
||||
--smuser SMUSER Account name for SuperMag database access (default: None).
|
||||
--verbose, -v Print verbose output (default: False).
|
||||
|
||||
This script generates a set of PBS scripts which perform each stage of the
|
||||
process:
|
||||
|
||||
* Running ``calcdb.x`` to compute ground delta-B values.
|
||||
* Running ``pitmerge.py`` if needed to combine results.
|
||||
* Running ``supermag_comparison.py`` to generate the plots.
|
||||
|
||||
A ``bash`` script (``submit-RUNID.sh``) is also created which can be run to
|
||||
submit all of the PBS jobs with the proper dependencies. Continuing the
|
||||
earlier example, this wrapper script can be run in your MAGE result directory
|
||||
on ``derecho`` with the command:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
run_ground_deltaB_analysis.py --calcdb=/PATH/TO/calcdb.x --dt=60.0 --hpc=derecho --parintime=4 --pbs_account=YOUR_DERECHO_ACCOUNT --smuser=YOUR_SUPERMAG_ACCOUNT --verbose /PATH/TO_MAGE_HDF5_FILE
|
||||
|
||||
For the runid of ``geospace`` used in the above examples, your MAGE output
|
||||
directory will now contain several new files:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
calcdb-geospace.pbs
|
||||
calcdb-geospace.xml
|
||||
ground_deltab_analysis-geospace.pbs
|
||||
pitmerge-geospace.pbs
|
||||
submit-geospace.sh
|
||||
|
||||
To submit all of the PBS jobs to perform all steps in the analysis, just run
|
||||
the ``bash`` script:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
bash submit-geospace.sh
|
||||
|
||||
If desired, the individual PBS scripts can be run manually. They have been
|
||||
designed (as much as possible) to be runnable as either PBS job scripts or
|
||||
standard ``bash`` shell scripts. For example, to run the ``calcdb.x`` job
|
||||
array on ``derecho``, followed by merging and plot generation on ``casper``
|
||||
you could use the commands:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
# On derecho
|
||||
qsub -J 1-4 calcdb-geospace.pbs
|
||||
|
||||
# On casper (edit PBS scripts appropriately)
|
||||
qsub pitmerge-geospace.pbs
|
||||
qsub ground_deltab_analysis-geospace.pbs
|
||||
|
||||
A similar procedure can be used if you wish to perform the merge and plotting
|
||||
steps on a non-HPC system, such as your laptop. In that case, slight
|
||||
system-dependent modifications to the individual PBS scripts may be required.
|
||||
14
docs/source/_obsolete/magnetosphere/analysisTools/index.rst
Normal file
@@ -0,0 +1,14 @@
|
||||
Analysis Tools
|
||||
==============
|
||||
|
||||
Introduction
|
||||
------------
|
||||
|
||||
This page provides links to documentation for analysis tools in the MAGE
|
||||
software.
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
groundMag
|
||||
superMAGE
|
||||
BIN
docs/source/_obsolete/magnetosphere/analysisTools/indices.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 107 KiB
240
docs/source/_obsolete/magnetosphere/analysisTools/superMAGE.rst
Normal file
@@ -0,0 +1,240 @@
|
||||
|
||||
SuperMAGE: SuperMAG indices
|
||||
===========================
|
||||
|
||||
The Python module ``supermage.py`` (SuperMAGE) is part of the ``kaipy``
|
||||
package. SuperMAGE is a collection of Python functions to compare simulated
|
||||
ground magnetic field data generated from a MAGE magnetosphere run (using
|
||||
``calcdb.x``), with `SuperMAG <https://supermag.jhuapl.edu/>`_ indices
|
||||
(auroral SME/U/L and SMR). SuperMAGE provides functions to create index plots
|
||||
and contour plots. Also included is a crude 1D E-Field calculator.
|
||||
|
||||
Requirements
|
||||
------------
|
||||
|
||||
To use the SuperMAGE module, just import it into your Python code:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
import supermage as sm
|
||||
|
||||
Reading simulated ground magnetic field perturbations from a MAGE run
|
||||
---------------------------------------------------------------------
|
||||
|
||||
This step assumes you have run ``calcdb.x`` to compute simulated ground
|
||||
magnetic field perturbations from your MAGE magnetosphere simulation results.
|
||||
Instructions are provided `here <./groundMag>`_.
|
||||
|
||||
Assume you have the file ``geospace.deltab.h5``, created using ``calcdb.x``.
|
||||
The data required for the comparison with SuperMAG measurements can be read in
|
||||
as follows:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
simulated_data = sm.ReadSimData("geospace.deltab.h5")
|
||||
|
||||
``SIM`` is a Python dictionary which contains the simulated data as
|
||||
NumPy arrays. The following dictionary keys are available:
|
||||
|
||||
``td`` (``datetime.datetime``): Timestamps for the individual simulated data
|
||||
points.
|
||||
|
||||
``glon`` (degrees): Geographic longitude
|
||||
|
||||
``glat`` (degrees): Geographic latitude
|
||||
|
||||
``mlat`` (degrees): Magnetic latitude
|
||||
|
||||
``mlt`` (hours): Magnetic local time
|
||||
|
||||
``smlon`` (degrees): Solar magnetic longitude
|
||||
|
||||
``dBt`` (nT): Spherical (polar angle) magnetic field perturbation at Earth
|
||||
surface
|
||||
|
||||
``dBp`` (nT): Spherical (azimuthal angle) magnetic field perturbation at Earth
|
||||
surface
|
||||
|
||||
``dBr`` (nT): Spherical (radial) magnetic field perturbation at Earth surface
|
||||
|
||||
``dBn`` (nT): Northward magnetic field perturbation at Earth surface
|
||||
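
For a quick look at the contents, the sketch below plots the peak perturbation
over all ground points; it assumes only the keys listed above and that the
leading axis of ``dBn`` is time:

.. code-block:: python

   import matplotlib.pyplot as plt
   import numpy as np
   import supermage as sm

   SIM = sm.ReadSimData("geospace.deltab.h5")
   dBn = SIM["dBn"]
   # Collapse all ground points to the peak |dBn| at each time
   peak = np.nanmax(np.abs(dBn.reshape(dBn.shape[0], -1)), axis=1)
   plt.plot(SIM["td"], peak)
   plt.ylabel("max |dBn| [nT]")
   plt.show()
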
|
||||
Fetching SuperMag magnetic indices
|
||||
----------------------------------
|
||||
|
||||
We can use ``FetchSMIndices()`` to retrieve the indices corresponding to the
|
||||
time period of our simulation:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
SMI = sm.FetchSMIndices(user, start, numofdays)
|
||||
|
||||
where
|
||||
|
||||
|
||||
``user`` is the user name you created on the
|
||||
`SuperMAG <https://supermag.jhuapl.edu/>`_ web site.
|
||||
|
||||
``start`` is a Python ``datetime.datetime`` object representing the starting
|
||||
point of the desired data.
|
||||
|
||||
``numofdays`` is the number of days of data to fetch, starting at ``start``.
|
||||
|
||||
This call returns a Python dictionary of all the SuperMAG indices for the
|
||||
chosen time period, stored as NumPy arrays. This call should only take a few
|
||||
seconds.
|
||||
|
||||
As an example, assume we have read in the simulated data from the file
|
||||
``geospace.deltab.h5`` described above. We can request 3 days of all SuperMAG
|
||||
magnetic indices for this time period with the call:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
SMI = sm.FetchSMIndices(user, SIM['td'][0], 3)
|
||||
|
||||
Fetching SuperMAG magnetic observatory data
|
||||
-------------------------------------------
|
||||
|
||||
The function ``FetchSMData()`` retrieves all of the available SuperMAG data
|
||||
(in the form of observatory locations, magnetic field components, and
|
||||
measurement times) for the desired time period, as shown below:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
import os
|
||||
savefolder = os.path.join(os.environ['HOME'], 'supermag')
|
||||
SM = sm.FetchSMData(user, start, numofdays, savefolder, badfrac=0.1)
|
||||
|
||||
``user`` is the user name you created on the
|
||||
`SuperMAG <https://supermag.jhuapl.edu/>`_ web site.
|
||||
|
||||
``start`` is a Python ``datetime.datetime`` object representing the starting
|
||||
point of the desired data.
|
||||
|
||||
``numofdays`` is the number of days of data to fetch, starting at ``start``.
|
||||
|
||||
``savefolder`` is a string containing the path to a local cache folder for
|
||||
SuperMAG data. If requested data is already available in this folder, it will
|
||||
not be re-downloaded.
|
||||
|
||||
``badfrac`` (optional) is a threshold value (0 <= ``badfrac`` <= 1, default
|
||||
``0.1``) for detecting bad data. If more than this fraction of the returned
|
||||
data is missing or bad for an observatory, ignore that observatory.
|
||||
|
||||
This call returns a Python dictionary for all of the SuperMag observatory
|
||||
data. The dictionary contains keys which list the observatory identifiers and
|
||||
locations (in geographic and magnetic coordinates), measurement times, and the
|
||||
observed magnetic field components.
|
||||
|
||||
This call can take several minutes per day of data to complete.
|
||||
|
||||
**Note**: The magnetic North component for this result is found using the
|
||||
dictionary key ``BNm``.
|
||||
|
||||
Calculating magnetic indices for MAGE simulation results
|
||||
--------------------------------------------------------
|
||||
|
||||
We can calculate a set of magnetic indices (SME, SMU, SML, SMR) from our
|
||||
simulation data using ``InterpolateSimData()``. This function performs several
|
||||
operations:
|
||||
|
||||
#. Reject any simulated data which does not overlap with SuperMAG data.
|
||||
|
||||
#. Interpolate (in time) the simulated B data to the datetimes for the
|
||||
SuperMAG indices already retrieved.
|
||||
|
||||
#. Interpolate (in space) the simulated B-data to the positions of the
SuperMAG observatories.

#. Calculate the indices listed above.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
SMinterp = sm.InterpolateSimData(SIM, SM)
|
||||
|
||||
This call should complete in a few seconds.
|
||||
|
||||
The Python dictionary returned by this call contains the times from the
simulated results and the locations of each SuperMAG observatory.

The dictionary key ``dBn`` returns the simulation data at the SuperMAG
locations, whereas ``dBnsmall`` returns the simulation data interpolated to
the SuperMAG times.
|
||||
|
||||
Making Plots
|
||||
^^^^^^^^^^^^
|
||||
|
||||
There are two functions to make comparison plots: **MakeIndicesPlot** and
|
||||
**MakeContourPlots**. These can be called as follows:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
sm.MakeIndicesPlot(SMI, SMinterp, fignumber = 1)
|
||||
sm.MakeContourPlots(SM, SMinterp, maxx = 1000, fignumber = 2)
|
||||
|
||||
These return plots like the following, respectively (\ **MakeContourPlots()**
may take a while):
|
||||
|
||||
.. image:: https://bitbucket.org/repo/kMoBzBp/images/2400869799-ComparisonPlots.png
|
||||
:target: https://bitbucket.org/repo/kMoBzBp/images/2400869799-ComparisonPlots.png
|
||||
:alt: ComparisonPlots.png
|
||||
|
||||
"Interpolated" is simulated data interpolated at SuperMag locations, "super"
|
||||
is simulated data using all simulated data and "real" is actual SuperMag data.
|
||||
|
||||
Calculating E-Field
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Included is the **EField1DCalculation()** function, which calculates the
horizontal electric field components from the horizontal magnetic field using
a 1-D ground resistivity model (the Quebec model found in
DOI:10.1046/j.1365-246x.1998.00388.x).
|
||||
|
||||
Note: this function cannot handle nan values in the time-series. Any nan in
|
||||
the time-series will return all nans for that individual location. Also, this
|
||||
function is quite slow...
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
BX, BY, TD = SM['BNm'], SM['BEm'], SM['td']
|
||||
EX, EY = sm.EField1DCalculation(BX, BY, TD)
|
||||
SM['Ex'] = EX
|
||||
SM['Ey'] = EY
|
||||
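
Because any gap poisons the whole time-series for a station, it can be worth
checking for nans beforehand; this sketch assumes ``BNm``/``BEm`` are
dimensioned (time, station):

.. code-block:: python

   import numpy as np

   # Stations with any nan come back as all-nan from EField1DCalculation
   bad = np.isnan(SM['BNm']).any(axis=0) | np.isnan(SM['BEm']).any(axis=0)
   print("%d of %d stations contain gaps" % (bad.sum(), bad.size))
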
|
||||
Example Run
|
||||
-----------
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# Imports assumed by this example (functions are called unqualified below):
from supermage import (ReadSimData, FetchSMIndices, FetchSMData,
InterpolateSimData, MakeIndicesPlot, MakeContourPlots, EField1DCalculation)
|
||||
|
||||
user = 'USERNAME' # username for Supermag downloads
|
||||
savefolder = '/glade/u/home/sblake/indices/FINAL/DATA/' # folder where the Supermag jsons are stored/fetched
|
||||
|
||||
# Working Example
|
||||
# Read in all needed data from .h5 simulation output
|
||||
filename = '/glade/u/home/skareem/Work/gemmie/stpatex/stpatdb.deltab.h5'
|
||||
#filename = '/glade/u/home/skareem/Work/dass/Data/newdassh/dassdb.deltab.h5'
|
||||
print("Reading SIM data")
|
||||
SIM = ReadSimData(filename)
|
||||
start = SIM['td'][0] # start time of simulation data
|
||||
numofdays = 3 # number of days of data to fetch
|
||||
|
||||
print("Fetching SM indices")
|
||||
SMI = FetchSMIndices(user, start, numofdays)
|
||||
|
||||
print("Fetching SM data")
|
||||
SM = FetchSMData(user, start, numofdays, savefolder, badfrac = 0.1)
|
||||
|
||||
print("Interpolating SIM data") # interpolates and calculates SM indices
|
||||
SMinterp = InterpolateSimData(SIM, SM)
|
||||
|
||||
print("Making Indices Plot")
|
||||
MakeIndicesPlot(SMI, SMinterp, fignumber = 1)
|
||||
|
||||
print("Making Contour Plot")
|
||||
MakeContourPlots(SM, SMinterp, maxx = 1000, fignumber = 2)
|
||||
|
||||
print("Calculating E-Field for SM data")
|
||||
BX, BY, TD = SM['BNm'], SM['BEm'], SM['td']
|
||||
EX, EY = EField1DCalculation(BX, BY, TD)
|
||||
SM['Ex'] = EX
|
||||
SM['Ey'] = EY
|
||||
@@ -0,0 +1,88 @@
|
||||
Non-Earth Planetary Magnetospheres
|
||||
==================================
|
||||
|
||||
Instructions for running GAMERA-REMIX for non-Earth planetary magnetospheres.
|
||||
|
||||
By default, the voltron executable is designed to simulate Earth's
|
||||
magnetosphere, and the config file assumes that the planet is Earth. This page
|
||||
outlines how to run the model for non-Earth planetary magnetospheres, and what
|
||||
assumptions are made in the model when doing so.
|
||||
|
||||
Model choices you should know about
|
||||
-----------------------------------
|
||||
|
||||
Coordinate systems
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
GAMERA and REMIX assume that the magnetic dipole axis is along the +z
|
||||
direction. This means **the magnetic moment should always be positive**. The
|
||||
sign of the corotation potential is determined by the orientation of
the rotation axis relative to the dipole axis, i.e., the corotation potential
should be positive if the two axes are aligned (e.g. Earth) and negative if
they are
|
||||
anti-aligned (e.g. Jupiter and Saturn). There is currently no correction to
|
||||
the solar wind parameters with regard to the planet's coordinate system, so
|
||||
you must do that conversion ahead of time.
|
||||
|
||||
IC files
|
||||
--------
|
||||
|
||||
The default IC file (src/voltron/ICs/earthcmi.F90) is specific to Earth's
|
||||
magnetosphere. An alternative IC file, src/voltron/ICs/planetcmi.F90, can be
|
||||
used as a more generic starting point. This may be used as-is for some simple
|
||||
applications where only the planet parameters are changed.
|
||||
|
||||
You can configure voltron to build with this IC file either by setting the
|
||||
``VOLTIC`` variable in ccmake, or by setting it as an argument with cmake:
|
||||
|
||||
``cmake <other_args> -DVOLTIC=$KAIJUHOME/src/voltron/ICs/planetcmi.F90 ..``
|
||||
|
||||
More specific configurations (e.g. Jupiter & Saturn with special initial
|
||||
conditions, continuous mass loading from moons, etc.) should be defined in new
|
||||
IC files using planetcmi.F90 as a starting point.
|
||||
|
||||
Config/XML file options
|
||||
-----------------------
|
||||
|
||||
The planet parameters must also be specified in the xml config file. Here is
|
||||
an example snippet:
|
||||
|
||||
.. code-block:: xml
|
||||
|
||||
<Kaiju>
|
||||
<Gamera>
|
||||
<prob planet="OTHER" x0="4913961" Rion="1.1" M0="0.1" Psi0="0"/>
|
||||
</Gamera>
|
||||
</Kaiju>
|
||||
|
||||
where
|
||||
|
||||
``planet`` - Planet name. Mercury, Earth, Jupiter, and Saturn all have defined
|
||||
parameters that don't need to be specified in the xml. "Other" can be used to
|
||||
simulate a custom planet, where all parameters default to Earth unless
|
||||
overwritten by any of the following:
|
||||
|
||||
``x0`` - Planet radius in meters
|
||||
|
||||
``Rion`` - Planetary ionosphere radius in planetary radii
|
||||
|
||||
``M0`` - Magnetic moment in Gauss (\ **Should always be positive.** See above
|
||||
about coordinate systems)
|
||||
|
||||
``G0`` - Gravitational acceleration at planet surface in m/s^2 (e.g. 9.81 for
|
||||
Earth)
|
||||
|
||||
``doGrav`` - uses ``G0`` to add gravitational term in GAMERA
|
||||
|
||||
``Psi0`` - Corotation potential in kV (See above about coordinate systems to
|
||||
determine if it should be positive or negative)
|
||||
|
||||
``doCorot`` - uses ``Psi0`` to enforce corotation
|
||||
|
||||
It is also highly advised to tell REMIX to use a constant conductance, as the
|
||||
more complex models are specific to Earth:
|
||||
|
||||
.. code-block:: xml
|
||||
|
||||
<REMIX>
|
||||
<conductance const_sigma="T" ped0="1"/>
|
||||
</REMIX>
|
||||
@@ -0,0 +1,13 @@
|
||||
Outer Planets and Exoplanets
|
||||
============================
|
||||
|
||||
Introduction
|
||||
------------
|
||||
|
||||
This page provides links to documentation on using MAGE to model the
|
||||
magnetospheres of the outer planets and exoplanets.
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
exoOuterPlanets
|
||||
351
docs/source/_obsolete/magnetosphere/gameraRCM.rst
Normal file
@@ -0,0 +1,351 @@
|
||||
GAMERA-RCM
|
||||
==========
|
||||
|
||||
Voltron is the executable name for the standard magnetosphere simulation. It
consists of the GAMERA global MHD model and the RCM ring current model.
GAMERA-RCM runs as a coupled magnetosphere-ring current system. Options are
available in Voltron to disable RCM coupling (i.e., pure MHD mode) while
keeping only the field-line tracing capability of RCM.
|
||||
|
||||
Checklist
|
||||
---------
|
||||
|
||||
Here is a checklist of SIX necessary ingredients you should go over before
|
||||
launching a Voltron run:
|
||||
|
||||
#. Executable: e.g., voltron.x (serial run) or voltron_mpi.x (parallel run).
|
||||
See compilation instructions.
|
||||
#. Grid file: e.g., lfmQ.h5. See script instructions for genLFM.py.
|
||||
#. Solar wind file: e.g., bcwind.h5. See script instructions for omni2wind.py
|
||||
or gen_SW_kaiju.py.
|
||||
#. RCM configuration file: e.g., rcmconfig.h5. See script instructions for
|
||||
genRCM.py.
|
||||
#. Configuration file: e.g., cmriQ.xml. See :doc:`XML <./xml/voltronXML>`
|
||||
instructions and pay attention to the differences between serial run and
|
||||
MPI run.
|
||||
#. Job submission file: e.g., RunVOLTRON.pbs. See pbs instructions and pay
|
||||
attention to the differences between serial run and MPI run.
|
||||
|
||||
Compilation
|
||||
-----------
|
||||
|
||||
Assume the Kaiju repository has been installed successfully at $KAIJUDIR, that
$KAIJUDIR/scripts has been added to $PATH, and that $KAIJUDIR has been added
to $PYTHONPATH.
|
||||
|
||||
The commands to compile the MPI-parallelized Voltron, which is the most
commonly used mode, are:
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
cd $KAIJUDIR
|
||||
module purge
|
||||
module restore kaiju
|
||||
rm -r build
|
||||
mkdir build
|
||||
cd build
|
||||
cmake -DENABLE_MPI=ON -DENABLE_MKL=OFF ..
|
||||
make voltron_mpi.x
|
||||
|
||||
Here are more explanations and exceptions for the operations above.
|
||||
|
||||
#. "module restore kaiju" assumes that you have saved the eight necessary
|
||||
modules under your home directory. The eight necessary modules are listed
|
||||
below. If you haven't saved them, do "module purge", then
|
||||
"module load git/2.9.5" and one by one for each of the seven rest modules,
|
||||
then "module save kaiju".
|
||||
|
||||
.. code-block::
|
||||
|
||||
1) git/2.9.5
|
||||
2) intel/18.0.5
|
||||
3) hdf5/1.10.5
|
||||
4) impi/2018.4.274
|
||||
5) ncarenv/1.3
|
||||
6) ncarcompilers/0.5.0
|
||||
7) python/2.7.16
|
||||
8) cmake/3.14.4
|
||||
|
||||
#. "rm -r build" is to clean up existing build directory to avoid any residual
|
||||
settings from previous compilations. Skip this if there is no build directory
|
||||
under $KAIJUDIR.
|
||||
|
||||
#. "cmake -DENABLE_MPI=ON -DENABLE_MKL=OFF .." is for MPI parallelized run.
|
||||
The flag "-DENABLE_MPI" is by default off to compile serial run. The flag
|
||||
"-DENABLE_MKL" is also by default off and the gmres solver is used for the
|
||||
Posisson's equation. When "-DENABLE_MKL=ON", the threaded Intel pardiso solver
|
||||
is used. Recent tests show that Voltron results are not reproducible with MKL
|
||||
on. Pardiso only brings a few percent improvement of running speed. It is
|
||||
suggested to keep the default setting of MKL off.
|
||||
|
||||
#. "make voltron_mpi.x" is for MPI parallelized run. If compiling serial
|
||||
Voltron, simply use "make voltron.x" after "cmake ..".
|
||||
|
||||
#. Additional notes from history page: The build system uses cmake which will
|
||||
attempt to auto-detect HDF5/OMP/MPI settings, however optionally you can
|
||||
provide a file "cmake/user.cmake" to set various variables if the auto-detect
|
||||
doesn't work.
|
||||
|
||||
#. To check that your setup is correct, compare against the typical screen
output when compiling:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
[(NPL) ] kaiju/build> cmake ..
|
||||
-- The Fortran compiler identification is Intel 18.0.5.20180823
|
||||
-- Check for working Fortran compiler: /glade/u/apps/ch/opt/ncarcompilers/0.5.0/intel/18.0.5/ifort
|
||||
-- Check for working Fortran compiler: /glade/u/apps/ch/opt/ncarcompilers/0.5.0/intel/18.0.5/ifort -- works
|
||||
-- Detecting Fortran compiler ABI info
|
||||
-- Detecting Fortran compiler ABI info - done
|
||||
-- Checking whether /glade/u/apps/ch/opt/ncarcompilers/0.5.0/intel/18.0.5/ifort supports Fortran 90
|
||||
-- Checking whether /glade/u/apps/ch/opt/ncarcompilers/0.5.0/intel/18.0.5/ifort supports Fortran 90 -- yes
|
||||
-- HDF5: Using hdf5 compiler wrapper for all Fortran compiling
|
||||
-- Found HDF5: Included by compiler wrappers found components: Fortran
|
||||
-- Found OpenMP_Fortran: -qopenmp (found version "5.0")
|
||||
-- Found OpenMP: TRUE (found version "5.0") found components: Fortran
|
||||
|
||||
Configuration summary ...
|
||||
System: cheyenne4
|
||||
OS: Linux
|
||||
Processor: x86_64
|
||||
Compiler: Intel / 18.0.5.20180823
|
||||
HDF5 Wrapper:
|
||||
Version: 91c9592 / master
|
||||
Build Type: Release
|
||||
Base Flags: -fPIC -free -implicitnone -qopenmp
|
||||
|
||||
Build Flags: -O3 -align array64byte -align rec32byte -no-prec-div -fast-transcendentals -ipo -march=corei7 -axCORE-AVX2
|
||||
-----------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
Adding CHIMP module ...
|
||||
EB IC file is /glade/u/home/ldong/aplkaiju/kaiju/src/chimp/ebICs/ebICstd.F90
|
||||
TP IC file is /glade/u/home/ldong/aplkaiju/kaiju/src/chimp/tpICs/tpICstd.F90
|
||||
Adding executable project.x
|
||||
Adding executable psd.x
|
||||
Adding executable push.x
|
||||
Adding executable slice.x
|
||||
Adding executable chop.x
|
||||
Adding executable trace.x
|
||||
Adding Gamera module ...
|
||||
Bricksize is 16
|
||||
IC file is /glade/u/home/ldong/aplkaiju/kaiju/src/gamera/ICs/null.F90
|
||||
Adding executable gamera.x
|
||||
Adding ReMIX module ...
|
||||
Adding executable remix.x
|
||||
Adding RCM module ...
|
||||
Adding executable rcm.x
|
||||
Adding Voltron module ...
|
||||
IC file is /glade/u/home/ldong/aplkaiju/kaiju/src/voltron/ICs/earthcmi.F90
|
||||
Adding executable voltron.x
|
||||
-- Configuring done
|
||||
-- Generating done
|
||||
-- Build files have been written to: /glade/u/home/ldong/aplkaiju/kaiju/build
|
||||
[(NPL) ] kaiju/build>
|
||||
|
||||
When cmake is ready, you can start compiling the executable with a simple make
|
||||
command:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
make voltron
|
||||
|
||||
Normal output looks like this, showing the percentage of completion:
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
[(NPL) ] kaiju/build> make voltron
|
||||
Scanning dependencies of target baselib
|
||||
[ 1%] Building Fortran object src/base/CMakeFiles/baselib.dir/kdefs.F90.o
|
||||
...... Lines omitted by editor of this wiki page ......
|
||||
[ 20%] Linking Fortran static library libbaselib.a
|
||||
[ 20%] Built target baselib
|
||||
Scanning dependencies of target rcmlib
|
||||
...... Lines omitted by editor of this wiki page ......
|
||||
[ 34%] Built target rcmlib
|
||||
Scanning dependencies of target chimplib
|
||||
[ 34%] Building Fortran object src/chimp/CMakeFiles/chimplib.dir/chmpunits.F90.o
|
||||
...... Lines omitted by editor of this wiki page ......
|
||||
[ 57%] Linking Fortran static library libchimplib.a
|
||||
[ 57%] Built target chimplib
|
||||
Scanning dependencies of target gamlib
|
||||
[ 59%] Building Fortran object src/gamera/CMakeFiles/gamlib.dir/gamutils.F90.o
|
||||
...... Lines omitted by editor of this wiki page ......
|
||||
[ 78%] Linking Fortran static library libgamlib.a
|
||||
[ 78%] Built target gamlib
|
||||
Scanning dependencies of target remixlib
|
||||
[ 79%] Building Fortran object src/remix/CMakeFiles/remixlib.dir/mixconductance.F90.o
|
||||
...... Lines omitted by editor of this wiki page ......
|
||||
[ 85%] Linking Fortran static library libremixlib.a
|
||||
[ 85%] Built target remixlib
|
||||
Scanning dependencies of target voltlib
|
||||
[ 85%] Building Fortran object src/voltron/CMakeFiles/voltlib.dir/ICs/earthcmi.F90.o
|
||||
...... Lines omitted by editor of this wiki page ......
|
||||
[ 97%] Linking Fortran static library libvoltlib.a
|
||||
[ 97%] Built target voltlib
|
||||
Scanning dependencies of target voltron.x
|
||||
[ 98%] Building Fortran object CMakeFiles/voltron.x.dir/src/drivers/voltronx.F90.o
|
||||
[100%] Linking Fortran executable bin/voltron.x
|
||||
|
||||
and followed by hundreds of lines like this with "remark #15009", which are
|
||||
good messages telling that the compiler is making the code faster (-Kareem):
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
/glade/u/home/ldong/aplkaiju/kaiju/src/drivers/voltronx.F90(3): remark #15009: MAIN__ has been targeted for automatic cpu dispatch
|
||||
|
||||
The compilation for voltron.x is successful when you see this at the end:
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
[100%] Built target voltron.x
|
||||
Scanning dependencies of target voltron
|
||||
[100%] Built target voltron
|
||||
|
||||
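If you want a quick sanity check that the build actually produced the binary,
you can list it explicitly (a minimal sketch; the ``bin/`` location follows
the link line shown above):

.. code-block:: bash

   ls -l bin/voltron.x
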
Check the running status (may move this part to another page)
--------------------------------------------------------------

Check the status of a submitted job with

.. code-block:: shell

   qstat -u username

Or watch real-time output with

.. code-block:: shell

   tail -f *.out

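To keep an eye on the queue without retyping the command, you can wrap qstat
in ``watch`` (a small convenience sketch; it assumes ``watch`` is available
on the login node):

.. code-block:: bash

   # refresh the queue listing every 60 seconds
   watch -n 60 qstat -u $USER
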
Quick lookup of run outcomes
----------------------------

A few Python tools for diagnostics are available under $KAIJUDIR/scripts. The
msphpic.py script can be used to make combined RCM/remix/Gamera figures like
the one below:

.. image:: https://bitbucket.org/repo/kMoBzBp/images/1000851377-qkpic.png
   :target: https://bitbucket.org/repo/kMoBzBp/images/1000851377-qkpic.png
   :alt: qkpic.png

The gamsphVid.py script can be used to generate multiple plots for making
animations. Use "gamsphVid.py -h" for usage instructions.

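Both scripts print their available options with ``-h``; a minimal sketch of
invoking them (it assumes $KAIJUDIR/scripts is on your PATH, per the quick
start setup):

.. code-block:: bash

   msphpic.py -h
   gamsphVid.py -h
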
MPI Differences
---------------

Running a coupled Gamera-RCM case with MPI support requires three things:

#. Building the MPI version of the coupled executable.
#. Modifying the case XML to supply additional MPI decomposition information.
#. Modifying the submission script to request multiple nodes and use mpirun.

Modifying Case XML
~~~~~~~~~~~~~~~~~~

Modifying a case XML for a coupled case is very similar to modifying one for
an MHD-only case. The same modifications are required, but **only in the
Gamera** section of the XML, to define the MPI decomposition. Only Gamera
currently supports decomposition, so no other sections of the XML require
modification.

**Note**: currently, coupled Gamera-RCM only supports MPI decomposition in the
I and J dimensions. Decomposition along the K dimension will result in errors
or bad results.

Three additional lines are required in the case XML file when running with MPI
decomposition. These lines are ignored by the non-MPI version of coupled
Gamera, so you can safely leave them in the XML (if you want to) when not
using MPI.

In the Gamera section of the XML, one line is required for each dimension,
telling how many regions that dimension is decomposed into and whether that
dimension is periodic. Here is an example where the case is decomposed into 4
regions along each of the I and J axes, and not decomposed along the K axis.
The I and J axes are not periodic, but the K axis is.

.. code-block:: xml

   <Gamera>
     ...
     <iPdir N="4" bcPeriodic="F"/>
     <jPdir N="4" bcPeriodic="F"/>
     <kPdir N="1" bcPeriodic="T"/>
     ...
   </Gamera>
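
The total number of Gamera MPI ranks is the product of the three ``N``
values. A quick sketch of that arithmetic in the shell (the variable names
are just placeholders):

.. code-block:: bash

   # total Gamera MPI ranks = Ni * Nj * Nk; coupled RCM adds one more rank
   Ni=4; Nj=4; Nk=1
   echo "Gamera ranks: $((Ni * Nj * Nk)), total with coupled RCM: $((Ni * Nj * Nk + 1))"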

Modifying Job Submission Script
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

For information about creating job submission scripts, check the
:doc:`Gamerasphere <Gamerasphere>` article. Assuming that you have a job
submission script suitable for running serial jobs, here is how to modify it
to run coupled MPI Gamera. These examples are designed to work with Cheyenne,
but can be adapted to most clusters.

First, you need to request an appropriate number of nodes and tell the job
submission system how many MPI ranks should be created per node. For this
example, suppose the case is decomposed 4 times along the I dimension, and
not at all along the J or K dimensions. This case will then need a total of
``4*1*1 = 4`` MPI ranks **for Gamera**, plus one additional MPI rank for
coupled RCM.

For this example we will assign one MPI rank to each physical processor/socket
for Gamera, which provides a reasonable balance between performance and cost.
Each of Cheyenne's compute nodes has two processors/sockets, so each compute
node will receive two Gamera MPI ranks. The coupled RCM MPI rank is more
resource intensive, and so will get an entire compute node to itself. The
original, serial resource request line from the job submission script looked
like this:

.. code-block:: shell

   #PBS -l select=1:ncpus=72:ompthreads=72

That line requests all 72 cpus on a single compute node, which is perfect for
a single process. We want to create a total of 5 processes spread across three
compute nodes: 2 compute nodes with 2 MPI ranks each, which gives us 4 ranks
for Gamera, and then 1 compute node with a single MPI rank for coupled RCM.
That looks like this:

.. code-block:: shell

   #PBS -l select=2:ncpus=36:mpiprocs=2:ompthreads=36+1:ncpus=36:mpiprocs=1:ompthreads=72

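Once the job starts, you can confirm how PBS distributed the ranks by
inspecting the node file from inside the job script (a small diagnostic
sketch using standard PBS variables):

.. code-block:: bash

   # count how many MPI ranks were placed on each host
   sort $PBS_NODEFILE | uniq -c
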
**Note** that in larger cases you will want to add helper ranks that share some
of the workload assigned here to the coupled RCM rank. Those helper ranks will
also require entire nodes, and should be added to the latter portion of the
PBS select command. A command that adds 2 more nodes for helpers compared to
the one above (5 nodes total) would look like this:

.. code-block:: shell

   #PBS -l select=2:ncpus=36:mpiprocs=2:ompthreads=36+3:ncpus=36:mpiprocs=1:ompthreads=72

The line that controls the number of OMP threads should also be cut in half,
since we now have 2 Gamera processes per node:

.. code-block:: shell

   export OMP_NUM_THREADS=36

The only other line we need to change is the one that calls the executable and
starts the simulation. In the serial case that looked like this:

.. code-block:: shell

   ${EXE} ${RUNID}.xml ${JNUM} > ${RUNID}.${JNUM}.out

That command directly invokes the executable and passes it the input XML file.
Instead, we now need to use a launcher called mpirun, which will call our
executable for us:

.. code-block:: shell

   mpirun ${EXE} ${RUNID}.xml ${JNUM} > ${RUNID}.${JNUM}.out

Using MPT
~~~~~~~~~

If you are running with the MPT MPI library, the submission script will
require some additional modifications, described on a
:doc:`dedicated page <runningWithMPT>`.
125 docs/source/_obsolete/magnetosphere/gameraRemix.rst Normal file
@@ -0,0 +1,125 @@

GAMERA-REMIX
============

A GAMERA-REMIX (GR) run without RCM simulates the magnetosphere without the
ring current, but coupled with the ionosphere as represented by the REMIX
module. Note that we still need to use a Voltron executable instead of a
Gamera executable for a GR run. To turn off RCM, set DtDeep = -1 and
Gamera/dosrc to F.

Below are an example XML file of model parameters and an example PBS file of
run parameters.

Example XML for a double-resolution MPI run
-------------------------------------------

.. code-block:: xml

   <?xml version="1.0"?>
   <KAIJU>
     <VOLTRON>
       <time tFin="43210.0"/>
       <spinup doSpin="T" tSpin="3600.0" tIO="0.0"/>
       <output dtOut="300.0" tsOut="100"/>
       <coupling dt="5.0"/>
       <restart dtRes="1800.0"/>
       <threading NumTh="36"/>
     </VOLTRON>
     <Gamera>
       <sim runid="msphere" doH5g="T" H5Grid="lfmD.h5" icType="user" pdmb="0.75" rmeth="7UP"/>
       <floors dFloor="1.0e-6" pFloor="1.0e-6"/>
       <restart doRes="F" resID="msphere" nRes="-1"/>
       <physics doMHD="T" doBoris="T" Ca="10.0"/>
       <ring gid="lfm" doRing="T"/>
       <wind tsfile="bcwind.h5"/>
       <source doSource="F"/>
       <iPdir N="2" bcPeriodic="F"/>
       <jPdir N="2" bcPeriodic="F"/>
       <kPdir N="1" bcPeriodic="T"/>
       <threading NumTh="18"/>
     </Gamera>
     <REMIX>
       <grid Np="360" Nt="45" LowLatBoundary="45.0"/>
       <conductance doStarlight="T" />
       <precipitation aurora_model_type="FEDDER" alpha="0.34" beta="9.4362323" R="0.042567956" doAuroralSmooth="F"/>
     </REMIX>
   </KAIJU>

Example PBS file for a double-resolution MPI run
------------------------------------------------

.. code-block:: bash

   #!/bin/bash
   #PBS -A P54048000
   #PBS -N 04Fe-1
   #PBS -j oe
   #PBS -q economy
   #PBS -l walltime=06:00:00
   #PBS -l select=2:ncpus=36:mpiprocs=2:ompthreads=18+1:ncpus=36:mpiprocs=1:ompthreads=36

   #Example usage

   export TMPDIR=/glade/scratch/$USER/temp
   mkdir -p $TMPDIR

   export EXE="./voltron_mpi.x"
   export RUNID="cmriD1"

   #Optional stuff to load an environment
   source ~/.bashrc

   if [[ -z "$KAIJUHOME" ]]; then
       # $KAIJUHOME environment variable is not set
       echo "The KAIJUHOME environment variable is not set"
       echo "You must either pass your environment with the -V option or"
       echo " execute the kaiju/scripts/setupEnvironment script in your ~/.bashrc file"
       exit
   fi

   if [[ ! -z "$MODULE_LIST" ]]; then
       # user passed a list of modules to load as the environment variable MODULE_LIST
       # call this with the flag '-v MODULE_LIST="<modules>"' to use this option
       # where <modules> is a space-separated list of modules in quotes
       # Example:
       # qsub -v MODULE_LIST="intel/2021.2 ncarenv/1.3 ncarcompilers/0.5.0 mpt/2.22" RunMpi.pbs
       module purge
       module load $MODULE_LIST
   elif [[ ! -z "$MODULE_SET" ]]; then
       # user passed a module set name to load as the environment variable MODULE_SET
       # call this with the flag '-v MODULE_SET=<set name>' to use this option
       # where <set_name> is a saved set of modules, as printed by 'module savelist'
       # Example:
       # qsub -v MODULE_SET=kaiju21 RunMpi.pbs
       module purge
       module restore $MODULE_SET
   else
       # user did not pass a module set, load a default set
       module purge
       module restore mpikaiju
   fi

   if [[ ! -z "$MPT_VERSION" ]]; then
       echo "USING MPIEXEC_MPT"
       export MPI_TYPE_DEPTH=32
       export OMP_NUM_THREADS=36
       export MPI_IB_CONGESTED=0
       export NODEFILE=$TMPDIR/nodefile.$PBS_JOBID
       cp $PBS_NODEFILE $NODEFILE
       export MPICOMMAND="mpiexec_mpt $KAIJUHOME/scripts/preproc/correctOMPenvironment.sh $NODEFILE omplace"
   else
       echo "USING MPIRUN"
       export MPICOMMAND="mpirun"
       export OMP_NUM_THREADS=18
       export I_MPI_PIN_DOMAIN="omp"
   fi

   module list
   hostname
   date
   export KMP_STACKSIZE=128M
   export JNUM=${PBS_ARRAY_INDEX:-0}
   echo "Running $EXE"
   ${MPICOMMAND} ${EXE} ${RUNID}.xml ${JNUM} > ${RUNID}.${JNUM}.out
   date
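
To submit this script, optionally overriding the module environment at
submission time (the script name RunMpi.pbs below is just the name used in
the script's own comments):

.. code-block:: bash

   qsub RunMpi.pbs
   # or, selecting a saved module set:
   qsub -v MODULE_SET=kaiju21 RunMpi.pbs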
189 docs/source/_obsolete/magnetosphere/hidra/hidra.rst Normal file
@@ -0,0 +1,189 @@
Running HIDRA on NASA HEC Systems
=================================

What follows describes how to compile and use JupyterLab on Pleiades. Replace
rmalbarr with your own Pleiades username. Note that you will be assigned a
specific /nobackup directory number (mine is /nobackupp12/rmalbarr); change
this according to your number. Similarly, my home directory is
/home7/rmalbarr; change the home directory number accordingly.

Get set up on Pleiades
----------------------

#. Set up RSA tokens: copy from Bitbucket to the Pleiades Front End (pfe) and
   `follow the wiki <https://www.nas.nasa.gov/hecc/support/kb/enabling-your-rsa-securid-hard-token-fob_59.html>`_

#. Set up SSH passthrough,
   `following the wiki <https://www.nas.nasa.gov/hecc/support/kb/setting-up-ssh-passthrough_232.html>`_

   This should enable you to log in to pfe, where the passcode is given by the
   SecurID mobile app (two-factor authentication):

   .. code-block:: bash

      ssh pfe

#. Set up the sup client,
   `following the wiki <https://www.nas.nasa.gov/hecc/support/kb/using-the-secure-unattended-proxy-sup_145.html>`_

   This will enable you to send large files between remote and local servers
   with ``shiftc``.

   From your local machine, for example, run:

   .. code-block:: bash

      sup shiftc <files> rmalbarr@pfe:/nobackupp12/rmalbarr

#. Clone the repo to your nobackup or home directory on pfe. Here, for
   example, check out the hidra branch of kaiju. From the pfe prompt, run:

   .. code-block:: bash

      # install home-brew first if git-lfs is not already available
      git lfs install

      git branch
      git checkout hidra

   From the
   `kaiju wiki <https://bitbucket.org/aplkaiju/kaiju/wiki/quickStart/prerequisites>`_\ , run:

   .. code-block:: bash

      git clone https://YOUR_BITBUCKET_USERNAME@bitbucket.org/aplkaiju/kaiju.git
      export KAIJUHOME=$HOME/kaiju

   where $HOME, for me, is set to /nobackupp12/rmalbarr. Change this according
   to your username and desired directory. Note: use an Atlassian app password
   for the clone command above; this is found in your Bitbucket profile.

Running (e.g. HIDRA) on Pleiades
--------------------------------

#. Make a build directory within kaiju. For example, for me, it is
   /nobackupp12/rmalbarr/kaiju/build. From this build directory run (for the
   hidra example):

   .. code-block:: bash

      module load pkgsrc/2021Q2
      module load comp-intel/2020.4.304
      module load mpi-hpe/mpt.2.25
      module load szip/2.1.1
      module load hdf5/1.8.18_mpt
      FC=ifort FFLAGS="-mkl" cmake -DENABLE_MPI=ON $HOME
      make hidra.x

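A quick check that the build produced the executable (a sketch only; the
exact output location may differ for your build, by analogy with the voltron
example earlier):

.. code-block:: bash

   ls -l bin/hidra.x
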
Send this executable file to your /nobackup for queue submission. Note:
nobackup has much more storage space than your home directory. Run qsub to
submit a PBS script as usual. A sample PBS script that I use on pfe is below:

.. code-block:: bash

   #!/bin/bash
   #Example Gamera PBS script
   #PBS -A rmalbarr
   #PBS -N HIDRAN-001_ML1N_ssOpO2_nHem
   #PBS -j oe
   #PBS -q normal
   #PBS -l walltime=8:00:00
   #PBS -l select=8:ncpus=28:mpiprocs=4:ompthreads=7:model=bro

   export EXE="./hidra_ML1N_ssOpO2.x"
   export RUNID=hidraN-001_ML1N_ssOpO2_nHem

   # source ~/.bashrc

   export KAIJUHOME=/nobackupp12/rmalbarr/kaiju/

   if [[ -z "$KAIJUHOME" ]]; then
       # $KAIJUHOME environment variable is not set
       echo "The KAIJUHOME environment variable is not set"
       echo "You must either pass your environment with the -V option or"
       echo " execute the kaiju/scripts/setupEnvironment script in your ~/.bashrc file"
       exit
   fi

   if [[ ! -z "$MODULE_LIST" ]]; then
       # user passed a list of modules to load as the environment variable MODULE_LIST
       # call this with the flag '-v MODULE_LIST="<modules>"' to use this option
       # where <modules> is a space-separated list of modules in quotes
       # Example:
       # qsub -v MODULE_LIST="intel/2021.2 ncarenv/1.3 ncarcompilers/0.5.0 mpt/2.22" RunMpi.pbs
       module purge
       module load $MODULE_LIST
   elif [[ ! -z "$MODULE_SET" ]]; then
       # user passed a module set name to load as the environment variable MODULE_SET
       # call this with the flag '-v MODULE_SET=<set name>' to use this option
       # where <set_name> is a saved set of modules, as printed by "module savelist"
       # Example:
       # qsub -v MODULE_SET=kaiju21 RunMpi.pbs
       module purge
       module restore $MODULE_SET
   else
       # user did not pass a module set, load a default set
       module purge
       module load pkgsrc/2021Q2
       module load comp-intel/2020.4.304
       module load mpi-hpe/mpt.2.25
       module load szip/2.1.1
       module load hdf5/1.8.18_mpt
   fi

   module list
   hostname
   date

   #module load arm-forge/21.1.1
   module load arm-forge/20.2

   export KMP_STACKSIZE=128M
   export MPI_TYPE_DEPTH=32
   export MPI_IB_CONGESTED=0
   #export OMP_NUM_THREADS=9
   export NODEFILE=$TMPDIR/nodefile.$PBS_JOBID
   cp $PBS_NODEFILE $NODEFILE

   #ddt --connect mpiexec_mpt ${KAIJUHOME}/scripts/preproc/correctOMPenvironment.sh ${NODEFILE} omplace ${EXE} hidra-001.xml > ${RUNID}.out
   mpiexec_mpt /nobackupp12/rmalbarr/kaiju/scripts/preproc/correctOMPenvironment.sh ${NODEFILE} omplace ${EXE} hidra-001.xml > ${RUNID}.out

   date

Then submit it:

.. code-block:: bash

   qsub runHIDRA-002_ML1N_ssOpO2_nHem.pbs

Note: there are different systems to run on. Above, I run on Broadwell nodes
(model=bro). For Pleiades Broadwell nodes (28 cores per node), I consider two
options:

.. code-block:: bash

   #PBS -l select=16:ncpus=28:mpiprocs=2:ompthreads=14:model=bro
   #PBS -l select=8:ncpus=28:mpiprocs=4:ompthreads=7:model=bro

Change these values, the wall time, etc., as needed.

#. Once the run is complete, send all large output data files to Lou mass
   storage (lfe). For example, for me to send ``<files>`` to my lfe:

   .. code-block:: bash

      shiftc <files> rmalbarr@lfe:/u/rmalbarr/

   Check to see where they go. From the pfe prompt:

   .. code-block:: bash

      ssh lfe

   To exit back to the pfe prompt, run: ``exit``

Note: I routinely reach the disk quota on /nobackup, so it's best to send all
large output files to lfe. For Jupyter analysis, copy individual files back to
/nobackup on pfe.
12 docs/source/_obsolete/magnetosphere/hidra/index.rst Normal file
@@ -0,0 +1,12 @@
HIDRA
=====

Introduction
------------

This page provides links to documentation for the HIDRA software.

.. toctree::
   :maxdepth: 1

   hidra
16 docs/source/_obsolete/magnetosphere/index.rst Normal file
@@ -0,0 +1,16 @@
Magnetosphere Simulations with MAGE
===================================

.. toctree::
   :maxdepth: 1

   analysisTools/index
   exoOuterPlanets/index
   hidra/index
   mage/index
   xml/index
   planetaryQuickStart
   Gamerasphere
   gameraRCM
   gameraRemix
   runningWithMPT
10 docs/source/_obsolete/magnetosphere/mage/index.rst Normal file
@@ -0,0 +1,10 @@
MAGE
====

Introduction
------------

This page provides links to documentation for the MAGE software.

.. toctree::
   :maxdepth: 1

@@ -0,0 +1,4 @@
Planetary Quick Start Guide
===========================

Someone will need to create this.
58 docs/source/_obsolete/magnetosphere/runningWithMPT.rst Normal file
@@ -0,0 +1,58 @@
Running with MPT
================

Running MPI jobs with MPT requires some modifications to the PBS submission
scripts on any cluster (Cheyenne, Pleiades, likely others). Those
modifications are outlined below.

Note: when compiling with MKL in addition to MPT and MPI, MKL should be
enabled in the initial cmake command when initializing the build directory so
that the code will compile:

.. code-block:: shell

   cmake -DENABLE_MPI=ON -DENABLE_MKL=ON ..

Mpirun Command
--------------

Inside a typical PBS submission script using Intel MPI, when the MPI
application is executed, the command would look something like this:

.. code-block:: shell

   mpirun ${EXE} ${RUNID}.xml ${JNUM} > ${RUNID}.${JNUM}.out

mpirun is the MPI launcher used with Intel MPI (and several other MPI
libraries), and it runs the executable defined by the ${EXE} environment
variable. However, MPT has three differences. First, it uses the launcher
mpiexec_mpt instead of mpirun. Second, it requires explicit information about
how to pin the application's threads to processor cores (something that Intel
MPI can do automatically). And finally, MPT requires an additional environment
variable allowing us to create complex custom data types.

The same line above must be replaced by these lines in a submission script
using MPT:

.. code-block:: shell

   export MPI_TYPE_DEPTH=32
   export MPI_IB_CONGESTED=0
   export OMP_NUM_THREADS=36
   export NODEFILE=$TMPDIR/nodefile.$PBS_JOBID
   cp $PBS_NODEFILE $NODEFILE
   mpiexec_mpt ${KAIJUHOME}/scripts/preproc/correctOMPenvironment.sh ${NODEFILE} omplace ${EXE} ${RUNID}.xml ${JNUM} >> ${RUNID}.${JNUM}.out

Note that this does require the user to have set the TMPDIR environment
variable at the top of the script, which is recommended on Cheyenne. If the
script does not use TMPDIR, then NODEFILE should be set to simply
"nodefile.$PBS_JOBID", as in the sketch below.
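
A minimal sketch of that fallback (no TMPDIR; the node file lives in the
job's working directory):

.. code-block:: bash

   # fallback when the script does not define TMPDIR
   export NODEFILE=nodefile.$PBS_JOBID
   cp $PBS_NODEFILE $NODEFILE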

This also requires the user to have set the KAIJUHOME environment variable to
the root directory of their kaiju repository (as mentioned in several other
wiki pages). This is so that the PBS script can find the
"correctOMPenvironment.sh" script, a custom replacement for the standard
"omplace" script. Because our submission scripts typically have different
numbers of MPI processes on different nodes (2 for gamera processes and 1 for
voltron processes), the standard omplace script can't properly identify this
and pin the processes' threads.
96 docs/source/_obsolete/magnetosphere/xml/gameraXML.rst Normal file
@@ -0,0 +1,96 @@
GAMERA XML
==========

Example XML file:

.. code-block:: xml

   <OMEGA>
     <sim tFin="677.115987461"/>
   </OMEGA>
   <Gamera>
     <sim runid="msphere" doH5g="T" H5Grid="lfmD.h5" icType="user" pdmb="1.0" pFloor="1.0e-8" dFloor="1.0e-6" rmeth="7UP"/>
     <restart dtRes="28.2131661" doRes="F" resFile="XXXXX.h5" doReset="F" tReset="0.0"/>
     <output dtOut="0.940438871" tsOut="100" timer="F"/>
     <physics doMHD="T" doBoris="T" Ca="10.0"/>
     <prob Rho0="0.2" P0="0.001"/>
     <ring gid="lfm" doRing="T" Nr="4" Nc1="8" Nc2="16" Nc3="32" Nc4="32"/>
     <wind tsfile="bcwind.h5"/>
   </Gamera>

Note: XML keys are not case sensitive, but input strings (like filenames) are.

The overall run time of the simulation (in code units) is specified in
omega/sim/tFin.

gamera/sim

runid: String to prepend to output files from this run

doH5g/H5Grid: Whether or not to read the grid from an HDF5 file and, if so,
the grid filename

icType: Whether to use a canned problem file from prob.F90 or the
user-specified IC routine (GAMIC cmake variable)

pdmb: Value of beta for the partial donor method; 1.0 is standard (see paper)

pFloor/dFloor: Global floors (in code units) for pressure/density

rmeth: Spatial reconstruction method; standard choices are 8CENT/7UP (see
paper)

gamera/restart

dtRes: Time interval (in code units) to output restart dumps

doRes/resFile: Whether to load a restart dump for this run

doReset/tReset: If restarting, whether to reset the code time to tReset

gamera/output

dtOut: Time interval (in code units) to output 3D data slices

tsOut: Interval (in timesteps) to write output to the console

timer: Whether to output timing data to the console

gamera/physics

doMHD: Whether this is an MHD run

gamma: Adiabatic index

doBoris/Ca: Whether to use the Boris correction and, if so, the Boris speed
of light (in code units)

gamera/prob: Various parameters specified by the IC file

planet: Specify parameters for "EARTH", "SATURN", "JUPITER", "MERCURY",
"NEPTUNE", or "OTHER"

If "OTHER", you may specify values for:

x0: planet radius [m]

v0: velocity scale [m/s]

G0: gravity [m/s2]

M0: magnetic moment [gauss]

Psi0: corotation potential [kV]

Rion: ionosphere radius [x0]

doGrav: True or False

If any values are not specified, they will default to Earth values.

gamera/ring

doRing: Whether to use ring-average

Nr: Number of rings to use, followed by Nc1, Nc2, ... specifying the chunking
for each ring
366 docs/source/_obsolete/magnetosphere/xml/generatingXML.rst Normal file
@@ -0,0 +1,366 @@
Generating XML Files
====================

Kaiju requires XML files to provide key information on how a run is to occur.
This wiki page should help ensure that users can generate their own XML files,
convert old XML files into new ones, and use the new XMLGenerator script with
configuration (.ini) files to make the XML generating process easier.

Converting Old XML To The New Format
------------------------------------

A good portion of users likely already have XML files that they have been
using for a while. Unfortunately, those are likely broken now. *Technically*,
they were never XML files to begin with, because *technically* XML must have a
single root node.

As a result, all XML files must now contain the ``<Kaiju></Kaiju>`` root node.
To convert an XML file that previously worked:

Add ``<Kaiju>`` to the top of the file, alone on the first line.

Add ``</Kaiju>`` to the bottom of the file, alone on the last line.

Indent everything in between by one level.

So an old XML file that will no longer work might look like this:

.. code-block:: xml

   <voltron>
     <sim tFin="677.115987461"/>
     <spinup doSpin="F"/>
     <coupling dt="5.0"/>
   </voltron>
   <Gamera>
     <iPdir N="1"/>
     <jPdir N="2"/>
     <kPdir N="1"/>
     <ibc bc="user" jperiod="F" kperiod="T"/>
     <jbc bc="user" iperiod="F" kperiod="T"/>
     <kbc bc="periodic" iperiod="F" jperiod="F"/>
     <sim runid="msphere" doH5g="T" H5Grid="lfmD.h5" icType="user" pdmb="4.0" rmeth="7UP" pFloor="1e-06" dFloor="0.0001"/>
     <restart dtRes="14.1065830721" doRes="F" resFile="msphere.Res.XXXXX.h5"/>
     <output dtOut="0.235109717868" tsOut="100" timer="F"/>
     <physics doMHD="T" doBoris="T" Ca="10.0"/>
     <prob Rho0="0.1" P0="0.001" rCut="16.0" lCut="8.0"/>
     <ring gid="lfm" doRing="T" Nr="3" Nc1="8" Nc2="16" Nc3="32"/>
     <wind tsfile="bcwind.h5"/>
   </Gamera>
   <REMIX>
     <grid Np="360" Nt="45" LowLatBoundary="45.0"/>
     <conductance const_sigma="T" ped0="10.0"/>
   </REMIX>

Whereas the corrected XML file will look like this:

.. code-block:: xml

   <Kaiju>
     <voltron>
       <sim tFin="677.115987461"/>
       <spinup doSpin="F"/>
       <coupling dt="5.0"/>
     </voltron>
     <Gamera>
       <iPdir N="1"/>
       <jPdir N="2"/>
       <kPdir N="1"/>
       <ibc bc="user" jperiod="F" kperiod="T"/>
       <jbc bc="user" iperiod="F" kperiod="T"/>
       <kbc bc="periodic" iperiod="F" jperiod="F"/>
       <sim runid="msphere" doH5g="T" H5Grid="lfmD.h5" icType="user" pdmb="4.0" rmeth="7UP" pFloor="1e-06" dFloor="0.0001"/>
       <restart dtRes="14.1065830721" doRes="F" resFile="msphere.Res.XXXXX.h5"/>
       <output dtOut="0.235109717868" tsOut="100" timer="F"/>
       <physics doMHD="T" doBoris="T" Ca="10.0"/>
       <prob Rho0="0.1" P0="0.001" rCut="16.0" lCut="8.0"/>
       <ring gid="lfm" doRing="T" Nr="3" Nc1="8" Nc2="16" Nc3="32"/>
       <wind tsfile="bcwind.h5"/>
     </Gamera>
     <REMIX>
       <grid Np="360" Nt="45" LowLatBoundary="45.0"/>
       <conductance const_sigma="T" ped0="10.0"/>
     </REMIX>
   </Kaiju>

Generating New XML Files From Scratch
-------------------------------------

Config Files
^^^^^^^^^^^^

The XMLGenerator script uses configuration (.ini) files to generate XML files.
A config file looks like this:

.. code-block:: ini

   ## MODULE NAME ##
   [Section Name 1]
   optionName1 = T

   [Section Name 2]
   optionName2 = 20.0
   optionName3 = hello

   ## MODULE NAME 2 ##
   [Section Name 3]
   optionName4 = junk # Comments

"MODULE NAME" corresponds to modules like VOLTRON, Gamera, and CHIMP.

"[Section Name]" corresponds to sections underneath modules; for example,
time, output, and spinup under VOLTRON.

"optionName" corresponds to an option underneath a section header; for
example, tFin, dtOut, and doTimer. The values assigned to these can be
strings, integers, or floats, and can be followed by comments.

Anything that follows a "#" is treated as a comment by the vanilla
configuration file parser. However, we have added some more functionality that
will be covered later.

Let's take a look at an XML file that can be used with Kaiju and then see what
it looks like as a config file. Here is what "kaiju/tests/remix/cmiD.xml"
looks like as an XML file:

.. code-block:: xml

   <Kaiju>
     <voltron>
       <sim tFin="677.115987461"/>
       <spinup doSpin="F"/>
       <coupling dt="5.0"/>
     </voltron>
     <Gamera>
       <iPdir N="1"/>
       <jPdir N="2"/>
       <kPdir N="1"/>
       <ibc bc="user" jperiod="F" kperiod="T"/>
       <jbc bc="user" iperiod="F" kperiod="T"/>
       <kbc bc="periodic" iperiod="F" jperiod="F"/>
       <sim runid="msphere" doH5g="T" H5Grid="lfmD.h5" icType="user" pdmb="4.0" rmeth="7UP" pFloor="1e-06" dFloor="0.0001"/>
       <restart dtRes="14.1065830721" doRes="F" resFile="msphere.Res.XXXXX.h5"/>
       <output dtOut="0.235109717868" tsOut="100" timer="F"/>
       <physics doMHD="T" doBoris="T" Ca="10.0"/>
       <prob Rho0="0.1" P0="0.001" rCut="16.0" lCut="8.0"/>
       <ring gid="lfm" doRing="T" Nr="3" Nc1="8" Nc2="16" Nc3="32"/>
       <wind tsfile="bcwind.h5"/>
     </Gamera>
     <REMIX>
       <grid Np="360" Nt="45" LowLatBoundary="45.0"/>
       <conductance const_sigma="T" ped0="10.0"/>
     </REMIX>
   </Kaiju>

Here is the exact same file, but written as a config file:

.. code-block:: ini

   ## voltron ##
   [sim]
   tFin = 677.115987461

   [spinup]
   doSpin = F

   [coupling]
   dt = 5.0

   ## Gamera ##
   [iPdir]
   N = 1

   [jPdir]
   N = 2

   [kPdir]
   N = 1

   [ibc]
   bc = user
   jperiod = F
   kperiod = T

   [jbc]
   bc = user
   iperiod = F
   kperiod = T

   [kbc]
   bc = periodic
   iperiod = F
   jperiod = F

   [sim]
   runid = msphere
   doH5g = T
   H5Grid = lfmD.h5
   icType = user
   pdmb = 4.0
   rmeth = 7UP
   pFloor = 1e-06
   dFloor = 0.0001

   [restart]
   dtRes = 14.1065830721
   doRes = F
   resFile = msphere.Res.XXXXX.h5

   [output]
   dtOut = 0.235109717868
   tsOut = 100
   timer = F

   [physics]
   doMHD = T
   doBoris = T
   Ca = 10.0

   [prob]
   Rho0 = 0.1
   P0 = 0.001
   rCut = 16.0
   lCut = 8.0

   [ring]
   gid = lfm
   doRing = T
   Nr = 3
   Nc1 = 8
   Nc2 = 16
   Nc3 = 32

   [wind]
   tsfile = bcwind.h5

   ## REMIX ##
   [grid]
   Np = 360
   Nt = 45
   LowLatBoundary = 45.0

   [conductance]
   const_sigma = T
   ped0 = 10.0

Creating A Usable XML File With XMLGenerator
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

If you already have a .ini file that is formatted properly, then the steps for
generating a usable XML file are fairly simple.

Ensure that you have your $KAIJUDIR/scripts folder on your $PATH, $KAIJUDIR on
your $PYTHONPATH, and your python environment set up according to the
[[Quick Start]] guide.

Enter the following command: "XMLGenerator.py myConfigFile.ini
myOutputXMLFile.xml"

If you get a message saying "Template creation complete!", then it succeeded
and the "myOutputXMLFile.xml" file (or whatever you decided to call it) should
exist in your folder, ready for use.

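As a copy-paste sketch (the file names are placeholders from the command
above):

.. code-block:: bash

   XMLGenerator.py myConfigFile.ini myOutputXMLFile.xml
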
Using Config Files To Modify Existing XML Files
-----------------------------------------------

Ensure that you have your $KAIJUDIR/scripts folder on your $PATH, $KAIJUDIR on
your $PYTHONPATH, and your python environment set up according to the
[[Quick Start]] guide.

Ensure that you have a valid XML file to use as a template for the new one
you are about to generate.

Enter the following command:
"XMLGenerator.py myTemplateXMLFile.xml myConfigFile.ini myOutputXMLFile.xml"

If you get a message saying "XML generation complete!", then it succeeded and
the "myOutputXMLFile.xml" file (or whatever you decided to call it) should
exist in your folder, ready for use.

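Again as a copy-paste sketch (placeholder file names as above):

.. code-block:: bash

   XMLGenerator.py myTemplateXMLFile.xml myConfigFile.ini myOutputXMLFile.xml
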
Other Functionality
-------------------

There are a couple of other things you can do with your .ini configuration
files.

Time Unit Conversion
^^^^^^^^^^^^^^^^^^^^

Some settings are in units of time, like tFin, and the XML requires them to
be in seconds. However, a run can be set to run for a while, and that means
this number can get pretty big pretty quickly. As such, the XMLGenerator
script allows you to set units for time settings in seconds, minutes, or
hours. So instead of writing 43,200 seconds, you can just write 12 hours.

Here is an example of a time setting in an .ini file without any special
options:

.. code-block:: ini

   [sim]
   tFin = 43200

Here is the same line but with a special flag that says "This value is in
minutes":

.. code-block:: ini

   [sim]
   tFin = 720 # [min]

And finally, one written in hours:

.. code-block:: ini

   [sim]
   tFin = 12 # [hrs]

To add these flags to your .ini files, follow these rules:

The flag must be placed on the same line as the value you want to convert.

There must be a "#" placed after the value AND before the flag.

The following options are valid: "[sec]", "[min]", "[hrs]".

If you use an incorrect unit like "dogs", the following message will appear:

.. code-block:: shell

   ERROR: Incorrect unit type for conversion: dogs

All of these flags will ensure that the final output of the XML file has your
time values in seconds; just what Kaiju needs!

Deleting Options
^^^^^^^^^^^^^^^^

Let's say there is an option in a template XML file that you don't want in
your final, output XML file. In that case, you can use the "!DEL" flag to tell
the script you don't want this value included.

There are two ways to use this flag: as the value of the option you want to
delete, or as a comment like the unit conversion flags.

Here is an example of the first method:

.. code-block:: ini

   [sim]
   tFin = !DEL

And here is an example of the second:

.. code-block:: ini

   [sim]
   tFin = 12 # !DEL

Both methods will make sure that tFin does not appear in your final XML file.

Dependencies
------------

The XMLGenerator script requires the following dependencies:

`Config Parser <https://pypi.org/project/config-parser/>`_
15 docs/source/_obsolete/magnetosphere/xml/index.rst Normal file
@@ -0,0 +1,15 @@
XML
===

Introduction
------------

This page provides links to documentation for the XML files used by the MAGE
software.

.. toctree::
   :maxdepth: 1

   gameraXML
   generatingXML
   voltronXML
239 docs/source/_obsolete/magnetosphere/xml/voltronXML.rst Normal file
@@ -0,0 +1,239 @@
VOLTRON Block - Coupler
=======================

VOLTRON
-------

Example XML:

.. code-block:: xml

   <VOLTRON>
     <time tFin="3601.0"/>
     <output dtOut="60" tsOut="100" doTimer="F"/>
     <spinup doSpin="T" tSpin="1800.0" tIO="0.0"/>
     <restart dtRes="1800.0"/>
     <imag doInit="T"/>
     <coupling dt="5.0" doGCM="T" dtDeep="15.0" rTrc="40.0" imType="RCM" doQkSquish="T"/>
   </VOLTRON>

VOLTRON/time
------------

tFin: [seconds] - The time (in seconds) from the start of the simulation at
which to stop the run.

VOLTRON/output
--------------

* dtOut: [seconds]
* tsOut: [?]
* doTimer: [True/False] - ?

VOLTRON/restart
---------------

dtRes: [seconds] - At what cadence do we output a restart file? 1800.0 =
every 30 minutes.

VOLTRON/imag
------------

* doInit: [True/False] - Do we do Kareem's initialized ring current?

VOLTRON/coupling - The most important section
---------------------------------------------

* dt: [seconds] - How often do we couple with REMIX (and GCM)?
* doGCM: [True/False] - Are we coupling to a GCM?
* dtDeep: [seconds] - How often do we couple to RCM?
* rTrc: [?] - ?
* imType: [RCM] - ?
* doQkSquish: [True/False] - This has to do with the very expensive operation
  of taking (basically) every cell on the gamera grid and projecting it to
  the northern hemisphere.
* doSerial: [True/False] - Voltron runs concurrently with Gamera by default.
  This can be set to True to force Voltron and Gamera to run serially, taking
  turns and waiting for each other.

Gamera Block - The MHD stuff
----------------------------

Example XML:

.. code-block:: xml

   <Gamera>
     <sim runid="msphere" doH5g="T" H5Grid="lfmQ.h5" icType="user" pdmb="1.0" pFloor="1.0e-8" dFloor="1.0e-6" rmeth="7UP"/>
     <restart doRes="F" nRes="-1" resID="msphere"/>
     <physics doMHD="T" doBoris="T" Ca="10.0"/>
     <ring gid="lfm" doRing="T"/>
     <wind tsfile="bcwind.h5"/>
     <iPdir N="4" bcPeriodic="F"/>
     <jPdir N="4" bcPeriodic="F"/>
     <kPdir N="1" bcPeriodic="T"/>
     <source doSource="T"/>
   </Gamera>

Gamera/sim
----------

REMIX Block - The thing that solves for Potential
-------------------------------------------------

Example XML:

.. code-block:: xml

   <!-- Remix params -->
   <REMIX>
     <grid Np="360" Nt="45" LowLatBoundary="45.0"/>
     <conductance F107="100.0" pedmin="2.0" hallmin="1.0" sigma_ratio="3.0" const_sigma="False" ped0="10.0"/>
     <precipitation aurora_model_type="RCMONO" alpha="0.2" beta="0.4"/>
   </REMIX>

REMIX/grid
----------

* Np: [integer] - How many bins in the longitude direction (360/Np, where
  Np=360 means 1°).
* Nt: [integer] - How many bins in the colatitude direction ((90-LowLat)/Nt,
  where Nt=45 and LowLat=45 means 1°).
* LowLatBoundary: [degree] - At what colatitude degree is your Low Latitude
  Boundary?

REMIX/conductance
-----------------

There are a lot of conditionals within this section, so it may become
complicated.

* const_sigma: [True/False] - Do we want to use a uniform constant
  conductance?

If const_sigma = True

* ped0: [float] - Set the pedersen conductance to a uniform ped0 value.

If const_sigma = False

F107: [float] - What SFU is the F10.7 for the run?

pedmin: [float] - What is the minimum pedersen conductance?

hallmin: [float] - What is the minimum hall conductance?

sigma_ratio: [float] - What is the maximum Ped/Hall ratio that we allow?

ped0: [float] - Even if const_sigma = False, this is used to specify the
conductance during the spin-up (-60 min) period.

If doGCM = True

pedmin: [float] - minimum pedersen conductance [min(gcmped,pedmin)]

hallmin: [float] - minimum hall conductance [min(gcmhall,hallmin)]

REMIX/precipitation
-------------------

aurora_model_type: [FEDDER,RCMONO] - Do we use Fedder precipitation or
RCM+Zhang Mono precipitation? Keep in mind RCMONO requires RCM or it will do
weird things.

alpha: [float] - Alpha parameter that specifies the Ti/Te temperature ratio.
[default RCMONO=0.2, FEDDER=1.0332467]

beta: [float] - Beta parameter that approximately translates to the loss cone
filling factor. [default RCMONO = 0.4, FEDDER = 0.4362323]

R: [float] - Only used by FEDDER. [default FEDDER=0.083567956]

CHIMP Block - Crazy fast field line tracer
------------------------------------------

Example XML:

.. code-block:: xml

   <!-- EB-Tracer (CHIMP) params -->
   <CHIMP>
     <units uid="EARTHCODE"/>
     <fields grType="LFM"/>
     <domain dtype="SPH" rmin="2.0" rmax="40.0"/>
     <tracer epsds="0.05"/>
   </CHIMP>

RCM Block - The Rice Convection Model
-------------------------------------

Example XML:

.. code-block:: xml

   <RCM>
     <output debug="F" toRCM="F" toMHD="F"/>
     <clawpack doKaiClaw="F"/>
     <ellipse xSun="15.0" xTail="-15.0" yDD="15.0" isDynamic="T" dRadMHD="1.0"/>
   </RCM>

RCM/ellipse
-----------

xSun: [float] - Maximum positive X value of the RCM grid in RE.

xTail: [float] - Furthest down the tail the RCM grid can go.

yDD: [float] - How wide the RCM flanks can be in the Y direction.

isDynamic: [True/False] - Turn on the dynamic plasmasphere vs. Gallagher.

dRadMHD: [float] - Magic number.

Full Example XML - By your powers combined, I am VOLTRON/MiniMAGE!
------------------------------------------------------------------

Example XML:

.. code-block:: xml

   <?xml version="1.0"?>
   <!-- Example XML file for coupled RCM+Gamera+ReMIX+CHIMP -->
   <Kaiju>
     <!-- Magnetosphere params, Voltron times in seconds -->
     <VOLTRON>
       <time tFin="3601.0"/>
       <output dtOut="60" tsOut="100" doTimer="F"/>
       <restart dtRes="1800.0"/>
       <imag doInit="T"/>
       <coupling dt="5.0" doGCM="F" dtDeep="15.0" rDeep="8.0" rTrc="16.0" imType="RCM"/>
     </VOLTRON>
     <Gamera>
       <sim runid="msphere" doH5g="T" H5Grid="lfmQ.h5" icType="user" pdmb="1.0" pFloor="1.0e-8" dFloor="1.0e-6" rmeth="7UP"/>
       <restart doRes="T" nRes="0" resID="msphere"/>
       <physics doMHD="T" doBoris="T" Ca="10.0"/>
       <ring gid="lfm" doRing="T"/>
       <wind tsfile="bcwind.h5"/>
       <iPdir N="4" bcPeriodic="F"/>
       <jPdir N="4" bcPeriodic="F"/>
       <kPdir N="1" bcPeriodic="T"/>
       <source doSource="T"/>
     </Gamera>
     <!-- Remix params -->
     <REMIX>
       <grid Np="360" Nt="45" LowLatBoundary="45.0"/>
       <conductance F107="100.0" pedmin="2.0" hallmin="1.0" sigma_ratio="3.0" const_sigma="False" ped0="10.0"/>
       <precipitation aurora_model_type="RCMONO" alpha="0.2" beta="0.4"/>
     </REMIX>
     <!-- EB-Tracer (CHIMP) params -->
     <CHIMP>
       <units uid="EARTHCODE"/>
       <fields grType="LFM"/>
       <domain dtype="SPH" rmin="2.0" rmax="25.0"/>
       <tracer epsds="0.05"/>
     </CHIMP>
     <RCM>
       <output debug="F" toRCM="F" toMHD="F"/>
       <clawpack doKaiClaw="F"/>
       <ellipse xSun="15.0" xTail="-15.0" yDD="105.0" isDynamic="T" dRadMHD="1.0"/>
     </RCM>
   </Kaiju>
10 docs/source/_obsolete/rcm/index.rst Normal file
@@ -0,0 +1,10 @@
Rice Convection Model
=====================

.. toctree::
   :maxdepth: 1

   lambdaUtils
   rcm_xml
   rcmx
   wmutils
70 docs/source/_obsolete/rcm/lambdaUtils.rst Normal file
@@ -0,0 +1,70 @@
Generate and Test Lambda Distributions
======================================

Starting instructions for using kaipy/rcm/lambdautils to generate and test
lambda distributions for use in rcmconfigs.

Generating an rcmconfig.h5 file using defaults:

.. code-block:: python

   import kaipy.rcm.lambdautils.genAlam as genAlam
   from kaipy.rcm.lambdautils.AlamData import AlamParams

   params = AlamParams()  # All params can be set in constructor
   # Otherwise use defaults in AlamParams init
   genAlam.genh5("rcmconfig.h5", params)  # Writes data to file

AlamParams is used to store the parameters used to generate electron and
proton lambda channels. The parameters, along with their defaults (as of
06-16-2021), are:

distType : 'wolf' - Distribution type. Accepts 'lin', 'log', and 'wolf'

num_e : 50 - Number of electron channels

num_p : 149 - Number of proton channels

aMin_e : -1.0 - Lower electron energy bound [eV]

aMin_p : -1.0 - Lower proton energy bound [eV]

ktMax : 50000 - Highest energy in [eV] that should be resolved at the
GAM-RCM coupling boundary

L_kt : 10 - L shell in [R_E] where ktMax should be resolved

tiote : 4.0 - T_i/T_e ratio

p1 : 3.0 - p1 value used in the 'wolf' lambda distribution generator

p2 : 1.0 - p2 value used in the 'wolf' lambda distribution generator

addPsphere : True - Add a 0-energy plasmasphere channel

In ``genAlam.genh5(params)``\ , ``doShowPlot=True`` can be set to show the
resulting lambda value vs. k channel plot. ``doTests=<bool>`` may also be set.
This will run the generated lambda channels through a series of checks found
in ``alamTester.py``. The output looks like:

.. code-block:: bash

   Smear test at L = 2.5: Passed
   Worst smear/cellWidth: 8.04e-01
   kT min/max range within 2.0% tolerance:
   Maxwellian : 4.33e-02/8.11e+01 [keV]
   Variance: D = 5.37e-09 P = 4.24e-05
   kappa = 5: 3.05e-01/4.04e+01 [keV]
   Variance: D = 4.45e-05 P = 2.01e-05

The smear test checks whether any lambda channel is so wide that lambda- and
lambda+ would drift farther apart than a single grid cell over one GAM-RCM
coupling timestep. Ideally, ``smear/cellWidth < 1``.

The second check will try to find the range of temperatures between which the
lambda channels accurately reproduce the input density and pressure within a
given tolerance. If everything is -1, then the first checked temperature
(1 keV) was out of the tolerance range.

Many of the scripts in kaipy/rcm/lambdautils can be executed directly if
desired.
208 docs/source/_obsolete/rcm/rcm_xml.rst Normal file
@@ -0,0 +1,208 @@
XML input files for RCM
|
||||
=======================
|
||||
|
||||
Introduction
|
||||
------------
|
||||
|
||||
These XML elements and attributes were extracted from ``src/rcm/modules.F90``,
|
||||
``src/rcm/rcm_mhd_io.F90``, and ``src/rcm/rcm_subs.F90``.
|
||||
|
||||
``<Kaiju>``
|
||||
|
||||
``<RCM>``
|
||||
|
||||
``<advect>``
|
||||
|
||||
``doSmoothDDV`` (optional, Boolean, default ``"T"``): Set to ``"T"`` to
|
||||
smooth ij derivatives of residual flux tube volume (FTV).
|
||||
|
||||
``epsPk`` (optional, float, default ``"1.0e-3"``): Cumulative pressure
|
||||
fraction threshold used to limit number of energy channels evolved
|
||||
|
||||
``<chargex>``
|
||||
|
||||
``kill_fudge`` (optional, Boolean, default ``"F"``: Set to ``"T"`` means no
|
||||
loss.
|
||||
|
||||
``L_dktime`` (optional, Boolean, default ``"T"``: Set to ``"T"`` to read the
|
||||
loss from the dktime table.
|
||||
|
||||
``sunspot_number`` (optional, float, default ``"96.0"``: No longer in use.
|
||||
|
||||
``<clawpack>``
|
||||
|
||||
``doOMPClaw`` (optional, Boolean, default ``"T"``: Set to ``"T"`` to use
|
||||
OpenMP threading on the clawpack solver.
|
||||
|
||||
``<eflux>``
|
||||
|
||||
``icorrect`` (optional, Boolean, default ``"T"``: Set to ``"T"`` make lat.
|
||||
correction to EFLUX.
|
||||
|
||||
``ifloor`` (optional, Boolean, default ``"T"``: Set to ``"T"`` to install a
|
||||
floor for EFLUX.
|
||||
|
||||
``<ellipse>``
|
||||
|
||||
``isDynamic`` (optional, Boolean, default ``"T"``):Set to ``"T"`` to use the
|
||||
ellipse boundary on RCM.
|
||||
|
||||
``xSun`` (optional, float, default ``"12.5"``): The Sun-Earth distance.
|
||||
|
||||
``xTail`` (optional, float, default ``"-15.0"``): The Earth-tail distance.
|
||||
|
||||
``yDD`` (optional, float, default ``"15.0"``): Positive/negative y-axis
|
||||
bounds.
|
||||
|
||||
``<experimental>``
|
||||
|
||||
``doNoBndFlow`` (optional, Boolean, default ``"F"``: Option to restrict
|
||||
inward boundary flow. Recommended to be left as false
|
||||
|
||||
``NBFLayers`` (optional, integer, default ``"2"``: Number of cells from the
|
||||
open-closed boudnary to restrict inward flow over. Only used if
|
||||
doNoBndFlow="T"
|
||||
|
||||
``<grid>``
|
||||
|
||||
``doLatStretch`` (optional, Boolean, default ``"F"``): Set to ``"T"`` to use
|
||||
non-uniform RCM grid
|
||||
|
||||
``HiLat`` (optional, float, default ``"75.0"``): High-latitude grid boundary
|
||||
(in degrees)
|
||||
|
||||
``LowLat`` (optional, float, default ``"30.0"``): Low-latitude grid boundary
|
||||
(in degrees)
|
||||
|
||||
``ibnd_type`` (optional, integer, default ``"4"``): Type of bndy (1-eq.p,
|
||||
2-iono).
|
||||
|
||||
``ipcp_type`` (optional, integer, default ``"13"``): Type of bndy (1-eq.p,
|
||||
2-iono).
|
||||
|
||||
`L_move_plasma_grid`` (optional, Boolean, default ``"T"``): No longer in use
|
||||
|
||||
``nsmthi`` (optional, integer, default ``"0"``): How much to smooth cond in
|
||||
I
|
||||
|
||||
``nsmthj`` (optional, integer, default ``"0"``): How much to smooth cond in
|
||||
J
|
||||
|
||||
``<loss>``
|
||||
|
||||
``doFLCLoss`` (optional, Boolean, default ``"T"``): Set to ``"T"`` to use
|
||||
FLC losses.
|
||||
|
||||
``doNewCX`` (optional, Boolean, default ``"T"``): Set to ``"T"`` to use the
newer CX loss estimate.

``doRelax`` (optional, Boolean, default ``"T"``): Set to ``"T"`` to relax the
energy distribution.

``doTDSLoss`` (optional, Boolean, default ``"T"``): Set to ``"T"`` to use TDS
losses.

``eLossMethod`` (optional, string, default ``"FDG"``): Choose the electron
loss method from ``FDG``, ``WM``, ``SS``, ``C05``, ``C19``.

``<output>``

``debug`` (optional, Boolean, default ``"F"``): Enable debug console output.

``doDebug`` (optional, Boolean, default ``"F"``): Set to ``"T"`` to print
diagnostic messages.

``doDebugH5`` (optional, Boolean, default ``"F"``): Enable debug output to
the HDF5 file. Greatly increases file size.

``doFLOut`` (optional, Boolean, default ``"F"``): Set to ``"T"`` to output
field lines (slow).

``iDebug`` (optional, integer, default ``"1"``): Set to ``0`` to do disk
printout.

``nSkipFL`` (optional, integer, default ``"8"``): Stride for outputting field
lines (units unspecified).

``toMHD`` (optional, Boolean, default ``"F"``): No longer in use.

``toRCM`` (optional, Boolean, default ``"F"``): No longer in use.

``<params>``

``cmax`` (optional, float, default ``"3.0"``): Used in ``rcm_mod_balgn``.

``eeta_cutoff`` (optional, float, default ``"3.0"``): As a fraction.

``ipot`` (optional, integer, default ``"-1"``): Which potential solver to
use.

``icond`` (optional, integer, default ``"3"``): ``1`` is active conductances,
``2`` is Hardy with Kp, ``3`` is input.

``iwind`` (optional, integer, default ``"0"``): ``0`` is no neutral winds.

``<plasmasphere>``

``DenPP0`` (optional, float, default ``"0.0"``): Plasmasphere density cutoff
[#/cc].

``doPPSmooth`` (optional, Boolean, default ``"T"``): Try to smooth the
plasmapause.

``doRefill`` (optional, Boolean, default ``"F"``): Set to ``"T"`` to refill
the plasmasphere.

``initKp`` (optional, integer, default ``"1"``): Initial Kp condition for the
Gallagher model.

``isDynamic`` (optional, Boolean, default ``"T"``): Set to ``"T"`` to use the
dynamic plasmasphere.

``staticR`` (optional, float, default ``"0.0"``): Set the static part (in
radius) of the plasmasphere.

``tAvg`` (optional, float, default ``"0.0"``): Averaging timescale applied to
the electrostatic potential used to evolve the plasmasphere (seconds).

``<restart>``

``nRes`` (optional, integer, default ``"-1"``): Restart number.

``resID`` (optional, string, default ``"msphere"``): Run ID for restart.

``<sim>``

``nSubstep`` (optional, integer, default ``"4"``): Number of substeps in each
MHD-RCM coupling cycle.

``runid`` (optional, string, default ``"MAGE sim"``): ID string to use for
the run.

``<tilt>``

``isTilt`` (optional, Boolean, default ``"F"``): Set to ``"T"`` to tilt the
dipole; corotation must also be turned off.

``<tomhd>``

``doAvg2MHD`` (optional, Boolean, default ``"T"``): Determines whether the
moments given back to MHD are instantaneous values or averaged over the
coupling duration.

``doQ0`` (optional, Boolean, default ``"T"``): Set to ``"T"`` to include
implicit cold ions in tomhd moments.

``doRelax`` (optional, Boolean, default ``"T"``): Set to ``"T"`` to relax the
energy distribution.

``<torcm>``

``doKappa`` (optional, Boolean, default ``"T"``): Set to ``"T"`` to do kappa
by default.

``doRescale`` (optional, Boolean, default ``"T"``): Set to ``"T"`` to rescale
D, P => eta by default.

``doSmoothBNDLOC`` (optional, Boolean, default ``"T"``): Set to ``"T"`` to do
bndloc smoothing.
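As an illustration, a minimal (hypothetical) deck combining a few of the
sections above might look like the following; the attribute values are
examples only, not recommendations.

.. code-block:: xml

   <RCM>
     <output doDebug="T" doFLOut="F"/>
     <plasmasphere doRefill="T" initKp="2"/>
     <sim runid="example run" nSubstep="4"/>
   </RCM>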
39
docs/source/_obsolete/rcm/rcmx.rst
Normal file
@@ -0,0 +1,39 @@
Running rcm.x
=============

These are instructions for running the RCM stand-alone driver.

Follow the instructions in the quick start section. Once the standard drivers
can be compiled, you can use the same build system to run ``make rcm.x``,
which will create the driver.

In addition to the ``rcm.x`` binary, two additional files are needed. The
first is the RCM config, which can be generated using the ``genRCM.py``
script under ``kaiju/scripts``; this will create a file ``rcmconfig.h5``.
The second is an input deck XML, an example of which is below.

.. code-block:: xml

   <RCM>
     <output debug="F" toRCM="F" toMHD="F"/>
   </RCM>

Assuming the XML deck is called ``rcmtest.xml``, the executable can be run
simply with ``rcm.x rcmtest.xml``. The experimental new clawpack version can
be enabled by editing ``kaiju/src/rcm/rcm_include.h`` and changing
``doClaw95`` to true.

.. code-block:: fortran

   INTEGER (iprec), PARAMETER :: &
      isize = 200, &
      jsize = 101, &
      jwrap = 3, &
      ksize = 090, kcsize = 090, &
      nptmax = 50000, &
      iesize = 2, &
      ncoeff = 5
   LOGICAL, PARAMETER :: asci_flag = .FALSE.
   LOGICAL, PARAMETER :: isGAMRCM = .TRUE. !Whether running coupled to Gamera
   LOGICAL, PARAMETER :: doQuietRCM = .TRUE.
   LOGICAL, PARAMETER :: doClaw95 = .FALSE.
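As a quick reference, a sketch of the end-to-end workflow described above
(paths are illustrative; ``genRCM.py`` lives under ``kaiju/scripts``):

.. code-block:: bash

   # Build the stand-alone RCM driver from a configured build directory.
   make rcm.x

   # Generate the RCM configuration file (writes rcmconfig.h5).
   python /path/to/kaiju/scripts/genRCM.py

   # Run the driver against the example input deck.
   ./rcm.x rcmtest.xml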
56
docs/source/_obsolete/rcm/wmutils.rst
Normal file
@@ -0,0 +1,56 @@
Diffuse Precipitation
=====================

To run MAGE with wave model input:

Make sure ``DWang_chorus_lifetime.h5`` and ``tauTDS.txt`` are inside
``kaiju/kaipy/rcm/wmutils``.

Activate the NPL environment:

.. code-block:: bash

   ncar_pylib casper_satcomp_pylib

To generate an ``rcmconfig.h5`` that contains the electron lifetime based on
the wave model, run ``kaiju/scripts/preproc/genRCM.py`` with the
``waveModel`` option enabled:

.. code-block:: bash

   python genRCM.py -waveModel True

To add the electron lifetime to an existing ``rcmconfig.h5`` file, run
``kaiju/scripts/preproc/genRCM.py`` with the ``addWM`` option enabled and
enter the input file name:

.. code-block:: bash

   python genRCM.py -addWM True -i rcmconfig.h5

The generated ``rcmconfig.h5`` should contain:

* Eki Dataset {155}
* Kpi Dataset {7}
* Li Dataset {41}
* MLTi Dataset {25}
* Tau1i Dataset {155, 41, 25, 7}
* Tau2i Dataset {155, 41, 25, 7}
* EkTDSi Dataset {109}
* TauTDSi Dataset {109}
* alamc Dataset {160}
* dktable Dataset {936}
* fudgec Dataset {160}
* ikflavc Dataset {160}
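One quick way to confirm those datasets are present is to list the file
contents, for example with ``h5py`` (a sketch; assumes ``h5py`` is
installed):

.. code-block:: python

   import h5py

   # Print each top-level entry and its shape; the wave-model datasets
   # (Tau1i, Tau2i, EkTDSi, TauTDSi, ...) should appear in the listing.
   with h5py.File("rcmconfig.h5", "r") as f:
       for name in f:
           obj = f[name]
           print(name, obj.shape if hasattr(obj, "shape") else "(group)")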
In the XML file, to run MAGE with wave model input (Dedong Wang Chorus +
Orlova16 Hiss), set the loss method in the RCM section as

.. code-block:: xml

   <RCM>
     <loss eLossMethod="WM"/>
   </RCM>

Return to the first step if you see "Wave model is missing in rcmconfig.h5"
in the ``.out`` file of the MAGE run.
63
docs/source/_obsolete/testing/addingNewTests.rst
Normal file
@@ -0,0 +1,63 @@
Additional Automated Tests
==========================

As new code is written and added to Kaiju, as many automated tests as
possible should be written for it. There are not yet any strict rules about
what parts of the code require tests, but if there are any parts of the code
which are known to be error-prone, those are good targets for tests. At an
absolute bare minimum, create a few tests that run the code end-to-end and
verify the final output (a type of
`Smoke testing <http://softwaretestingfundamentals.com/smoke-testing/>`_).

Writing tests for pFUnit
------------------------

The `pFUnit website <https://github.com/Goddard-Fortran-Ecosystem/pFUnit>`_
has documentation about how to write tests for pFUnit. In particular, I find
the
`examples <https://github.com/Goddard-Fortran-Ecosystem/pFUnit/tree/master/Examples/MPI_Halo/tests>`_
very helpful.

Adding to Existing Test Executables
-----------------------------------

The easiest way to add new tests is to add more tests to existing test
executables. CMake looks in the ``kaiju/tests/`` folders for files with the
extension ``.pf``, and all such files are built into test executables. For
example, all ``.pf`` files in the ``kaiju/tests/gamera/`` folder get compiled
into the ``gamTests`` binary. A new ``.pf`` file could be added to the gamera
folder with new tests for Gamera, and they would automatically be included
and run.

You can use these templates for serial and MPI ``.pf`` files as a starting
point:

:doc:`Serial Test Template <serialTestTemplate>`

:doc:`MPI Test Template <mpiTestTemplate>`

Creating a New Test Executable
------------------------------

If it isn't appropriate to add new tests to an existing test executable, then
you will need to create another set of tests. This should be done by creating
a new subfolder in the ``kaiju/tests/`` folder, which will contain all of the
pFUnit ``.pf`` test files, as well as any required supporting files.

The cmake file located at ``kaiju/tests/CMakeLists.txt`` will need to be
updated with information about this new test executable. Each executable
needs three lines of information in the cmake file. The first line contains
the name of the new subfolder and the search string to find the new ``.pf``
files. The second line lists the dependencies (libraries) that this test
executable will need in order to compile and run. The third line provides the
desired name of the output binary, combines that with the source files and
dependencies provided on the previous two lines, and tells pFUnit the maximum
number of MPI ranks required to run these tests. For non-MPI tests this
number should simply be 1.

By way of example, this is the entry for the ``gamMpiTests`` executable,
containing tests for MPI-enabled gamera.

.. code-block:: cmake

   file(GLOB gamMpiTestFiles "${CMAKE_CURRENT_SOURCE_DIR}/gamera_mpi/*.pf")
   set(gamMpiTestLibs baselib gamlib basempilib gammpilib tgiclib)
   add_pfunit_ctest (gamMpiTests TEST_SOURCES ${gamMpiTestFiles} LINK_LIBRARIES ${gamMpiTestLibs} MAX_PES 64)
30
docs/source/_obsolete/testing/buildpFUnit.rst
Normal file
@@ -0,0 +1,30 @@
Build pFUnit
============

pFUnit can be downloaded from
`https://github.com/Goddard-Fortran-Ecosystem/pFUnit <https://github.com/Goddard-Fortran-Ecosystem/pFUnit>`_.
It is recommended that the user get the latest version of the master branch.

The pFUnit repository contains instructions for building pFUnit. pFUnit
should be built with cmake, and with support for both MPI and OpenMP enabled.

Once pFUnit is built, the compiled files (along with any external
dependencies pFUnit needs) must be placed into the ``external`` directory in
the main kaiju repository. As of pFUnit version 4.1, this meant that four
folders were copied from the pFUnit ``installed`` folder: ``PFUNIT-4.1``,
``GFTL-1.1``, ``GFTL_SHARED-1.0``, and ``FARGPARSE-0.9``. These four folders
are all placed in the ``external`` folder.

NOTE
====

Due to an incompatibility, the file ``add_pfunit_ctest.cmake`` in
``PFUNIT-4.1/include/`` must be modified:

Line 50 must be changed from ``set (test_sources_f90)`` to
``set (test_sources_f90 "")``.

Line 56, containing ``list (APPEND test_sources_f90 ${f90_file})``, must be
commented out or deleted.

These line numbers may change in other versions of pFUnit.
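Concretely, for pFUnit 4.1 the two edits look like this (a sketch; the
surrounding lines of the file are omitted):

.. code-block:: cmake

   # PFUNIT-4.1/include/add_pfunit_ctest.cmake

   # Line 50: initialize the variable with an explicit empty string.
   set (test_sources_f90 "")   # was: set (test_sources_f90)

   # Line 56: comment out (or delete) the append.
   # list (APPEND test_sources_f90 ${f90_file})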
12
docs/source/_obsolete/testing/index.rst
Normal file
@@ -0,0 +1,12 @@
Testing
=======

.. toctree::
   :maxdepth: 1

   buildpFUnit
   unitTesting
   serialTestTemplate
   mpiTestTemplate
   tiegcmBenchmarks
   addingNewTests
56
docs/source/_obsolete/testing/mpiTestTemplate.rst
Normal file
@@ -0,0 +1,56 @@
MPI Test Template
=================

.. code-block:: fortran

   module <INSERT MODULE NAME>
     use testHelperMpi
     ! ADD ADDITIONAL USE STATEMENTS AS REQUIRED

     implicit none

     ! DEFINE ANY OBJECTS THAT YOU WANT TO USE IN THE TESTS HERE
     ! I RECOMMEND MAKING THEM ALLOCATABLE SO THAT THEY CAN BE DESTROYED BETWEEN TESTS

   contains

     @before
     subroutine setup(this)
       class (MpiTestMethod), intent(inout) :: this

       ! THIS SHOULD ALLOCATE AND INITIALIZE ANY OBJECTS YOU WANT TO USE IN YOUR TESTS

       ! YOU CAN GET THE MPI COMMUNICATOR CREATED FOR THIS TEST WITH THE FUNCTION
       ! "getMpiF08Communicator(this)"

     end subroutine

     @after
     subroutine teardown(this)
       class (MpiTestMethod), intent(inout) :: this

       ! THIS SHOULD DEALLOCATE AND DESTROY ANY OBJECTS USED IN YOUR TESTS
     end subroutine

     ! IF YOU WANT TO CREATE ANY HELPER SUBROUTINES THAT YOUR TESTS USE, YOU CAN DO SO HERE

     @test(npes=[1,4,16,64])
     subroutine exampleTest(this)
       class (MpiTestMethod), intent(inout) :: this

       ! THIS IS AN EXAMPLE TEST
       ! THIS SHOULD BE DELETED, AND OTHER REAL TESTS SHOULD REPLACE IT BY PLACING THE
       ! "@test" MARKER BEFORE THE TEST SUBROUTINES

       ! UNLIKE THE SERIAL TEST METHODS, THE MPI METHODS ALSO HAVE THE "npes=([...])" SECTION
       ! IN THE "@test" MARKER. THIS TELLS PFUNIT HOW MANY MPI RANKS TO RUN THE TEST WITH
       ! IN THIS CASE, THIS EXAMPLE TEST WILL BE RUN 4 DIFFERENT TIMES, WITH
       ! FIRST 1, THEN 4, THEN 16, AND THEN 64 MPI RANKS
       ! THIS ALLOWS YOU TO CHECK DIFFERENT MPI SETUPS WITH THE SAME TEST

       ! YOU CAN GET THE MPI COMMUNICATOR CREATED FOR THIS TEST WITH THE FUNCTION
       ! "getMpiF08Communicator(this)"

     end subroutine

   end module
36
docs/source/_obsolete/testing/serialTestTemplate.rst
Normal file
@@ -0,0 +1,36 @@
Serial Test Template
====================

.. code-block:: fortran

   module <INSERT MODULE NAME>
     use testHelper
     ! ADD ADDITIONAL USE STATEMENTS AS REQUIRED

     implicit none

     ! DEFINE ANY OBJECTS THAT YOU WANT TO USE IN THE TESTS HERE
     ! I RECOMMEND MAKING THEM ALLOCATABLE SO THAT THEY CAN BE DESTROYED BETWEEN TESTS

   contains

     @before
     subroutine setup()
       ! THIS SHOULD ALLOCATE AND INITIALIZE ANY OBJECTS YOU WANT TO USE IN YOUR TESTS
     end subroutine

     @after
     subroutine teardown()
       ! THIS SHOULD DEALLOCATE AND DESTROY ANY OBJECTS USED IN YOUR TESTS
     end subroutine

     ! IF YOU WANT TO CREATE ANY HELPER SUBROUTINES THAT YOUR TESTS USE, YOU CAN DO SO HERE

     @test
     subroutine exampleTest()
       ! THIS IS AN EXAMPLE TEST
       ! THIS SHOULD BE DELETED, AND OTHER REAL TESTS SHOULD REPLACE IT BY PLACING THE
       ! "@test" MARKER BEFORE THE TEST SUBROUTINES
     end subroutine

   end module
166
docs/source/_obsolete/testing/tiegcmBenchmarks.rst
Normal file
@@ -0,0 +1,166 @@
Instructions for running TIEGCM benchmarks on Cheyenne
======================================================

Introduction
------------

These instructions are intended to update and augment the standard
`TIEGCM Benchmark instructions <https://www.hao.ucar.edu/modeling/tgcm/tiegcm2.0/userguide/html/benchmarks.html>`_,
which are a bit dated and relate to the Yellowstone computer system. Using
repository tag TBD will get you the updates to the scripting system for
cheyenne.

In order to produce the graphics you will need to install the tgcmprocf90
code base, which utilizes NCAR graphics to produce plots. This needs to be
run on casper with a specific set of modules.

Benchmark runs
--------------

The TIEGCM benchmarks fall into three categories:

#. Full year climatologies (``run_climatology``)
#. Seasons (``run_seasons``)
#. Storms (``run_storms``)

For each category there is a tcsh script that creates run directories,
compiles the code, and submits the jobs at 5 and 2.5 degree resolutions.
These scripts, located in the ``benchmarks`` subdirectory of the code
distribution, use the ``tgcmrun`` program, which is written in Python 2, to
create the batch scripts.

Compiling and running the code on cheyenne has been tested with the following
set of modules:

.. code-block:: bash

   Currently Loaded Modules:
     1) git/2.22.0     5) ncarcompilers/0.5.0    9) mpt/2.25
     2) ncarenv/1.3    6) mkl/2021.2            10) esmf_libs/8.3.0
     3) cmake/3.14.4   7) conda/latest          11) esmf-8.3.0-ncdfio-mpt-O
     4) intel/2021.2   8) netcdf/4.9.0
Visualizing the results
-----------------------

As previously mentioned, to visualize the results we are currently utilizing
an older NCAR graphics package, so the work will need to be conducted on
casper using a specific module set and conda environment. We are working on
python utilities and will have those available shortly.

On casper you will need to have the following set of modules loaded:

.. code-block:: bash

   Currently Loaded Modules:
     1) ncarenv/1.2    6) conda/latest          11) esmf-8.3.0-ncdfio-mpiuni-O
     2) ffmpeg/4.1.3   7) intel/19.1.1          12) ncl/6.6.2
     3) git/2.22.0     8) ncarcompilers/0.5.0   13) nco/5.1.4
     4) netcdf/4.9.1   9) openmpi/4.1.1
     5) cmake/3.14.4  10) esmf_libs/8.3.0

Your conda environment can be created from this YAML file:

.. code-block:: yaml

   name: tgcmproc
   channels:
     - conda-forge
     - ncar
     - r
   dependencies:
     - _libgcc_mutex=0.1=conda_forge
     - _openmp_mutex=4.5=2_gnu
     - backports=1.0=pyhd8ed1ab_3
     - backports.functools_lru_cache=1.6.1=py_0
     - backports_abc=0.5=py_1
     - bzip2=1.0.8=h7f98852_4
     - c-ares=1.19.1=hd590300_0
     - ca-certificates=2023.5.7=hbcca054_0
     - certifi=2019.11.28=py27h8c360ce_1
     - cftime=1.1.1=py27h588f082_0
     - curl=7.87.0=h6312ad2_0
     - cycler=0.10.0=py_2
     - dbus=1.13.6=hfdff14a_1
     - enum34=1.1.10=py27h8c360ce_1
     - expat=2.5.0=hcb278e6_1
     - fontconfig=2.14.2=h14ed4e7_0
     - freetype=2.12.1=hca18f0e_1
     - functools32=3.2.3.2=py_3
     - futures=3.3.0=py27h8c360ce_1
     - gettext=0.21.1=h27087fc_0
     - glib=2.66.3=h58526e2_0
     - gst-plugins-base=1.14.5=h0935bb2_2
     - gstreamer=1.14.5=h36ae1b5_2
     - hdf4=4.2.15=h9772cbc_5
     - hdf5=1.10.5=nompi_h7c3c948_1111
     - icu=64.2=he1b5a44_1
     - jpeg=9e=h0b41bf4_3
     - keyutils=1.6.1=h166bdaf_0
     - kiwisolver=1.1.0=py27h9e3301b_1
     - krb5=1.20.1=hf9c8cef_0
     - ld_impl_linux-64=2.40=h41732ed_0
     - libblas=3.9.0=8_openblas
     - libcblas=3.9.0=8_openblas
     - libclang=9.0.1=default_hb4e5071_5
     - libcurl=7.87.0=h6312ad2_0
     - libedit=3.1.20191231=he28a2e2_2
     - libev=4.33=h516909a_1
     - libexpat=2.5.0=hcb278e6_1
     - libffi=3.2.1=he1b5a44_1007
     - libgcc-ng=12.2.0=h65d4601_19
     - libgfortran-ng=7.5.0=h14aa051_20
     - libgfortran4=7.5.0=h14aa051_20
     - libglib=2.66.3=hbe7bbb4_0
     - libgomp=12.2.0=h65d4601_19
     - libiconv=1.17=h166bdaf_0
     - liblapack=3.9.0=8_openblas
     - libllvm9=9.0.1=default_hc23dcda_7
     - libnetcdf=4.7.3=nompi_h9f9fd6a_101
     - libnghttp2=1.51.0=hdcd2b5c_0
     - libopenblas=0.3.12=pthreads_hb3c22a3_1
     - libpng=1.6.39=h753d276_0
     - libsqlite=3.42.0=h2797004_0
     - libssh2=1.10.0=haa6b8db_3
     - libstdcxx-ng=12.2.0=h46fd767_19
     - libuuid=2.38.1=h0b41bf4_0
     - libxcb=1.15=h0b41bf4_0
     - libxkbcommon=0.10.0=he1b5a44_0
     - libxml2=2.9.10=hee79883_0
     - libzlib=1.2.13=h166bdaf_4
     - matplotlib=2.2.5=ha770c72_3
     - matplotlib-base=2.2.5=py27h250f245_1
     - ncurses=6.3=h27087fc_1
     - netcdf4=1.5.3=nompi_py27hd35fb8e_102
     - nspr=4.35=h27087fc_0
     - nss=3.89=he45b914_0
     - numpy=1.16.5=py27h95a1406_0
     - openssl=1.1.1t=h0b41bf4_0
     - pcre=8.45=h9c3ff4c_0
     - pip=20.1.1=pyh9f0ad1d_0
     - pthread-stubs=0.4=h36c2ea0_1001
     - pyparsing=2.4.7=pyh9f0ad1d_0
     - pyqt=5.12.3=py27hcca6a23_1
     - python=2.7.15=h5a48372_1011_cpython
     - python-dateutil=2.8.1=py_0
     - python_abi=2.7=1_cp27mu
     - pytz=2020.1=pyh9f0ad1d_0
     - qt=5.12.5=hd8c4c69_1
     - readline=8.2=h8228510_1
     - scipy=1.2.1=py27h921218d_2
     - setuptools=44.0.0=py27_0
     - singledispatch=3.6.1=pyh44b312d_0
     - six=1.16.0=pyh6c4a22f_0
     - sqlite=3.42.0=h2c6b66d_0
     - subprocess32=3.5.4=py27h516909a_0
     - tk=8.6.12=h27826a3_0
     - tornado=5.1.1=py27h14c3975_1000
     - wheel=0.37.1=pyhd8ed1ab_0
     - xorg-libxau=1.0.11=hd590300_0
     - xorg-libxdmcp=1.1.3=h7f98852_0
     - xz=5.2.6=h166bdaf_0
     - zlib=1.2.13=h166bdaf_4
     - pip:
       - pyqt5-sip==4.19.18
       - pyqtwebengine==5.12.1
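Assuming the YAML above is saved as, e.g., ``tgcmproc.yml`` (the filename is
illustrative), the environment can be created and activated with:

.. code-block:: bash

   conda env create -f tgcmproc.yml
   conda activate tgcmproc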
69
docs/source/_obsolete/testing/unitTesting.rst
Normal file
@@ -0,0 +1,69 @@
Unit Testing
============

Overview
--------

Kaiju supports unit testing through the pFUnit library. In order to compile
and run the built-in tests, the user must add pFUnit to their kaiju
repository.

Getting pFUnit
--------------

On Cheyenne, there are pre-built binaries for pFUnit available at
``/glade/p/hao/msphere/gamshare/``. There are a few different versions of
pFUnit built with different versions of different compilers. For example, you
can copy the build of pFUnit-4.1.5 made with Intel Fortran 18 from the folder
``/glade/p/hao/msphere/gamshare/pFUnit-4.1.5/ifort-18/``.

Within a specific build folder you will find four subfolders named
``FARGPARSE-X.X``, ``GFTL-X.X``, ``GFTL_SHARED-X.X``, and ``PFUNIT-X.X``.
These four folders should either be copied or linked into your
``kaiju/external/`` folder. No additional setup is required; cmake will
automatically find pFUnit once these four folders are properly located in
your external folder (note that these four folders cannot be in a subfolder
beneath the external folder; they must be directly within the external folder
itself).

If you are not on Cheyenne, or cannot use these binaries for whatever reason,
there are instructions for how to :doc:`Build pFUnit <buildpFUnit>`.

Building the tests
------------------

Once pFUnit has been installed into the external folder of the repository,
the testing executables can be built with the cmake command
``make allTests``.

**Note**: Under Intel Fortran 21, the ``FC`` environment variable had to be
manually set to ``mpiifort`` in order for compilation to succeed.

Running the tests
-----------------

Once the test executables have been built, there are two submission scripts
in the ``kaiju/tests/`` folder that can be used to run them on Cheyenne. The
script ``runNonCaseTests.pbs`` runs a faster subset of tests which validate
specific pieces of the code. The script ``runCaseTests.pbs`` runs a slower
set of tests which run full simulations and then verify the final states.
These submission scripts should be submitted from the folder containing the
test executables, usually the ``build/bin`` folder.
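For example (the ``/path/to/kaiju`` prefix is a placeholder; adjust for your
checkout and build locations):

.. code-block:: bash

   # Submit both test sets from the folder containing the test executables.
   cd /path/to/kaiju/build/bin
   qsub /path/to/kaiju/tests/runNonCaseTests.pbs
   qsub /path/to/kaiju/tests/runCaseTests.pbs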
Verifying test results
----------------------

When the two jobs mentioned above are run, they create output files labeled
with the job ID. They will look something like ``nonCaseTests.o1234567`` and
``caseTests.o1234567`` respectively, but with different numbers. These files
contain summaries of all of the test executables that ran, and should contain
a series of lines that look like this:

.. code-block:: bash

   Gamera Tests Complete
   OK
   (10 tests)

If any of the tests don't look like this, it's an indication that some of the
tests have failed, and the more detailed output file for that executable
should be examined.
246
docs/source/_obsolete/tools/Pokeball.rst
Normal file
@@ -0,0 +1,246 @@
Project pokeball information
============================

Motivation
----------

We want to use containers to make a Kaiju source code distribution that
includes all dependencies and makes running the containerized Kaiju easy. We
furthermore want to host interactive tutorials using
`Jupyter Hub <https://jupyter.org/>`_.

General Setup
-------------

(1) There is a dedicated branch in bitbucket ``pokeball/clean``, any push to
which will trigger the build of a container.

(2) After the container is built, a series of tests could (!) be run.

(3) After the tests, the container image is pushed to a (private) registry
(i.e. a storage/distribution place for OCI-compliant images). The image is
tagged with two handles:

(a) the SHA of the build:

.. image:: https://bitbucket.org/repo/kMoBzBp/images/2637046703-Screenshot%202022-11-07%20at%2018.15.22.png
   :target: https://bitbucket.org/repo/kMoBzBp/images/2637046703-Screenshot%202022-11-07%20at%2018.15.22.png
   :alt: Screenshot 2022-11-07 at 18.15.22.png

(b) "latest" if it is the latest image.

(4) After the push, the image can be retrieved ("pulled") by anyone who has
the credentials. To make testing user-friendly, each push to bitbucket
currently triggers the deployment of the newest kaiju image into Jupyter Hub,
so that it can be tried out immediately afterwards without having to modify
kubernetes.

.. image:: https://bitbucket.org/repo/kMoBzBp/images/2354825168-Screenshot%202022-11-07%20at%2014.34.00.png
   :target: https://bitbucket.org/repo/kMoBzBp/images/2354825168-Screenshot%202022-11-07%20at%2014.34.00.png
   :alt: Screenshot 2022-11-07 at 14.34.00.png

.. image:: https://bitbucket.org/repo/kMoBzBp/images/3615671085-Screenshot%202022-11-07%20at%2014.46.27.png
   :target: https://bitbucket.org/repo/kMoBzBp/images/3615671085-Screenshot%202022-11-07%20at%2014.46.27.png
   :alt: Screenshot 2022-11-07 at 14.46.27.png
Jupyter documentation
---------------------

`Documentation <https://docs.jupyter.org/en/latest/>`_ of the Jupyter project
with all the components.

`Introduction to Jupyter lab <https://nocomplexity.com/documents/jupyterlab/intro.html>`_:
skip the installation; useful, though not everything may work exactly the
same on our notebooks.

`Python Notebook Introduction <https://realpython.com/jupyter-notebook-introduction/>`_:
you can skip the installation; the notebook is run from within *jupyter lab*.

``ulimit -s``
-----------------

In the terminal, ``ulimit -s`` will now automatically be ``unlimited``; for
the notebook this is not applied. We are still looking for a way to set it
automatically, but what can be done is to put

.. code-block:: python

   import resource
   resource.setrlimit(resource.RLIMIT_STACK, (-1, -1))

in the top cell; then a ``!ulimit -s`` in a following cell, or execution with
the ``%%bash`` magic, will report ``unlimited`` as well.
Edit a file in jupyter
----------------------

Jupyter-lab has a file editor. It also provides a multitude of syntax
highlighting options (View -> Text Editor Syntax Highlighting).

File browser
~~~~~~~~~~~~

The file browser will not allow you to navigate *above* the root directory
that it starts in. In our case this is ``/home/jovyan/``. As a result it is
not possible to navigate to the ``/app/`` folder where the source is
currently stored.

It is still possible to edit files in jupyter with the magic commands:

Edit via magic commands
~~~~~~~~~~~~~~~~~~~~~~~

In a notebook type

.. code-block:: python

   %open /path/to/file

and

.. code-block:: python

   %save /path/to/file

In the case of the ``/app/`` folder this will not help you much, as nobody
has the rights to write there.

Path
~~~~

If the kaiju *home* directory is moved, you can use

.. code-block:: bash

   export KAIJUHOME=/path/to/new/dir
   source /app/source_kaiju

to update the path with the following:

.. code-block:: bash

   export PATH="${KAIJUHOME}/scripts/datamodel:${PATH}"
   export PATH="${KAIJUHOME}/scripts/preproc:${PATH}"
   export PATH="${KAIJUHOME}/scripts/postproc:${PATH}"
   export PATH="${KAIJUHOME}/scripts/OHelio:${PATH}"
   export PATH="${KAIJUHOME}/scripts/quicklook:${PATH}"
   export PATH="${KAIJUHOME}/scripts/legacy:${PATH}"
   export PYTHONPATH="${KAIJUHOME}:${PYTHONPATH}"

Storage
~~~~~~~

You have three directories that are for different usage:

``~/myhome/<yourid>`` is persistent storage that should be used like a home
directory on an HPC cluster.

``~/group_shares/jupyter_research_pokeball_student`` is a share that all of
you have. You can exchange files here. If you want a folder that only you can
delete, set the sticky bit: ``chmod 1750 others_can_not_delete_me``.

``~/work`` is your scratch. It will exist for some time, but not forever; it
currently has 10 GB.
Intel version
-------------

Currently the package version ``2022.2.1`` is installed for mkl, python and
the compilers. MPI and the dev tools use ``2021.7.1``. Note that the version
of the compiler will display ``2021.7.1``.

LFortran for interactive fortran
--------------------------------

LFortran is added to the image; you will see a Fortran icon next to the
Python icon in *Notebooks*. Be aware that ``LFortran`` is not like normal
Fortran in terms of scope; see
https://stackoverflow.com/questions/70825597/lfortran-in-a-jupyter-notebook-kills-kernel

Plotting in notebook
--------------------

As there is currently something not working with the interactive combination
of the usual ``ipywidgets`` and ``matplotlib``, I looked up how to do
interactive plots with ``plotly``, and it works reasonably well. An example
can be found in
`Plotly_examples.ipynb <https://bitbucket.org/aplkaiju/kaiju/src/2ff1f3321aa3c0c58e4b5012d26bfcda309c5951/Plotly_examples.ipynb?at=pokeball%2Fclean&viewer=nbviewer>`_.
FYI: as the plotly output is ``javascript``, it cannot be previewed in the
bitbucket ``nbviewer``; the notebook is therefore slightly prolonged in the
preview.

Importantly, at the beginning, or at least before the first plot, jupyter
must be told that plots should be displayed in the notebook. This is done
with ``%matplotlib inline``.
ParaView and HDF5
-----------------

The analysis notebook was extended with two new features:

* HDF5 viewer
* ParaView Kernel

Note: The HDF5 viewer is also in the "normal" notebook.

**Note**: Extensions need to be activated to properly work. Fourth symbol on
the far left.

HDF5 viewer
~~~~~~~~~~~

With `Jupyterlab-h5web <https://github.com/silx-kit/jupyterlab-h5web>`_,
based on the
`H5Web: React components for data visualization and exploration <https://h5web.panosc.eu/>`_
framework, it is possible to view hdf5 files in various ways. Depending on
the size of the file this might take a while.

ParaView Kernel
~~~~~~~~~~~~~~~

Kitware recently developed a
`Jupyter ParaView Kernel <https://gitlab.kitware.com/paraview/iparaview-kernel>`_
that allows interactive access to a ParaView server from within jupyter.
Unfortunately, the list of requirements is not complete. The kernel uses
ParaView itself for building, and there are additional dependencies; see the
Dockerfile for the information.

In the background a ParaView server is started (currently >5.11.0 with QT)
and one can interact with it.
`ParaView Python <https://kitware.github.io/paraview-docs/latest/python/>`_
can also be used to interact with ParaView. A sample notebook is in the group
share
(``~/group_shares/jupyter_research_pokeball_student/ParaviewTest.ipynb``).

When opening a notebook from storage it might not have the correct kernel
attached. Make sure that the kernel in the right top corner says
``IParaView Kernel``:

.. image:: https://bitbucket.org/repo/kMoBzBp/images/3536368658-Bildschirmfoto%20vom%202023-02-14%2011-43-58.png
   :target: https://bitbucket.org/repo/kMoBzBp/images/3536368658-Bildschirmfoto%20vom%202023-02-14%2011-43-58.png
   :alt: Bildschirmfoto vom 2023-02-14 11-43-58.png

Depending on the setup there might be an error with:

.. code-block:: bash

   QStandardPaths: XDG_RUNTIME_DIR not set, defaulting to '/tmp/runtime-'

This does not prevent work, as a default is set, but it is not nice to see.
Nevertheless, in the ``Dockerfile_analysis_jupyter`` file this is now set to

.. code-block:: docker

   ENV XDG_RUNTIME_DIR="/tmp/runtime-${NB_USER}"

where

.. code-block:: docker

   ARG NB_USER="jovyan"
70
docs/source/_obsolete/tools/Sublime_Text.rst
Normal file
@@ -0,0 +1,70 @@
Sublime Text
============

This is info on using Sublime Text for development. There is a sublime
project file in the kaiju root directory which you can open with Sublime
Text. When using that project file you'll be able to hover over symbols to
jump to function/type definitions, which is very useful.

You should install "Package Control" into sublime:
https://packagecontrol.io/installation

From there it's easy to install new packages. These are the packages I use:

.. code-block:: json

   "installed_packages":
   [
       "A File Icon",
       "Agila Theme",
       "Alignment",
       "Anaconda",
       "ayu",
       "CMakeEditor",
       "Dracula Color Scheme",
       "Fortran",
       "Git",
       "GitGutter",
       "LaTeXBox",
       "LaTeXTools",
       "MarkdownEditing",
       "Material Monokai",
       "Material Theme",
       "Monokai - Spacegray",
       "Package Control",
       "SideBarEnhancements",
       "Theme - Centurion",
       "Theme - Cyanide",
       "Theme - Darkmatter",
       "Theme - Soda",
       "Theme - Soda SolarizedDark"
   ]

This is a different Fortran highlighter:
`Fortran (Post-modern) <https://github.com/UCLA-Plasma-Simulation-Group/tools_sublimetext_fortran>`_

It takes a few extra minutes to install compared to the other Fortran
highlighter, but this one is much better about understanding modern Fortran.
If you install this and are editing with the kaiju project file, you can
hover over not just functions but also types, and have it pull up where they
are defined.

This is what I use for my settings:

.. code-block:: json

   {
       "color_scheme": "Packages/Theme - Cyanide/Monocyanide.tmTheme",
       "font_face": "Inconsolata for Powerline Medium",
       "font_size": 22,
       "hot_exit": false,
       "ignored_packages":
       [
           "Markdown",
           "Vintage"
       ],
       "remember_open_files": false,
       "theme": "Adaptive.sublime-theme"
   }

Inconsolata is the font I use.
165
docs/source/_obsolete/tools/debugging.rst
Normal file
@@ -0,0 +1,165 @@
Debugging
=========

Compile in debug mode
---------------------

These instructions are specific to debugging on Cheyenne.

The first step of debugging is to compile the code in debug mode. For GAMERA
or Voltron, add the debug flag to cmake when compiling, e.g.:

.. code-block:: bash

   mkdir build
   cd build
   cmake -DENABLE_MPI=ON -DENABLE_MKL=OFF -DCMAKE_BUILD_TYPE=DEBUG ..
   make voltron_mpi.x

For TIEGCM, turn the debug flag to TRUE in the job file, e.g.:

.. code-block:: bash

   set debug=TRUE

Detailed explanation:

Ensure that gamera (or whichever application you're debugging) has been built
with debugging information included. For gamera, this can be done by setting
the cmake option ``CMAKE_BUILD_TYPE`` to either ``RELWITHDEBINFO`` or
``DEBUG``. ``RELWITHDEBINFO`` builds the standard optimized version of
gamera, but includes some debugging information. Not all debugging
information can be included in the ``RELWITHDEBINFO`` version, so if you find
that you can't get all of the information that you want out of Allinea, build
with ``DEBUG`` instead. This is an unoptimized version of the application
with full debugging information.
Launch Allinea
--------------

Load the arm-forge module. As of 2022, the default version is
``arm-forge/20.2``.

.. code-block:: bash

   module load arm-forge

Load ``arm-forge/20.2`` when compiling gamera with Intel 21 compilers and
``arm-forge/19.1`` for older versions of Intel. Also remember to load the
same modules when launching the debugging job.

Launch the Allinea debugger with the command ``ddt``. If the test case is
multi-threaded or uses multiple MPI processes, it is recommended to run in an
interactive job. Select the "Run" option, and specify appropriate settings
for the application, working directory, MPI, OpenMP, etc.
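A sketch of requesting an interactive job and launching DDT from within it
(the queue name, account string, and resource selection are placeholders;
adjust for your allocation):

.. code-block:: bash

   # Request an interactive PBS job, then start the DDT GUI inside it.
   qsub -I -q <queue> -A <account> -l select=1:ncpus=36,walltime=01:00:00
   module load arm-forge
   ddt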
.. image:: https://bitbucket.org/repo/kMoBzBp/images/1029093052-allineaDDT.PNG
   :target: https://bitbucket.org/repo/kMoBzBp/images/1029093052-allineaDDT.PNG
   :alt: Allinea DDT

To use OpenMP, simply check the "OpenMP" box and specify how many OpenMP
threads you want to use.

.. image:: https://bitbucket.org/repo/kMoBzBp/images/576706594-ddtopenmp.PNG
   :target: https://bitbucket.org/repo/kMoBzBp/images/576706594-ddtopenmp.PNG
   :alt: ddtopenmp

To use MPI, check the "MPI" box and specify how many MPI processes you want
to use. If this is gamera built with the recommended configuration, it should
be set to use "Intel MPI (MPMD)", and there should be no additional arguments
to mpiexec.hydra.

.. image:: https://bitbucket.org/repo/kMoBzBp/images/1369899180-allineampi.PNG
   :target: https://bitbucket.org/repo/kMoBzBp/images/1369899180-allineampi.PNG
   :alt: allineampi

Once it is configured, click "Run" to start debugging. For more information
about using DDT, such as setting breakpoints and checking the values of
variables and arrays, there are guides available online:
`DDT Guide <https://developer.arm.com/docs/101136/latest/ddt>`_.
Attaching DDT to batch jobs
---------------------------

For particularly complicated or processor-intensive cases, it is possible to
submit a job normally through the qsub submission system, and have DDT attach
to it once the job has started to run. This is usually as simple as changing
the application command inside the submission script to have
``ddt --connect`` before it.

For example, if you are submitting a job which has the command:

.. code-block:: bash

   mpirun gamera.x

Change it to be the command:

.. code-block:: bash

   ddt --connect mpirun gamera.x

Before you submit this job (or at least before the job begins to run after
being in the queue), open DDT and let it sit at the main screen. Once the job
begins, a connection request dialog will appear in the DDT GUI, and accepting
it will begin the debugging session.
Running DDT With a Remote Client
--------------------------------

It is strongly recommended that you run DDT with the GUI local to you, no
matter where the software being debugged happens to be. This is called using
a Remote Client. The performance of the DDT GUI over X11 is extremely poor,
making the entire process slow and frustrating, and rendering some options
(such as plotting data) impossible to use.

Setting up DDT to use a Remote Client is straightforward, and an example will
be given here to run the GUI locally while debugging a job on cheyenne.

First, you must locally download the EXACT same version of the Remote Client
DDT software as will be used remotely for debugging. So you must download
either the 20.2 or the 19.1 version of the Remote Client. In the examples
above, we are using arm-forge 19.1. The downloads page
`here <https://developer.arm.com/tools-and-software/server-and-hpc/downloads/arm-forge/older-versions-of-remote-client-for-arm-forge>`_
has all previous versions of the Remote Client for multiple operating
systems. You can download either version for your machine from that page.

Once you have downloaded the Remote Client, it requires configuration to be
used with Cheyenne. The official documentation for this process can be found
`here <https://developer.arm.com/documentation/101136/2010/Arm-Forge/Connecting-to-a-remote-system>`_.

In order to connect specifically to cheyenne, click on the "Remote Launch"
dropdown box, and then on "Configure...". Click on the "Add" button to create
a new configuration, and then set it up to look like this, inserting your own
username in the second box.

.. image:: https://bitbucket.org/repo/kMoBzBp/images/20146052-CheyenneRemote.PNG
   :target: https://bitbucket.org/repo/kMoBzBp/images/20146052-CheyenneRemote.PNG
   :alt: cheyenneremote

Once you've completed this, you can select "Cheyenne" at any time from the
"Remote Launch" dropdown on the DDT Remote Client main page, and it will
connect to Cheyenne, asking you to authenticate. Once it has connected, you
launch your software using the ``ddt --connect`` option as described above,
and the connection request dialog will automatically appear in your local DDT
Remote Client, where you can perform the entire debugging process.

The rest of the debugging process is exactly the same as if the client were
not on your local machine. You can perform all functions, including adding
breakpoints and watchpoints, examining variables, restarting sessions, etc.
56
docs/source/_obsolete/tools/globusEndpoint.rst
Normal file
@@ -0,0 +1,56 @@
Setting up and sharing NCAR GLADE data via a Globus Collection
==============================================================

Users with accounts on NCAR's HPC systems can now share data with other users
via Globus. The other users will not need an NCAR account, but depending on
the permissions you select for sharing, they may need to set up a free Globus
account to access the data.

First, a bit of terminology. Users interact with collections, and a *mapped
collection* is a set of files hosted at an endpoint. The NCAR GLADE
filesystem is a mapped collection. Users can create a guest collection from a
mapped collection with distinct sharing permissions.

You will need to authenticate to the *NCAR mapped collections* via
`Globus.org <https://app.globus.org/>`_ with your Globus ID. Using the File
Manager option, you will need to search for the NCAR GLADE collection. Choose
the one with the description for GridFTP access to the GLADE filesystem using
UCAS token authentication. You will need to log in with your NCAR ID and Duo
token. If you want to share data from the NCAR campaign storage system, you
need to use the File Manager to search for NCAR Campaign Storage. Remember
that the campaign storage system is not accessible from cheyenne; data must
be transferred to it via casper.

It is highly recommended to create bookmarks of the NCAR GLADE and NCAR
Campaign collections, as they will be the starting point for creating any
guest collection to share data with other users.

.. image:: https://bitbucket.org/repo/kMoBzBp/images/551980672-GlobusBookmarks.png
   :target: https://bitbucket.org/repo/kMoBzBp/images/551980672-GlobusBookmarks.png
   :alt: GlobusBookmarks.png

To share a directory with other Globus users, first create the directory on
the NCAR filesystem you wish to share. Via the Globus website, use your
bookmarks to navigate to the directory you just created and then select the
share option to create a new guest collection. The image below shows how to
share the directory
/glade/campaign/hao/msphere/wiltbemj/ExampleShare as a guest collection.

.. image:: https://bitbucket.org/repo/kMoBzBp/images/2261017910-GlobusShare.png
   :target: https://bitbucket.org/repo/kMoBzBp/images/2261017910-GlobusShare.png
   :alt: GlobusShare.png

Once you click share, you will need to choose the option for creating a new
guest collection, and then provide the required display name and metadata. If
the data is related to a publication, you can provide the DOI in the
information link available in the *view more fields* option on the web page.

Once you create the collection, you will have the option to add permissions
for sharing the data with other users. There are currently four levels of
sharing: specific users, groups, all Globus users, and public. The web site
will then provide a
`link <https://app.globus.org/file-manager?origin_id=92ac5357-f5e6-4b01-bbf4-b1bb8c06af1f&origin_path=%2F>`_
that you can use for sharing with other users.
10
docs/source/_obsolete/tools/index.rst
Normal file
@@ -0,0 +1,10 @@
MAGE Tools
==========

.. toctree::
   :maxdepth: 1

   debugging
   globusEndpoint
   Pokeball
   Sublime_Text
71
docs/source/_obsolete/user_rules/contributingGuide.rst
Normal file
@@ -0,0 +1,71 @@
Contributing Guide
==================

Introduction
------------

This guide assumes that you have already checked out a local copy of our git
repository using the other pages in our wiki. If not, please check those
pages first.

This guide will talk about concepts such as pull requests and branching.
Details about them are beyond the scope of this guide, so for additional
information please refer to resources such as these:

* `Pull Request Tutorial <https://support.atlassian.com/bitbucket-cloud/docs/tutorial-learn-about-bitbucket-pull-requests/>`_
* `Making A Pull Request <https://www.atlassian.com/git/tutorials/making-a-pull-request>`_
* `Git Branching Overview <https://www.atlassian.com/git/tutorials/learn-branching-with-bitbucket-cloud>`_

Branch Setup
------------

In our repository we have designated the branch named "master" as both the
"Main" branch and the "Production" branch.

Because it is the "Main" branch, whenever anyone downloads our repository,
this is the default branch that they will see and use.

Because it is the "Production" branch, we only merge new changes and features
into it periodically, once we are confident that these new features are
stable and correct.

We have also designated the branch named "development" as the "Development"
branch. This branch is where new and experimental features should be added so
that they can be tested thoroughly.

Periodically, the "development" branch gets pulled into the "master" branch
and marks a new release of the code. This way anyone using the code does not
accidentally check out an unstable or untested feature (but if they want to,
they can use the "development" branch and all of its latest features).

Branch Access
-------------

The "development" and "master" branches are locked from direct access, so no
one is allowed to push any changes directly to either branch. All changes are
required to be done by pull requests (explained below). This ensures that all
code added to the "development" branch has some level of review, and prevents
anyone from accidentally pushing changes directly to the production "master"
branch.

Contributing Code
-----------------

Whenever someone has a new feature, bugfix, or anything else that they want
to add to the repository, they begin by making a new branch off of the
"development" branch, as sketched below. We recommend naming the new branch
something descriptive about the work being added, but it can be named
whatever you like. This new branch **must** come off of the "development"
branch, because that is where new work gets merged into the code base.
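A minimal sketch of that step (the branch name is illustrative):

.. code-block:: bash

   # Start from an up-to-date development branch.
   git checkout development
   git pull

   # Create a descriptively named feature branch and publish it.
   git checkout -b feature/myNewFeature
   git push -u origin feature/myNewFeature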
Once the new feature has been completed (and **tested**\ ), you can submit a
pull request through the bitbucket website so that this feature branch can be
merged into the "development" branch. One or more developers or other
appropriate experts will review the changes in the branch, and then either
approve the pull request (at which point the branch is merged into
"development" and the code is now part of the repository), or reject the pull
request with an explanation of what additional changes may be needed in the
feature branch.

A list of recommended reviewers is available
:doc:`here <recommendedReviewers>`.
65
docs/source/_obsolete/user_rules/developmentRoadmap.rst
Normal file
@@ -0,0 +1,65 @@
Development Roadmap
===================

Major Efforts
-------------

* Improve multifluid support
* Improve timing & profiling (timing info for all ranks, perhaps use Allinea)
* Incorporate Doxygen and link to wiki
* CHIMP unit tests - particle pushing in analytic/numerical dipole,
  invariant conservation, etc.
* Reorg/rewrite kaipy (better organization, and better speed of pipeline)
* RCM re-write to be object-oriented (Model/Grid/State), add unit tests,
  and support thread and MPI parallelism
* Grid optimization (use grid generation tools to improve default LFM grid?)
* GPU implementation
* K-decomposition of MHD solver
* Handle rotation axis by adding corotation potential in remix, and letting
  it tell gamera/rcm

Minor Efforts
-------------

* Hall MHD (pull code from old omega repo)
* Create "default" configs to be used with XMLGenerator to create XMLs
  just-in-time
* Add simple reproducibility test/100 yard dash test
* Add auto-generation of allinea performance report data/auto-link to wiki
  page
* Use Allinea tools to do an extensive study of code
  performance/bottlenecks; investigate limitations on vectorization and
  OMP/MPI scaling
* Reorg wiki for application instead of serial/MPI, and assign
  responsibilities
* Auto-generation of spacecraft trajectory/MHD comparisons, pyspedas +
  sctrack.x
* Auto-generation of spacecraft trajectory/RCM comparisons
* Auto-generation of ampere/remix comparisons
* Auto-generate comparisons of sim pressure vs. empirical (TS/SST) pressure
* Handle remix/conductance mirror ratio based on Rin

Completed Efforts
-----------------

* MPI implementation for "vanilla" MHD
* MPI implementation for coupled Gamera/ReMIX (Voltron)
* Coupling implementation for Voltron "deep coupling", field-line tracing
  and heating for empirical/RCM pressure ingestion
* Make decomposed (MPI) face-centered data have a single master value at
  boundaries
* Asymmetric MPI transfers for Voltron shallow and deep coupling
* Reduce MPI transfers of unused regions of necessary variables
* Rename types module to gamtypes for consistency
* Handle units/msphutils globals better
* Added support for Intel Fortran 19 & 21
* Upgraded to Fortran 2008 MPI interface
* CMake support added for multiple compilers and MPI libraries
* Git-LFS to store binary data in repo
* Add unit testing framework for python (similar to pFUnit for Fortran)
* Add intel mem/thread-checker autotest
10
docs/source/_obsolete/user_rules/index.rst
Normal file
@@ -0,0 +1,10 @@
User Rules
==========

.. toctree::
   :maxdepth: 1

   contributingGuide
   developmentRoadmap
   recommendedReviewers
   wikiContributing
51
docs/source/_obsolete/user_rules/recommendedReviewers.rst
Normal file
@@ -0,0 +1,51 @@
Recommended reviewers for topics/sections of the code
=====================================================

Voltron
-------

* Anthony
* Adam

Helio
-----

Gamera/MHD
----------

* Kareem
* Mike Wiltberger

Chimp
-----

* Adam

Tiegcm/GTR
----------

* Kevin

ReMIX
-----

* Kevin

RCM
---

* Anthony

MPI
---

* Jeffrey Garretson

OpenMP
------

* Jeffrey Garretson

General Code
------------

* Jeffrey Garretson
* Kareem Sorathia
* Mike Wiltberger

Python
------

* Mike Wiltberger
* Jeffrey Garretson
20
docs/source/_obsolete/user_rules/wikiContributing.rst
Normal file
@@ -0,0 +1,20 @@
Wiki Contributing Guide
=======================

Introduction
------------

Thank you for investing your time in contributing to our
`Kaiju project <https://bitbucket.org/aplkaiju/kaiju>`_!

Documentation
-------------

This documentation uses the
`Sphinx system <https://www.sphinx-doc.org/en/master/>`_ and the
`RST syntax <https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html>`_.

**NOTE**: We have adopted the convention that all file and directory names
will use `camelCase <https://en.wikipedia.org/wiki/Camel_case>`_.

Have fun!
BIN
docs/source/_static/MAGE_Logo_final_dark-bg_vertical.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 125 KiB
10
docs/source/_static/css/sidebar_theme.css
Normal file
@@ -0,0 +1,10 @@

/* Optional: Gradient Background for Sidebar */
.wy-side-nav-search {
    background: linear-gradient(180deg, #2980b9, #343131);
}

.wy-side-nav-search .wy-dropdown > a img.logo,
.wy-side-nav-search > a img.logo {
    max-width: 60% !important; /* Adjust max width as needed */
}
85
docs/source/building/buildAitken_GR.rst
Normal file
@@ -0,0 +1,85 @@
Building the ``kaiju`` software on ``aitken`` for MAGE - Without TIEGCM (GR)
|
||||
==============================================================================================
|
||||
|
||||
Introduction
|
||||
------------
|
||||
|
||||
This page provides instructions for building the ``kaiju`` software on the
|
||||
``aitken`` supercomputer. These instructions assume that you have cloned the
|
||||
``kaiju`` repository.
|
||||
|
||||
Prepare your software environment
|
||||
---------------------------------
|
||||
|
||||
Like most HPC systems, ``aitken`` uses the ``module`` system to manage the
|
||||
versions of software packages available to the user. When you log in to
|
||||
``aitken``, no modules are loaded by default:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
module list
|
||||
No Modulefiles Currently Loaded.
|
||||
|
||||
Start by purging any currently-loaded modules, then loading the following
|
||||
module set:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
module --force purge
|
||||
|
||||
module use -a /swbuild/analytix/tools/modulefiles
|
||||
module load nas
|
||||
module load pkgsrc/2022Q1-rome
|
||||
module load comp-intel/2020.4.304
|
||||
module load mpi-hpe/mpt.2.23
|
||||
module load hdf5/1.8.18_mpt
|
||||
module load miniconda3/v4
|
||||
|
||||
.. important::
|
||||
|
||||
You must use these exact versions of the modules to ensure the software
|
||||
compiles properly. If you use different versions of any of these modules,
|
||||
a successful build cannot be guaranteed. This module list is current as of
|
||||
**11 April 2025**, and is subject to change as the compute environment
|
||||
changes.
|
||||
|
||||
Build the ``kaiju`` software
|
||||
----------------------------
|
||||
|
||||
These instructions show how to build the MPI version of the ``kaiju``
|
||||
software. The MPI version is built in the subdirectory ``build_mpi``
|
||||
under the ``kaiju`` source code directory. In practice, you can place the
|
||||
build directory in any convenient location.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
# Move to your kaiju clone.
|
||||
cd /path/to/kaiju
|
||||
|
||||
# Create the build directory and enter it.
|
||||
mkdir build_mpi
|
||||
cd build_mpi
|
||||
|
||||
# Run cmake to create the Makefile, saving output.
|
||||
# NOTE: The FC definition is *required* for proper cmake operation.
|
||||
FC=`which ifort` cmake -DENABLE_MPI=ON .. >& cmake.out
|
||||
|
||||
# You can pick one compile target below or compile all of them, if you'd like
|
||||
|
||||
# Compile the MAGE model for geospace simulations
|
||||
make -j4 voltron_mpi.x >& make-voltron.out
|
||||
|
||||
# Compile the GAMERA-helio model for inner heliosphere simulations
|
||||
make -j4 gamhelio_mpi.x >& make-gamhelio.out
|
||||
|
||||
# Compile analysis tools
|
||||
make -j4 calcdb.x chop.x sctrack.x slice.x >& make-analysis.out
|
||||
|
||||
|
||||
When finished, your build directory will contain a ``bin``
|
||||
subdirectory which will contain the compiled ``kaiju`` executables.
|
||||
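
As a quick sanity check, you can list the ``bin`` directory. The exact set of
executables depends on which targets you built; the sketch below assumes all
of the targets above were compiled:

.. code-block:: bash

   # List the compiled executables (assumes all of the targets above).
   ls bin/
   # calcdb.x  chop.x  gamhelio_mpi.x  sctrack.x  slice.x  voltron_mpi.x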

.. note:: Documentation on the analysis tools is found
   :doc:`here </tools/index>`.
396
docs/source/building/buildAitken_GTR.rst
Normal file
@@ -0,0 +1,396 @@
Building the ``kaiju`` software on ``aitken`` for MAGE - With TIEGCM (GTR)
==========================================================================

Introduction
------------

This page provides instructions for building the ``kaiju`` software on the
``aitken`` supercomputer. These instructions assume that you have cloned the
``kaiju`` repository.

Prepare your software environment
---------------------------------

Like most HPC systems, ``aitken`` uses the ``module`` system to manage the
versions of software packages available to the user. When you log in to
``aitken``, no modules are loaded by default:

.. code-block:: bash

   module list
   No Modulefiles Currently Loaded.

Start by purging any currently-loaded modules, then loading the following
module set for MAGE runs coupled with TIEGCM (known as "GTR"):

.. warning::

   GTR currently requires custom-built NetCDF and ESMF modules on ``aitken``.
   If you need to run GTR, you will need access to ``/home7/nrao3/local_aitken``
   and ``/nobackup/nrao3/tiegcm/tiegcm3.0/data``. To request access, please
   reach out to ``nikhilr@ucar.edu`` with the following:

   - Your ``aitken`` username
   - Your name
   - Your institution

.. code-block:: bash

   module --force purge

   module use -a /nasa/modulefiles/testing
   module use -a /swbuild/analytix/tools/modulefiles
   module load nas
   module load comp-intel/2020.4.304
   module load mpi-hpe/mpt.2.30
   module load szip/2.1.1
   module load hdf5/1.12.3_mpt
   module load miniconda3/v4

   export FC=mpif90
   export CC=mpicc
   export CXX=mpicxx

   export PREFIX=/home7/nrao3/local_aitken
   export LIBRARY_PATH=${LIBRARY_PATH}:$PREFIX/lib
   export LD_LIBRARY_PATH=$LIBRARY_PATH
   export CPATH=$PREFIX/include
   export PATH=${PATH}:$PREFIX/bin

.. important::

   You must use these exact versions of the modules to ensure the software
   compiles properly. If you use different versions of any of these modules,
   a successful build cannot be guaranteed. This module list is current as of
   **11 April 2025**, and is subject to change as the compute environment
   changes.

Build the ``kaiju`` software
----------------------------

These instructions show how to build the MPI version of the ``kaiju``
software. The GTR version is built in the subdirectory ``build_gtr``
under the ``kaiju`` source code directory. In practice, you can place the
build directory in any convenient location.

.. code-block:: bash

   # Move to your kaiju clone.
   cd /path/to/kaiju

   # Create the GTR build directory and enter it.
   mkdir build_gtr
   cd build_gtr

   # Run cmake to create the Makefile, saving output.
   # NOTE: The FC definition is *required* for proper cmake operation.
   FC=`which ifort` cmake -DENABLE_MPI=ON .. >& cmake.out

   # You can pick one compile target below, or compile all of them if you'd like.

   # Compile the MAGE model for geospace simulations.
   make -j4 voltron_mpi.x >& make-voltron.out

   # Compile the GAMERA-helio model for inner heliosphere simulations.
   make -j4 gamhelio_mpi.x >& make-gamhelio.out

   # Compile the analysis tools.
   make -j4 calcdb.x chop.x sctrack.x slice.x >& make-analysis.out

.. warning::

   ``aitken`` occasionally has trouble loading the ``hdf5/1.12.3_mpt`` module.
   If you aren't able to build ``kaiju`` with the above module set, unload
   the ``hdf5/1.12.3_mpt`` module and load it again:

   .. code-block:: bash

      module unload hdf5/1.12.3_mpt
      module load hdf5/1.12.3_mpt

When finished, your build directory will contain a ``bin``
subdirectory holding the compiled ``kaiju`` executables.

Build the ``tiegcm`` software
-----------------------------

`TIEGCM <https://tiegcm-docs.readthedocs.io/>`_ is a comprehensive, first-principles, three-dimensional,
non-linear representation of the coupled thermosphere and ionosphere system that includes a self-consistent solution
of the middle and low-latitude dynamo field.

Getting the TIE-GCM source code
*******************************

The ``TIE-GCM`` source code can be obtained by cloning the ``TIE-GCM`` repository
on GitHub:

.. code-block:: bash

   git clone https://github.com/NCAR/tiegcm.git

Setting environment variables
*****************************

Set the ``TIEGCMHOME`` and ``TIEGCMDATA`` paths.
For example:

.. code-block:: bash

   export TIEGCMHOME=/path/to/your/tiegcm
   export TIEGCMDATA=/path/to/your/tiegcm/data

.. note::

   The ``TIEGCMHOME`` and ``TIEGCMDATA`` environment variables are required
   for running the GTR model. They should point to the TIEGCM source code
   directory and the TIEGCM data directory, respectively.

The TIEGCMDATA directory can be found in the following locations:

- On ``derecho``: ``/glade/campaign/hao/itmodel/tiegcm3.0/new_data``
- On ``aitken``: ``/nobackup/nrao3/tiegcm/tiegcm3.0/data``
- Alternatively, the required data files can be downloaded from the NCAR Globus endpoint using the following link: `TIEGCM Data Files <https://app.globus.org/file-manager?origin_id=b2502c58-c3eb-470f-86d4-cbdcd0aeb6c8&origin_path=%2F>`_
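
For example, on ``aitken`` you might set (a sketch; the clone location under
your home directory is an assumption):

.. code-block:: bash

   # Hypothetical clone location; adjust to wherever you cloned TIE-GCM.
   export TIEGCMHOME=$HOME/tiegcm
   # Shared TIEGCM data directory on aitken (access required; see the warning above).
   export TIEGCMDATA=/nobackup/nrao3/tiegcm/tiegcm3.0/data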

Resolution guide for TIEGCM
***************************

Two TIEGCM executables are required for running the GTR model:

- TIEGCM Standalone
    This is the TIEGCM code that runs independently and is used for initialization of the model.
- TIEGCM Coupled
    This is the TIEGCM code that runs in coupled mode with the GR model, providing
    real-time updates to the thermosphere and ionosphere conditions during the simulation.

Depending on the Gamera resolution, you will need to compile TIEGCM
executables at different resolutions:

- For a ``D`` run

  - TIEGCM Standalone: horires = 2.5, vertres = 0.25 (1/4), mres = 2
  - TIEGCM Coupled: horires = 2.5, vertres = 0.25 (1/4), mres = 2

- For a ``Q`` run

  - TIEGCM Standalone: horires = 2.5, vertres = 0.25 (1/4), mres = 2
  - TIEGCM Coupled: horires = 1.25, vertres = 0.125 (1/8), mres = 1

- For an ``O`` run

  - TIEGCM Standalone: horires = 1.25, vertres = 0.125 (1/8), mres = 1
  - TIEGCM Coupled: horires = 0.625, vertres = 0.0625 (1/16), mres = 0.5

The TIEGCM code is built using the ``tiegcmrun`` script, which is provided in
the ``tiegcm/tiegcmrun`` directory of the ``tiegcm`` code repository. More
information on ``tiegcmrun.py`` can be found
in the `TIEGCM Quick Start Guide <https://tiegcm-docs.readthedocs.io/en/latest/tiegcm/quickstart.html>`_.

.. important::

   Make sure to load the modules listed in the ``kaiju`` build instructions
   before running the ``tiegcmrun`` script.

Build guide for TIEGCM code for a ``Q`` run
###########################################

We will use the ``tiegcmrun`` script to build the code, which requires minimal
input from the user. At each prompt, you can either type in a value, or hit
the :kbd:`Return` key to accept the default value (shown in square brackets at
the end of the prompt).

1. First we will create a directory for the "Q" TIEGCM build in the
   TIEGCMHOME directory.

   .. code-block:: bash

      cd $TIEGCMHOME
      mkdir tiegcm_build_Q
      cd tiegcm_build_Q

2. Next, we will build the standalone TIEGCM executable by running the
   ``tiegcmrun.py`` script with the ``-oc`` option.

   .. note::

      The ``-oc`` option stands for "only compile", which means that the script will only compile the code and not run it.
      Since the Gamera resolution is ``Q``, we will set the following:

      - horizontal resolution for the standalone TIEGCM to 2.5 degrees
      - vertical resolution to 0.25 (1/4) scale height
      - magnetic grid resolution to 2 degrees

   .. code-block:: bash

      $TIEGCMHOME/tiegcmrun/tiegcmrun.py -oc
      Instructions:
      -> Default Selected input parameter is given in GREEN
      -> Warnings and Information are given in YELLOW
      -> Errors are given in RED
      -> Valid values (if any) are given in brackets eg. (value1 | value2 | value3)
      -> Enter '?' for any input parameter to get a detailed description


      Run Options:
      User Mode = BASIC
      Compile = True
      Execute = False
      Coupling = False


      Name of HPC system (derecho|aitken|linux) [aitken]:
      Standalone Executable [/glade/derecho/scratch/nikhilr/tiegcm_build_Q/exec/tiegcm.exe]:
      Horizontal Resolution (Deg) (5.0|2.5|1.25|0.625) [2.5]:
      Vertical Resolution (Scale Height) (1/2|1/4|1/8|1/16) [1/4]:
      Magnetic grid resolution (Degree) (2|1|0.5) [2]:

   After these inputs, the script will compile the TIEGCM code, create the
   standalone executable, and output something like this:

   .. code-block:: bash

      ..
      ..
      gmake[1]: Leaving directory '/nobackup/nrao3/tiegcm/tiegcm_build_Q/exec'
      Executable copied from /nobackup/nrao3/tiegcm/tiegcm_build_Q/exec/tiegcm.exe to /nobackup/nrao3/tiegcm/tiegcm_build_Q/stdout

3. Next, we will build the coupled TIEGCM executable by running the
   ``tiegcmrun.py`` script with the ``-oc`` and ``-co`` options.

   .. note::

      The ``-co`` option stands for "coupled", which means that the script will compile the code for the coupled TIEGCM executable.
      Since the Gamera resolution is ``Q``, we will set the following:

      - horizontal resolution for the coupled TIEGCM to 1.25 degrees
      - vertical resolution to 0.125 (1/8) scale height
      - magnetic grid resolution to 1 degree

   .. code-block:: bash

      $TIEGCMHOME/tiegcmrun/tiegcmrun.py -oc -co
      Instructions:
      -> Default Selected input parameter is given in GREEN
      -> Warnings and Information are given in YELLOW
      -> Errors are given in RED
      -> Valid values (if any) are given in brackets eg. (value1 | value2 | value3)
      -> Enter '?' for any input parameter to get a detailed description

      Run Options:
      User Mode = BASIC
      Compile = True
      Execute = False
      Coupling = True

      Name of HPC system (derecho|aitken|linux) [aitken]:
      Coupled Executable [/glade/derecho/scratch/nikhilr/tiegcm_build/exec/tiegcm.x]:
      Horizontal Resolution (Deg) (5.0|2.5|1.25|0.625) [2.5]: 1.25
      Vertical Resolution (Scale Height) (1/2|1/4|1/8|1/16) [1/8]:
      Magnetic grid resolution (Degree) (2|1|0.5) [1]:

   After these inputs, the script will compile the TIEGCM code, create the
   coupled executable, and output something like this:

   .. code-block:: bash

      ..
      ..
      gmake[1]: Leaving directory '/nobackup/nrao3/tiegcm/tiegcm_build_Q/exec'
      Executable copied from /nobackup/nrao3/tiegcm/tiegcm_build_Q/exec/tiegcm.x to /nobackup/nrao3/tiegcm/tiegcm_build_Q/stdout

4. You should now see the following files in your run directory:

   .. code-block:: bash

      ls
      exec hist stdout

   The executables, along with the stdout files, are located in the
   ``stdout`` directory:

   .. code-block:: bash

      ls stdout
      defs.h tiegcm.exe tiegcm.x

Build guide for TIEGCM code for a ``D`` run on ``aitken``
#########################################################

1. First we will create a directory for the "D" TIEGCM build in the
   TIEGCMHOME directory.

   .. code-block:: bash

      cd $TIEGCMHOME
      mkdir tiegcm_build_D
      cd tiegcm_build_D

2. Next, we will build the standalone TIEGCM executable by running the
   ``tiegcmrun.py`` script with the ``-oc`` option.

   .. note::

      Since the Gamera resolution is ``D``, we will set the following:

      - horizontal resolution for the standalone TIEGCM to 2.5 degrees
      - vertical resolution to 0.25 (1/4) scale height
      - magnetic grid resolution to 2 degrees

   .. code-block:: bash

      $TIEGCMHOME/tiegcmrun/tiegcmrun.py -oc

3. Next, we will build the coupled TIEGCM executable by running the
   ``tiegcmrun.py`` script with the ``-oc`` and ``-co`` options.

   .. note::

      Since the Gamera resolution is ``D``, we will set the following:

      - horizontal resolution for the coupled TIEGCM to 2.5 degrees
      - vertical resolution to 0.25 (1/4) scale height
      - magnetic grid resolution to 2 degrees

   .. code-block:: bash

      $TIEGCMHOME/tiegcmrun/tiegcmrun.py -oc -co

4. The executables, along with the stdout files, are located in the
   ``stdout`` directory:

   .. code-block:: bash

      ls stdout
      defs.h tiegcm.exe tiegcm.x

Build guide for TIEGCM code for an ``O`` run on ``aitken``
##########################################################

1. First we will create a directory for the "O" TIEGCM build in the
   TIEGCMHOME directory.

   .. code-block:: bash

      cd $TIEGCMHOME
      mkdir tiegcm_build_O
      cd tiegcm_build_O

2. Next, we will build the standalone TIEGCM executable by running the
   ``tiegcmrun.py`` script with the ``-oc`` option.

   .. note::

      Since the Gamera resolution is ``O``, we will set the following:

      - horizontal resolution for the standalone TIEGCM to 1.25 degrees
      - vertical resolution to 0.125 (1/8) scale height
      - magnetic grid resolution to 1 degree

   .. code-block:: bash

      $TIEGCMHOME/tiegcmrun/tiegcmrun.py -oc

3. Next, we will build the coupled TIEGCM executable by running the
   ``tiegcmrun.py`` script with the ``-oc`` and ``-co`` options.

   .. note::

      Since the Gamera resolution is ``O``, we will set the following:

      - horizontal resolution for the coupled TIEGCM to 0.625 degrees
      - vertical resolution to 0.0625 (1/16) scale height
      - magnetic grid resolution to 0.5 degree

   .. code-block:: bash

      $TIEGCMHOME/tiegcmrun/tiegcmrun.py -oc -co

4. The executables, along with the stdout files, are located in the
   ``stdout`` directory:

   .. code-block:: bash

      ls stdout
      defs.h tiegcm.exe tiegcm.x

.. note:: Documentation on the analysis tools is found
   :doc:`here </tools/index>`.
96
docs/source/building/buildDerecho_GR.rst
Normal file
@@ -0,0 +1,96 @@
Building the ``kaiju`` software on ``derecho`` for MAGE - Without TIEGCM (GR)
=============================================================================

Introduction
------------

This page provides instructions for building the ``kaiju`` software on the
``derecho`` supercomputer. These instructions assume that you have cloned the
``kaiju`` repository.

Prepare your software environment
---------------------------------

Like most HPC systems, ``derecho`` uses the ``module`` system to manage the
versions of software packages available to the user. When you log in to
``derecho``, the following modules are loaded by default:

.. code-block:: bash

   module list

   Currently Loaded Modules:
     1) ncarenv/23.09 (S)   4) ncarcompilers/1.0.0   7) netcdf/4.9.2
     2) craype/2.7.23       5) cray-mpich/8.1.27
     3) intel/2023.2.1      6) hdf5/1.12.2

   Where:
     S:  Module is Sticky, requires --force to unload or purge

This set of modules **cannot** be used to build the ``kaiju`` code.

Start by purging any currently-loaded modules, then loading the following
module set:

.. code-block:: bash

   module --force purge
   module load ncarenv/23.06
   module load cmake/3.26.3
   module load craype/2.7.20
   module load intel/2023.0.0
   module load ncarcompilers/1.0.0
   module load cray-mpich/8.1.25
   module load hdf5-mpi/1.12.2
   module load conda/latest

.. important::

   You must use these exact versions of the modules to ensure the software
   compiles properly. If you use different versions of any of these modules,
   a successful build cannot be guaranteed. This module list is current as of
   **11 April 2025**, and is subject to change as the compute environment
   changes.

Build the ``kaiju`` software
----------------------------

These instructions show how to build the MPI version of the ``kaiju``
software. The MPI version is built in the subdirectory ``build_mpi``
under the ``kaiju`` source code directory. In practice, you can place the
build directory in any convenient location.

.. code-block:: bash

   # Move to your kaiju clone.
   cd /path/to/kaiju

   # Create the build directory and enter it.
   mkdir build_mpi
   cd build_mpi

   # Run cmake to create the Makefile, saving output.
   # NOTE: The FC definition is *required* for proper cmake operation.
   # (To build the GTR version of the code, follow the GTR build
   # instructions instead.)
   FC=`which ifort` cmake -DENABLE_MPI=ON .. >& cmake.out

   # You can pick one compile target below, or compile all of them if you'd like.

   # Compile the MAGE model for geospace simulations.
   make -j4 voltron_mpi.x >& make-voltron.out

   # Compile the GAMERA-helio model for inner heliosphere simulations.
   make -j4 gamhelio_mpi.x >& make-gamhelio.out

   # Compile the analysis tools.
   make -j4 calcdb.x chop.x sctrack.x slice.x >& make-analysis.out

When finished, your build directory will contain a ``bin``
subdirectory holding the compiled ``kaiju`` executables.
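
If the build fails early, it is worth confirming that ``cmake`` picked up the
Intel compiler. A minimal check of the saved output (this assumes the standard
CMake compiler-identification messages):

.. code-block:: bash

   # Confirm cmake identified the Intel Fortran compiler.
   grep -i "fortran compiler" cmake.out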

.. note:: Documentation on the analysis tools is found
   :doc:`here </tools/index>`.
377
docs/source/building/buildDerecho_GTR.rst
Normal file
@@ -0,0 +1,377 @@
Building the ``kaiju`` software on ``derecho`` for MAGE - With TIEGCM (GTR)
===========================================================================

Introduction
------------

This page provides instructions for building the ``kaiju`` software on the
``derecho`` supercomputer. These instructions assume that you have cloned the
``kaiju`` repository.

Prepare your software environment
---------------------------------

Like most HPC systems, ``derecho`` uses the ``module`` system to manage the
versions of software packages available to the user. When you log in to
``derecho``, the following modules are loaded by default:

.. code-block:: bash

   module list

   Currently Loaded Modules:
     1) ncarenv/23.09 (S)   4) ncarcompilers/1.0.0   7) netcdf/4.9.2
     2) craype/2.7.23       5) cray-mpich/8.1.27
     3) intel/2023.2.1      6) hdf5/1.12.2

   Where:
     S:  Module is Sticky, requires --force to unload or purge

This set of modules **cannot** be used to build the ``kaiju`` code.

Start by purging any currently-loaded modules, then loading the following
module set for MAGE runs coupled with TIEGCM (known as "GTR"):

.. code-block:: bash

   module --force purge
   module load ncarenv/23.09
   module load cmake/3.26.3
   module load craype/2.7.31
   module load intel-classic/2023.2.1
   module load cray-mpich/8.1.27
   module load ncarcompilers/1.0.0
   module load mkl/2023.2.0
   module load hdf5-mpi/1.12.2
   module load netcdf-mpi/4.9.2
   module load esmf/8.6.0
   module load conda/latest

.. important::

   You must use these exact versions of the modules to ensure the software
   compiles properly. If you use different versions of any of these modules,
   a successful build cannot be guaranteed. This module list is current as of
   **11 April 2025**, and is subject to change as the compute environment
   changes.

Build the ``kaiju`` software
----------------------------

These instructions show how to build the MPI version of the ``kaiju``
software. The GTR version is built in the subdirectory ``build_gtr``
under the ``kaiju`` source code directory. In practice, you can place the
build directory in any convenient location.

.. code-block:: bash

   # Move to your kaiju clone.
   cd /path/to/kaiju

   # Create the GTR build directory and enter it.
   mkdir build_gtr
   cd build_gtr

   # Run cmake to create the Makefile, saving output.
   # NOTE: The FC definition is *required* for proper cmake operation.
   FC=`which ifort` cmake -DENABLE_MPI=ON -DALLOW_INVALID_COMPILERS=ON .. >& cmake.out

   # You can pick one compile target below, or compile all of them if you'd like.

   # Compile the MAGE model for geospace simulations.
   make -j4 voltron_mpi.x >& make-voltron.out

   # Compile the GAMERA-helio model for inner heliosphere simulations.
   make -j4 gamhelio_mpi.x >& make-gamhelio.out

   # Compile the analysis tools.
   make -j4 calcdb.x chop.x sctrack.x slice.x >& make-analysis.out

When finished, your build directory will contain a ``bin``
subdirectory holding the compiled ``kaiju`` executables.

Build the ``tiegcm`` software
-----------------------------

`TIEGCM <https://tiegcm-docs.readthedocs.io/>`_ is a comprehensive, first-principles, three-dimensional,
non-linear representation of the coupled thermosphere and ionosphere system that includes a self-consistent solution
of the middle and low-latitude dynamo field.

Getting the TIE-GCM source code
*******************************

The ``TIE-GCM`` source code can be obtained by cloning the ``TIE-GCM`` repository
on GitHub:

.. code-block:: bash

   git clone https://github.com/NCAR/tiegcm.git

Setting environment variables
*****************************

Set the ``TIEGCMHOME`` and ``TIEGCMDATA`` paths.
For example:

.. code-block:: bash

   export TIEGCMHOME=/path/to/your/tiegcm
   export TIEGCMDATA=/path/to/your/tiegcm/data

.. note::

   The ``TIEGCMHOME`` and ``TIEGCMDATA`` environment variables are required
   for running the GTR model. They should point to the TIEGCM source code
   directory and the TIEGCM data directory, respectively.

The TIEGCMDATA directory can be found in the following locations:

- On ``derecho``: ``/glade/campaign/hao/itmodel/tiegcm3.0/new_data``
- On ``pleiades``: ``/nobackup/nrao3/tiegcm/tiegcm3.0/data``
- Alternatively, the required data files can be downloaded from the NCAR Globus endpoint using the following link: `TIEGCM Data Files <https://app.globus.org/file-manager?origin_id=b2502c58-c3eb-470f-86d4-cbdcd0aeb6c8&origin_path=%2F>`_

Resolution guide for TIEGCM
***************************

Two TIEGCM executables are required for running the GTR model:

- TIEGCM Standalone
    This is the TIEGCM code that runs independently and is used for initialization of the model.
- TIEGCM Coupled
    This is the TIEGCM code that runs in coupled mode with the GR model, providing
    real-time updates to the thermosphere and ionosphere conditions during the simulation.

Depending on the Gamera resolution, you will need to compile TIEGCM
executables at different resolutions:

- For a ``D`` run

  - TIEGCM Standalone: horires = 2.5, vertres = 0.25 (1/4), mres = 2
  - TIEGCM Coupled: horires = 2.5, vertres = 0.25 (1/4), mres = 2

- For a ``Q`` run

  - TIEGCM Standalone: horires = 2.5, vertres = 0.25 (1/4), mres = 2
  - TIEGCM Coupled: horires = 1.25, vertres = 0.125 (1/8), mres = 1

- For an ``O`` run

  - TIEGCM Standalone: horires = 1.25, vertres = 0.125 (1/8), mres = 1
  - TIEGCM Coupled: horires = 0.625, vertres = 0.0625 (1/16), mres = 0.5
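
For example, a completed ``Q``-run setup ends up with two executables. A
sketch of the resulting layout, assuming the build directory name used in the
guide below:

.. code-block:: bash

   # Hypothetical layout after building both executables for a Q run:
   #   $TIEGCMHOME/tiegcm_build_Q/stdout/tiegcm.exe  (standalone: 2.5 deg, 1/4 scale height, mres 2)
   #   $TIEGCMHOME/tiegcm_build_Q/stdout/tiegcm.x    (coupled:    1.25 deg, 1/8 scale height, mres 1)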

The TIEGCM code is built using the ``tiegcmrun`` script, which is provided in
the ``tiegcm/tiegcmrun`` directory of the ``tiegcm`` code repository. More
information on ``tiegcmrun.py`` can be found
in the `TIEGCM Quick Start Guide <https://tiegcm-docs.readthedocs.io/en/latest/tiegcm/quickstart.html>`_.

.. important::

   Make sure to load the modules listed in the ``kaiju`` build instructions
   before running the ``tiegcmrun`` script.

Build guide for TIEGCM code for a ``Q`` run
###########################################

We will use the ``tiegcmrun`` script to build the code, which requires minimal
input from the user. At each prompt, you can either type in a value, or hit
the :kbd:`Return` key to accept the default value (shown in square brackets at
the end of the prompt).

1. First we will create a directory for the "Q" TIEGCM build in the
   TIEGCMHOME directory.

   .. code-block:: bash

      cd $TIEGCMHOME
      mkdir tiegcm_build_Q
      cd tiegcm_build_Q

2. Next, we will build the standalone TIEGCM executable by running the
   ``tiegcmrun.py`` script with the ``-oc`` option.

   .. note::

      The ``-oc`` option stands for "only compile", which means that the script will only compile the code and not run it.
      Since the Gamera resolution is ``Q``, we will set the following:

      - horizontal resolution for the standalone TIEGCM to 2.5 degrees
      - vertical resolution to 0.25 (1/4) scale height
      - magnetic grid resolution to 2 degrees

   .. code-block:: bash

      $TIEGCMHOME/tiegcmrun/tiegcmrun.py -oc
      Instructions:
      -> Default Selected input parameter is given in GREEN
      -> Warnings and Information are given in YELLOW
      -> Errors are given in RED
      -> Valid values (if any) are given in brackets eg. (value1 | value2 | value3)
      -> Enter '?' for any input parameter to get a detailed description


      Run Options:
      User Mode = BASIC
      Compile = True
      Execute = False
      Coupling = False


      Name of HPC system (derecho|pleiades|linux) [derecho]:
      Standalone Executable [/glade/derecho/scratch/nikhilr/tiegcm_build/exec/tiegcm.exe]:
      Horizontal Resolution (Deg) (5.0|2.5|1.25|0.625) [2.5]:
      Vertical Resolution (Scale Height) (1/2|1/4|1/8|1/16) [1/4]:
      Magnetic grid resolution (Degree) (2|1|0.5) [2]:

   After these inputs, the script will compile the TIEGCM code, create the
   standalone executable, and output something like this:

   .. code-block:: bash

      ..
      ..
      gmake[1]: Leaving directory '/glade/derecho/scratch/nikhilr/tiegcm_build/exec'
      Executable copied from /glade/derecho/scratch/nikhilr/tiegcm_build/exec/tiegcm.exe to /glade/derecho/scratch/nikhilr/tiegcm_build/stdout

3. Next, we will build the coupled TIEGCM executable by running the
   ``tiegcmrun.py`` script with the ``-oc`` and ``-co`` options.

   .. note::

      The ``-co`` option stands for "coupled", which means that the script will compile the code for the coupled TIEGCM executable.
      Since the Gamera resolution is ``Q``, we will set the following:

      - horizontal resolution for the coupled TIEGCM to 1.25 degrees
      - vertical resolution to 0.125 (1/8) scale height
      - magnetic grid resolution to 1 degree

   .. code-block:: bash

      $TIEGCMHOME/tiegcmrun/tiegcmrun.py -oc -co
      Instructions:
      -> Default Selected input parameter is given in GREEN
      -> Warnings and Information are given in YELLOW
      -> Errors are given in RED
      -> Valid values (if any) are given in brackets eg. (value1 | value2 | value3)
      -> Enter '?' for any input parameter to get a detailed description

      Run Options:
      User Mode = BASIC
      Compile = True
      Execute = False
      Coupling = True

      Name of HPC system (derecho|pleiades|linux) [derecho]:
      Coupled Executable [/glade/derecho/scratch/nikhilr/tiegcm_build/exec/tiegcm.x]:
      Horizontal Resolution (Deg) (5.0|2.5|1.25|0.625) [2.5]: 1.25
      Vertical Resolution (Scale Height) (1/2|1/4|1/8|1/16) [1/8]:
      Magnetic grid resolution (Degree) (2|1|0.5) [1]:

   After these inputs, the script will compile the TIEGCM code, create the
   coupled executable, and output something like this:

   .. code-block:: bash

      ..
      ..
      gmake[1]: Leaving directory '/glade/derecho/scratch/nikhilr/tiegcm_build/exec'
      Executable copied from /glade/derecho/scratch/nikhilr/tiegcm_build/exec/tiegcm.x to /glade/derecho/scratch/nikhilr/tiegcm_build/stdout

4. You should now see the following files in your run directory:

   .. code-block:: bash

      ls
      exec hist stdout

   The executables, along with the stdout files, are located in the
   ``stdout`` directory:

   .. code-block:: bash

      ls stdout
      defs.h tiegcm.exe tiegcm.x

Build guide for TIEGCM code for a ``D`` run on ``derecho``
##########################################################

1. First we will create a directory for the "D" TIEGCM build in the
   TIEGCMHOME directory.

   .. code-block:: bash

      cd $TIEGCMHOME
      mkdir tiegcm_build_D
      cd tiegcm_build_D

2. Next, we will build the standalone TIEGCM executable by running the
   ``tiegcmrun.py`` script with the ``-oc`` option.

   .. note::

      Since the Gamera resolution is ``D``, we will set the following:

      - horizontal resolution for the standalone TIEGCM to 2.5 degrees
      - vertical resolution to 0.25 (1/4) scale height
      - magnetic grid resolution to 2 degrees

   .. code-block:: bash

      $TIEGCMHOME/tiegcmrun/tiegcmrun.py -oc

3. Next, we will build the coupled TIEGCM executable by running the
   ``tiegcmrun.py`` script with the ``-oc`` and ``-co`` options.

   .. note::

      Since the Gamera resolution is ``D``, we will set the following:

      - horizontal resolution for the coupled TIEGCM to 2.5 degrees
      - vertical resolution to 0.25 (1/4) scale height
      - magnetic grid resolution to 2 degrees

   .. code-block:: bash

      $TIEGCMHOME/tiegcmrun/tiegcmrun.py -oc -co

4. The executables, along with the stdout files, are located in the
   ``stdout`` directory:

   .. code-block:: bash

      ls stdout
      defs.h tiegcm.exe tiegcm.x

Build guide for TIEGCM code for an ``O`` run on ``derecho``
###########################################################

1. First we will create a directory for the "O" TIEGCM build in the
   TIEGCMHOME directory.

   .. code-block:: bash

      cd $TIEGCMHOME
      mkdir tiegcm_build_O
      cd tiegcm_build_O

2. Next, we will build the standalone TIEGCM executable by running the
   ``tiegcmrun.py`` script with the ``-oc`` option.

   .. note::

      Since the Gamera resolution is ``O``, we will set the following:

      - horizontal resolution for the standalone TIEGCM to 1.25 degrees
      - vertical resolution to 0.125 (1/8) scale height
      - magnetic grid resolution to 1 degree

   .. code-block:: bash

      $TIEGCMHOME/tiegcmrun/tiegcmrun.py -oc

3. Next, we will build the coupled TIEGCM executable by running the
   ``tiegcmrun.py`` script with the ``-oc`` and ``-co`` options.

   .. note::

      Since the Gamera resolution is ``O``, we will set the following:

      - horizontal resolution for the coupled TIEGCM to 0.625 degrees
      - vertical resolution to 0.0625 (1/16) scale height
      - magnetic grid resolution to 0.5 degree

   .. code-block:: bash

      $TIEGCMHOME/tiegcmrun/tiegcmrun.py -oc -co

4. The executables, along with the stdout files, are located in the
   ``stdout`` directory:

   .. code-block:: bash

      ls stdout
      defs.h tiegcm.exe tiegcm.x

.. note:: Documentation on the analysis tools is found
   :doc:`here </tools/index>`.
60
docs/source/building/index.rst
Normal file
@@ -0,0 +1,60 @@
Building the ``kaiju`` software
===============================

Introduction
------------

This section describes how to build the ``kaiju`` software on two different
supercomputers - ``derecho`` and ``aitken``. If you are trying to build the
``kaiju`` software on a different system, use these instructions as a starting
point.

Before you begin
----------------

The ``kaiju`` software is typically built with the Intel Fortran compiler
(although the software can also be built with other Fortran compilers,
e.g., GNU). Building the software also requires the ``cmake`` build tool, the
``HDF5`` library, and an ``MPI`` library. Instructions for loading these
packages are provided in the ``module`` commands for each HPC system.

Getting the source code
-----------------------

The ``kaiju`` source code can be obtained by cloning the ``kaiju`` repository
on GitHub:

.. code-block:: bash

   git clone https://github.com/JHUAPL/kaiju.git

.. important::

   The ``kaiju`` repository on GitHub uses ``git-lfs`` to support the
   use of large binary files. You *must* make sure ``git-lfs`` is
   available in your ``git`` installation to ensure a complete clone
   of the ``kaiju`` repository.
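
A minimal way to satisfy this, assuming ``git-lfs`` is already installed on
your system, is to enable it once for your account before cloning:

.. code-block:: bash

   # Enable git-lfs for your account (one-time setup), then clone.
   git lfs install
   git clone https://github.com/JHUAPL/kaiju.git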

.. note:: The ``kaiju`` software can be built in serial or MPI versions. The
   serial version is best for single-processor machines such as a laptop,
   while supercomputers such as ``derecho`` and ``aitken`` typically use
   the MPI version, to take advantage of multiple compute nodes. These
   instructions describe how to build the MPI version of ``kaiju``. The build
   instructions for the single-machine serial version are essentially the
   same as for the MPI version - typically all that is required is a Fortran
   compiler and an HDF5 library.
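
For reference, a serial configure step might look like the sketch below. The
exact options are an assumption here (the expected difference is simply
omitting ``-DENABLE_MPI=ON``); see the guides below for the tested MPI
workflow.

.. code-block:: bash

   # Sketch of a serial build configuration (assumed, not a tested recipe).
   FC=`which ifort` cmake .. >& cmake.out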

Building Guides
---------------

.. toctree::
   :maxdepth: 1

   buildDerecho_GR
   buildAitken_GR
   buildDerecho_GTR
   buildAitken_GTR
62
docs/source/conf.py
Normal file
@@ -0,0 +1,62 @@
# Configuration file for the Sphinx documentation builder.
import os

# -- Project information

project = 'kaiju'
copyright = '2025, JHU/APL and NSF NCAR'
author = 'Kaiju Development Team'

release = '0.75.3'
version = '0.75.3'

# -- General configuration

extensions = [
    'sphinx.ext.duration',
    'sphinx.ext.doctest',
    'sphinx.ext.autodoc',
    'sphinx.ext.autosummary',
    'sphinx.ext.intersphinx',
]

intersphinx_mapping = {
    'python': ('https://docs.python.org/3/', None),
    'sphinx': ('https://www.sphinx-doc.org/en/master/', None),
}
intersphinx_disabled_domains = ['std']

templates_path = ['_templates']

# -- Options for HTML output

html_theme = 'sphinx_rtd_theme'

# -- Options for EPUB output
epub_show_urls = 'footnote'

html_static_path = ['_static']
html_logo = '_static/MAGE_Logo_final_dark-bg_vertical.png'

html_theme_options = {
    'logo_only': True,
    'display_version': False,
    'collapse_navigation': False,
    'navigation_depth': 4,
}

html_css_files = [
    'css/sidebar_theme.css',
]

exclude_patterns = [
    "_build",
    "Thumbs.db",
    ".DS_Store",
]

# if not os.environ.get("BUILD_ALL"):
#     exclude_patterns.append("_obsolete/**")


def setup(app):
    if os.environ.get('BUILD_ALL'):
        app.tags.add('obsolete')
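
# NOTE (sketch): the setup() hook above exposes the "obsolete" tag when the
# BUILD_ALL environment variable is set, which index.rst uses to pull in the
# internal docs. For example, assuming sphinx-build is run from the docs
# directory:
#
#   BUILD_ALL=1 sphinx-build -b html source build/html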
46
docs/source/index.rst
Normal file
@@ -0,0 +1,46 @@
Welcome to the ``kaiju`` documentation
======================================

This is the documentation for the ``kaiju`` software. ``kaiju`` includes the
Multiscale Atmosphere-Geospace Environment (`MAGE
<https://cgs.jhuapl.edu/Models/>`_) model developed by the `Center for
Geospace Storms <https://cgs.jhuapl.edu/>`_ as well as other scientific
software for simulation of heliospheric environments such as planetary
magnetospheres and the solar wind. This documentation focuses on
`MAGE <https://cgs.jhuapl.edu/Models/>`_ and `GAMERA-helio <https://cgs.jhuapl.edu/Models/gamera.php>`_, i.e., the geospace
and inner heliosphere applications of the ``kaiju`` software.

.. important::

   Users of any code or data from this repository are expected to respect
   these :doc:`rules of the road <roadrules>`.

If you encounter any issues or have questions, first read the
:doc:`rules of the road <roadrules>` above and then consider joining
the Slack channel we set up for the Kaiju user community. If you want
to join, send us a message with your
request `here <https://cgs.jhuapl.edu/feedback/>`_.

====================
Table of contents
====================

.. toctree::
   :maxdepth: 1

   self
   python/index
   building/index
   running/index
   makeitso/index
   tools/index
   roadrules
   misc/index

.. only:: obsolete

   Internal Docs
   -------------

   .. toctree::
      :maxdepth: 2

      _obsolete/index
505
docs/source/makeitso/engage.rst
Normal file
@@ -0,0 +1,505 @@
Engage -- Use for MAGE with TIEGCM
==================================

Introduction
------------

The Python script ``engage.py`` was developed to simplify the process of
configuring and running GTR MAGE (that is, the geospace application
of the ``kaiju`` software with TIEGCM). It
provides an interactive, prompt-driven interface to specify all of the
parameters needed for a model run.

The ``engage.py`` script is a wrapper around the ``makeitso.py`` and TIE-GCM
``tiegcmrun`` scripts, which are used to prepare the necessary files for a GTR
MAGE model run.

- For more details on the ``makeitso.py`` script, see the :doc:`makeitso </makeitso/makeitso>` documentation.

- For more details on the TIE-GCM ``tiegcmrun`` script, see the `tiegcmrun <https://tiegcm-docs.readthedocs.io/en/latest/tiegcm/quickstart>`_ documentation.

The ``engage.py`` script can operate in one of three different modes:
``BASIC``, ``INTERMEDIATE``, or ``EXPERT``. Each mode provides access to
a subset of the ``kaiju`` and ``tiegcm`` parameters.

* The ``BASIC`` mode
    Requires the user to provide the minimum set of parameters needed to specify a model
    run, such as the run ID and the simulation time periods.

* The ``INTERMEDIATE`` mode
    Allows the user to specify all of the
    parameters from the ``BASIC`` mode, as well as a wider set of run parameters,
    such as non-standard file locations and some MHD and TIE-GCM parameters.

* The ``EXPERT`` mode
    Provides access to all of the user-adjustable
    parameters of the ``kaiju`` and ``TIE-GCM`` software.

When finished, the script generates the files needed to run a magnetosphere model, and saves
all options in a convenient JSON file so that the run can be repeated at a
later date.

Running the ``engage.py`` script
--------------------------------

The ``engage.py`` script is provided as part of the ``kaiju`` software. It
is found at ``$KAIJUHOME/scripts/makeitso/engage.py``, where ``$KAIJUHOME``
is the location of your ``kaiju`` software tree. After configuring your
``kaiju`` software, you can get help text for the script like this:

.. code-block:: bash

   engage.py --help
   usage: engage.py [-h] [--clobber] [--debug] [--mode MODE] [--engage_options_path ENGAGE_OPTIONS_PATH] [--makeitso_options_path MAKEITSO_OPTIONS_PATH] [--tiegcm_options_path TIEGCM_OPTIONS_PATH] [--verbose]

   Interactive script to prepare a MAGE magnetosphere model run.

   options:
     -h, --help            show this help message and exit
     --clobber             Overwrite existing options file (default: False).
     --debug, -d           Print debugging output (default: False).
     --mode MODE           User mode (BASIC|INTERMEDIATE|EXPERT) (default: BASIC).
     --engage_options_path ENGAGE_OPTIONS_PATH, -eo ENGAGE_OPTIONS_PATH
                           Path to engage JSON file of options (default: None)
     --makeitso_options_path MAKEITSO_OPTIONS_PATH, -mo MAKEITSO_OPTIONS_PATH
                           Path to makeitso JSON file of options (default: None)
     --tiegcm_options_path TIEGCM_OPTIONS_PATH, -to TIEGCM_OPTIONS_PATH
                           Path to tiegcm JSON file of options (default: None)
     --verbose, -v         Print verbose output (default: False).

The ``--*_options_path`` options allow the user to specify existing JSON files
from a previous run of ``engage.py`` so that the entire process of model
generation can be automated. More information on this is given below.
The ``--mode`` option specifies the user mode to run in, with ``BASIC`` being the default.
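
For instance, a rerun driven entirely by saved options files might look like
this (a sketch; the JSON file names are hypothetical examples of what an
earlier session may have written):

.. code-block:: bash

   # Re-run a previously configured simulation non-interactively,
   # using the JSON options files saved by an earlier session.
   engage.py --mode BASIC \
             --engage_options_path geospace_engage.json \
             --makeitso_options_path geospace_makeitso.json \
             --tiegcm_options_path geospace_tiegcm.json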

An example in ``BASIC`` mode
----------------------------

This section provides an annotated example session of ``engage.py`` running
in the default ``BASIC`` mode on the ``derecho`` supercomputer.

1. ``engage`` native parameters will be requested
#################################################

.. code-block:: bash

   engage.py
   Name to use for PBS job(s) [geospace]:

Enter an identifying string to use for your model run. This name will be used
as the basis for most of the files created by ``engage.py`` and the
``kaiju`` and ``TIE-GCM`` software. The default name is ``geospace``.

.. code-block:: bash

   Start date for simulation (yyyy-mm-ddThh:mm:ss) [2001-06-01T23:00:00]:
   Stop date for simulation (yyyy-mm-ddThh:mm:ss) [2001-06-02T01:00:00]:

Enter the start and stop date and time for the solar wind data you want to
use. The required data will be fetched from CDAWeb, and converted into a
format usable by the ``kaiju`` software.

.. code-block:: bash

   Do you want to split your job into multiple segments? (Y|N) [Y]:

Here ``Y`` is the default, and is required for a GTR run. This will
split your simulation into multiple PBS jobs that are chained together, with
each using the results of the previous job as a starting point.

.. code-block:: bash

   Segment length in simulated seconds [7200.0]: 3600

Enter the length of each segment in simulated seconds. The default is the entire length
of the simulation, but you can enter a shorter time to split the simulation into
multiple segments. For example, if you enter ``3600``, the simulation will be
split into two segments, each one hour long. The first segment will run from
``2001-06-01T23:00:00`` to ``2001-06-02T00:00:00``, and the second segment will run
from ``2001-06-02T00:00:00`` to ``2001-06-02T01:00:00``.

.. code-block:: bash

   GAMERA grid type (D|Q|O|H) [Q]:

The codes represent double- (``D``), quad- (``Q``), oct- (``O``) and
hex- (``H``) resolutions in the LFM grid used in the ``kaiju`` software.

.. code-block:: bash

   Name of HPC system (derecho|aitken) [aitken]: derecho

The ``engage.py`` script supports the ``derecho`` and ``aitken``
supercomputers. The selection you make here will customize the remaining
prompts for the selected system.

.. code-block:: bash

   PBS account name [your_login_name]:

On ``aitken``, your login name is usable here. On ``derecho``, you will need
a PBS account ID.

.. code-block:: bash

   Run directory [.]:

Specify the directory in which you wish to perform the simulation. The
directory will contain all of the files generated by ``engage.py``.

.. code-block:: bash

   Path to kaiju installation [YOUR_PATH_HERE]:
   Path to kaiju build directory [YOUR_PATH_HERE]:

Enter the paths to the location of your ``kaiju`` code, and the location of
your ``kaiju`` build directory.

.. code-block:: bash

   PBS queue name (low|normal|long|debug|devel) [normal]:

Select a PBS queue to use on the selected supercomputer.

.. code-block:: bash

   You are responsible for ensuring that the wall time is sufficient
   to run a segment of your simulation! Requested wall time for each PBS job
   segment (HH:MM:SS) [01:00:00]:

Specify the wall clock time to request for your job (or each segment, if you
split your job into multiple segments).

.. code-block:: bash

   Root directory for the simulation [<YOUR_RUN_DIRECTORY_HERE>]:

This is the root directory for your simulation. It will be used to store all
of the files generated by ``engage.py`` and the ``kaiju`` and ``TIE-GCM``
software. The default is the current directory.

.. code-block:: bash

   Conda environment to use for the simulation [<YOUR_CONDA_ENVIRONMENT_DIRECTORY_HERE>]:

This is the path to the conda environment that you want to use for the
simulation. This is automatically set to the conda environment that you have
activated when you run the ``engage.py`` script.
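
For example, you would typically activate the environment first and then
launch the script (a sketch; the environment name is hypothetical):

.. code-block:: bash

   conda activate kaiju-env   # hypothetical environment name
   $KAIJUHOME/scripts/makeitso/engage.py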

2. ``makeitso`` parameters will be requested
############################################

.. code-block:: bash

   Extend TFIN by dtCouple - 1 seconds (T|F) [T]:

This option allows you to extend the voltron TFIN time by one second. This is
required for coupled runs with TIE-GCM, and is set to ``T`` by default.

.. code-block:: bash

   (VOLTRON) Run in GCM mode (T|F) [T]:

This option allows you to run the voltron code in GCM mode, which is required
for coupled runs with TIE-GCM. This is set to ``T`` by default.

.. code-block:: bash

   Do you have an existing boundary condition file to use? (Y|N) [N]:

If you already have a file containing solar wind data to use for the inner
boundary conditions of your simulation, enter ``Y``, and you will then be
prompted for the path to the file. If you don't have the file, enter ``N``
and you will be prompted for the date range to use.

.. code-block:: bash

   (GAMERA) Relative path to HDF5 file containing solar wind boundary conditions [bcwind.h5]:

This is the path to your existing solar wind file, or the path that
``makeitso.py`` will use to create the file.

.. code-block:: bash

   (VOLTRON) File output cadence in simulated seconds [60.0]:

How often (in simulated seconds) the ``kaiju`` software should output results
during the course of the simulation.

The script then runs several additional tools to prepare the files needed for
your simulation.

.. code-block:: bash

   Running preprocessing steps.
   Generating Quad LFM-style grid ...

   Output: lfmQ.h5
   Size: (96,96,128)
   Inner Radius: 2.000000
   Sunward Outer Radius: 30.000000
   Tail Outer Radius: 322.511578
   Low-lat BC: 45.000000
   Ring params:
   <ring gid="lfm" doRing="T" Nr="8" Nc1="8" Nc2="16" Nc3="32" Nc4="32" Nc5="64" Nc6="64" Nc7="64" Nc8="64"/>

   Writing to lfmQ.h5
   Retrieving f10.7 data from CDAWeb
   Retrieving solar wind data from CDAWeb
   Using Bx fields
   Bx Fit Coefficients are [-3.78792744 -0.77915822 -1.0774984 ]
   Saving "OMNI_HRO_1MIN.txt_bxFit.png"
   Converting to Gamera solar wind file
   Found 21 variables and 120 lines
   Offsetting from LFM start ( 0.00 min) to Gamera start ( 0.00 min)
   Saving "OMNI_HRO_1MIN.txt.png"
   Writing Gamera solar wind to bcwind.h5
   Making new raijuconfig.h5, destroying pre-existing file if there
   Stamping file with git hash and branch, and script args
   Adding waveModel to raijuconfig.h5
   Reading /glade/derecho/scratch/ewinter/cgs/aplkaiju/kaipy-private/dev_312/kaipy-private/kaipy/raiju/waveModel/chorus_polynomial.txt
   Adding Species to raijuconfig.h5
   Adding params used to generate lambda distribution as root attribute
   Creating .ini file(s) for run.
   Converting .ini file(s) to .xml file(s).


   Template creation complete!


   Template creation complete!


   The PBS scripts ['./geospace-SPINUP.pbs', './geospace-WARMUP-01.pbs', './geospace-WARMUP-02.pbs', './geospace-01.pbs'] have been created, each with a corresponding XML file. To submit the jobs with the proper dependency (to ensure each segment runs in order), please run the script geospace_pbs.sh like this:
   bash geospace_pbs.sh
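
After submitting the chained jobs with ``geospace_pbs.sh``, you can watch
their progress with standard PBS commands (a sketch, assuming a PBS
scheduler):

.. code-block:: bash

   bash geospace_pbs.sh
   qstat -u $USER   # check the state of the chained job segments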
|
||||
3. ``tiegcmrun`` parameters will be requested
|
||||
#####################################################
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
Instructions:
|
||||
-> Default Selected input parameter is given in GREEN
|
||||
-> Warnings and Information are given in YELLOW
|
||||
-> Errors are given in RED
|
||||
-> Valid values (if any) are given in brackets eg. (value1 | value2 | value3)
|
||||
-> Enter '?' for any input parameter to get a detailed description
|
||||
|
||||
|
||||
Run Options:
|
||||
User Mode = BASIC
|
||||
Compile = False
|
||||
Execute = False
|
||||
Coupling = True
|
||||
Engage = True
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
Directory of model [<YOUR_TIEGCMHOME_HERE>]:
|
||||
Directory of Tiegcm Data Files [<YOUR_TIEGCMDATA_HERE>]:
|
||||
|
||||
This is the path to your TIE-GCM repository and TIE-GCM data directory. This is automatically set to
|
||||
to the TIEGCMHOME and TIEGCMDATA environment variables
|
||||
|
||||
.. code-block:: bash

Standalone Executable [<YOUR_TIEGCM_STANDALONE_EXECUTABLE_HERE>]:

This is the path to the TIE-GCM standalone executable. It is automatically
set to the ``tiegcm.exe`` in the current directory.

.. code-block:: bash

Coupled Executable [<YOUR_TIEGCM_COUPLED_EXECUTABLE_HERE>]:

This is the path to the TIE-GCM coupled executable. It is automatically set
to the ``tiegcm.x`` in the current directory.

.. code-block:: bash

Low = 70, Medium = 140 , High = 200
F107 flux level for TIEGCM spin up (low|medium|high) [low]:

This is the F10.7 flux level to use for the TIE-GCM source file in the
spin-up period. The default is ``low``, which corresponds to a value of 70.
The other options are ``medium`` (140) and ``high`` (200).

.. code-block:: bash

SOURCE file location [/glade/campaign/hao/itmodel/tiegcm3.0/new_data/source/junsol_f70.nc]:

This is the path to the TIE-GCM source file to use for the spin-up period.
The default is automatically selected based on the start date of your
simulation.

.. code-block:: bash

Selected date in source file Example: (173,0,0,0) [173 0 0 0]:
STEP number [30]:
NSTEP_SUB number [10]:

These parameters are set to default values by ``tiegcmrun``.

.. code-block:: bash

Secondary Output Fields [['TN', 'UN', 'VN', 'NE', 'TEC', 'POTEN', 'Z', 'ZG']] / ENTER to go next:

These are the secondary output fields to include in the TIE-GCM output.
The default is a set of fields that are commonly used in geospace simulations.
You can add another field if you wish, or just hit :kbd:`Return` to accept
the default.

.. code-block:: bash

High-latitude potential model that is going to be used (HEELIS|WEIMER) [HEELIS]:

This is the high-latitude potential model to use in the TIE-GCM simulation.
The default is ``HEELIS``; the Heelis potential model is required for
coupled runs with the ``kaiju`` software.

.. code-block:: bash

If GPI_NCFILE is specified, then KP and POWER/CTPOTEN are skipped. If further POTENTIAL_MODEL is WEIMER and IMF_NCFILE is specified, then the Weimer model and aurora will be driven by the IMF data, and only F107 and F107A will be read from the GPI data file.
GPI file [/glade/campaign/hao/itmodel/tiegcm3.0/new_data/boundary_files/GPI/gpi_1960001-2024332.nc]:

This is the path to the GPI file to use for the TIE-GCM simulation, which
contains geophysical index data. The default is automatically selected based
on the start date of your simulation.


After these inputs, the script interpolates the source file for TIEGCM, and
generates XML and PBS files for the run, as well as a grid file for use in
the model.

You should see output similar to this:

.. code-block:: bash

/glade/derecho/scratch/nikhilr/GTR58 exitsts
/glade/derecho/scratch/nikhilr/GTR58 exitsts
/glade/derecho/scratch/nikhilr/GTR58 exitsts
Interpolating primary file /glade/campaign/hao/itmodel/tiegcm3.0/new_data/source/junsol_f70.nc to create new primary file /glade/derecho/scratch/nikhilr/GTR58/tiegcm_standalone/geospace-tiegcm-standalone_prim.nc at horizontal resolution 2.5 and vertical resolution 0.25 with zitop 7.0.
Creating new primary file: /glade/derecho/scratch/nikhilr/GTR58/tiegcm_standalone/geospace-tiegcm-standalone_prim.nc
pbs_scripts = ['./geospace-01.pbs', './geospace-02.pbs']
submit_all_jobs_script = geospace_pbs.sh

When finished, the script creates the file ``runid.json``, where ``runid`` is
the identifying string for your simulation. This file contains a record of all
of the parameters used in your simulation. This file can be passed back to
``engage.py`` in a subsequent session to repeat the simulation, and also
provides a convenient starting point for minor tweaks to your simulation
parameters.

There are several types of files created for each of the jobs, including:

* ``*.pbs``
These are the PBS scripts that will be submitted to the job scheduler to run
the segments of the simulation.
* ``*.xml``
These are the XML files that contain the parameters for GAMERA and RAIJU for
the segment.
* ``*.inp``
These are the namelist files that contain the parameters for TIE-GCM for the
segment.
* ``*.json``
These are the JSON files that contain the parameters for the simulation. They
are generated by the ``engage.py`` script with all the parameters required to
run the simulation.

The run is divided into segments, which are submitted so that they execute in
order (see the sketch after this list):

* ``geospace-SPINUP.*``
This segment runs the GAMERA model to create the initial conditions for the
simulation. It is run first, and its output is used by the next segment.
* ``geospace-WARMUP-**.*``
These segments run the GAMERA-RAIJU model to "warm up" for the coupled model
execution. The ``-01``, ``-02``, etc. suffixes indicate the segment number,
and the segments are run in order.
* ``tiegcm_standalone-**.*``
These segments run the TIE-GCM model to create the initial conditions for the
coupled model. The ``-01``, ``-02``, etc. suffixes indicate the segment
number, and the segments are run in order.
* ``geospace-**.*``
These segments run the GTR coupled model. The ``-01``, ``-02``, etc.
suffixes indicate the segment number, and the segments are run in order.
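
The generated ``geospace_pbs.sh`` script submits these segments with PBS job
dependencies, so that each segment starts only after the previous one has
finished successfully. A minimal sketch of that pattern is shown below; the
script generated for your run may differ in detail:

.. code-block:: bash

# Sketch of the PBS dependency chaining performed by geospace_pbs.sh.
# The actual generated script may differ in detail.
jid1=$(qsub geospace-SPINUP.pbs)
jid2=$(qsub -W depend=afterok:$jid1 geospace-WARMUP-01.pbs)
jid3=$(qsub -W depend=afterok:$jid2 geospace-WARMUP-02.pbs)
qsub -W depend=afterok:$jid3 geospace-01.pbs
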
This image shows how the segments are run in order:

.. image:: ../running/GTRSegment.png


Additional parameters in ``INTERMEDIATE`` and ``EXPERT`` mode
-------------------------------------------------------------

Many more parameters are available in ``INTERMEDIATE`` and ``EXPERT`` modes.
These parameters are documented in the file ``option_descriptions.json``,
which is stored in the same directory as the ``engage.py`` script.
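
For example, to expose the full parameter set in an interactive session, you
can select a different mode on the command line. This assumes ``engage.py``
accepts the same ``--mode`` flag as ``makeitso.py``; check
``engage.py --help`` for the exact options:

.. code-block:: bash

# Assumes engage.py supports the same --mode flag as makeitso.py.
engage.py --mode EXPERT
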
Using JSON files for ``engage.py``
-------------------------------------------------------------
The ``engage.py`` script can also be run in a non-interactive mode, where it
reads JSON files containing the parameters for the simulation. This allows
you to automate the process of running the simulation, and to easily repeat
the simulation with the same parameters.

The ``engage.py`` script requires three JSON files to be specified:

* ``engage_options_path``
This is the path to the JSON file containing the parameters specific to the
``engage.py`` script, such as the run ID, start and stop dates, and so on.
* ``makeitso_options_path``
This is the path to the JSON file containing the parameters specific to the
``makeitso.py`` script, such as the GAMERA grid type, segment length, and so
on.
* ``tiegcm_options_path``
This is the path to the JSON file containing the parameters specific to the
TIE-GCM simulation, used by the ``tiegcmrun`` script, such as the source
file, F10.7 flux level, and so on.


To run the ``engage.py`` script in non-interactive mode, you can use the
following command:

.. code-block:: bash

engage.py --engage_options_path /path/to/engage_input.json --makeitso_options_path /path/to/makeitso_input.json --tiegcm_options_path /path/to/tiegcm_input.json

Here are templates for the JSON files:

- Derecho:

- :download:`engage_input.json <engage_template/derecho/engage_input.json>`
- :download:`makeitso_input.json <engage_template/derecho/makeitso_input.json>`
- :download:`tiegcm_input.json <engage_template/derecho/tiegcmrun_input.json>`

- aitken:

- :download:`engage_input.json <engage_template/aitken/engage_input.json>`
- :download:`makeitso_input.json <engage_template/aitken/makeitso_input.json>`
- :download:`tiegcm_input.json <engage_template/aitken/tiegcmrun_input.json>`

These JSON files can be used as a starting point for your own simulations.
You will need to modify certain parameters in them (see the sketch after
this list):

- engage_input.json:

- start_date: The start date of your simulation.
- stop_date: The stop date of your simulation.
- segment_duration: The duration of each segment in simulated seconds.
- gamera_grid_type: The GAMERA grid type to use (D, Q, O, or H).
- kaiju_install_directory: The path to your ``kaiju`` installation directory.
- kaiju_build_directory: The path to your ``kaiju`` build directory.

- makeitso_input.json:

- Automatically generated by the ``engage.py`` script, but you can modify the
parameters if needed.

- tiegcm_input.json:

- modeldir: The path to your TIE-GCM repository.
- tgcmdata: The path to your TIE-GCM data directory.
- modelexe: The path to the TIE-GCM standalone executable.
- coupled_modelexe: The path to the TIE-GCM coupled executable.
- solar_flux_level: The F10.7 flux level to use for the TIE-GCM source file in the spin-up period (low, medium, or high).
- SECFLDS: The secondary output fields to include in the TIE-GCM output.
- Automatically generated by the ``engage.py`` script, but you can modify the
parameters if needed.
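
For scripted workflows, the downloaded templates can be edited in place
before being passed to ``engage.py``. The sketch below uses ``jq`` (assuming
it is available on your system; any JSON-aware tool will do) to update the
simulation window:

.. code-block:: bash

# Update the simulation window in a copy of the engage template.
# Assumes jq is installed; the remaining fields can be edited the same way.
jq '.simulation.start_date = "2016-08-09T09:00:00" |
.simulation.stop_date = "2016-08-09T11:00:00"' \
engage_input.json > my_engage_input.json
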
docs/source/makeitso/engage_template/aitken/engage_input.json
@@ -0,0 +1,27 @@
{
"simulation": {
"job_name": "geospace",
"start_date": "2016-08-09T09:00:00",
"stop_date": "2016-08-09T11:00:00",
"use_segments": "Y",
"segment_duration": "3600",
"gamera_grid_type": "D",
"hpc_system": "aitken"
},
"pbs": {
"account_name": "nrao3",
"run_directory": ".",
"kaiju_install_directory": "/nobackupp27/nrao3/engage_build/kaiju-private",
"kaiju_build_directory": "/nobackupp27/nrao3/engage_build/kaiju-private/build_mpi",
"group_list": "None",
"queue": "normal",
"walltime": "03:00:00"
},
"coupling": {
"gr_warm_up_time": 14400,
"gcm_spin_up_time": 604800,
"root_directory": "/nobackupp27/nrao3/GTR152",
"tfin_delta": "T",
"doGCM": "T"
}
}

140
docs/source/makeitso/engage_template/aitken/makeitso_input.json
Normal file
@@ -0,0 +1,140 @@
{
"simulation": {
"bcwind_available": "N",
"segment_duration": "3600",
"gamera_grid_inner_radius": "2.0",
"gamera_grid_outer_radius": "30.0"
},
"pbs": {
},
"gamera": {
"sim": {
"doH5g": "T",
"icType": "user",
"pdmb": "0.75",
"rmeth": "8C"
},
"floors": {
"dFloor": "1.0e-4",
"pFloor": "1.0e-6"
},
"timestep": {
"doCPR": "T",
"limCPR": "0.20"
},
"restart": {
"doRes": "F",
"nRes": "-1",
"resID": "GTR152D"
},
"physics": {
"doBoris": "T",
"Ca": "10.0",
"doMHD": "T"
},
"ring": {
"doRing": "T",
"gid": "lfm"
},
"wind": {
"tsfile": "bcwind.h5"
},
"source": {
"doSource": "T",
"doBounceDT": "T",
"nBounce": "1.0",
"doWolfLim": "T"
},
"iPdir": {
"bcPeriodic": "F"
},
"jPdir": {
"bcPeriodic": "F"
},
"kPdir": {
"bcPeriodic": "T"
},
"coupling": {
"blockHalo": "F"
}
},
"voltron": {
"time": {
"tFin": 7200.0
},
"spinup": {
"doSpin": "T",
"tSpin": "7200.0",
"tIO": "0.0"
},
"output": {
"dtOut": "60.0",
"tsOut": "300.0"
},
"coupling": {
"doQkSquish": "T",
"doGCM": "F",
"qkSquishStride": "2",
"dtCouple": "5.0",
"doDeep": "T",
"imType": "RCM",
"doAsyncCoupling": "T"
},
"restart": {
"dtRes": "1800.0"
},
"imag": {
"doInit": "T"
},
"helpers": {
"doSquishHelp": "T"
}
},
"chimp": {
"units": {
"uid": "EARTHCODE"
},
"fields": {
"grType": "lfm"
},
"domain": {
"dtype": "MAGE"
},
"tracer": {
"epsds": "0.05"
}
},
"remix": {
"conductance": {
"doStarlight": "T",
"apply_cap": "T",
"const_sigma": "F"
},
"precipitation": {
"aurora_model_type": "LINMRG",
"beta": "0.2",
"doAuroralSmooth": "F"
}
},
"rcm": {
"rcmdomain": {
"domType": "ELLIPSE"
},
"ellipse": {
"xSun": "12.5",
"yDD": "15.0",
"xTail": "-15.0",
"isDynamic": "T"
},
"grid": {
"LowLat": "30.0",
"HiLat": "75.0"
},
"plasmasphere": {
"isDynamic": "T",
"initKp": "5",
"doRefill": "T",
"tAvg": "60.0"
}
}
}

docs/source/makeitso/engage_template/aitken/tiegcmrun_input.json
@@ -0,0 +1,96 @@
{
"simulation": {
"job_name": "GTR152D",
"hpc_system": "aitken"
},
"model": {
"data": {
"modeldir": "/nobackupp27/nrao3/engage_build/tiegcm",
"tgcmdata": "/nobackup/nrao3/tiegcm/tiegcm3.0/data",
"modelexe": "/nobackupp27/nrao3/GTR156/tiegcm.exe",
"coupled_modelexe": "/nobackupp27/nrao3/GTR156/tiegcm.x"
},
"specification": {
"zitop": 7
}
},
"inp": {
"LABEL": "tiegcm",
"CALENDAR_ADVANCE": 1,
"NSTEP_SUB": "10",
"solar_flux_level": "low",
"SECFLDS": [
"TN",
"UN",
"VN",
"NE",
"TEC",
"POTEN",
"Z",
"ZG"
],
"ELECTRON_HEATING": 6,
"POTENTIAL_MODEL": "HEELIS",
"GPI_NCFILE": "/nobackup/nrao3/tiegcm/tiegcm3.0/data/gpi_mgii_1979001-2010212.nc",
"IMF_NCFILE": null,
"KP": null,
"POWER": null,
"CTPOTEN": null,
"BXIMF": null,
"BYIMF": null,
"BZIMF": null,
"SWDEN": null,
"SWVEL": null,
"F107": null,
"F107A": null,
"ONEWAY": false,
"AMIENH": null,
"AMIESH": null,
"AURORA": null,
"DYNAMO": null,
"CALC_HELIUM": null,
"EDDY_DIF": null,
"JOULEFAC": null,
"COLFAC": null,
"OPDIFFCAP": "2e9",
"OPDIFFRATE": "0.3",
"OPDIFFLEV": "7",
"OPFLOOR": "3000",
"OPRATE": "0.3",
"OPLEV": "7",
"OPLATWIDTH": "20",
"TE_CAP": "8000",
"TI_CAP": "8000",
"CURRENT_PG": null,
"CURRENT_KQ": null,
"ET": null,
"SAPS": null,
"DOECLIPSE": null,
"ECLIPSE_LIST": null,
"HE_COEFS_NCFILE": "/nobackup/nrao3/tiegcm/tiegcm3.0/data/he_coefs_dres.nc",
"BGRDDATA_NCFILE": null,
"CTMT_NCFILE": null,
"SABER_NCFILE": null,
"TIDI_NCFILE": null,
"TIDE": null,
"TIDE2": null,
"MIXFILE": null,
"NUDGE_NCPRE": null,
"NUDGE_NCPOST": null,
"NUDGE_NCFILE": null,
"NUDGE_FLDS": null,
"NUDGE_LBC": null,
"NUDGE_F4D": null,
"NUDGE_USE_REFDATE": null,
"NUDGE_REFDATE": null,
"NUDGE_SPONGE": null,
"NUDGE_DELTA": null,
"NUDGE_POWER": null,
"NUDGE_ALPHA": null,
"other_input": [
null
]
},
"job": {
}
}

docs/source/makeitso/engage_template/derecho/engage_input.json
@@ -0,0 +1,28 @@
{
"simulation": {
"job_name": "geospace",
"start_date": "2016-08-09T09:00:00",
"stop_date": "2016-08-09T11:00:00",
"use_segments": "Y",
"segment_duration": "3600",
"gamera_grid_type": "D",
"hpc_system": "derecho"
},
"pbs": {
"account_name": "P28100045",
"run_directory": ".",
"kaiju_install_directory": "/glade/u/home/nikhilr/kaiju_engage/kaiju-private",
"kaiju_build_directory": "/glade/u/home/nikhilr/kaiju_engage/kaiju-private/build_mpi",
"queue": "main",
"job_priority": "premium",
"walltime": "03:00:00"
},
"coupling": {
"gr_warm_up_time": 14400,
"gcm_spin_up_time": 604800,
"root_directory": "/glade/derecho/scratch/nikhilr/GTR34",
"conda_env": "/glade/work/nikhilr/conda-envs/kaiju",
"tfin_delta": "T",
"doGCM": "T"
}
}

156
docs/source/makeitso/engage_template/derecho/makeitso_input.json
Normal file
@@ -0,0 +1,156 @@
{
"simulation": {
"bcwind_available": "N",
"gamera_grid_inner_radius": "2.0",
"gamera_grid_outer_radius": "30.0"
},
"pbs": {
},
"gamera": {
"sim": {
"doH5g": "T",
"icType": "user",
"runid": "gtr34",
"pdmb": "0.75",
"rmeth": "8C"
},
"floors": {
"dFloor": "1.0e-4",
"pFloor": "1.0e-6"
},
"timestep": {
"doCPR": "T",
"limCPR": "0.20"
},
"restart": {
"doRes": "F",
"nRes": "-1",
"resID": "gtr34"
},
"physics": {
"doBoris": "T",
"Ca": "10.0",
"doMHD": "T"
},
"ring": {
"doRing": "T",
"gid": "lfm"
},
"wind": {
"tsfile": "bcwind.h5"
},
"source": {
"doSource": "T",
"doBounceDT": "T",
"nBounce": "1.0",
"doWolfLim": "T"
},
"iPdir": {
"bcPeriodic": "F"
},
"jPdir": {
"bcPeriodic": "F"
},
"kPdir": {
"bcPeriodic": "T"
},
"coupling": {
"blockHalo": "T"
}
},
"voltron": {
"time": {
},
"spinup": {
"doSpin": "T",
"tSpin": "7200.0",
"tIO": "0.0"
},
"output": {
"dtOut": "60.0",
"tsOut": "300.0"
},
"coupling": {
"doQkSquish": "T",
"doGCM": "F",
"qkSquishStride": "2",
"dtCouple": "5.0",
"doDeep": "T",
"imType": "RAIJU",
"doAsyncCoupling": "F"
},
"restart": {
"dtRes": "1800.0"
},
"imag": {
"doInit": "T"
},
"helpers": {
"doSquishHelp": "T"
}
},
"chimp": {
"units": {
"uid": "EARTHCODE"
},
"fields": {
"grType": "lfm"
},
"domain": {
"dtype": "MAGE"
},
"tracer": {
"epsds": "0.05"
}
},
"remix": {
"conductance": {
"doStarlight": "T",
"apply_cap": "T",
"const_sigma": "F"
},
"precipitation": {
"aurora_model_type": "LINMRG",
"beta": "0.2",
"doAuroralSmooth": "F"
}
},
"raiju": {
"output": {
"loudConsole": "F"
},
"grid": {
"gType": "SHGRID",
"ThetaL": "15"
},
"domain": {
"tail_buffer": "15.0",
"sun_buffer": "15.0",
"tail_active": "12.0",
"sun_active": "12.0"
},
"config": {
"fname": "raijuconfig.h5"
},
"plasmasphere": {
"doPsphere": "T",
"doExcessMap": "T"
},
"losses": {
"doLosses": "T",
"doCX": "T",
"doCC": "T",
"doEWM": "T",
"ewmType": "SS"
},
"cpl": {
"nFluidsIn": "1",
"startupTscl": "7200.0"
},
"fluidIn1": {
"imhd": "0",
"flav": "2",
"excessToPsph": "T"
}
}
}

docs/source/makeitso/engage_template/derecho/tiegcmrun_input.json
@@ -0,0 +1,111 @@
{
"simulation": {
},
"model": {
"data": {
"modeldir": "/glade/u/home/nikhilr/kaiju_engage/tiegcm",
"tgcmdata": "/glade/campaign/hao/itmodel/tiegcm3.0/new_data",
"modelexe": "/glade/derecho/scratch/nikhilr/GTR57/tiegcm.exe",
"coupled_modelexe": "/glade/derecho/scratch/nikhilr/GTR57/tiegcm.x"
},
"specification": {
"zitop": 7
}
},
"inp": {
"LABEL": "tiegcm",
"CALENDAR_ADVANCE": 1,
"NSTEP_SUB": "10",
"solar_flux_level": "low",
"SECFLDS": [
"TN",
"UN",
"VN",
"NE",
"TEC",
"POTEN",
"Z",
"ZG"
],
"ELECTRON_HEATING": 6,
"POTENTIAL_MODEL": "HEELIS",
"GPI_NCFILE": "/glade/campaign/hao/itmodel/tiegcm3.0/new_data/boundary_files/GPI/gpi_1960001-2025120.nc",
"IMF_NCFILE": null,
"KP": null,
"POWER": null,
"CTPOTEN": null,
"BXIMF": null,
"BYIMF": null,
"BZIMF": null,
"SWDEN": null,
"SWVEL": null,
"F107": null,
"F107A": null,
"ONEWAY": false,
"AMIENH": null,
"AMIESH": null,
"AURORA": null,
"DYNAMO": null,
"CALC_HELIUM": null,
"EDDY_DIF": null,
"JOULEFAC": null,
"COLFAC": null,
"OPDIFFCAP": "2e9",
"OPDIFFRATE": "0.3",
"OPDIFFLEV": "7",
"OPFLOOR": "3000",
"OPRATE": "0.3",
"OPLEV": "7",
"OPLATWIDTH": "20",
"TE_CAP": "8000",
"TI_CAP": "8000",
"CURRENT_PG": null,
"CURRENT_KQ": null,
"ET": null,
"SAPS": null,
"DOECLIPSE": null,
"ECLIPSE_LIST": null,
"HE_COEFS_NCFILE": "/glade/campaign/hao/itmodel/tiegcm3.0/new_data/boundary_files/other/he_coefs_dres.nc",
"BGRDDATA_NCFILE": null,
"CTMT_NCFILE": null,
"SABER_NCFILE": null,
"TIDI_NCFILE": null,
"TIDE": null,
"TIDE2": null,
"MIXFILE": null,
"NUDGE_NCPRE": null,
"NUDGE_NCPOST": null,
"NUDGE_NCFILE": null,
"NUDGE_FLDS": null,
"NUDGE_LBC": null,
"NUDGE_F4D": null,
"NUDGE_USE_REFDATE": null,
"NUDGE_REFDATE": null,
"NUDGE_SPONGE": null,
"NUDGE_DELTA": null,
"NUDGE_POWER": null,
"NUDGE_ALPHA": null,
"other_input": [
null
]
},
"job": {
"modules": [
"ncarenv/23.09",
"cmake/3.26.3",
"craype/2.7.31",
"intel-classic/2023.2.1",
"cray-mpich/8.1.27",
"ncarcompilers/1.0.0",
"mkl/2023.2.0",
"hdf5-mpi/1.12.2",
"netcdf-mpi/4.9.2",
"esmf/8.6.0",
"conda"
],
"other_job": [
null
]
}
}

@@ -0,0 +1,27 @@
{
"simulation": {
"job_name": "geospace",
"start_date": "2016-08-09T09:00:00",
"stop_date": "2016-08-09T11:00:00",
"use_segments": "Y",
"segment_duration": "3600",
"gamera_grid_type": "D",
"hpc_system": "pleiades"
},
"pbs": {
"account_name": "nrao3",
"run_directory": ".",
"kaiju_install_directory": "/nobackupp27/nrao3/engage_build/kaiju-private",
"kaiju_build_directory": "/nobackupp27/nrao3/engage_build/kaiju-private/build_mpi",
"group_list": "None",
"queue": "normal",
"walltime": "03:00:00"
},
"coupling": {
"gr_warm_up_time": 14400,
"gcm_spin_up_time": 604800,
"root_directory": "/nobackupp27/nrao3/GTR152",
"tfin_delta": "T",
"doGCM": "T"
}
}

@@ -0,0 +1,140 @@
{
"simulation": {
"bcwind_available": "N",
"segment_duration": "3600",
"gamera_grid_inner_radius": "2.0",
"gamera_grid_outer_radius": "30.0"
},
"pbs": {
},
"gamera": {
"sim": {
"doH5g": "T",
"icType": "user",
"pdmb": "0.75",
"rmeth": "8C"
},
"floors": {
"dFloor": "1.0e-4",
"pFloor": "1.0e-6"
},
"timestep": {
"doCPR": "T",
"limCPR": "0.20"
},
"restart": {
"doRes": "F",
"nRes": "-1",
"resID": "GTR152D"
},
"physics": {
"doBoris": "T",
"Ca": "10.0",
"doMHD": "T"
},
"ring": {
"doRing": "T",
"gid": "lfm"
},
"wind": {
"tsfile": "bcwind.h5"
},
"source": {
"doSource": "T",
"doBounceDT": "T",
"nBounce": "1.0",
"doWolfLim": "T"
},
"iPdir": {
"bcPeriodic": "F"
},
"jPdir": {
"bcPeriodic": "F"
},
"kPdir": {
"bcPeriodic": "T"
},
"coupling": {
"blockHalo": "F"
}
},
"voltron": {
"time": {
"tFin": 7200.0
},
"spinup": {
"doSpin": "T",
"tSpin": "7200.0",
"tIO": "0.0"
},
"output": {
"dtOut": "60.0",
"tsOut": "300.0"
},
"coupling": {
"doQkSquish": "T",
"doGCM": "F",
"qkSquishStride": "2",
"dtCouple": "5.0",
"doDeep": "T",
"imType": "RCM",
"doAsyncCoupling": "T"
},
"restart": {
"dtRes": "1800.0"
},
"imag": {
"doInit": "T"
},
"helpers": {
"doSquishHelp": "T"
}
},
"chimp": {
"units": {
"uid": "EARTHCODE"
},
"fields": {
"grType": "lfm"
},
"domain": {
"dtype": "MAGE"
},
"tracer": {
"epsds": "0.05"
}
},
"remix": {
"conductance": {
"doStarlight": "T",
"apply_cap": "T",
"const_sigma": "F"
},
"precipitation": {
"aurora_model_type": "LINMRG",
"beta": "0.2",
"doAuroralSmooth": "F"
}
},
"rcm": {
"rcmdomain": {
"domType": "ELLIPSE"
},
"ellipse": {
"xSun": "12.5",
"yDD": "15.0",
"xTail": "-15.0",
"isDynamic": "T"
},
"grid": {
"LowLat": "30.0",
"HiLat": "75.0"
},
"plasmasphere": {
"isDynamic": "T",
"initKp": "5",
"doRefill": "T",
"tAvg": "60.0"
}
}
}

@@ -0,0 +1,96 @@
{
"simulation": {
"job_name": "GTR152D",
"hpc_system": "pleiades"
},
"model": {
"data": {
"modeldir": "/nobackupp27/nrao3/engage_build/tiegcm",
"tgcmdata": "/nobackup/nrao3/tiegcm/tiegcm3.0/data",
"modelexe": "/nobackupp27/nrao3/GTR156/tiegcm.exe",
"coupled_modelexe": "/nobackupp27/nrao3/GTR156/tiegcm.x"
},
"specification": {
"zitop": 7
}
},
"inp": {
"LABEL": "tiegcm",
"CALENDAR_ADVANCE": 1,
"NSTEP_SUB": "10",
"solar_flux_level": "low",
"SECFLDS": [
"TN",
"UN",
"VN",
"NE",
"TEC",
"POTEN",
"Z",
"ZG"
],
"ELECTRON_HEATING": 6,
"POTENTIAL_MODEL": "HEELIS",
"GPI_NCFILE": "/nobackup/nrao3/tiegcm/tiegcm3.0/data/gpi_mgii_1979001-2010212.nc",
"IMF_NCFILE": null,
"KP": null,
"POWER": null,
"CTPOTEN": null,
"BXIMF": null,
"BYIMF": null,
"BZIMF": null,
"SWDEN": null,
"SWVEL": null,
"F107": null,
"F107A": null,
"ONEWAY": false,
"AMIENH": null,
"AMIESH": null,
"AURORA": null,
"DYNAMO": null,
"CALC_HELIUM": null,
"EDDY_DIF": null,
"JOULEFAC": null,
"COLFAC": null,
"OPDIFFCAP": "2e9",
"OPDIFFRATE": "0.3",
"OPDIFFLEV": "7",
"OPFLOOR": "3000",
"OPRATE": "0.3",
"OPLEV": "7",
"OPLATWIDTH": "20",
"TE_CAP": "8000",
"TI_CAP": "8000",
"CURRENT_PG": null,
"CURRENT_KQ": null,
"ET": null,
"SAPS": null,
"DOECLIPSE": null,
"ECLIPSE_LIST": null,
"HE_COEFS_NCFILE": "/nobackup/nrao3/tiegcm/tiegcm3.0/data/he_coefs_dres.nc",
"BGRDDATA_NCFILE": null,
"CTMT_NCFILE": null,
"SABER_NCFILE": null,
"TIDI_NCFILE": null,
"TIDE": null,
"TIDE2": null,
"MIXFILE": null,
"NUDGE_NCPRE": null,
"NUDGE_NCPOST": null,
"NUDGE_NCFILE": null,
"NUDGE_FLDS": null,
"NUDGE_LBC": null,
"NUDGE_F4D": null,
"NUDGE_USE_REFDATE": null,
"NUDGE_REFDATE": null,
"NUDGE_SPONGE": null,
"NUDGE_DELTA": null,
"NUDGE_POWER": null,
"NUDGE_ALPHA": null,
"other_input": [
null
]
},
"job": {
}
}

14
docs/source/makeitso/index.rst
Normal file
@@ -0,0 +1,14 @@
Makeitso documentation
========================

This page provides links to documentation for the ``makeitso.py``, ``engage.py`` and
``makeitso-gamhelio.py`` scripts, which simplify the process of configuring
and running MAGE and GAMERA-helio applications.


.. toctree::
:maxdepth: 1

makeitso
engage
makeitso-gamhelio

179
docs/source/makeitso/makeitso-gamhelio.rst
Normal file
@@ -0,0 +1,179 @@
Makeitso-gamhelio -- Use for GAMERA-Helio
========================================================================

The Python script ``makeitso-gamhelio.py`` was developed to simplify the
process of configuring and running GAMERA-helio (that is, the inner
heliosphere application of the ``kaiju``
software). It provides an interactive, prompt-driven interface to specify all
of the parameters needed for a model run.

The ``makeitso-gamhelio.py`` script can operate in one of three different
modes: ``BASIC``, ``INTERMEDIATE``, or ``EXPERT``. Each mode provides access
to a subset of the ``kaiju`` parameters. The ``BASIC`` mode requires the user
to provide the minimum set of parameters needed to specify a model run, such
as the run ID and the simulation time period. The ``INTERMEDIATE`` mode
allows the user to specify all of the parameters from the ``BASIC`` mode,
as well as a wider set of run parameters, such as non-standard file
locations and some MHD parameters. The ``EXPERT`` mode provides access to all
of the user-adjustable parameters from the ``kaiju`` software. When finished,
the script generates the files needed to run a heliosphere model, and saves
all options in a convenient JSON file so that the run can be repeated at a
later date.


Running the ``makeitso-gamhelio.py`` script
-------------------------------------------

The ``makeitso-gamhelio.py`` script is provided as part of the ``kaiju``
software. It is found at
``$KAIJUHOME/scripts/makeitso-gamhelio/makeitso-gamhelio.py``, where
``$KAIJUHOME`` is the location of your ``kaiju`` software tree. After
configuring your ``kaiju`` software, you can get help text for the script
like this:

.. code-block:: bash

makeitso-gamhelio.py --help
usage: makeitso-gamhelio.py [-h] [--clobber] [--debug] [--mode MODE] [--options_path OPTIONS_PATH] [--verbose]

Interactive script to prepare a GAMERA heliosphere run

optional arguments:
-h, --help show this help message and exit
--clobber Overwrite existing options file (default: False).
--debug, -d Print debugging output (default: False).
--mode MODE User mode (BASIC|INTERMEDIATE|EXPERT) (default: BASIC).
--options_path OPTIONS_PATH, -o OPTIONS_PATH
Path to JSON file of options (default: None)
--verbose, -v Print verbose output (default: False).

The ``--options_path`` option allows the user to specify an existing JSON file
from a previous run of ``makeitso-gamhelio.py`` so that the entire process of
model generation can be automated. The ``--mode`` option specifies the user
mode to run in, with ``BASIC`` being the default.
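
For example, a previous session's saved options file can be replayed without
prompts. The file name here is a placeholder for the ``runid.json`` saved by
your own earlier run:

.. code-block:: bash

# helio.json is a placeholder -- use the runid.json from your earlier run.
makeitso-gamhelio.py --options_path helio.json
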
An example in ``BASIC`` mode
----------------------------

This section provides an annotated example session of ``makeitso-gamhelio.py``
running in the default ``BASIC`` mode.

.. code-block:: bash

makeitso-gamhelio.py --verbose
Name to use for PBS job(s) [helio]:

Enter an identifying string to use for your model run. This name will be used
as the basis for most of the files created by ``makeitso-gamhelio.py`` and
the ``kaiju`` software. The default name is ``helio``.

.. code-block:: bash

Path to WSA boundary condition file to use [wsa.fits]:

Enter the path to the FITS file that contains output from the WSA model for
use as the inner boundary conditions.

.. code-block:: bash

Start date for simulation (yyyy-mm-ddThh:mm:ss) [2017-07-20T05:22:47]:
Stop date for simulation (yyyy-mm-ddThh:mm:ss) [2017-08-16T12:05:59]:

Enter the start and stop date and time for the simulation.

.. code-block:: bash

Do you want to split your job into multiple segments? (Y|y|N|n) [N]:

Enter ``Y`` here if you want to split your simulation into multiple PBS jobs.
This will allow you to run long simulations that are chained together, with
each using the results of the previous job as a starting point. If you enter
``Y``, you will be prompted for the segment length (in simulated time).

.. code-block:: bash

Name of HPC system (derecho|aitken) [aitken]:

The ``makeitso-gamhelio.py`` script supports the ``derecho`` and ``aitken``
supercomputers. The selection you make here will customize the remaining
prompts for the selected system.

.. code-block:: bash

PBS account name [your_login_name]:

On ``aitken``, your login name is usable here. On ``derecho``, you will need
a PBS account ID.

.. code-block:: bash

Path to kaiju installation [YOUR_PATH_HERE]:
Path to kaiju build directory [YOUR_PATH_HERE]:

Enter the paths to the location of your ``kaiju`` code, and the location of
your ``kaiju`` build directory.

.. code-block:: bash

Run directory [.]:

Specify the directory that you wish to perform the simulation in. The
directory will contain all of the files generated by ``makeitso-gamhelio.py``.

.. code-block:: bash

PBS queue name (low|normal|long|debug|devel) [normal]:

Select a PBS queue to use on the selected supercomputer.

.. code-block:: bash

WARNING: You are responsible for ensuring that the wall time is sufficient to run a segment of your simulation!
Requested wall time for each PBS job segment (HH:MM:SS) [01:00:00]:

Specify the wall clock time to request for your job (or each segment, if you
split your job into multiple segments).

.. code-block:: bash

Number of radial grid cells [128]:
Number of polar angle grid cells [64]:
Number of azimuthal angle grid cells [128]:

Specify the number of grid cells to use in each dimension of the spherical
coordinate system used to define the simulation.

.. code-block:: bash

Running preprocessing steps.
Creating .ini file(s) for run.
Converting .ini file(s) to .xml file(s).

The script then runs several additional tools to prepare the files needed for
your simulation. This includes input files, as well as PBS job scripts for
your simulation, and a bash shell script to submit the PBS jobs.

.. code-block:: bash

Template creation complete!


Creating PBS job script(s) for run.
The PBS job scripts ['./helio-00.pbs'] are ready.
The PBS scripts ['./helio-00.pbs'] have been created, each with a corresponding XML file. To submit the jobs with the proper dependency (to ensure each segment runs in order), please run the script helio_pbs.sh like this:
bash helio_pbs.sh

When finished, the script creates the file ``runid.json``, where ``runid`` is
the identifying string for your simulation. This file contains a record of all
of the parameters used in your simulation. This file can be passed back to
``makeitso-gamhelio.py`` in a subsequent session to repeat the simulation, and
also provides a convenient starting point for minor tweaks to your simulation
parameters.


Additional parameters in ``INTERMEDIATE`` and ``EXPERT`` mode
-------------------------------------------------------------

Many more parameters are available in ``INTERMEDIATE`` and ``EXPERT`` modes.
These parameters are documented in the file ``option_descriptions.json``,
which is stored in the same directory as the ``makeitso-gamhelio.py`` script.

224
docs/source/makeitso/makeitso.rst
Normal file
@@ -0,0 +1,224 @@
Makeitso -- Use for MAGE without TIEGCM
=============================================


Introduction
------------

The Python script ``makeitso.py`` was developed to simplify the process of
configuring and running GR MAGE (that is, the geospace application of
the ``kaiju`` software without TIEGCM). It
provides an interactive, prompt-driven interface to specify all of the
parameters needed for a model run.

The ``makeitso.py`` script can operate in one of three different modes:
``BASIC``, ``INTERMEDIATE``, or ``EXPERT``. Each mode provides access to
a subset of the ``kaiju`` parameters. The ``BASIC`` mode requires
the user to provide the minimum set of parameters needed to specify a model
run, such as the run ID and the simulation time period. The ``INTERMEDIATE``
mode allows the user to specify all of the parameters from the ``BASIC`` mode,
as well as a wider set of run parameters, such as non-standard file
locations and some MHD parameters. The ``EXPERT`` mode provides access to all
of the user-adjustable parameters from the ``kaiju`` software. When finished,
the script generates the files needed to run a magnetosphere model, and saves
all options in a convenient JSON file so that the run can be repeated at a
later date.


Running the ``makeitso.py`` script
----------------------------------

The ``makeitso.py`` script is provided as part of the ``kaiju`` software. It
is found at ``$KAIJUHOME/scripts/makeitso/makeitso.py``, where ``$KAIJUHOME``
is the location of your ``kaiju`` software tree. After configuring your
``kaiju`` software, you can get help text for the script like this:

.. code-block:: bash

makeitso.py --help
usage: makeitso.py [-h] [--clobber] [--debug] [--mode MODE] [--options_path OPTIONS_PATH] [--verbose]

Interactive script to prepare a MAGE magnetosphere model run.

optional arguments:
-h, --help show this help message and exit
--clobber Overwrite existing options file (default: False).
--debug, -d Print debugging output (default: False).
--mode MODE User mode (BASIC|INTERMEDIATE|EXPERT) (default: BASIC).
--options_path OPTIONS_PATH, -o OPTIONS_PATH
Path to JSON file of options (default: None)
--verbose, -v Print verbose output (default: False).

The ``--options_path`` option allows the user to specify an existing JSON file
from a previous run of ``makeitso.py`` so that the entire process of model
generation can be automated. The ``--mode`` option specifies the user mode to
run in, with ``BASIC`` being the default.
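
For example, a previous session's saved options file can be replayed without
prompts. The file name here is a placeholder for the ``runid.json`` saved by
your own earlier run:

.. code-block:: bash

# geospace.json is a placeholder -- use the runid.json from your earlier run.
makeitso.py --options_path geospace.json
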
An example in ``BASIC`` mode
----------------------------

This section provides an annotated example session of ``makeitso.py`` running
in the default ``BASIC`` mode on the ``aitken`` supercomputer.

.. code-block:: bash

makeitso.py
Name to use for PBS job(s) [geospace]:

Enter an identifying string to use for your model run. This name will be used
as the basis for most of the files created by ``makeitso.py`` and the
``kaiju`` software. The default name is ``geospace``.

.. code-block:: bash

Do you have an existing boundary condition file to use? (Y|N) [N]:

If you already have a file containing solar wind data to use for the inner
boundary conditions of your simulation, enter ``Y``, and you will then be
prompted for the path to the file. If you don't have the file, enter ``N``
and you will be prompted for the date range to use.

.. code-block:: bash

Start date for simulation (yyyy-mm-ddThh:mm:ss) [2016-08-09T09:00:00]:
Stop date for simulation (yyyy-mm-ddThh:mm:ss) [2016-08-09T11:00:00]:

Enter the start and stop date and time for the solar wind data you want to
use. The required data will be fetched from CDAWeb, and converted into a
format usable by the ``kaiju`` software.

.. code-block:: bash

Do you want to split your job into multiple segments? (Y|N) [N]:

Enter ``Y`` here if you want to split your simulation into multiple PBS jobs.
This will allow you to run long simulations that are chained together, with
each using the results of the previous job as a starting point. If you enter
``Y``, you will be prompted for the segment length (in simulated time).

.. code-block:: bash

GAMERA grid type (D|Q|O|H) [Q]:

The codes represent double- (``D``), quad- (``Q``), oct- (``O``) and
hex- (``H``) resolutions in the LFM grid used in the ``kaiju`` software.

.. code-block:: bash

Name of HPC system (derecho|aitken) [aitken]:

The ``makeitso.py`` script supports the ``derecho`` and ``aitken``
supercomputers. The selection you make here will customize the remaining
prompts for the selected system.

.. code-block:: bash

PBS account name [your_login_name]:

On ``aitken``, your login name is usable here. On ``derecho``, you will need
a PBS account ID.

.. code-block:: bash

Run directory [.]:

Specify the directory that you wish to perform the simulation in. The
directory will contain all of the files generated by ``makeitso.py``.

.. code-block:: bash

Path to kaiju installation [YOUR_PATH_HERE]:
Path to kaiju build directory [YOUR_PATH_HERE]:

Enter the paths to the location of your ``kaiju`` code, and the location of
your ``kaiju`` build directory.

.. code-block:: bash

PBS queue name (low|normal|long|debug|devel) [normal]:

Select a PBS queue to use on the selected supercomputer.

.. code-block:: bash

You are responsible for ensuring that the wall time is sufficient
to run a segment of your simulation! Requested wall time for each PBS job
segment (HH:MM:SS) [01:00:00]:

Specify the wall clock time to request for your job (or each segment, if you
split your job into multiple segments).

.. code-block:: bash

(GAMERA) Relative path to HDF5 file containing solar wind boundary conditions [bcwind.h5]:

This is the path to your existing solar wind file, or the path that
``makeitso.py`` will use to create the file.

.. code-block:: bash

(VOLTRON) File output cadence in simulated seconds [60.0]:

How often (in simulated seconds) the ``kaiju`` software should output results
during the course of the simulation.

The script then runs several additional tools to prepare the files needed for
your simulation. This includes input files, as well as PBS job scripts for
your simulation, and a bash shell script to submit the PBS jobs.

.. code-block:: bash

Running preprocessing steps.
Generating Quad LFM-style grid ...

Output: lfmQ.h5
Size: (96,96,128)
Inner Radius: 2.000000
Sunward Outer Radius: 30.000000
Tail Outer Radius: 322.511578
Low-lat BC: 45.000000
Ring params:
<ring gid="lfm" doRing="T" Nr="8" Nc1="8" Nc2="16" Nc3="32" Nc4="32" Nc5="64" Nc6="64" Nc7="64" Nc8="64"/>

Writing to lfmQ.h5
Retrieving f10.7 data from CDAWeb
Retrieving solar wind data from CDAWeb
Using Bx fields
Bx Fit Coefficients are [-3.78792744 -0.77915822 -1.0774984 ]
Saving "OMNI_HRO_1MIN.txt_bxFit.png"
Converting to Gamera solar wind file
Found 21 variables and 120 lines
Offsetting from LFM start ( 0.00 min) to Gamera start ( 0.00 min)
Saving "OMNI_HRO_1MIN.txt.png"
Writing Gamera solar wind to bcwind.h5
Reading /glade/derecho/scratch/ewinter/cgs/aplkaiju/kaipy-private/development/kaipy-private/kaipy/rcm/dktable
Reading /glade/derecho/scratch/ewinter/cgs/aplkaiju/kaipy-private/development/kaipy-private/kaipy/rcm/wmutils/chorus_polynomial.txt
Dimension of parameters in Chorus wave model, Kp: 6 MLT: 97 L: 41 Ek: 155
Wrote RCM configuration to rcmconfig.h5
Creating .ini file(s) for run.
Converting .ini file(s) to .xml file(s).


Template creation complete!


Creating PBS job script(s) for run.
The PBS job scripts ['./geospace-00.pbs'] are ready.
The PBS scripts ['./geospace-00.pbs'] have been created, each with a corresponding XML file. To submit the jobs with the proper dependency (to ensure each segment runs in order), please run the script geospace_pbs.sh like this:
bash geospace_pbs.sh

When finished, the script creates the file ``runid.json``, where ``runid`` is
the identifying string for your simulation. This file contains a record of all
of the parameters used in your simulation. This file can be passed back to
``makeitso.py`` in a subsequent session to repeat the simulation, and also
provides a convenient starting point for minor tweaks to your simulation
parameters.


Additional parameters in ``INTERMEDIATE`` and ``EXPERT`` mode
-------------------------------------------------------------

Many more parameters are available in ``INTERMEDIATE`` and ``EXPERT`` modes.
These parameters are documented in the file ``option_descriptions.json``,
which is stored in the same directory as the ``makeitso.py`` script.

85
docs/source/misc/build_guides/Pleiades/buildPleiades_GR.rst
Normal file
@@ -0,0 +1,85 @@
Building the ``kaiju`` software on ``pleiades`` for MAGE - Without TIEGCM (GR)
==============================================================================================

Introduction
------------

This page provides instructions for building the ``kaiju`` software on the
``pleiades`` supercomputer. These instructions assume that you have cloned the
``kaiju`` repository.

Prepare your software environment
---------------------------------

Like most HPC systems, ``pleiades`` uses the ``module`` system to manage the
versions of software packages available to the user. When you log in to
``pleiades``, no modules are loaded by default:

.. code-block:: bash

module list
No Modulefiles Currently Loaded.

Start by purging any currently-loaded modules, then loading the following
module set:

.. code-block:: bash

module --force purge

module use -a /swbuild/analytix/tools/modulefiles
module load nas
module load pkgsrc/2022Q1-rome
module load comp-intel/2020.4.304
module load mpi-hpe/mpt.2.23
module load hdf5/1.8.18_mpt
module load miniconda3/v4

.. important::

You must use these exact versions of the modules to ensure the software
compiles properly. If you use different versions of any of these modules,
a successful build cannot be guaranteed. This module list is current as of
**11 April 2025**, and is subject to change as the compute environment
changes.

Build the ``kaiju`` software
----------------------------

These instructions show how to build the MPI version of the ``kaiju``
software. The MPI version is built in the subdirectory ``build_mpi``
under the ``kaiju`` source code directory. In practice, you can place the
build directory in any convenient location.

.. code-block:: bash

# Move to your kaiju clone.
cd /path/to/kaiju

# Create the build directory and enter it.
mkdir build_mpi
cd build_mpi

# Run cmake to create the Makefile, saving output.
# NOTE: The FC definition is *required* for proper cmake operation.
FC=`which ifort` cmake -DENABLE_MPI=ON .. >& cmake.out

# You can pick one compile target below or compile all of them, if you'd like

# Compile the MAGE model for geospace simulations
make -j4 voltron_mpi.x >& make-voltron.out

# Compile the GAMERA-helio model for inner heliosphere simulations
make -j4 gamhelio_mpi.x >& make-gamhelio.out

# Compile analysis tools
make -j4 calcdb.x chop.x sctrack.x slice.x >& make-analysis.out


When finished, your build directory will contain a ``bin``
subdirectory containing the compiled ``kaiju`` executables.
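
As a quick check that the build succeeded, you can list the contents of the
``bin`` subdirectory; the exact set of executables depends on which targets
you compiled:

.. code-block:: bash

# Run from the build directory (e.g. build_mpi).
ls bin
# e.g.: calcdb.x chop.x gamhelio_mpi.x sctrack.x slice.x voltron_mpi.x
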
.. note:: Documentation on the analysis tools is found
:doc:`here </tools/index>`.

396
docs/source/misc/build_guides/Pleiades/buildPleiades_GTR.rst
Normal file
396
docs/source/misc/build_guides/Pleiades/buildPleiades_GTR.rst
Normal file
@@ -0,0 +1,396 @@
|
||||
Building the ``kaiju`` software on ``pleiades`` for MAGE - With TIEGCM (GTR)
|
||||
==============================================================================================
|
||||
|
||||
Introduction
|
||||
------------
|
||||
|
||||
This page provides instructions for building the ``kaiju`` software on the
|
||||
``pleiades`` supercomputer. These instructions assume that you have cloned the
|
||||
``kaiju`` repository.
|
||||
|
||||
Prepare your software environment
|
||||
---------------------------------
|
||||
|
||||
Like most HPC systems, ``pleiades`` uses the ``module`` system to manage the
|
||||
versions of software packages available to the user. When you log in to
|
||||
``pleiades``, no modules are loaded by default:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
module list
|
||||
No Modulefiles Currently Loaded.
|
||||
|
||||
Start by purging any currently-loaded modules, then loading the following
|
||||
module set for MAGE runs coupled with TIEGCM (known as "GTR"):
|
||||
|
||||
.. warning::
|
||||
|
||||
The GTR currently required custom built NetCDF and ESMF modules on ``pleiades``. If you need to
|
||||
run GTR, you will need access to ``/home7/nrao3/local3`` and ``/nobackup/nrao3/tiegcm/tiegcm3.0/data``,
|
||||
please reach out to ``nikhilr@ucar.edu`` with the following:
|
||||
|
||||
- Your Pleiades username
|
||||
- Your Name
|
||||
- Your Institution
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
module --force purge
|
||||
|
||||
module use -a /nasa/modulefiles/testing
|
||||
module use -a /swbuild/analytix/tools/modulefiles
|
||||
module load nas
|
||||
module load comp-intel/2020.4.304
|
||||
module load mpi-hpe/mpt.2.30
|
||||
module load szip/2.1.1
|
||||
module load hdf5/1.12.3_mpt
|
||||
module load miniconda3/v4
|
||||
|
||||
export FC=mpif90
|
||||
export CC=mpicc
|
||||
export CXX=mpicxx
|
||||
|
||||
export PREFIX=/home7/nrao3/local3
|
||||
export LIBRARY_PATH=${LIBRARY_PATH}:$PREFIX/lib
|
||||
export LD_LIBRARY_PATH=$LIBRARY_PATH
|
||||
export CPATH=$PREFIX/include
|
||||
export PATH=${PATH}:$PREFIX/bin
|
||||
|
||||
.. important::
|
||||
|
||||
You must use these exact versions of the modules to ensure the software
|
||||
compiles properly. If you use different versions of any of these modules,
|
||||
a successful build cannot be guaranteed. This module list is current as of
|
||||
**11 April 2025**, and is subject to change as the compute environment
|
||||
changes.
|
||||
|
||||
Build the ``kaiju`` software
|
||||
----------------------------
|
||||
|
||||
These instructions show how to build the MPI version of the ``kaiju``
|
||||
software. The GTR version is built in the subdirectory ``build_gtr``
|
||||
under the ``kaiju`` source code directory. In practice, you can place the
|
||||
build directory in any convenient location.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
# Move to your kaiju clone.
|
||||
cd /path/to/kaiju
|
||||
|
||||
# Create the GTR build directory and enter it.
|
||||
mkdir build_gtr
|
||||
cd build_gtr
|
||||
|
||||
# Run cmake to create the Makefile, saving output.
|
||||
# NOTE: The FC definition is *required* for proper cmake operation.
|
||||
FC=`which ifort` cmake -DENABLE_MPI=ON .. >& cmake.out
|
||||
|
||||
# You can pick one compile target below or compile all of them, if you'd like
|
||||
|
||||
# Compile the MAGE model for geospace simulations
|
||||
make -j4 voltron_mpi.x >& make-voltron.out
|
||||
|
||||
# Compile the GAMERA-helio model for inner heliosphere simulations
|
||||
make -j4 gamhelio_mpi.x >& make-gamhelio.out
|
||||
|
||||
# Compile analysis tools
|
||||
make -j4 calcdb.x chop.x sctrack.x slice.x >& make-analysis.out
|
||||
|
||||
.. warning::
|
||||
|
||||
Pleiades faces issues while loading the ``hdf5/1.12.3_mpt`` module at times.
|
||||
If you aren't able to build ``kaiju`` with the above module set, unload
|
||||
the ``hdf5/1.12.3_mpt`` module and load it again:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
module unload hdf5/1.12.3_mpt
|
||||
module load hdf5/1.12.3_mpt
|
||||
|
||||
|
||||
When finished, your build directory will contain a ``bin``
|
||||
subdirectory which will contain the compiled ``kaiju`` executables.
|
||||
|
||||
|
||||
Build the ``tiegcm`` software
|
||||
-----------------------------
|
||||
|
||||
`TIEGCM <https://tiegcm-docs.readthedocs.io/>`_ is a comprehensive, first-principles, three-dimensional,
|
||||
non-linear representation of the coupled thermosphere and ionosphere system that includes a self-consistent solution
|
||||
of the middle and low-latitude dynamo field.
|
||||
|
||||
Getting the TIE-GCM source code
|
||||
************************************************
|
||||
|
||||
The ``TIE-GCM`` source code can be obtained by cloning the ``TIE-GCM`` repository
|
||||
on GitHub:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
git clone https://github.com/NCAR/tiegcm.git
|
||||
|
||||
Setting environment variables
|
||||
************************************************
|
||||
|
||||
Add paths to ``TIEGCMHOME`` and ``TIEGCMDATA``.
|
||||
For example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
export TIEGCMHOME=/path/to/your/tiegcm
|
||||
export TIEGCMDATA=/path/to/your/tiegcm/data
|
||||
|
||||
.. note::
|
||||
|
||||
The ``TIEGCMHOME`` and ``TIEGCMDATA`` environment variables are required
|
||||
for running the GTR model. They should point to the TIEGCM source code
|
||||
directory and the TIEGCM data directory, respectively.
|
||||
|
||||
The TIEGCMDATA directory is located in the following locations:
|
||||
- On ``derecho``: ``/glade/campaign/hao/itmodel/tiegcm3.0/new_data``
|
||||
- On ``pleiades``: ``/nobackup/nrao3/tiegcm/tiegcm3.0/data``
|
||||
- The required data files can be downloaded from the NCAR Globus endpoint using the following link: `TIEGCM Data Files <https://app.globus.org/file-manager?origin_id=b2502c58-c3eb-470f-86d4-cbdcd0aeb6c8&origin_path=%2F>`_
|
||||
|
||||
|
||||
Resolution guide for TIEGCM
************************************************

Two TIEGCM executables are required for running the GTR model:

- TIEGCM Standalone
  This is the TIEGCM code that runs independently and is used for initialization of the model.
- TIEGCM Coupled
  This is the TIEGCM code that runs in coupled mode with the GTR model, providing
  real-time updates to the thermosphere and ionosphere conditions during the simulation.

Depending on the Gamera resolution, you will need to compile TIEGCM executables at different resolutions:

- For a ``D`` run

  - TIEGCM Standalone: horires = 2.5, vertres = 0.25 (1/4), mres = 2
  - TIEGCM Coupled: horires = 2.5, vertres = 0.25 (1/4), mres = 2

- For a ``Q`` run

  - TIEGCM Standalone: horires = 2.5, vertres = 0.25 (1/4), mres = 2
  - TIEGCM Coupled: horires = 1.25, vertres = 0.125 (1/8), mres = 1

- For an ``O`` run

  - TIEGCM Standalone: horires = 1.25, vertres = 0.125 (1/8), mres = 1
  - TIEGCM Coupled: horires = 0.625, vertres = 0.0625 (1/16), mres = 0.5

The TIEGCM code is built using the ``tiegcmrun`` script, which is located in
the ``tiegcm/tiegcmrun`` directory of the ``tiegcm`` repository. More
information on ``tiegcmrun.py`` can be found in the
`TIEGCM Quick Start Guide <https://tiegcm-docs.readthedocs.io/en/latest/tiegcm/quickstart.html>`_.

.. important::

   Make sure to load the modules listed in the ``kaiju`` build instructions
   before running the ``tiegcmrun`` script.

Build guide for TIEGCM code for a ``Q`` run:
#########################################################################################

We will use the ``tiegcmrun`` script to build the code, which requires a
minimal amount of input from the user. At each prompt, you can either type in
a value or hit the :kbd:`Return` key to accept the default value (shown in
square brackets at the end of the prompt).

1. First we will create a directory for the "Q" TIEGCM build in the TIEGCMHOME directory.

   .. code-block:: bash

      cd $TIEGCMHOME
      mkdir tiegcm_build_Q
      cd tiegcm_build_Q

2. Next, we will build the standalone TIEGCM executable by running the ``tiegcmrun.py`` script with the ``-oc`` option.

   .. note::

      The ``-oc`` option stands for "only compile", which means that the script will only compile the code, not run it.
      Since the Gamera resolution is ``Q``, we will set the following:

      - horizontal resolution for the standalone TIEGCM to 2.5 degrees
      - vertical resolution to 0.25 (1/4) scale height
      - magnetic grid resolution to 2 degrees

   .. code-block:: bash

      $TIEGCMHOME/tiegcmrun/tiegcmrun.py -oc
      Instructions:
      -> Default Selected input parameter is given in GREEN
      -> Warnings and Information are given in YELLOW
      -> Errors are given in RED
      -> Valid values (if any) are given in brackets eg. (value1 | value2 | value3)
      -> Enter '?' for any input parameter to get a detailed description

      Run Options:
      User Mode = BASIC
      Compile = True
      Execute = False
      Coupling = False

      Name of HPC system (derecho|pleiades|linux) [pleiades]:
      Standalone Executable [/glade/derecho/scratch/nikhilr/tiegcm_build_Q/exec/tiegcm.exe]:
      Horizontal Resolution (Deg) (5.0|2.5|1.25|0.625) [2.5]:
      Vertical Resolution (Scale Height) (1/2|1/4|1/8|1/16) [1/4]:
      Magnetic grid resolution (Degree) (2|1|0.5) [2]:

   After these inputs, the script will compile the TIEGCM code and create the standalone executable. The output should end with something like this:

   .. code-block:: bash

      ..
      ..
      gmake[1]: Leaving directory '/nobackup/nrao3/tiegcm/tiegcm_build_Q/exec'
      Executable copied from /nobackup/nrao3/tiegcm/tiegcm_build_Q/exec/tiegcm.exe to /nobackup/nrao3/tiegcm/tiegcm_build_Q/stdout

3. Next, we will build the coupled TIEGCM executable by running the ``tiegcmrun.py`` script with the ``-oc`` and ``-co`` options.

   .. note::

      The ``-co`` option stands for "coupled", which means that the script will compile the code for the coupled TIEGCM executable.
      Since the Gamera resolution is ``Q``, we will set the following:

      - horizontal resolution for the coupled TIEGCM to 1.25 degrees
      - vertical resolution to 0.125 (1/8) scale height
      - magnetic grid resolution to 1 degree

   .. code-block:: bash

      $TIEGCMHOME/tiegcmrun/tiegcmrun.py -oc -co
      Instructions:
      -> Default Selected input parameter is given in GREEN
      -> Warnings and Information are given in YELLOW
      -> Errors are given in RED
      -> Valid values (if any) are given in brackets eg. (value1 | value2 | value3)
      -> Enter '?' for any input parameter to get a detailed description

      Run Options:
      User Mode = BASIC
      Compile = True
      Execute = False
      Coupling = True

      Name of HPC system (derecho|pleiades|linux) [pleiades]:
      Coupled Executable [/glade/derecho/scratch/nikhilr/tiegcm_build/exec/tiegcm.x]:
      Horizontal Resolution (Deg) (5.0|2.5|1.25|0.625) [2.5]: 1.25
      Vertical Resolution (Scale Height) (1/2|1/4|1/8|1/16) [1/8]:
      Magnetic grid resolution (Degree) (2|1|0.5) [1]:

   After these inputs, the script will compile the TIEGCM code and create the coupled executable. The output should end with something like this:

   .. code-block:: bash

      ..
      ..
      gmake[1]: Leaving directory '/nobackup/nrao3/tiegcm/tiegcm_build_Q/exec'
      Executable copied from /nobackup/nrao3/tiegcm/tiegcm_build_Q/exec/tiegcm.x to /nobackup/nrao3/tiegcm/tiegcm_build_Q/stdout

4. You should now see the following files in your run directory:

   .. code-block:: bash

      ls
      exec  hist  stdout

   The executables, along with the captured stdout files, are located in the ``stdout`` directory:

   .. code-block:: bash

      ls stdout
      defs.h  tiegcm.exe  tiegcm.x

Build guide for TIEGCM code for a ``D`` run on ``derecho``:
#########################################################################################

1. First we will create a directory for the "D" TIEGCM build in the TIEGCMHOME directory.

   .. code-block:: bash

      cd $TIEGCMHOME
      mkdir tiegcm_build_D
      cd tiegcm_build_D

2. Next, we will build the standalone TIEGCM executable by running the ``tiegcmrun.py`` script with the ``-oc`` option.

   .. note::

      Since the Gamera resolution is ``D``, we will set the following:

      - horizontal resolution for the standalone TIEGCM to 2.5 degrees
      - vertical resolution to 0.25 (1/4) scale height
      - magnetic grid resolution to 2 degrees

   .. code-block:: bash

      $TIEGCMHOME/tiegcmrun/tiegcmrun.py -oc

3. Next, we will build the coupled TIEGCM executable by running the ``tiegcmrun.py`` script with the ``-oc`` and ``-co`` options.

   .. note::

      Since the Gamera resolution is ``D``, we will set the following:

      - horizontal resolution for the coupled TIEGCM to 2.5 degrees
      - vertical resolution to 0.25 (1/4) scale height
      - magnetic grid resolution to 2 degrees

   .. code-block:: bash

      $TIEGCMHOME/tiegcmrun/tiegcmrun.py -oc -co

4. The executables, along with the captured stdout files, are located in the ``stdout`` directory:

   .. code-block:: bash

      ls stdout
      defs.h  tiegcm.exe  tiegcm.x

Build guide for TIEGCM code for an ``O`` run on ``derecho``:
#########################################################################################

1. First we will create a directory for the "O" TIEGCM build in the TIEGCMHOME directory.

   .. code-block:: bash

      cd $TIEGCMHOME
      mkdir tiegcm_build_O
      cd tiegcm_build_O

2. Next, we will build the standalone TIEGCM executable by running the ``tiegcmrun.py`` script with the ``-oc`` option.

   .. note::

      Since the Gamera resolution is ``O``, we will set the following:

      - horizontal resolution for the standalone TIEGCM to 1.25 degrees
      - vertical resolution to 0.125 (1/8) scale height
      - magnetic grid resolution to 1 degree

   .. code-block:: bash

      $TIEGCMHOME/tiegcmrun/tiegcmrun.py -oc

3. Next, we will build the coupled TIEGCM executable by running the ``tiegcmrun.py`` script with the ``-oc`` and ``-co`` options.

   .. note::

      Since the Gamera resolution is ``O``, we will set the following:

      - horizontal resolution for the coupled TIEGCM to 0.625 degrees
      - vertical resolution to 0.0625 (1/16) scale height
      - magnetic grid resolution to 0.5 degrees

   .. code-block:: bash

      $TIEGCMHOME/tiegcmrun/tiegcmrun.py -oc -co

4. The executables, along with the captured stdout files, are located in the ``stdout`` directory:

   .. code-block:: bash

      ls stdout
      defs.h  tiegcm.exe  tiegcm.x

.. note:: Documentation on the analysis tools can be found
   :doc:`here </tools/index>`.

docs/source/misc/build_guides/Pleiades/pleiades.rst
@@ -0,0 +1,66 @@

Creating a Python environment for ``kaiju`` on ``pleiades``
=============================================================

Introduction
------------

This file describes how to set up a python ``conda`` environment on
``pleiades`` which can be used to run the ``kaiju`` code.

These steps assume that ``pip`` (only) is used for installing additional
packages, and packages are only taken from PyPI.

These instructions assume you are using the ``bash`` shell.

Building the python environment
-------------------------------

To create a Python environment for ``kaiju``, we first install/load the
Conda software package, then create the environment, then populate it with
the required Python packages.

.. note:: These instructions are designed for novice users. If you are
   comfortable with building and managing ``conda``-based environments, feel
   free to build your own environment using the NAS-provided
   `conda <https://www.nas.nasa.gov/hecc/support/kb/managing-and-installing-python-packages-in-conda-environments_627.html>`_
   software.

.. code-block:: bash

   # Load the miniconda3 module.
   module use -a /swbuild/analytix/tools/modulefiles
   module load miniconda3/v4

   # Now create the environment for kaiju, specifying only the python
   # version.
   conda create -n kaiju-3.12 python=3.12

   # Activate the new environment.
   conda activate kaiju-3.12

   # Install the kaipy software.
   # If you are using kaipy via pip:
   pip install kaipy
   # OR
   # If you are using a clone of the kaipy repository:
   cd /path/to/kaipy
   pip install -e .
   # -e installs the package in editable mode, which allows you to make
   # changes to the code and have them reflected without needing to
   # reinstall the package.

Using the python environment
----------------------------

Once your python environment is created, you must activate it for use with the
``kaiju`` software:

.. code-block:: bash

   conda activate kaiju-3.12

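To confirm that the environment is active and that ``kaipy`` is visible to
it, a quick sanity check:

.. code-block:: bash

   # Should print the python version used to create the environment (3.12).
   python --version

   # Should report the installed kaipy version and its location.
   pip show kaipy
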
@@ -0,0 +1,57 @@
Building the CDF library on CentOS-Stream 9
===========================================

Introduction
------------

This file describes how to build and install the CDF library on CentOS-Stream 9 for use with the ``kaiju`` software.

**NOTE**: A CentOS-Stream 9-specific subdirectory of the user home directory is used for this build.

Building and installing the CDF library
---------------------------------------

.. code-block:: shell

   # Specify the name for this system.
   host_system=centos-stream-9

   # Specify and create the root of the build tree.
   build_root=$HOME/$host_system/local/cdf/3.9.0
   mkdir -p $build_root/src
   cd $build_root/src

   # Download the source tarball.
   wget https://spdf.gsfc.nasa.gov/pub/software/cdf/dist/cdf39_0/linux/cdf39_0-dist-all.tar.gz

   # Unpack the source code.
   tar xzvf cdf39_0-dist-all.tar.gz

   # Move into the code directory.
   cd cdf39_0-dist

   # Build the library using the default system GNU compiler.
   date; time make OS=linux ENV=gnu CURSES=no all >& make.out; date
   # Took 0m32.670s

   # Test the library.
   date; time make test >& make_test.out; date
   # Took 0m0.392s

   # Install the library in a version-specific subdirectory.
   date; time make INSTALLDIR=$build_root install >& make_install.out; date
   # Took 0m1.691s

   # Clean the build tree.
   date; time make clean >& make_clean.out; date
   # Took 0m0.378s

Using the CDF library
---------------------

To use this software, you must run the setup script:

.. code-block:: shell

   source $build_root/bin/definitions.B

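Assuming the script ran cleanly, it defines the CDF environment variables
(such as ``CDF_BASE``) and adds the CDF command-line tools to your path; a
quick sanity check:

.. code-block:: shell

   # CDF_BASE should point at the installation root chosen above.
   echo $CDF_BASE
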
@@ -0,0 +1,74 @@
Building and installing the HDF5 library on CentOS-Stream 9
===========================================================

Introduction
------------

This file describes how to build and install HDF5 on an Intel-based system running CentOS-Stream 9 for use with the ``kaiju`` software.

These instructions assume that the Intel compiler suite has already been installed. If HDF5 is built with gcc, it will not link with the ``kaiju`` code, which is built with the Intel compiler.

Building and installing the HDF5 library
----------------------------------------

.. code-block:: shell

   # Specify the name for this machine.
   host_system=centos-stream-9

   # Specify and create the root of the build tree.
   build_root=$HOME/$host_system/local/hdf5/1.14.1-2
   mkdir -p $build_root/src
   cd $build_root/src

   # Download the source tarball.
   wget https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.14/hdf5-1.14.1/src/hdf5-1.14.1-2.tar.gz

   # Unpack the source code.
   tar xzvf hdf5-1.14.1-2.tar.gz

   # Move into the code directory.
   cd hdf5-1.14.1-2

   # Configure the Intel compilers.
   source /opt/intel/oneapi/setvars.sh

   # Configure the library with Fortran support.
   date; time \
   ./configure \
   FC=ifort \
   --prefix=$build_root \
   --enable-fortran \
   >& configure.out; date
   # Took 0m39.463s

   # Compile the library.
   date; time make >& make.out; date
   # Took 9m29.899s

   # Test the library.
   date; time make check >& make_check.out; date
   # Took 10m8.337s

   # Install the library.
   date; time make install >& make_install.out
   # Took 0m5.598s

   # Clean the build tree.
   date; time make clean >& make_clean.out
   # Took 0m3.342s

Using the HDF5 library
----------------------

To use the HDF5 library, you must set environment variables:

.. code-block:: shell

   export HDF5_DIR=$HOME/centos-stream-9/local/hdf5/1.14.1-2
   export PATH=$HDF5_DIR/bin:$PATH
   export HDF5_INCLUDE_DIRS=$HDF5_DIR/include
   export HDF5_LIBRARIES=$HDF5_DIR/lib
   export CPATH=$HDF5_INCLUDE_DIRS
   export INCLUDE="-I$HDF5_INCLUDE_DIRS"

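With these variables set, the HDF5 tools and compiler wrappers from this
build should be the ones found first on your path; a quick check:

.. code-block:: shell

   # Should resolve to $HDF5_DIR/bin/h5fc.
   which h5fc
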
@@ -0,0 +1,221 @@
Compiling the ``kaiju`` software on CentOS-Stream 9
=======================================================

Introduction
------------

These instructions will walk you through the process of building and installing the ``kaiju`` software on a CentOS-Stream 9 system.

These instructions assume that the user is using the ``bash`` shell, and that no modifications have been made to the user "dotfiles" (``$HOME/.bashrc``, ``$HOME/.bash_profile``). If you have customized either of these files for your account, please carefully save and inspect the output from each command in the build process to ensure that no unexpected problems have crept in. To facilitate this practice, all of the commands shown below will illustrate how to save command output, and how to measure how long each step takes. The latter is a useful bit of information which can help identify build problems early in the process, avoiding much wasted time and effort later.

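Throughout these instructions, that practice takes the following form
(shown here with a hypothetical placeholder command):

.. code-block:: shell

   # Timestamp, run and time the command while capturing all of its output
   # to a file, then timestamp again.
   date; time some_command >& some_command.out; date
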
In the instructions below, all code will be built and installed in a CentOS-Stream 9-specific subdirectory of the user home directory, i.e. ``$HOME/centos-stream-9``. This particular organization is not required - it is intended as an example of one possible way to segregate software that has been built for multiple systems.

Building the serial version of the ``kaiju`` software on CentOS-Stream 9
----------------------------------------------------------------------------

Step 1: Configure build tools
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Begin by configuring tools and libraries needed for the build:

.. code-block:: shell

   # Add the Intel compilers to PATH.
   source /opt/intel/oneapi/setvars.sh

   # Set variables for HDF5.
   export HOST_SYSTEM=centos-stream-9
   export HDF5_DIR=$HOME/$HOST_SYSTEM/local/hdf5/1.14.1-2
   export PATH=$HDF5_DIR/bin:$PATH
   export HDF5_INCLUDE_DIRS=$HDF5_DIR/include
   export HDF5_LIBRARIES=$HDF5_DIR/lib
   export CPATH=$HDF5_INCLUDE_DIRS
   export INCLUDE="-I$HDF5_INCLUDE_DIRS"

   # Configure CDF.
   source $HOME/$HOST_SYSTEM/local/cdf/3.9.0/bin/definitions.B

Step 2: Create the build directory
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Create a system-specific build directory.

.. code-block:: shell

   cd $HOME
   mkdir -p $HOST_SYSTEM
   cd $HOST_SYSTEM

Then make an additional subdirectory level for the branch of the code you are building (the ``development`` branch is used as an example). This arrangement is useful when you need to maintain simultaneous builds of different branches.

.. code-block:: shell

   export KAIJU_BRANCH_NAME=development
   mkdir $KAIJU_BRANCH_NAME
   cd $KAIJU_BRANCH_NAME

Step 3: Clone the ``kaiju`` repository from BitBucket
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

NOTE: This step assumes you have been granted access to the ``kaiju`` repository on BitBucket, and that you have configured an SSH key pair for use with BitBucket. If you need help with these tasks, please contact a CGS team member for assistance.

Clone the ``kaiju`` repository (or "repo") from BitBucket:

.. code-block:: shell

   git clone git@bitbucket.org:aplkaiju/kaiju.git

This process should take a minute or so. When complete, verify that the ``kaiju`` code exists in your directory (the actual directory contents may differ slightly from what is shown below):

.. code-block:: shell

   ls kaiju
   analysis  cmake  CMakeLists.txt  examples  external  gitHookScripts  kaiju.sublime-project  kaipy  places  pytests  quickstart  README.md  scripts  setup.py  src  testingScripts  tests  xml

Now move down into the cloned repo, and switch to the branch of the code you wish to use. By default, the cloned repository provides the ``master`` branch, but we want the ``development`` branch:

.. code-block:: shell

   cd kaiju
   git switch $KAIJU_BRANCH_NAME

Step 4: Run ``cmake`` to create the ``Makefile`` needed to build the software
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Since the ``kaiju`` code can be built in serial and MPI forms, we first make a directory in which to build the serial version of the code (use whatever name you prefer, but ``build_serial`` is simple and unambiguous):

.. code-block:: shell

   # KAIJU_HOME should point to the kaiju clone from Step 3.
   export KAIJU_BUILD_NAME=build_serial
   KAIJU_BUILD_PATH=$KAIJU_HOME/$KAIJU_BUILD_NAME
   mkdir -p $KAIJU_BUILD_PATH
   cd $KAIJU_BUILD_PATH

Now run the ``cmake`` command. Save the ``cmake`` output, and use timestamps for each step. The options shown below direct the build process to use a recent version of the Intel Fortran compiler:

.. code-block:: shell

   date; time FC=`which ifort` cmake -DALLOW_INVALID_COMPILERS=ON .. >& cmake.out; date

This command usually takes 2-3 seconds, depending on system activity. Examine the output file ``cmake.out`` for problems. It *should* look something like this:

.. code-block::

   -- The Fortran compiler identification is Intel 2021.10.0.20230609
   -- Detecting Fortran compiler ABI info
   -- Detecting Fortran compiler ABI info - done
   -- Check for working Fortran compiler: /opt/intel/oneapi/compiler/2023.2.0/linux/bin/intel64/ifort - skipped
   -- Checking whether /opt/intel/oneapi/compiler/2023.2.0/linux/bin/intel64/ifort supports Fortran 90
   -- Checking whether /opt/intel/oneapi/compiler/2023.2.0/linux/bin/intel64/ifort supports Fortran 90 - yes
   -- Found HDF5: /home/ewinter/centos-stream-9/local/hdf5/1.14.1-2/lib/libhdf5_fortran.so;/home/ewinter/centos-stream-9/local/hdf5/1.14.1-2/lib/libhdf5.so;/usr/lib64/libz.so;/usr/lib64/libdl.a;/usr/lib64/libm.so (found version "1.14.1-2") found components: Fortran
   -- Found OpenMP_Fortran: -qopenmp (found version "5.0")
   -- Found OpenMP: TRUE (found version "5.0") found components: Fortran
   CMake Warning at cmake/compilers.cmake:61 (message):
     Setting default optimization to O2 to avoid certain Intel compiler bugs
   Call Stack (most recent call first):
     CMakeLists.txt:99 (include)

   -------------------------
   Configuration summary ...
   System: localhost.localdomain
   OS: Linux
   Processor: x86_64
   Compiler: Intel / 2021.10.0.20230609
   /opt/intel/oneapi/compiler/2023.2.0/linux/bin/intel64/ifort
   HDF5 Wrapper: /home/ewinter/centos-stream-9/local/hdf5/1.14.1-2/bin/h5fc
   Version: 3f4e147c / development
   Build Type: Release
   Base Flags: -fPIC -free -implicitnone -qopenmp
   Build Flags: -O2 -align array64byte -align rec32byte -no-prec-div -fast-transcendentals -ipo
   -------------------------

   Adding CHIMP module ...
   EB IC file is /home/ewinter/centos-stream-9/cgs/kaiju/development/kaiju/src/chimp/ebICs/ebICstd.F90
   TP IC file is /home/ewinter/centos-stream-9/cgs/kaiju/development/kaiju/src/chimp/tpICs/tpICstd.F90
   Adding executable project.x
   Adding executable psd.x
   Adding executable push.x
   Adding executable slice.x
   Adding executable chop.x
   Adding executable trace.x
   Adding executable sctrack.x
   Adding executable calcdb.x
   Adding executable wpicheck.x
   Adding Gamera module ...
   Bricksize is 16
   IC file is /home/ewinter/centos-stream-9/cgs/kaiju/development/kaiju/src/gamera/ICs/null.F90
   Adding executable gamera.x
   Adding Gamera Helio module ...
   IC file is /home/ewinter/centos-stream-9/cgs/kaiju/development/kaiju/src/gamera/ICs/helio/wsa.F90
   Adding executable gamhelio.x
   Adding ReMIX module ...
   Adding executable remix.x
   Adding RCM module ...
   RCM Grid is of size 180 x 361 x 160
   Adding executable rcm.x
   Adding Voltron module ...
   IC file is /home/ewinter/centos-stream-9/cgs/kaiju/development/kaiju/src/voltron/ICs/earthcmi.F90
   Adding executable voltron.x
   -- Configuring done
   -- Generating done
   -- Build files have been written to: /home/ewinter/centos-stream-9/cgs/kaiju/development/kaiju/build_serial

Step 5: Compile the ``kaiju`` software
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Now use ``make`` to build the ``kaiju`` software, time-stamping and saving the output:

.. code-block:: shell

   date; time make >& make.out; date

This command should complete in about 8-9 minutes on a CentOS-Stream 9 system running on an i9 processor. When the command is finished, check the output file ``make.out``. The file is long, but the last few lines should look something like this:

.. code-block::

   [ 99%] Linking Fortran executable bin/sctrack.x
   [ 99%] Built target sctrack.x
   [ 99%] Built target chimp
   [ 99%] Built target voltron
   Scanning dependencies of target remix.x
   [100%] Building Fortran object CMakeFiles/remix.x.dir/src/drivers/remix.F90.o
   [100%] Linking Fortran executable bin/remix.x
   [100%] Built target remix.x
   [100%] Built target remix
   [100%] Built target gamera

To verify that all of the ``kaiju`` programs have been built, examine the ``bin`` subdirectory of your ``build_serial`` directory (this list will evolve as more programs are added):

.. code-block:: shell

   ls bin
   calcdb.x  gamhelio.x  psd.x  remix2rcm.x  sctrack.x  voltron.x
   chop.x  kaitoy.x  push.x  remix2remix.x  slice.x  wpicheck.x
   gamera.x  project.x  rcm.x  remix.x  trace.x

Using the ``kaiju`` software
--------------------------------

Once built, you must run the setup script before using the ``kaiju`` software:

.. code-block:: shell

   source $KAIJU_HOME/scripts/setupEnvironment.sh

This script will set environment variables needed by the ``kaiju`` software, including the ``KAIJUHOME`` environment variable (not the ``KAIJU_HOME`` environment variable). However, the path to the compiled programs is not added; you will need to specify the complete path when using compiled programs. For example, to run the serial version of ``gamera.x``:

.. code-block:: shell

   $KAIJUHOME/build_serial/bin/gamera.x

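If you prefer not to type the full path each time, you can optionally add
the build's ``bin`` directory to your ``PATH`` yourself (the setup script
does not do this for you):

.. code-block:: shell

   export PATH=$KAIJUHOME/build_serial/bin:$PATH
   gamera.x  # now resolves without the full path
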
@@ -0,0 +1,98 @@
Building a python environment for ``kaiju`` on CentOS-Stream 9
==================================================================

Introduction
------------

This file describes how to set up a python 3.8-based ``conda`` environment on an Intel-based machine running CentOS-Stream 9 which can be used to run the ``kaiju`` code.

These steps assume that ``pip`` (only) is used for installing additional packages, and packages are only taken from PyPI (and the default ``conda`` repository, if needed) - no other repositories are used.

*A NOTE OF EXPLANATION:* These instructions install ``miniconda3`` into a CentOS-Stream 9-specific subdirectory of the home directory, to maintain compatibility with instructions for other systems. Feel free to install ``miniconda3`` wherever is convenient.

Building the python environment
-------------------------------

.. code-block:: shell

   # Download the installer.
   # NOTE: Use the Linux installer, since this is a CentOS-Stream 9 system.
   cd $HOME
   wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh

   # Run the installer.
   # Install into $HOME/centos-stream-9/miniconda3, use all defaults.
   bash ./Miniconda3-latest-Linux-x86_64.sh

   # NOTE: This installation *should* run "conda init", which *should*
   # add the conda setup code to ~/.bashrc or ~/.bash_profile.

   # NOTE: This installation creates ~/.conda/environments.txt, but
   # nothing else. This file just contains the path to the miniconda3
   # installation directory.

   # Turn off auto-activation of base environment.
   # NOTE: This command creates ~/.condarc.
   conda config --set auto_activate_base false

   # Update everything to the latest version.
   conda update --all

   # NOTE: This creates the directory ~/.cache/conda.

   # Now create the environment for kaiju, specifying only the python version.
   conda create -n kaiju-3.8 python=3.8

   # NOTE: This adds the path to the new environment in
   # ~/.conda/environments.txt.

   # Activate the new environment.
   conda activate kaiju-3.8

   # Update everything to the latest version.
   conda update --all

   # IMPORTANT: Use a pip cache specific to this machine.
   export PIP_CACHE_DIR=$HOME/centos-stream-9/pip_cache

   # Make sure CDF, HDF5, and GEOS have been built and installed.

   # Install packages required by kaipy.
   pip install -r requirements.txt

   # Save the environment description.
   conda list >& kaiju-3.8-centos-stream-9-YYYYMMDD.env

Where ``requirements.txt`` is a text file containing:

.. code-block::

   ai.cdas
   alive_progress
   cartopy
   cdasws
   cdflib
   configparser
   dataclasses_json
   h5py
   jinja2
   matplotlib
   netCDF4
   progressbar
   pyhdf
   pytest
   scipy
   spacepy
   sunpy
   xarray

Using the python environment
----------------------------

Once your python environment is created, you must activate it for use with the ``kaiju`` software:

.. code-block:: shell

   conda activate kaiju-3.8  # or whatever name you used for the python environment

docs/source/misc/build_guides/centos-stream-9/index.rst
@@ -0,0 +1,80 @@

Building the ``kaiju`` software on CentOS-Stream 9
==================================================

Introduction
------------

This page provides instructions for building the ``kaiju`` software on an Intel-based CentOS-Stream 9 system. These instructions were developed using a CentOS-Stream 9 virtual machine running under VirtualBox on a Mac, but they should work for other Linux distributions, although minor changes may be needed, especially if your Linux distribution does not use the ``yum`` package manager.

NOTE: These instructions should also work, essentially unchanged, on a RHEL (Red Hat Enterprise Linux) system.

Step 0: Prepare your system
---------------------------

A few tools must be installed on your system before you begin building the ``kaiju`` software. These instructions assume you have permission to use ``sudo`` to install software.

.. code-block:: shell

   sudo yum install epel-release
   sudo yum install make
   sudo yum install gcc
   sudo yum install cmake
   sudo yum install git
   sudo yum install git-lfs
   sudo yum install geos-devel

Install the Intel compiler suite
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The ``kaiju`` software is developed using the Intel compiler suite, available `here <https://www.intel.com/content/www/us/en/developer/tools/oneapi/toolkits.html#gs.29x43u>`_. The compiler suite is free, but comes in several optional parts. You will need to install the `Intel oneAPI Base Toolkit <https://www.intel.com/content/www/us/en/developer/tools/oneapi/base-toolkit-download.html>`_ and the `Intel oneAPI HPC Toolkit <https://www.intel.com/content/www/us/en/developer/tools/oneapi/hpc-toolkit-download.html>`_. Once installed, this software should be available on your system under ``/opt/intel``.

Once the Intel tools are installed, you will need to make them available during the build process. Set up the tools as follows:

.. code-block:: shell

   source /opt/intel/oneapi/setvars.sh

Step 1: Build prerequisite libraries
------------------------------------

NASA CDF library
^^^^^^^^^^^^^^^^

The `NASA CDF (Common Data Format) library <https://cdf.gsfc.nasa.gov/>`_ is used in parts of the ``kaiju`` post-processing software when fetching spacecraft data from `CDAWeb <https://cdaweb.gsfc.nasa.gov/>`_. Prior to building the ``kaiju`` software, the CDF library must be built and installed on your system, since it is not available via the ``yum`` package manager.

Instructions for building and installing the CDF library on CentOS-Stream 9 are available `here <centos-stream-9_build_cdf>`__.

HDF5 library
^^^^^^^^^^^^

The `HDF5 <https://www.hdfgroup.org/solutions/hdf5/>`_ file format is used extensively for data storage in the ``kaiju`` software. The HDF5 software is available via ``yum``\ , but this version will not link with the ``kaiju`` software when built with the Intel compiler suite. Prior to building the ``kaiju`` software, the HDF5 library must be built and installed on your system.

Instructions for building and installing the HDF5 library on CentOS-Stream 9 are available `here <centos-stream-9_build_hdf5>`__.

NOTE: If you successfully build the ``kaiju`` software using the ``yum``\ -installed version of HDF5 on CentOS-Stream 9, please let us know how you did it!

Step 2: Create a python environment
-----------------------------------

Most of the ``kaiju`` software for pre-processing, post-processing, and analysis is written in `Python <https://www.python.org/>`_. Python is available in many forms (or 'distributions'), but we recommend use of the `Miniconda distribution <https://docs.conda.io/en/latest/miniconda.html>`_ for simplicity and compactness.

Instructions for installing python and building a python environment on CentOS-Stream 9 are available `here <centos-stream-9_build_python>`__.

Step 3: Compile the ``kaiju`` software
------------------------------------------

The ``kaiju`` software can be built in serial or MPI versions. The serial version should be used when running the code on a single computer. The MPI version should be used when running on an HPC system (a supercomputer).

Instructions for building the serial version of the ``kaiju`` software on CentOS-Stream 9 are available `here <centos-stream-9_build_kaiju>`__. The MPI version of ``kaiju`` is not supported on CentOS-Stream 9 at this time.

.. toctree::
   :maxdepth: 1

   centos-stream-9_build_cdf
   centos-stream-9_build_hdf5
   centos-stream-9_build_kaiju
   centos-stream-9_build_python

docs/source/misc/build_guides/generic/build_cdf.rst
@@ -0,0 +1,115 @@

Building the NASA CDF (Common Data Format) library
==================================================

The CDF library is required by SpacePy for full functionality. SpacePy is installed as part of the process of setting up your Python environment, which is described `here <./install_python.md>`_. If your system already provides the CDF library, feel free to use that version instead of building the code from source.

MacOS
-----

These instructions presume the use of MacOS Ventura. The instructions should also work for other versions of MacOS. These instructions illustrate the steps required to build and install version 3.9.0 of the CDF library. Modify these instructions as appropriate if you use a different version.

This procedure will use the default system-provided C compiler (``/usr/bin/gcc``).

This procedure builds and installs the code under ``$HOME/local``, creating a user-only installation. Substitute your desired build and installation locations for these paths as appropriate.

.. code-block:: shell

   # Make a build tree.
   cd $HOME
   mkdir -p local/cdf/3.9.0/src
   cd local/cdf/3.9.0/src

   # Download the source tarball.
   # NOTE: Source tarball is in a linux directory, not Mac.
   wget https://spdf.gsfc.nasa.gov/pub/software/cdf/dist/latest/linux/cdf39_0-dist-all.tar.gz

   # Unpack the source code.
   tar xzvf cdf39_0-dist-all.tar.gz

   # Build the library using the Apple compiler for Intel hardware.
   cd cdf39_0-dist
   date; time make OS=macosx ENV=x86_64 all >& make.out
   # Took about 22 s on a MacBook Pro i9/2019.
   # Examine make.out for errors.

   # Test the library.
   date; time make test >& make_test.out
   # Took about 5 s on a MacBook Pro i9/2019.
   # Examine make_test.out for errors.

   # Install the library in a version-specific subdirectory.
   date; time make INSTALLDIR=$HOME/local/cdf/3.9.0 install >& make_install.out
   # Took about 8 s on a MacBook Pro i9/2019.
   # Examine make_install.out for errors.

In order to use the newly-compiled library, you must set up your environment (path variables and aliases) by "sourcing" the setup script:

.. code-block:: shell

   # For sh/bash/compatible shells:
   source $HOME/local/cdf/3.9.0/bin/definitions.B

   # For csh/tcsh/compatible shells:
   source $HOME/local/cdf/3.9.0/bin/definitions.C

   # For ksh/compatible shells:
   source $HOME/local/cdf/3.9.0/bin/definitions.K

Linux and HPC systems
---------------------

These instructions should work for any Linux distribution. These instructions were tested on ``pleiades``, as well as an Ubuntu-based virtual machine.

This procedure will use the default system-provided C compiler (``/usr/bin/gcc``).

This procedure builds the code under ``$HOME/local/src``, and installs the compiled files under ``$HOME/local/cdf/3.8.1``, creating a user-only installation. Substitute your desired build and installation locations for these paths as appropriate.

**NOTE**: These instructions should work under any command shell.

.. code-block:: shell

   # Make a build tree.
   cd $HOME
   mkdir -p local/src
   cd local/src

   # Download the source tarball.
   wget https://spdf.gsfc.nasa.gov/pub/software/cdf/dist/cdf38_1/linux/cdf38_1-dist-all.tar.gz
   # OR:
   # curl https://spdf.gsfc.nasa.gov/pub/software/cdf/dist/cdf38_1/linux/cdf38_1-dist-all.tar.gz -o cdf38_1-dist-all.tar.gz

   # Unpack the source code.
   tar xzvf cdf38_1-dist-all.tar.gz

   # Build the library using the default system compiler (presumed to be gcc).
   cd cdf38_1-dist
   date; time make OS=linux ENV=gnu all >& make.out
   # Took about 33 s on pleiades.
   # Examine make.out for errors.

   # Test the library.
   date; time make test >& make_test.out
   # Took < 1 s on pleiades.
   # Examine make_test.out for errors.

   # Install the library in a version-specific subdirectory.
   date; time make INSTALLDIR=$HOME/local/cdf/3.8.1 install >& make_install.out
   # Took < 1 s on pleiades.
   # Examine make_install.out for errors.

In order to use the newly-compiled library, you must set up your environment (path variables and aliases) by "sourcing" the setup script:

.. code-block:: shell

   # For sh/bash/compatible shells:
   source $HOME/local/cdf/3.8.1/bin/definitions.B

   # For csh/tcsh/compatible shells:
   source $HOME/local/cdf/3.8.1/bin/definitions.C

   # For ksh/compatible shells:
   source $HOME/local/cdf/3.8.1/bin/definitions.K