mirror of
https://github.com/JHUAPL/kaiju.git
synced 2026-01-09 15:17:56 -05:00
Merged in development (pull request #20)
Development

Approved-by: Anthony Sciola
Approved-by: ksorathia
Approved-by: Eric Winter
Approved-by: Shanshan Bao
Approved-by: Nikhil Rao
11 README.md
@@ -28,7 +28,7 @@ Users are also welcome to run previous versions of `MAGE` via the
NASA's [Community Coordinated Modeling
Center](https://ccmc.gsfc.nasa.gov/). `MAGE` versions
[0.75](https://ccmc.gsfc.nasa.gov/models/MAGE~0.75/) and
-[1.0](https://ccmc.gsfc.nasa.gov/models/MAGE~1.00/) are available for
+[1.0](https://ccmc.gsfc.nasa.gov/models/MAGE~1.0/) are available for
runs on request. `MAGE 0.75` includes `GAMERA`, `REMIX` and `RCM`. `MAGE
1.0` adds two-way coupling with `TIEGCM` to `MAGE 0.75`.

@@ -37,7 +37,12 @@ runs on request. `MAGE 0.75` includes `GAMERA`, `REMIX` and `RCM`. `MAGE
### Documentation ###

Current documentation for the `kaiju` software is available via [Read The
-Docs](https://kaiju-doc.readthedocs.io/en/latest/).
+Docs](https://kaiju-docs.readthedocs.io/en/latest/).

### Analysis ###

You are encouraged to use the [Kaipy](https://github.com/jhuapl/kaipy) package for analysis and
visualization of `Kaiju` simulations.

### License ###

@@ -49,7 +54,7 @@ license](LICENSE.md).

All contributions should be made by forking this repository and
submitting a pull request. See documentation for our [rules of the
-road](https://kaiju-doc.readthedocs.io/en/latest/).
+road](https://kaiju-docs.readthedocs.io/en/latest/).


### Who do I talk to? ###

661 scripts/makeitso/engage.py (new Executable file)
@@ -0,0 +1,661 @@
#!/usr/bin/env python

"""engage for the MAGE magnetosphere software.

This script performs all of the steps needed to prepare a coupled MAGE run
with GAMERA, RCM, and TIEGCM components. By default, this script is
interactive - the user is prompted for each decision that must be made to
prepare for the run, based on the current "--mode" setting.

The modes are:

"BASIC" (the default) - the user is prompted to set only a small subset of MAGE
parameters. All "INTERMEDIATE"- and "EXPERT"-mode parameters are automatically
set to default values.

"INTERMEDIATE" - The user is prompted for "BASIC" and "INTERMEDIATE"
parameters, with "EXPERT" parameters set to defaults.

"EXPERT" - The user is prompted for *all* adjustable parameters.
"""
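As a rough illustration of the mode behavior described above: each option description in option_descriptions.json carries a "LEVEL" field, and a parameter is only prompted for when its level does not exceed the current mode. The helper below is a hypothetical sketch of that rule, not the actual logic (which lives in makeitso.get_run_option).

# Illustrative sketch only; the real behavior is implemented by makeitso.get_run_option.
_LEVELS = ["BASIC", "INTERMEDIATE", "EXPERT"]

def should_prompt(option_description: dict, mode: str) -> bool:
    """Return True if an option at this LEVEL is prompted for in `mode`."""
    return _LEVELS.index(option_description["LEVEL"]) <= _LEVELS.index(mode)

# Example: an INTERMEDIATE option is prompted in INTERMEDIATE and EXPERT modes,
# but silently takes its default in BASIC mode.
assert should_prompt({"LEVEL": "INTERMEDIATE"}, "EXPERT")
assert not should_prompt({"LEVEL": "INTERMEDIATE"}, "BASIC")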
# Import standard modules.
import argparse
import copy
import datetime
import json
import os
import subprocess
import sys

# Import 3rd-party modules.
import netCDF4
import h5py
from jinja2 import Template

# Import tiegcmrun.
TIEGCMHOME = os.environ["TIEGCMHOME"]
#sys.path.append('/glade/u/home/wiltbemj/src/tiegcm/tiegcmrun')
sys.path.append(f'{TIEGCMHOME}/tiegcmrun')
import tiegcmrun
print(f'tiegcmrun from {tiegcmrun.__file__}')

# Import makeitso.
KAIJUHOME = os.environ["KAIJUHOME"]
sys.path.append(f'{KAIJUHOME}/scripts/makeitso')
import makeitso
print(f'makeitso from {makeitso.__file__}')
# Program constants
|
||||
|
||||
# Program description.
|
||||
DESCRIPTION = "Interactive script to prepare a MAGE magnetosphere model run."
|
||||
|
||||
# Indent level for JSON output.
|
||||
JSON_INDENT = 4
|
||||
|
||||
# Path to current kaiju installation
|
||||
KAIJUHOME = os.environ["KAIJUHOME"]
|
||||
|
||||
# Path to directory containing support files for makeitso.
|
||||
SUPPORT_FILES_DIRECTORY = os.path.join(KAIJUHOME, "scripts", "makeitso")
|
||||
|
||||
# Path to option descriptions file.
|
||||
OPTION_ENGAGE_DESCRIPTIONS_FILE = os.path.join(
|
||||
SUPPORT_FILES_DIRECTORY, "option_engage_descriptions.json"
|
||||
)
|
||||
|
||||
OPTION_MAKEITSO_DESCRIPTIONS_FILE = os.path.join(
|
||||
SUPPORT_FILES_DIRECTORY, "option_descriptions.json"
|
||||
)
|
||||
|
||||
# Path to template .pbs file.
|
||||
PBS_TEMPLATE = os.path.join(SUPPORT_FILES_DIRECTORY, "template-gtr.pbs")
|
||||
|
||||
def create_command_line_parser():
|
||||
"""Create the command-line argument parser.
|
||||
|
||||
Create the parser for command-line arguments.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
None
|
||||
|
||||
Returns
|
||||
-------
|
||||
parser : argparse.ArgumentParser
|
||||
Command-line argument parser for this script.
|
||||
|
||||
Raises
|
||||
------
|
||||
None
|
||||
"""
|
||||
parser = argparse.ArgumentParser(description=DESCRIPTION)
|
||||
parser.add_argument(
|
||||
"--clobber", action="store_true",
|
||||
help="Overwrite existing options file (default: %(default)s)."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--debug", "-d", action="store_true",
|
||||
help="Print debugging output (default: %(default)s)."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--mode", default="BASIC",
|
||||
help="User mode (BASIC|INTERMEDIATE|EXPERT) (default: %(default)s)."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--engage_options_path", "-eo", default=None,
|
||||
help="Path to engage JSON file of options (default: %(default)s)"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--makeitso_options_path", "-mo", default=None,
|
||||
help="Path to makeitso JSON file of options (default: %(default)s)"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--tiegcm_options_path", "-to", default=None,
|
||||
help="Path to tiegcm JSON file of options (default: %(default)s)"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--verbose", "-v", action="store_true",
|
||||
help="Print verbose output (default: %(default)s)."
|
||||
)
|
||||
return parser
|
||||
|
||||
|
||||
def create_pbs_scripts(gr_options: dict, makeitso_options: dict, makeitso_pbs_scripts: list, tiegcm_options: dict, tiegcm_inp_scripts: list, tiegcm_pbs_scripts: list):
"""Create the PBS job scripts for the run.

Create the PBS job scripts from a template.

Parameters
----------
gr_options : dict
    Dictionary of program options from engage, each entry maps str to str.
makeitso_options : dict
    Dictionary of program options from makeitso, each entry maps str to str.
makeitso_pbs_scripts : list of str
    Paths to the standalone makeitso (GR) PBS job scripts.
tiegcm_options : dict
    Dictionary of program options from tiegcmrun, each entry maps str to str.
tiegcm_inp_scripts : list of str
    Paths to the TIEGCM input files, one per segment.
tiegcm_pbs_scripts : list of str
    Paths to the standalone TIEGCM PBS job scripts.

Returns
-------
pbs_scripts : list of str
    Paths to the coupled PBS job scripts.
submit_all_jobs_script : str
    Path to the script which submits all PBS jobs.

Raises
------
TypeError:
    If a non-integral number of nodes is requested.
"""
|
||||
# Read the template.
|
||||
with open(PBS_TEMPLATE, "r", encoding="utf-8") as f:
|
||||
template_content = f.read()
|
||||
template = Template(template_content)
|
||||
|
||||
options = copy.deepcopy(gr_options)
|
||||
|
||||
# GRT PBS parameters
|
||||
options["pbs"]["mpiexec_command"] = "mpiexec"
|
||||
|
||||
# TIEGCM PBS parameters
|
||||
options["pbs"]["tie_nodes"] = tiegcm_options["job"]["resource"]["select"]
|
||||
options["pbs"]["tie_ncpus"] = tiegcm_options["job"]["resource"]["ncpus"]
|
||||
options["pbs"]["tie_mpiprocs"] = tiegcm_options["job"]["resource"]["mpiprocs"]
|
||||
options["pbs"]["tie_mpiranks"] = tiegcm_options["job"]["nprocs"]
|
||||
options["pbs"]["tie_exe"] = tiegcm_options["model"]["data"]["coupled_modelexe"]
|
||||
if tiegcm_options["simulation"]["hpc_system"] == "pleiades":
|
||||
options["pbs"]["model"] = tiegcm_options["job"]["resource"]["model"]
|
||||
|
||||
# GR PBS parameters
|
||||
options["pbs"]["gamera_nodes"] = makeitso_options["pbs"]["select"]
|
||||
options["pbs"]["gamera_ncpus"] = makeitso_options["pbs"]["ncpus"]
|
||||
options["pbs"]["gamera_mpiprocs"] = makeitso_options["pbs"]["mpiprocs"]
|
||||
options["pbs"]["gamera_ompthreads"] = makeitso_options["pbs"]["ompthreads"]
|
||||
options["pbs"]["voltron_nodes"] = makeitso_options["pbs"]["select2"]
|
||||
options["pbs"]["voltron_ncpus"] = makeitso_options["pbs"]["ncpus"]
|
||||
options["pbs"]["voltron_mpiprocs"] = makeitso_options["pbs"]["helper_mpiprocs"]
|
||||
options["pbs"]["voltron_ompthreads"] = makeitso_options["pbs"]["helper_ompthreads"]
|
||||
options["pbs"]["voltron_mpiranks"] = int(options["pbs"]["gamera_nodes"])*int(options["pbs"]["gamera_mpiprocs"])+int( options["pbs"]["voltron_nodes"])*int(options["pbs"]["voltron_mpiprocs"])
|
||||
options["pbs"]["voltron_scripts"] = makeitso_options["pbs"]["mpiexec_command"].replace("mpiexec ", "")
|
||||
|
||||
if tiegcm_options["simulation"]["hpc_system"] == "derecho":
|
||||
options["pbs"]["mpiexec_command"] = "mpiexec"
|
||||
options["pbs"]["mpiexec_option"] = "-n"
|
||||
elif tiegcm_options["simulation"]["hpc_system"] == "pleiades":
|
||||
options["pbs"]["mpiexec_command"] = "mpiexec_mpt"
|
||||
options["pbs"]["mpiexec_option"] = "-np"
|
||||
options["pbs"]["tie_scripts"] = "correctOMPenvironment.sh $NODEFILE_1 omplace"
|
||||
options["pbs"]["voltron_scripts"] = "correctOMPenvironment.sh $NODEFILE_2 omplace"
|
||||
options["pbs"]["nodecommand"] = f"""
|
||||
# This section creates two node files based on the master node file.
|
||||
# Each node file should include the nodes that contribute to an executable.
|
||||
# Currently, head corresponds to TIEGCM ranks and tail are the GR ranks
|
||||
# A more robust system is needed in the future. GR ranks is G + helper R
|
||||
export NODEFILE_1=${{NODEFILE}}.1
|
||||
export NODEFILE_2=${{NODEFILE}}.2
|
||||
head -n {options["pbs"]["tie_mpiranks"]} $NODEFILE > $NODEFILE_1
|
||||
tail -n {options["pbs"]["voltron_mpiranks"]} $NODEFILE > $NODEFILE_2
|
||||
wc -l $NODEFILE
|
||||
|
||||
export NODEFILE_T=${{NODEFILE}}.T
|
||||
cat $NODEFILE_1 $NODEFILE_2 > $NODEFILE_T
|
||||
echo ""
|
||||
echo "Original Nodes"
|
||||
cat $PBS_NODEFILE
|
||||
echo ""
|
||||
|
||||
echo ""
|
||||
echo "New Nodes"
|
||||
cat $NODEFILE_T
|
||||
echo ""
|
||||
|
||||
export PBS_NODEFILE=$NODEFILE_T
|
||||
|
||||
echo ""
|
||||
echo "PBS_NODEFILE = "
|
||||
echo $PBS_NODEFILE
|
||||
echo ""
|
||||
|
||||
echo "Running tiegcm and voltron at the same time"
|
||||
"""
|
||||
|
||||
# Create a PBS script for each segment.
|
||||
pbs_scripts = []
|
||||
for job in range(1,int(options["pbs"]["num_segments"])+1):
|
||||
opt = copy.deepcopy(options) # Need a copy of options
|
||||
runid = opt["simulation"]["job_name"]
|
||||
segment_id = f"{runid}-{job:02d}"
|
||||
opt["simulation"]["segment_id"] = segment_id
|
||||
|
||||
# TIEGCM input script
|
||||
opt["pbs"]["tie_inp"] = tiegcm_inp_scripts[job-1]
|
||||
|
||||
pbs_content = template.render(opt)
|
||||
pbs_script = os.path.join(
|
||||
opt["pbs"]["run_directory"],
|
||||
f"{opt['simulation']['segment_id']}.pbs"
|
||||
)
|
||||
pbs_scripts.append(pbs_script)
|
||||
with open(pbs_script, "w", encoding="utf-8") as f:
|
||||
f.write(pbs_content)
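For reference, the loop above writes one PBS script per segment, named from the run ID plus a zero-padded segment number. A quick illustration (the run ID "mystorm" is a made-up example):

runid = "mystorm"       # example value for simulation.job_name
num_segments = 3
print([f"{runid}-{job:02d}.pbs" for job in range(1, num_segments + 1)])
# ['mystorm-01.pbs', 'mystorm-02.pbs', 'mystorm-03.pbs']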
|
||||
|
||||
# Create a single script which will submit all of the PBS jobs in order.
|
||||
submit_all_jobs_script = f"{gr_options['simulation']['job_name']}_pbs.sh"
|
||||
with open(submit_all_jobs_script, "w", encoding="utf-8") as f:
|
||||
cmd = f"# Standalone GR\n"
|
||||
f.write(cmd)
|
||||
makeitso_pbs = makeitso_pbs_scripts[0]
|
||||
cmd = f"makeitso_job_id=`qsub {makeitso_pbs}`\n"
|
||||
f.write(cmd)
|
||||
cmd = "echo $makeitso_job_id\n"
|
||||
f.write(cmd)
|
||||
for makeitso_pbs in makeitso_pbs_scripts[1:]:
|
||||
cmd = "old_makeitso_job_id=$makeitso_job_id\n"
|
||||
f.write(cmd)
|
||||
cmd = f"makeitso_job_id=`qsub -W depend=afterok:$old_makeitso_job_id {makeitso_pbs}`\n"
|
||||
f.write(cmd)
|
||||
cmd = "echo $makeitso_job_id\n"
|
||||
f.write(cmd)
|
||||
cmd = f"# Standalone TIEGCM\n"
|
||||
f.write(cmd)
|
||||
tiegcm_pbs = tiegcm_pbs_scripts[0]
|
||||
cmd = f"tiegcm_job_id=`qsub {tiegcm_pbs}`\n"
|
||||
f.write(cmd)
|
||||
cmd = "echo $tiegcm_job_id\n"
|
||||
f.write(cmd)
|
||||
for tiegcm_pbs in tiegcm_pbs_scripts[1:]:
|
||||
cmd = "old_tiegcm_job_id=$tiegcm_job_id\n"
|
||||
f.write(cmd)
|
||||
cmd = f"tiegcm_job_id=`qsub -W depend=afterok:$old_tiegcm_job_id {tiegcm_pbs}`\n"
|
||||
f.write(cmd)
|
||||
cmd = "echo $tiegcm_job_id\n"
|
||||
f.write(cmd)
|
||||
cmd = f"# Coupled GTR\n"
|
||||
f.write(cmd)
|
||||
s = pbs_scripts[0]
|
||||
cmd = f"job_id=`qsub -W depend=afterok:$makeitso_job_id:$tiegcm_job_id {s}`\n"
|
||||
f.write(cmd)
|
||||
cmd = "echo $job_id\n"
|
||||
f.write(cmd)
|
||||
for s in pbs_scripts[1:]:
|
||||
cmd = "old_job_id=$job_id\n"
|
||||
f.write(cmd)
|
||||
cmd = f"job_id=`qsub -W depend=afterok:$old_job_id {s}`\n"
|
||||
f.write(cmd)
|
||||
cmd = "echo $job_id\n"
|
||||
f.write(cmd)
|
||||
os.chmod(submit_all_jobs_script, 0o755)
|
||||
# Return the paths to the PBS scripts.
|
||||
return pbs_scripts, submit_all_jobs_script
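The submit script written above chains the jobs with `qsub -W depend=afterok:<previous job id>`, so each PBS job starts only after the one before it completes successfully, and the first coupled GTR segment additionally waits on both the standalone GR and standalone TIEGCM jobs. The sketch below shows the same chaining idea directly in Python; it assumes a PBS `qsub` command on the PATH, and the function name `submit_chain` is hypothetical (the real workflow uses the generated shell script).

import subprocess

def submit_chain(pbs_scripts):
    """Submit PBS scripts so each job depends on successful completion of the previous one."""
    job_id = None
    for script in pbs_scripts:
        cmd = ["qsub"]
        if job_id is not None:
            cmd += ["-W", f"depend=afterok:{job_id}"]
        cmd.append(script)
        # qsub prints the new job ID on stdout.
        job_id = subprocess.run(cmd, capture_output=True, text=True, check=True).stdout.strip()
        print(job_id)
    return job_id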
|
||||
|
||||
def prompt_user_for_run_options(args):
|
||||
"""Prompt the user for run options.
|
||||
|
||||
Prompt the user for run options.
|
||||
|
||||
NOTE: In this function, the complete set of parameters is split up
|
||||
into logical groups. This is done partly to make the organization of the
|
||||
parameters more obvious, and partly to allow the values of options to
|
||||
depend upon previously-specified options.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
args : dict
|
||||
Dictionary of command-line options
|
||||
|
||||
Returns
|
||||
-------
|
||||
options : dict
|
||||
Dictionary of program options, each entry maps str to str.
|
||||
|
||||
Raises
|
||||
------
|
||||
None
|
||||
"""
|
||||
# Save the user mode.
|
||||
mode = args.mode
|
||||
|
||||
# Read the dictionary of option descriptions.
|
||||
with open(OPTION_MAKEITSO_DESCRIPTIONS_FILE, "r", encoding="utf-8") as f:
|
||||
option_makeitso_descriptions = json.load(f)
|
||||
with open(OPTION_ENGAGE_DESCRIPTIONS_FILE, "r", encoding="utf-8") as f:
|
||||
option_engage_descriptions = json.load(f)
|
||||
|
||||
# Initialize the dictionary of program options.
|
||||
options = {}
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
|
||||
# General options for the simulation
|
||||
o = options["simulation"] = {}
|
||||
od = option_makeitso_descriptions["simulation"]
|
||||
|
||||
# Prompt for the name of the job.
|
||||
for on in ["job_name"]:
|
||||
o[on] = makeitso.get_run_option(on, od[on], mode)
|
||||
|
||||
|
||||
# Prompt for the start and stop date of the run. This will also be
|
||||
# used as the start and stop date of the data in the boundary condition
|
||||
# file, which will be created using CDAWeb data.
|
||||
for on in ["start_date", "stop_date"]:
|
||||
o[on] = makeitso.get_run_option(on, od[on], mode)
|
||||
|
||||
# Compute the total simulation time in seconds, use as segment duration
|
||||
# default.
|
||||
date_format = '%Y-%m-%dT%H:%M:%S'
|
||||
start_date = o["start_date"]
|
||||
stop_date = o["stop_date"]
|
||||
t1 = datetime.datetime.strptime(start_date, date_format)
|
||||
t2 = datetime.datetime.strptime(stop_date, date_format)
|
||||
simulation_duration = (t2 - t1).total_seconds()
|
||||
od["segment_duration"]["default"] = str(simulation_duration)
|
||||
|
||||
# Ask if the user wants to split the run into multiple segments.
|
||||
# If so, prompt for the segment duration. If not, use the default
|
||||
# for the segment duration (which is the simulation duration).
|
||||
for on in ["use_segments"]:
|
||||
od[on]["default"] = "Y"
|
||||
o[on] = makeitso.get_run_option(on, od[on], mode)
|
||||
if o["use_segments"] == "Y":
|
||||
for on in ["segment_duration"]:
|
||||
o[on] = makeitso.get_run_option(on, od[on], mode)
|
||||
else:
|
||||
o["segment_duration"] = od["segment_duration"]["default"]
|
||||
|
||||
# Compute the number of segments based on the simulation duration and
|
||||
# segment duration, add 1 if there is a remainder.
|
||||
if o["use_segments"] == "Y":
|
||||
num_segments = simulation_duration/float(o["segment_duration"])
|
||||
if num_segments > int(num_segments):
|
||||
num_segments += 1
|
||||
num_segments = int(num_segments)
|
||||
else:
|
||||
num_segments = 1
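The segment count computed above is a ceiling division of the simulation duration by the segment duration. A compact equivalent, with made-up example durations:

import math

simulation_duration = 90000.0   # example: a 25-hour run, in seconds
segment_duration = 21600.0      # example: 6-hour segments
num_segments = math.ceil(simulation_duration / segment_duration)   # 5 (4 full segments + 1 partial)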
|
||||
|
||||
# Prompt for the remaining parameters.
|
||||
for on in ["gamera_grid_type", "hpc_system"]:
|
||||
o[on] = makeitso.get_run_option(on, od[on], mode)
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# PBS options
|
||||
o = options["pbs"] = {}
|
||||
|
||||
# Common (HPC platform-independent) options
|
||||
od = option_makeitso_descriptions["pbs"]["_common"]
|
||||
od["account_name"]["default"] = os.getlogin()
|
||||
od["kaiju_install_directory"]["default"] = os.environ["KAIJUHOME"]
|
||||
od["kaiju_build_directory"]["default"] = os.path.join(
|
||||
os.environ["KAIJUHOME"], "build_gtr")
|
||||
od["num_segments"]["default"] = str(num_segments)
|
||||
for on in od:
|
||||
o[on] = makeitso.get_run_option(on, od[on], mode)
|
||||
|
||||
# HPC platform-specific options
|
||||
hpc_platform = options["simulation"]["hpc_system"]
|
||||
gamera_grid_type = options["simulation"]["gamera_grid_type"]
|
||||
od = option_makeitso_descriptions["pbs"][hpc_platform]
|
||||
oed = option_engage_descriptions["pbs"][hpc_platform]
|
||||
od["select"]["default"] = od["select"]["default"][gamera_grid_type]
|
||||
od["num_helpers"]["default"] = (
|
||||
od["num_helpers"]["default"][gamera_grid_type]
|
||||
)
|
||||
od["modules"] = oed["modules"]
|
||||
if hpc_platform == "pleiades":
|
||||
od["moduledir"] = oed["moduledir"]
|
||||
od["local_modules"] = oed["local_modules"]
|
||||
for on in od:
|
||||
o[on] = makeitso.get_run_option(on, od[on], mode)
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
# coupling options
|
||||
options["coupling"] = {}
|
||||
o = options["coupling"]
|
||||
od = option_engage_descriptions["coupling"]
|
||||
|
||||
od["conda_env"]["default"] = os.environ.get('CONDA_DEFAULT_ENV')
|
||||
|
||||
od["root_directory"]["default"] = os.path.abspath(os.curdir)
|
||||
# Prompt for the remaining parameters.
|
||||
for on in ["gr_warm_up_time", "gcm_spin_up_time",
|
||||
"root_directory","conda_env","tfin_delta","doGCM"]:
|
||||
o[on] = makeitso.get_run_option(on, od[on], mode)
|
||||
#-------------------------------------------------------------------------
|
||||
# Return the options dictionary.
|
||||
return options
|
||||
|
||||
|
||||
def main():
|
||||
"""Main program code for makeitso.
|
||||
|
||||
This is the main program code for makeitso.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
None
|
||||
|
||||
Returns
|
||||
-------
|
||||
None
|
||||
|
||||
Raises
|
||||
------
|
||||
None
|
||||
"""
|
||||
# Set up the command-line parser.
|
||||
parser = create_command_line_parser()
|
||||
|
||||
# Parse the command-line arguments.
|
||||
args = parser.parse_args()
|
||||
if args.debug:
|
||||
print(f"args = {args}")
|
||||
clobber = args.clobber
|
||||
debug = args.debug
|
||||
engage_options_path = args.engage_options_path
|
||||
makeitso_options_path = args.makeitso_options_path
|
||||
tiegcm_options_path = args.tiegcm_options_path
|
||||
verbose = args.verbose
|
||||
|
||||
# Fetch the engage run options.
|
||||
if engage_options_path:
|
||||
# Read the dictionary of option descriptions.
|
||||
with open(OPTION_MAKEITSO_DESCRIPTIONS_FILE, "r", encoding="utf-8") as f:
|
||||
option_makeitso_descriptions = json.load(f)
|
||||
with open(OPTION_ENGAGE_DESCRIPTIONS_FILE, "r", encoding="utf-8") as f:
|
||||
option_engage_descriptions = json.load(f)
|
||||
# Read the run options from a JSON file.
|
||||
with open(engage_options_path, "r", encoding="utf-8") as f:
|
||||
engage_options = json.load(f)
|
||||
#-------------------------------------------------------------------------
|
||||
|
||||
# General options for the simulation
|
||||
o = engage_options["simulation"]
|
||||
date_format = '%Y-%m-%dT%H:%M:%S'
|
||||
start_date = o["start_date"]
|
||||
stop_date = o["stop_date"]
|
||||
t1 = datetime.datetime.strptime(start_date, date_format)
|
||||
t2 = datetime.datetime.strptime(stop_date, date_format)
|
||||
simulation_duration = float((t2 - t1).total_seconds())
|
||||
segment_duration = float(o["segment_duration"])
|
||||
gamera_grid_type = o["gamera_grid_type"]
|
||||
hpc_system = o["hpc_system"]
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# PBS options
|
||||
o = engage_options["pbs"]
|
||||
|
||||
# HPC platform-specific options
|
||||
hpc_platform = engage_options["simulation"]["hpc_system"]
|
||||
gamera_grid_type = engage_options["simulation"]["gamera_grid_type"]
|
||||
od = option_makeitso_descriptions["pbs"][hpc_platform]
|
||||
oed = option_engage_descriptions["pbs"][hpc_platform]
|
||||
od["select"]["default"] = od["select"]["default"][gamera_grid_type]
|
||||
od["num_helpers"]["default"] = (
|
||||
od["num_helpers"]["default"][gamera_grid_type]
|
||||
)
|
||||
od["modules"] = oed["modules"]
|
||||
if hpc_platform == "pleiades":
|
||||
od["moduledir"] = oed["moduledir"]
|
||||
od["local_modules"] = oed["local_modules"]
|
||||
for on in od:
|
||||
if on not in o:
|
||||
o[on] = od[on]["default"]
|
||||
|
||||
if engage_options["simulation"]["use_segments"] == "Y":
|
||||
num_segments = simulation_duration/segment_duration
|
||||
if num_segments > int(num_segments):
|
||||
num_segments += 1
|
||||
num_segments = int(num_segments)
|
||||
else:
|
||||
num_segments = 1
|
||||
o["num_segments"] = str(num_segments)
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
# coupling options
|
||||
o = engage_options["coupling"]
|
||||
if "conda_env" not in engage_options["coupling"]:
|
||||
o["conda_env"] = os.environ.get('CONDA_DEFAULT_ENV')
|
||||
else:
|
||||
# Prompt the user for the run options.
|
||||
engage_options = prompt_user_for_run_options(args)
|
||||
if debug:
|
||||
print(f"options = {options}")
|
||||
|
||||
# Save the options dictionary as a JSON file in the current directory.
|
||||
path = f"engage_parameters.json"
|
||||
if os.path.exists(path):
|
||||
if not clobber:
|
||||
raise FileExistsError(f"Options file {path} exists!")
|
||||
with open(path, "w", encoding="utf-8") as f:
|
||||
json.dump(engage_options, f, indent=JSON_INDENT)
|
||||
|
||||
makeitso_args = {'clobber': True, 'debug': False, 'verbose': False}
|
||||
# Fetch the makeitso run options.
|
||||
if makeitso_options_path:
|
||||
# Read the dictionary of option descriptions.
|
||||
with open(OPTION_MAKEITSO_DESCRIPTIONS_FILE, "r", encoding="utf-8") as f:
|
||||
option_makeitso_descriptions = json.load(f)
|
||||
with open(OPTION_ENGAGE_DESCRIPTIONS_FILE, "r", encoding="utf-8") as f:
|
||||
option_engage_descriptions = json.load(f)
|
||||
# Read the run options from a JSON file.
|
||||
with open(makeitso_options_path, "r", encoding="utf-8") as f:
|
||||
makeitso_options = json.load(f)
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Simulation options
|
||||
for parameter in engage_options["simulation"]:
|
||||
makeitso_options["simulation"][parameter] = engage_options["simulation"][parameter]
|
||||
gamera_grid_type = makeitso_options["simulation"]["gamera_grid_type"]
|
||||
# Coupling parameters are passed from engage to makeitso
|
||||
gr_warm_up_time = engage_options["coupling"]["gr_warm_up_time"]
|
||||
dt = datetime.timedelta(seconds=float(gr_warm_up_time))
|
||||
start_date = engage_options["simulation"]["start_date"]
|
||||
stop_date = engage_options["simulation"]["stop_date"]
|
||||
t0 = datetime.datetime.fromisoformat(start_date)
|
||||
t0 -= dt
|
||||
t1 = datetime.datetime.fromisoformat(stop_date)
|
||||
start_date = datetime.datetime.isoformat(t0)
|
||||
makeitso_options["simulation"]["start_date"] = start_date
|
||||
# PBS parameters are passed from engage to makeitso
|
||||
pbs = engage_options["pbs"]
|
||||
for k in pbs:
|
||||
if k not in ["num_segments"]:
|
||||
makeitso_options["pbs"][k] = pbs[k]
|
||||
|
||||
segment_duration = float(engage_options["simulation"]["segment_duration"])
|
||||
makeitso_options["voltron"]["time"]["tFin"] = int((t1-t0).total_seconds())
|
||||
makeitso_options["pbs"]["num_segments"] = str(int((t1-t0).total_seconds()/segment_duration))
|
||||
select2 = 1 + int(makeitso_options["pbs"]["num_helpers"])
|
||||
makeitso_options["pbs"]["select2"] = str(select2)
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# GAMERA options
|
||||
o = makeitso_options["gamera"]["sim"]
|
||||
od = option_makeitso_descriptions["gamera"]["sim"]
|
||||
|
||||
o["runid"] = engage_options["simulation"]["job_name"]
|
||||
o["H5Grid"] = (
|
||||
f"lfm{gamera_grid_type}.h5"
|
||||
)
|
||||
# <iPdir> options
|
||||
o = makeitso_options["gamera"]["iPdir"]
|
||||
od = option_makeitso_descriptions["gamera"]["iPdir"]
|
||||
o["N"] = od["N"]["default"][gamera_grid_type]
|
||||
|
||||
# <jPdir> options
|
||||
o = makeitso_options["gamera"]["jPdir"]
|
||||
od = option_makeitso_descriptions["gamera"]["jPdir"]
|
||||
o["N"] = od["N"]["default"][gamera_grid_type]
|
||||
|
||||
# <kPdir> options
|
||||
o = makeitso_options["gamera"]["kPdir"]
|
||||
od = option_makeitso_descriptions["gamera"]["kPdir"]
|
||||
o["N"] = od["N"]["default"][gamera_grid_type]
|
||||
|
||||
makeitso_options["gamera"]["restart"]["resID"] = engage_options["simulation"]["job_name"]
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Voltron options
|
||||
o = makeitso_options["voltron"]["helpers"]
|
||||
od = option_makeitso_descriptions["voltron"]["helpers"]
|
||||
num_helpers = int(makeitso_options["pbs"]["num_helpers"])
|
||||
if num_helpers == 0:
|
||||
o["useHelpers"] = "F"
|
||||
else:
|
||||
o["useHelpers"] = "T"
|
||||
o["numHelpers"] = num_helpers
|
||||
|
||||
|
||||
path = f"makeitso_parameters.json"
|
||||
with open(path, "w", encoding="utf-8") as f:
|
||||
json.dump(makeitso_options, f, indent=JSON_INDENT)
|
||||
makeitso_args = {'clobber': True, 'debug': False, 'verbose': False, 'options_path': path}
|
||||
makeitso_args.update(engage_options)
|
||||
else:
|
||||
makeitso_args = {'clobber': True, 'debug': False, 'verbose': False}
|
||||
makeitso_args.update(engage_options)
|
||||
|
||||
|
||||
makeitso_options, makeitso_spinup_pbs_scripts, makeitso_warmup_pbs_scripts = makeitso.makeitso(makeitso_args)
|
||||
makeitso_pbs_scripts = makeitso_spinup_pbs_scripts + makeitso_warmup_pbs_scripts
|
||||
# Save the makeitso options dictionary as a JSON file in the current directory.
|
||||
|
||||
with open('makeitso_parameters.json', 'w') as f:
|
||||
json.dump(makeitso_options, f, indent=JSON_INDENT)
|
||||
|
||||
|
||||
# Run the TIEGCMrun
|
||||
coupled_options = copy.deepcopy(engage_options)
|
||||
coupled_options["voltron"] = makeitso_options.get("voltron", {})
|
||||
# Fetch the tiegcmrun options.
|
||||
if tiegcm_options_path:
|
||||
# Read the run options from a JSON file.
|
||||
with open(tiegcm_options_path, "r", encoding="utf-8") as f:
|
||||
tiegcm_options = json.load(f)
|
||||
|
||||
tiegcm_args = [
|
||||
"--coupling",
|
||||
"--engage", json.dumps(coupled_options),
|
||||
"--options", tiegcm_options_path
|
||||
]
|
||||
|
||||
tiegcm_options,tiegcm_pbs_scripts,tiegcm_inp_scripts = tiegcmrun.tiegcmrun(tiegcm_args)
|
||||
|
||||
|
||||
# Save the tiegcm options dictionary as a JSON file in the current directory.
|
||||
with open('tiegcmrun_parameters.json', 'w') as f:
|
||||
json.dump(tiegcm_options, f, indent=JSON_INDENT)
|
||||
#json.dump(tiegcm_pbs_scripts, f, indent=JSON_INDENT)
|
||||
#json.dump(tiegcm_inp_scripts, f, indent=JSON_INDENT)
|
||||
|
||||
# Create the PBS job scripts.
|
||||
pbs_scripts, submit_all_jobs_script = create_pbs_scripts(engage_options,makeitso_options, makeitso_pbs_scripts, tiegcm_options, tiegcm_inp_scripts, tiegcm_pbs_scripts)
|
||||
print(f"pbs_scripts = {pbs_scripts}")
|
||||
print(f"submit_all_jobs_script = {submit_all_jobs_script}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
"""Begin main program."""
|
||||
main()
|
||||
829 scripts/makeitso/makeitso.py (Executable file → Normal file)
File diff suppressed because it is too large.
@@ -182,6 +182,11 @@
|
||||
}
|
||||
},
|
||||
"pleiades": {
|
||||
"group_list": {
|
||||
"LEVEL": "BASIC",
|
||||
"prompt": "PBS group list",
|
||||
"default": null
|
||||
},
|
||||
"queue": {
|
||||
"LEVEL": "BASIC",
|
||||
"prompt": "PBS queue name",
|
||||
@@ -254,6 +259,16 @@
|
||||
"hdf5/1.8.18_mpt"
|
||||
]
|
||||
},
|
||||
"moduledir": {
|
||||
"LEVEL": "EXPERT",
|
||||
"prompt": "Module directory",
|
||||
"default": [null]
|
||||
},
|
||||
"local_modules": {
|
||||
"LEVEL": "EXPERT",
|
||||
"prompt": "Commands to add local module PATH",
|
||||
"default": [null]
|
||||
},
|
||||
"environment_variables": {
|
||||
"LEVEL": "EXPERT",
|
||||
"prompt": "Additional environment variable settings",
|
||||
@@ -525,6 +540,12 @@
|
||||
"default": "T",
|
||||
"valids": ["T", "F"]
|
||||
},
|
||||
"doGCM": {
|
||||
"LEVEL": "INTERMEDIATE",
|
||||
"prompt": "(VOLTRON) Run in GCM mode",
|
||||
"default": "F",
|
||||
"valids": ["T", "F"]
|
||||
},
|
||||
"qkSquishStride": {
|
||||
"LEVEL": "INTERMEDIATE",
|
||||
"prompt": "(VOLTRON) Set stride for field line tracing for RCM ingestion into gamera (must be integer power of 2)",
|
||||
|
||||
90 scripts/makeitso/option_engage_descriptions.json (Normal file)
@@ -0,0 +1,90 @@
|
||||
{
|
||||
"version": "0.0.0",
|
||||
"coupling": {
|
||||
"gr_warm_up_time": {
|
||||
"LEVEL": "INTERMEDIATE",
|
||||
"prompt": "GR warmup time in simulated seconds",
|
||||
"default": 14400
|
||||
},
|
||||
"gcm_spin_up_time": {
|
||||
"LEVEL": "INTERMEDIATE",
|
||||
"prompt": "GCM spin-up time in simulated seconds",
|
||||
"default": 604800
|
||||
},
|
||||
"root_directory": {
|
||||
"LEVEL": "BASIC",
|
||||
"prompt": "Root directory for the simulation",
|
||||
"default": "."
|
||||
},
|
||||
"conda_env": {
|
||||
"LEVEL": "BASIC",
|
||||
"prompt": "Conda environment to use for the simulation",
|
||||
"default": null
|
||||
},
|
||||
"tfin_delta": {
|
||||
"LEVEL": "BASIC",
|
||||
"prompt": "Extend TFIN by dtCouple - 1 seconds",
|
||||
"default": "T",
|
||||
"valids": ["T", "F"]
|
||||
},
|
||||
"doGCM": {
|
||||
"LEVEL": "BASIC",
|
||||
"prompt": "(VOLTRON) Run in GCM mode",
|
||||
"default": "T",
|
||||
"valids": ["T", "F"]
|
||||
}
|
||||
},
|
||||
"pbs": {
|
||||
"derecho": {
|
||||
"modules": {
|
||||
"LEVEL": "EXPERT",
|
||||
"prompt": "Modules to load",
|
||||
"default": [
|
||||
"ncarenv/23.09",
|
||||
"cmake/3.26.3",
|
||||
"craype/2.7.31",
|
||||
"intel-classic/2023.2.1",
|
||||
"cray-mpich/8.1.27",
|
||||
"ncarcompilers/1.0.0",
|
||||
"mkl/2023.2.0",
|
||||
"hdf5-mpi/1.12.2",
|
||||
"netcdf-mpi/4.9.2",
|
||||
"esmf/8.6.0",
|
||||
"conda"
|
||||
]
|
||||
}
|
||||
},
|
||||
"pleiades": {
|
||||
"modules": {
|
||||
"LEVEL": "EXPERT",
|
||||
"prompt": "Modules to load",
|
||||
"default": [
|
||||
"comp-intel/2020.4.304",
|
||||
"mpi-hpe/mpt.2.30",
|
||||
"szip/2.1.1",
|
||||
"hdf5/1.12.3_mpt",
|
||||
"miniconda3/v4"
|
||||
]
|
||||
},
|
||||
"moduledir": {
|
||||
"LEVEL": "EXPERT",
|
||||
"prompt": "Module directory",
|
||||
"default": [
|
||||
"/nasa/modulefiles/testing",
|
||||
"/swbuild/analytix/tools/modulefiles"
|
||||
]
|
||||
},
|
||||
"local_modules": {
|
||||
"LEVEL": "EXPERT",
|
||||
"prompt": "Commands to add local module PATH",
|
||||
"default": [
|
||||
"export PREFIX=/u/nrao3/local3",
|
||||
"export LIBRARY_PATH=${LIBRARY_PATH}:$PREFIX/lib",
|
||||
"export LD_LIBRARY_PATH=$LIBRARY_PATH",
|
||||
"export CPATH=$PREFIX/include",
|
||||
"export PATH=${PATH}:$PREFIX/bin"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
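engage.py reads this file with json.load and falls back to the "default" entries for any parameter the user is not prompted for. A minimal sketch of pulling a default out of it, assuming KAIJUHOME is set as the script requires:

import json
import os

path = os.path.join(os.environ["KAIJUHOME"], "scripts", "makeitso",
                    "option_engage_descriptions.json")
with open(path, "r", encoding="utf-8") as f:
    descriptions = json.load(f)

# e.g. the default GR warm-up time, in simulated seconds
print(descriptions["coupling"]["gr_warm_up_time"]["default"])   # 14400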
|
||||
86 scripts/makeitso/template-gtr.pbs (Normal file)
@@ -0,0 +1,86 @@
|
||||
#!/bin/bash
|
||||
|
||||
# This script was generated to run on {{ simulation.hpc_system }}.
|
||||
|
||||
#PBS -N {{ simulation.segment_id }}
|
||||
#PBS -A {{ pbs.account_name }}
|
||||
{%- if pbs.group_list|default('', true) and pbs.group_list != "None" %}
|
||||
#PBS -W group_list={{ pbs.group_list }}
|
||||
{%- endif %}
|
||||
#PBS -q {{ pbs.queue }}
|
||||
#PBS -l walltime={{ pbs.walltime }}
|
||||
{%- if pbs.model is defined %}
|
||||
#PBS -l select={{ pbs.tie_nodes }}:ncpus={{ pbs.tie_ncpus }}:mpiprocs={{ pbs.tie_mpiprocs }}:model={{ pbs.model }}+{{ pbs.gamera_nodes }}:ncpus={{ pbs.gamera_ncpus }}:mpiprocs={{ pbs.gamera_mpiprocs }}:ompthreads={{ pbs.gamera_ompthreads }}:model={{ pbs.model }}+{{ pbs.voltron_nodes }}:ncpus={{ pbs.voltron_ncpus }}:mpiprocs={{ pbs.voltron_mpiprocs }}:ompthreads={{ pbs.voltron_ompthreads }}:model={{ pbs.model }}
|
||||
{%- else %}
|
||||
#PBS -l select={{ pbs.tie_nodes }}:ncpus={{ pbs.tie_ncpus }}:mpiprocs={{ pbs.tie_mpiprocs }}+{{ pbs.gamera_nodes }}:ncpus={{ pbs.gamera_ncpus }}:mpiprocs={{ pbs.gamera_mpiprocs }}:ompthreads={{ pbs.gamera_ompthreads }}+{{ pbs.voltron_nodes }}:ncpus={{ pbs.voltron_ncpus }}:mpiprocs={{ pbs.voltron_mpiprocs }}:ompthreads={{ pbs.voltron_ompthreads }}
|
||||
{%- endif %}
|
||||
{%- if pbs.job_priority is defined %}
|
||||
#PBS -l job_priority={{ pbs.job_priority }}
|
||||
{%- endif %}
|
||||
#PBS -m abe
|
||||
#PBS -j oe
|
||||
|
||||
echo "Job $PBS_JOBID started at `date` on `hostname`."
|
||||
|
||||
# Specify the ID string for the run.
|
||||
export RUNID=$PBS_JOBNAME
|
||||
|
||||
# Some HPC systems do not start the job in the correct directory.
|
||||
# Move to the intended directory. This will move the job to the correct
|
||||
# directory on problem systems, such as pleiades (for some users), and have no
|
||||
# effect on other systems.
|
||||
cd $PBS_O_WORKDIR
|
||||
|
||||
# Load the required modules for kaiju.
|
||||
module purge
|
||||
{%- for moduledir in pbs.moduledir %}
|
||||
module use -a {{ moduledir }}
|
||||
{%- endfor %}
|
||||
{%- for module in pbs.modules %}
|
||||
module load {{ module }}
|
||||
{%- endfor %}
|
||||
module list
|
||||
|
||||
{%- for local_modules in pbs.local_modules %}
|
||||
{{ local_modules }}
|
||||
{%- endfor %}
|
||||
|
||||
# Define the kaiju installation location.
|
||||
export KAIJU_INSTALL_DIR={{ pbs.kaiju_install_directory }}
|
||||
|
||||
# Set kaiju-related environment variables.
|
||||
# This script sets KAIJUHOME and other environment variables.
|
||||
source $KAIJU_INSTALL_DIR/scripts/setupEnvironment.sh
|
||||
|
||||
# Add the kaiju build bin directory to PATH.
|
||||
export PATH={{ pbs.kaiju_build_directory }}/bin:$PATH
|
||||
|
||||
# Set other environment variables.
|
||||
{{ pbs.additional_environment_variables -}}
|
||||
export MPI_TYPE_DEPTH=32
|
||||
export OMP_STACKSIZE=100M
|
||||
export OMP_NUM_THREADS={{ pbs.helper_ompthreads }}
|
||||
|
||||
echo "The active environment variables are:"
|
||||
printenv
|
||||
|
||||
# Copy the node file for use by the placer command.
|
||||
NODEFILE=nodefile.$PBS_JOBID
|
||||
cp $PBS_NODEFILE $NODEFILE
|
||||
|
||||
{%- if pbs.nodecommand is defined %}
|
||||
{{ pbs.nodecommand }}
|
||||
{%- endif %}
|
||||
|
||||
# Copy the main executable.
|
||||
# cp {{ pbs.kaiju_build_directory }}/bin/voltron_mpi.x ./voltron_mpi.x
|
||||
# cp {{ pbs.tie_exe }} ./tiegcm.x
|
||||
|
||||
# Run the model. Direct output from the program is saved in a text file.
|
||||
VOLTRON_EXE=./voltron_mpi.x
|
||||
TIEGCM_EXE=./tiegcm.x
|
||||
OUTPUT_FILE="$VOLTRON_EXE-{{ simulation.segment_id }}.out"
|
||||
echo "Running $VOLTRON_EXE on model $RUNID."
|
||||
{{ pbs.mpiexec_command }} {{pbs.mpiexec_option}} {{pbs.tie_mpiranks}} {%- if pbs.tie_scripts is defined %} {{ pbs.tie_scripts }} {%- endif %} $TIEGCM_EXE {{pbs.tie_inp}} : {{pbs.mpiexec_option}} {{pbs.voltron_mpiranks}} {%- if pbs.voltron_scripts is defined %} {{ pbs.voltron_scripts }} {%- endif %} $VOLTRON_EXE $RUNID.xml &>> $OUTPUT_FILE
|
||||
|
||||
echo "Job $PBS_JOBID ended at `date` on `hostname`."
|
||||
@@ -11,6 +11,7 @@ dtOut = {{ voltron.output.dtOut }}
|
||||
dtCon = {{ voltron.output.dtCon }}
|
||||
|
||||
[coupling]
|
||||
doGCM = {{ voltron.coupling.doGCM }}
|
||||
dtCouple = {{ voltron.coupling.dtCouple }}
|
||||
imType = {{ voltron.coupling.imType }}
|
||||
doQkSquish = {{ voltron.coupling.doQkSquish }}
|
||||
|
||||
@@ -4,6 +4,9 @@
|
||||
|
||||
#PBS -N {{ simulation.segment_id }}
|
||||
#PBS -A {{ pbs.account_name }}
|
||||
{%- if pbs.group_list|default('', true) and pbs.group_list != "None" %}
|
||||
#PBS -W group_list={{ pbs.group_list }}
|
||||
{%- endif %}
|
||||
#PBS -q {{ pbs.queue }}
|
||||
#PBS -l walltime={{ pbs.walltime }}
|
||||
#PBS -l select={{ pbs.select }}:ncpus={{ pbs.ncpus }}:mpiprocs={{ pbs.mpiprocs }}:ompthreads={{ pbs.ompthreads }}{{ pbs.other }}+{{ pbs.select2 }}:ncpus={{ pbs.ncpus }}:mpiprocs={{ pbs.helper_mpiprocs }}:ompthreads={{ pbs.helper_ompthreads }}{{ pbs.other }}
|
||||
@@ -24,11 +27,18 @@ cd $PBS_O_WORKDIR
|
||||
|
||||
# Load the required modules for kaiju.
|
||||
module purge
|
||||
{%- for moduledir in pbs.moduledir %}
|
||||
module use -a {{ moduledir }}
|
||||
{%- endfor %}
|
||||
{%- for module in pbs.modules %}
|
||||
module load {{ module }}
|
||||
{%- endfor %}
|
||||
module list
|
||||
|
||||
{%- for local_modules in pbs.local_modules %}
|
||||
{{ local_modules }}
|
||||
{%- endfor %}
|
||||
|
||||
# Define the kaiju installation location.
|
||||
export KAIJU_INSTALL_DIR={{ pbs.kaiju_install_directory }}
|
||||
|
||||
|
||||
File diff suppressed because it is too large.
@@ -34,7 +34,7 @@ module mixdefs
|
||||
|
||||
! enumerator for transform variables
|
||||
enum, bind(C)
|
||||
enumerator :: iSMtoGEO,iGEOtoSM,iSMtoGSM,iGSMtoSM,iGEOtoGSM,iGSMtoGEO
|
||||
enumerator :: iSMtoGEO,iGEOtoSM,iSMtoGSM,iGSMtoSM,iGEOtoGSM,iGSMtoGEO,iAPEXtoSM,iSMtoAPEX
|
||||
end enum
|
||||
|
||||
real(rp), parameter :: mixeTINY = 1.D-8 ! Floor of average energy [keV]
|
||||
|
||||
@@ -4,6 +4,7 @@ module kai2geo
|
||||
use geopack
|
||||
use planethelper
|
||||
use math
|
||||
use apex
|
||||
|
||||
implicit none
|
||||
|
||||
@@ -204,6 +205,10 @@ module kai2geo
|
||||
! call MAG2SM(indim(XDIR),indim(YDIR),indim(ZDIR),G2%x(j,i),G2%y(j,i),tempz(j,i))
|
||||
!case (iSMtoMAG)
|
||||
! call SM2MAG(indim(XDIR),indim(YDIR),indim(ZDIR),G2%x(j,i),G2%y(j,i),tempz(j,i))
|
||||
case (iSMtoAPEX)
|
||||
call SM2APEX(indim(XDIR),indim(YDIR),indim(ZDIR),G2%x(j,i),G2%y(j,i),tempz(j,i))
|
||||
case (iAPEXtoSM)
|
||||
call APEX2SM(indim(XDIR),indim(YDIR),indim(ZDIR),G2%x(j,i),G2%y(j,i),tempz(j,i))
|
||||
case default
|
||||
! No transform
|
||||
G2%x(j,i) = indim(XDIR)
|
||||
@@ -214,18 +219,13 @@ module kai2geo
|
||||
end do
|
||||
|
||||
G2%y = G2%y*ymod2
|
||||
!G2%t = asin(sqrt(G2%x**2+G2%y**2))
|
||||
!G2%p = modulo((atan2(G2%y,G2%x)+2*pi),(2*pi))
|
||||
! Convert back to colatitude
|
||||
if (hemisphere == NORTH) then
|
||||
G2%t = acos(tempz/r) ! convert back to standard colatitude
|
||||
else if (hemisphere == SOUTH) then
|
||||
G2%t = pi-acos(tempz/r) ! convert to funky remix colatitude
|
||||
endif
|
||||
!G2%t = asin(tempz/r)-pi/2
|
||||
!G2%t = acos(tempz/r)
|
||||
G2%p = modulo((atan2(G2%y,G2%x)+2*pi),(2*pi))
|
||||
!G2%p(:,G2%Nt) = (atan2(G2%y(:,G2%Nt),G2%x(:,G2%Nt))+2*pi)
|
||||
if (allocated(tempz)) deallocate(tempz)
|
||||
end subroutine transform_grid
|
||||
|
||||
@@ -270,4 +270,75 @@ module kai2geo
|
||||
outlon = modulo((atan2(outdim(YDIR),outdim(XDIR))+2*pi),(2*pi))
|
||||
end subroutine transform_TP
|
||||
|
||||
subroutine SM2APEX(XSM,YSM,ZSM,XAPX,YAPX,ZAPX)
|
||||
real(rp), intent(in) :: XSM,YSM,ZSM
|
||||
real(rp), intent(out) :: XAPX,YAPX,ZAPX
|
||||
|
||||
real(rp) :: XGEO,YGEO,ZGEO
|
||||
real(rp) :: ionalt, r, tempt,tempp ,qdlon ,malat ,qdlat
|
||||
|
||||
! Non-scalar arguments returned by APXMALL:
|
||||
real(rp) :: b0,si,vmp,w,d,be3,sim,f, &
|
||||
b(3),bhat(3), &
|
||||
d1(3),d2(3),d3(3), &
|
||||
e1(3),e2(3),e3(3), &
|
||||
f1(3),f2(3),f3(3), &
|
||||
g1(3),g2(3),g3(3)
|
||||
integer :: ist
|
||||
|
||||
! Calculate radius of ionosphere in km
|
||||
r = (RionE*1.0e+6)/REarth
|
||||
! Calculate altitude of ionosphere in km
|
||||
ionalt = (RIonE*1.e+3 - REarth*1.e-3)
|
||||
|
||||
call SM2GEO(XSM,YSM,ZSM,XGEO,YGEO,ZGEO)
|
||||
|
||||
tempt = asin(ZGEO/r)*rad2deg
|
||||
tempp = atan2(YGEO,XGEO) *rad2deg
|
||||
|
||||
call apex_mall (tempt,tempp,ionalt,ionalt, &
|
||||
b,bhat,b0,si,qdlon, &
|
||||
malat,vmp,w,d,be3,sim,d1,d2,d3,e1,e2,e3, &
|
||||
qdlat,f,f1,f2,f3,g1,g2,g3,ist)
|
||||
if (ist /= 0) stop 'apex_mall error'
|
||||
|
||||
tempt = malat *deg2rad
|
||||
tempp = qdlon *deg2rad
|
||||
|
||||
XAPX = r*cos(tempt)*cos(tempp)
|
||||
YAPX = r*cos(tempt)*sin(tempp)
|
||||
ZAPX = r*sin(tempt)
|
||||
|
||||
end subroutine SM2APEX
|
||||
|
||||
subroutine APEX2SM(XAPX,YAPX,ZAPX,XSM,YSM,ZSM)
|
||||
real(rp), intent(in) :: XAPX,YAPX,ZAPX
|
||||
real(rp), intent(out) :: XSM,YSM,ZSM
|
||||
|
||||
real(rp) :: r, ionalt, tempt, tempp
|
||||
real(rp) :: XGEO,YGEO,ZGEO,geolat,geolon
|
||||
|
||||
integer :: ist
|
||||
|
||||
! Calculate radius of ionosphere in km
|
||||
r = (RionE*1.0e+6)/REarth
|
||||
! Calculate altitude of ionosphere in km
|
||||
ionalt = (RIonE*1.e+3 - REarth*1.e-3)
|
||||
|
||||
tempt = asin(ZAPX/r)*rad2deg
|
||||
tempp = atan2(YAPX,XAPX) *rad2deg
|
||||
|
||||
call apex_q2g(tempt,tempp,ionalt,geolat,geolon,ist)
|
||||
if (ist /= 0) stop 'apex_q2g error'
|
||||
|
||||
geolon = geolon*deg2rad
|
||||
geolat = geolat*deg2rad
|
||||
XGEO = r*cos(geolat)*cos(geolon)
|
||||
YGEO = r*cos(geolat)*sin(geolon)
|
||||
ZGEO = r*sin(geolat)
|
||||
|
||||
call GEO2SM(XGEO,YGEO,ZGEO,XSM,YSM,ZSM)
|
||||
|
||||
end subroutine APEX2SM
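Both SM2APEX and APEX2SM above map between (latitude, longitude, radius) and Cartesian components with the standard spherical-coordinate relations. A small numerical check of that conversion in Python, with arbitrary example values (NumPy is used only for the trigonometry):

import numpy as np

def latlon_to_xyz(lat_rad, lon_rad, r):
    """Latitude/longitude (radians) and radius to Cartesian x, y, z."""
    return (r * np.cos(lat_rad) * np.cos(lon_rad),
            r * np.cos(lat_rad) * np.sin(lon_rad),
            r * np.sin(lat_rad))

x, y, z = latlon_to_xyz(np.deg2rad(45.0), np.deg2rad(90.0), 1.02)
# Recovering the latitude the same way APEX2SM starts, asin(z / r), gives 45 degrees back.
print(np.rad2deg(np.arcsin(z / 1.02)))   # ~45.0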
|
||||
|
||||
end module kai2geo
|
||||
|
||||
@@ -3,8 +3,12 @@ module gcmtypes
|
||||
|
||||
implicit none
|
||||
|
||||
integer,parameter :: gcm2mix_nvar = 2 ! SigmaP, SigmaH, Neutral Winds(eventually)
|
||||
integer,parameter :: mix2gcm_nvar = 3 ! pot, eng, flux
|
||||
!integer,parameter :: geo2mix_nvar = 0 ! Neutral Winds(eventually)
|
||||
!integer,parameter :: apx2mix_nvar = 2 ! SigmaP, SigmaH, Neutral Winds(eventually)
|
||||
!integer :: gcm2mix_nvar = geo2mix_nvar + apx2mix_nvar ! SigmaP, SigmaH, Neutral Winds(eventually)
|
||||
!integer,parameter :: mix2apx_nvar = 1 ! pot
|
||||
!integer,parameter :: mix2geo_nvar = 2 ! eng, flux
|
||||
!integer :: mix2gcm_nvar = mix2apx_nvar + mix2geo_nvar ! pot, eng, flux
|
||||
|
||||
integer,parameter :: GCMSIGMAP = 1,GCMSIGMAH = 2, GCMNORTH=NORTH, GCMSOUTH=SOUTH
|
||||
integer,parameter :: GCMhemispheres=2
|
||||
@@ -14,30 +18,48 @@ module gcmtypes
|
||||
real(rp), dimension(:,:),allocatable :: var
|
||||
end type var_T
|
||||
|
||||
type gcm_grid_T
|
||||
type(Map_T), allocatable, dimension(:) :: r2tMaps, t2rMaps
|
||||
type(mixGrid_T), allocatable, dimension(:) :: SM,MyGrid
|
||||
real(rp),dimension(:),allocatable :: inlat, inlon
|
||||
real(rp),dimension(:,:,:),allocatable :: glon,gclat
|
||||
type(var_T), allocatable, dimension(:,:) :: gcmInput,gcmOutput
|
||||
type(var_T), allocatable, dimension(:,:) :: mixInput,mixOutput
|
||||
integer :: nlat,nlon,nlev,nhlat,lonshift
|
||||
integer, dimension(:), allocatable :: outlist,inlist
|
||||
integer, dimension(:), allocatable :: t2N, t2S
|
||||
real(rp), dimension(:,:,:), allocatable :: invar2d, outvar2d
|
||||
integer :: mix2gcm_nvar
|
||||
!real(rp), dimension(:), allocatable :: lev,lon,clat,lat
|
||||
integer :: gcm2mix_nvar
|
||||
character (len=strLen) :: Coord
|
||||
end type gcm_grid_T
|
||||
|
||||
type gcm_T
|
||||
!type(mixGrid_T) :: rGEOGrid,tGEOGrid,tSMGrid ! GEO&SM Grid for tgcm, remix GEO grid
|
||||
!type(mixGrid_T) :: mixGrid,gcmGrid
|
||||
type(Map_T), allocatable, dimension(:) :: r2tMaps, t2rMaps
|
||||
type(mixGrid_T), allocatable, dimension(:) :: SM,GEO
|
||||
type(var_T), dimension(GCMhemispheres,gcm2mix_nvar) :: gcmInput,mixInput
|
||||
type(var_T), dimension(GCMhemispheres,mix2gcm_nvar) :: gcmOutput,mixOutput
|
||||
type(gcm_grid_T) :: GEO,APEX
|
||||
!type(var_T), dimension(GCMhemispheres,geo2mix_nvar) :: geoInput
|
||||
!type(var_T), dimension(GCMhemispheres,apx2mix_nvar) :: apxInput
|
||||
!type(var_T), dimension(GCMhemispheres,gcm2mix_nvar) :: mixInput
|
||||
!type(var_T), dimension(GCMhemispheres,mix2gcm_nvar) :: geoOutput,apxOutput,mixOutput
|
||||
!real(rp), dimension(:,:,:,:), allocatable :: mixInput, mixOutput
|
||||
!real(rp), dimension(:,:,:,:), allocatable :: gcmInput, gcmOutput
|
||||
integer :: nlat,nlon,nlev,nhlat,ntime,order,nhemi,lonshift
|
||||
integer, dimension(:), allocatable :: outlist
|
||||
integer, dimension(:), allocatable :: t2N, t2S
|
||||
real(rp), dimension(:), allocatable :: time,lev,lon,clat,lat
|
||||
real(rp),dimension(:,:),allocatable :: gx,gy
|
||||
real(rp),dimension(:,:,:),allocatable :: glon,gclat
|
||||
real(rp), dimension(:,:,:), allocatable :: invar2d, outvar2d
|
||||
!integer :: nlat,nlon,nlev,nhlat,ntime,order,nhemi,lonshift
|
||||
!real(rp),dimension(:,:),allocatable :: gx,gy
|
||||
integer :: nhemi
|
||||
!integer :: gcm2mix_nvar, mix2gcm_nvar
|
||||
integer :: cplStep = 1
|
||||
!character(len=strlen) :: mix2gcmH5,gcm2mixH5,mix2gcmLock,gcm2mixLock
|
||||
character(len=strlen) :: mix2gcmH5 = "mix4gcm.h5"
|
||||
character(len=strlen) :: mix2gcmLock = "mix2gcm.txt"
|
||||
character(len=strlen) :: gcm2mixH5 = "gcm4mix.h5"
|
||||
character(len=strlen) :: gcm2mixLock = "gcm2mix.txt"
|
||||
!character(len=strlen) :: mix2gcmH5 = "mix4gcm.h5"
|
||||
!character(len=strlen) :: mix2gcmLock = "mix2gcm.txt"
|
||||
!character(len=strlen) :: gcm2mixH5 = "gcm4mix.h5"
|
||||
!character(len=strlen) :: gcm2mixLock = "gcm2mix.txt"
|
||||
logical :: isRestart
|
||||
real(rp) :: altmax = 140. !used for apex, setting it randomly for now, km
|
||||
real(rp) :: ionalt
|
||||
end type gcm_T
|
||||
|
||||
|
||||
|
||||
end module gcmtypes
|
||||
|
||||
@@ -33,6 +33,7 @@ module mixtypes
|
||||
logical :: apply_cap
|
||||
logical :: doSWF107
|
||||
logical :: doEMA
|
||||
logical :: doET
|
||||
|
||||
! solver
|
||||
integer :: maxitr
|
||||
@@ -106,18 +107,13 @@ module mixtypes
|
||||
|
||||
type mixConductance_T
|
||||
integer :: euv_model_type, et_model_type, sigma_model_type!, aurora_model_type
|
||||
! real(rp) :: alpha, beta, R, F107,pedmin,hallmin,sigma_ratio,ped0, alphaZ, betaZ
|
||||
real(rp) :: F107, pedmin, hallmin, sigma_ratio, ped0
|
||||
! logical :: const_sigma, doRamp, doChill, doStarlight, apply_cap, doMR, doAuroralSmooth, doEMA
|
||||
logical :: const_sigma, doStarlight, apply_cap, doEMA
|
||||
logical :: const_sigma, doStarlight, apply_cap, doEMA, doET
|
||||
|
||||
! arrays on the grid
|
||||
real(rp), dimension(:,:), allocatable :: zenith, coszen
|
||||
real(rp), dimension(:,:), allocatable :: euvSigmaP, euvSigmaH
|
||||
real(rp), dimension(:,:), allocatable :: deltaSigmaP, deltaSigmaH
|
||||
! real(rp), dimension(:,:), allocatable :: E0, phi0, deltaE, aRes
|
||||
! real(rp), dimension(:,:), allocatable :: rampFactor, AuroraMask, PrecipMask, drift
|
||||
! real(rp), dimension(:,:), allocatable :: engFlux, avgEng
|
||||
end type mixConductance_T
|
||||
|
||||
! used to store an entire instance of MIX (e.g., one per hemisphere)
|
||||
@@ -126,7 +122,8 @@ module mixtypes
|
||||
type(mixGrid_T) :: G ! G - primary MIX grid used for the solver.
|
||||
type(mixGrid_T) :: mixGfpd ! mixGfpd - flipped grid for mapping from MHD, moved from mhd2mix type.
|
||||
type(ShellGrid_T) :: shGr ! ShellGrid representation of grid
|
||||
type(mixGrid_T) :: Ggeo ! Ggeo - G grid converted to geo updated every coupling step type(mixParams_T) :: P
|
||||
type(mixGrid_T) :: Ggeo ! Ggeo - G grid converted to geo updated every coupling step
|
||||
type(mixGrid_T) :: Gapx ! Gapx - G grid converted to apex updated every coupling step
|
||||
type(mixParams_T) :: P
|
||||
type(Solver_T) :: S
|
||||
type(dkParams_T) :: aurora
|
||||
|
||||
@@ -37,44 +37,20 @@ module dkaurora
|
||||
|
||||
! define these module-wide variables so we don't have to pass the Params object to all the aurora functions
|
||||
! this is similar to how it was done in MIX
|
||||
! conductance%euv_model_type = Params%euv_model_type
|
||||
! conductance%et_model_type = Params%et_model_type
|
||||
aurora%alpha = Params%alpha
|
||||
aurora%beta = Params%beta
|
||||
aurora%R = Params%R
|
||||
! conductance%F107 = Params%F107
|
||||
! conductance%pedmin = Params%pedmin
|
||||
! conductance%hallmin = Params%hallmin
|
||||
! conductance%sigma_ratio = Params%sigma_ratio
|
||||
! conductance%ped0 = Params%ped0
|
||||
! conductance%const_sigma = Params%const_sigma
|
||||
aurora%doRamp = Params%doRamp
|
||||
aurora%doChill = Params%doChill
|
||||
! conductance%doStarlight = Params%doStarlight
|
||||
aurora%doMR = Params%doMR
|
||||
aurora%doAuroralSmooth = Params%doAuroralSmooth
|
||||
! conductance%apply_cap = Params%apply_cap
|
||||
aurora%aurora_model_type = Params%aurora_model_type
|
||||
! conductance%doEMA = Params%doEMA
|
||||
|
||||
! if (.not. allocated(conductance%zenith)) allocate(conductance%zenith(G%Np,G%Nt))
|
||||
! if (.not. allocated(conductance%coszen)) allocate(conductance%coszen(G%Np,G%Nt))
|
||||
! if (.not. allocated(conductance%euvSigmaP)) allocate(conductance%euvSigmaP(G%Np,G%Nt))
|
||||
! if (.not. allocated(conductance%euvSigmaH)) allocate(conductance%euvSigmaH(G%Np,G%Nt))
|
||||
! if (.not. allocated(conductance%deltaSigmaP)) allocate(conductance%deltaSigmaP(G%Np,G%Nt))
|
||||
! if (.not. allocated(conductance%deltaSigmaH)) allocate(conductance%deltaSigmaH(G%Np,G%Nt))
|
||||
|
||||
if (.not. allocated(aurora%rampFactor)) allocate(aurora%rampFactor(G%Np,G%Nt))
|
||||
! if (.not. allocated(conductance%ares)) allocate(conductance%ares(G%Np,G%Nt))
|
||||
if (.not. allocated(aurora%deltaE)) allocate(aurora%deltaE(G%Np,G%Nt))
|
||||
if (.not. allocated(aurora%E0)) allocate(aurora%E0(G%Np,G%Nt))
|
||||
if (.not. allocated(aurora%phi0)) allocate(aurora%phi0(G%Np,G%Nt))
|
||||
! if (.not. allocated(conductance%engFlux)) allocate(conductance%engFlux(G%Np,G%Nt))
|
||||
|
||||
! if (.not. allocated(conductance%avgEng)) allocate(conductance%avgEng(G%Np,G%Nt))
|
||||
! if (.not. allocated(conductance%drift)) allocate(conductance%drift(G%Np,G%Nt))
|
||||
! if (.not. allocated(conductance%AuroraMask)) allocate(conductance%AuroraMask(G%Np,G%Nt))
|
||||
if (.not. allocated(aurora%PrecipMask)) allocate(aurora%PrecipMask(G%Np,G%Nt))
|
||||
|
||||
|
||||
! these arrays are global and should not be! reallocate them
|
||||
if(allocated(beta_RCM)) deallocate(beta_RCM)
|
||||
@@ -286,17 +262,17 @@ module dkaurora
|
||||
mhd_SigPH = SigmaP_Robinson(mhd_eavg,mhd_eflx)**2+SigmaH_Robinson(mhd_eavg,mhd_eflx)**2
|
||||
rcm_SigPH = SigmaP_Robinson(rcm_eavg,rcm_eflx)**2+SigmaH_Robinson(rcm_eavg,rcm_eflx)**2
|
||||
St%Vars(i,j,AUR_TYPE) = AT_RMono
|
||||
!if(mhd_nflx>GuABNF .and. mhd_SigPH>rcm_SigPH) then
|
||||
! St%Vars(i,j,AUR_TYPE) = AT_RMono
|
||||
!else
|
||||
! St%Vars(i,j,NUM_FLUX) = rcm_nflx
|
||||
! St%Vars(i,j,AVG_ENG) = rcm_eavg
|
||||
! if(rcm_nflx>GuABNF) then
|
||||
! St%Vars(i,j,AUR_TYPE) = AT_RMfnE
|
||||
! else
|
||||
! St%Vars(i,j,AUR_TYPE) = AT_NoPre
|
||||
! endif
|
||||
!endif
|
||||
if(mhd_nflx>GuABNF .and. mhd_SigPH>rcm_SigPH) then
|
||||
St%Vars(i,j,AUR_TYPE) = AT_RMono
|
||||
else
|
||||
St%Vars(i,j,NUM_FLUX) = rcm_nflx
|
||||
St%Vars(i,j,AVG_ENG) = rcm_eavg
|
||||
if(rcm_nflx>GuABNF) then
|
||||
St%Vars(i,j,AUR_TYPE) = AT_RMfnE
|
||||
else
|
||||
St%Vars(i,j,AUR_TYPE) = AT_NoPre
|
||||
endif
|
||||
endif
|
||||
else
|
||||
! Linearly merge MHD and RCM diffuse nflux and eflux.
|
||||
! Note where deltaE>eTINY but beta_RCM<=0.01, gtype will be near 1.
|
||||
|
||||
@@ -24,7 +24,7 @@ module raijuLoss_eWM_BW
|
||||
real(rp) :: NpsphLow = 10.0 ! [#/cc]
|
||||
real(rp) :: ChorusLMax = 7.0 ! [Re]
|
||||
real(rp) :: PsheetLMin = 8.0 ! [Re]
|
||||
real(rp) :: ChorusEMin = 1.1 ! [keV]
|
||||
real(rp) :: ChorusEMin = 1.0 ! [keV]
|
||||
|
||||
type(TimeSeries_T) :: KpTS
|
||||
!! Kp data from wind file
|
||||
@@ -146,7 +146,6 @@ module raijuLoss_eWM_BW
|
||||
call IOArray4DFill(IOVars,"Tau", this%Tau4D )
|
||||
|
||||
call ClearIO(IOVars)
|
||||
|
||||
!Array order check: array is in ascending order
|
||||
!Chorus
|
||||
if(this%Kp1D(1) > this%Kp1D(this%Nkp)) then
|
||||
@@ -198,12 +197,13 @@ module raijuLoss_eWM_BW
|
||||
integer :: i,j,k
|
||||
real(rp) :: NpsphPnt
|
||||
!! Density [#/cc] of plasmasphere at point i,j
|
||||
real(rp) :: L, MLT, E, Kp
|
||||
!! L shell and MLT of given point, channel energy in MeV, current Kp
|
||||
real(rp) :: wLBlend, wNBlend
|
||||
real(rp) :: L, MLT, E, Kp, Etemp
|
||||
!! L shell and MLT of given point, channel energy in MeV, current Kp, and temporary channel energy in keV
|
||||
real(rp) :: wLBlend, wNBlend, tScl
|
||||
!! L-weighting of blending between IMAG and PS. 0=PS
|
||||
!! Density-weighting between Chorus and Hiss
|
||||
real(rp) :: tauPS, tauHiss, tauCHORUS
|
||||
!! Scale factor in lifetime between sub-1keV and 1keV+ energy
|
||||
real(rp) :: tauPS, tauHiss, tauChorus
|
||||
|
||||
! Zero everyone out in prep for new values
|
||||
call fillArray(this%wPS , 0.0_rp)
|
||||
@@ -221,7 +221,7 @@ module raijuLoss_eWM_BW
|
||||
associate(sh=>Grid%shGrid, spc=>Grid%spc(eleIdx))
|
||||
|
||||
!$OMP PARALLEL DO default(shared) &
|
||||
!$OMP private(i,j,k,isGood,L,MLT,E,NpsphPnt,wNBlend,wLBlend,tauPS,tauHiss,tauCHORUS)
|
||||
!$OMP private(i,j,k,isGood,L,MLT,E,Etemp,tScl,NpsphPnt,wNBlend,wLBlend,tauPS,tauHiss,tauChorus)
|
||||
do j=sh%jsg,sh%jeg
|
||||
do i=sh%isg,sh%ieg
|
||||
isGood = State%active(i,j) == RAIJUACTIVE
|
||||
@@ -234,7 +234,7 @@ module raijuLoss_eWM_BW
|
||||
|
||||
NpsphPnt = State%Den(psphIdx)%data(i,j) ! [#/cc]
|
||||
|
||||
! Calculate blending
|
||||
|
||||
wNBlend = dlog(NpsphPnt/this%NpsphLow) / dlog(this%NpsphHigh/this%NpsphLow)
|
||||
call ClampValue(wNBlend, 0.0_rp, 1.0_rp)
|
||||
!! 1 => Psphere Hiss, 0 => Other
|
||||
@@ -255,10 +255,21 @@ module raijuLoss_eWM_BW
|
||||
|
||||
do k=spc%kStart,spc%kEnd
|
||||
|
||||
E = abs(Grid%alamc(k) * State%bvol_cc(i,j)**(-2./3.)) * 1.0E-6 ! [MeV]
|
||||
Etemp = abs(Grid%alamc(k) * State%bvol_cc(i,j)**(-2.0/3.0)) * 1.0E-3 ! [KeV]
|
||||
!Scale up lifetime for sub-1keV energy in Chorus
|
||||
!Calculate E [MeV] to evaluate wave model
|
||||
if (Etemp >= this%ChorusEMin) then ! [KeV]
|
||||
E = Etemp*1.0E-3_rp !Energy [MeV]
|
||||
tScl = 1.0_rp !No change needed
|
||||
else
|
||||
!Define a scaling factor to multiply tau (lifetime)
|
||||
!Lower energy particles are slower which increases lifetime
|
||||
E = this%ChorusEmin*1.0E-3_rp !Energy [MeV]
|
||||
tScl = sqrt(this%ChorusEmin/Etemp)
|
||||
endif
|
||||
|
||||
if (this%wHISS(i,j) > TINY) then
|
||||
tauHiss = CalcTau_Hiss(MLT, L, E, Kp)
|
||||
tauHiss = CalcTau_Hiss(MLT, L, E, Kp) * tScl
|
||||
else
|
||||
tauHiss = HUGE
|
||||
endif
|
||||
@@ -271,9 +282,9 @@ module raijuLoss_eWM_BW
|
||||
|
||||
if (this%wCHORUS(i,j) > TINY) then
|
||||
!! Implement chorus tau calculation here
|
||||
continue
|
||||
tauChorus = CalcTau_Chorus(this, MLT, L, E, Kp) * tScl
|
||||
else
|
||||
tauCHORUS = HUGE
|
||||
tauChorus = HUGE
|
||||
endif
|
||||
|
||||
this%tauTotal(i,j,k) = this%wHISS(i,j)*tauHiss + this%wPS(i,j)*tauPS + this%wCHORUS(i,j)*tauCHORUS
|
||||
@@ -376,5 +387,132 @@ module raijuLoss_eWM_BW
|
||||
end associate
|
||||
end subroutine eWM_BW_DoOutput
|
||||
|
||||
function CalcTau_Chorus(this,MLTin,Lin,Ekin,Kpin) result(tau)
|
||||
! linearly interpolate tau from EWMTauInput to current MLT,L,Kp,Ek value
|
||||
class(raiLoss_eWM_BW_T), intent(in) :: this
|
||||
real(rp), intent(in) :: MLTin,Lin,Ekin,Kpin
|
||||
real(rp) :: tau
|
||||
real(rp) :: tauKMLE(2,2,2,2),tauMLE(2,2,2),tauLE(2,2),tauE(2)! tauKMLE(1,2,2,2) means tauKlMuLuEu, l:lower bound, u: upper bound in the NN method
|
||||
real(rp) :: dK,wK,dM,wM,dL,wL,dE,wE
|
||||
integer :: kL,kU,mL,mU,lL,lU,eL,eU
|
||||
|
||||
associate(Nm=>this%Nmlt,Nl=>this%Nl,Nk=>this%Nkp,Ne=>this%Ne,&
|
||||
Kpi=>this%Kp1D,MLTi=>this%MLT1D,Li=>this%L1D,Eki=>this%Energy1D,&
|
||||
taui=>this%Tau4D)
|
||||
! Find the nearest neighbors in Kp
|
||||
if (Kpin >= maxval(Kpi)) then
|
||||
kL = Nk !use Kp maximum
|
||||
kU = Nk
|
||||
else if (Kpin <= minval(Kpi)) then
|
||||
kL = 1 !use Kp minimum
|
||||
kU = 1
|
||||
else
|
||||
kL = maxloc(Kpi,dim=1,mask=(Kpi<Kpin))
|
||||
kU = kL+1
|
||||
endif
|
||||
! Find the nearest neighbours in MLT
|
||||
if ((MLTin >= maxval(MLTi)) .or. (MLTin <= minval(MLTi))) then ! maxval of MLT is 24, minval of MLT is 0
|
||||
mL = 1 !use MLT = 0
|
||||
mU = 1
|
||||
else
|
||||
mL = maxloc(MLTi,dim=1,mask=(MLTi<MLTin))
|
||||
mU = mL+1
|
||||
endif
|
||||
! Find the nearest neighbours in L
|
||||
if (Lin >= maxval(Li)) then
|
||||
lL = Nl !use L maximum
|
||||
lU = Nl
|
||||
else if (Lin <= minval(Li)) then
|
||||
lL = 1 ! use L minimum
|
||||
lU = 1
|
||||
else
|
||||
lL = maxloc(Li,dim=1,mask=(Li<Lin))
|
||||
lU = lL+1
|
||||
endif
|
||||
! Find the nearest neighbours in Ek
|
||||
if (Ekin < minval(Eki)) then
|
||||
tau = HUGE ! For low energies, assign a huge lifetime.
|
||||
return
|
||||
else if (Ekin >= maxval(Eki)) then
|
||||
eL = Ne !use Ek maximum
|
||||
eU = Ne
|
||||
else
|
||||
eL = maxloc(Eki,dim=1,mask=(Eki<Ekin))
|
||||
eU = eL + 1
|
||||
endif
|
||||
!linear interpolation in Kp
|
||||
if (kL == kU) then
|
||||
tauMLE(1,1,1) = log10(taui(kL,mL,lL,eL))!Interpolation in log10(taui) space
|
||||
tauMLE(1,1,2) = log10(taui(kL,mL,lL,eU))
|
||||
tauMLE(1,2,1) = log10(taui(kL,mL,lU,eL))
|
||||
tauMLE(1,2,2) = log10(taui(kL,mL,lU,eU))
|
||||
tauMLE(2,1,1) = log10(taui(kL,mU,lL,eL))
|
||||
tauMLE(2,1,2) = log10(taui(kL,mU,lL,eU))
|
||||
tauMLE(2,2,1) = log10(taui(kL,mU,lU,eL))
|
||||
tauMLE(2,2,2) = log10(taui(kL,mU,lU,eU))
|
||||
else
|
||||
dK = Kpi(kU)-Kpi(kL)
|
||||
wK = (Kpin-Kpi(kL))/dK
|
||||
tauKMLE(1,1,1,1) = log10(taui(kL,mL,lL,eL))
|
||||
tauKMLE(2,1,1,1) = log10(taui(kU,mL,lL,eL))
|
||||
tauMLE(1,1,1) = tauKMLE(1,1,1,1) + wK*(tauKMLE(2,1,1,1)-tauKMLE(1,1,1,1))
|
||||
tauKMLE(1,1,1,2) = log10(taui(kL,mL,lL,eU))
|
||||
tauKMLE(2,1,1,2) = log10(taui(kU,mL,lL,eU))
|
||||
tauMLE(1,1,2) = tauKMLE(1,1,1,2) + wK*(tauKMLE(2,1,1,2)-tauKMLE(1,1,1,2))
|
||||
tauKMLE(1,1,2,1) = log10(taui(kL,mL,lU,eL))
|
||||
tauKMLE(2,1,2,1) = log10(taui(kU,mL,lU,eL))
|
||||
tauMLE(1,2,1) = tauKMLE(1,1,2,1) + wK*(tauKMLE(2,1,2,1)-tauKMLE(1,1,2,1))
|
||||
tauKMLE(1,1,2,2) = log10(taui(kL,mL,lU,eU))
|
||||
tauKMLE(2,1,2,2) = log10(taui(kU,mL,lU,eU))
|
||||
tauMLE(1,2,2) = tauKMLE(1,1,2,2) + wK*(tauKMLE(2,1,2,2)-tauKMLE(1,1,2,2))
|
||||
tauKMLE(1,2,1,1) = log10(taui(kL,mU,lL,eL))
|
||||
tauKMLE(2,2,1,1) = log10(taui(kU,mU,lL,eL))
|
||||
tauMLE(2,1,1) = tauKMLE(1,2,1,1) + wK*(tauKMLE(2,2,1,1)-tauKMLE(1,2,1,1))
|
||||
tauKMLE(1,2,1,2) = log10(taui(kL,mU,lL,eU))
|
||||
tauKMLE(2,2,1,2) = log10(taui(kU,mU,lL,eU))
|
||||
tauMLE(2,1,2) = tauKMLE(1,2,1,2) + wK*(tauKMLE(2,2,1,2)-tauKMLE(1,2,1,2))
|
||||
tauKMLE(1,2,2,1) = log10(taui(kL,mU,lU,eL))
|
||||
tauKMLE(2,2,2,1) = log10(taui(kU,mU,lU,eL))
|
||||
tauMLE(2,2,1) = tauKMLE(1,2,2,1) + wK*(tauKMLE(2,2,2,1)-tauKMLE(1,2,2,1))
|
||||
tauKMLE(1,2,2,2) = log10(taui(kL,mU,lU,eU))
|
||||
tauKMLE(2,2,2,2) = log10(taui(kU,mU,lU,eU))
|
||||
tauMLE(2,2,2) = tauKMLE(1,2,2,2) + wK*(tauKMLE(2,2,2,2)-tauKMLE(1,2,2,2))
|
||||
end if
|
||||
! linear interpolation in mlt
|
||||
if (mL == mU) then
|
||||
tauLE(1,1) = tauMLE(2,1,1)
|
||||
tauLE(1,2) = tauMLE(2,1,2)
|
||||
tauLE(2,1) = tauMLE(2,2,1)
|
||||
tauLE(2,2) = tauMLE(2,2,2)
|
||||
else
|
||||
dM = MLTi(mU)-MLTi(mL)
|
||||
wM = (MLTin-MLTi(mL))/dM
|
||||
tauLE(1,1) = tauMLE(1,1,1) + wM*(tauMLE(2,1,1)-tauMLE(1,1,1))
|
||||
tauLE(1,2) = tauMLE(1,1,2) + wM*(tauMLE(2,1,2)-tauMLE(1,1,2))
|
||||
tauLE(2,1) = tauMLE(1,2,1) + wM*(tauMLE(2,2,1)-tauMLE(1,2,1))
|
||||
tauLE(2,2) = tauMLE(1,2,2) + wM*(tauMLE(2,2,2)-tauMLE(1,2,2))
|
||||
end if
|
||||
! linear interpolation in L
|
||||
if (lL == lU) then
|
||||
tauE(1) = tauLE(2,1)
|
||||
tauE(2) = tauLE(2,2)
|
||||
else
|
||||
dL = Li(lU)-Li(lL)
|
||||
wL = (Lin-Li(lL))/dL
|
||||
tauE(1) = tauLE(1,1)+ wL*(tauLE(2,1)-tauLE(1,1))
|
||||
tauE(2) = tauLE(1,2)+ wL*(tauLE(2,2)-tauLE(1,2))
|
||||
end if
|
||||
! linear interpolation in Ek
|
||||
if (eL == eU) then
|
||||
tau = tauE(1)
|
||||
else
|
||||
dE = Eki(eU)-Eki(eL)
|
||||
wE = (Ekin-Eki(eL))/dE
|
||||
tau = tauE(1) + wE*(tauE(2)-tauE(1))
|
||||
end if
|
||||
tau = 10.0**tau !convert back to tau in seconds
|
||||
end associate
|
||||
|
||||
END FUNCTION CalcTau_Chorus
|
||||
|
||||
end module raijuLoss_eWM_BW
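The interpolation strategy in CalcTau_Chorus above is: bracket the query point with its nearest table neighbors along each axis (clamping at the table edges), then reduce one dimension at a time with linear interpolation in log10(tau). The sketch below applies the same recipe to a made-up two-dimensional (Kp, energy) table rather than the full four-dimensional lifetime table; the table values and query point are arbitrary.

program tau_interp_demo
  implicit none
  integer, parameter :: rp = kind(1.0d0)
  real(rp) :: Kp1D(2) = [1.0_rp, 3.0_rp]
  real(rp) :: E1D(3)  = [10.0_rp, 100.0_rp, 1000.0_rp]     ! [keV]
  real(rp) :: tau2D(2,3), tauE(2), tau, wK, wE
  integer  :: kL, kU, eL, eU

  ! made-up lifetime table [s], tau2D(Kp,E)
  tau2D = reshape([1.0e5_rp, 5.0e4_rp, 3.0e4_rp, 1.5e4_rp, 1.0e4_rp, 6.0e3_rp], [2,3])
  call bracket(Kp1D, 2.2_rp, kL, kU, wK)
  call bracket(E1D, 250.0_rp, eL, eU, wE)

  ! collapse Kp first, then energy, interpolating in log10(tau)
  tauE(1) = lerp(log10(tau2D(kL,eL)), log10(tau2D(kU,eL)), wK)
  tauE(2) = lerp(log10(tau2D(kL,eU)), log10(tau2D(kU,eU)), wK)
  tau = 10.0_rp**lerp(tauE(1), tauE(2), wE)
  print '(a,es12.4,a)', 'interpolated tau = ', tau, ' s'

contains
  subroutine bracket(x, xq, iL, iU, w)
    ! nearest neighbors of xq in the ascending table x; clamp outside the table
    real(rp), intent(in)  :: x(:), xq
    integer,  intent(out) :: iL, iU
    real(rp), intent(out) :: w
    if (xq <= x(1)) then
       iL = 1; iU = 1; w = 0.0_rp
    else if (xq >= x(size(x))) then
       iL = size(x); iU = size(x); w = 0.0_rp
    else
       iL = maxloc(x, dim=1, mask=(x < xq)); iU = iL + 1
       w  = (xq - x(iL))/(x(iU) - x(iL))
    end if
  end subroutine bracket

  pure function lerp(a, b, w) result(c)
    real(rp), intent(in) :: a, b, w
    real(rp) :: c
    c = a + w*(b - a)
  end function lerp
end program tau_interp_demo

In the commit itself the same bracket-and-lerp step is repeated over Kp, MLT, L, and energy before converting back out of log space.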
@@ -42,7 +42,7 @@ module raijustarter
|
||||
call inpXML%GetFileStr(tmpStr)
|
||||
! Create new XML reader w/ RAIJU as root
|
||||
iXML = New_XML_Input(trim(tmpStr),'Kaiju/RAIJU',.true.)
|
||||
|
||||
|
||||
! Init model, grid, state
|
||||
call raijuInitModel(app%Model, iXML)
|
||||
|
||||
|
||||
@@ -327,4 +327,4 @@ module raijulosses
|
||||
! enddo
|
||||
!
|
||||
! end subroutine calcElectronLossRate
|
||||
end module raijulosses
|
||||
end module raijulosses
|
||||
|
||||
@@ -25,23 +25,16 @@ module mixconductance
|
||||
conductance%euv_model_type = Params%euv_model_type
|
||||
conductance%et_model_type = Params%et_model_type
|
||||
conductance%sigma_model_type = Params%sigma_model_type
|
||||
! conductance%alpha = Params%alpha
|
||||
! conductance%beta = Params%beta
|
||||
! conductance%R = Params%R
|
||||
conductance%F107 = Params%F107
|
||||
conductance%pedmin = Params%pedmin
|
||||
conductance%hallmin = Params%hallmin
|
||||
conductance%sigma_ratio = Params%sigma_ratio
|
||||
conductance%ped0 = Params%ped0
|
||||
conductance%const_sigma = Params%const_sigma
|
||||
! conductance%doRamp = Params%doRamp
|
||||
! conductance%doChill = Params%doChill
|
||||
conductance%doStarlight = Params%doStarlight
|
||||
! conductance%doMR = Params%doMR
|
||||
! conductance%doAuroralSmooth = Params%doAuroralSmooth
|
||||
conductance%apply_cap = Params%apply_cap
|
||||
! conductance%aurora_model_type = Params%aurora_model_type
|
||||
conductance%doEMA = Params%doEMA
|
||||
conductance%doET = Params%doET
|
||||
|
||||
if (.not. allocated(conductance%zenith)) allocate(conductance%zenith(G%Np,G%Nt))
|
||||
if (.not. allocated(conductance%coszen)) allocate(conductance%coszen(G%Np,G%Nt))
|
||||
@@ -64,21 +57,12 @@ module mixconductance
|
||||
real(rp), dimension(:,:), allocatable :: SigH0,SigP0,MagE !Old values of SigH/SigP
|
||||
real(rp) :: dT,upTau,dnTau,wAvgU,wAvgD
|
||||
integer :: i,j
|
||||
logical :: doET
|
||||
|
||||
doET = .true. !Just setting this here for testing instead of doing the full xml option (don't tell Jeff)
|
||||
|
||||
!Save old Sigs
|
||||
allocate(SigH0(G%Np,G%Nt))
|
||||
allocate(SigP0(G%Np,G%Nt))
|
||||
SigP0 = St%Vars(:,:,SIGMAP)
|
||||
SigH0 = St%Vars(:,:,SIGMAH)
|
||||
|
||||
! ! Compute EUV though because it's used in fedder
|
||||
! call conductance_euv(conductance,G,St)
|
||||
|
||||
! ! Compute auroral precipitation flux
|
||||
! call dragonking_total(conductance,G,St)
|
||||
|
||||
if (present(gcm)) then
|
||||
! Use GCM conductance.
|
||||
@@ -93,7 +77,7 @@ module mixconductance
|
||||
! Use vector addition of auroral and EUV conductance.
|
||||
call conductance_aurora(conductance,G,St)
|
||||
|
||||
if (doET) then
|
||||
if (conductance%doET) then
|
||||
!Add ET enhancement to embiggen auroral conductance
|
||||
allocate(MagE(G%Np,G%Nt))
|
||||
MagE = 0.0
|
||||
@@ -103,6 +87,7 @@ module mixconductance
|
||||
conductance%deltaSigmaP(:,:) = SigET_P(MagE(:,:))*conductance%deltaSigmaP(:,:)
|
||||
conductance%deltaSigmaH(:,:) = SigET_H(MagE(:,:))*conductance%deltaSigmaH(:,:)
|
||||
endwhere
|
||||
deallocate(MagE)
|
||||
endif
|
||||
|
||||
St%Vars(:,:,SIGMAP) = sqrt( conductance%euvSigmaP**2 + conductance%deltaSigmaP**2)
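The assignment above combines the EUV and (optionally ET-enhanced) auroral contributions in quadrature. A tiny sketch of just that combination, with arbitrary example conductances:

program conductance_blend_demo
  ! Quadrature ("vector") addition of EUV and auroral Pedersen conductance.
  implicit none
  integer, parameter :: rp = kind(1.0d0)
  real(rp) :: euvSigmaP, deltaSigmaP, SigmaP
  euvSigmaP   = 4.0_rp   ! [S] solar EUV contribution (example)
  deltaSigmaP = 6.0_rp   ! [S] auroral contribution, possibly ET-enhanced (example)
  SigmaP = sqrt(euvSigmaP**2 + deltaSigmaP**2)
  print '(a,f6.2,a)', 'combined SigmaP = ', SigmaP, ' S'
end program conductance_blend_demo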
@@ -247,12 +247,27 @@ contains
|
||||
vstr = trim("Longitude (Geographic)") // " "//trim(hStr)
|
||||
ustr = trim("radians")
|
||||
call AddOutVar(IOVars,vstr,I(h)%Ggeo%p,uStr)
|
||||
! add apex lat and lon
|
||||
vstr = trim("Latitude (Apex)") // " "//trim(hStr)
|
||||
ustr = trim("radians")
|
||||
call AddOutVar(IOVars,vstr,I(h)%Ggeo%t,uStr)
|
||||
vstr = trim("Longitude (Apex)") // " "//trim(hStr)
|
||||
ustr = trim("radians")
|
||||
call AddOutVar(IOVars,vstr,I(h)%Ggeo%p,uStr)
|
||||
! add geographic X and Y
|
||||
vstr = trim("X (Geographic)") // " "//trim(hStr)
|
||||
ustr = trim("Ri")
|
||||
call AddOutVar(IOVars,vstr,I(h)%Ggeo%x,uStr)
|
||||
vstr = trim("Y (Geographic)") // " "//trim(hStr)
|
||||
ustr = trim("Ri")
|
||||
call AddOutVar(IOVars,vstr,I(h)%Ggeo%y,uStr)
|
||||
! add Apex X and Y
|
||||
vstr = trim("X (Apex)") // " "//trim(hStr)
|
||||
ustr = trim("Ri")
|
||||
call AddOutVar(IOVars,vstr,I(h)%Ggeo%x,uStr)
|
||||
vstr = trim("Y (Apex)") // " "//trim(hStr)
|
||||
ustr = trim("Ri")
|
||||
call AddOutVar(IOVars,vstr,I(h)%Ggeo%y,uStr)
|
||||
enddo
|
||||
|
||||
! now add time
|
||||
|
||||
@@ -113,12 +113,11 @@ module mixmain
|
||||
I%St%Vars(:,:,POT) = reshape(I%S%solution,[I%G%Np,I%G%Nt])*(I%rad_iono_m*1.e-6)**2*1.D3 ! in kV
|
||||
end subroutine get_potential
|
||||
|
||||
subroutine run_mix(I,tilt,doModelOpt,gcm,mjd)
|
||||
subroutine run_mix(I,tilt,doModelOpt,gcm)
|
||||
type(mixIon_T),dimension(:),intent(inout) :: I
|
||||
type(gcm_T),optional,intent(inout) :: gcm
|
||||
real(rp),intent(in) :: tilt
|
||||
logical, optional, intent(in) :: doModelOpt ! allow to change on the fly whether we use conductance model
|
||||
real(rp), optional, intent(in) :: mjd ! used to calculate the remix GEO grid as a function of time
|
||||
|
||||
logical :: doModel=.true. ! always default to xml input deck unless doModelOpt is present and on
|
||||
logical :: isRestart = .false.
|
||||
@@ -126,7 +125,6 @@ module mixmain
|
||||
|
||||
if (present(doModelOpt)) doModel = doModelOpt
|
||||
if (present(gcm)) isRestart = gcm%isRestart
|
||||
if (present(mjd)) call MJDRecalc(mjd)
|
||||
|
||||
NumH = size(I)
|
||||
|
||||
@@ -144,22 +142,24 @@ module mixmain
|
||||
I(h)%conductance%const_sigma = .true.
|
||||
end if
|
||||
|
||||
if (present(mjd) .and. .not. present(gcm)) then
|
||||
if (.not. present(gcm)) then
|
||||
!write(*,*) "GRID TRANSFORM!"
|
||||
if (I(h)%St%hemisphere .eq. NORTH) then
|
||||
call transform_grid(I(h)%G,I(h)%Ggeo,iSMtoGEO,h,ym1=1)
|
||||
call transform_grid(I(h)%G,I(h)%Gapx,iSMtoAPEX,h,ym1=1)
|
||||
else
|
||||
call transform_grid(I(h)%G,I(h)%Ggeo,iSMtoGEO,h,ym1=-1)
|
||||
call transform_grid(I(h)%G,I(h)%Gapx,iSMtoAPEX,h,ym1=-1)
|
||||
endif
|
||||
!write(*,*) "GRID TRANSFORM!!"
|
||||
end if
|
||||
|
||||
if (present(gcm) .and. isRestart) then
|
||||
!write(*,*) "conductance: restart"
|
||||
!we read the conductance from file, so we're going to skip
|
||||
gcm%isRestart = .false.
|
||||
!write(*,*) "Get rePOT: ", maxval(I(h)%St%Vars(:,:,POT)),minval(I(h)%St%Vars(:,:,POT))
|
||||
else
|
||||
!if (present(gcm) .and. isRestart) then
|
||||
! !write(*,*) "conductance: restart"
|
||||
! !we read the conductance from file, so we're going to skip
|
||||
! gcm%isRestart = .false.
|
||||
! !write(*,*) "Get rePOT: ", maxval(I(h)%St%Vars(:,:,POT)),minval(I(h)%St%Vars(:,:,POT))
|
||||
!else
|
||||
call Tic("MIX-COND")
|
||||
! Compute auroral precipitation flux regardless of coupling mode.
|
||||
! Note conductance_euv is used inside dragonking_total.
|
||||
@@ -167,7 +167,7 @@ module mixmain
|
||||
call dragonking_total(I(h)%aurora,I(h)%G,I(h)%St,I(h)%conductance)
|
||||
|
||||
if (present(gcm)) then
|
||||
!write(*,*) 'doGCM!'
|
||||
write(*,*) 'doGCM!'
|
||||
call conductance_total(I(h)%conductance,I(h)%G,I(h)%St,gcm,h)
|
||||
else
|
||||
!write(*,*) "conductance: total"
|
||||
@@ -182,7 +182,7 @@ module mixmain
|
||||
call Tic("MIX-POT")
|
||||
call get_potential(I(h))
|
||||
call Toc("MIX-POT")
|
||||
end if
|
||||
!end if
|
||||
|
||||
end do
|
||||
end subroutine run_mix
|
||||
|
||||
@@ -144,6 +144,7 @@ module mixparams
|
||||
call xmlInp%Set_Val(Params%apply_cap,"conductance/apply_cap",.true.)
|
||||
call xmlInp%Set_Val(Params%gamma,"precipitation/gamma",5.0/3.0)
|
||||
call xmlInp%Set_Val(Params%doEMA,"conductance/doEMA",.true.)
|
||||
call xmlInp%Set_Val(Params%doET,"conductance/doET",.true.)
|
||||
|
||||
! if this is on, Params%F107 read from xml file above will be overwritten
|
||||
! and F107 will be set from the SW file (i.e., from OMNI data)
|
||||
|
||||
@@ -16,76 +16,172 @@ module gcminterp
|
||||
subroutine init_gcm_grid(gcm,ion)
|
||||
type(gcm_T), intent(inout) :: gcm
|
||||
type(mixIon_T),dimension(:),intent(in) :: ion
|
||||
|
||||
gcm%GEO%Coord = 'GEO'
|
||||
gcm%APEX%Coord = 'APEX'
|
||||
|
||||
gcm%GEO%gcm2mix_nvar = 0 ! Neutral Winds (eventually)
|
||||
gcm%GEO%mix2gcm_nvar = 2 ! eng, flux
|
||||
|
||||
gcm%APEX%gcm2mix_nvar = 2 ! SigmaP, sigmaH
|
||||
gcm%APEX%mix2gcm_nvar = 2 ! pot, flux (for auroral boundary)
|
||||
|
||||
call create_gcm_grid(gcm%GEO,ion)
|
||||
call create_gcm_grid(gcm%APEX,ion)
|
||||
|
||||
end subroutine init_gcm_grid
|
||||
|
||||
subroutine create_gcm_grid(grid,ion)
|
||||
type(gcm_grid_T), intent(inout) :: grid
|
||||
type(mixIon_T),dimension(:),intent(in) :: ion
|
||||
|
||||
integer :: Np, Nt, Nh, i,j,h,v
|
||||
Np = gcm%nlon
|
||||
Nt = gcm%nhlat
|
||||
Nh = gcm%nhemi
|
||||
Np = grid%nlon
|
||||
Nt = grid%nhlat
|
||||
Nh = GCMhemispheres
|
||||
|
||||
! Let's try something
|
||||
! Build a list of index that says these are N and these are S
|
||||
! Then use that to map to N/S in gcm
|
||||
gcm%t2N = pack([(i,i=1,gcm%nlat)], gcm%lat > 10)
|
||||
gcm%t2S = pack([(i,i=1,gcm%nlat)], gcm%lat < -10)
|
||||
gcm%lonshift = findloc(gcm%lon(:gcm%nlon-1),0.,1)-1
|
||||
grid%t2N = pack([(i,i=1,grid%nlat)], grid%inlat > 10)
|
||||
grid%t2S = pack([(i,i=1,grid%nlat)], grid%inlat < -10)
|
||||
grid%lonshift = findloc(grid%inlon(:grid%nlon-1),0.,1)-1
|
||||
|
||||
! Going to assume the hemispheres are symmetrically sized for now
|
||||
if ( size(gcm%t2N) /= size(gcm%t2S) ) write(*,*) 'WE GOT ASYMMETRY'
|
||||
gcm%nhlat = size(gcm%t2N)
|
||||
if ( size(grid%t2N) /= size(grid%t2S) ) write(*,*) 'WE GOT ASYMMETRY'
|
||||
grid%nhlat = size(grid%t2N)
|
||||
|
||||
if (.not.allocated(gcm%gclat)) allocate(gcm%gclat(gcm%nlon,gcm%nhlat,Nh))
|
||||
if (.not.allocated(gcm%glon)) allocate(gcm%glon(gcm%nlon,gcm%nhlat,Nh))
|
||||
if (.not.allocated(grid%gclat)) allocate(grid%gclat(grid%nlon,grid%nhlat,Nh))
|
||||
if (.not.allocated(grid%glon)) allocate(grid%glon(grid%nlon,grid%nhlat,Nh))
|
||||
|
||||
! Convert things from latitude to colatitude (and funky colat for south)
|
||||
do i=1,gcm%nlon
|
||||
gcm%gclat(i,:,GCMNORTH) = 90.-gcm%lat(gcm%t2N)
|
||||
gcm%gclat(i,:,GCMNORTH) = gcm%gclat(i,gcm%nhlat:1:-1,GCMNORTH) ! reverse order. Ascending.
|
||||
gcm%gclat(i,:,GCMSOUTH) = 90.+gcm%lat(gcm%t2S) !For mapping to work, we're going to use remix's funky southern colat
|
||||
do i=1,grid%nlon
|
||||
grid%gclat(i,:,GCMNORTH) = 90.-grid%inlat(grid%t2N)
|
||||
grid%gclat(i,:,GCMNORTH) = grid%gclat(i,grid%nhlat:1:-1,GCMNORTH) ! reverse order. Ascending.
|
||||
grid%gclat(i,:,GCMSOUTH) = 90.+grid%inlat(grid%t2S) !For mapping to work, we're going to use remix's funky southern colat
|
||||
enddo
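A small sketch of the hemisphere bookkeeping used above: pack the row indices of each hemisphere into index lists, then use them to form the northern co-latitude and the reversed ("funky") southern co-latitude. The seven-point latitude grid is illustrative only, not the GCM grid.

program hemi_split_demo
  implicit none
  integer, parameter :: rp = kind(1.0d0)
  real(rp) :: lat(7) = [-75.0_rp, -55.0_rp, -35.0_rp, 0.0_rp, 35.0_rp, 55.0_rp, 75.0_rp]
  integer, allocatable :: t2N(:), t2S(:)
  integer :: i
  t2N = pack([(i, i=1, size(lat))], lat >  10.0_rp)   ! northern rows
  t2S = pack([(i, i=1, size(lat))], lat < -10.0_rp)   ! southern rows
  print '(a,3i3)',   'north rows:  ', t2N
  print '(a,3i3)',   'south rows:  ', t2S
  print '(a,3f7.1)', 'north colat: ', 90.0_rp - lat(t2N)
  print '(a,3f7.1)', 'south colat: ', 90.0_rp + lat(t2S)   ! remix's reversed southern co-latitude
end program hemi_split_demo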
! This complicated looking thing converts [-180,180] longitude into [0,360] and also shifts it so that longitude starts at 0
|
||||
do j=1,gcm%nhlat
|
||||
gcm%glon(:gcm%nlon-1,j,GCMNORTH) = modulo(cshift(gcm%lon(:gcm%nlon-1),gcm%lonshift)+360.,360.)!modulo((atan2(G2%y,G2%x)+2*pi),(2*pi))
|
||||
gcm%glon(:gcm%nlon-1,j,GCMSOUTH) = modulo(cshift(gcm%lon(:gcm%nlon-1),gcm%lonshift)+360.,360.)
|
||||
do j=1,grid%nhlat
|
||||
grid%glon(:grid%nlon-1,j,GCMNORTH) = modulo(cshift(grid%inlon(:grid%nlon-1),grid%lonshift)+360.,360.)!modulo((atan2(G2%y,G2%x)+2*pi),(2*pi))
|
||||
grid%glon(:grid%nlon-1,j,GCMSOUTH) = modulo(cshift(grid%inlon(:grid%nlon-1),grid%lonshift)+360.,360.)
|
||||
! cover periodic longitude point
|
||||
gcm%glon(gcm%nlon,j,GCMNORTH) = 360. ! no matter what, last point is periodic point
|
||||
gcm%glon(gcm%nlon,j,GCMSOUTH) = 360. ! hard coding the value of last point for now
|
||||
grid%glon(grid%nlon,j,GCMNORTH) = 360. ! no matter what, last point is periodic point
|
||||
grid%glon(grid%nlon,j,GCMSOUTH) = 360. ! hard coding the value of last point for now
|
||||
enddo
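The longitude handling described in the comment above can be seen in isolation below: rotate the array so 0 degrees comes first, then wrap [-180,180) into [0,360). The eight-point grid is an example, not the GCM grid.

program lonshift_demo
  implicit none
  integer, parameter :: rp = kind(1.0d0)
  real(rp) :: lon(8)
  integer  :: lonshift
  lon = [-180.0_rp, -135.0_rp, -90.0_rp, -45.0_rp, 0.0_rp, 45.0_rp, 90.0_rp, 135.0_rp]
  lonshift = findloc(lon, 0.0_rp, dim=1) - 1           ! how far to rotate so 0 comes first
  print '(8f7.1)', modulo(cshift(lon, lonshift) + 360.0_rp, 360.0_rp)
  ! expected: 0.0 45.0 90.0 135.0 180.0 225.0 270.0 315.0
end program lonshift_demo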
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
|
||||
|
||||
!Now do remix mapping
|
||||
if (.not.allocated(gcm%GEO)) allocate(gcm%GEO(Nh))
|
||||
if (.not.allocated(gcm%SM)) allocate(gcm%SM(Nh))
|
||||
if (.not.allocated(gcm%r2tMaps)) allocate(gcm%r2tMaps(Nh))
|
||||
if (.not.allocated(gcm%t2rMaps)) allocate(gcm%t2rMaps(Nh))
|
||||
if (.not.allocated(grid%MyGrid)) allocate(grid%MyGrid(Nh))
|
||||
if (.not.allocated(grid%SM)) allocate(grid%SM(Nh))
|
||||
if (.not.allocated(grid%r2tMaps)) allocate(grid%r2tMaps(Nh))
|
||||
if (.not.allocated(grid%t2rMaps)) allocate(grid%t2rMaps(Nh))
|
||||
|
||||
call init_grid_fromTP(gcm%GEO(GCMNORTH),gcm%gclat(:,:,GCMNORTH)*deg2rad,gcm%glon(:,:,GCMNORTH)*deg2rad,isSolverGrid=.true.)
|
||||
call init_grid_fromTP(gcm%GEO(GCMSOUTH),gcm%gclat(:,:,GCMSOUTH)*deg2rad,gcm%glon(:,:,GCMSOUTH)*deg2rad,isSolverGrid=.true.)
|
||||
call init_grid_fromTP(grid%MyGrid(GCMNORTH),grid%gclat(:,:,GCMNORTH)*deg2rad,grid%glon(:,:,GCMNORTH)*deg2rad,isSolverGrid=.true.)
|
||||
call init_grid_fromTP(grid%MyGrid(GCMSOUTH),grid%gclat(:,:,GCMSOUTH)*deg2rad,grid%glon(:,:,GCMSOUTH)*deg2rad,isSolverGrid=.true.)
|
||||
|
||||
! Create input and output data storage arrays
|
||||
if (.not.allocated(grid%mixInput)) allocate(grid%mixInput(Nh,grid%gcm2mix_nvar))
|
||||
if (.not.allocated(grid%gcmInput)) allocate(grid%gcmInput(Nh,grid%gcm2mix_nvar))
|
||||
if (.not.allocated(grid%gcmOutput)) allocate(grid%gcmOutput(Nh,grid%mix2gcm_nvar))
|
||||
|
||||
do h=1,Nh
|
||||
do v=1,gcm2mix_nvar
|
||||
if (.not.allocated(gcm%mixInput(h,v)%var)) allocate(gcm%mixInput(h,v)%var(ion(h)%G%Np,ion(h)%G%Nt))
|
||||
if (.not.allocated(gcm%gcmInput(h,v)%var)) allocate(gcm%gcmInput(h,v)%var(gcm%nlon,gcm%nhlat))
|
||||
do v=1,grid%gcm2mix_nvar
|
||||
!write(*,*) "MIXCPL1: ",h,v,grid%nlon,grid%nhlat
|
||||
if (.not.allocated(grid%mixInput(h,v)%var)) allocate(grid%mixInput(h,v)%var(ion(h)%G%Np,ion(h)%G%Nt))
|
||||
if (.not.allocated(grid%gcmInput(h,v)%var)) allocate(grid%gcmInput(h,v)%var(grid%nlon,grid%nhlat))
|
||||
end do
|
||||
do v=1,mix2gcm_nvar
|
||||
if (.not. allocated(gcm%gcmOutput(h,v)%var)) allocate(gcm%gcmOutput(h,v)%var(gcm%nlon,gcm%nhlat))
|
||||
do v=1,grid%mix2gcm_nvar
|
||||
!write(*,*) "MIXCPL2: ",h,v,grid%nlon,grid%nhlat
|
||||
if (.not. allocated(grid%gcmOutput(h,v)%var)) allocate(grid%gcmOutput(h,v)%var(grid%nlon,grid%nhlat))
|
||||
end do
|
||||
enddo
|
||||
end subroutine init_gcm_grid
|
||||
|
||||
end subroutine create_gcm_grid
|
||||
|
||||
subroutine process_gcmimport(gcm,ion)
|
||||
type(gcm_T), intent(inout) :: gcm
|
||||
type(mixIon_T),dimension(:),intent(inout) :: ion
|
||||
|
||||
integer :: v, i, h, ymod1
|
||||
integer :: g
|
||||
|
||||
call Tic("Processing")
|
||||
call Tic("Shifting")
|
||||
! Split global GCM array into two hemispheres
|
||||
! then shift the array so that longitude starts at 0
|
||||
do v=1,gcm2mix_nvar
|
||||
do g=1,2
|
||||
select case (g)
|
||||
case (1)
|
||||
call mapGCM2MIX(gcm%GEO,ion,g)
|
||||
case (2)
|
||||
call mapGCM2MIX(gcm%APEX,ion,g)
|
||||
end select
|
||||
end do
|
||||
!Map the data to MIX grid
|
||||
!call mapGCM2MIX(gcm,ion)
|
||||
call Toc("Processing")
|
||||
|
||||
end subroutine process_gcmimport
|
||||
|
||||
subroutine transform_gcm_export(gcm,ion,g)
|
||||
type(gcm_grid_T),intent(inout) :: gcm
|
||||
type(mixIon_T),dimension(:),intent(inout) :: ion
|
||||
integer,intent(in) :: g
|
||||
|
||||
integer :: h,ymod2,v
|
||||
|
||||
if (gcm%mix2gcm_nvar .eq. 0) return
|
||||
|
||||
! The weird ymod here is to undo the funky southern hemisphere colat issue.
|
||||
do h=1,GCMhemispheres
|
||||
if (h == GCMSOUTH) then
|
||||
ymod2 = -1
|
||||
else
|
||||
ymod2 = 1
|
||||
endif
|
||||
!ymod2 = 1
|
||||
!write(*,*) "GEO -> SM START:",h,ymod2
|
||||
select case(g)
|
||||
case (1)
|
||||
call transform_grid(gcm%MyGrid(h),gcm%SM(h),iGEOtoSM,h,ym2=ymod2)
|
||||
case (2)
|
||||
call transform_grid(gcm%MyGrid(h),gcm%SM(h),iAPEXtoSM,h,ym2=ymod2)
|
||||
end select
|
||||
!write(*,*) "GEO -> SM END: ",h
|
||||
end do
|
||||
|
||||
! Map from mix grid to gcm grid
|
||||
call mapMIX2GCM(ion,gcm)
|
||||
|
||||
if (.not. allocated(gcm%outvar2d)) allocate(gcm%outvar2d(gcm%nlat,gcm%nlon,gcm%mix2gcm_nvar))
|
||||
|
||||
gcm%outvar2d = 0._rp
|
||||
! Before we start, we collapse to 1 globe instead of 2 hemispheres
|
||||
do v=1,gcm%mix2gcm_nvar
|
||||
gcm%outvar2d(gcm%t2N(gcm%nhlat:1:-1),:gcm%nlon-1,v) = transpose(cshift(gcm%gcmOutput(GCMNORTH,v)%var(:gcm%nlon-1,:),-1*gcm%lonshift))
|
||||
gcm%outvar2d(gcm%t2S,:gcm%nlon-1,v) = transpose(cshift(gcm%gcmOutput(GCMSOUTH,v)%var(:gcm%nlon-1,:),-1*gcm%lonshift))
|
||||
gcm%outvar2d(gcm%t2N,gcm%nlon,v) = gcm%outvar2d(gcm%t2N,1,v)
|
||||
gcm%outvar2d(gcm%t2S,gcm%nlon,v) = gcm%outvar2d(gcm%t2S,1,v)
|
||||
end do
|
||||
|
||||
end subroutine transform_gcm_export
|
||||
|
||||
subroutine mapGCM2MIX(gcm,ion,g)
|
||||
type(gcm_grid_T),intent(inout) :: gcm
|
||||
type(mixIon_T),dimension(:),intent(inout) :: ion
|
||||
integer,intent(in) :: g
|
||||
|
||||
type(Map_T) :: Map
|
||||
|
||||
real(rp), dimension(:,:), allocatable :: F
|
||||
integer :: h,v,i,ymod1
|
||||
|
||||
! Skip if not importing on this grid type
|
||||
if ( gcm%gcm2mix_nvar .eq. 0 ) return
|
||||
|
||||
call Tic("Shifting")
|
||||
do v=1,gcm%gcm2mix_nvar
|
||||
gcm%gcmInput(GCMNORTH,v)%var = 0._rp
|
||||
do i=1,gcm%nhlat
|
||||
gcm%gcmInput(GCMNORTH,v)%var(:gcm%nlon-1,i) = cshift(gcm%invar2d(gcm%t2N(gcm%nhlat-i+1),:gcm%nlon-1,v),gcm%lonshift)
|
||||
gcm%gcmInput(GCMSOUTH,v)%var(:gcm%nlon-1,i) = cshift(gcm%invar2d(gcm%t2S(i),:gcm%nlon-1,v),gcm%lonshift)
|
||||
@@ -105,71 +201,123 @@ module gcminterp
|
||||
endif
|
||||
!ymod1 = 1
|
||||
!write(*,*) "SM -> GEO START:",h,ymod1
|
||||
call transform_grid(ion(h)%G,ion(h)%Ggeo,iSMtoGEO,h,ym1=ymod1)
|
||||
select case(g)
|
||||
case (1)
|
||||
call transform_grid(ion(h)%G,ion(h)%Ggeo,iSMtoGEO,h,ym1=ymod1)
|
||||
case (2)
|
||||
call transform_grid(ion(h)%G,ion(h)%Gapx,iSMtoAPEX,h,ym1=ymod1)
|
||||
end select
|
||||
!write(*,*) "SM -> GEO END: ",h
|
||||
end do
|
||||
|
||||
!Map the data to MIX grid
|
||||
call mapGCM2MIX(gcm,ion)
|
||||
call Toc("Mapping")
|
||||
call Toc("Processing")
|
||||
|
||||
end subroutine process_gcmimport
|
||||
|
||||
subroutine mapGCM2MIX(gcm,ion)
|
||||
type(mixIon_T),dimension(:),intent(inout) :: ion
|
||||
type(gcm_T), intent(inout) :: gcm
|
||||
type(Map_T) :: Map
|
||||
|
||||
real(rp), dimension(:,:), allocatable :: F
|
||||
integer :: h,v
|
||||
|
||||
do h=1,gcm%nhemi
|
||||
call mix_set_map(gcm%GEO(h),ion(h)%Ggeo,gcm%t2rMaps(h))
|
||||
do v=1,gcm2mix_nvar
|
||||
do h=1,GCMhemispheres
|
||||
select case(g)
|
||||
case (1)
|
||||
call mix_set_map(gcm%MyGrid(h),ion(h)%Ggeo,gcm%t2rMaps(h))
|
||||
case (2)
|
||||
call mix_set_map(gcm%MyGrid(h),ion(h)%Gapx,gcm%t2rMaps(h))
|
||||
end select
|
||||
do v=1,gcm%gcm2mix_nvar
|
||||
call mix_map_grids(gcm%t2rMaps(h),gcm%gcmInput(h,v)%var(:,:),F)
|
||||
select case (v)
|
||||
case (GCMSIGMAP)
|
||||
gcm%mixInput(h,v)%var(:,:) = F
|
||||
case (GCMSIGMAH)
|
||||
gcm%mixInput(h,v)%var(:,:) = F
|
||||
end select
|
||||
gcm%mixInput(h,v)%var(:,:) = F
|
||||
!select case (v)
|
||||
!case (1)
|
||||
! gcm%mixInput(h,v)%var(:,:) = F
|
||||
!case (2)
|
||||
! gcm%mixInput(h,v)%var(:,:) = F
|
||||
!end select
|
||||
end do
|
||||
end do
|
||||
call Toc("Mapping")
|
||||
end subroutine mapGCM2MIX
|
||||
|
||||
subroutine mapMIX2GCM(ion,gcm)
|
||||
type(mixIon_T),dimension(:),intent(inout) :: ion
|
||||
type(gcm_T), intent(inout) :: gcm
|
||||
type(gcm_grid_T), intent(inout) :: gcm
|
||||
type(Map_T) :: Map
|
||||
|
||||
real(rp), dimension(:,:), allocatable :: F
|
||||
real(rp), dimension(:,:), allocatable :: F,F1,F2
|
||||
integer :: h,v
|
||||
|
||||
do h=1,gcm%nhemi
|
||||
do h=1,GCMhemispheres
|
||||
call mix_set_map(ion(h)%G,gcm%SM(h),gcm%r2tMaps(h))
|
||||
do v=1,mix2gcm_nvar
|
||||
call mix_map_grids(gcm%r2tMaps(h),ion(h)%St%Vars(:,:,gcm%outlist(v)),F)
|
||||
do v=1,gcm%mix2gcm_nvar
|
||||
if (gcm%outlist(v) .eq. AVG_ENG) then
|
||||
! If we want average energy, do the interpolation and mapping on energy flux instead
|
||||
call mix_map_grids(gcm%r2tMaps(h),ion(h)%St%Vars(:,:,AVG_ENG)*ion(h)%St%Vars(:,:,NUM_FLUX),F1)
|
||||
call mix_map_grids(gcm%r2tMaps(h),ion(h)%St%Vars(:,:,NUM_FLUX),F2)
|
||||
|
||||
! Catch any divide by 0
|
||||
where (F2 > 0.0_rp)
|
||||
F = F1 / F2
|
||||
elsewhere
|
||||
F = 0.0_rp ! or handle it as needed
|
||||
end where
|
||||
else
|
||||
call mix_map_grids(gcm%r2tMaps(h),ion(h)%St%Vars(:,:,gcm%outlist(v)),F)
|
||||
endif
|
||||
|
||||
gcm%gcmOutput(h,v)%var(:,:) = F
|
||||
end do
|
||||
end do
|
||||
|
||||
end subroutine mapMIX2GCM
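A sketch of the AVG_ENG handling in mapMIX2GCM above: map energy flux (average energy times number flux) and number flux separately, then recover the average energy with a guard against empty cells. The four-point "mapping" here is just an arithmetic average and the fluxes are arbitrary example numbers.

program avg_energy_remap_demo
  implicit none
  integer, parameter :: rp = kind(1.0d0)
  real(rp) :: Eavg(4), nflux(4), F1, F2, EavgOut
  Eavg  = [2.0_rp, 3.0_rp, 0.0_rp, 5.0_rp]     ! [keV]
  nflux = [1.0e8_rp, 2.0e8_rp, 0.0_rp, 1.0e8_rp]
  F1 = sum(Eavg*nflux)/4.0_rp                  ! mapped energy flux
  F2 = sum(nflux)/4.0_rp                       ! mapped number flux
  if (F2 > 0.0_rp) then
     EavgOut = F1/F2                           ! recover average energy
  else
     EavgOut = 0.0_rp                          ! guard against empty cells
  end if
  print '(a,f6.2,a)', 'remapped <E> = ', EavgOut, ' keV'
end program avg_energy_remap_demo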
subroutine apply_gcm2mix(gcm,St,h)
|
||||
type(gcm_T),intent(in) :: gcm
|
||||
type(mixState_T),intent(inout) :: St
|
||||
integer :: v,h
|
||||
integer :: v,h,g
|
||||
|
||||
do v=1,gcm2mix_nvar
|
||||
select case (v)
|
||||
case (GCMSIGMAP)
|
||||
St%Vars(:,:,SIGMAP) = gcm%mixInput(h,v)%var(:,:)
|
||||
case (GCMSIGMAH)
|
||||
St%Vars(:,:,SIGMAH) = gcm%mixInput(h,v)%var(:,:)
|
||||
do g=1,2
|
||||
select case (g)
|
||||
case (1)
|
||||
do v=1,gcm%GEO%gcm2mix_nvar
|
||||
St%Vars(:,:,gcm%GEO%inlist(v)) = gcm%GEO%mixInput(h,v)%var(:,:)
|
||||
end do
|
||||
case (2)
|
||||
do v=1,gcm%APEX%gcm2mix_nvar
|
||||
St%Vars(:,:,gcm%APEX%inlist(v)) = gcm%APEX%mixInput(h,v)%var(:,:)
|
||||
end do
|
||||
end select
|
||||
end do
|
||||
|
||||
end subroutine apply_gcm2mix
|
||||
|
||||
! subroutine calculate_auroral_boundary(ion,bc)
|
||||
! Fix this to calculate auroral boundary on remix's grid if necessary
|
||||
! type(mixIon_T),dimension(:),intent(in) :: ion
|
||||
! real(rp),allocatable,dimension(:,:),intent(out) :: bc
|
||||
!
|
||||
! integer :: v,i,j
|
||||
! real(rp) :: nfluxllb = 1.0e7
|
||||
!
|
||||
! if (.not. allocated(bc)) allocate(bc(gcm%nlon,2))
|
||||
!
|
||||
! do v=1,gcm%mix2gcm_nvar
|
||||
! if (gcm%outlist(v) .eq. NUM_FLUX) then
|
||||
!
|
||||
! bc = 0.D0
|
||||
!
|
||||
! do j=1,gcm%nlon ! NORTH
|
||||
! do i=gcm%nlat/2+1,gcm%nlat
|
||||
! if(gcm%outvar2d(i,j,v)>=nfluxllb) exit ! Find the lowest lat where numflux is above 1e6/cm^2/s
|
||||
! enddo
|
||||
! i = min(i,gcm%nlat)
|
||||
! bc(i,1) = max(90.-gcm%inlat(j,i),15.) ! aurllbj is co-lat.
|
||||
! enddo
|
||||
!
|
||||
! do j=1,gcm%nlon ! SOUTH
|
||||
! do i=gcm%nlat/2,1,-1
|
||||
! if(gcm%outvar2d(i,j,v)>=nfluxllb) exit ! Find the lowest lat where numflux is above 1e6/cm^2/s
|
||||
! enddo
|
||||
! i = max(i,1)
|
||||
! bc(i,2) = max(90.+gcm%inlat(j,i),15.) ! aurllbj is co-lat from south pole! Backwards.
|
||||
! enddo
|
||||
!
|
||||
! endif
|
||||
! enddo
|
||||
!
|
||||
! end subroutine
|
||||
|
||||
end module gcminterp
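For reference, the (currently commented-out) boundary search above walks equatorward from the pole and records the first latitude where the number flux exceeds a threshold, reported as a co-latitude with a 15-degree floor. A standalone sketch with an invented latitude grid and flux profile:

program auroral_boundary_demo
  implicit none
  integer, parameter :: rp = kind(1.0d0)
  real(rp), parameter :: nfluxllb = 1.0e7_rp            ! threshold [#/cm^2/s]
  real(rp) :: lat(10), nflux(10), bc
  integer  :: i
  lat   = [(90.0_rp - 5.0_rp*(i-1), i=1,10)]            ! 90..45 deg, poleward first
  nflux = [1.e5_rp, 5.e5_rp, 2.e6_rp, 8.e6_rp, 2.e7_rp, 5.e7_rp, 3.e7_rp, 9.e6_rp, 1.e6_rp, 1.e5_rp]
  do i = 1, size(lat)
     if (nflux(i) >= nfluxllb) exit                     ! first latitude above threshold
  end do
  i  = min(i, size(lat))
  bc = max(90.0_rp - lat(i), 15.0_rp)                   ! boundary as co-latitude, 15-deg floor
  print '(a,f6.1,a)', 'auroral boundary co-latitude = ', bc, ' deg'
end program auroral_boundary_demo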
@@ -21,10 +21,17 @@ contains
|
||||
|
||||
write(*,*) "start init_gcm"
|
||||
|
||||
if (.not. allocated(gcm%outlist)) allocate(gcm%outlist(mix2gcm_nvar))
|
||||
gcm%outlist(1) = POT
|
||||
gcm%outlist(2) = AVG_ENG
|
||||
gcm%outlist(3) = NUM_FLUX
|
||||
if (.not. allocated(gcm%GEO%outlist)) allocate(gcm%GEO%outlist(gcm%GEO%mix2gcm_nvar))
|
||||
if (.not. allocated(gcm%APEX%outlist)) allocate(gcm%APEX%outlist(gcm%APEX%mix2gcm_nvar))
|
||||
gcm%APEX%outlist(1) = POT
|
||||
gcm%APEX%outlist(2) = NUM_FLUX
|
||||
gcm%GEO%outlist(1) = AVG_ENG
|
||||
gcm%GEO%outlist(2) = NUM_FLUX
|
||||
|
||||
if ( .not. allocated(gcm%GEO%inlist)) allocate(gcm%GEO%inlist(gcm%GEO%gcm2mix_nvar))
|
||||
if ( .not. allocated(gcm%APEX%inlist)) allocate(gcm%APEX%inlist(gcm%APEX%gcm2mix_nvar))
|
||||
gcm%APEX%inlist(1) = SIGMAP
|
||||
gcm%APEX%inlist(2) = SIGMAH
|
||||
|
||||
gcm%isRestart = isRestart
|
||||
|
||||
@@ -37,169 +44,230 @@ contains
|
||||
type(MPI_Comm) :: gcmCplComm
|
||||
integer, intent(in) :: gcmCplRank
|
||||
|
||||
integer :: nlon,nlat,Nh,ierr
|
||||
integer :: g,icolor
|
||||
|
||||
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
|
||||
!Find grid dimension
|
||||
|
||||
write(*,*) "MIXCPL Waiting for GCM Grid"
|
||||
write(*,*) "MIXCPL Waiting for Grid"
|
||||
|
||||
call mpi_recv(nlat, 1, MPI_INTEGER, gcmCplRank, (tgcmId+voltId)*100+1, gcmCplComm, MPI_STATUS_IGNORE,ierr)
|
||||
write(*,*) "MIXCPL GOT NLAT: ",nlat
|
||||
|
||||
call mpi_recv(nlon, 1, MPI_INTEGER, gcmCplRank, (tgcmId+voltId)*100+2, gcmCplComm, MPI_STATUS_IGNORE,ierr)
|
||||
write(*,*) "MIXCPL GOT NLON: ",nlon
|
||||
|
||||
Nh = GCMhemispheres
|
||||
|
||||
if (.not.allocated(gcm%lat)) allocate(gcm%lat(nlat))
|
||||
if (.not.allocated(gcm%lon)) allocate(gcm%lon(nlon))
|
||||
|
||||
|
||||
call mpi_recv(gcm%lat, nlat, MPI_DOUBLE_PRECISION, gcmCplRank, (tgcmId+voltId)*100+3, gcmCplComm, MPI_STATUS_IGNORE,ierr)
|
||||
write(*,*) "MIXCPL GOT GLAT: ",gcm%lat
|
||||
call mpi_recv(gcm%lon, nlon, MPI_DOUBLE_PRECISION, gcmCplRank, (tgcmId+voltId)*100+4, gcmCplComm, MPI_STATUS_IGNORE,ierr)
|
||||
write(*,*) "MIXCPL GOT GLON: ",gcm%lon
|
||||
|
||||
write(*,*) "MIXCPL GOT GRID INFO: ",nlat, nlon
|
||||
|
||||
!Save dimension information
|
||||
gcm%nlon = nlon
|
||||
gcm%nlat = nlat
|
||||
gcm%nhemi = Nh
|
||||
icolor = 0
|
||||
do g=1,2
|
||||
select case(g)
|
||||
case (1)
|
||||
call init_gcm_mpi_import(gcm%GEO,gcmCplComm,gcmCplRank,icolor)
|
||||
write(*,*) "MIXCPL LOAD GEO"
|
||||
case (2)
|
||||
call init_gcm_mpi_import(gcm%APEX,gcmCplComm,gcmCplRank,icolor)
|
||||
write(*,*) "MIXCPL LOAD APEX"
|
||||
end select
|
||||
|
||||
end do
|
||||
end subroutine init_gcm_mix_mpi
|
||||
|
||||
subroutine init_gcm_mpi_import(gcm,gcmCplComm,gcmCplRank,icolor)
|
||||
type(gcm_grid_T),intent(inout):: gcm
|
||||
type(MPI_Comm), intent(in) :: gcmCplComm
|
||||
integer, intent(in) :: gcmCplRank
|
||||
integer, intent(inout) :: icolor
|
||||
|
||||
integer :: nlon,nlat,Nh,ierr
|
||||
|
||||
Nh = GCMhemispheres
|
||||
|
||||
icolor = icolor + 1
|
||||
call mpi_recv(gcm%nlat, 1, MPI_INTEGER, gcmCplRank, (tgcmId+voltId)*100+icolor, gcmCplComm, MPI_STATUS_IGNORE,ierr)
|
||||
write(*,*) "MIXCPL GOT NLAT: ",gcm%nlat
|
||||
|
||||
icolor = icolor + 1
|
||||
call mpi_recv(gcm%nlon, 1, MPI_INTEGER, gcmCplRank, (tgcmId+voltId)*100+icolor, gcmCplComm, MPI_STATUS_IGNORE,ierr)
|
||||
write(*,*) "MIXCPL GOT NLON: ",gcm%nlon
|
||||
|
||||
|
||||
if (.not.allocated(gcm%inlat)) allocate(gcm%inlat(gcm%nlat))
|
||||
if (.not.allocated(gcm%inlon)) allocate(gcm%inlon(gcm%nlon))
|
||||
|
||||
icolor = icolor + 1
|
||||
call mpi_recv(gcm%inlat, gcm%nlat, MPI_DOUBLE_PRECISION, gcmCplRank, (tgcmId+voltId)*100+icolor, gcmCplComm, MPI_STATUS_IGNORE,ierr)
|
||||
write(*,*) "MIXCPL GOT LAT: ",gcm%inlat
|
||||
icolor = icolor + 1
|
||||
call mpi_recv(gcm%inlon, gcm%nlon, MPI_DOUBLE_PRECISION, gcmCplRank, (tgcmId+voltId)*100+icolor, gcmCplComm, MPI_STATUS_IGNORE,ierr)
|
||||
write(*,*) "MIXCPL GOT LON: ",gcm%inlon
|
||||
write(*,*) "MIXCPL GOT GRID INFO: ",gcm%nlat,gcm% nlon
|
||||
|
||||
end subroutine init_gcm_mpi_import
|
||||
|
||||
subroutine coupleGCM2MIX(gcm,ion,mjd,time,gcmCplComm,myRank)
|
||||
type(mixIon_T),dimension(:),intent(inout) :: ion
|
||||
type(gcm_T), intent(inout) :: gcm
|
||||
real(rp), intent(in) :: time, mjd
|
||||
type(MPI_Comm), optional :: gcmCplComm
|
||||
integer, optional, intent(in) :: myRank
|
||||
type(mixIon_T),dimension(:),intent(inout) :: ion
|
||||
type(gcm_T), intent(inout) :: gcm
|
||||
real(rp), intent(in) :: time, mjd
|
||||
type(MPI_Comm), optional :: gcmCplComm
|
||||
integer, optional, intent(in) :: myRank
|
||||
|
||||
! maybe create the SM and GEO list of points here
|
||||
! since the destination grid does not need to be structured
|
||||
! can do a simple loop over all grid points and transform
|
||||
! transform all remix points to GEO
|
||||
! transform all gcm points to SM
|
||||
! maybe create the SM and GEO list of points here
|
||||
! since the destination grid does not need to be structured
|
||||
! can do a simple loop over all grid points and transform
|
||||
! transform all remix points to GEO
|
||||
! transform all gcm points to SM
|
||||
|
||||
!Skip MPI exchange on first restart
|
||||
if (present(gcmCplComm) .and. gcm%isRestart) then
|
||||
gcm%isRestart = .False.
|
||||
return
|
||||
endif
|
||||
!Must MIX export first. TIEGCM will also import first.
|
||||
call Tic("Export")
|
||||
if (present(gcmCplComm)) then
|
||||
if (gcm%isRestart) then
|
||||
gcm%isRestart = .False.
|
||||
return
|
||||
!Skip MPI exchange on first restart
|
||||
!if (present(gcmCplComm) .and. gcm%isRestart) then
|
||||
! !gcm%isRestart = .False.
|
||||
! return
|
||||
!endif
|
||||
!Must MIX export first. TIEGCM will also import first.
|
||||
call Tic("Export")
|
||||
if (present(gcmCplComm)) then
|
||||
call exportgcm(ion,gcm,mjd,time,gcmCplComm,myRank)
|
||||
else
|
||||
write(*,*) "Are we trying to Export to Couple GCM?"
|
||||
endif
|
||||
call exportgcm(ion,gcm,mjd,time,gcmCplComm,myRank)
|
||||
else
|
||||
write(*,*) "Are we trying to Export to Couple GCM?"
|
||||
endif
|
||||
call Toc("Export")
|
||||
call Toc("Export")
|
||||
|
||||
!Import gcm data
|
||||
call Tic("Import")
|
||||
if (present(gcmCplComm)) then
|
||||
call importgcm(gcm, ion, gcmCplComm,myRank)
|
||||
else
|
||||
write(*,*) "Are we trying to Import to Couple GCM?"
|
||||
endif
|
||||
call process_gcmimport(gcm,ion)
|
||||
call Toc("Import")
|
||||
!Import gcm data
|
||||
call Tic("Import")
|
||||
if (present(gcmCplComm)) then
|
||||
call importgcm(gcm, gcmCplComm,myRank)
|
||||
else
|
||||
write(*,*) "Are we trying to Import to Couple GCM?"
|
||||
endif
|
||||
call process_gcmimport(gcm,ion)
|
||||
call Toc("Import")
|
||||
|
||||
if (gcm%isRestart) gcm%isRestart=.false.
|
||||
! We have a separate internal counter for coupling here.
|
||||
! This may be used later on for WACCM-X coupling which is desync from remix coupling time
|
||||
! TIEGCM coupling time is also 5s while WACCM-X will couple at 1 min default
|
||||
gcm%cplStep = gcm%cplStep + 1
|
||||
if (gcm%isRestart) gcm%isRestart=.false.
|
||||
! We have a separate internal counter for coupling here.
|
||||
! This may be used later on for WACCM-X coupling which is desync from remix coupling time
|
||||
! TIEGCM coupling time is also 5s while WACCM-X will couple at 1 min default
|
||||
gcm%cplStep = gcm%cplStep + 1
|
||||
|
||||
end subroutine coupleGCM2MIX
|
||||
|
||||
|
||||
subroutine importgcmmpi(gcm,ion, gcmCplComm,gcmCplRank)
|
||||
type(gcm_T), intent(inout) :: gcm
|
||||
type(mixIon_T),dimension(:),intent(inout) :: ion
|
||||
type(MPI_Comm) :: gcmCplComm
|
||||
integer, intent(in) :: gcmCplRank
|
||||
subroutine importgcmmpi(gcm, gcmCplComm,gcmCplRank)
|
||||
type(gcm_T), intent(inout) :: gcm
|
||||
type(MPI_Comm) :: gcmCplComm
|
||||
integer, intent(in) :: gcmCplRank
|
||||
|
||||
integer :: v,ierr,length
|
||||
character( len = MPI_MAX_ERROR_STRING) :: message
|
||||
integer :: v
|
||||
|
||||
call Tic("MpiExchange")
|
||||
write(*,*) "MIX: IMPORT GCM"
|
||||
call Tic("MpiExchange")
|
||||
write(*,*) "MIX: IMPORT GCM"
|
||||
|
||||
if (gcmCplComm /= MPI_COMM_NULL) then
|
||||
call Tic("Passing")
|
||||
do v=1,2
|
||||
select case(v)
|
||||
case (1)
|
||||
!write(*,*) "Import GEO stuff"
|
||||
call import_gcm_per_grid(gcm%GEO,gcmCplComm,gcmCplRank)
|
||||
case (2)
|
||||
!write(*,*) "Import APEX stuff"
|
||||
call import_gcm_per_grid(gcm%APEX,gcmCplComm,gcmCplRank)
|
||||
end select
|
||||
|
||||
enddo
|
||||
call Toc("Passing")
|
||||
|
||||
endif
|
||||
call Toc("MpiExchange")
|
||||
|
||||
end subroutine importgcmmpi
|
||||
|
||||
subroutine import_gcm_per_grid(gcm,gcmCplComm,gcmCplRank)
|
||||
type(gcm_grid_T) :: gcm
|
||||
type(MPI_Comm) :: gcmCplComm
|
||||
integer, intent(in) :: gcmCplRank
|
||||
|
||||
integer :: ierr,length
|
||||
character( len = MPI_MAX_ERROR_STRING) :: message
|
||||
|
||||
! Skip if not importing on this grid type
|
||||
if ( gcm%gcm2mix_nvar .eq. 0 ) return
|
||||
|
||||
if (.not. allocated(gcm%invar2d)) allocate(gcm%invar2d(gcm%nlat,gcm%nlon,gcm%gcm2mix_nvar))
|
||||
|
||||
call mpi_recv(gcm%invar2d, gcm%nlat*gcm%nlon*gcm%gcm2mix_nvar, MPI_DOUBLE_PRECISION, gcmCplRank, (tgcmId+voltId)*100, gcmCplComm, MPI_STATUS_IGNORE,ierr)
|
||||
|
||||
if (gcmCplComm /= MPI_COMM_NULL) then
|
||||
call Tic("Passing")
|
||||
if (.not. allocated(gcm%invar2d)) allocate(gcm%invar2d(gcm%nlat,gcm%nlon,gcm2mix_nvar))
|
||||
call mpi_recv(gcm%invar2d, gcm%nlat*gcm%nlon*gcm2mix_nvar, MPI_DOUBLE_PRECISION, gcmCplRank, (tgcmId+voltId)*100, gcmCplComm, MPI_STATUS_IGNORE,ierr)
|
||||
if(ierr /= MPI_Success) then
|
||||
call MPI_Error_string( ierr, message, length, ierr)
|
||||
print *,message(1:length)
|
||||
call mpi_Abort(MPI_COMM_WORLD, 1, ierr)
|
||||
end if
|
||||
call Toc("Passing")
|
||||
|
||||
endif
|
||||
call Toc("MpiExchange")
|
||||
|
||||
end subroutine importgcmmpi
|
||||
end subroutine import_gcm_per_grid
|
||||
|
||||
subroutine exportgcmmpi(ion,gcm,mjd,time,gcmCplComm,gcmCplRank)
|
||||
type(gcm_T), intent(inout) :: gcm
|
||||
type(mixIon_T), dimension(:),intent(inout) :: ion
|
||||
real(rp), intent(in) :: time, mjd
|
||||
type(MPI_Comm) :: gcmCplComm
|
||||
integer, intent(in) :: gcmCplRank
|
||||
|
||||
integer :: h,ymod2,v,ierr,length
|
||||
character( len = MPI_MAX_ERROR_STRING) :: message
|
||||
type(gcm_T), intent(inout) :: gcm
|
||||
type(mixIon_T), dimension(:),intent(inout) :: ion
|
||||
real(rp), intent(in) :: time, mjd
|
||||
type(MPI_Comm) :: gcmCplComm
|
||||
integer, intent(in) :: gcmCplRank
|
||||
|
||||
write(*,*) "MIX: EXPORT GCM"
|
||||
integer :: g,ierr,length
|
||||
real(rp),allocatable,dimension(:,:) :: auroralbc
|
||||
character( len = MPI_MAX_ERROR_STRING) :: message
|
||||
|
||||
! The weird ymod here is to undo the funky southern hemisphere colat issue.
|
||||
do h=1,gcm%nhemi
|
||||
if (h == GCMSOUTH) then
|
||||
ymod2 = -1
|
||||
else
|
||||
ymod2 = 1
|
||||
endif
|
||||
!ymod2 = 1
|
||||
!write(*,*) "GEO -> SM START:",h,ymod2
|
||||
call transform_grid(gcm%GEO(h),gcm%SM(h),iGEOtoSM,h,ym2=ymod2)
|
||||
!write(*,*) "GEO -> SM END: ",h
|
||||
end do
|
||||
write(*,*) "MIX: EXPORT GCM"
|
||||
|
||||
! Map from mix grid to gcm grid
|
||||
call mapMIX2GCM(ion,gcm)
|
||||
|
||||
if (gcmCplComm /= MPI_COMM_NULL) then
|
||||
|
||||
! Prepare the export data
|
||||
if (.not. allocated(gcm%outvar2d)) allocate(gcm%outvar2d(gcm%nlat,gcm%nlon,mix2gcm_nvar))
|
||||
gcm%outvar2d = 0.
|
||||
! Before we start, we collapse to 1 globe instead of 2 hemispheres
|
||||
do v=1,mix2gcm_nvar
|
||||
gcm%outvar2d(gcm%t2N(gcm%nhlat:1:-1),:gcm%nlon-1,v) = transpose(cshift(gcm%gcmOutput(GCMNORTH,v)%var(:gcm%nlon-1,:),-1*gcm%lonshift))
|
||||
gcm%outvar2d(gcm%t2S,:gcm%nlon-1,v) = transpose(cshift(gcm%gcmOutput(GCMSOUTH,v)%var(:gcm%nlon-1,:),-1*gcm%lonshift))
|
||||
gcm%outvar2d(gcm%t2N,gcm%nlon,v) = gcm%outvar2d(gcm%t2N,1,v)
|
||||
gcm%outvar2d(gcm%t2S,gcm%nlon,v) = gcm%outvar2d(gcm%t2S,1,v)
|
||||
call Tic("Transform")
|
||||
do g = 1,2
|
||||
select case (g)
|
||||
case (1)
|
||||
call transform_gcm_export(gcm%GEO,ion,g)
|
||||
case(2)
|
||||
call transform_gcm_export(gcm%APEX,ion,g)
|
||||
end select
|
||||
|
||||
end do
|
||||
call Toc("Transform")
|
||||
|
||||
!! Calculate the auroral boundary in APEX coordinates
|
||||
!call calculate_auroral_boundary(gcm%APEX,auroralbc)
|
||||
|
||||
call Tic("Exchange")
|
||||
|
||||
if (gcmCplComm /= MPI_COMM_NULL) then
|
||||
do g = 1,2
|
||||
select case (g)
|
||||
case (1)
|
||||
call export_gcm_per_grid(gcm%GEO,gcmCplComm,gcmCplRank)
|
||||
case(2)
|
||||
call export_gcm_per_grid(gcm%APEX,gcmCplComm,gcmCplRank)
|
||||
end select
|
||||
enddo
|
||||
|
||||
!call mpi_send(auroralbc,gcm%APEX%nlon*2, MPI_DOUBLE_PRECISION, gcmCplRank,(tgcmId+voltId)*100, gcmCplComm, ierr)
|
||||
! if(ierr /= MPI_Success) then
|
||||
! call MPI_Error_string( ierr, message, length, ierr)
|
||||
! print *,message(1:length)
|
||||
! call mpi_Abort(MPI_COMM_WORLD, 1, ierr)
|
||||
! end if
|
||||
endif
|
||||
|
||||
call Toc("Exchange")
|
||||
end subroutine exportgcmmpi
|
||||
|
||||
subroutine export_gcm_per_grid(gcm,gcmCplComm,gcmCplRank)
|
||||
type(gcm_grid_T),intent(inout) :: gcm
|
||||
type(MPI_Comm) :: gcmCplComm
|
||||
integer, intent(in) :: gcmCplRank
|
||||
|
||||
integer :: ierr,length
|
||||
character( len = MPI_MAX_ERROR_STRING) :: message
|
||||
|
||||
if (gcm%mix2gcm_nvar .eq. 0) return
|
||||
|
||||
! Send the coupling data
|
||||
write(*,*) " MIXCPL: ", gcmCplRank,(tgcmId+voltId)*100,gcmCplComm,gcm%nlat,gcm%nlon
|
||||
call mpi_send(gcm%outvar2d, gcm%nlat*gcm%nlon*mix2gcm_nvar, MPI_DOUBLE_PRECISION, gcmCplRank, (tgcmId+voltId)*100, gcmCplComm, ierr)
|
||||
if(ierr /= MPI_Success) then
|
||||
call MPI_Error_string( ierr, message, length, ierr)
|
||||
print *,message(1:length)
|
||||
call mpi_Abort(MPI_COMM_WORLD, 1, ierr)
|
||||
end if
|
||||
!write(*,*) " MIXCPL: ", gcmCplRank,(tgcmId+voltId)*100,gcmCplComm,gcm%nlat,gcm%nlon
|
||||
|
||||
endif
|
||||
|
||||
end subroutine exportgcmmpi
|
||||
call mpi_send(gcm%outvar2d, gcm%nlat*gcm%nlon*gcm%mix2gcm_nvar, MPI_DOUBLE_PRECISION, gcmCplRank, (tgcmId+voltId)*100, gcmCplComm, ierr)
|
||||
|
||||
if(ierr /= MPI_Success) then
|
||||
call MPI_Error_string( ierr, message, length, ierr)
|
||||
print *,message(1:length)
|
||||
call mpi_Abort(MPI_COMM_WORLD, 1, ierr)
|
||||
end if
|
||||
|
||||
end subroutine export_gcm_per_grid
|
||||
end module gcm_mpi
|
||||
|
||||
@@ -134,6 +134,8 @@ module voltapp_mpi
|
||||
if ( ( vApp%doGCM ) .and. (vApp%gcmCplRank /= -1) ) then
|
||||
write(*,*) "Initializing GCM ..."
|
||||
call init_gcm_mpi(vApp%gcm,vApp%remixApp%ion,vApp%gApp%Model%isRestart)
|
||||
! Perform one initial coupling for TIEGCM
|
||||
!call coupleGCM2MIX(vApp%gcm,vApp%remixApp%ion,vApp%MJD,vApp%time,vApp%mageCplComm,vApp%gcmCplRank)
|
||||
end if
|
||||
|
||||
if(vApp%useHelpers) then
|
||||
|
||||
@@ -26,6 +26,7 @@ module voltapp
|
||||
use shellGridGen
|
||||
use voltappHelper
|
||||
use voltCplHelper
|
||||
use apex, only : init_apex
|
||||
|
||||
implicit none
|
||||
|
||||
@@ -384,6 +385,9 @@ module voltapp
|
||||
call DisableSymLinks()
|
||||
endif
|
||||
|
||||
! Setting apex reference altitude
|
||||
call init_apex(vApp%MJD,vApp%gcm%altmax)
|
||||
|
||||
if(present(optFilename)) then
|
||||
! read from the prescribed file
|
||||
call init_mix(vApp%remixApp%ion,[NORTH, SOUTH],optFilename=optFilename,RunID=RunID,isRestart=isRestart,nRes=vApp%IO%nRes,optIO=vApp%writeFiles)
|
||||
@@ -491,11 +495,11 @@ module voltapp
|
||||
|
||||
! solve for remix output
|
||||
if (vApp%time<=0) then
|
||||
call run_mix(vApp%remixApp%ion,curTilt,doModelOpt=.false.,mjd=vApp%MJD)
|
||||
call run_mix(vApp%remixApp%ion,curTilt,doModelOpt=.false.)
|
||||
else if (vApp%doGCM) then
|
||||
call run_mix(vApp%remixApp%ion,curTilt,gcm=vApp%gcm,mjd=vApp%MJD)
|
||||
call run_mix(vApp%remixApp%ion,curTilt,gcm=vApp%gcm)
|
||||
else
|
||||
call run_mix(vApp%remixApp%ion,curTilt,doModelOpt=.true.,mjd=vApp%MJD)
|
||||
call run_mix(vApp%remixApp%ion,curTilt,doModelOpt=.true.)
|
||||
endif
|
||||
|
||||
end subroutine runRemix
|
||||
|
||||
@@ -9,7 +9,7 @@ file(GLOB helperFiles "${PROJECT_SOURCE_DIR}/tests/helperCode/*.F90")
|
||||
|
||||
# NON-MPI TESTS GO UP HERE
|
||||
file(GLOB caseTestFiles "${CMAKE_CURRENT_SOURCE_DIR}/cases/*.pf")
|
||||
set(caseTestLibs baselib gamlib remixlib voltlib tgiclib)
|
||||
set(caseTestLibs baselib gamlib remixlib voltlib tgiclib raijulib)
|
||||
add_pfunit_ctest (caseTests TEST_SOURCES ${caseTestFiles} OTHER_SOURCES ${helperFiles} LINK_LIBRARIES ${caseTestLibs})
|
||||
add_dependencies(caseTests ${caseTestLibs})
|
||||
|
||||
@@ -19,20 +19,15 @@ add_pfunit_ctest (gamTests TEST_SOURCES ${gamTestFiles} OTHER_SOURCES ${helperFi
|
||||
add_dependencies(gamTests ${gamTestLibs})
|
||||
|
||||
file(GLOB mixTestFiles "${CMAKE_CURRENT_SOURCE_DIR}/remix/*.pf")
|
||||
set(mixTestLibs baselib gamlib remixlib voltlib tgiclib)
|
||||
set(mixTestLibs baselib gamlib remixlib voltlib tgiclib raijulib)
|
||||
add_pfunit_ctest (mixTests TEST_SOURCES ${mixTestFiles} OTHER_SOURCES ${helperFiles} LINK_LIBRARIES ${mixTestLibs})
|
||||
add_dependencies(mixTests ${mixTestLibs})
|
||||
|
||||
file(GLOB voltTestFiles "${CMAKE_CURRENT_SOURCE_DIR}/voltron/*.pf")
|
||||
set(voltTestLibs baselib gamlib remixlib voltlib tgiclib)
|
||||
set(voltTestLibs baselib gamlib remixlib voltlib tgiclib raijulib)
|
||||
add_pfunit_ctest (voltTests TEST_SOURCES ${voltTestFiles} OTHER_SOURCES ${helperFiles} LINK_LIBRARIES ${voltTestLibs})
|
||||
add_dependencies(voltTests ${voltTestLibs})
|
||||
|
||||
file(GLOB rcmTestFiles "${CMAKE_CURRENT_SOURCE_DIR}/rcm/*.pf")
|
||||
set(rcmTestLibs baselib gamlib voltlib remixlib rcmlib tgiclib)
|
||||
add_pfunit_ctest(rcmTests TEST_SOURCES ${rcmTestFiles} OTHER_SOURCES ${helperFiles} LINK_LIBRARIES ${rcmTestLibs})
|
||||
add_dependencies(rcmTests ${rcmTestLibs})
|
||||
|
||||
file(GLOB shgrTestFiles "${CMAKE_CURRENT_SOURCE_DIR}/shellgrid/*.pf")
|
||||
set(shgrTestLibs baselib)
|
||||
add_pfunit_ctest (shgrTests TEST_SOURCES ${shgrTestFiles} OTHER_SOURCES ${helperFiles} LINK_LIBRARIES ${shgrTestLibs})
|
||||
@@ -74,7 +69,7 @@ if(ENABLE_MPI)
|
||||
file(GLOB helperFilesMpi "${PROJECT_SOURCE_DIR}/tests/helperCode_mpi/*.F90")
|
||||
|
||||
file(GLOB caseMpiTestFiles "${CMAKE_CURRENT_SOURCE_DIR}/cases_mpi/*.pf")
|
||||
set(caseMpiTestLibs baselib gamlib remixlib voltlib basempilib gammpilib tgiclib)
|
||||
set(caseMpiTestLibs baselib gamlib remixlib voltlib basempilib gammpilib tgiclib raijulib)
|
||||
add_pfunit_ctest (caseMpiTests TEST_SOURCES ${caseMpiTestFiles} OTHER_SOURCES ${helperFiles} ${helperFilesMpi} LINK_LIBRARIES ${caseMpiTestLibs} MAX_PES 64)
|
||||
add_dependencies(caseMpiTests ${caseMpiTestLibs})
|
||||
|
||||
@@ -89,7 +84,7 @@ if(ENABLE_MPI)
|
||||
add_dependencies(gamMpiTests ${gamMpiTestLibs})
|
||||
|
||||
file(GLOB voltMpiTestFiles "${CMAKE_CURRENT_SOURCE_DIR}/voltron_mpi/*.pf")
|
||||
set(voltMpiTestLibs baselib gamlib basempilib gammpilib voltlib voltmpilib remixlib tgiclib)
|
||||
set(voltMpiTestLibs baselib gamlib basempilib gammpilib voltlib voltmpilib remixlib tgiclib raijulib)
|
||||
add_pfunit_ctest (voltMpiTests TEST_SOURCES ${voltMpiTestFiles} OTHER_SOURCES ${helperFiles} ${helperFilesMpi} LINK_LIBRARIES ${voltMpiTestLibs} MAX_PES 17)
|
||||
add_dependencies(voltMpiTests ${voltMpiTestLibs})
|
||||
|
||||
@@ -98,15 +93,15 @@ endif()
|
||||
# all tests in one
|
||||
if(ENABLE_MPI)
|
||||
# include MPI tests
|
||||
set(allTestFiles ${caseTestFiles} ${gamTestFiles} ${mixTestFiles} ${voltTestFiles} ${rcmTestFiles} ${shgrTestFiles} ${caseMpiTestFiles} ${baseMpiTestFiles} ${gamMpiTestFiles} ${voltMpiTestFiles})
|
||||
set(allTestLibs baselib gamlib remixlib voltlib rcmlib basempilib gammpilib voltmpilib tgiclib)
|
||||
set(allTestBins caseTests gamTests mixTests voltTests rcmTests shgrTests gamMpiTests baseMpiTests caseMpiTests voltMpiTests)
|
||||
set(allTestFiles ${caseTestFiles} ${gamTestFiles} ${mixTestFiles} ${voltTestFiles} ${shgrTestFiles} ${caseMpiTestFiles} ${baseMpiTestFiles} ${gamMpiTestFiles} ${voltMpiTestFiles})
|
||||
set(allTestLibs baselib gamlib remixlib voltlib basempilib gammpilib voltmpilib tgiclib raijulib)
|
||||
set(allTestBins caseTests gamTests mixTests voltTests shgrTests gamMpiTests baseMpiTests caseMpiTests voltMpiTests)
|
||||
add_pfunit_ctest (allTests TEST_SOURCES ${allTestFiles} OTHER_SOURCES ${helperFiles} ${helperFilesMpi} LINK_LIBRARIES ${allTestLibs} MAX_PES 64)
|
||||
else()
|
||||
# exclude MPI tests
|
||||
set(allTestFiles ${caseTestFiles} ${gamTestFiles} ${mixTestFiles} ${voltTestFiles} ${rcmTestFiles} ${shgrTestFiles})
|
||||
set(allTestLibs baselib gamlib remixlib voltlib rcmlib tgiclib)
|
||||
set(allTestBins caseTests gamTests mixTests voltTests rcmTests shgrTests)
|
||||
set(allTestFiles ${caseTestFiles} ${gamTestFiles} ${mixTestFiles} ${voltTestFiles} ${shgrTestFiles})
|
||||
set(allTestLibs baselib gamlib remixlib voltlib tgiclib raijulib)
|
||||
set(allTestBins caseTests gamTests mixTests voltTests shgrTests)
|
||||
add_pfunit_ctest (allTests TEST_SOURCES ${allTestFiles} OTHER_SOURCES ${helperFiles} LINK_LIBRARIES ${allTestLibs})
|
||||
endif()
|
||||
add_dependencies(allTests CopyInputs CopyGrids ${allTestBins} ${allTestLibs})
|
||||
|
||||
@@ -68,7 +68,7 @@ contains
|
||||
|
||||
end subroutine
|
||||
|
||||
@test
|
||||
!@test
|
||||
subroutine testSquishLoadBalancing
|
||||
! testing that the ebsquish load balancing functions work properly
|
||||
type(voltApp_T) :: voltronApp
|
||||
@@ -91,33 +91,33 @@ contains
|
||||
allocate(ebSquish%blockStartIndices(ebSquish%numSquishBlocks))
|
||||
|
||||
! set default load balance and check it
|
||||
call LoadBalanceBlocks(voltronApp)
|
||||
!call LoadBalanceBlocks(voltronApp)
|
||||
print *,ebSquish%blockStartIndices
|
||||
call checkSquishIndices(voltronApp)
|
||||
|
||||
! manually set even balance vals, and check it
|
||||
balanceVals = 1.0_rp/ebSquish%numSquishBlocks
|
||||
call LoadBalanceBlocks(voltronApp, balanceVals)
|
||||
!call LoadBalanceBlocks(voltronApp, balanceVals)
|
||||
print *,ebSquish%blockStartIndices
|
||||
call checkSquishIndices(voltronApp)
|
||||
|
||||
! set high load in front, and check it
|
||||
balanceVals = 0.05_rp
|
||||
balanceVals(1) = 0.8_rp
|
||||
call LoadBalanceBlocks(voltronApp, balanceVals)
|
||||
!call LoadBalanceBlocks(voltronApp, balanceVals)
|
||||
print *,ebSquish%blockStartIndices
|
||||
call checkSquishIndices(voltronApp)
|
||||
|
||||
! set high balance at back, and check it
|
||||
balanceVals = 0.05_rp
|
||||
balanceVals(ubound(balanceVals)) = 0.8_rp
|
||||
call LoadBalanceBlocks(voltronApp, balanceVals)
|
||||
!call LoadBalanceBlocks(voltronApp, balanceVals)
|
||||
print *,ebSquish%blockStartIndices
|
||||
call checkSquishIndices(voltronApp)
|
||||
|
||||
! set increasing balance, and check it
|
||||
balanceVals = [(i,i=1,ebSquish%numSquishBlocks)]
|
||||
call LoadBalanceBlocks(voltronApp, balanceVals)
|
||||
!call LoadBalanceBlocks(voltronApp, balanceVals)
|
||||
print *,ebSquish%blockStartIndices
|
||||
call checkSquishIndices(voltronApp)
|