Merge branch 'master' into release

Ian Bell
2023-08-06 16:20:13 -04:00
276 changed files with 47217 additions and 40480 deletions


@@ -1,8 +0,0 @@
version: 2
jobs:
build:
docker:
- image: circleci/ruby:2.4.1
steps:
- checkout
- run: echo "A first hello"

.clang-format Normal file

@@ -0,0 +1,99 @@
---
Language: Cpp
AccessModifierOffset: -1
AlignAfterOpenBracket: Align
AlignConsecutiveAssignments: false
AlignConsecutiveDeclarations: false
AlignEscapedNewlines: Left
AlignOperands: true
AlignTrailingComments: true
AllowAllParametersOfDeclarationOnNextLine: true
AllowShortBlocksOnASingleLine: false
AllowShortCaseLabelsOnASingleLine: false
AllowShortFunctionsOnASingleLine: Empty
AllowShortIfStatementsOnASingleLine: true
AllowShortLoopsOnASingleLine: false
AlwaysBreakAfterDefinitionReturnType: None
AlwaysBreakAfterReturnType: None
AlwaysBreakBeforeMultilineStrings: false
AlwaysBreakTemplateDeclarations: true
BinPackArguments: true
BinPackParameters: true
BraceWrapping:
AfterClass: true
AfterControlStatement: false
AfterEnum: true
AfterFunction: false
AfterNamespace: false
AfterObjCDeclaration: false
AfterStruct: true
AfterUnion: true
AfterExternBlock: true
BeforeCatch: false
BeforeElse: false
IndentBraces: false
SplitEmptyFunction: true
SplitEmptyRecord: true
SplitEmptyNamespace: true
BreakBeforeBinaryOperators: NonAssignment
BreakBeforeBraces: Custom
BreakBeforeInheritanceComma: true
BreakBeforeTernaryOperators: true
BreakConstructorInitializers: BeforeColon
BreakConstructorInitializersBeforeComma: false
BreakStringLiterals: true
ColumnLimit: 150
CommentPragmas: '^ IWYU pragma:'
CompactNamespaces: false
ConstructorInitializerAllOnOneLineOrOnePerLine: true
ConstructorInitializerIndentWidth: 2
ContinuationIndentWidth: 2
Cpp11BracedListStyle: true
DerivePointerAlignment: false
DisableFormat: false
ExperimentalAutoDetectBinPacking: false
FixNamespaceComments: true
IncludeBlocks: Preserve
IndentCaseLabels: true
IndentWidth: 4
IndentPPDirectives: AfterHash
IndentWrappedFunctionNames: true
NamespaceIndentation: None # Could consider Inner
PenaltyBreakAssignment: 2
PenaltyBreakBeforeFirstCallParameter: 19
PenaltyBreakComment: 300
PenaltyBreakFirstLessLess: 120
PenaltyBreakString: 1000
PenaltyExcessCharacter: 1000000
PenaltyReturnTypeOnItsOwnLine: 60
PointerAlignment: Left
ReflowComments: false
SpaceAfterCStyleCast: false
# SpaceAfterLogicalNot: false # Not available in clang-format 6.0
SpaceAfterTemplateKeyword: true
SpaceBeforeAssignmentOperators: true
# SpaceBeforeCpp11BracedList: true # Not available in clang-format 6.0
# SpaceBeforeCtorInitializerColon: true # Not available in clang-format 6.0
# SpaceBeforeInheritanceColon: true # Not available in clang-format 6.0
SpaceBeforeParens: ControlStatements
# SpaceBeforeRangeBasedForLoopColon: true # Not available in clang-format 6.0
SpaceInEmptyParentheses: false
SpacesBeforeTrailingComments: 2
SpacesInAngles: false
SpacesInContainerLiterals: true
SpacesInCStyleCastParentheses: false
SpacesInParentheses: false
SpacesInSquareBrackets: false
SortIncludes: false
SortUsingDeclarations: true
Standard: c++17
TabWidth: 2
UseTab: Never
---
Language: JavaScript
BasedOnStyle: Mozilla
# Use 180 columns for JS.
ColumnLimit: 180
JavaScriptQuotes: Single
SpacesInContainerLiterals: false
...
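The configuration above only takes effect when clang-format is actually run against the sources. A minimal local sketch, assuming a reasonably recent clang-format is installed (--dry-run needs version 10 or newer) and using src/CoolProp.cpp purely as an example path:

# Check a file against the repository's .clang-format without modifying it
clang-format --dry-run --Werror --style=file src/CoolProp.cpp
# Or reformat it in place
clang-format -i --style=file src/CoolProp.cpp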

.clang-tidy Normal file

@@ -0,0 +1,79 @@
---
# magic numbers are useful to lay out stuff in Qt...
# -readability-magic-numbers and its alias cppcoreguidelines-avoid-magic-numbers
# a `protected:` specifier followed by `protected slots:` would trigger it
# -readability-redundant-access-specifiers,
# Problem with OS_ASSERT macro
# -cppcoreguidelines-pro-bounds-array-to-pointer-decay,
# We use raw pointers for Qt, since usually the memory is then owned by the parent
# -cppcoreguidelines-owning-memory
# Because of Google Tests
# -cppcoreguidelines-avoid-non-const-global-variables
# I don't think this really helps clarify the intent
# -readability-else-after-return
# -modernize-concat-nested-namespaces
# Aliases
# - cppcoreguidelines-avoid-c-arrays => modernize-avoid-c-arrays
# - cppcoreguidelines-non-private-member-variables-in-classes => misc-non-private-member-variables-in-classes
# - cppcoreguidelines-explicit-virtual-functions, hicpp-use-override => modernize-use-override
# - bugprone-narrowing-conversions => cppcoreguidelines-narrowing-conversions
# Annoying: some config options exist only in later versions...
# cppcoreguidelines-narrowing-conversions.WarnOnEquivalentBitWidth was added in clang-tidy 13, and that would allow avoiding uint->int narrowing conversions
# Instead I have to disable the entire check...
Checks: |
*,
-fuchsia-*,
-google-*,
-zircon-*,
-abseil-*,
-llvm*,
-altera*,
-modernize-use-trailing-return-type,
-cppcoreguidelines-avoid-magic-numbers,
-readability-magic-numbers,
-cppcoreguidelines-pro-bounds-array-to-pointer-decay,
-cppcoreguidelines-owning-memory,
-cppcoreguidelines-pro-bounds-constant-array-index,
-readability-redundant-access-specifiers,
-cppcoreguidelines-explicit-virtual-functions,
-readability-else-after-return,
-modernize-concat-nested-namespaces,
-hicpp-*,
-hicpp-avoid-goto,
hicpp-exception-baseclass,
hicpp-multiway-paths-covered,
hicpp-no-assembler,
hicpp-signed-bitwise,
-cppcoreguidelines-avoid-c-arrays,
-cppcoreguidelines-non-private-member-variables-in-classes,
-bugprone-narrowing-conversions,
-cppcoreguidelines-narrowing-conversions,
-readability-function-cognitive-complexity,
-cppcoreguidelines-avoid-non-const-global-variables,
-modernize-use-override,
WarningsAsErrors: '*'
HeaderFilterRegex: '*'
FormatStyle: file
CheckOptions:
- key: modernize-use-override.AllowOverrideAndFinal
value: 'true'
- key: modernize-use-override.IgnoreDestructors
value: 'true'
- key: performance-for-range-copy.WarnOnAllAutoCopies
value: 'true'
- key: cppcoreguidelines-narrowing-conversions.WarnOnEquivalentBitWidth
value: 'false'
- key: readability-implicit-bool-conversion.AllowPointerConditions
value: 'true'
- key: misc-non-private-member-variables-in-classes.IgnoreClassesWithAllMemberVariablesBeingPublic
value: 'true'
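clang-tidy needs a compilation database to reproduce these checks locally. A hedged sketch, assuming a CMake build tree and again using src/CoolProp.cpp only as an example file:

# Export compile_commands.json during the normal CMake configure step
cmake -B build -S . -DCMAKE_EXPORT_COMPILE_COMMANDS=ON
# Run clang-tidy on one file; the checks and options are read from .clang-tidy
clang-tidy -p build src/CoolProp.cpp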

.cmake-format.yaml Normal file

@@ -0,0 +1,10 @@
bullet_char: '*'
dangle_parens: false
enum_char: .
line_ending: unix
line_width: 120
max_pargs_hwrap: 3
separate_ctrl_name_with_space: false
separate_fn_name_with_space: false
tab_size: 2
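A short sketch of applying this configuration locally, assuming cmake-format (from the cmakelang package) is installed; it picks up .cmake-format.yaml from the repository root:

# Report whether CMakeLists.txt already matches the configured style
cmake-format --check CMakeLists.txt
# Rewrite it in place according to .cmake-format.yaml
cmake-format -i CMakeLists.txt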


@@ -0,0 +1,71 @@
name: Python Linux
on:
push:
branches: [ master, develop, actions_pypi ]
# Sequence of patterns matched against refs/tags
tags:
- 'v*' # Push events to matching v*, e.g. v1.0, v20.15.10
pull_request:
branches: [ master ]
jobs:
python_bindings:
name: Build ${{ matrix.name }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: 3.10.x
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
pip install twine
- name: Build manylinux Python wheels
uses: RalfG/python-wheels-manylinux-build@v0.4.2
with:
package-path: wrappers/Python/
pre-build-command: 'export COOLPROP_CMAKE=default,64'
python-versions: 'cp36-cp36m cp37-cp37m cp38-cp38 cp39-cp39 cp310-cp310'
build-requirements: 'cython'
pip-wheel-args: '-w ./dist --verbose'
- name: Tar the wheels to maintain case sensitivity and file permissions
working-directory: ./wrappers/Python/
shell: bash
run: |
tar -cvzf CoolProp-Linux_wheels.tar.gz dist/
- name: Upload .whl to artifact
uses: actions/upload-artifact@v2
with:
name: CoolProp-Linux_wheels
path: ./wrappers/Python/CoolProp-Linux_wheels.tar.gz
- name: Publish wheels to (Test)PyPI
# TODO: for now I'm effectively disabling uploading to testpypi on each build
if: contains(github.ref, 'refs/tags')
working-directory: ./wrappers/Python/
env:
TWINE_USERNAME: __token__
run: |
if [[ "$GITHUB_REF" == *"refs/tags"* ]]; then
TWINE_REPOSITORY=pypi
TWINE_PASSWORD=${{ secrets.PYPI_TOKEN }}
else
TWINE_REPOSITORY=testpypi
TWINE_PASSWORD=${{ secrets.TESTPYPI_TOKEN }}
fi;
echo "TWINE_REPOSITORY=$TWINE_REPOSITORY" >> $GITHUB_ENV
echo "TWINE_PASSWORD=$TWINE_PASSWORD" >> $GITHUB_ENV
twine upload dist/*-manylinux*.whl
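The CoolProp-Linux_wheels.tar.gz artifact produced above is just a tarball of wheels; a hedged sketch of consuming it locally, with the interpreter tag cp310 chosen only as an example:

# Unpack the archive created by the manylinux build step
tar -xzf CoolProp-Linux_wheels.tar.gz
# Install the wheel matching the local interpreter and architecture
pip install dist/*cp310*manylinux*.whl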


@@ -0,0 +1,16 @@
name: Delete old workflow runs
on:
schedule:
- cron: '0 2 * * *' # daily
jobs:
del_runs:
runs-on: ubuntu-latest
steps:
- name: Delete workflow runs
uses: Mattraks/delete-workflow-runs@v2
with:
token: ${{ github.token }}
repository: ${{ github.repository }}
retain_days: 30
keep_minimum_runs: 0

.github/workflows/dev_clangformat.yml vendored Normal file

@@ -0,0 +1,33 @@
name: Development Clang Format
on:
push:
branches: [ 'master', 'main', 'develop', 'dev_checks' ]
# tags: [ 'v*' ]
pull_request:
branches: [ 'master', 'main', 'develop' ]
#schedule:
# - cron: '15 8 * * 3' # Run weekly
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0 # Fetch all history (especially branches)
- name: Run clang-format against C++ files touched by the PR
shell: bash
run: |
echo "GITHUB_REF=$GITHUB_REF GITHUB_BASE_REF=$GITHUB_BASE_REF GITHUB_HEAD_REF=$GITHUB_HEAD_REF"
git fetch --all
clang-format --version
./dev/ci/clang-format.sh remotes/origin/$GITHUB_HEAD_REF remotes/origin/$GITHUB_BASE_REF
- name: Upload clang-format patch as artifact
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: CoolProp-${{ github.sha }}-clang_format.patch
path: clang_format.patch
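The same check can be run outside CI; a sketch, with feature/my_branch standing in for whatever branch is being compared:

git fetch --all
# Compare the feature branch against master using the repository helper script
./dev/ci/clang-format.sh remotes/origin/feature/my_branch remotes/origin/master
# If the script fails, the resulting clang_format.patch can be applied locally
git apply clang_format.patch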

.github/workflows/dev_codeql.yml vendored Normal file

@@ -0,0 +1,68 @@
name: Development CodeQL
on:
push:
branches: [ 'dev_checks' ]
# branches: [ 'master', 'main', 'develop', 'dev_checks' ]
# tags: [ 'v*' ]
#pull_request:
# branches: [ 'master', 'main', 'develop' ]
schedule:
- cron: '15 8 * * 0,4' # Run twice a week
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: [ cpp, python ]
steps:
- name: Checkout
uses: actions/checkout@v3
with:
submodules: recursive
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.x'
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
queries: +security-and-quality
- name: Autobuild Python
if: ${{ matrix.language == 'python' }}
uses: github/codeql-action/autobuild@v2
- name: Configure CPP
if: ${{ matrix.language == 'cpp' }}
run: cmake
-DCOOLPROP_MY_MAIN=dev/ci/main.cpp
-B ${{github.workspace}}/build
-S .
- name: Build CPP
if: ${{ matrix.language == 'cpp' }}
run: cmake
--build ${{github.workspace}}/build
--config Release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
with:
category: "/language:${{ matrix.language }}"
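The C++ configure/build pair used by the CodeQL job can be reproduced locally with the same flags; a minimal sketch:

# Configure with the small CI driver (dev/ci/main.cpp) as the main program, as in the workflow above
cmake -DCOOLPROP_MY_MAIN=dev/ci/main.cpp -B build -S .
cmake --build build --config Release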

.github/workflows/dev_coverity.yml vendored Normal file

@@ -0,0 +1,31 @@
name: Development Coverity scan
on:
push:
branches: [ 'dev_checks' ]
# branches: [ 'master', 'main', 'develop', 'dev_checks' ]
# tags: [ 'v*' ]
#pull_request:
# branches: [ 'master', 'main', 'develop' ]
schedule:
- cron: '15 8 * * 0,4' # Run twice a week
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Configure build
run: cmake
-DCOOLPROP_MY_MAIN=dev/ci/main.cpp
-B ${{github.workspace}}/build
-S .
- uses: vapier/coverity-scan-action@v1
with:
email: ${{ secrets.COVERITY_SCAN_EMAIL }}
token: ${{ secrets.COVERITY_SCAN_TOKEN }}
command: cmake --build ${{github.workspace}}/build --config Release

.github/workflows/dev_cppcheck.yml vendored Normal file

@@ -0,0 +1,48 @@
name: Development cppcheck
on:
push:
branches: [ 'master', 'main', 'develop', 'dev_checks' ]
# tags: [ 'v*' ]
pull_request:
branches: [ 'master', 'main', 'develop' ]
#schedule:
# - cron: '15 8 * * 3' # Run weekly
jobs:
build:
runs-on: ubuntu-latest
container:
image: nrel/cppcheck:2.3
steps:
- uses: actions/checkout@v3
- name: Run cppcheck
shell: bash
run: |
# We ignore polypartition and nano since these are third party libraries
cppcheck \
--std=c++14 \
--enable=warning,style,information \
--suppress=noExplicitConstructor \
--suppress=useStlAlgorithm \
--suppress=unmatchedSuppression \
--suppress=unusedPrivateFunction \
--suppress=functionStatic:src/Backends/Helmholtz/Fluids/FluidLibrary.h \
--inline-suppr \
--inconclusive \
--template='[{file}:{line}]:({severity}),[{id}],{message}' \
-j $(nproc) \
--force \
./src \
3>&1 1>&2 2>&3 | tee cppcheck.txt
- name: Parse and colorize cppcheck
shell: bash
run: python ./dev/ci/colorize_cppcheck_results.py
- name: Upload cppcheck results as artifact
if: ${{ always() }}
uses: actions/upload-artifact@v3
with:
name: CoolProp-${{ github.sha }}-cppcheck_results.txt
path: cppcheck.txt
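For quicker local iteration a trimmed-down version of the same call is usually enough; a sketch, assuming cppcheck 2.x is on the PATH:

# Same suppressions as in CI, but without the forced, inconclusive full analysis
cppcheck --std=c++14 --enable=warning,style \
  --suppress=noExplicitConstructor --suppress=useStlAlgorithm --inline-suppr \
  -j $(nproc) ./src 2> cppcheck.txt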

.github/workflows/dev_msvc.yml vendored Normal file

@@ -0,0 +1,71 @@
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.
#
# Find more information at:
# https://github.com/microsoft/msvc-code-analysis-action
name: Development Microsoft C++ Code Analysis
on:
push:
branches: [ 'dev_checks' ]
# branches: [ 'master', 'main', 'develop', 'dev_checks' ]
# tags: [ 'v*' ]
#pull_request:
# branches: [ 'master', 'main', 'develop' ]
schedule:
- cron: '15 8 * * 0,4' # Run twice a week
permissions:
contents: read
jobs:
analyze:
permissions:
contents: read # for actions/checkout to fetch code
security-events: write # for github/codeql-action/upload-sarif to upload SARIF results
actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status
name: Analyze
runs-on: windows-latest
steps:
- name: Checkout repository
uses: actions/checkout@v3
with:
submodules: recursive
- name: Configure CMake
run: cmake
-DCOOLPROP_MY_MAIN=dev/ci/main.cpp
-B ${{github.workspace}}/build
-S .
# Build is not required unless generated source files are used
- name: Build CMake
run: cmake
--build ${{github.workspace}}/build
--config Release
- name: Initialize MSVC Code Analysis
uses: microsoft/msvc-code-analysis-action@04825f6d9e00f87422d6bf04e1a38b1f3ed60d99
# Provide a unique ID to access the sarif output path
id: run-analysis
with:
cmakeBuildDirectory: ${{github.workspace}}/build
# Ruleset file that will determine what checks will be run
ruleset: NativeRecommendedRules.ruleset
# Upload SARIF file to GitHub Code Scanning Alerts
- name: Upload SARIF to GitHub
uses: github/codeql-action/upload-sarif@v2
with:
sarif_file: ${{ steps.run-analysis.outputs.sarif }}
# Upload SARIF file as an Artifact to download and view
# - name: Upload SARIF as an Artifact
# uses: actions/upload-artifact@v3
# with:
# name: sarif-file
# path: ${{ steps.run-analysis.outputs.sarif }}

.github/workflows/docs_docker-build.yml vendored Normal file

@@ -0,0 +1,57 @@
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.
# GitHub recommends pinning actions to a commit SHA.
# To get a newer version, you will need to update the SHA.
# You can also reference a tag or branch, but the action may change without warning.
name: Documentation Docker image
on:
workflow_dispatch:
push:
branches: [ 'master', 'main', 'develop', 'actions_docs' ]
tags: [ 'v*' ]
pull_request:
branches: [ 'master', 'main', 'develop' ]
env:
REGISTRY: ghcr.io
IMAGE_SUFFIX: docs_01_base
IMAGE_NAME: ${{ github.repository }}
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Log in to the Container registry
uses: docker/login-action@v2
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }} # This uses the permissions of the user who triggered the workflow
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}_${{ env.IMAGE_SUFFIX }}
- name: Build and push Docker image
uses: docker/build-push-action@v3
with:
context: ./dev/docker/
file: ./dev/docker/${{ env.IMAGE_SUFFIX }}.Dockerfile
push: ${{ github.event_name != 'pull_request' }}
#tags: ${{ steps.meta.outputs.tags }},dev
tags: dev
labels: ${{ steps.meta.outputs.labels }}

.github/workflows/docs_docker-run.yml vendored Normal file

@@ -0,0 +1,161 @@
name: Documentation builds (HTML)
on:
schedule:
- cron: '0 2 * * *' # daily
- cron: '0 4 * * 1' # weekly
workflow_dispatch:
inputs:
expensive:
description: 'Build expensive docs'
required: false
type: boolean
push:
branches: [ 'master', 'main', 'develop', 'actions_docs' ]
tags: [ 'v*' ]
pull_request:
branches: [ 'master', 'main', 'develop' ]
env:
DAILY: ${{ github.event.schedule == '0 2 * * *' }}
WEEKLY: ${{ github.event.schedule == '0 4 * * 1' }}
TAGGED: ${{ contains(github.ref, 'refs/tags') }}
jobs:
build:
runs-on: ubuntu-latest
container:
image: ghcr.io/coolprop/coolprop_docs_02_builder:dev
credentials:
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: actions/cache@v3
id: cached-props # steps.cached-props.outputs.cache-hit != 'true'
with:
path: |
./Web/fluid_properties/fluids/Consistencyplots
./Web/_static/fluid_properties/Incompressibles_reports
./Web/fluid_properties/Incompressibles_mass-based-fluids
./Web/fluid_properties/Incompressibles_mole-based-fluids
./Web/fluid_properties/Incompressibles_volume-based-fluids
./Web/fluid_properties/Incompressibles_pure-fluids
./Web/scripts/incompressibles_consistency
key: cached-props
#key: cached-props-${{ github.sha }}
#restore-keys: |
# cached-props-
- name: Variable calculations
id: variable_calculation
shell: bash
# echo "EXPENSIVE_TRIGGERED=${{ inputs.expensive || env.TAGGED || env.WEEKLY }}" >> $GITHUB_ENV
run: |
set -x
echo "expensive_triggered=${{ inputs.expensive || env.TAGGED || env.WEEKLY }}" >> $GITHUB_OUTPUT
echo "expensive_cached=${{ steps.cached-props.outputs.cache-hit != 'true' }}" >> $GITHUB_OUTPUT
echo "expensive=${{ (steps.cached-props.outputs.cache-hit != 'true') || inputs.expensive || env.TAGGED || env.WEEKLY }}" >> $GITHUB_OUTPUT
cat $GITHUB_OUTPUT
conda install -y packaging
COOLPROP_VERSION=$(python dev/extract_version.py --cmake-only)
echo COOLPROP_VERSION=$COOLPROP_VERSION >> $GITHUB_ENV
- name: Build and install wheel using bdist_wheel
working-directory: ./wrappers/Python/
shell: bash
run: |
source activate docs
python setup.py bdist_wheel --dist-dir dist cmake=default,64
pip install -vvv --force-reinstall --ignore-installed --upgrade --no-index `ls dist/*.whl`
- name: Test the installed CoolProp version
shell: bash
run: |
source activate docs
python -c "import CoolProp; print(CoolProp.__gitrevision__)"
python -c "import CoolProp; print(CoolProp.__file__)"
- name: Build homepage and create graphs
# Use a single argument with "True" or "1" to trigger a full rebuild
working-directory: ./Web/scripts/
shell: bash
run: |
source activate docs
echo "Calling: python -u __init__.py ${{ steps.variable_calculation.outputs.expensive }}"
python -u __init__.py ${{ steps.variable_calculation.outputs.expensive }}
- name: Build documentation with Doxygen
shell: bash
run: |
source activate docs
doxygen --version
doxygen Doxyfile
- name: Build documentation with Sphinx
working-directory: ./Web
shell: bash
run: |
source activate docs
sphinx-apidoc -T -f -e -o apidoc ../wrappers/Python/CoolProp
make html
- name: Upload GitHub Pages artifact
uses: actions/upload-pages-artifact@v1
with:
path: ./Web/_build/html/
- name: Zip the HTML documentation
working-directory: ./Web/_build/
shell: bash
run: |
tar -cvzf CoolProp-${{ env.COOLPROP_VERSION }}-documentation-html.tar.gz html/*
# zip -rq CoolProp-${{ env.COOLPROP_VERSION }}-documentation-html.zip html/*
- name: Archive TGZ or ZIP artifacts
uses: actions/upload-artifact@v3
with:
name: docs
path: |
Web/_build/CoolProp-${{ env.COOLPROP_VERSION }}-documentation-html.tar.gz
# Web/_build/CoolProp-${{ env.COOLPROP_VERSION }}-documentation-html.zip
#- name: Upload TGZ or ZIP to release
# if: contains(github.ref, 'refs/tags')
# uses: svenstaro/upload-release-action@v2
# with:
# repo_token: ${{ secrets.GITHUB_TOKEN }}
# file: Web/_build/CoolProp-${{ env.COOLPROP_VERSION }}-documentation-html.tar.gz
# tag: ${{ github.ref }}
# overwrite: true
# file_glob: false
# Deploy job
deploy:
# Add a dependency to the build job
needs: build
# Do not deploy intermediate builds
if: ${{ github.ref == 'refs/heads/main' || github.ref == 'refs/heads/master' }}
# Grant GITHUB_TOKEN the permissions required to make a Pages deployment
permissions:
pages: write # to deploy to Pages
id-token: write # to verify the deployment originates from an appropriate source
# Deploy to the github-pages environment
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
# Specify runner + deployment step
runs-on: ubuntu-latest
steps:
- name: Deploy to GitHub Pages
#if: contains(github.ref, 'refs/tags')
id: deployment
uses: actions/deploy-pages@v1

.github/workflows/library_shared.yml vendored Normal file

@@ -0,0 +1,80 @@
name: Library builds (shared)
on:
push:
branches: [ 'master', 'main', 'develop', 'actions_shared' ]
tags: [ 'v*' ]
pull_request:
branches: [ 'master', 'main', 'develop' ]
env:
# Customize the CMake build type here (Release, Debug, RelWithDebInfo, etc.)
BUILD_TYPE: Release
jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
# fail-fast: Default is true, switch to false to allow one platform to fail and still run others
fail-fast: false
matrix:
os: [windows-latest, ubuntu-latest, macOS-latest]
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: actions/setup-python@v4
with:
python-version: '3.x'
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
python -m pip install requests packaging
- name: Extract CoolProp version from CMakeLists.txt
shell: bash
run: |
set -x
COOLPROP_VERSION=$(python dev/extract_version.py --cmake-only)
echo COOLPROP_VERSION=$COOLPROP_VERSION >> $GITHUB_ENV
# Create the build directory too
mkdir build
- name: Configure CMake
working-directory: ./build
shell: bash
run: cmake -DCMAKE_BUILD_TYPE:STRING=$BUILD_TYPE -DCOOLPROP_SHARED_LIBRARY:BOOL=ON ..
- name: Build
working-directory: ./build
shell: bash
run: |
set -x
cmake --build . --target install -j $(nproc) --config $BUILD_TYPE
# - name: Tar.gz the shared library to maintain case sensitivity and file permissions
# working-directory: ./install_root/shared_library/
# shell: bash
# run: |
# set -x
# tar -cvzf CoolProp-${{ env.COOLPROP_VERSION }}-shared-${{ matrix.os }}.tar.gz ./*
- name: Archive artifacts
uses: actions/upload-artifact@v3
with:
name: shared_library
path: install_root/shared_library
# - name: Upload TGZ or ZIP to release
# if: contains(github.ref, 'refs/tags')
# uses: svenstaro/upload-release-action@v2
# with:
# repo_token: ${{ secrets.GITHUB_TOKEN }}
# file: install_root/shared_library/CoolProp-${{ env.COOLPROP_VERSION }}-shared-${{ matrix.os }}.tar.gz
# tag: ${{ github.ref }}
# overwrite: true
# file_glob: false

.github/workflows/python_buildwheels.yml vendored Normal file

@@ -0,0 +1,151 @@
name: Python cibuildwheel
on:
push:
branches: [ 'master', 'main', 'develop', 'actions_pypi' ]
tags: [ 'v*' ]
pull_request:
branches: [ 'master', 'main', 'develop' ]
jobs:
python_source:
name: Build source package
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: 3.9.x
- name: Install dependencies
run: pip install setuptools wheel 'Cython<3.0' requests jinja2 pyyaml
- name: Build package, sdist
working-directory: ./wrappers/Python/pypi
run: python prepare_pypi.py --dist-dir=${GITHUB_WORKSPACE}/Python
- name: Store artifacts
uses: actions/upload-artifact@v3
with:
name: Python
path: Python
python_ubuntu:
strategy:
# Ensure that a wheel builder finishes even if another fails
fail-fast: false
matrix:
os: [ubuntu]
python-version: [36, 37, 38, 39, 310, 311]
arch: [i686, x86_64, aarch64, ppc64le, s390x]
exclude:
- os: ubuntu
arch: i686 # reduce the build time until people ask for the binaries
- os: ubuntu
arch: ppc64le # reduce the build time until people ask for the binaries
- os: ubuntu
arch: s390x # reduce the build time until people ask for the binaries
uses: ./.github/workflows/python_cibuildwheel.yml
with:
os: ${{ matrix.os }}
python-version: ${{ matrix.python-version }}
arch: ${{ matrix.arch }}
python_windows:
strategy:
# Ensure that a wheel builder finishes even if another fails
fail-fast: false
matrix:
os: [windows]
python-version: [36, 37, 38, 39, 310, 311]
arch: [AMD64, x86, ARM64]
exclude:
- os: windows
arch: ARM64 # creates problems with msgpack-c
- os: windows
arch: ARM64
python-version: 36
- os: windows
arch: ARM64
python-version: 37
- os: windows
arch: ARM64
python-version: 38
uses: ./.github/workflows/python_cibuildwheel.yml
with:
os: ${{ matrix.os }}
python-version: ${{ matrix.python-version }}
arch: ${{ matrix.arch }}
python_macos:
strategy:
# Ensure that a wheel builder finishes even if another fails
fail-fast: false
matrix:
os: [macos]
python-version: [36, 37, 38, 39, 310, 311]
arch: [x86_64, arm64, universal2]
exclude:
- os: macos
arch: arm64
python-version: 36
- os: macos
arch: arm64
python-version: 37
- os: macos
arch: universal2 # is redundant
uses: ./.github/workflows/python_cibuildwheel.yml
with:
os: ${{ matrix.os }}
python-version: ${{ matrix.python-version }}
arch: ${{ matrix.arch }}
upload_python_bindings_to_pypi:
needs: [python_source, python_ubuntu, python_windows, python_macos]
name: Upload to PyPi
runs-on: ubuntu-latest
steps:
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: 3.9.x
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
pip install setuptools wheel twine requests packaging
if [[ "$GITHUB_REF" == *"refs/tags"* ]]; then
TWINE_REPOSITORY=pypi
TWINE_PASSWORD=${{ secrets.PYPI_TOKEN }}
else
TWINE_REPOSITORY=testpypi
TWINE_PASSWORD=${{ secrets.TESTPYPI_TOKEN }}
fi;
echo "Using TWINE_REPOSITORY=$TWINE_REPOSITORY"
echo "TWINE_REPOSITORY=$TWINE_REPOSITORY" >> $GITHUB_ENV
echo "TWINE_PASSWORD=$TWINE_PASSWORD" >> $GITHUB_ENV
- name: Download ALL wheels
uses: actions/download-artifact@v3
with:
name: Python
path: Python
- name: Display structure of downloaded files
run: |
set -x
ls -R
du -sh
- name: Publish wheels to (Test)PyPI
if: ${{ github.event_name != 'pull_request' }}
env:
TWINE_USERNAME: __token__
run: python -m twine upload --skip-existing Python/*.whl Python/*.tar.gz
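Individual wheels can also be built outside the matrix by calling cibuildwheel directly; a hedged sketch for a single CPython version on the local architecture (Docker is required on Linux):

python -m pip install cibuildwheel==2.11.3
# Build only the CPython 3.10 wheel for the package in wrappers/Python
CIBW_BUILD="cp310-*" python -m cibuildwheel wrappers/Python --output-dir Python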


@@ -0,0 +1,76 @@
name: Python build one single wheel
on:
workflow_call:
inputs:
python-version:
required: true
type: string
os:
required: true
type: string
arch:
required: true
type: string
jobs:
build:
#name: py${{ inputs.python-version }}-${{ inputs.os }}-${{ inputs.arch }}
runs-on: ${{ inputs.os }}-latest
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: 3.9.x
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
python -m pip install requests packaging
- name: Figure out the TestPyPi/PyPi Version
shell: bash
run: |
if [[ "$GITHUB_REF" == *"refs/tags"* ]]; then
python dev/extract_version.py --pypi --replace-setup-py
else
python dev/extract_version.py --replace-setup-py
fi;
- name: Set up QEMU
if: ${{ runner.os == 'Linux' }}
uses: docker/setup-qemu-action@v2
with:
platforms: all
- name: Build and test wheels
uses: pypa/cibuildwheel@v2.11.3
env:
MACOSX_DEPLOYMENT_TARGET: 10.9
CIBW_ENVIRONMENT_MACOS: MACOSX_DEPLOYMENT_TARGET=10.9 SDKROOT=/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk
CIBW_BEFORE_BUILD: >
pip install setuptools wheel "Cython<3.0" requests jinja2 pyyaml
CIBW_ENVIRONMENT_LINUX: COOLPROP_CMAKE=default,NATIVE
CIBW_BUILD: cp${{ inputs.python-version }}-*
CIBW_ARCHS_MACOS: ${{ inputs.arch }} # x86_64 arm64 # universal2 is redundant
CIBW_ARCHS_WINDOWS: ${{ inputs.arch }} # AMD64 x86 # ARM64 creates problems with msgpack-c
CIBW_ARCHS_LINUX: ${{ inputs.arch }} # i686 x86_64 aarch64 ppc64le s390x
CIBW_MANYLINUX_X86_64_IMAGE: manylinux2014
CIBW_MANYLINUX_I686_IMAGE: manylinux2014
CIBW_MANYLINUX_AARCH64_IMAGE: manylinux2014
CIBW_SKIP: "pp*"
CIBW_TEST_SKIP: "*-macosx_arm64 *-win_arm64"
with:
package-dir: ./wrappers/Python/
output-dir: Python
- name: Store artifacts
uses: actions/upload-artifact@v3
with:
name: Python
path: Python

.github/workflows/release_all_files.yml vendored Normal file

@@ -0,0 +1,172 @@
name: Release workflow run
on:
push:
branches: [ 'feature/automated_release' ]
# branches: [ 'master', 'main', 'develop', 'feature/automated_release' ]
# tags: [ 'v*' ]
# pull_request:
# branches: [ 'master', 'main', 'develop' ]
schedule:
- cron: '0 2 * * *' # daily
workflow_dispatch:
inputs:
branch:
description: 'Branch or tag to retrieve the binaries from'
required: false
default: 'master'
jobs:
# release_job:
# uses: ./.github/workflows/release_binaries.yml
# with:
# branch: master
# target: nightly
# get_latest_tag:
# runs-on: ubuntu-latest
# outputs:
# tag: ${{ fromJson(steps.get_latest_release.outputs.data).tag_name }}
# steps:
# - uses: octokit/request-action@v2.x
# id: get_latest_release
# with:
# route: GET /repos/{owner}/{repo}/releases/latest
# owner: coolprop
# repo: coolprop
# env:
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# - run: "echo latest release id: ${{ fromJson(steps.get_latest_release.outputs.data).id }}"
# - run: "echo latest release tag: ${{ fromJson(steps.get_latest_release.outputs.data).tag_name }}"
set_vars:
runs-on: ubuntu-latest
outputs:
branch: ${{ steps.propagate_vars.outputs.branch }}
target: ${{ steps.propagate_vars.outputs.target }}
webdir: ${{ steps.propagate_vars.outputs.webdir }}
steps:
- run: echo "BRANCH=${{ (inputs.branch == '' && 'master') || inputs.branch }}" >> $GITHUB_ENV
- run: echo "TARGET=${{ (env.BRANCH == 'master' && 'nightly') || env.BRANCH }}" >> $GITHUB_ENV
- run: echo "WEBDIR=${{ (env.BRANCH == 'master' && 'dev') || '.' }}" >> $GITHUB_ENV
- run: echo ${{ env.BRANCH }}
- run: echo ${{ env.TARGET }}
- run: echo ${{ env.WEBDIR }}
- run: echo ${BRANCH}
- run: echo ${TARGET}
- run: echo ${WEBDIR}
- id: propagate_vars
run: |
echo "branch=${{ (inputs.branch == '' && 'master') || inputs.branch }}" >> $GITHUB_OUTPUT
echo "target=${{ (env.BRANCH == 'master' && 'nightly') || env.BRANCH }}" >> $GITHUB_OUTPUT
echo "webdir=${{ (env.BRANCH == 'master' && 'dev') || '.' }}" >> $GITHUB_OUTPUT
collect_binaries:
needs: [set_vars]
strategy:
fail-fast: false
matrix:
os: [ubuntu]
workflow: [library_shared.yml, windows_installer.yml, docs_docker-run.yml] # , python_buildwheels.yml]
uses: ./.github/workflows/release_get_artifact.yml
with:
branch: ${{ needs.set_vars.outputs.branch }}
workflow: ${{ matrix.workflow }}
prepare_sources:
needs: [set_vars]
name: Prepare the source code
runs-on: ubuntu-latest
steps:
- name: Fetch the sources
uses: actions/checkout@v3
with:
ref: ${{ needs.set_vars.outputs.branch }}
submodules: 'recursive'
path: source
- name: Update the headers
run: |
pushd source
git reset --hard HEAD
python "dev/generate_headers.py"
find . -iwholename "*/.git*" -prune -exec rm -rf {} \;
popd
zip -rq CoolProp_sources.zip source
mkdir -p binaries/source
mv CoolProp_sources.zip binaries/source/
- name: Store artifacts
uses: actions/upload-artifact@v3
with:
name: binaries
path: binaries
retention-days: 1
deploy_files:
needs: [set_vars, collect_binaries, prepare_sources]
name: Deploy collected files
runs-on: ubuntu-latest
steps:
- name: Download binaries
uses: actions/download-artifact@v3
with:
name: binaries
path: binaries
- name: Create info files
run: |
echo "CoolProp ${{ needs.set_vars.outputs.target }} binaries" > binaries/README.rst.txt
echo "-------------------------" >> binaries/README.rst.txt
echo -n "Binaries of the \`CoolProp project <http://coolprop.sourceforge.net>\`_ " >> binaries/README.rst.txt
echo "updated on $(date +%F) at $(date +%X) $(date +%Z)." >> binaries/README.rst.txt
cat binaries/README.rst.txt
mkdir -p binaries/Python
echo "Please use the following commands to install CoolProp for Python:" > binaries/Python/README.txt
echo "nightly: python -m pip install -i https://test.pypi.org/simple/ CoolProp" >> binaries/Python/README.txt
echo "release: python -m pip install --upgrade CoolProp" >> binaries/Python/README.txt
cat binaries/Python/README.txt
- name: Display structure of downloaded files
run: |
set -x
ls -R
du -sh
- name: Install SSH key
uses: shimataro/ssh-key-action@v2
with:
key: ${{ secrets.SF_SSH_KEY }}
name: id_rsa-${{ secrets.SF_SSH_USER }}
known_hosts: ${{ secrets.SF_HOST_KEYS }}
config: |
Host frs.sf.net-${{ secrets.SF_SSH_USER }}
HostName frs.sf.net
User ${{ secrets.SF_SSH_USER }}
IdentityFile ~/.ssh/id_rsa-${{ secrets.SF_SSH_USER }}
PubkeyAcceptedKeyTypes +ssh-rsa
if_key_exists: fail # replace / ignore / fail; optional (defaults to fail)
- name: Fix the docs
run: |
rm -rf binaries/github-pages
tar -xzf binaries/docs/*documentation*.tar.gz
mkdir -p html/v4
rsync frs.sf.net-${{ secrets.SF_SSH_USER }}:/home/frs/project/coolprop/CoolProp/4.2.5/coolpropv425docs.zip coolpropv4docs.zip
unzip -qo coolpropv4docs.zip -d html/v4
zip -rq documentation.zip html
rm -rf binaries/docs/*
mv documentation.zip binaries/docs/
- name: Upload using rsync over SSH
run: |
RSYNC_DRY_RUN=
RSYNC_OPTS="-a --chmod=Dug=rwx,Do=rx,Fug=rw,Fo=r -z --stats --delete"
rsync $RSYNC_DRY_RUN $RSYNC_OPTS "binaries/" frs.sf.net-${{ secrets.SF_SSH_USER }}:/home/frs/project/coolprop/CoolProp/${{ needs.set_vars.outputs.target }}
rsync $RSYNC_DRY_RUN $RSYNC_OPTS "html/" frs.sf.net-${{ secrets.SF_SSH_USER }}:/home/project-web/coolprop/htdocs/${{ needs.set_vars.outputs.webdir }}


@@ -0,0 +1,91 @@
name: Download workflow artifacts
on:
workflow_call:
inputs:
branch:
required: true
type: string
workflow:
required: true
type: string
jobs:
# release_job:
# uses: ./.github/workflows/release_get_artifact.yml
# with:
# branch: master
# workflow: library_shared.yml
get_artifact:
runs-on: ubuntu-latest
steps:
- name: Download artifact
id: download-artifact
uses: dawidd6/action-download-artifact@v2
with:
# Optional, GitHub token, a Personal Access Token with `public_repo` scope if needed
# Required, if the artifact is from a different repo
# Required, if the repo is private a Personal Access Token with `repo` scope is needed
github_token: ${{ secrets.GITHUB_TOKEN }}
# Optional, workflow file name or ID
# If not specified, will be inferred from run_id (if run_id is specified), or will be the current workflow
workflow: ${{ inputs.workflow }}
# Optional, the status or conclusion of a completed workflow to search for
# Can be one of a workflow conclusion:
# "failure", "success", "neutral", "cancelled", "skipped", "timed_out", "action_required"
# Or a workflow status:
# "completed", "in_progress", "queued"
# Use the empty string ("") to ignore status or conclusion in the search
workflow_conclusion: success
# Optional, will get head commit SHA
#pr: ${{github.event.pull_request.number}}
# Optional, no need to specify if PR is
#commit: ${{github.event.pull_request.head.sha}}
# Optional, will use the specified branch. Defaults to all branches
branch: ${{ inputs.branch }}
# Optional, defaults to all types
#event: push
# Optional, will use specified workflow run
#run_id: 1122334455
# Optional, run number from the workflow
#run_number: 34
# Optional, uploaded artifact name,
# will download all artifacts if not specified
# and extract them into respective subdirectories
# https://github.com/actions/download-artifact#download-all-artifacts
#name: artifact_name
# Optional, a directory where to extract artifact(s), defaults to the current directory
path: binaries
# Optional, defaults to current repo
repo: ${{ github.repository }}
# Optional, check the workflow run to whether it has an artifact
# then will get the last available artifact from the previous workflow
# default false, just try to download from the last one
check_artifacts: false
# Optional, search for the last workflow run which stored an artifact named as in the `name` input
# default false
search_artifacts: false
# Optional, choose to skip unpacking the downloaded artifact(s)
# default false
skip_unpack: false
# Optional, choose how to exit the action if no artifact is found
# can be one of:
# "fail", "warn", "ignore"
# default fail
if_no_artifact_found: fail
- name: Display structure of downloaded files
run: |
set -x
ls -R
du -sh
- name: Store artifacts
uses: actions/upload-artifact@v3
with:
name: binaries
path: binaries
retention-days: 1

.github/workflows/test_catch2.yml vendored Normal file

@@ -0,0 +1,72 @@
name: Testing Catch2
on:
push:
branches: [ 'master', 'main', 'develop', 'actions_tests' ]
tags: [ 'v*' ]
pull_request:
branches: [ 'master', 'main', 'develop' ]
#schedule:
# - cron: '15 8 * * 3' # Run weekly
env:
# Customize the CMake build type here (Release, Debug, RelWithDebInfo, etc.)
BUILD_TYPE: Release
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Extract CoolProp version from CMakeLists.txt
shell: bash
run: |
set -x
COOLPROP_VERSION=$(python dev/extract_version.py --cmake-only)
echo COOLPROP_VERSION=$COOLPROP_VERSION >> $GITHUB_ENV
# Create the build directory too
mkdir build
- name: Configure CMake
working-directory: ./build
shell: bash
run: cmake -DCMAKE_BUILD_TYPE:STRING=$BUILD_TYPE -DCOOLPROP_SHARED_LIBRARY:BOOL=ON -DCOOLPROP_CATCH_MODULE:BOOL=ON ..
- name: Build
working-directory: ./build
shell: bash
run: |
set -x
cmake --build . --target install -j $(nproc) --config $BUILD_TYPE
- name: Test
working-directory: ./build
shell: bash
# Execute tests defined by the CMake configuration.
# See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail
run: |
ctest -j $(nproc)
- name: Rerun failed Tests
if: failure()
working-directory: ./build
shell: bash
# Execute tests defined by the CMake configuration.
# See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail
run: |
ctest -j $(nproc) --rerun-failed --output-on-failure
# TODO: pick one style, for now I'm doing this so I can report zero new failures compared to catch1
- name: Rerun failed Tests in CatchTestRunner directly
if: failure()
working-directory: ./build
shell: bash
# Execute tests defined by the CMake configuration.
# See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail
run: |
./CatchTestRunner

.github/workflows/windows_installer.yml vendored Normal file

@@ -0,0 +1,78 @@
name: Windows Installer
on:
push:
branches: [ 'master', 'main', 'develop', 'actions_installer' ]
tags: [ 'v*' ]
pull_request:
branches: [ 'master', 'main', 'develop' ]
env:
# Customize the CMake build type here (Release, Debug, RelWithDebInfo, etc.)
BUILD_TYPE: Release
jobs:
build:
runs-on: windows-latest
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
python -m pip install requests packaging
- name: Extract CoolProp version from CMakeLists.txt
shell: bash
run: |
set -x
COOLPROP_VERSION=$(python dev/extract_version.py --cmake-only)
echo COOLPROP_VERSION=$COOLPROP_VERSION >> $GITHUB_ENV
# Create the build directory too
mkdir build
- name: Configure CMake
working-directory: ./build
shell: bash
run: cmake -DCMAKE_BUILD_TYPE:STRING=$BUILD_TYPE -DCOOLPROP_WINDOWS_PACKAGE:BOOL=ON ..
- name: Build
working-directory: ./build
shell: bash
run: |
set -x
cmake --build . --target COOLPROP_WINDOWS_PACKAGE_INSTALLER -j $(nproc) --config $BUILD_TYPE
# - name: Tar.gz the installers to maintain case sensitivity and file permissions
# working-directory: ./build/InnoScript/bin/
# shell: bash
# run: |
# set -x
# tar -cvzf CoolProp-${{ env.COOLPROP_VERSION }}-WindowsInstaller.tar.gz ./Installers/*
#
# - name: Create a zip archive since Windows users often cannot handle tar.gz files
# working-directory: ./build/InnoScript/bin/
# shell: bash
# run: |
# set -x
# zip -r CoolProp-${{ env.COOLPROP_VERSION }}-WindowsInstaller.zip ./Installers/*
- name: Archive artifacts
uses: actions/upload-artifact@v3
with:
name: Installers
path: build/InnoScript/bin/Installers
# - name: Upload TGZ or ZIP to release
# if: contains(github.ref, 'refs/tags')
# uses: svenstaro/upload-release-action@v2
# with:
# repo_token: ${{ secrets.GITHUB_TOKEN }}
# file: build/InnoScript/bin/Installers/Windows/CoolProp-${{ env.COOLPROP_VERSION }}.exe
# tag: ${{ github.ref }}
# overwrite: true
# file_glob: false

.gitignore vendored

@@ -67,3 +67,6 @@
/.vscode/
bld/
compile_commands.json
cppcheck.txt

.gitmodules vendored

@@ -1,5 +1,5 @@
[submodule "externals/Catch"]
path = externals/Catch
[submodule "externals/Catch2"]
path = externals/Catch2
url = https://github.com/catchorg/Catch2
branch = master
[submodule "externals/Eigen"]


@@ -1,144 +0,0 @@
notifications:
email:
on_success: never
on_failure: change
git:
depth: 5
#submodules_depth: 100
language:
- cpp
#os: linux
#dist: trusty
#
#compiler:
# - clang
# - gcc
#
#addons:
# apt:
# sources:
# - ubuntu-toolchain-r-test
# - llvm-toolchain-trusty
# packages:
# - cmake
# - gcc-6
# - g++-6
# - clang-3.7
matrix:
include:
# Use this code to enable certain debug builds - for example for pull requests
#- compiler: clang
# branches:
# only:
# - issues/1820
# before_script:
# - cmake --version
# - echo "$CXX" && "$CXX" -v
# - mkdir -p build && pushd build
# - cmake .. -DCMAKE_BUILD_TYPE=Release -DCOOLPROP_MY_MAIN=dev/Tickets/1820.cpp -DCMAKE_CXX_STANDARD=11
# - popd
# script:
# - cmake --build build --config Release
# - ./build/Main
- os: linux
dist: bionic
compiler: gcc
branches:
except:
- coverity_scan
before_script:
- cmake --version
- echo "$CXX" && "$CXX" -v
- mkdir -p build && pushd build
- cmake .. -DCMAKE_BUILD_TYPE=Release -DCOOLPROP_SHARED_LIBRARY=ON
- popd
script:
- cmake --build build --config Release
- os: linux
dist: bionic
compiler: clang
branches:
except:
- coverity_scan
before_script:
- cmake --version
- echo "$CXX" && "$CXX" -v
- mkdir -p build && pushd build
- cmake .. -DCMAKE_BUILD_TYPE=Release -DCOOLPROP_SHARED_LIBRARY=ON
- popd
script:
- cmake --build build --config Release
- os: osx
#osx_image: xcode8.3 # Xcode 8.3 OS X 10.12
branches:
except:
- coverity_scan
before_script:
- cmake --version
- echo "$CXX" && "$CXX" -v
- mkdir -p build && pushd build
- cmake .. -DCMAKE_BUILD_TYPE=Release -DCOOLPROP_SHARED_LIBRARY=ON
- popd
script:
- cmake --build build --config Release
- os: linux
dist: bionic
branches:
only:
- coverity_scan
# before_install:
# - echo -n | openssl s_client -connect scan.coverity.com:443 | sed -ne '/-BEGIN CERTIFICATE-/,/-END CERTIFICATE-/p' | sudo tee -a /etc/ssl/certs/ca-
addons:
coverity_scan:
project:
name: "CoolProp/CoolProp"
description: "Build submitted via Travis CI"
notification_email: jowr@ipu.dk
build_command_prepend: "cmake . -DCMAKE_BUILD_TYPE=Release -DCOOLPROP_SHARED_LIBRARY=ON"
build_command: "cmake --build . --config Release"
branch_pattern: coverity_scan
before_script: cmake . -DCMAKE_BUILD_TYPE=Release -DCOOLPROP_SHARED_LIBRARY=ON
script: cmake --build . --config Release
# build_command_prepend: "cmake . -DCOOLPROP_MY_MAIN=dev/coverity/main.cxx"
# build_command: "cmake --build . --target Main"
# branch_pattern: coverity_scan
# before_script: cmake . -DCOOLPROP_MY_MAIN=dev/coverity/main.cxx
# script: cmake --build . --target Main
#branches:
# only:
# - master
# - release
# - coverity_scan
# - travis_integration
# # Build all branches
# # - /.*/
env:
global:
- secure: "XGfICTnfFv9xpVDBbNwJIkXV0OZCAcOT46aeFYmODm3jl+ya60k0C91G9zlZ9fEYiQwAEBTO2Y/Ge0AMaoFqtu3H3vu7S5JjAbpMV3ZRDJqampScy550yPpziOuxvB6h23PZRfLOBVEsUGHnCO5rLb20iPM94XsHSBL3Smn2o9c="
#install:
# - if [ "$CXX" = "g++" ]; then export CXX="g++-6" CC="gcc-6"; fi
# - if [ "$CXX" = "clang++" ]; then export CXX="clang++-3.7" CC="clang-3.7"; fi
# before_script:
# # Check compiler and cmake versions
# - cmake --version; gcc -v; clang -v
# - echo "$CXX" && "$CXX" -v
# # Run your build commands next
# - mkdir build && pushd build
# - echo "${TRAVIS_BUILD_DIR}"
# - cmake .. -DCMAKE_BUILD_TYPE=Release # Same as "${TRAVIS_BUILD_DIR}"
# - popd
# script:
# - cmake --build build --config Release


@@ -1,192 +0,0 @@
notifications:
email:
on_success: never
on_failure: change
language: python
#os:
# - linux
# - osx
matrix:
include:
- os: linux
dist: trusty
sudo: required # false or required, use required to get docker
env: BUILD_TARGET="PYTHON_LINUX"
# - os: linux
# dist: precise
# sudo: required # false or required
# env: BUILD_TARGET="PYTHON_LINUX"
# - os: linux
# dist: trusty
# sudo: false # false or required
# env: BUILD_TARGET="PYTHON_LINUX"
# - os: linux
# dist: precise
# sudo: false # false or required
# env: BUILD_TARGET="PYTHON_LINUX"
# - os: osx
# language: generic
# osx_image: xcode8.3 # Xcode 8.3 OS X 10.12
# env: BUILD_TARGET="PYTHON_APPLE"
# - os: osx
# language: generic
# osx_image: xcode8.2 # Xcode 8.2 OS X 10.12
# env: BUILD_TARGET="PYTHON_APPLE"
# - os: osx
# language: generic
# osx_image: xcode8.1 # Xcode 8.1 OS X 10.12
# env: BUILD_TARGET="PYTHON_APPLE"
# - os: osx
# language: generic
# osx_image: xcode8 # Xcode 8gm OS X 10.11
# env: BUILD_TARGET="PYTHON_APPLE"
# - os: osx
# language: generic
# osx_image: xcode7.3 # Default Xcode 7.3.1 OS X 10.11
# env: BUILD_TARGET="PYTHON_APPLE"
# The old image does not have pip available ...
# - os: osx
# language: generic
# osx_image: xcode6.4 # Xcode 6.4 OS X 10.10
# Build the Linux wheels based on https://github.com/pypa/python-manylinux-demo
# - os: linux
# dist: trusty
# sudo: required
# services:
# - docker
# env: BUILD_TARGET="PYTHON_LINUX_WHEELS_CUSTOM"
# BITNESS="32"
- os: linux
dist: trusty
sudo: required
services:
- docker
env: BUILD_TARGET="PYTHON_LINUX_WHEELS"
DOCKER_IMAGE=quay.io/pypa/manylinux1_x86_64
DOCKER_IMAGE=dockcross/manylinux-x64
PRE_CMD=""
SETUP_PY_ARGS="cmake=default,64"
- os: linux
dist: trusty
sudo: required
services:
- docker
env: BUILD_TARGET="PYTHON_LINUX_WHEELS"
DOCKER_IMAGE=quay.io/pypa/manylinux1_i686
DOCKER_IMAGE=dockcross/manylinux-x86
PRE_CMD=linux32
SETUP_PY_ARGS="cmake=default,32"
- os: osx
language: generic
env: BUILD_TARGET="PYTHON_APPLE"
PYTHON_VERSION="2.7.13"
- os: osx
language: generic
env: BUILD_TARGET="PYTHON_APPLE"
PYTHON_VERSION="3.3.5"
- os: osx
language: generic
env: BUILD_TARGET="PYTHON_APPLE"
PYTHON_VERSION="3.4.5"
- os: osx
language: generic
env: BUILD_TARGET="PYTHON_APPLE"
PYTHON_VERSION="3.5.3"
- os: osx
language: generic
env: BUILD_TARGET="PYTHON_APPLE"
PYTHON_VERSION="3.6.1"
# - os: linux
# dist: trusty
# sudo: required
# - os: osx
# osx_image: xcode7.2
#addons:
# apt:
# packages:
# - cmake
#before_install:
# - if [ "$TRAVIS_OS_NAME" == "osx" ]; then brew update ; fi
# - if [ "$TRAVIS_OS_NAME" == "osx" ]; then brew install cmake; fi
# - if [ "$TRAVIS_OS_NAME" == "linux" ]; then sudo apt-get update ; fi
# - if [ "$TRAVIS_OS_NAME" == "linux" ]; then sudo apt-get install cmake; fi
#See: http://stackoverflow.com/questions/41916656/how-to-use-travis-ci-to-build-modern-c-using-modern-cmake
#dist: trusty
#sudo: required
#language:
# - cpp
#compiler:
# - gcc
#language:
# - python
install:
# Commands for building the Python packages
- if [ "$BUILD_TARGET" = "PYTHON_APPLE" ]; then git clone --recursive https://github.com/MacPython/terryfy.git; fi
- if [ "$BUILD_TARGET" = "PYTHON_APPLE" ]; then source terryfy/travis_tools.sh; fi
- if [ "$BUILD_TARGET" = "PYTHON_APPLE" ]; then get_python_environment macpython "${PYTHON_VERSION}" venv; fi
- if [ "$BUILD_TARGET" = "PYTHON_APPLE" ]; then pip install cython wheel; fi
- if [ "$BUILD_TARGET" = "PYTHON_LINUX" ]; then pip install cython wheel auditwheel; fi
# Commands for building the Python wheel
# - if [ "$DOCKER_IMAGE" == *"manylinux"* ]; then docker pull $DOCKER_IMAGE; fi
- if [ "$BUILD_TARGET" = "PYTHON_LINUX_WHEELS" ]; then docker pull $DOCKER_IMAGE; fi
#addons:
# apt:
# sources:
# - ubuntu-toolchain-r-test
# packages:
# - gcc-6
# - g++-6
# - cmake
# Build all branches
branches:
only:
- /.*/
script:
# # Link gcc-6 and g++-6 to their standard commands
# - ln -s /usr/bin/gcc-6 /usr/local/bin/gcc
# - ln -s /usr/bin/g++-6 /usr/local/bin/g++
# # Export CC and CXX to tell cmake which compiler to use
# - export CC=/usr/bin/gcc-6
# - export CXX=/usr/bin/g++-6
# # Check versions of gcc, g++ and cmake
# - gcc -v && g++ -v && cmake --version
# # Run your build commands next
# - git clone --recursive https://github.com/CoolProp/CoolProp.git
# - cd CoolProp
# - mkdir build
# - cd build
# - cmake .. -DCOOLPROP_SHARED_LIBRARY=ON
# - cmake --build . --config Release
- |
if [ "$BUILD_TARGET" = "PYTHON_LINUX" -o "$BUILD_TARGET" = "PYTHON_APPLE" ]; then
cd wrappers/Python
python setup.py bdist_wheel
pip install dist/*.whl
if [ "$BUILD_TARGET" = "PYTHON_LINUX" ]; then mkdir -p dist_audit; auditwheel repair dist/*.whl -w dist_audit/; fi
cd ../..
python -c 'from CoolProp.CoolProp import get_global_param_string; print("CoolProp gitrevision:", get_global_param_string("gitrevision"))'
fi
# Commands for building the Python wheels
- |
if [ "$BUILD_TARGET" = "PYTHON_LINUX_WHEELS_CUSTOM" ]; then
pushd wrappers/Python/manylinux/
chmod +x 00_prepare_docker.sh
./00_prepare_docker.sh "${BITNESS}"
fi
- |
if [ "$BUILD_TARGET" = "PYTHON_LINUX_WHEELS" ]; then
chmod +x .travis/build_wheels.sh
docker run --rm -v `pwd`:/io $DOCKER_IMAGE $PRE_CMD /io/.travis/build_wheels.sh "${SETUP_PY_ARGS}"
fi
# - if [ "$BUILD_TARGET" = *"PYTHON_LINUX_WHEELS"* ]; then ls wheelhouse/; fi


@@ -1,63 +0,0 @@
#!/bin/bash
SETUP_PY_ARGS="$1"
# https://github.com/pypa/python-manylinux-demo/blob/master/travis/build-wheels.sh
set -e -x
# Get the directory containing this script
# see http://stackoverflow.com/a/246128/1360263
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Install a system package required by our library
#yum install -y atlas-devel
#yum install -y cmake
#if [ "$SETUP_PY_ARGS" = *"32" ]; then
# CMAKE_URL="https://cmake.org/files/v3.6/cmake-3.6.3-Linux-i386.tar.gz"
#else
# CMAKE_URL="https://cmake.org/files/v3.7/cmake-3.7.2-Linux-x86_64.tar.gz"
#fi
#mkdir cmake && wget --no-check-certificate --quiet -O - ${CMAKE_URL} | tar --strip-components=1 -xz -C cmake
#export PATH=${DIR}/cmake/bin:${PATH}
mkdir -p /io/wheelhouse_tmp
mkdir -p /io/wheelhouse
OLD_PATH=${PATH}
# Compile wheels
for PYBIN in /opt/python/*/bin; do
PYV_MAJOR=`"${PYBIN}/python" -c "import sys;print(list(sys.version_info[:2])[0])";`
PYV_MINOR=`"${PYBIN}/python" -c "import sys;print(list(sys.version_info[:2])[1])";`
echo Detected Python ${PYV_MAJOR}.${PYV_MINOR}
if [ "${PYV_MAJOR}" -le "2" -a "${PYV_MINOR}" -lt "7" ]; then
continue
fi
export PATH="${PYBIN}:$OLD_PATH"
#ls -lh "${PYBIN}"
pip install cython wheel
#"${PYBIN}/pip" install scikit-build cmake
pushd /io/wrappers/Python
python setup.py bdist_wheel ${SETUP_PY_ARGS}
cp dist/*.whl /io/wheelhouse_tmp/
popd
#deactivate
#"${PYBIN}/pip" install cython wheel
#"${PYBIN}/pip" wheel /io/wrappers/Python --wheel-dir /io/wheelhouse_tmp/ --build-options ${SETUP_PY_ARGS}
#"${PYBIN}/pip" wheel /io/wrappers/Python -w /io/wheelhouse_tmp/
done
export PATH="$OLD_PATH"
# Bundle external shared libraries into the wheels
for whl in /io/wheelhouse_tmp/*.whl; do
auditwheel repair "$whl" -w /io/wheelhouse/
done
## Install packages and test
#for PYBIN in /opt/python/*/bin/; do
# "${PYBIN}/pip" install python-manylinux-demo --no-index -f /io/wheelhouse
# (cd "$HOME"; "${PYBIN}/nosetests" pymanylinuxdemo)
#done

CITATION.bib Normal file

@@ -0,0 +1,13 @@
@article{doi:10.1021/ie4033999,
author = {Bell, Ian H. and Wronski, Jorrit and Quoilin, Sylvain and Lemort, Vincent},
title = {Pure and Pseudo-pure Fluid Thermophysical Property Evaluation and
the Open-Source Thermophysical Property Library CoolProp},
journal = {Industrial \& Engineering Chemistry Research},
volume = {53},
number = {6},
pages = {2498--2508},
year = {2014},
doi = {10.1021/ie4033999},
URL = {http://pubs.acs.org/doi/abs/10.1021/ie4033999},
eprint = {http://pubs.acs.org/doi/pdf/10.1021/ie4033999}
}

File diff suppressed because it is too large


@@ -618,6 +618,19 @@
Timestamp = {2013.04.08}
}
@Article{Fuchs-IECR-2006,
Title = {{Solubility of Amino Acids: Influence of the pH value and the Addition of Alcoholic Cosolvents on Aqueous Solubility}},
Volume = {45},
Url = {https://doi.org/10.1021/ie0602097},
Doi = {10.1021/ie0602097},
Number = {19},
Journal = {Industrial \& Engineering Chemistry Research},
Author = {Fuchs, Dominik and Fischer, Jan and Tumakaka, Feelly and Sadowski, Gabriele},
Month = {sep},
Year = {2006},
Pages = {6578--6584},
}
@Article{Gao-JCED-2016,
Title = {{A Helmholtz Energy Equation of State for Sulfur Dioxide}},
Author = {Kehui Gao and Jiangtao Wu and Penggang Zhang and Eric W. Lemmon},
@@ -707,6 +720,45 @@
Publisher = {Elsevier}
}
@Article{Ghosh-FPE-2003,
Title = {Gas solubility in hydrocarbons-a SAFT-based approach},
Volume = {209},
Url = {https://www.sciencedirect.com/science/article/pii/S037838120300147X},
Doi = {10.1016/S0378-3812(03)00147-X},
Number = {2},
Journal = {Fluid Phase Equilibria},
Author = {Ghosh, Auleen and Chapman, Walter G and French, Ray N},
Month = jul,
Year = {2003},
Pages = {229--243},
}
@Article{Gross-IECR-2001,
Title = {{Perturbed-Chain SAFT: An Equation of State Based on a Perturbation Theory for Chain Molecules}},
Volume = {40},
Url = {https://doi.org/10.1021/ie0003887},
Doi = {10.1021/ie0003887},
Number = {4},
Journal = {Industrial \& Engineering Chemistry Research},
Author = {Gross, Joachim and Sadowski, Gabriele},
Month = {feb},
Year = {2001},
Pages = {1244--1260},
}
@Article{Gross-IECR-2002,
Title = {{Application of the Perturbed-Chain SAFT Equation of State to Associating Systems}},
Volume = {41},
Url = {https://doi.org/10.1021/ie010954d},
Doi = {10.1021/ie010954d},
Number = {22},
Journal = {Industrial \& Engineering Chemistry Research},
Author = {Gross, Joachim and Sadowski, Gabriele},
Month = {oct},
year = {2002},
pages = {5510--5515},
}
@Article{Guder-JPCRD-2009,
Title = {{A Reference Equation of State for the Thermodynamic Properties of Sulfur Hexafluoride SF6 for Temperatures from the Melting Line to 625 K and Pressures up to 150 MPa}},
Author = {C. Guder and W. Wagner},
@@ -760,6 +812,19 @@
Timestamp = {2013.04.08}
}
@Article{Held-CERD-2014,
Title = {{ePC-SAFT revised}},
Volume = {92},
Url = {https://www.sciencedirect.com/science/article/pii/S0263876214002469},
Doi = {10.1016/j.cherd.2014.05.017},
Number = {12},
Journal = {Chemical Engineering Research and Design},
Author = {Held, Christoph and Reschke, Thomas and Mohammad, Sultan and Luza, Armando and Sadowski, Gabriele},
Month = {dec},
Year = {2014},
Pages = {2884--2897},
}
@Article{Herrig-JPCRD-2019,
Title = {{A Reference Equation of State for Heavy Water}},
Author = {S. Herrig and M. Thol and R. Span and A.H. Harvey and E.W. Lemmon},
@@ -960,11 +1025,10 @@
Timestamp = {2015.05.01}
}
@Misc{IAPWS-SurfaceTension-1994,
Title = {IAPWS Release on Surface Tension of Heavy Water Substance},
Author = {IAPWS},
Year = {1994}
@Misc{IAPWS-IF97-2012,
author = {IAPWS},
title = {{Revised Release on the IAPWS Industrial Formulation 1997 for the Thermodynamic Properties of Water and Steam, revision 7}},
year = {2012},
}
@Article{Jacobsen-FPE-1992,
@@ -1117,6 +1181,19 @@
Timestamp = {2014.12.06}
}
@Article{Kleiner-JPCC-2007,
Title = {{Modeling of Polar Systems Using PCP-SAFT: An Approach to Account for Induced-Association Interactions}},
Volume = {111},
Url = {https://doi.org/10.1021/jp072640v},
Doi = {10.1021/jp072640v},
Number = {43},
Journal = {The Journal of Physical Chemistry C},
Author = {Kleiner, Matthias and Sadowski, Gabriele},
Month = {nov},
Year = {2007},
Pages = {15544--15553},
}
@Article{Kondou-IJR-2015,
Title = {{Surface tension of low GWP refrigerants R1243zf, R1234ze(Z), and R1233zd(E)}},
Author = {Chieko Kondou and Ryuichi Nagata and Noriko Nii and Shigeru Koyama and Yukihiro Higashi},
@@ -1179,7 +1256,7 @@
@Book{Kunz-BOOK-2007,
Title = {{The GERG-2004 Wide-Range Equation of State for Natural Gases and Other Mixtures}},
Author = {O. Kunz and R. Klimeck and W. Wagner and M. Jaeschke},
Publisher = {VDI Verlag GmbH},
Publisher = {VDI Verlag GmbH, D{\"u}sseldorf},
Year = {2007},
Owner = {Belli},
@@ -1459,15 +1536,15 @@
}
@Article{Lemmon-FPE-1999,
Title = {{A Helmholtz energy equation of state for calculating the thermodynamic properties of fluid mixtures}},
Author = {Eric W. Lemmon and Reiner Tillner-Roth},
Journal = {Fluid Phase Equilib.},
Year = {1999},
Pages = {1-21},
Volume = {165},
Owner = {ihb},
Timestamp = {2015.07.20}
author = {Eric W. Lemmon and Reiner Tillner-Roth},
journal = {Fluid Phase Equilib.},
title = {{A Helmholtz energy equation of state for calculating the thermodynamic properties of fluid mixtures}},
year = {1999},
pages = {1-21},
volume = {165},
doi = {10.1016/S0378-3812(99)00262-9},
owner = {ihb},
timestamp = {2015.07.20},
}
@Article{Llovell-JPCB-2013,
@@ -2337,7 +2414,7 @@
@Book{Span-BOOK-2000,
Title = {{Multiparameter Equations of State - An Accurate Source of Thermodynamic Property Data}},
Author = {Roland Span},
Publisher = {Springer},
Publisher = {Springer-Verlag, Berlin},
Year = {2000},
Owner = {Belli},
@@ -3174,4 +3251,31 @@
year = {2019},
}
@Misc{IAPWS-SurfaceTension-1994,
Title = {IAPWS Release on Surface Tension of Heavy Water Substance},
Author = {IAPWS},
Year = {1994}
}
@article{Huber-JPCRD-2016-CO2,
title = {{Reference Correlation of the Thermal Conductivity of Carbon Dioxide from the Triple Point to 1100 K and up to 200 MPa}},
volume = {45},
doi = {10.1063/1.4940892},
number = {1},
year = {2016},
journal = {Journal of Physical and Chemical Reference Data},
author = {M. L. Huber and E. A. Sykioti and M. J. Assael and R. A. Perkins},
}
@article{Laesecke-JPCRD-2017-CO2,
title = {{Reference Correlation for the Viscosity of Carbon Dioxide}},
volume = {46},
doi = {10.1063/1.4977429},
number = {1},
journal = {Journal of Physical and Chemical Reference Data},
author = {A. Laesecke and C. D. Muzny},
year = {2017},
}
@Comment{jabref-meta: databaseType:bibtex;}

View File

@@ -16,7 +16,7 @@ It was originally developed by Ian Bell, at the time a post-doc at the Universit
* The documentation is available for the `latest release <http://www.coolprop.org>`_ and the `development version <http://www.coolprop.org/dev>`_
* For any kind of question regarding CoolProp and its usage, you can ask the `CoolProp user group <https://goo.gl/Pa7FBT>`_
* For any kind of question regarding CoolProp and its usage, you can ask the `CoolProp Discussions <https://github.com/CoolProp/CoolProp/discussions>`_
* ... you might also find answers in our `FAQ <https://github.com/CoolProp/CoolProp/blob/master/FAQ.md>`_

View File

@@ -1,13 +0,0 @@
@echo off
REM ~ make latex
REM ~ cd _build/latex
REM ~ pdflatex CoolPropdoc.tex
REM ~ pdflatex CoolPropdoc.tex
REM ~ pdflatex CoolPropdoc.tex
REM ~ pdflatex CoolPropdoc.tex
REM ~ pdflatex CoolPropdoc.tex
REM ~ copy /Y CoolPropdoc.pdf ..\..\_static\
REM ~ cd ..\..
sphinx-apidoc -T -f -e -o apidoc C:\\Miniconda\\lib\\site-packages\\coolprop-5.0.0-py2.7-win-amd64.egg\\CoolProp
mingw32-make html_release

View File

@@ -1,13 +0,0 @@
@echo off
REM ~ make latex
REM ~ cd _build/latex
REM ~ pdflatex CoolPropdoc.tex
REM ~ pdflatex CoolPropdoc.tex
REM ~ pdflatex CoolPropdoc.tex
REM ~ pdflatex CoolPropdoc.tex
REM ~ pdflatex CoolPropdoc.tex
REM ~ copy /Y CoolPropdoc.pdf ..\..\_static\
REM ~ cd ..\..
rem sphinx-apidoc -f -o apidoc ../CoolProp
make html 2>&1 | wtee log.txt

View File

@@ -44,7 +44,7 @@ if isRelease:
}
else:
extlinks = {'sfdownloads': ('http://sourceforge.net/projects/coolprop/files/CoolProp/' + release + '/%s', ''),
'sfnightly': ('http://www.coolprop.dreamhosters.com/binaries/%s', ''),
'sfnightly': ('http://sourceforge.net/projects/coolprop/files/CoolProp/nightly/%s', ''),
# 'bbbinaries' : ('http://www.coolprop.dreamhosters.com:8010/binaries/%s',''),
# 'bbsphinx' : ('http://www.coolprop.dreamhosters.com:8010/sphinx/%s','')
}
@@ -69,7 +69,7 @@ if isRelease:
}
else:
doxylink = {
'cpapi': ('_static/doxygen/CoolPropDoxyLink.tag', 'http://www.coolprop.dreamhosters.com/binaries/sphinx/_static/doxygen/html')
'cpapi': ('_static/doxygen/CoolPropDoxyLink.tag', 'http://www.coolprop.org/dev/_static/doxygen/html')
}
# -- General configuration -----------------------------------------------------
@@ -83,7 +83,7 @@ extensions = ['IPython.sphinxext.ipython_console_highlighting',
'sphinx.ext.mathjax',
'sphinx.ext.extlinks',
'sphinxcontrib.bibtex',
'sphinxcontrib.napoleon',
'sphinx.ext.napoleon',
'sphinxcontrib.doxylink',
'matplotlib.sphinxext.plot_directive',
'edit_on_github', # see https://gist.github.com/mgedmin/6052926#file-edit_on_github-pyb
@@ -172,8 +172,11 @@ pygments_style = 'sphinx'
# 'both' - Both the class and the __init__ methods docstring are concatenated and inserted
autoclass_content = 'both'
# Don't generate HTML5 docs, recommendation from https://stackoverflow.com/a/56822558
html4_writer = True
## Don't generate HTML5 docs, recommendation from https://stackoverflow.com/a/56822558
#html4_writer = True
# Fix the bibtext extension
bibtex_bibfiles = ["../CoolPropBibTeXLibrary.bib"]
# -- Options for HTML output ---------------------------------------------------

74
Web/coolprop/PCSAFT.rst Normal file
View File

@@ -0,0 +1,74 @@
.. _pcsaft_backend:
**************************
PC-SAFT Equations of State
**************************
.. contents:: :depth: 2
Introduction
============
CoolProp (as of version 6.4) includes the PC-SAFT equation of state. The PC-SAFT equation of state was originally proposed in 2001 by `Gross and Sadowski <https://doi.org/10.1021/ie0003887>`_. In addition to the hard chain and dispersion terms, the PC-SAFT backend in CoolProp also includes terms for associating, polar, and electrolyte compounds. For the polar term the formulation given by Gross and Vrabec (2006) was used, and this is sometimes called PCP-SAFT. For electrolyte compounds the equations presented by `Held et al. (2014) <https://doi.org/10.1016/j.cherd.2014.05.017>`_ were used, and this version of the equation of state is sometimes called electrolyte PC-SAFT (ePC-SAFT).
Caveats
-------
.. warning:: NOT ALL PROPERTIES ARE AVAILABLE AS INPUTS/OUTPUTS
Only a limited subset of properties is currently available. You can do the following (a short sketch is given after this list):
* Flash calculations with TP, PQ, DT, QT inputs
* Calculation of some mixture flashes
.. warning:: The flash algorithm for the PC-SAFT backend is not yet as robust as for other backends. For some conditions it may fail to find the correct solution.
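As a minimal sketch of the TP and DT flashes listed above, the inputs can be passed to ``PropsSI`` just as for any other backend (the numerical values here are chosen purely for illustration)::

    import CoolProp.CoolProp as CP

    # TP flash: molar density of propane at 300 K and 1 atm with PC-SAFT
    rho = CP.PropsSI("Dmolar", "T", 300, "P", 101325, "PCSAFT::PROPANE")

    # DT flash: recover the pressure from the molar density and temperature
    p = CP.PropsSI("P", "Dmolar", rho, "T", 300, "PCSAFT::PROPANE")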
Pure Fluids
===========
Usage
-----
Similar to other backends in CoolProp, in the :ref:`high-level interface <high_level_api>`, all that you have to do to evaluate properties using the PC-SAFT equation of state is to change the backend name.
.. ipython::
In [0]: import CoolProp.CoolProp as CP
# The multi-parameter Helmholtz backend
In [0]: CP.PropsSI("T","P",101325,"Q",0,"HEOS::Propane")
# PC-SAFT
In [0]: CP.PropsSI("T","P",101325,"Q",0,"PCSAFT::PROPANE")
The same holds for the :ref:`low-level interface <low_level_api>`:
.. ipython::
In [0]: import CoolProp.CoolProp as CP
In [0]: AS = CP.AbstractState("PCSAFT", "PROPANE"); AS.update(CP.QT_INPUTS, 0, 300); print(AS.p())
The PC-SAFT equation of state is available for more than 100 fluids for which parameters were available in the literature.
Mixtures
========
Interaction Parameters
----------------------
For mixtures, PC-SAFT generally uses a binary interaction parameter between pairs of fluids. CoolProp does have some of these parameters for the PC-SAFT EOS, and it is possible to add more yourself.
.. ipython::
In [0]: import CoolProp.CoolProp as CP
In [0]: CAS_water = CP.get_fluid_param_string("WATER","CAS")
In [0]: CAS_aacid = "64-19-7"
In [0]: CP.set_mixture_binary_pair_pcsaft(CAS_water, CAS_aacid, "kij", -0.127)
In [0]: T = CP.PropsSI("T", "P", 72915.92217342, "Q", 0, "PCSAFT::WATER[0.2691800943]&ACETIC ACID[0.7308199057]")
In [0]: print(T)

View File

@@ -215,13 +215,29 @@ More Information
The tables are stored in a zipped format using the msgpack package and miniz. If you want to see what data is serialized in the tabular data, you can unzip and unpack into python (or other high-level languages) using something roughly like::
import msgpack, zlib, StringIO, numpy as np
with open(r'/path/to/home/.CoolProp/Tables/HelmholtzEOSBackend(R245fa)/single_phase_logph.bin.z','rb') as fp:
ph = zlib.decompress(fp.read())
values = msgpack.load(StringIO.StringIO(ph))
import msgpack, zlib, io, numpy as np, matplotlib.pyplot as plt
root = r'C:\Users\ian\.CoolProp\Tables\REFPROPMixtureBackend(R32[0.8292500000]&R1234yf[0.1707500000])'
with open(root+'/single_phase_logph.bin.z','rb') as fp:
values = msgpack.load(io.BytesIO(zlib.decompress(fp.read())))
revision, matrices = values[0:2]
T,h,p,rho = np.array(matrices['T']), np.array(matrices['hmolar']), np.array(matrices['p']), np.array(matrices['rhomolar'])
You'll need msgpack wrapper for your target language.
plt.plot(np.array(matrices['p']),np.array(matrices['hmolar']),'x')
with open(root+'/phase_envelope.bin.z','rb') as fp:
values = msgpack.load(io.BytesIO(zlib.decompress(fp.read())))
revision, matrices = values[0:2]
plt.plot(np.array(matrices['p']),np.array(matrices['hmolar_vap']),'-')
plt.show()
with open(root+'/single_phase_logpT.bin.z','rb') as fp:
values = msgpack.load(io.BytesIO(zlib.decompress(fp.read())))
revision, matrices = values[0:2]
T,h,p,rho = np.array(matrices['T']), np.array(matrices['hmolar']), np.array(matrices['p']), np.array(matrices['rhomolar'])
plt.plot(np.array(matrices['p']),np.array(matrices['T']),'x')
with open(root+'/phase_envelope.bin.z','rb') as fp:
values = msgpack.load(io.BytesIO(zlib.decompress(fp.read())))
revision, matrices = values[0:2]
plt.plot(np.array(matrices['p']),np.array(matrices['T']),'-')
plt.show()
You'll need a msgpack wrapper for your target language.

View File

@@ -1,6 +1,149 @@
Changelog for CoolProp
======================
6.5.0
-----
Highlights:
* Mostly small bugfixes and dependency updates
* Added ability to add predefined mixtures at runtime
* Updated transport models for CO2
Issues closed:
* `#2277 <https://github.com/CoolProp/CoolProp/issues/2277>`_ : Update State class
Pull requests merged:
* `#2207 <https://github.com/CoolProp/CoolProp/pull/2207>`_ : Verify that mole fractions are set before using them
* `#2214 <https://github.com/CoolProp/CoolProp/pull/2214>`_ : Change links from Google group to GitHub discussions
* `#2223 <https://github.com/CoolProp/CoolProp/pull/2223>`_ : Topic 2142
* `#2225 <https://github.com/CoolProp/CoolProp/pull/2225>`_ : update cyclopentane.json
* `#2230 <https://github.com/CoolProp/CoolProp/pull/2230>`_ : Topic-2200: Correct typo in n-Hexane rhoV auxilliary
* `#2238 <https://github.com/CoolProp/CoolProp/pull/2238>`_ : Incomp liqna
* `#2241 <https://github.com/CoolProp/CoolProp/pull/2241>`_ : Update index.rst
* `#2252 <https://github.com/CoolProp/CoolProp/pull/2252>`_ : Update fmt submodule to 10.0.0
* `#2261 <https://github.com/CoolProp/CoolProp/pull/2261>`_ : Create CITATION.bib
* `#2267 <https://github.com/CoolProp/CoolProp/pull/2267>`_ : implemented TCX Huber-JPCRD-2016 for CO2
* `#2268 <https://github.com/CoolProp/CoolProp/pull/2268>`_ : implemented VISC LAESECKE-JPCRD-2017-CO2
* `#2270 <https://github.com/CoolProp/CoolProp/pull/2270>`_ : Fix failing python_cibuildwheel workflows
* `#2271 <https://github.com/CoolProp/CoolProp/pull/2271>`_ : Revert "Fix failing python_cibuildwheel workflows" and update
6.4.3
-----
Highlights:
* The first automated release that updates the homepage and all binaries
Issues closed:
* `#2196 <https://github.com/CoolProp/CoolProp/issues/2196>`_ : Automatically publish release binaries
* `#2197 <https://github.com/CoolProp/CoolProp/issues/2197>`_ : Add sdist for Python
6.4.2
-----
Highlights:
* The first release after 2 years
* Fixed the values in the vicinity of the critical point of ammonia
* Added Python wheels for Python 3.6 through 3.11 on many different architectures
* Added a reverse T(p,h) function to IF97
* Exposed more functions in the CoolPropLib interface
* Fixed a faulty density calculation for ice
* Added PC-SAFT as independent backend
Deprecated:
* Dropped support for Python 2.x
Issues Closed:
* `#1867 <https://github.com/CoolProp/CoolProp/issues/1867>`_ : TypeError after importing CoolProp / pip installation on Raspberry Pi
* `#1884 <https://github.com/CoolProp/CoolProp/issues/1884>`_ : Typo in enthalpy's unit of measure
* `#1962 <https://github.com/CoolProp/CoolProp/issues/1962>`_ : Ammonia (and maybe other?) calculations fail at the critical point
* `#1963 <https://github.com/CoolProp/CoolProp/issues/1963>`_ : Some examples don't work in docs
* `#1974 <https://github.com/CoolProp/CoolProp/issues/1974>`_ : Fix reducing density for Nitrogen
* `#1980 <https://github.com/CoolProp/CoolProp/issues/1980>`_ : Wrong alias in "R1243zf.json"
* `#1981 <https://github.com/CoolProp/CoolProp/issues/1981>`_ : Python CoolProp package doesn't work on Python 3.9.0 (32 bit and 64 bit)
* `#1992 <https://github.com/CoolProp/CoolProp/issues/1992>`_ : Installation errors with Python 3.9
* `#1999 <https://github.com/CoolProp/CoolProp/issues/1999>`_ : PropsSI failed ungracefully with Water::IF97
* `#2003 <https://github.com/CoolProp/CoolProp/issues/2003>`_ : build error on MacOS 11.2 Big Sur
* `#2010 <https://github.com/CoolProp/CoolProp/issues/2010>`_ : cannot build the object library (COOLPROP_OBJECT_LIBRARY)
* `#2017 <https://github.com/CoolProp/CoolProp/issues/2017>`_ : I'm not able to install the coolprop with pip in python ...
* `#2020 <https://github.com/CoolProp/CoolProp/issues/2020>`_ : PC-SAFT integration
* `#2025 <https://github.com/CoolProp/CoolProp/issues/2025>`_ : Error in HAPropsSI when using enthalpy as an input (Excel VBA)
* `#2033 <https://github.com/CoolProp/CoolProp/issues/2033>`_ : Compatibility with Silicon chip in MacOS Big Sur 11.5.1
* `#2043 <https://github.com/CoolProp/CoolProp/issues/2043>`_ : Cannot create propertyplot for ammonia
* `#2049 <https://github.com/CoolProp/CoolProp/issues/2049>`_ : PropsSI("PHASE") calculate with ammonia, get error "options.p is not valid in saturation_T_pure_1D_P"
* `#2052 <https://github.com/CoolProp/CoolProp/issues/2052>`_ : How to install Coolprop in MacOS which has M1 chip?
* `#2053 <https://github.com/CoolProp/CoolProp/issues/2053>`_ : Small rounding issues for water
* `#2054 <https://github.com/CoolProp/CoolProp/issues/2054>`_ : Rounding for reducing density for R236ea
* `#2055 <https://github.com/CoolProp/CoolProp/issues/2055>`_ : Rounding for reducing density for nitrogen
* `#2067 <https://github.com/CoolProp/CoolProp/issues/2067>`_ : Adding a new fluid and compiled it. Not working when function is used.
* `#2073 <https://github.com/CoolProp/CoolProp/issues/2073>`_ : PHI0 density derivatives with REFPROP backend are wrong
* `#2078 <https://github.com/CoolProp/CoolProp/issues/2078>`_ : Python 3.8: Error in import
* `#2081 <https://github.com/CoolProp/CoolProp/issues/2081>`_ : Add support to release linux aarch64 wheels
* `#2095 <https://github.com/CoolProp/CoolProp/issues/2095>`_ : Issue when compiling shared library in docker on M1 - unrecognized command-line option -m64
* `#2100 <https://github.com/CoolProp/CoolProp/issues/2100>`_ : Cubic Mixtures: ideal gas contribution doesn't work properly (Rcomponent is wrong))
* `#2113 <https://github.com/CoolProp/CoolProp/issues/2113>`_ : Installation failed when using command: pip install coolprop
* `#2114 <https://github.com/CoolProp/CoolProp/issues/2114>`_ : Trouble installing MATLAB wrapper via Python
* `#2119 <https://github.com/CoolProp/CoolProp/issues/2119>`_ : Python bindings: Call for help from the community
* `#2126 <https://github.com/CoolProp/CoolProp/issues/2126>`_ : CoolProp 6.4.2dev0, MATLAB wrapper with Python 3.9
* `#2149 <https://github.com/CoolProp/CoolProp/issues/2149>`_ : Bug in the departure function parameters for GeneralizedHFC in CoolProp
* `#2178 <https://github.com/CoolProp/CoolProp/issues/2178>`_ : Please update github release
* `#2184 <https://github.com/CoolProp/CoolProp/issues/2184>`_ : CoolProp Online throwing internal error
* `#2186 <https://github.com/CoolProp/CoolProp/issues/2186>`_ : Ammonia critical point issue behaviour
* `#2187 <https://github.com/CoolProp/CoolProp/issues/2187>`_ : The online version of CoolProp cannot work
* `#2190 <https://github.com/CoolProp/CoolProp/issues/2190>`_ : Humid air property function HAPropsSI is not reversible
* `#2192 <https://github.com/CoolProp/CoolProp/issues/2192>`_ : Update the changelog for v6.4.2
Pull requests merged:
* `#1977 <https://github.com/CoolProp/CoolProp/pull/1977>`_ : Add Rust Wrapper
* `#1990 <https://github.com/CoolProp/CoolProp/pull/1990>`_ : Fix cxx17
* `#1993 <https://github.com/CoolProp/CoolProp/pull/1993>`_ : LibreOffice: Use pip for installing CoolProp python package
* `#2005 <https://github.com/CoolProp/CoolProp/pull/2005>`_ : Fix cxx17
* `#2008 <https://github.com/CoolProp/CoolProp/pull/2008>`_ : Fix build on macOS
* `#2011 <https://github.com/CoolProp/CoolProp/pull/2011>`_ : A minor correction in case of COOLPROP_OBJECT_LIBRARY=ON
* `#2050 <https://github.com/CoolProp/CoolProp/pull/2050>`_ : Update index.rst for the C# Wrapper
* `#2056 <https://github.com/CoolProp/CoolProp/pull/2056>`_ : Fix typo in iQ description
* `#2058 <https://github.com/CoolProp/CoolProp/pull/2058>`_ : IF97 Backend Q and Phase Patch
* `#2062 <https://github.com/CoolProp/CoolProp/pull/2062>`_ : Updated info for the C# Wrapper
* `#2076 <https://github.com/CoolProp/CoolProp/pull/2076>`_ : Included CoolPropJavascriptDemo
* `#2084 <https://github.com/CoolProp/CoolProp/pull/2084>`_ : Add functions to CoolPropLib
* `#2097 <https://github.com/CoolProp/CoolProp/pull/2097>`_ : Add github action to build python wheels (including python 3.9 and 3.10)
* `#2098 <https://github.com/CoolProp/CoolProp/pull/2098>`_ : Github Actions: add shared library and doxygen workflows.
* `#2101 <https://github.com/CoolProp/CoolProp/pull/2101>`_ : Fix Rcomponent in calc_alpha0_deriv_nocache
* `#2103 <https://github.com/CoolProp/CoolProp/pull/2103>`_ : Lint: use automated tooling to reformat C++ and CMakeLists files
* `#2105 <https://github.com/CoolProp/CoolProp/pull/2105>`_ : Bump Catch 1 to Catch v3.0.0-preview4
* `#2106 <https://github.com/CoolProp/CoolProp/pull/2106>`_ : Cppcheck workflow
* `#2107 <https://github.com/CoolProp/CoolProp/pull/2107>`_ : Add bound-check to setter and getter functions
* `#2108 <https://github.com/CoolProp/CoolProp/pull/2108>`_ : Format macros + strip trailing whitespaces
* `#2109 <https://github.com/CoolProp/CoolProp/pull/2109>`_ : Configure upload to pypi/testpypi
* `#2110 <https://github.com/CoolProp/CoolProp/pull/2110>`_ : Fix mac cibuildwheel
* `#2116 <https://github.com/CoolProp/CoolProp/pull/2116>`_ : Fix mac sed
* `#2118 <https://github.com/CoolProp/CoolProp/pull/2118>`_ : Python bindings upload to (test)pypi fixes
* `#2120 <https://github.com/CoolProp/CoolProp/pull/2120>`_ : Missing a py37 build for Windows x64 + fix py38 win32 and py39 win32
* `#2122 <https://github.com/CoolProp/CoolProp/pull/2122>`_ : Simplify CoolProp python bindings cibuildwheel
* `#2132 <https://github.com/CoolProp/CoolProp/pull/2132>`_ : Bump IF97 to included reverse T(P,H) patch [skip ci]
* `#2133 <https://github.com/CoolProp/CoolProp/pull/2133>`_ : New functions for CoolPropLib
* `#2134 <https://github.com/CoolProp/CoolProp/pull/2134>`_ : Add fluid_param_string and get_JSONstring to cubic backend
* `#2135 <https://github.com/CoolProp/CoolProp/pull/2135>`_ : AbstractState functions for CoolPropLib
* `#2143 <https://github.com/CoolProp/CoolProp/pull/2143>`_ : Corrected rho_ice route by replacing g_ice with dg_dp_Ice in Ice.cpp
* `#2146 <https://github.com/CoolProp/CoolProp/pull/2146>`_ : Bump FindMathematica to most recent version
* `#2161 <https://github.com/CoolProp/CoolProp/pull/2161>`_ : improve PC-SAFT flash
* `#2164 <https://github.com/CoolProp/CoolProp/pull/2164>`_ : Updated info about SharpProp (3-party wrapper for C#)
* `#2165 <https://github.com/CoolProp/CoolProp/pull/2165>`_ : Added info about PyFluids (3-party wrapper for Python)
* `#2173 <https://github.com/CoolProp/CoolProp/pull/2173>`_ : Prevent crashes near critical density due to saturation calc
* `#2176 <https://github.com/CoolProp/CoolProp/pull/2176>`_ : add PCSAFT page in docs
* `#2191 <https://github.com/CoolProp/CoolProp/pull/2191>`_ : Build the docs for v6.4.2
6.4.1
-----

View File

@@ -14,5 +14,6 @@ This section includes information about the CoolProp software, listings of input
Configuration.rst
REFPROP.rst
Cubics.rst
PCSAFT.rst
examples.rst
changelog.rst

View File

@@ -3,10 +3,9 @@
#include <iostream>
#include "crossplatform_shared_ptr.h"
using namespace CoolProp;
int main()
{
shared_ptr<AbstractState> Water(AbstractState::factory("HEOS","Water"));
Water->update(PQ_INPUTS, 101325, 0); // SI units
int main() {
shared_ptr<AbstractState> Water(AbstractState::factory("HEOS", "Water"));
Water->update(PQ_INPUTS, 101325, 0); // SI units
std::cout << "T: " << Water->T() << " K" << std::endl;
std::cout << "rho': " << Water->rhomass() << " kg/m^3" << std::endl;
std::cout << "rho': " << Water->rhomolar() << " mol/m^3" << std::endl;

View File

@@ -3,12 +3,12 @@
#include <vector>
#include <time.h>
int main(){
int main() {
double t1, t2;
const long buffersize = 500;
long errcode = 0;
char buffer[buffersize];
long handle = AbstractState_factory("BICUBIC&HEOS","Water", &errcode, buffer, buffersize);
long handle = AbstractState_factory("BICUBIC&HEOS", "Water", &errcode, buffer, buffersize);
long _HmassP = get_input_pair_index("HmassP_INPUTS");
long _Dmass = get_param_index("Dmass");
long len = 20000;
@@ -16,11 +16,11 @@ int main(){
std::vector<double> p = linspace(2.8e6, 3.0e6, len);
double summer = 0;
t1 = clock();
for (long i = 0; i < len; ++i){
for (long i = 0; i < len; ++i) {
AbstractState_update(handle, _HmassP, h[i], p[i], &errcode, buffer, buffersize);
summer += AbstractState_keyed_output(handle, _Dmass, &errcode, buffer, buffersize);
}
t2 = clock();
std::cout << format("value(all): %0.13g, %g us/call\n", summer, ((double)(t2-t1))/CLOCKS_PER_SEC/double(len)*1e6);
std::cout << format("value(all): %0.13g, %g us/call\n", summer, ((double)(t2 - t1)) / CLOCKS_PER_SEC / double(len) * 1e6);
return EXIT_SUCCESS;
}

View File

@@ -3,7 +3,7 @@
#include <vector>
#include <time.h>
int main(){
int main() {
const long buffer_size = 1000, length = 100000;
long ierr;
char herr[buffer_size];
@@ -13,11 +13,10 @@ int main(){
std::vector<double> input2 = linspace(2.8e6, 3.0e6, length);
long input_pair = get_input_pair_index("HmassP_INPUTS");
double t1 = clock();
AbstractState_update_and_common_out(handle, input_pair, &(input1[0]), &(input2[0]), length,
&(T[0]), &(p[0]), &(rhomolar[0]), &(hmolar[0]), &(smolar[0]),
&ierr, herr, buffer_size);
AbstractState_update_and_common_out(handle, input_pair, &(input1[0]), &(input2[0]), length, &(T[0]), &(p[0]), &(rhomolar[0]), &(hmolar[0]),
&(smolar[0]), &ierr, herr, buffer_size);
double t2 = clock();
std::cout << format("value(commons): %g us/call\n", ((double)(t2-t1))/CLOCKS_PER_SEC/double(length)*1e6);
std::cout << format("value(commons): %g us/call\n", ((double)(t2 - t1)) / CLOCKS_PER_SEC / double(length) * 1e6);
std::vector<long> outputs(5);
outputs[0] = get_param_index("T");
@@ -27,9 +26,8 @@ int main(){
outputs[4] = get_param_index("Smolar");
std::vector<double> out1(length), out2(length), out3(length), out4(length), out5(length);
t1 = clock();
AbstractState_update_and_5_out(handle, input_pair, &(input1[0]), &(input2[0]), length,
&(outputs[0]), &(out1[0]), &(out2[0]), &(out3[0]), &(out4[0]), &(out5[0]),
&ierr, herr, buffer_size);
AbstractState_update_and_5_out(handle, input_pair, &(input1[0]), &(input2[0]), length, &(outputs[0]), &(out1[0]), &(out2[0]), &(out3[0]),
&(out4[0]), &(out5[0]), &ierr, herr, buffer_size);
t2 = clock();
std::cout << format("value(user-specified): %g us/call\n", ((double)(t2-t1))/CLOCKS_PER_SEC/double(length)*1e6);
std::cout << format("value(user-specified): %g us/call\n", ((double)(t2 - t1)) / CLOCKS_PER_SEC / double(length) * 1e6);
}

View File

@@ -2,121 +2,106 @@
#include "Backends/Helmholtz/MixtureDerivatives.h"
#include <iostream>
using namespace CoolProp;
int main()
{
int main() {
// Ethane/Propane mixture, 25/75 molar
std::vector<std::string> components(2,"Ethane"); components[1] = "Propane";
std::vector<CoolPropDbl> z(2,0.25); z[1] = 0.75;
std::vector<std::string> components(2, "Ethane");
components[1] = "Propane";
std::vector<CoolPropDbl> z(2, 0.25);
z[1] = 0.75;
shared_ptr<HelmholtzEOSMixtureBackend> HEOS(new HelmholtzEOSMixtureBackend(components));
HelmholtzEOSMixtureBackend &rHEOS = *(HEOS.get());
HelmholtzEOSMixtureBackend& rHEOS = *(HEOS.get());
HEOS->set_mole_fractions(z);
HEOS->specify_phase(iphase_gas); // So that we don't do a phase check
HEOS->specify_phase(iphase_gas); // So that we don't do a phase check
HEOS->update(DmolarT_INPUTS, 300, 300);
std::vector<std::string> terms;
terms.push_back("p");
terms.push_back("p2(deriv)");
terms.push_back("rhor");
terms.push_back("Tr");
terms.push_back("dalphar_dDelta");
terms.push_back("dTr_dxi");
terms.push_back("drhor_dxi");
terms.push_back("ndpdV__constT_n");
terms.push_back("dpdxj__constT_V_xi");
terms.push_back("dalphar_dxi|T,V,xk");
terms.push_back("dalphar_dxi|tau,delta,xk");
terms.push_back("ln_fugacity_coefficient");
terms.push_back("ndpdni__constT_V_nj");
terms.push_back("tau*d_ndalphardni_dTau");
terms.push_back("delta*d_ndalphardni_dDelta");
terms.push_back("p2(deriv)");
terms.push_back("rhor");
terms.push_back("Tr");
terms.push_back("dalphar_dDelta");
terms.push_back("dTr_dxi");
terms.push_back("drhor_dxi");
terms.push_back("ndpdV__constT_n");
terms.push_back("dpdxj__constT_V_xi");
terms.push_back("dalphar_dxi|T,V,xk");
terms.push_back("dalphar_dxi|tau,delta,xk");
terms.push_back("ln_fugacity_coefficient");
terms.push_back("ndpdni__constT_V_nj");
terms.push_back("tau*d_ndalphardni_dTau");
terms.push_back("delta*d_ndalphardni_dDelta");
/// ------------- GOOD above this line -------------------------
terms.push_back("d_ndalphardni_dxj__constdelta_tau_xi");
terms.push_back("d_ndalphardni_dxj__constT_V_xi");
terms.push_back("dln_fugacity_coefficient_dxj__constT_p_xi");
terms.push_back("d2nalphar_dxj_dni__constT_V");
terms.push_back("delta*d2alphar_dxi_dDelta");
for (std::vector<std::string>::iterator it = terms.begin(); it != terms.end(); ++it)
{
if (!it->compare("p")){
/// ------------- GOOD above this line -------------------------
terms.push_back("d_ndalphardni_dxj__constdelta_tau_xi");
terms.push_back("d_ndalphardni_dxj__constT_V_xi");
terms.push_back("dln_fugacity_coefficient_dxj__constT_p_xi");
terms.push_back("d2nalphar_dxj_dni__constT_V");
terms.push_back("delta*d2alphar_dxi_dDelta");
for (std::vector<std::string>::iterator it = terms.begin(); it != terms.end(); ++it) {
if (!it->compare("p")) {
printf("p: %0.16g\n", HEOS->p());
}
else if (!it->compare("p2(deriv)")){
printf("p calculated by rho*R*T*(1+delta*deltadar_dDelta): %0.16Lg\n", HEOS->rhomolar()*HEOS->gas_constant()*HEOS->T()*(1+HEOS->delta()*HEOS->dalphar_dDelta()));
}
else if (!it->compare("dalphar_dDelta")){
} else if (!it->compare("p2(deriv)")) {
printf("p calculated by rho*R*T*(1+delta*deltadar_dDelta): %0.16Lg\n",
HEOS->rhomolar() * HEOS->gas_constant() * HEOS->T() * (1 + HEOS->delta() * HEOS->dalphar_dDelta()));
} else if (!it->compare("dalphar_dDelta")) {
printf("dalphar_dDelta: %0.16Lg\n", HEOS->dalphar_dDelta());
}
else if (!it->compare("rhor")){
printf("rhor: %0.16g\n", HEOS->get_reducing_state().rhomolar);
}
else if (!it->compare("Tr")){
printf("Tr: %0.16g\n", HEOS->get_reducing_state().T);
}
else if (!it->compare("dTr_dxi")){
printf("dTr_dxi: %0.16Lg\n", HEOS->Reducing->dTrdxi__constxj(rHEOS.get_mole_fractions(), 0, XN_DEPENDENT));
}
else if (!it->compare("drhor_dxi")){
printf("drhor_dxi: %0.16Lg\n", HEOS->Reducing->drhormolardxi__constxj(rHEOS.get_mole_fractions(), 0, XN_DEPENDENT));
}
else if(!it->compare("ndpdV__constT_n")){
} else if (!it->compare("rhor")) {
printf("rhor: %0.16g\n", HEOS->get_reducing_state().rhomolar);
} else if (!it->compare("Tr")) {
printf("Tr: %0.16g\n", HEOS->get_reducing_state().T);
} else if (!it->compare("dTr_dxi")) {
printf("dTr_dxi: %0.16Lg\n", HEOS->Reducing->dTrdxi__constxj(rHEOS.get_mole_fractions(), 0, XN_DEPENDENT));
} else if (!it->compare("drhor_dxi")) {
printf("drhor_dxi: %0.16Lg\n", HEOS->Reducing->drhormolardxi__constxj(rHEOS.get_mole_fractions(), 0, XN_DEPENDENT));
} else if (!it->compare("ndpdV__constT_n")) {
printf("ndpdV__constT_n: %0.16Lg\n", MixtureDerivatives::ndpdV__constT_n(rHEOS));
}
else if(!it->compare("ln_fugacity_coefficient")){
printf("ln_fugacity_coefficient(0): %0.16Lg\n", MixtureDerivatives::ln_fugacity_coefficient(rHEOS, 0, XN_DEPENDENT));
} else if (!it->compare("ln_fugacity_coefficient")) {
printf("ln_fugacity_coefficient(0): %0.16Lg\n", MixtureDerivatives::ln_fugacity_coefficient(rHEOS, 0, XN_DEPENDENT));
printf("ln_fugacity_coefficient(1): %0.16Lg\n", MixtureDerivatives::ln_fugacity_coefficient(rHEOS, 1, XN_DEPENDENT));
}
else if(!it->compare("dln_fugacity_coefficient_dxj__constT_p_xi")){
printf("dln_fugacity_coefficient_dxj__constT_p_xi(0,0): %0.16Lg\n", MixtureDerivatives::dln_fugacity_coefficient_dxj__constT_p_xi(rHEOS, 0, 0, XN_DEPENDENT));
} else if (!it->compare("dln_fugacity_coefficient_dxj__constT_p_xi")) {
printf("dln_fugacity_coefficient_dxj__constT_p_xi(0,0): %0.16Lg\n",
MixtureDerivatives::dln_fugacity_coefficient_dxj__constT_p_xi(rHEOS, 0, 0, XN_DEPENDENT));
//printf("dln_fugacity_coefficient_dxj__constT_p_xi(0,1): %0.16Lg\n", MixtureDerivatives::dln_fugacity_coefficient_dxj__constT_p_xi(rHEOS, 0, 1, XN_DEPENDENT));
printf("dln_fugacity_coefficient_dxj__constT_p_xi(1,0): %0.16Lg\n", MixtureDerivatives::dln_fugacity_coefficient_dxj__constT_p_xi(rHEOS, 1, 0, XN_DEPENDENT));
printf("dln_fugacity_coefficient_dxj__constT_p_xi(1,0): %0.16Lg\n",
MixtureDerivatives::dln_fugacity_coefficient_dxj__constT_p_xi(rHEOS, 1, 0, XN_DEPENDENT));
//printf("dln_fugacity_coefficient_dxj__constT_p_xi(1,1): %0.16Lg\n", MixtureDerivatives::dln_fugacity_coefficient_dxj__constT_p_xi(rHEOS, 1, 1, XN_DEPENDENT));
}
else if(!it->compare("delta*d_ndalphardni_dDelta")){
printf("delta*d_ndalphardni_dDelta(0): %0.16Lg\n", rHEOS.delta()*MixtureDerivatives::d_ndalphardni_dDelta(rHEOS, 0, XN_DEPENDENT));
printf("delta*d_ndalphardni_dDelta(1): %0.16Lg\n", rHEOS.delta()*MixtureDerivatives::d_ndalphardni_dDelta(rHEOS, 1, XN_DEPENDENT));
}
else if(!it->compare("tau*d_ndalphardni_dTau")){
printf("tau*d_ndalphardni_dTau(0): %0.16Lg\n", rHEOS.tau()*MixtureDerivatives::d_ndalphardni_dTau(rHEOS, 0, XN_DEPENDENT));
printf("tau*d_ndalphardni_dTau(1): %0.16Lg\n", rHEOS.tau()*MixtureDerivatives::d_ndalphardni_dTau(rHEOS, 1, XN_DEPENDENT));
}
else if(!it->compare("d_ndalphardni_dxj__constdelta_tau_xi")){
printf("d_ndalphardni_dxj__constdelta_tau_xi(0, 0): %0.16Lg\n", MixtureDerivatives::d_ndalphardni_dxj__constdelta_tau_xi(rHEOS, 0, 0, XN_DEPENDENT));
}
else if(!it->compare("d_ndalphardni_dxj__constT_V_xi")){
printf("d_ndalphardni_dxj__constT_V_xi(0, 0): %0.16Lg\n", MixtureDerivatives::d_ndalphardni_dxj__constT_V_xi(rHEOS, 0, 0, XN_DEPENDENT));
}
else if(!it->compare("d2nalphar_dxj_dni__constT_V")){
} else if (!it->compare("delta*d_ndalphardni_dDelta")) {
printf("delta*d_ndalphardni_dDelta(0): %0.16Lg\n", rHEOS.delta() * MixtureDerivatives::d_ndalphardni_dDelta(rHEOS, 0, XN_DEPENDENT));
printf("delta*d_ndalphardni_dDelta(1): %0.16Lg\n", rHEOS.delta() * MixtureDerivatives::d_ndalphardni_dDelta(rHEOS, 1, XN_DEPENDENT));
} else if (!it->compare("tau*d_ndalphardni_dTau")) {
printf("tau*d_ndalphardni_dTau(0): %0.16Lg\n", rHEOS.tau() * MixtureDerivatives::d_ndalphardni_dTau(rHEOS, 0, XN_DEPENDENT));
printf("tau*d_ndalphardni_dTau(1): %0.16Lg\n", rHEOS.tau() * MixtureDerivatives::d_ndalphardni_dTau(rHEOS, 1, XN_DEPENDENT));
} else if (!it->compare("d_ndalphardni_dxj__constdelta_tau_xi")) {
printf("d_ndalphardni_dxj__constdelta_tau_xi(0, 0): %0.16Lg\n",
MixtureDerivatives::d_ndalphardni_dxj__constdelta_tau_xi(rHEOS, 0, 0, XN_DEPENDENT));
} else if (!it->compare("d_ndalphardni_dxj__constT_V_xi")) {
printf("d_ndalphardni_dxj__constT_V_xi(0, 0): %0.16Lg\n", MixtureDerivatives::d_ndalphardni_dxj__constT_V_xi(rHEOS, 0, 0, XN_DEPENDENT));
} else if (!it->compare("d2nalphar_dxj_dni__constT_V")) {
printf("d2nalphar_dxj_dni__constT_V(0,0): %0.16Lg\n", MixtureDerivatives::d2nalphar_dxj_dni__constT_V(rHEOS, 0, 0, XN_DEPENDENT));
//printf("d2nalphar_dxj_dni__constT_V(0,1): %0.16Lg\n", MixtureDerivatives::d2nalphar_dxj_dni__constT_V(rHEOS, 0, 1, XN_DEPENDENT));
printf("d2nalphar_dxj_dni__constT_V(1,0): %0.16Lg\n", MixtureDerivatives::d2nalphar_dxj_dni__constT_V(rHEOS, 1, 0, XN_DEPENDENT));
//printf("d2nalphar_dxj_dni__constT_V(1,1): %0.16Lg\n", MixtureDerivatives::d2nalphar_dxj_dni__constT_V(rHEOS, 1, 1, XN_DEPENDENT));
}
else if(!it->compare("dalphar_dxi|T,V,xk")){
printf("dalphar_dxi|T,V,xk(0): %0.16Lg\n", MixtureDerivatives::dalphar_dxj__constT_V_xi(rHEOS, 0, XN_DEPENDENT));
printf("d2nalphar_dxj_dni__constT_V(1,0): %0.16Lg\n", MixtureDerivatives::d2nalphar_dxj_dni__constT_V(rHEOS, 1, 0, XN_DEPENDENT));
//printf("d2nalphar_dxj_dni__constT_V(1,1): %0.16Lg\n", MixtureDerivatives::d2nalphar_dxj_dni__constT_V(rHEOS, 1, 1, XN_DEPENDENT));
} else if (!it->compare("dalphar_dxi|T,V,xk")) {
printf("dalphar_dxi|T,V,xk(0): %0.16Lg\n", MixtureDerivatives::dalphar_dxj__constT_V_xi(rHEOS, 0, XN_DEPENDENT));
//printf("dalphar_dxi(1): %0.16Lg\n", MixtureDerivatives::dalphar_dxi(rHEOS, 1, XN_DEPENDENT));
}
else if(!it->compare("dalphar_dxi|tau,delta,xk")){
printf("dalphar_dxi|tau_delta_xk(0): %0.16Lg\n", MixtureDerivatives::dalphar_dxi(rHEOS, 0, XN_DEPENDENT));
} else if (!it->compare("dalphar_dxi|tau,delta,xk")) {
printf("dalphar_dxi|tau_delta_xk(0): %0.16Lg\n", MixtureDerivatives::dalphar_dxi(rHEOS, 0, XN_DEPENDENT));
//printf("dalphar_dxi|tau,delta,xk(1): %0.16Lg\n", MixtureDerivatives::dalphar_dxi(rHEOS, 1, XN_DEPENDENT));
}
else if(!it->compare("delta*d2alphar_dxi_dDelta")){
printf("delta*d2alphar_dxi_dDelta(0): %0.16Lg\n", MixtureDerivatives::d2alphar_dxi_dDelta(rHEOS, 0, XN_DEPENDENT));
} else if (!it->compare("delta*d2alphar_dxi_dDelta")) {
printf("delta*d2alphar_dxi_dDelta(0): %0.16Lg\n", MixtureDerivatives::d2alphar_dxi_dDelta(rHEOS, 0, XN_DEPENDENT));
//printf("d2alphar_dxi_dDelta(1): %0.16Lg\n", MixtureDerivatives::d2alphar_dxi_dDelta(rHEOS, 1, XN_DEPENDENT));
}
else if(!it->compare("ndpdni__constT_V_nj")){
} else if (!it->compare("ndpdni__constT_V_nj")) {
printf("ndpdni__constT_V_nj(0): %0.16Lg\n", MixtureDerivatives::ndpdni__constT_V_nj(rHEOS, 0, XN_DEPENDENT));
//printf("ndpdni__constT_V_nj(1): %0.16Lg\n", MixtureDerivatives::ndpdni__constT_V_nj(rHEOS, 1, XN_DEPENDENT));
}
else if(!it->compare("dpdxj__constT_V_xi")){
} else if (!it->compare("dpdxj__constT_V_xi")) {
printf("dpdxj__constT_V_xi(0): %0.16Lg\n", MixtureDerivatives::dpdxj__constT_V_xi(rHEOS, 0, XN_DEPENDENT));
//printf("dpdxj__constT_V_xi(1): %0.16Lg\n", MixtureDerivatives::dpdxj__constT_V_xi(rHEOS, 1, XN_DEPENDENT));
}
}
}
return EXIT_SUCCESS;
}

View File

@@ -2,22 +2,24 @@
#include <iostream>
#include <stdlib.h>
using namespace CoolProp;
int main()
{
int main() {
// First type (slowest, due to most string processing, exposed in DLL)
std::cout << PropsSI("Dmolar","T",298,"P",1e5,"Propane[0.5]&Ethane[0.5]") << std::endl; // Default backend is HEOS
std::cout << PropsSI("Dmolar","T",298,"P",1e5,"HEOS::Propane[0.5]&Ethane[0.5]") << std::endl;
std::cout << PropsSI("Dmolar","T",298,"P",1e5,"REFPROP::Propane[0.5]&Ethane[0.5]") << std::endl;
std::cout << PropsSI("Dmolar", "T", 298, "P", 1e5, "Propane[0.5]&Ethane[0.5]") << std::endl; // Default backend is HEOS
std::cout << PropsSI("Dmolar", "T", 298, "P", 1e5, "HEOS::Propane[0.5]&Ethane[0.5]") << std::endl;
std::cout << PropsSI("Dmolar", "T", 298, "P", 1e5, "REFPROP::Propane[0.5]&Ethane[0.5]") << std::endl;
// Vector example
std::vector<double> z(2,0.5);
std::vector<double> z(2, 0.5);
// Second type (C++ only, a bit faster, allows for vector inputs and outputs)
std::vector<std::string> fluids; fluids.push_back("Propane"); fluids.push_back("Ethane");
std::vector<std::string> outputs; outputs.push_back("Dmolar");
std::vector<double> T(1,298), p(1,1e5);
std::cout << PropsSImulti(outputs,"T", T, "P", p, "", fluids, z)[0][0] << std::endl; // Default backend is HEOS
std::cout << PropsSImulti(outputs,"T", T, "P", p, "HEOS", fluids, z)[0][0] << std::endl;
std::vector<std::string> fluids;
fluids.push_back("Propane");
fluids.push_back("Ethane");
std::vector<std::string> outputs;
outputs.push_back("Dmolar");
std::vector<double> T(1, 298), p(1, 1e5);
std::cout << PropsSImulti(outputs, "T", T, "P", p, "", fluids, z)[0][0] << std::endl; // Default backend is HEOS
std::cout << PropsSImulti(outputs, "T", T, "P", p, "HEOS", fluids, z)[0][0] << std::endl;
// Comment me out if REFPROP is not installed
std::cout << PropsSImulti(outputs,"T", T, "P", p, "REFPROP", fluids, z)[0][0] << std::endl;
std::cout << PropsSImulti(outputs, "T", T, "P", p, "REFPROP", fluids, z)[0][0] << std::endl;
// All done return
return EXIT_SUCCESS;
}

View File

@@ -6,18 +6,21 @@ C# Wrapper
.. contents:: :depth: 2
Nuget package (3-party wrapper)
============
NuGet packages (third-party wrappers)
=====================================
SharpFluids
-----------
This C# NuGet package uses CoolProp to perform all fluid property lookups. It combines the speed of the low-level lookup with a units-of-measurement system packed into an easy-to-use API. If you are new to using CoolProp, this is a good place to start.
How to start
How to start
- Create a new C# Console App (.NET Framework) project in Visual Studio
- Right click your new project and press 'Manage NuGet Packages'
- Go to 'Browse' and search for 'SharpFluids' and press 'Install'
- Add this to the top of your code ::
- Add this to the top of your code ::
using SharpFluids;
using UnitsNet;
@@ -32,6 +35,99 @@ How to start
- If you have problems or questions, `Find SharpFluids at Github <https://github.com/MadsKirkFoged/SharpFluids>`_.
SharpProp
---------
SharpProp is a simple, full-featured, lightweight, cross-platform CoolProp wrapper for C#. It is published on `NuGet <https://www.nuget.org/packages/SharpProp/>`_.
All CoolProp features are included: thermophysical properties of pure fluids, mixtures and humid air.
Calculations of thermophysical properties are *unit safe* (thanks to `UnitsNet <https://github.com/angularsen/UnitsNet>`_). This helps you avoid errors caused by incorrect dimensions of quantities and saves the time otherwise spent finding and eliminating them. In addition, you can convert all values to many other units without difficulty.
You can also easily convert the results to a JSON string, add new properties or inputs for lookups, and more.
Examples
^^^^^^^^
To calculate the specific heat of saturated water vapor at *1 atm*: ::
using System;
using SharpProp;
using UnitsNet.NumberExtensions.NumberToPressure;
using UnitsNet.Units;
::
var waterVapour = new Fluid(FluidsList.Water)
.DewPointAt((1).Atmospheres());
Console.WriteLine(waterVapour.SpecificHeat.JoulesPerKilogramKelvin); // 2079.937085633241
Console.WriteLine(waterVapour.SpecificHeat); // 2.08 kJ/kg·K
Console.WriteLine(waterVapour.SpecificHeat
.ToUnit(SpecificEntropyUnit.CaloriePerGramKelvin)); // 0.5 cal/g·K
To calculate the dynamic viscosity of propylene glycol aqueous solution with *60 %* mass fraction at *100 kPa* and *-20 °C*: ::
using System;
using SharpProp;
using UnitsNet.NumberExtensions.NumberToPressure;
using UnitsNet.NumberExtensions.NumberToRatio;
using UnitsNet.NumberExtensions.NumberToTemperature;
using UnitsNet.Units;
::
var propyleneGlycol = new Fluid(FluidsList.MPG, (60).Percent())
.WithState(Input.Pressure((100).Kilopascals()),
Input.Temperature((-20).DegreesCelsius()));
Console.WriteLine(propyleneGlycol.DynamicViscosity?.PascalSeconds); // 0.13907391053938878
Console.WriteLine(propyleneGlycol.DynamicViscosity); // 139.07 mPa·s
Console.WriteLine(propyleneGlycol.DynamicViscosity?
.ToUnit(DynamicViscosityUnit.Poise)); // 1.39 P
To calculate the density of ethanol aqueous solution (with ethanol *40 %* mass fraction) at *200 kPa* and *277.15 K*: ::
using System;
using System.Collections.Generic;
using SharpProp;
using UnitsNet;
using UnitsNet.NumberExtensions.NumberToPressure;
using UnitsNet.NumberExtensions.NumberToRatio;
using UnitsNet.NumberExtensions.NumberToTemperature;
using UnitsNet.Units;
::
var mixture = new Mixture(
new List<FluidsList> {FluidsList.Water, FluidsList.Ethanol},
new List<Ratio> {(60).Percent(), (40).Percent()})
.WithState(Input.Pressure((200).Kilopascals()),
Input.Temperature((277.15).Kelvins()));
Console.WriteLine(mixture.Density.KilogramsPerCubicMeter); // 883.3922771627759
Console.WriteLine(mixture.Density); // 883.39 kg/m3
Console.WriteLine(mixture.Density.ToUnit(DensityUnit.GramPerDeciliter)); // 88.34 g/dl
To calculate the wet bulb temperature of humid air at *99 kPa*, *30 °C* and *50 %* relative humidity: ::
using System;
using SharpProp;
using UnitsNet.NumberExtensions.NumberToPressure;
using UnitsNet.NumberExtensions.NumberToRelativeHumidity;
using UnitsNet.NumberExtensions.NumberToTemperature;
using UnitsNet.Units;
::
var humidAir = new HumidAir().WithState(
InputHumidAir.Pressure((99).Kilopascals()),
InputHumidAir.Temperature((30).DegreesCelsius()),
InputHumidAir.RelativeHumidity((50).Percent()));
Console.WriteLine(humidAir.WetBulbTemperature.Kelvins); // 295.0965785590792
Console.WriteLine(humidAir.WetBulbTemperature); // 21.95 °C
Console.WriteLine(humidAir.WetBulbTemperature
.ToUnit(TemperatureUnit.DegreeFahrenheit)); // 71.5 °F
For any questions or more examples, `see SharpProp on GitHub <https://github.com/portyanikhin/SharpProp>`_.
Pre-compiled Binaries
=====================
@@ -51,7 +147,7 @@ When you are finished, you should have a folder layout something like ::
|- AbstractState.cs
|- Configuration.cs
|- ...
There is example code :ref:`at the end of this page <csharp_example>`
Windows
@@ -74,7 +170,7 @@ Same idea as windows, but command line is just a bit different::
mcs Example.cs platform-independent/*.cs -platform:x64
./Example
Use `-platform:x86` to tell C# that your shared library is 32-bit if you are on 32-bit, or `-platform:x64` if you are on a 64-bit platform.
User-Compiled Binaries
@@ -92,7 +188,7 @@ OSX
---
For OSX, to install the necessary tools using homebrew, you can do::
brew install mono
Linux
@@ -128,8 +224,8 @@ If you want to change the package that CoolProp resides in, you can do so by cha
cmake .. -DCOOLPROP_CSHARP_MODULE=ON -DBUILD_TESTING=ON -DCOOLPROP_SWIG_OPTIONS="-namespace package.name"
where ``package.name`` is replaced with the desired name
where ``package.name`` is replaced with the desired name
.. _csharp_example:
Example Code

View File

@@ -136,7 +136,7 @@ Part 4:
Open Excel, go to ``Tools/Add-ins...``. In Browse, go to the folder listed above with ``UBF8T346G9.Office`` in it. Select CoolProp.xlam.
Part 4b:
-------
--------
Go to Tools/Macro/Visual_Basic_Editor and open Module 1 in CoolProp.xlam. Replace all references to “libCoolProp.dylib” with references to "/Users/${USER}/Library/Group Containers/UBF8T346G9.Office/libCoolProp.dylib”, again changing ${USER} to your user name. Save and close the Visual Basic Editor.
Part 5:

View File

@@ -6,6 +6,67 @@ Python Wrapper
.. contents:: :depth: 2
PyFluids (third-party wrapper)
==============================
PyFluids is a simple, full-featured, lightweight CoolProp wrapper for Python.
It is published on `PyPI <https://pypi.org/project/pyfluids/>`_, so you can easily install it using: ::
pip install pyfluids
All CoolProp features are included: thermophysical properties of pure fluids, mixtures and humid air.
You can also easily convert the results to a JSON string or Python dict, add new properties or inputs for lookups, and more.
Benefits
--------
* Easy to use: all fluids and properties are at hand, no need to remember CoolProp keys.
* Processes for fluids and humid air are included: there is no need to code them yourself.
* User-friendly interface: writing code is faster.
Examples
--------
To calculate the specific heat of saturated water vapor at *1 atm*: ::
from pyfluids import Fluid, FluidsList
water_vapour = Fluid(FluidsList.Water).dew_point_at_pressure(101325)
print(water_vapour.specific_heat) # 2079.937085633241
To calculate the dynamic viscosity of propylene glycol aqueous solution with *60 %* mass fraction at *100 kPa* and *-20 °C*: ::
from pyfluids import Fluid, FluidsList, Input
propylene_glycol = Fluid(FluidsList.MPG, 60).with_state(
Input.pressure(100e3), Input.temperature(-20)
)
print(propylene_glycol.dynamic_viscosity) # 0.13907391053938878
To calculate the density of ethanol aqueous solution (with ethanol *40 %* mass fraction) at *200 kPa* and *4 °C*: ::
from pyfluids import Mixture, FluidsList, Input
mixture = Mixture([FluidsList.Water, FluidsList.Ethanol], [60, 40]).with_state(
Input.pressure(200e3), Input.temperature(4)
)
print(mixture.density) # 883.3922771627963
To calculate the wet bulb temperature of humid air at *99 kPa*, *30 °C* and *50 %* relative humidity: ::
from pyfluids import HumidAir, InputHumidAir
humid_air = HumidAir().with_state(
InputHumidAir.pressure(99e3),
InputHumidAir.temperature(30),
InputHumidAir.relative_humidity(50),
)
print(humid_air.wet_bulb_temperature) # 21.946578559079228
For any questions or more examples, `see PyFluids on GitHub <https://github.com/portyanikhin/PyFluids>`_.
Automatic installation
======================
@@ -139,21 +200,21 @@ Example Code Output
Code Warnings
=============
Messages may be issued from the Python CoolProp wrapper via the Python `warnings` module. This module allows
non-fatal warning messages to be issued to the calling program and stdout to warn of
improper function usage or deprecation of features. These warnings will, by
default, be issued each and every time a suspect call is made to CoolProp. While, the best
solution is to correct the calling code according to the message received, sometimes this is
Messages may be issued from the Python CoolProp wrapper via the Python `warnings` module. This module allows
non-fatal warning messages to be issued to the calling program and stdout to warn of
improper function usage or deprecation of features. These warnings will, by
default, be issued each and every time a suspect call is made to CoolProp. While the best
solution is to correct the calling code according to the message received, sometimes this is
difficult to do in legacy or third-party code and can result in many, many warning messages that obscure
the output and hinder debugging.
Suppressing warning messages
----------------------------
The calling code can suppress or ignore these warning messages by overriding the default
warnings filter and changing the behavior of the warnings module. As an example, the
following script will result in a `DeprecationWarning` on each call to the deprecated function
Props()::
The calling code can suppress or ignore these warning messages by overriding the default
warnings filter and changing the behavior of the warnings module. As an example, the
following script will result in a `DeprecationWarning` on each call to the deprecated function
Props()::
from CoolProp.CoolProp import Props
Rho = Props('D','T',298.15,'P',10000,'R744')
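One possible way to silence this particular warning in the calling code, sketched here with only the standard ``warnings`` module, is to install a filter before the call::

    import warnings
    from CoolProp.CoolProp import Props

    # Ignore the DeprecationWarning raised by the deprecated Props() function
    warnings.simplefilter("ignore", DeprecationWarning)
    Rho = Props('D', 'T', 298.15, 'P', 10000, 'R744')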
@@ -195,4 +256,3 @@ Module Documentation
.. toctree::
../../../apidoc/CoolProp.rst

View File

@@ -24,7 +24,7 @@ the CoolProp library.
check the issues for you:
- Move into the ``dev/scripts`` folder and run
``python milestone2rst.py check v6.3.0``. This command finds the date
``python milestone2rst.py check vX.X.X``. This command finds the date
of the latest release and looks at all issues that have been closed
since then. It reports problems such as missing labels.
- Take the time to fix all problems and label issues and PRs.
@@ -38,30 +38,25 @@ the CoolProp library.
- Move into the ``dev/scripts`` folder and do ``python milestone2rst.py
changelog vX.X.X`` where ``vX.X.X`` is the version number of the
milestone on GitHub.
- Copy the generated text (goes to stdout) into the changelog file in
- Review the generated text from ``snippet_issues.rst.txt`` and
``snippet_pulls.rst.txt`` and update the changelog file in
``Web/coolprop/changelog.rst``. You might also use the same text for
the annotated tag / release on GitHub.
* **Merge to release**: Merge *master* into *release* branch.
* **Build Bots**: Force all buildbots to run on the *release* branch, this
will also change the upload folder from *binaries* to *release*.
* **Release**: Wait for all bots to finish and run the release script by
launching the ``release version`` bot with dry run disabled and the
correct version number. This uploads binaries to pypi and sourceforge.
Ignore the warning ``failed to set times on
"/home/project-web/coolprop/htdocs/jscript/coolprop-latest.js"``,
it is a symlink and will be overwritten. If you encounter problems, log
in via SSH and have a look at the logs. If you would like to finish the
release manually, consider editing the case statement in the bash script and
running the commands from ``release.bsh.cmds.txt`` manually.
* **Clean and Tag**: If everything went well, you can proceed:
- Create a new tag and a new release on GitHub. Remember to
make an annotated tag and include the information on the closed
issues here as well.
- Change the default download file on sourceforge to point to the new
* **Push to master**: Merge your changes to the *master* branch and wait for the
CI system to complete the work. Only proceed if all builds finish successfully.
* **Tag a release**: Tag the master branch using ``vX.X.X`` and wait once more
for all CI actions to complete. Make sure that the Python wheels get uploaded
to PyPI automatically.
* **Release**: Wait for all actions to finish and manually launch the release action
with the version number vX.X.X as input. This updates the homepage and uploads the
binaries to SourceForge.
* **Clean up**: If everything went well, you can proceed:
- Create a new release on GitHub using the vX.X.X tag.
- Add a note with the download link: https://sourceforge.net/projects/coolprop/files/CoolProp/X.X.X/
- Change the default download file on SourceForge to point to the new
zipped sources.
- Bump the version number in the CMake file and commit.
- Announce the new features if you like...
- Announce the new features if you like.
That's all folks.

View File

@@ -1,38 +0,0 @@
#!/bin/bash
set -v
set -e
# This script is intended to be run INSIDE the docker container, and
# if all goes to plan, the _build/html folder should contain the contents
# of the build
# Turn on our conda environment
source activate docs
# Try to install dependencies on the fly, or rely on the existing environment
#conda install six numpy cython matplotlib requests jinja2 pyyaml
# Build/Install CoolProp and check
cd /coolprop/wrappers/Python
python setup.py bdist_wheel --dist-dir dist cmake=default,64
pip install -vvv --force-reinstall --ignore-installed --upgrade --no-index `ls dist/CoolProp*.whl`
rm -rf dist
cd /coolprop
python -c "import CoolProp; print(CoolProp.__gitrevision__)"
python -c "import CoolProp; print(CoolProp.__file__)"
# Run the slow stuff, if needed, or demanded
cd /coolprop/Web/scripts
python -u __init__.py $1
# Doxygen
cd /coolprop
doxygen --version && doxygen Doxyfile
# api documentation
cd /coolprop/Web
sphinx-apidoc -T -f -e -o apidoc ../wrappers/Python/CoolProp
# All the rest of the docs
cd /coolprop/Web
make html

View File

@@ -1,12 +0,0 @@
version: '3.2'
services:
worker:
build:
context: ./
dockerfile: docworker.Dockerfile
command: /coolprop/Web/docker/build_docs.sh
volumes:
- type: bind
source: ../..
target: /coolprop

View File

@@ -1,11 +0,0 @@
set -v
set -e
cat build_docs.sh
# Copy the REFPROP files here
cp -r ${HOME}/REFPROP_sources .
# Run the build of the docs
docker-compose build
docker-compose run worker bash /coolprop/Web/docker/build_docs.sh $1

View File

@@ -1,39 +0,0 @@
FROM continuumio/miniconda3
RUN mkdir /usr/share/man/man1/
RUN apt-get -y -m update && \
apt-get install -y \
g++ make cmake swig doxygen p7zip-full \
mono-mcs \
octave liboctave-dev \
r-base-dev \
default-jre default-jdk \
texlive-extra-utils \
imagemagick rsync
ADD conda_environment.yml /environment.yml
RUN conda env create -f /environment.yml
# This ADD block forces a build (invalidates the cache) if the git repo contents have changed, otherwise leaves it untouched.
# See https://stackoverflow.com/a/39278224
ADD https://api.github.com/repos/usnistgov/REFPROP-cmake/git/refs/heads/master RPcmake-version.json
RUN git clone --recursive https://github.com/usnistgov/REFPROP-cmake /REFPROP-cmake
ADD REFPROP_sources /REFPROP_sources
WORKDIR /REFPROP-cmake
SHELL ["/bin/bash", "-c"] # https://github.com/moby/moby/issues/7281#issuecomment-389440503
RUN source activate docs && \
python -c "import numpy; print(numpy.__file__)" && \
mkdir build && \
cd build && \
cmake .. -DREFPROP_FORTRAN_PATH=/REFPROP_sources/FORTRAN && \
cmake --build . && \
mkdir -p /opt/refprop && \
cp librefprop.so /opt/refprop && \
cp -r /REFPROP_sources/FLUIDS /opt/refprop && \
cp -r /REFPROP_sources/MIXTURES /opt/refprop
RUN python -m pip install pybtex

View File

@@ -470,4 +470,6 @@ parameters by running the script located at ``dev/incompressible_liquids/all_inc
Your new fluid is now part of the codebase and should be available to all CoolProp functions as
soon as you recompile the sources.
Remember that the incompressible fluids are prefixed with the backend name ``INCOMP`` when accessed
via the ``PropsSI`` function. If you define a new fluid called ``H2O2`` in ``PureFluids.py``, you
can use it later by typing ``INCOMP::H2O2``.
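As a short sketch, once the hypothetical ``H2O2`` fluid from the example above has been added and the sources recompiled, it can be queried like any other incompressible fluid::

    import CoolProp.CoolProp as CP

    # Density of the user-defined incompressible fluid at 300 K and 1 atm
    # ("H2O2" is only the placeholder name used above; substitute your fluid name)
    rho = CP.PropsSI('D', 'T', 300, 'P', 101325, 'INCOMP::H2O2')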

View File

@@ -7,6 +7,8 @@ These pages help you to get started using CoolProp and provide detailed informat
returning user. Please feel free to browse the pages and use the menu on the left to navigate
on this website.
.. warning:: The documentation has not been updated to reflect all changes since v6.4.1. Sorry for any inconvenience.
What is CoolProp?
-----------------
@@ -20,6 +22,7 @@ CoolProp is a C++ library that implements:
* :ref:`User-friendly interface around the full capabilities of NIST REFPROP <REFPROP>`
* :ref:`Fast IAPWS-IF97 (Industrial Formulation) for Water/Steam <IF97>`
* :ref:`Cubic equations of state (SRK, PR) <cubic_backend>`
* :ref:`PC-SAFT equation of state <pcsaft_backend>`
Environments Supported
----------------------
@@ -49,7 +52,7 @@ See more examples of PropsSI usage at :ref:`High-Level interface <high_level_api
Help
----
* (**General Discussion**) Email the `Google group <https://groups.google.com/d/forum/coolprop-users>`_
* (**General Discussion**) Create a new discussion at `Github CoolProp Discussions <https://github.com/CoolProp/CoolProp/discussions>`_
* (**Bugs, feature requests**) File a `Github issue <https://github.com/CoolProp/CoolProp/issues>`_
* `Docs for v4 of CoolProp <http://www.coolprop.org/v4/>`_
* `Docs for development version of CoolProp <http://www.coolprop.org/dev/>`_
@@ -64,7 +67,9 @@ Projects Using CoolProp
* `StateCalc <https://itunes.apple.com/us/app/statecalc/id891848148?ls=1&mt=8>`_
* `SmoWeb <http://platform.sysmoltd.com>`_
* `T-Props <https://play.google.com/store/apps/details?id=com.innoversetech.tprops>`_
* `PropiedadesDeFluidos <http://jfc.us.es/propiedadesdefluidos/descripcion/>`_
* `PropiedadesDeFluidos <https://personal.us.es/jfc/PropiedadesDeFluidos/descripcion/>`_ (FluidProperties)
* `CoolPropJavascriptDemo <https://github.com/dvd101x/CoolPropJavascriptDemo>`_
* `CuadernoDeProblemas <https://personal.us.es/jfc/CuadernoDeProblemas/>`_ (ProblemNotebook)
Main Developers
---------------
@@ -82,7 +87,7 @@ Please be so kind and cite our work in your publication: :ref:`Citation informat
Supporters
----------
\
\
.. image:: _static/logo_labothap.png
:height: 100px
@@ -116,9 +121,9 @@ Supporters
:height: 50px
:alt: IPU Refrigeration and Energy Technology
:target: https://www.ipu.dk
License Information
-------------------
CoolProp has flexible licensing terms and you can use it for commercial projects and academic work free of charge. Have a look at the actual `license <https://github.com/CoolProp/CoolProp/blob/master/LICENSE>`_, if you are in doubt.
CoolProp has flexible licensing terms and you can use it for commercial projects and academic work free of charge. Have a look at the actual `license <https://github.com/CoolProp/CoolProp/blob/master/LICENSE>`_, if you are in doubt.

View File

@@ -23,15 +23,15 @@ Fluid Information
:widths: 40, 60
:delim: ;
:file: {fluid:s}-info.csv
REFPROP Validation Data
=======================
.. note::
This figure compares the results generated from CoolProp and those generated from REFPROP. They are all results obtained in the form :math:`Y(T,\\rho)`, where :math:`Y` is the parameter of interest and which for all EOS is a direct evaluation of the EOS
You can download the script that generated the following figure here: :download:`(link to script)<REFPROPplots/{fluid:s}.py>`, right-click the link and then save as... or the equivalent in your browser. You can also download this figure :download:`as a PDF<REFPROPplots/{fluid:s}.pdf>`.
You can download the script that generated the following figure here: :download:`(link to script)<REFPROPplots/{fluid:s}.py>`, right-click the link and then save as... or the equivalent in your browser. You can also download this figure :download:`as a PDF<REFPROPplots/{fluid:s}.pdf>`.
.. image:: REFPROPplots/{fluid:s}.png
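A minimal sketch of this kind of comparison (not the full plotting script linked above; it assumes CoolProp and REFPROP are both installed) evaluates the same output Y directly as Y(T, rho) with both backends at a single-phase state point:

# Minimal sketch of the CoolProp vs. REFPROP comparison idea.
from CoolProp.CoolProp import PropsSI

T, rho = 500.0, 1.0    # K, kg/m^3 (illustrative superheated-steam state)
for Y in ("P", "Hmass", "Smass"):
    y_cp = PropsSI(Y, "T", T, "Dmass", rho, "HEOS::Water")
    y_rp = PropsSI(Y, "T", T, "Dmass", rho, "REFPROP::Water")
    print(Y, y_cp, y_rp, abs(y_cp - y_rp) / abs(y_rp))   # relative deviation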
@@ -44,7 +44,7 @@ In this figure, we start off with a state point given by T,P and then we calcula
.. note::
You can download the script that generated the following figure here: :download:`(link to script)<Consistencyplots/{fluid:s}.py>`, right-click the link and then save as... or the equivalent in your browser. You can also download this figure :download:`as a PDF<Consistencyplots/{fluid:s}.pdf>`.
You can download the script that generated the following figure here: :download:`(link to script)<Consistencyplots/{fluid:s}.py>`, right-click the link and then save as... or the equivalent in your browser. You can also download this figure :download:`as a PDF<Consistencyplots/{fluid:s}.pdf>`.
.. image:: Consistencyplots/{fluid:s}.png
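A minimal sketch of the consistency idea, assuming a CoolProp Python installation: start from (T, P), compute enthalpy and entropy, then invert the flash and compare with the starting temperature:

# Minimal sketch of a round-trip consistency check.
from CoolProp.CoolProp import PropsSI

T0, P0 = 350.0, 101325.0                               # illustrative single-phase liquid state for water
h = PropsSI("Hmass", "T", T0, "P", P0, "Water")
s = PropsSI("Smass", "T", T0, "P", P0, "Water")
T_from_hp = PropsSI("T", "Hmass", h, "P", P0, "Water")  # invert the (H,P) flash
T_from_sp = PropsSI("T", "Smass", s, "P", P0, "Water")  # invert the (S,P) flash
print(abs(T_from_hp - T0), abs(T_from_sp - T0))         # should both be close to zero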
@@ -218,7 +218,7 @@ class FluidGenerator(object):
ITG.write(path)
del_old = CP.get_config_string(CP.LIST_STRING_DELIMITER)
CP.set_config_string(CP.LIST_STRING_DELIMITER, '|')
try:
aliases = ', '.join(['``' + a.strip() + '``' for a in CoolProp.CoolProp.get_fluid_param_string(self.fluid, 'aliases').strip().split('|') if a])

View File

@@ -1,92 +1,52 @@
#!/usr/bin/env python
# -*- coding: utf8 -*-
import os.path, glob, subprocess, sys, time, datetime, pytz
#
if len(sys.argv) < 2:
full_rebuild = False
if len(sys.argv) == 2:
if sys.argv[1] == "True": full_rebuild = True
elif sys.argv[1] == "1": full_rebuild = True
else: full_rebuild = False
if len(sys.argv) > 2:
full_rebuild = False
print("Cannot process more than one parameter: {0}".format(str(sys.argv)))
#
# Start with detecting the full build
def detect_full_rebuild():
if len(sys.argv) >= 2:
arg = str(sys.argv[1]).lower()
if arg == "true": return True
if arg == "1": return True
return False
full_rebuild = detect_full_rebuild()
print("Detected rebuild argument: full_rebuild = {}".format(full_rebuild))
# File system functions
def touch(fname):
if os.path.exists(fname): os.utime(fname, None)
else: open(fname, 'a').close()
#
def get_ftime(fname):
if os.path.isfile(fname): return os.path.getctime(fname)
else: return 0
#
# Directory settings
script_root_dir = os.path.abspath(os.path.dirname(__file__))
repo_root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
task_list = []
def add_if_exists(fname):
if os.path.isfile(fname):
task_list.append(fname)
print("Added '{}' to the task list.".format(fname))
return True
return False
#
web_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
script_dir = os.path.abspath(os.path.join(web_dir, 'scripts'))
touch_file = os.path.abspath(os.path.join(script_dir, 'last_run'))
root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
#
cur_time = time.time()
fil_time = get_ftime(touch_file)
#
# Static execution time
#reg_hour = time.strftime("%H")
#reg_minute = time.strftime("%M")
# sch_hour = 12 #scheduled hour = 3am Boulder = 12pm CPH
# sch_minute = 7 #scheduled minute = 7 past
#
# Dynamically calculated execution time (includes daylight saving time, etc.)
masterTime = pytz.timezone('US/Pacific')
#slaveTime = pytz.timezone('Europe/Copenhagen')
now_master = datetime.datetime.now(masterTime)
run_master = datetime.datetime.strptime("03:00:00", '%H:%M:%S')
#
now_master = datetime.time(now_master.hour, now_master.minute, now_master.second)
run_master = datetime.time(run_master.hour, run_master.minute, run_master.second)
run_master_end = datetime.time(run_master.hour, run_master.minute + 5, run_master.second)
#
lim_days = 0.90
lim_time = cur_time - 60 * 60 * 24 * lim_days # seconds
#
if now_master >= run_master and \
now_master <= run_master_end and \
not full_rebuild:
print("This is a scheduled rebuild at {0}.".format(run_master))
if fil_time < lim_time: full_rebuild = True
else: print("It looks like the files have been rebuilt during the last day.")
#
lim_days = 3
lim_time = cur_time - 60 * 60 * 24 * lim_days # seconds
if fil_time < lim_time and not full_rebuild:
print("The static files have not been updated in {0} days, forcing an update now.".format(lim_days))
full_rebuild = True
#req_dir = [os.path.abspath(os.path.join(web_dir,'_static','fluid_properties','Incompressibles_reports'))]
# req_fil = [os.path.abspath(os.path.join(web_dir,'fluid_properties','Mixtures.csv')),
# os.path.abspath(os.path.join(web_dir,'fluid_properties','PurePseudoPure.csv')),
# os.path.abspath(os.path.join(web_dir,'fluid_properties','Incompressibles_pure-fluids.csv'))]
#
# for d in req_dir:
# if not os.path.exists(d) and not full_rebuild:
# print "The required directory {0} is missing, trying to rebuild it.".format(d)
# full_rebuild = True
# for f in req_fil:
# if not os.path.exists(f):
# print "The required file {0} is missing, trying to rebuild it.".format(f)
# full_rebuild = True
# print "Executing the normal scripts for generating the static files."
# script_files = glob.glob(os.path.join(script_dir,'*.py')) # Avoid recursion
# script_files = [os.path.abspath(f) for f in script_files if not os.path.abspath(f)==os.path.abspath(__file__)]
# for script in script_files:
# print "Executing {0}".format(script)
# subprocess.call('python {0}'.format(os.path.basename(script)), cwd=script_dir, shell=True)
#
def add_to_task_list(fname_in):
fname = fname_in
if add_if_exists(os.path.abspath(fname)):
return True
fname = os.path.join(script_root_dir, fname_in)
if add_if_exists(os.path.abspath(fname)):
return True
fname = os.path.join(repo_root_dir, fname_in)
if add_if_exists(os.path.abspath(fname)):
return True
print("Error: Could not find '{}'.".format(fname_in))
return False
def run_script(path):
if os.path.exists(path):
@@ -109,23 +69,29 @@ def run_script(path):
# Inject the version of CoolProp into the doxygen configuration files
# Put it at the end, overwrites prior value
import CoolProp
with open(os.path.join(root_dir, 'Doxyfile'), 'a+') as fp:
fp.write('\n\n PROJECT_NUMBER = ' + CoolProp.__version__ + '\n')
with open(os.path.join(repo_root_dir, 'Doxyfile'), 'a+') as fp:
fp.write('\n\n PROJECT_NUMBER = {}\n'.format(CoolProp.__version__))
# The normal tasks that are carried out each time the script runs
normal_tasks = ["../../dev/scripts/examples/LinuxRun.py", "coolprop.tabular.speed.py", "fluid_properties.phase_envelope.py", "fluid_properties.PurePseudoPure.py", "fluid_properties.Mixtures.py", "coolprop.parametric_table.py", "coolprop.configuration.py", "logo_2014.py", "fluid_properties.REFPROPcomparison.py"]
print("Adding the normal scripts to the task list.")
add_to_task_list("dev/scripts/examples/LinuxRun.py")
add_to_task_list("coolprop.tabular.speed.py")
add_to_task_list("fluid_properties.phase_envelope.py")
add_to_task_list("fluid_properties.PurePseudoPure.py")
add_to_task_list("fluid_properties.Mixtures.py")
add_to_task_list("coolprop.parametric_table.py")
add_to_task_list("coolprop.configuration.py")
add_to_task_list("logo_2014.py")
add_to_task_list("fluid_properties.REFPROPcomparison.py")
# The expensive tasks that are fired when full_rebuild is True
expensive_tasks = ["fluid_properties.Consistency.py", "fluid_properties.Incompressibles.sh"]
print("Executing the normal scripts for generating static files.")
for script in normal_tasks:
print("Executing {0}".format(script))
run_script(os.path.normpath(os.path.join(script_dir, script)))
#
if full_rebuild:
print("Executing the computationally expensive scripts for generating the static files.")
for script in expensive_tasks:
print("Executing {0}".format(script))
run_script(os.path.join(script_dir, script))
touch(touch_file)
else:
print("Skipping the computationally expensive scripts for generating the static files.")
print("Adding the computationally expensive scripts to the task list.")
add_to_task_list("fluid_properties.Consistency.py")
add_to_task_list("fluid_properties.Incompressibles.sh")
# Run all the files in the task list
print("Processing the selected tasks to generate the static files.")
for fname in task_list:
print("Executing {0}".format(fname))
run_script(os.path.normpath(fname))

View File

@@ -8,7 +8,7 @@ pushd ../../dev/incompressible_liquids
echo "Generating fitting reports"
python all_incompressibles.py -ns
echo "Creating example figures"
pdfnup --quiet --nup 2x1 --delta "1cm 0cm" -o report/report2up.pdf report/DowQ_fitreport.pdf report/LiBr_fitreport.pdf
pdfjam --suffix nup --nup 2x1 --landscape --quiet --delta "1cm 0cm" -o report/report2up.pdf report/DowQ_fitreport.pdf report/LiBr_fitreport.pdf
convert -background "#FFFFFF" -density 300 report/report2up.pdf report/report2up.jpg # Convert the PDF to JPG
convert -crop 100%x47%+0+30 -resize '850x' -quality 75 report/report2up.jpg report/report2up.jpg # Resize it
echo "Copying the reports to Web/_static/fluid_properties"

View File

@@ -3,30 +3,28 @@
#include <AbstractState.h>
#include <CoolProp.h>
int main(int argc, const char* argv[]) {
shared_ptr<CoolProp::AbstractState> pState;
pState.reset(CoolProp::AbstractState::factory("HEOS", "Water"));
double T_test = 25 + 273.15;
pState->update(CoolProp::QT_INPUTS, 0.3, T_test);
double rho_test = pState->rhomass();
double s_test = pState->smass();
double drho_new = 0;
double T_test = 25 + 273.15;
pState->update(CoolProp::QT_INPUTS, 0.3, T_test);
double rho_test = pState->rhomass();
double s_test = pState->smass();
double drho_new = 0;
pState->specify_phase(CoolProp::iphase_not_imposed);
pState->update(CoolProp::SmassT_INPUTS, s_test, T_test);
drho_new = pState->rhomass() - rho_test;
pState->specify_phase(CoolProp::iphase_not_imposed);
pState->update(CoolProp::SmassT_INPUTS, s_test, T_test);
drho_new = pState->rhomass() - rho_test;
pState->specify_phase(CoolProp::iphase_not_imposed);
pState->update(CoolProp::DmassT_INPUTS, rho_test, T_test);
drho_new = pState->rhomass() - rho_test;
pState->specify_phase(CoolProp::iphase_not_imposed);
pState->update(CoolProp::DmassT_INPUTS, rho_test, T_test);
drho_new = pState->rhomass() - rho_test;
pState->specify_phase(CoolProp::iphase_twophase);
pState->update(CoolProp::SmassT_INPUTS, s_test, T_test);
drho_new = pState->rhomass() - rho_test;
pState->specify_phase(CoolProp::iphase_twophase);
pState->update(CoolProp::DmassT_INPUTS, rho_test, T_test);
drho_new = pState->rhomass() - rho_test;
pState->specify_phase(CoolProp::iphase_twophase);
pState->update(CoolProp::SmassT_INPUTS, s_test, T_test);
drho_new = pState->rhomass() - rho_test;
pState->specify_phase(CoolProp::iphase_twophase);
pState->update(CoolProp::DmassT_INPUTS, rho_test, T_test);
drho_new = pState->rhomass() - rho_test;
}

40
dev/Tickets/1611.cpp Normal file
View File

@@ -0,0 +1,40 @@
#include <vector>
#include <string>
#include <chrono>
#include <memory>
#include "CoolProp.h"
#include "AbstractState.h"
int main(int argc, const char* argv[]) {
CoolProp::set_debug_level(1000);
std::shared_ptr<CoolProp::AbstractState> ptr;
std::string backend;
std::vector<std::string> fluids;
double Q, T, p, s, h;
// Test as described in https://github.com/CoolProp/CoolProp/issues/1611
backend = "HEOS";
fluids = {"R407C"};
ptr.reset(CoolProp::AbstractState::factory(backend, fluids));
p = 4863285.0;
Q = 0;
ptr->update(CoolProp::PQ_INPUTS, p, Q);
// test as described in https://github.com/CoolProp/CoolProp/issues/1678
backend = "HEOS";
fluids = {"Water"};
ptr.reset(CoolProp::AbstractState::factory(backend, fluids));
p = ptr->p_critical();
Q = 0;
ptr->update(CoolProp::PQ_INPUTS, p, Q);
s = 4000;
ptr->update(CoolProp::PSmass_INPUTS, p, s);
return 0;
}

View File

@@ -20,10 +20,10 @@
// return outputs;
//}
std::vector<std::pair<std::string, double> > generate_values(double T, double R, double P = 101325) {
std::vector<std::pair<std::string, double>> generate_values(double T, double R, double P = 101325) {
double psi_w = HumidAir::HAPropsSI("psi_w", "T", T, "R", R, "P", P);
std::vector<std::string> other_output_keys = { "T_wb","T_dp","Hda","Sda","Vda","Omega" };
std::vector<std::pair<std::string, double> > outputs;
std::vector<std::string> other_output_keys = {"T_wb", "T_dp", "Hda", "Sda", "Vda", "Omega"};
std::vector<std::pair<std::string, double>> outputs;
outputs.push_back(std::pair<std::string, double>("psi_w", psi_w));
outputs.push_back(std::pair<std::string, double>("T", T));
outputs.push_back(std::pair<std::string, double>("P", P));
@@ -34,8 +34,8 @@ std::vector<std::pair<std::string, double> > generate_values(double T, double R,
return outputs;
}
std::vector<std::pair<std::string, std::string> > get_supported_input_pairs() {
std::vector<std::pair<std::string, std::string> > good_ones;
std::vector<std::pair<std::string, std::string>> get_supported_input_pairs() {
std::vector<std::pair<std::string, std::string>> good_ones;
auto inputs = generate_values(300, 0.5);
std::string k1, k2;
double v1 = -_HUGE, v2 = -_HUGE, p = -_HUGE;
@@ -55,16 +55,11 @@ std::vector<std::pair<std::string, std::string> > get_supported_input_pairs() {
v1 = inputs[i].second;
v2 = inputs[j].second;
try {
double psi_w_new = HumidAir::HAPropsSI(
"psi_w",
k1, v1,
k2, v2,
"P", p);
double psi_w_new = HumidAir::HAPropsSI("psi_w", k1, v1, k2, v2, "P", p);
if (ValidNumber(psi_w_new)) {
good_ones.push_back(std::pair<std::string, std::string>(k1, k2));
}
}
catch (std::exception & e) {
} catch (std::exception& e) {
std::cout << e.what();
}
}
@@ -72,7 +67,8 @@ std::vector<std::pair<std::string, std::string> > get_supported_input_pairs() {
return good_ones;
}
void calculate(std::vector<std::pair<std::string, double> > inputs, std::size_t& clc_count, std::size_t& err_count, std::size_t& acc_count, const std::vector<std::pair<std::string, std::string> >& supported_pairs) {
void calculate(std::vector<std::pair<std::string, double>> inputs, std::size_t& clc_count, std::size_t& err_count, std::size_t& acc_count,
const std::vector<std::pair<std::string, std::string>>& supported_pairs) {
//auto errors = []
std::string k1, k2;
@@ -80,12 +76,14 @@ void calculate(std::vector<std::pair<std::string, double> > inputs, std::size_t&
for (const auto& kv : inputs) {
if (kv.first == "psi_w") {
psi_w_input = kv.second; break;
psi_w_input = kv.second;
break;
}
}
for (const auto& kv : inputs) {
if (kv.first == "P") {
P_input = kv.second; break;
P_input = kv.second;
break;
}
}
@@ -96,24 +94,20 @@ void calculate(std::vector<std::pair<std::string, double> > inputs, std::size_t&
for (std::size_t j = 0; j < inputs.size(); j++) {
if (inputs[j].first.compare(k1) == 0) {
v1 = inputs[j].second;
}
else if (inputs[j].first.compare(k2) == 0) {
} else if (inputs[j].first.compare(k2) == 0) {
v2 = inputs[j].second;
}
}
clc_count += 1;
try {
double psi_w_new = HumidAir::HAPropsSI(
"psi_w",
k1, v1,
k2, v2,
"P", P_input);
double psi_w_new = HumidAir::HAPropsSI("psi_w", k1, v1, k2, v2, "P", P_input);
double delta = std::abs(psi_w_input - psi_w_new);
if (delta > 1e-6) {
acc_count += 1;
HumidAir::HAPropsSI("psi_w", k1, v1, k2, v2, "P", P_input);
std::cout << "deviation: " << delta << " @ HAPropsSI(\"psi_w\",\"" << k1 << "\"," << v1 << ",\"" << k2 << "\"," << v2 << ",\"P\",101325); error: " + CoolProp::get_global_param_string("errstring") << std::endl;
std::cout << "deviation: " << delta << " @ HAPropsSI(\"psi_w\",\"" << k1 << "\"," << v1 << ",\"" << k2 << "\"," << v2
<< ",\"P\",101325); error: " + CoolProp::get_global_param_string("errstring") << std::endl;
// std::cout << "\n-------------- Error --------------\n";
// std::cout << "delta = " << delta << "\n";
@@ -121,8 +115,7 @@ void calculate(std::vector<std::pair<std::string, double> > inputs, std::size_t&
// std::cout << k2 << " = " << v2 << "\n";
// std::cout << "P" << " = " << P_input << "\n";
}
}
catch (std::exception & e) {
} catch (std::exception& e) {
err_count += 1;
std::cout << e.what();
}
@@ -136,7 +129,8 @@ int main(int argc, const char* argv[]) {
// for (auto R = 0.0; R < 1.0; R += 0.01){
// std::cout << R << " " << HumidAir::HAPropsSI("Hda", "T", 240, "R", R, "P", 101325) << "\n";
// }
auto hh = HumidAir::HAPropsSI("psi_w", "R", 0.0333333, "Vda", 0.958997, "P", 101325);;
auto hh = HumidAir::HAPropsSI("psi_w", "R", 0.0333333, "Vda", 0.958997, "P", 101325);
;
double h = HumidAir::HAPropsSI("S", "T", 240, "P", 101325, "R", 0);
// double T = HumidAir::HAPropsSI("W", "P", 101325, "S", h, "T", 240);
// T = HumidAir::HAPropsSI("T", "H", h, "R", 1.0, "P", 101325);
@@ -150,7 +144,7 @@ int main(int argc, const char* argv[]) {
std::vector<double> T(num), R(num);
// Full range : -143.15 C to 350.0 C
double T_lo = (-143.15 + 273.15) * 1.001;
double T_hi = ( 350.00 + 273.15) * 0.999;
double T_hi = (350.00 + 273.15) * 0.999;
// Full range : 0.0 to 1.0
double R_lo = 0.0 * 1.001;
double R_hi = 1.0 * 0.999;
@@ -190,7 +184,6 @@ int main(int argc, const char* argv[]) {
std::cout << "Time: " << time << " s / " << clc_count << " = " << time / clc_count * 1e3 << " ms per call \n";
}
// # Humid air example from Sphinx
// from CoolProp.HumidAirProp import HAPropsSI
// h = HAPropsSI("H","T",298.15,"P",101325,"R",0.5); print(h)
@@ -200,60 +193,59 @@ int main(int argc, const char* argv[]) {
// import sys
// sys.exit()
// # Verification script
// # Verification script
// import CoolProp.CoolProp as CP
// import numpy as np
// import itertools
// from multiprocessing import Pool
// def generate_values(TR,P=101325):
// """ Starting with T,R as inputs, generate all other values """
// T,R = TR
// psi_w = CP.HAPropsSI("psi_w","T",T,"R",R,"P",P)
// other_output_keys = ["T_wb","T_dp","Hda","Sda","Vda","Omega"]
// outputs = {"psi_w":psi_w,"T":T,"P":P,"R":R}
// for k in other_output_keys:
// outputs[k] = CP.HAPropsSI(k,"T",T,"R",R,"P",P)
// return outputs
// """ Starting with T,R as inputs, generate all other values """
// T,R = TR
// psi_w = CP.HAPropsSI("psi_w","T",T,"R",R,"P",P)
// other_output_keys = ["T_wb","T_dp","Hda","Sda","Vda","Omega"]
// outputs = {"psi_w":psi_w,"T":T,"P":P,"R":R}
// for k in other_output_keys:
// outputs[k] = CP.HAPropsSI(k,"T",T,"R",R,"P",P)
// return outputs
// def get_supported_input_pairs():
// """ Determine which input pairs are supported """
// good_ones = []
// inputs = generate_values((300, 0.5))
// for k1, k2 in itertools.product(inputs.keys(), inputs.keys()):
// if "P" in [k1,k2] or k1==k2:
// continue
// args = ("psi_w", k1, inputs[k1], k2, inputs[k2], "P", inputs["P"])
// try:
// psi_w_new = CP.HAPropsSI(*args)
// good_ones.append((k1,k2))
// except BaseException as BE:
// pass
// if "currently at least one of" in str(BE) or "cannot provide two inputs" in str(BE):
// pass
// else:
// print(BE)
// good_ones.append((k1,k2))
// return good_ones
// """ Determine which input pairs are supported """
// good_ones = []
// inputs = generate_values((300, 0.5))
// for k1, k2 in itertools.product(inputs.keys(), inputs.keys()):
// if "P" in [k1,k2] or k1==k2:
// continue
// args = ("psi_w", k1, inputs[k1], k2, inputs[k2], "P", inputs["P"])
// try:
// psi_w_new = CP.HAPropsSI(*args)
// good_ones.append((k1,k2))
// except BaseException as BE:
// pass
// if "currently at least one of" in str(BE) or "cannot provide two inputs" in str(BE):
// pass
// else:
// print(BE)
// good_ones.append((k1,k2))
// return good_ones
// def calculate(inputs):
// """ For a given input, try all possible input pairs """
// errors = []
// supported_pairs = get_supported_input_pairs()
// for k1, k2 in supported_pairs:
// psi_w_input = inputs["psi_w"]
// args = "psi_w",k1,inputs[k1],k2,inputs[k2],"P",inputs["P"]
// try:
// psi_w_new = CP.HAPropsSI(*args)
// except BaseException as BE:
// errors.append((str(BE),args, inputs))
// return errors
// """ For a given input, try all possible input pairs """
// errors = []
// supported_pairs = get_supported_input_pairs()
// for k1, k2 in supported_pairs:
// psi_w_input = inputs["psi_w"]
// args = "psi_w",k1,inputs[k1],k2,inputs[k2],"P",inputs["P"]
// try:
// psi_w_new = CP.HAPropsSI(*args)
// except BaseException as BE:
// errors.append((str(BE),args, inputs))
// return errors
// if __name__ == "__main__":
// TR = itertools.product(np.linspace(240, 360, 31), np.linspace(0, 1, 31))
// with Pool(processes=2) as pool:
// input_values = pool.map(generate_values, TR)
// errors = pool.map(calculate, input_values)
// for err in itertools.chain.from_iterable(errors):
// print(err)
// TR = itertools.product(np.linspace(240, 360, 31), np.linspace(0, 1, 31))
// with Pool(processes=2) as pool:
// input_values = pool.map(generate_values, TR)
// errors = pool.map(calculate, input_values)
// for err in itertools.chain.from_iterable(errors):
// print(err)

View File

@@ -5,7 +5,7 @@ h = HAPropsSI('H','T',298.15,'P',101325,'R',0.5); print(h)
T = HAPropsSI('T','P',101325,'H',h,'R',1.0); print(T)
T = HAPropsSI('T','H',h,'R',1.0,'P',101325); print(T)
# Verification script
# Verification script
import CoolProp.CoolProp as CP
import numpy as np
import itertools

22
dev/Tickets/2190.cpp Normal file
View File

@@ -0,0 +1,22 @@
#include <vector>
#include <string>
#include <chrono>
#include <memory>
#include "CoolProp.h"
#include "HumidAirProp.h"
int main(int argc, const char* argv[]) {
CoolProp::set_debug_level(1000);
double T = 393.15;
double p = 101325;
double R = 0.1;
double h = HumidAir::HAPropsSI("H", "T", T, "P", p, "R", R);
double R_test = HumidAir::HAPropsSI("R", "T", T, "P", p, "H", h);
return 0;
}

File diff suppressed because it is too large Load Diff

View File

@@ -3,16 +3,14 @@
#include <AbstractState.h>
#include <CoolProp.h>
int main(int argc, const char* argv[]) {
shared_ptr<CoolProp::AbstractState> pState;
pState.reset(CoolProp::AbstractState::factory("TTSE&HEOS", "Water"));
std::cout << "T crit: " << pState->T_critical() << std::endl;
pState->update(CoolProp::QT_INPUTS, 0.2, 373.15);
double res = pState->first_two_phase_deriv_splined(CoolProp::iDmass, CoolProp::iHmass, CoolProp::iP, 0.3);
pState->update(CoolProp::QT_INPUTS, 0.2, 373.15);
double res = pState->first_two_phase_deriv_splined(CoolProp::iDmass, CoolProp::iHmass, CoolProp::iP, 0.3);
/*x, y1 = [], []
/*x, y1 = [], []
for Q in np.linspace(0, 0.3, steps) :
AS.update(CoolProp.PQ_INPUTS, 101325, Q)
x.append(AS.Q())
@@ -25,5 +23,4 @@ int main(int argc, const char* argv[]) {
plt.plot(x, y1, label = 'Two-phase (splined, tabular)', ls = '--', lw = 3)
*/
}

61
dev/ci/clang-format.sh Executable file
View File

@@ -0,0 +1,61 @@
#!/bin/bash
# This script compares two branches (e.g. pr-bugfix <-> develop) and finds files of type cpp, hpp, c, h
# that have changed between the branches, then processes each of these files with clang-format.
# The default behavior of clang-format is to change the file in place, formatting it according to the style guide (.clang-format).
# If clang-format changes any of these files, the CI job will fail and alert the user to correct the formatting.
# The user should run this script locally and accept the automatic changes proposed by clang-format.
#
# You can also run clang-format on the entire /src directory if desired. To do that, you can use one of the following:
# using find on macOS
# find -E ./src -regex '.*\.(cpp|hpp|c|h)$' | xargs clang-format -style=file -i -fallback-style=none
# using find on Linux
# find ./src -regextype posix-extended -regex '.*\.(cpp|hpp|c|h)$' | xargs clang-format -style=file -i -fallback-style=none
display_usage() {
echo -e "\nUsage:\ PR_BRANCH_NAME TARGET_BRANCH_NAME \n"
}
if [ $# -le 1 ]
then
display_usage
exit 1
fi
PR_BRANCH_NAME=$1
TARGET_BRANCH_NAME=$2
# If pointing at HEAD, then also include uncommitted changes, but only the cached ones (= staged for the next commit) so we can use it as a pre-commit script
if [ $PR_BRANCH_NAME == "HEAD" ]
then
PR_BRANCH_NAME="--cached "
fi
# first find if any files changed
num=$(git diff $PR_BRANCH_NAME $TARGET_BRANCH_NAME --name-only | grep '.*\.\(cpp\|c\|hpp\|h\)$' | wc -l | tr -d '[:space:]')
if [ $num -eq 0 ]
then
echo "No files of type (cpp, c, hpp, h) changed. Skipping clang-formatting"
exit 0
fi
git diff $PR_BRANCH_NAME $TARGET_BRANCH_NAME --name-only | grep '.*\.\(cpp\|c\|hpp\|h\)$' | xargs clang-format -style=file -i -fallback-style=none
# clang-format will auto-correct files, so prepare the diff and use it as an artifact
git diff > clang_format.patch
# Delete the patch if nothing changed, otherwise exit 1 to indicate a failed job
if [ ! -s clang_format.patch ]
then
rm clang_format.patch
exit 0
else
echo "clang-format auto corrected files:"
git diff --name-only
echo -e "\nPlease correct these files. You can run ci/clang-format.sh locally and commit changes"
exit 1
fi
exit 0

View File

@@ -0,0 +1,148 @@
import re
from collections import Counter
def colorize(lines):
def bold(s):
return '\x1b[1m{}\x1b[0m'.format(s)
def red(s):
return '\x1b[31m{}\x1b[0m'.format(s)
def green(s):
return '\x1b[32m{}\x1b[0m'.format(s)
def yellow(s):
return '\x1b[33m{}\x1b[0m'.format(s)
def blue(s):
return '\x1b[34m{}\x1b[0m'.format(s)
def magenta(s): # purple
return '\x1b[35m{}\x1b[0m'.format(s)
def cyan(s):
return '\x1b[36m{}\x1b[0m'.format(s)
def format_severity(txt, severity):
"""
http://cppcheck.sourceforge.net/devinfo/doxyoutput/classSeverity.html
enum:
none, error, warning, style, performance,
portability, information, debug
"""
if severity == "none":
return txt
if severity == "error":
return red(txt)
if severity == "warning":
return yellow(txt)
if severity == 'style':
return blue(txt)
if severity == "performance":
return cyan(txt)
if severity == "portability":
return magenta(txt)
if severity == "information":
return green(txt)
if severity == "debug":
return txt
return txt
re_message = re.compile(r'\[(?P<file>.*):(?P<line>.*?)\]:'
r'\((?P<severity>.*?)\),\[(?P<id>.*?)\],'
r'(?P<message>.*)')
colored_lines = []
matched_messages = []
colored_lines = []
matched_messages = []
for line in lines:
m = re_message.match(line)
if m:
d = m.groupdict()
matched_messages.append(d)
else:
colored_lines.append(red(line))
severity_order = ['error', 'warning', 'performance', 'portability',
'style', 'information', 'debug', 'none']
counter = Counter(d['severity'] for d in matched_messages)
summary_line = "\n\n==========================================\n"
summary_line += " {}:\n".format(bold(red("CPPCHECK Summary")))
summary_line += "------------------------------------------"
for severity in severity_order:
n_severity = counter[severity]
summary_line += "\n * "
if n_severity:
summary_line += format_severity(n_severity, severity)
else:
# summary_line += green("No {}(s)".format(severity))
summary_line += green("No")
summary_line += " {}(s)".format(format_severity(severity, severity))
summary_line += "\n==========================================\n\n"
n_errors = counter['error']
# if n_errors:
# summary_line += red("{} Errors".format(n_errors))
# else:
# summary_line = green("No Errors")
n_warnings = counter['warning']
# if n_warnings:
# summary_line += yellow("{} Warnings".format(n_warnings))
# else:
# summary_line = green("No Warnings")
n_styles = counter['style']
n_performances = counter['performance']
n_portabilities = counter['portability']
# n_informations = counter['information']
# n_debugs = counter['debug']
# Start by sorting by filename
matched_messages.sort(key=lambda d: d['file'])
matched_messages.sort(key=lambda d: severity_order.index(d['severity']))
# Now sort by the severity we cared about
for d in matched_messages:
f = d['file']
line = d['line']
severity = d['severity']
iid = d['id']
message = d['message']
colored_lines.append(
"[{f}:{line}]:({severity}),[{i}],{message}"
.format(f=magenta(f), # format_severity(f, severity),
line=green(line),
severity=format_severity(severity, severity),
i=bold(iid),
message=message))
return (colored_lines, summary_line, n_errors, n_warnings,
n_performances, n_portabilities, n_styles)
if __name__ == '__main__':
with open('cppcheck.txt', 'r') as f:
content = f.read()
lines = content.splitlines()
(colored_lines, summary_line, n_errors, n_warnings,
n_performances, n_portabilities, n_styles) = colorize(lines)
print(summary_line)
# sys.stdout.writelines(colored_lines)
print("\n".join(colored_lines))
n_tot = (n_errors + n_warnings + n_performances
+ n_portabilities + n_styles)
if n_tot > 0:
exit(1)

10
dev/ci/main.cpp Normal file
View File

@@ -0,0 +1,10 @@
#include "CoolProp.h"
#include <iostream>
#include <stdlib.h>
using namespace CoolProp;
int main() {
// First type (slowest, due to most string processing, exposed in DLL)
std::cout << PropsSI("Dmolar", "T", 298, "P", 1e5, "Propane[0.5]&Ethane[0.5]") << std::endl; // Default backend is HEOS
std::cout << PropsSI("Dmolar", "T", 298, "P", 1e5, "HEOS::Propane[0.5]&Ethane[0.5]") << std::endl;
return EXIT_SUCCESS;
}

View File

@@ -1,11 +0,0 @@
#include "CoolProp.h"
#include <iostream>
#include <stdlib.h>
using namespace CoolProp;
int main()
{
// First type (slowest, due to most string processing, exposed in DLL)
std::cout << PropsSI("Dmolar","T",298,"P",1e5,"Propane[0.5]&Ethane[0.5]") << std::endl; // Default backend is HEOS
std::cout << PropsSI("Dmolar","T",298,"P",1e5,"HEOS::Propane[0.5]&Ethane[0.5]") << std::endl;
return EXIT_SUCCESS;
}

View File

@@ -1,6 +1,6 @@
name: docs
dependencies:
- python=3.6
- python
- scipy
- numpy
- matplotlib
@@ -9,8 +9,9 @@ dependencies:
- pip
- six
- requests
- jinja2
- jinja2<=3.0.0 # needed for cloud theme compatibility
- pyyaml
- packaging
- pip:
- cython
- sphinx
@@ -19,3 +20,4 @@ dependencies:
- sphinxcontrib-bibtex
- cloud_sptheme
- openpyxl
- pybtex

View File

@@ -0,0 +1,34 @@
# This is a dockerfile for building the docker container that is
# the foundation for the documentation builder. All components of
# this image are publicly available and it could be built using
# github actions or some other CI tool. However, it does not change
# frequently, and building the image is expensive, which is why it
# is not part of the frequent CI runs.
#
# Normally, a CI workflow should take care of executing the commands
# to build the docker image. However, you can also use an access
# token to manually build the new image and push it to github.
#
# $ cat your_token | docker login ghcr.io -u USERNAME --password-stdin
# $ docker build --file docs_01_base.Dockerfile --tag ghcr.io/coolprop/coolprop_docs_01_base:dev .
# $ docker push ghcr.io/coolprop/coolprop_docs_01_base:dev
FROM continuumio/miniconda3
RUN apt-get -y -m update && \
apt-get install -y \
g++ make cmake swig doxygen p7zip-full \
mono-mcs \
octave liboctave-dev \
r-base-dev \
default-jre default-jdk \
texlive-extra-utils \
imagemagick rsync && \
apt-get autoclean
# Allow ImageMagick to invoke Ghostscript
RUN sed -i '/disable ghostscript format types/,+6d' /etc/ImageMagick-6/policy.xml
ADD conda_environment.yml /environment.yml
RUN conda update -n base -c defaults conda && conda env create -f /environment.yml && conda clean --all --yes
RUN mkdir -p /opt

View File

@@ -0,0 +1,47 @@
# This is a dockerfile for building the docker container that can
# create the documentation. It requires REFPROP and cannot be made
# publicly available.
#
# Normally, a CI workflow should take care of executing the commands
# to build the docker image. However, you can also use an access
# token to manually build the new image and push it to github.
#
# First, copy the REFPROP sources into a directory named REFPROP_sources next to this file, then:
# $ cat your_token | docker login ghcr.io -u USERNAME --password-stdin
# $ docker build --file docs_02_builder.Dockerfile --tag ghcr.io/coolprop/coolprop_docs_02_builder:dev .
# $ docker push ghcr.io/coolprop/coolprop_docs_02_builder:dev
# Use an intermediate container to build REFPROP
FROM ghcr.io/coolprop/coolprop_docs_01_base:dev as intermediate
# This ADD block forces a build (invalidates the cache) if the git repo contents have changed, otherwise leaves it untouched.
# See https://stackoverflow.com/a/39278224
ADD https://api.github.com/repos/usnistgov/REFPROP-cmake/git/refs/heads/master RPcmake-version.json
RUN git clone --recursive https://github.com/usnistgov/REFPROP-cmake /REFPROP-cmake
# Add the REFPROP source code to the repository, manage the build context accordingly
ADD REFPROP_sources /REFPROP_sources
# Build the sources using the Fortran compiler
SHELL ["/bin/bash", "-c"] # https://github.com/moby/moby/issues/7281#issuecomment-389440503
RUN source activate docs && \
python -c "import numpy; print(numpy.__file__)" && \
cmake -B /REFPROP-build -S /REFPROP-cmake -DREFPROP_FORTRAN_PATH=/REFPROP_sources/FORTRAN && \
cmake --build /REFPROP-build
# Install the REFPROP files
SHELL ["/bin/bash", "-c"] # https://github.com/moby/moby/issues/7281#issuecomment-389440503
RUN mkdir -p /opt/refprop && \
cp /REFPROP-build/librefprop.so /opt/refprop && \
cp -r /REFPROP_sources/FLUIDS /opt/refprop && \
cp -r /REFPROP_sources/MIXTURES /opt/refprop
# Delete the sources to avoid distributing them
RUN rm -rf /REFPROP_sources
# Start with the second stage image
FROM ghcr.io/coolprop/coolprop_docs_01_base:dev
# Use the output of the earlier build
COPY --from=intermediate /opt/refprop /opt/refprop

View File

@@ -58,7 +58,7 @@ HELIUM helium Helium 5.1953 R-704 - - - - - - N/A - - - - - - - - - - - -
HEPTANE heptane n-Heptane 540.13 * * * - - - 2.58 0.5 0.5 0.592 - - - - - - - - -
HEXANE hexane n-Hexane 507.82 * 3.1 * - - - 2.57 0.5 0.4 0.495 4.94E-01 - - - - - - - - -
HYDROGEN Hydrogen 33.145 R-702 - - - - - - N/A - - - 4.94E-01 - - - - - - - - -
IBUTENE 2-methyl-1-propene/methylpropene/isobutene/isobutylene Isobutene 418.09 * * * - - - N/A 0.6 0.6 6.27E-01 - - - - - - 6.67E+04
IBUTENE 2-methyl-1-propene/methylpropene/isobutene/isobutylene Isobutene 418.09 * * * - - - N/A 0.6 0.6 6.27E-01 - - - - - - 6.67E+04
IHEXANE 2-methylpentane (methylpentane) Isohexane 497.7 * * * - - - N/A - - - - - - - - - - - -
IPENTANE 2-methylbutane Isopentane 460.35 * * * - - - 1.8 0.3 0.3 4.05E-01 - - - - - - - - -
ISOBUTAN 2-methylpropane IsoButane 407.817 * * * - - - 1.74 0.4 0.3 3.07E-01 - - - - - - - - -
@@ -246,7 +246,7 @@ HH = """:'0','2','0','3','0',
:'1','1','1','NA','2',
:'1','NA','1','NA','2',
:'NA','1','1','1','1',
:'3','0','2','0','0',
:'3','0','2','0','0',
:'1','1','NA','NA','NA','1',
:'1','1','1','2','1','1',
:'1','1','1','1'"""
@@ -269,7 +269,7 @@ FH = """:'4','3','0','1','0',
:'1','1','1','NA','1',
:'0','NA','0','NA','0',
:'NA','1','2','2','0',
:'0','4','3','0','0',
:'0','4','3','0','0',
:'1','1','NA','NA','NA','1',
:'1','0','0','1','1','4',
:'1','1','1','0' """
@@ -292,7 +292,7 @@ PH = """:'0','0','0','0','0',
:'0','0','0','NA','1',
:'0','NA','1','NA','1',
:'NA','0','0','0','0',
:'0','1','0','0','3',
:'0','1','0','0','3',
:'0','0','NA','NA','NA','0',
:'0','0','0','0','0','1',
:'0','0','0','2'"""

127
dev/extract_version.py Normal file
View File

@@ -0,0 +1,127 @@
import argparse
import re
import requests
from packaging import version
from pathlib import Path
ROOT_DIR = Path(__file__).parent.parent
def parse_pypi_version(pypi=False):
if pypi:
response = requests.get('https://pypi.org/pypi/CoolProp/json')
else:
response = requests.get('https://test.pypi.org/pypi/CoolProp/json')
response.raise_for_status()
data = response.json()
releases = [version.parse(v) for v in data['releases'].keys()]
return releases
def parse_cmake_version_info():
with open(ROOT_DIR / 'CMakeLists.txt', 'r') as f:
content = f.read()
no_comments_lines = []
for line in content.splitlines():
l = line.strip().split('#')[0]
if l:
no_comments_lines.append(l)
content = "\n".join(no_comments_lines)
m_major = re.search(r'set\s*\(COOLPROP_VERSION_MAJOR (\d+)\)', content)
m_minor = re.search(r'set\s*\(COOLPROP_VERSION_MINOR (\d+)\)', content)
m_patch = re.search(r'set\s*\(COOLPROP_VERSION_PATCH (\d+)\)', content)
m_rev = re.search(r'set\s*\(COOLPROP_VERSION_REVISION "*(.*?)"*\)', content)
coolprop_version = ''
if m_major:
COOLPROP_VERSION_MAJOR = m_major.groups()[0]
coolprop_version += COOLPROP_VERSION_MAJOR
if m_minor:
COOLPROP_VERSION_MINOR = m_minor.groups()[0]
coolprop_version += "." + COOLPROP_VERSION_MINOR
if m_patch:
COOLPROP_VERSION_PATCH = m_patch.groups()[0]
coolprop_version += "." + COOLPROP_VERSION_PATCH
if m_rev:
try:
COOLPROP_VERSION_REV = str(m_rev.groups()[0])
if len(COOLPROP_VERSION_REV) > 0:
coolprop_version += "-" + COOLPROP_VERSION_REV
except:
pass
return coolprop_version
def replace_setup_py(new_v: version.Version):
fp = ROOT_DIR / 'wrappers/Python/setup.py'
with open(fp, 'r') as f:
content = f.read()
with open(fp, 'w') as f:
f.write(content.replace('version=version,', f"version='{new_v}',"))
print(f"Replaced version '{new_v}' in {fp}")
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="Find the right version from pypi/testpypi")
parser.add_argument("--cmake-only", default=False,
action='store_true',
help="Print the version from cmake only")
parser.add_argument("--pypi", default=False,
action='store_true',
help="Check pypi instead of testpypi")
parser.add_argument("--current", default=False,
action='store_true',
help="Check current version instead of incrementing by one")
parser.add_argument("--replace-setup-py", default=False,
action='store_true',
help="Do replacement in setup.py")
args = parser.parse_args()
current_v = parse_cmake_version_info()
if args.cmake_only:
print(current_v, end="")
exit(0)
current_v = version.Version(current_v)
releases = parse_pypi_version(pypi=args.pypi)
matched_releases = [v for v in releases
if v.base_version == current_v.base_version]
new_v = current_v.base_version
if matched_releases:
max_v = max(matched_releases)
if max_v.pre:
pre_iden, pre_v = max_v.pre
if args.current:
new_v += f"{pre_iden}{pre_v}"
else:
new_v += f"{pre_iden}{pre_v + 1}"
else:
new_v += ".post1"
else:
new_v = str(current_v)
new_v = version.Version(new_v)
if args.replace_setup_py:
remote = "PyPi" if args.pypi else "TestPyPi"
print(f"Found next available version on {remote}: {new_v}")
replace_setup_py(new_v=new_v)
else:
print(new_v, end="")
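The version-selection rule above can be summarised with a small sketch using hypothetical version numbers and the same packaging-based comparison as the script:

# Minimal sketch of the bump rule in extract_version.py (hypothetical versions).
from packaging import version

current = version.Version("6.5.0")                               # parsed from CMakeLists.txt in the real script
released = [version.Version(v) for v in ("6.4.1", "6.5.0rc1")]   # hypothetical releases already on (Test)PyPI

matched = [v for v in released if v.base_version == current.base_version]
if matched and max(matched).pre:                                 # a pre-release exists -> bump its number
    iden, num = max(matched).pre
    next_v = version.Version(f"{current.base_version}{iden}{num + 1}")
elif matched:                                                    # the final release exists -> use a post-release
    next_v = version.Version(f"{current.base_version}.post1")
else:                                                            # nothing published yet -> keep the base version
    next_v = current
print(next_v)                                                    # -> 6.5.0rc2 for these inputs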

View File

@@ -4,78 +4,67 @@ class EOSFitter;
#include "DataTypes.h"
#include "Fitter.h"
double NonlinearExperimentalDataPoint::residual(const std::vector<double> &n)
{
double summer = a_0(n);
for (unsigned int i = 1; i < n.size(); i++)
{
summer -= n[i]*a_i(i);
}
return summer;
double NonlinearExperimentalDataPoint::residual(const std::vector<double>& n) {
double summer = a_0(n);
for (unsigned int i = 1; i < n.size(); i++) {
summer -= n[i] * a_i(i);
}
return summer;
}
double LinearExperimentalDataPoint::residual(const std::vector<double> &n)
{
double summer = a_0();
for (unsigned int i = 1; i < n.size(); i++)
{
summer -= n[i]*a_i(i);
}
return summer;
double LinearExperimentalDataPoint::residual(const std::vector<double>& n) {
double summer = a_0();
for (unsigned int i = 1; i < n.size(); i++) {
summer -= n[i] * a_i(i);
}
return summer;
}
PressureDataPoint::PressureDataPoint(EOSFitter* EOS, double T, double rho, double p, double variance)
{
this->EOS = EOS;
this->T = T;
this->rho = rho;
this->p = p;
this->tau = EOS->Tr/this->T;
this->delta = this->rho/EOS->rhor;
this->log_tau = log(tau);
this->log_delta = log(delta);
this->variance = variance;
PressureDataPoint::PressureDataPoint(EOSFitter* EOS, double T, double rho, double p, double variance) {
this->EOS = EOS;
this->T = T;
this->rho = rho;
this->p = p;
this->tau = EOS->Tr / this->T;
this->delta = this->rho / EOS->rhor;
this->log_tau = log(tau);
this->log_delta = log(delta);
this->variance = variance;
}
/// The part that does not depend on the coefficients
double PressureDataPoint::a_0()
{
double rhoRT = this->rho*EOS->R*this->T;
return this->p/rhoRT-1;
double PressureDataPoint::a_0() {
double rhoRT = this->rho * EOS->R * this->T;
return this->p / rhoRT - 1;
}
/// The part that multiplies the coefficients
double PressureDataPoint::a_i(int i)
{
return delta*EOS->dA_dDelta(log_tau, log_delta, delta, i);
double PressureDataPoint::a_i(int i) {
return delta * EOS->dA_dDelta(log_tau, log_delta, delta, i);
}
SpecificHeatCPDataPoint::SpecificHeatCPDataPoint(EOSFitter* EOS, double T, double rho, double cp, double variance)
{
this->EOS = EOS;
this->T = T;
this->rho = rho;
this->cp = cp;
this->cp_over_R = this->cp/EOS->R;
this->tau = EOS->Tr/this->T;
this->delta = this->rho/EOS->rhor;
this->log_tau = log(tau);
this->log_delta = log(delta);
this->variance = variance;
SpecificHeatCPDataPoint::SpecificHeatCPDataPoint(EOSFitter* EOS, double T, double rho, double cp, double variance) {
this->EOS = EOS;
this->T = T;
this->rho = rho;
this->cp = cp;
this->cp_over_R = this->cp / EOS->R;
this->tau = EOS->Tr / this->T;
this->delta = this->rho / EOS->rhor;
this->log_tau = log(tau);
this->log_delta = log(delta);
this->variance = variance;
}
/// The part that does not depend on the coefficients
/// Here it requires that the coefficients be passed in to calculate the precorrelation factor
double SpecificHeatCPDataPoint::a_0(const std::vector<double> &n)
{
// Only calculate this function once to save on calls
double _dalpha_ddelta = EOS->dalphar_dDelta(log_tau, log_delta, delta);
// The precorrelation factor
double e_cp = (pow(1+delta*_dalpha_ddelta-delta*tau*EOS->d2alphar_dDelta_dTau(log_tau, log_delta, delta),(int)2)
/(1+2*delta*_dalpha_ddelta+delta*delta*EOS->d2alphar_dDelta2(log_tau, log_delta, delta)));
// The a_0 term
return cp_over_R+tau*tau*EOS->d2alpha0_dTau2(tau, delta)-e_cp;
double SpecificHeatCPDataPoint::a_0(const std::vector<double>& n) {
// Only calculate this function once to save on calls
double _dalpha_ddelta = EOS->dalphar_dDelta(log_tau, log_delta, delta);
// The precorrelation factor
double e_cp = (pow(1 + delta * _dalpha_ddelta - delta * tau * EOS->d2alphar_dDelta_dTau(log_tau, log_delta, delta), (int)2)
/ (1 + 2 * delta * _dalpha_ddelta + delta * delta * EOS->d2alphar_dDelta2(log_tau, log_delta, delta)));
// The a_0 term
return cp_over_R + tau * tau * EOS->d2alpha0_dTau2(tau, delta) - e_cp;
};
/// The part that multiplies the coefficients
double SpecificHeatCPDataPoint::a_i(int i)
{
return -pow(tau,(int)2)*EOS->d2A_dTau2(log_tau, log_delta, delta, i);
double SpecificHeatCPDataPoint::a_i(int i) {
return -pow(tau, (int)2) * EOS->d2A_dTau2(log_tau, log_delta, delta, i);
}

View File

@@ -3,59 +3,58 @@
class ExperimentalDataPoint
{
public:
EOSFitter* EOS;
double T, /// The temperature [K]
rho, /// The density [mol/m^3]
p, /// The pressure [Pa]
tau, /// The reciprocal reduced temperature [-]
delta, /// The reduced density [-]
log_tau, /// The natural logarithm of the reciprocal reduced temperature
log_delta, /// The natural logarithm of the reduced density
variance; /// The total variance of the datapoint
virtual double residual(const std::vector<double> &n) = 0;
double sum_squares(const std::vector<double> &n)
{
return pow(residual(n)/variance,(int)2);
}
public:
EOSFitter* EOS;
double T, /// The temperature [K]
rho, /// The density [mol/m^3]
p, /// The pressure [Pa]
tau, /// The reciprocal reduced temperature [-]
delta, /// The reduced density [-]
log_tau, /// The natural logarithm of the reciprocal reduced temperature
log_delta, /// The natural logarithm of the reduced density
variance; /// The total variance of the datapoint
virtual double residual(const std::vector<double>& n) = 0;
double sum_squares(const std::vector<double>& n) {
return pow(residual(n) / variance, (int)2);
}
};
class NonlinearExperimentalDataPoint : public ExperimentalDataPoint
{
public:
virtual double a_0(const std::vector<double> &n) = 0;
virtual double a_i(int i) = 0;
double residual(const std::vector<double> &n);
public:
virtual double a_0(const std::vector<double>& n) = 0;
virtual double a_i(int i) = 0;
double residual(const std::vector<double>& n);
};
class LinearExperimentalDataPoint : public ExperimentalDataPoint
{
public:
virtual double a_0(void) = 0;
virtual double a_i(int i) = 0;
double residual(const std::vector<double> &n);
public:
virtual double a_0(void) = 0;
virtual double a_i(int i) = 0;
double residual(const std::vector<double>& n);
};
class PressureDataPoint : public LinearExperimentalDataPoint
{
public:
PressureDataPoint(EOSFitter* EOS, double T, double rho, double p, double variance);
/// The part that does not depend on the coefficients
double a_0(void);
/// The part that multiplies the coefficients
double a_i(int i);
public:
PressureDataPoint(EOSFitter* EOS, double T, double rho, double p, double variance);
/// The part that does not depend on the coefficients
double a_0(void);
/// The part that multiplies the coefficients
double a_i(int i);
};
class SpecificHeatCPDataPoint : public NonlinearExperimentalDataPoint
{
public:
double cp, cp_over_R;
SpecificHeatCPDataPoint(EOSFitter* EOS, double T, double rho, double cp, double variance);
/// The part that does not depend on the coefficients
/// Here it requires that the coefficients be passed in to calculate the precorrelation factor
double a_0(const std::vector<double> &n);
/// The part that multiplies the coefficients
double a_i(int i);
public:
double cp, cp_over_R;
SpecificHeatCPDataPoint(EOSFitter* EOS, double T, double rho, double cp, double variance);
/// The part that does not depend on the coefficients
/// Here it requires that the coefficients be passed in to calculate the precorrelation factor
double a_0(const std::vector<double>& n);
/// The part that multiplies the coefficients
double a_i(int i);
};
#endif

View File

@@ -6,140 +6,113 @@ class EOSFitter;
#include "DataTypes.h"
#include "Fitter.h"
EOSFitter::EOSFitter()
{
this->Tr = Tr;
this->rhor = rhor;
this->R = R;
EOSFitter::EOSFitter() {
this->Tr = Tr;
this->rhor = rhor;
this->R = R;
};
double EOSFitter::dA_dDelta(double log_tau, double log_delta, double delta, int i)
{
return alphar.dA_dDelta(log_tau, log_delta, delta, i);
double EOSFitter::dA_dDelta(double log_tau, double log_delta, double delta, int i) {
return alphar.dA_dDelta(log_tau, log_delta, delta, i);
};
double EOSFitter::d2A_dTau2(double log_tau, double log_delta, double delta, int i)
{
return alphar.d2A_dTau2(log_tau, log_delta, delta, i);
double EOSFitter::d2A_dTau2(double log_tau, double log_delta, double delta, int i) {
return alphar.d2A_dTau2(log_tau, log_delta, delta, i);
};
double EOSFitter::dalphar_dDelta(double log_tau, double log_delta, double delta)
{
double summer = 0;
for (unsigned int i = 0; i < alphar.n.size(); i++)
{
summer += alphar.n[i]*alphar.dA_dDelta(log_tau, log_delta, delta, i);
}
return summer;
double EOSFitter::dalphar_dDelta(double log_tau, double log_delta, double delta) {
double summer = 0;
for (unsigned int i = 0; i < alphar.n.size(); i++) {
summer += alphar.n[i] * alphar.dA_dDelta(log_tau, log_delta, delta, i);
}
return summer;
};
double EOSFitter::d2alphar_dDelta2(double log_tau, double log_delta, double delta)
{
double summer = 0;
for (unsigned int i = 0; i < alphar.n.size(); i++)
{
summer += alphar.n[i]*alphar.d2A_dDelta2(log_tau, log_delta, delta, i);
}
return summer;
double EOSFitter::d2alphar_dDelta2(double log_tau, double log_delta, double delta) {
double summer = 0;
for (unsigned int i = 0; i < alphar.n.size(); i++) {
summer += alphar.n[i] * alphar.d2A_dDelta2(log_tau, log_delta, delta, i);
}
return summer;
};
double EOSFitter::d2alphar_dTau2(double log_tau, double log_delta, double delta)
{
double summer = 0;
for (unsigned int i = 0; i < alphar.n.size(); i++)
{
summer += alphar.n[i]*alphar.d2A_dTau2(log_tau, log_delta, delta, i);
}
return summer;
double EOSFitter::d2alphar_dTau2(double log_tau, double log_delta, double delta) {
double summer = 0;
for (unsigned int i = 0; i < alphar.n.size(); i++) {
summer += alphar.n[i] * alphar.d2A_dTau2(log_tau, log_delta, delta, i);
}
return summer;
};
double EOSFitter::d2alpha0_dTau2(double tau, double delta)
{
double summer = 0;
for (std::vector<phi_BC*>::iterator it = alpha0.begin(); it != alpha0.end(); it++){
summer += (*it)->dTau2(tau,delta);
}
return summer;
double EOSFitter::d2alpha0_dTau2(double tau, double delta) {
double summer = 0;
for (std::vector<phi_BC*>::iterator it = alpha0.begin(); it != alpha0.end(); it++) {
summer += (*it)->dTau2(tau, delta);
}
return summer;
};
double EOSFitter::d2alphar_dDelta_dTau(double log_tau, double log_delta, double delta)
{
double summer = 0;
for (unsigned int i = 0; i < alphar.n.size(); i++)
{
summer += alphar.n[i]*alphar.d2A_dDelta_dTau(log_tau, log_delta, delta, i);
}
return summer;
double EOSFitter::d2alphar_dDelta_dTau(double log_tau, double log_delta, double delta) {
double summer = 0;
for (unsigned int i = 0; i < alphar.n.size(); i++) {
summer += alphar.n[i] * alphar.d2A_dDelta_dTau(log_tau, log_delta, delta, i);
}
return summer;
};
/// Set the coefficients in the EOS
void EOSFitter::set_n(const std::vector<double> &n)
{
alphar.n = n;
void EOSFitter::set_n(const std::vector<double>& n) {
alphar.n = n;
};
void EOSFitter::solve_for_n(std::vector<double> &n, bool non_linear_terms_enabled)
{
Eigen::MatrixXd A = Eigen::MatrixXd::Random(21, 21);
Eigen::VectorXd Q = Eigen::VectorXd::Random(21);
void EOSFitter::solve_for_n(std::vector<double>& n, bool non_linear_terms_enabled) {
Eigen::MatrixXd A = Eigen::MatrixXd::Random(21, 21);
Eigen::VectorXd Q = Eigen::VectorXd::Random(21);
// Build the A matrix and the Q vector
for (int i = 1; i <= A.rows(); i++)
{
// The i-th row of the A matrix (Span 2000 Eq. 4.9)
for (int j = 1; j <= A.cols(); j++)
{
// The entry for the j-th column and i-th row
double summer = 0;
for (unsigned int m = 0; m < linear_data_points.size(); m++)
{
LinearExperimentalDataPoint &pt = *linear_data_points[m];
summer += (pt.a_i(i)*pt.a_i(j))/pow(pt.variance,(int)2);
}
if (non_linear_terms_enabled)
{
for (unsigned int m = 0; m < nonlinear_data_points.size(); m++)
{
NonlinearExperimentalDataPoint &pt = *nonlinear_data_points[m];
summer += (pt.a_i(i)*pt.a_i(j))/pow(pt.variance,(int)2);
}
}
A(i-1,j-1) = summer;
}
// The i-th entry in the Q column vector
double summer = 0;
for (unsigned int m = 0; m < linear_data_points.size(); m++)
{
LinearExperimentalDataPoint &pt = *linear_data_points[m];
summer += (pt.a_i(i)*pt.a_0())/pow(pt.variance,(int)2);
}
if (non_linear_terms_enabled)
{
for (unsigned int m = 0; m < nonlinear_data_points.size(); m++)
{
NonlinearExperimentalDataPoint &pt = *nonlinear_data_points[m];
summer += (pt.a_i(i)*pt.a_0(n))/pow(pt.variance,(int)2);
}
}
Q(i-1) = summer;
}
// Build the A matrix and the Q vector
for (int i = 1; i <= A.rows(); i++) {
// The i-th row of the A matrix (Span 2000 Eq. 4.9)
for (int j = 1; j <= A.cols(); j++) {
// The entry for the j-th column and i-th row
double summer = 0;
for (unsigned int m = 0; m < linear_data_points.size(); m++) {
LinearExperimentalDataPoint& pt = *linear_data_points[m];
summer += (pt.a_i(i) * pt.a_i(j)) / pow(pt.variance, (int)2);
}
if (non_linear_terms_enabled) {
for (unsigned int m = 0; m < nonlinear_data_points.size(); m++) {
NonlinearExperimentalDataPoint& pt = *nonlinear_data_points[m];
summer += (pt.a_i(i) * pt.a_i(j)) / pow(pt.variance, (int)2);
}
}
A(i - 1, j - 1) = summer;
}
// The i-th entry in the Q column vector
double summer = 0;
for (unsigned int m = 0; m < linear_data_points.size(); m++) {
LinearExperimentalDataPoint& pt = *linear_data_points[m];
summer += (pt.a_i(i) * pt.a_0()) / pow(pt.variance, (int)2);
}
if (non_linear_terms_enabled) {
for (unsigned int m = 0; m < nonlinear_data_points.size(); m++) {
NonlinearExperimentalDataPoint& pt = *nonlinear_data_points[m];
summer += (pt.a_i(i) * pt.a_0(n)) / pow(pt.variance, (int)2);
}
}
Q(i - 1) = summer;
}
Eigen::VectorXd N = A.colPivHouseholderQr().solve(Q);
for (unsigned int i = 0; i < n.size()-1; i++)
{
n[i+1] = N(i);
}
Eigen::VectorXd N = A.colPivHouseholderQr().solve(Q);
for (unsigned int i = 0; i < n.size() - 1; i++) {
n[i + 1] = N(i);
}
double relative_error = (A*N - Q).norm() / Q.norm();
double relative_error = (A * N - Q).norm() / Q.norm();
};
double EOSFitter::sum_squares(std::vector<double> &n, bool non_linear_terms_enabled)
{
double summer = 0;
for (unsigned int m = 0; m < linear_data_points.size(); m++)
{
LinearExperimentalDataPoint &pt = *linear_data_points[m];
summer += pt.sum_squares(n);
}
if (non_linear_terms_enabled)
{
for (unsigned int m = 0; m < nonlinear_data_points.size(); m++)
{
NonlinearExperimentalDataPoint &pt = *nonlinear_data_points[m];
summer += pt.sum_squares(n);
}
}
return summer;
double EOSFitter::sum_squares(std::vector<double>& n, bool non_linear_terms_enabled) {
double summer = 0;
for (unsigned int m = 0; m < linear_data_points.size(); m++) {
LinearExperimentalDataPoint& pt = *linear_data_points[m];
summer += pt.sum_squares(n);
}
if (non_linear_terms_enabled) {
for (unsigned int m = 0; m < nonlinear_data_points.size(); m++) {
NonlinearExperimentalDataPoint& pt = *nonlinear_data_points[m];
summer += pt.sum_squares(n);
}
}
return summer;
}
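The solve_for_n routine above assembles the weighted normal equations of Span (2000), Eq. 4.9. A minimal numpy sketch of the same linear algebra, with hypothetical stand-ins for the per-point basis terms a_i and variances:

# Minimal numpy sketch of the weighted least-squares normal equations that
# EOSFitter::solve_for_n builds: A[i,j] = sum_m a_i*a_j/sigma_m^2,
# Q[i] = sum_m a_i*a_0/sigma_m^2, then solve A n = Q.
import numpy as np

n_terms, n_points = 21, 200
rng = np.random.default_rng(0)
a = rng.normal(size=(n_points, n_terms))   # a[m, i]: hypothetical a_i evaluated at data point m
a0 = rng.normal(size=n_points)             # a_0 evaluated at data point m
sigma2 = np.ones(n_points)                 # per-point variances

W = 1.0 / sigma2
A = (a * W[:, None]).T @ a                 # A[i, j] = sum_m a_i * a_j / sigma^2
Q = (a * W[:, None]).T @ a0                # Q[i]    = sum_m a_i * a_0 / sigma^2
n_coeffs = np.linalg.solve(A, Q)
rel_err = np.linalg.norm(A @ n_coeffs - Q) / np.linalg.norm(Q)
print(n_coeffs[:3], rel_err)               # relative residual mirrors the check at the end of solve_for_n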

View File

@@ -7,40 +7,38 @@
class EOSFitter
{
public:
double Tr, /// The reducing temperature for tau [K]
rhor, /// The reducing density for delta [mol/m^3]
R; /// The universal gas constant [J/mol/K]
std::vector<LinearExperimentalDataPoint*> linear_data_points;
std::vector<NonlinearExperimentalDataPoint*> nonlinear_data_points;
phir_power alphar; // Temporary for now
std::vector <phi_BC*> alpha0; /// A vector of instances of the phi_BC classes for the ideal-gas Helmholtz energy contribution
EOSFitter();
double dA_dDelta(double log_tau, double log_delta, double delta, int i);
double d2A_dTau2(double log_tau, double log_delta, double delta, int i);
double dalphar_dDelta(double log_tau, double log_delta, double delta);
double d2alphar_dDelta2(double log_tau, double log_delta, double delta);
double d2alphar_dTau2(double log_tau, double log_delta, double delta);
double d2alpha0_dTau2(double tau, double delta);
double d2alphar_dDelta_dTau(double log_tau, double log_delta, double delta);
/// Set the coefficients in the EOS
void set_n(const std::vector<double> &n);
public:
double Tr, /// The reducing temperature for tau [K]
rhor, /// The reducing density for delta [mol/m^3]
R; /// The universal gas constant [J/mol/K]
std::vector<LinearExperimentalDataPoint*> linear_data_points;
std::vector<NonlinearExperimentalDataPoint*> nonlinear_data_points;
phir_power alphar; // Temporary for now
std::vector<phi_BC*> alpha0; /// A vector of instances of the phi_BC classes for the ideal-gas Helmholtz energy contribution
EOSFitter();
void solve_for_n(std::vector<double> &n, bool non_linear_terms_enabled);
double sum_squares(std::vector<double> &n, bool non_linear_terms_enabled);
double dA_dDelta(double log_tau, double log_delta, double delta, int i);
double d2A_dTau2(double log_tau, double log_delta, double delta, int i);
double dalphar_dDelta(double log_tau, double log_delta, double delta);
double d2alphar_dDelta2(double log_tau, double log_delta, double delta);
double d2alphar_dTau2(double log_tau, double log_delta, double delta);
double d2alpha0_dTau2(double tau, double delta);
double d2alphar_dDelta_dTau(double log_tau, double log_delta, double delta);
/// Set the coefficients in the EOS
void set_n(const std::vector<double>& n);
void solve_for_n(std::vector<double>& n, bool non_linear_terms_enabled);
double sum_squares(std::vector<double>& n, bool non_linear_terms_enabled);
};
class EOSFitterFixedForm : public EOSFitter
{
public:
EOSFitterFixedForm(double Tr, double rhor, double R)
{
this->Tr = Tr;
this->rhor = rhor;
this->R = R;
};
public:
EOSFitterFixedForm(double Tr, double rhor, double R) {
this->Tr = Tr;
this->rhor = rhor;
this->R = R;
};
};
#endif

View File

@@ -8,104 +8,101 @@ class EOSFitter;
#include "Fitter.h"
#include "DataTypes.h"
int main() {
double n[] = {0.0, 0.5586817e-3, 0.4982230e0, 0.2458698e-0, 0.8570145e-3, 0.4788584e-3, -0.1800808e-1, 0.2671641e0,
-0.4781652e1, 0.1423987e1, 0.3324062e0, -0.7485907e-2, 0.1017263e-3, -0.5184567e+0, -0.8692288e-1, 0.2057144e+0,
-0.5000457e-2, 0.4603262e-3, -0.3497836e-2, 0.6995038e-2, -0.1452184e-1, -0.1285458e-3};
double d[] = {0, 2, 1, 3, 6, 6, 1, 1, 2, 5, 2, 2, 4, 1, 4, 1, 2, 4, 1, 5, 3, 10};
double t[] = {0.0, -1.0 / 2.0, 0.0, 0.0, 0.0, 3.0 / 2.0, 3.0 / 2.0, 2.0, 2.0, 1.0, 3.0,
5.0, 1.0, 5.0, 5.0, 6.0, 10.0, 10.0, 10.0, 18.0, 22.0, 50.0};
double c[] = {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 3.0, 3.0, 3.0, 4.0};
int main()
{
double n[]={0.0, 0.5586817e-3, 0.4982230e0, 0.2458698e-0, 0.8570145e-3, 0.4788584e-3, -0.1800808e-1, 0.2671641e0, -0.4781652e1, 0.1423987e1, 0.3324062e0, -0.7485907e-2, 0.1017263e-3, -0.5184567e+0, -0.8692288e-1, 0.2057144e+0, -0.5000457e-2, 0.4603262e-3, -0.3497836e-2, 0.6995038e-2, -0.1452184e-1, -0.1285458e-3};
double d[]={0,2,1,3,6,6,1,1,2,5,2,2,4,1,4,1,2,4,1,5,3,10};
double t[]={0.0,-1.0/2.0,0.0,0.0,0.0,3.0/2.0,3.0/2.0,2.0,2.0,1.0,3.0,5.0,1.0,5.0,5.0,6.0,10.0,10.0,10.0,18.0,22.0,50.0};
double c[]={0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,2.0,2.0,2.0,2.0,2.0,2.0,3.0,3.0,3.0,4.0};
std::vector<double> nv(n, n + sizeof(n) / sizeof(double));
std::vector<double> nv(n,n+sizeof(n)/sizeof(double));
double mm = Props1SI("R134a", "molemass");
double rhoL, rhoV;
bool supercritical_T;
double mm = Props1SI("R134a","molemass");
double rhoL, rhoV;
bool supercritical_T;
double Tr = Props1SI("R134a", "Treduce");
EOSFitter* pEOS = new EOSFitterFixedForm(Props1SI("R134a", "Treduce"), Props1SI("R134a", "rhoreduce") / mm * 1000, 8.314471);
EOSFitter& EOS = *pEOS;
double Tr = Props1SI("R134a","Treduce");
EOSFitter* pEOS = new EOSFitterFixedForm(Props1SI("R134a","Treduce"),Props1SI("R134a","rhoreduce")/mm*1000,8.314471);
EOSFitter &EOS = *pEOS;
// ----------------------------
// Generate "experimental" data
// ----------------------------
for (double T = 250; T < 500; T+=10)
{
if (T < Tr)
{
rhoL = PropsSI("D","T",T,"Q",0,"R134a");
rhoV = PropsSI("D","T",T,"Q",1,"R134a");
supercritical_T = false;
}
else
{
rhoL = -1;
rhoV = -1;
supercritical_T = true;
}
// ----------------------------
// Generate "experimental" data
// ----------------------------
for (double T = 250; T < 500; T += 10) {
if (T < Tr) {
rhoL = PropsSI("D", "T", T, "Q", 0, "R134a");
rhoV = PropsSI("D", "T", T, "Q", 1, "R134a");
supercritical_T = false;
} else {
rhoL = -1;
rhoV = -1;
supercritical_T = true;
}
for (double rho = 1e-10; rho < 1200; rho *= 1.5)
{
if (!supercritical_T && (rho < rhoL && rho > rhoV)){ continue; }
double p = PropsSI("P","T",T,"D",rho,"R134a");
double rhobar = rho/mm*1000;
double cp = PropsSI("C","T",T,"D",rho,"R134a"); // [J/kg/K]; convert to J/mol/K by *mm/1000
double variance = 1; // TODO: change this
EOS.linear_data_points.push_back(new PressureDataPoint(pEOS,T,rho/mm*1000,p,variance));
EOS.nonlinear_data_points.push_back(new SpecificHeatCPDataPoint(pEOS,T,rho/mm*1000,cp*mm/1000,variance*100));
}
}
for (double rho = 1e-10; rho < 1200; rho *= 1.5) {
if (!supercritical_T && (rho < rhoL && rho > rhoV)) {
continue;
}
double p = PropsSI("P", "T", T, "D", rho, "R134a");
double rhobar = rho / mm * 1000;
double cp = PropsSI("C", "T", T, "D", rho, "R134a"); // [J/kg/K]; convert to J/mol/K by *mm/1000
double variance = 1; // TODO: change this
EOS.linear_data_points.push_back(new PressureDataPoint(pEOS, T, rho / mm * 1000, p, variance));
EOS.nonlinear_data_points.push_back(new SpecificHeatCPDataPoint(pEOS, T, rho / mm * 1000, cp * mm / 1000, variance * 100));
}
}
// Setup the EOS
EOS.alphar = phir_power(n,d,t,c,1,21,22);
// Setup the EOS
EOS.alphar = phir_power(n, d, t, c, 1, 21, 22);
static const double a0[]={
0.0, //[0]
-1.019535, //[1]
9.047135, //[2]
-1.629789, //[3]
-9.723916, //[4]
-3.927170 //[5]
};
static const double t0[]={
0.0, //[0]
0.0, //[1]
0.0, //[2]
0.0, //[3]
-1.0/2.0, //[4]
-3.0/4.0 //[5]
};
static const double a0[] = {
0.0, //[0]
-1.019535, //[1]
9.047135, //[2]
-1.629789, //[3]
-9.723916, //[4]
-3.927170 //[5]
};
static const double t0[] = {
0.0, //[0]
0.0, //[1]
0.0, //[2]
0.0, //[3]
-1.0 / 2.0, //[4]
-3.0 / 4.0 //[5]
};
// phi0=log(delta)+a0[1]+a0[2]*tau+a0[3]*log(tau)+a0[4]*pow(tau,-1.0/2.0)+a0[5]*pow(tau,-3.0/4.0);
EOS.alpha0.push_back(new phi0_lead(a0[1],a0[2]));
EOS.alpha0.push_back(new phi0_logtau(a0[3]));
EOS.alpha0.push_back(new phi0_power(a0,t0,4,5,6));
/*for (unsigned int i = 0; i < EOS.nonlinear_data_points.size();i++)
// phi0=log(delta)+a0[1]+a0[2]*tau+a0[3]*log(tau)+a0[4]*pow(tau,-1.0/2.0)+a0[5]*pow(tau,-3.0/4.0);
EOS.alpha0.push_back(new phi0_lead(a0[1], a0[2]));
EOS.alpha0.push_back(new phi0_logtau(a0[3]));
EOS.alpha0.push_back(new phi0_power(a0, t0, 4, 5, 6));
/*for (unsigned int i = 0; i < EOS.nonlinear_data_points.size();i++)
{
std::cout << EOS.nonlinear_data_points[i]->residual(nv) << std::endl;
}*/
// Set the coefficients in the preliminary EOS
EOS.set_n(nv);
std::cout << format("before fit x2 %g\n",EOS.sum_squares(nv,false));
// Solve for n without nonlinear terms to get an approximate solution
EOS.solve_for_n(nv, false);
std::cout << format("solved for n x2 %g\n",EOS.sum_squares(nv,false));
EOS.set_n(nv);
std::cout << format("applied n x2 %g\n",EOS.sum_squares(nv,true));
// Set the coefficients in the preliminary EOS
EOS.set_n(nv);
std::cout << format("before fit x2 %g\n", EOS.sum_squares(nv, false));
// Solve for n without nonlinear terms to get an approximate solution
EOS.solve_for_n(nv, false);
std::cout << format("solved for n x2 %g\n", EOS.sum_squares(nv, false));
EOS.set_n(nv);
std::cout << format("applied n x2 %g\n", EOS.sum_squares(nv, true));
for (int iter = 0; iter < 5; iter++)
{
EOS.set_n(nv);
for (int iter = 0; iter < 5; iter++) {
EOS.set_n(nv);
// Turn on the nonlinear terms and try again
EOS.solve_for_n(nv, true);
// Turn on the nonlinear terms and try again
EOS.solve_for_n(nv, true);
std::cout << nv[1] << " " << nv[2] << std::endl;
std::cout << nv[1] << " " << nv[2] << std::endl;
std::cout << format("iter: %d x2 %g\n",iter, EOS.sum_squares(nv,true));
}
std::cout << format("iter: %d x2 %g\n", iter, EOS.sum_squares(nv, true));
}
double rr = 0;
double rr = 0;
}
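For reference, the pseudo-experimental grid built by the driver above can be reproduced with CoolProp's Python bindings; the sketch below is an illustration only (note that PropsSI returns the molar mass in kg/mol, so the unit handling differs slightly from the Props1SI calls in the C++).

# Sketch: rebuild the (p, cp) pseudo-experimental grid for R134a as in the driver above.
from CoolProp.CoolProp import PropsSI

fluid = "R134a"
mm = PropsSI("molemass", fluid)        # molar mass [kg/mol]
Tred = PropsSI("T_reducing", fluid)    # reducing temperature [K]

points = []
for T in range(250, 500, 10):
    if T < Tred:                       # subcritical: bracket the two-phase dome
        rhoL = PropsSI("D", "T", T, "Q", 0, fluid)
        rhoV = PropsSI("D", "T", T, "Q", 1, fluid)
    else:
        rhoL = rhoV = -1.0             # supercritical: nothing to skip
    rho = 1e-10
    while rho < 1200.0:
        if not (rhoV < rho < rhoL):    # skip states inside the two-phase dome
            p = PropsSI("P", "T", T, "D", rho, fluid)    # pressure [Pa] -> linear data point
            cp = PropsSI("C", "T", T, "D", rho, fluid)   # cp [J/kg/K]  -> nonlinear data point
            points.append((T, rho / mm, p, cp * mm))     # molar density [mol/m^3], cp [J/mol/K]
        rho *= 1.5

print(len(points), "pseudo-experimental points")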

File diff suppressed because it is too large

View File

@@ -615,172 +615,106 @@
},
"TRANSPORT": {
"conductivity": {
"BibTeX": "Scalabrin-JPCRD-2006-CO2",
"BibTeX": "Huber-JPCRD-2016-CO2",
"critical": {
"hardcoded": "CarbonDioxideScalabrinJPCRD2006"
"GAMMA": 0.052,
"R0": 1.02,
"T_ref": 456.19,
"gamma": 1.239,
"nu": 0.63,
"qD": 2500000000.0,
"zeta0": 1.5e-10,
"type": "simplified_Olchowy_Sengers"
},
"dilute": {
"hardcoded": "none"
"hardcoded": "CarbonDioxideHuberJPCRD2016"
},
"residual": {
"A": [
0.0370597124660408,
0.0007696647124242399,
0.0075538113451464,
-0.032416436589336,
0.078894098855904,
0.0177830586854928,
0.10744756315137599,
0.31839746259479995,
-0.00082691726160072,
2.0846013855224798e-05
"B": [
0.0100128,
0.0560488,
-0.0811620,
0.0624337,
-0.0206336,
0.00253248,
0.00430829,
-0.0358563,
0.0671480,
-0.0522855,
0.0174571,
-0.00196414
],
"T_reducing": 304.1282,
"T_reducing_units": "K",
"d": [
1.0,
2.0,
3.0,
4.0,
5.0,
1.0,
6.0,
1.0,
2.0,
0.0,
3.0,
4.0,
5.0,
9.0,
0.0,
0.0
],
"gamma": [
0,
0,
0,
5,
5,
5,
5,
5,
5,
5
],
"l": [
0,
0,
0,
2,
2,
2,
2,
2,
2,
2
6.0
],
"rhomass_reducing": 467.6,
"rhomass_reducing_units": "kg/m^3",
"t": [
0.0,
0.0,
-1.5,
0.0,
0.0,
0.0,
0.0,
-1.0,
-1.5,
-1.5,
-1.5,
-3.5,
-5.5
-1.0,
-1.0,
-1.0,
-1.0,
-1.0
],
"type": "polynomial_and_exponential"
"type": "polynomial"
}
},
"viscosity": {
"BibTeX": "Fenghour-JPCRD-1998",
"_note": "sigma set to 1 nm in since sigma wrapped into constant in equation in Fenghour",
"BibTeX": "Laesecke-JPCRD-2017-CO2",
"dilute": {
"C": 1.5178953643112785e-07,
"_note": "Leading coefficient was back calculated from 1.00697e-6/(44.0098)**0.5 (using sigma = 1 nm)",
"a": [
0.235156,
-0.491266,
0.05211155,
0.05347906,
-0.01537102
],
"molar_mass": 0.0440098,
"molar_mass_units": "kg/mol",
"t": [
0,
1,
2,
3,
4
],
"type": "collision_integral"
"hardcoded": "CarbonDioxideLaeseckeJPCRD2017"
},
"epsilon_over_k": 251.196,
"initial_density": {
"type": "Rainwater-Friend",
"b": [
-19.572881,
219.73999,
-1015.3226,
2471.0125,
-3375.1717,
2491.6597,
-787.26086,
14.085455,
-0.34664158
],
"t": [
0.0,
-0.25,
-0.5,
-0.75,
-1.0,
-1.25,
-1.5,
-2.5,
-5.5
]
},
"epsilon_over_k": 200.760,
"epsilon_over_k_units": "K",
"higher_order": {
"T_reduce": 304.1282,
"T_reduce_units": "K",
"_note": "All of the coefficients for higher order viscosity contribution were converted to be in terms of delta and tau",
"a": [
1.9036541208525784e-06,
1.57384720473354e-05,
1.4207809578440784e-07,
6.79058431241662e-08,
-3.0732988514867565e-08
],
"d1": [
1,
2,
6,
8,
8
],
"d2": [
1
],
"f": [
0
],
"g": [
1
],
"gamma": [
0,
0,
0,
0,
0
],
"h": [
0
],
"l": [
1,
1,
1,
1,
0
],
"p": [
1
],
"q": [
0
],
"rhomolar_reduce": 10624.9,
"rhomolar_reduce_units": "mol/m^3",
"t1": [
0,
0,
3,
0,
1
],
"t2": [
0
],
"type": "modified_Batschinski_Hildebrand"
"hardcoded": "CarbonDioxideLaeseckeJPCRD2017"
},
"sigma_eta": 1e-09,
"sigma_eta": 0.378421e-09,
"sigma_eta_units": "m"
}
}
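The block above swaps the Scalabrin (2006) conductivity and Fenghour (1998) viscosity correlations for the hardcoded Huber (2016) and Laesecke (2017) models, with a simplified Olchowy-Sengers critical enhancement for conductivity. A quick sanity check that the updated JSON still evaluates, through the high-level interface ('V' and 'L' are CoolProp's output keys for viscosity and thermal conductivity):

# Sketch: evaluate the updated CO2 transport correlations at an arbitrary gas-phase state.
from CoolProp.CoolProp import PropsSI

T, p = 300.0, 1e6                            # 300 K, 1 MPa
eta = PropsSI("V", "T", T, "P", p, "CO2")    # viscosity [Pa*s]
lam = PropsSI("L", "T", T, "P", p, "CO2")    # thermal conductivity [W/m/K]
print(f"eta = {eta:.4e} Pa*s, lambda = {lam:.4e} W/m/K")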

View File

@@ -200,9 +200,9 @@
"T_units": "K",
"hmolar": 38057.35612816187,
"hmolar_units": "J/mol",
"p": 4571200,
"p": 4582800,
"p_units": "Pa",
"rhomolar": 3820.000000000001,
"rhomolar": 3920.000000000001,
"rhomolar_units": "mol/m^3",
"smolar": 87.99746047649523,
"smolar_units": "J/mol/K"
@@ -240,26 +240,26 @@
"acentric_units": "-",
"alpha0": [
{
"a1": 3.2489131288,
"a2": 2.6444166315,
"a1": -0.3946233253,
"a2": 2.4918910143,
"type": "IdealGasHelmholtzLead"
},
{
"a": 0.96,
"a": 3.0,
"type": "IdealGasHelmholtzLogTau"
},
{
"n": [
3.34,
18.6,
13.9,
4.86
1.34,
13.4,
17.4,
6.65
],
"t": [
0.2345032439615415,
2.540451809583366,
5.276322989134683,
10.35722660830141
0.4494645509262878,
2.3059485656218244,
4.299226139294927,
10.161807238333463
],
"type": "IdealGasHelmholtzPlanckEinstein"
}
@@ -291,73 +291,79 @@
1
],
"n": [
0.0536938,
1.60394,
-2.41244,
-0.474009,
0.203482,
-0.965616,
-0.344543,
0.353975,
-0.231373,
-0.0379099
0.0630928,
1.50365,
-2.37099,
-0.484886,
0.191843,
-0.835582,
-0.435929,
0.545607,
-0.209741,
-0.0387635
],
"t": [
1,
1.0,
0.29,
0.8,
1.14,
0.5,
2,
0.85,
1.185,
0.45,
2.28,
1.8,
1.5,
1,
3.36,
0.95
],
2.9,
0.93],
"type": "ResidualHelmholtzPower"
},
{
"beta": [
1.15,
1.61,
0.66,
2.72
0.63,
2.8,
0.5,
0.95,
0.23
],
"d": [
1,
1,
3,
3
3,
2
],
"epsilon": [
0.68,
0.97,
0.84,
0.66
0.684,
0.7,
0.77,
0.625,
0.42
],
"eta": [
0.82,
1.19,
0.79,
1.52
0.86,
0.85,
0.86,
1.53,
5.13
],
"gamma": [
1.08,
0.36,
0.09,
1.48
1.22,
0.32,
0.22,
1.94,
1.21
],
"n": [
0.867586,
-0.381827,
-0.108741,
-0.0976984
0.677674,
-0.137043,
-0.0852862,
-0.128085,
-0.00389381
],
"t": [
1,
2.5,
2.5,
1.5
1.05,
4.0,
2.33,
1.5,
1.0
],
"type": "ResidualHelmholtzGaussian"
}
@@ -557,4 +563,4 @@
"type": "Chung"
}
}
}
}

View File

@@ -302,7 +302,7 @@
"INCHI_KEY": "IAYPIBMASNFSPL-UHFFFAOYSA-N",
"INCHI_STRING": "InChI=1S/C2H4O/c1-2-3-1/h1-2H2",
"NAME": "EthyleneOxide",
"REFPROP_NAME": "ETYOXIDE",
"REFPROP_NAME": "ETHYLENEOXIDE",
"SMILES": "C1CO1"
},
"STATES": {

View File

@@ -268,6 +268,8 @@
"type": "IdealGasHelmholtzLogTau"
},
{
"Tcrit": 33.145,
"Tcrit_units": "K",
"n": [
1.616,
-0.4117,
@@ -275,14 +277,14 @@
0.758,
1.217
],
"t": [
16.0205159149,
22.6580178006,
60.0090511389,
74.9434303817,
206.9392065168
],
"type": "IdealGasHelmholtzPlanckEinstein"
"type": "IdealGasHelmholtzPlanckEinsteinFunctionT",
"v": [
531,
751,
1989,
2484,
6859
]
}
],
"alphar": [

View File

@@ -268,6 +268,8 @@
"type": "IdealGasHelmholtzLogTau"
},
{
"Tcrit": 190.564,
"Tcrit_units": "K",
"n": [
0.008449,
4.6942,
@@ -275,14 +277,14 @@
1.6572,
1.4115
],
"t": [
3.400432401,
10.26951575,
20.43932747,
29.93744884,
79.13351945
],
"type": "IdealGasHelmholtzPlanckEinstein"
"type": "IdealGasHelmholtzPlanckEinsteinFunctionT",
"v": [
648,
1957,
3895,
5705,
15080
]
},
{
"a1": -12.8829893867948,

View File

@@ -221,7 +221,7 @@
"hmolar_units": "J/mol",
"p": 3395800,
"p_units": "Pa",
"rhomolar": 11183.90146458062,
"rhomolar": 11183.901464580624,
"rhomolar_units": "mol/m^3",
"smolar": 118.07697126543378,
"smolar_units": "J/mol/K"
@@ -281,13 +281,15 @@
"type": "IdealGasHelmholtzPower"
},
{
"Tcrit": 126.192,
"Tcrit_units": "K",
"n": [
1.012941
],
"t": [
26.65788
"v": [
3364.011
],
"type": "IdealGasHelmholtzPlanckEinstein"
"type": "IdealGasHelmholtzPlanckEinsteinFunctionT"
}
],
"alphar": [

View File

@@ -238,19 +238,21 @@
"type": "IdealGasHelmholtzLogTau"
},
{
"Tcrit": 33.22,
"Tcrit_units": "K",
"n": [
2.54151,
-2.3661,
1.00365,
1.22447
],
"t": [
25.7676098736,
43.4677904877,
66.044551475,
209.7531607465
],
"type": "IdealGasHelmholtzPlanckEinstein"
"type": "IdealGasHelmholtzPlanckEinsteinFunctionT",
"v": [
856,
1444,
2194,
6968
]
}
],
"alphar": [

View File

@@ -272,6 +272,8 @@
"type": "IdealGasHelmholtzLogTau"
},
{
"Tcrit": 32.938,
"Tcrit_units": "K",
"n": [
4.30256,
13.0289,
@@ -281,16 +283,16 @@
0.993973,
0.536078
],
"t": [
15.14967511472,
25.0925982148,
29.4735563787,
35.4059141417,
40.724998482,
163.7925799988,
309.2173173842
],
"type": "IdealGasHelmholtzPlanckEinstein"
"type": "IdealGasHelmholtzPlanckEinsteinFunctionT",
"v": [
499,
826.5,
970.8,
1166.2,
1341.4,
5395,
10185
]
}
],
"alphar": [

View File

@@ -1,27 +1,5 @@
{
"ANCILLARIES": {
"hL": {
"A": [
-59081.90149961518,
-56.71632899355515,
3.4601929994947436,
-0.022382015166099334,
7.983388457665615e-05,
-1.6902202857616328e-07,
1.9661600933963034e-10,
-9.735546908737005e-14
],
"B": [
1,
-0.0022550755060008
],
"Tmax": 439.5,
"Tmin": 195.15,
"_note": "coefficients are in increasing order; input in K, output in J/mol; value is enthalpy minus hs_anchor enthalpy",
"max_abs_error": 271.53420989192637,
"max_abs_error_units": "J/mol",
"type": "rational_polynomial"
},
"hLV": {
"A": [
-5135.181894492507,
@@ -405,7 +383,7 @@
"INCHI_KEY": "LDTMPQQAWUMPKS-UHFFFAOYSA-N",
"INCHI_STRING": "InChI=1S/C3H2ClF3/c4-2-1-3(5,6)7/h1-2H",
"NAME": "R1233zd(E)",
"REFPROP_NAME": "R1233ZD",
"REFPROP_NAME": "R1233ZDE",
"SMILES": "C(=C/Cl)\\C(F)(F)F"
},
"STATES": {

View File

@@ -183,7 +183,7 @@
"EOS": [
{
"BibTeX_CP0": "",
"BibTeX_EOS": "Thol-IJT-2016-R1234zeE,Thol-REFPROP-9.1",
"BibTeX_EOS": "Thol-IJT-2016-R1234zeE",
"STATES": {
"hs_anchor": {
"T": 420.772,
@@ -198,7 +198,7 @@
"smolar_units": "J/mol/K"
},
"reducing": {
"T": 382.52,
"T": 382.513,
"T_units": "K",
"hmolar": 45109.50216279902,
"hmolar_units": "J/mol",
@@ -627,7 +627,7 @@
"INCHI_KEY": "CDOOAUSHHFGWSA-OWOJBTEDSA-N",
"INCHI_STRING": "InChI=1S/C3H2F4/c4-2-1-3(5,6)7/h1-2H/b2-1+",
"NAME": "R1234ze(E)",
"REFPROP_NAME": "R1234ZE",
"REFPROP_NAME": "R1234ZEE",
"SMILES": "C(=C/F)\\C(F)(F)F"
},
"STATES": {
@@ -790,4 +790,4 @@
}
]
}
}
}

View File

@@ -598,7 +598,8 @@
"INFO": {
"2DPNG_URL": "http://www.chemspider.com/ImagesHandler.ashx?id=9291157",
"ALIASES": [
"R1234ZE(Z)"
"R1234ZE(Z)",
"R1234ZEZ"
],
"CAS": "29118-25-0",
"CHEMSPIDER_ID": 9291157,
@@ -606,7 +607,7 @@
"INCHI_KEY": "CDOOAUSHHFGWSA-UPHRSURJSA-N",
"INCHI_STRING": "InChI=1S/C3H2F4/c4-2-1-3(5,6)7/h1-2H/b2-1-",
"NAME": "R1234ze(Z)",
"REFPROP_NAME": "N/A",
"REFPROP_NAME": "R1234ZEZ",
"SMILES": "F[C@H]=CC(F)(F)F"
},
"STATES": {
@@ -647,4 +648,4 @@
"smolar_units": "J/mol/K"
}
}
}
}

View File

@@ -270,7 +270,7 @@
"INFO": {
"2DPNG_URL": "http://www.chemspider.com/ImagesHandler.ashx?id=12151",
"ALIASES": [
"R1234ZF"
"R1243ZF"
],
"CAS": "677-21-4",
"CHEMSPIDER_ID": 12151,
@@ -330,4 +330,4 @@
"smolar_units": "J/mol/K"
}
}
}
}

View File

@@ -84,7 +84,7 @@
91.75937442449276,
-17637.12637362363
],
"reducing_value": 3715.9999999999995,
"reducing_value": 3716.16644216198,
"t": [
0.643,
0.695,
@@ -110,7 +110,7 @@
-158549.1553855615,
335529.06839972973
],
"reducing_value": 3715.9999999999995,
"reducing_value": 3716.16644216198,
"t": [
0.463,
1.088,
@@ -204,7 +204,7 @@
"hmolar_units": "J/mol",
"p": 3420000,
"p_units": "Pa",
"rhomolar": 3716,
"rhomolar": 3716.16644216198,
"rhomolar_units": "mol/m^3",
"smolar": 246.34416839114922,
"smolar_units": "J/mol/K"
@@ -432,7 +432,7 @@
"hmolar_units": "J/mol",
"p": 3420000.0,
"p_units": "Pa",
"rhomolar": 3716.0,
"rhomolar": 3716.16644216198,
"rhomolar_units": "mol/m^3",
"smolar": 246.32568384647226,
"smolar_units": "J/mol/K"

View File

@@ -436,7 +436,7 @@
6
],
"n": [
0.0125335479355233,
0.012533547935523,
7.8957634722828,
-8.7803203303561,
0.31802509345418,

View File

@@ -96,7 +96,7 @@
"T_r": 507.82,
"Tmax": 507.82,
"Tmin": 177.83,
"description": "rho'' = rhoc*exp(Tc/T*sum(n_i*theta^t_i))",
"description": "rho'' = rhoc*exp(sum(n_i*theta^t_i))",
"max_abserror_percentage": 1.0104195769602642,
"n": [
-3.4056,
@@ -116,7 +116,7 @@
30.0
],
"type": "rhoV",
"using_tau_r": true
"using_tau_r": false
},
"sL": {
"A": [

View File

@@ -292,9 +292,10 @@ def combine_json(root_dir):
def generate():
import shutil
shutil.copy2(os.path.join(repo_root_path, 'externals', 'Catch', 'single_include', 'catch.hpp'), os.path.join(repo_root_path, 'include', 'catch.hpp'))
#shutil.copy2(os.path.join(repo_root_path, 'externals','REFPROP-headers','REFPROP_lib.h'),os.path.join(repo_root_path,'include','REFPROP_lib.h'))
# import shutil
# shutil.copy2(
# os.path.join(repo_root_path, 'externals', 'REFPROP-headers', 'REFPROP_lib.h'),
# os.path.join(repo_root_path, 'include', 'REFPROP_lib.h'))
version_to_file(root_dir=repo_root_path)
gitrev_to_file(root_dir=repo_root_path)

View File

@@ -3,6 +3,32 @@ import numpy as np
from .DataObjects import PureData
class LiquidSodium(PureData):
"""
Heat transfer fluid Liquid Sodium
"""
def __init__(self):
PureData.__init__(self)
self.density.source = self.density.SOURCE_DATA
self.specific_heat.source = self.specific_heat.SOURCE_DATA
self.conductivity.source = self.conductivity.SOURCE_DATA
self.viscosity.source = self.viscosity.SOURCE_DATA
self.saturation_pressure.source = self.saturation_pressure.SOURCE_DATA
self.temperature.data = np.array([400.0,500.0,600.0,700.0,800.0,900.0,1000.0,1100.0,1200.0,1300.0,1400.0,1500.0,1600.0,1700.0,1800.0,1900.0,2000.0,2100.0,2200.0,2300.0,2400.0,2500.0]) # kelvin
self.density.data = np.array([919.0,897.0,874.0,852.0,828.0,805.0,781.0,756.0,732.0,706.0,680.0,653.0,626.0,597.0,568.0,537.0,504.0,469.0,431.0,387.0,335.0,239.0]) # kg/m3
self.specific_heat.data = np.array([1372.0,1334.0,1301.0,1277.0,1260.0,1252.0,1252.0,np.NAN,np.NAN,np.NAN,np.NAN,np.NAN,np.NAN,np.NAN,np.NAN,np.NAN,np.NAN,np.NAN,np.NAN,np.NAN,np.NAN,np.NAN]) # J/kg-K
self.conductivity.data = np.array([87.22, 80.09, 73.70, 68.00, 62.90, 58.34, 54.24, 50.54, 47.16, 44.03, 41.08, 38.24, 35.44, 32.61, 29.68, 26.57, 23.21, 19.54, 15.48, 10.97, 5.92, 0.27]) # W/m-K
self.viscosity.data = np.array([5.99, 4.15, 3.21, 2.64, 2.27, 2.01, 1.81, 1.66, 1.53, 1.43, 1.35, 1.28, 1.22, 1.17, 1.12, 1.08, 1.04, 1.01, 0.98, 0.95, 0.92, np.NAN]) * 1.E-4 # Pa-s
self.saturation_pressure.data = np.array([1.8E-10, 8.99E-8, 5.57E-6, 1.05E-4, 9.41E-4, 5.147E-3, 1.995E-2, 6.016E-2, 0.1504, 0.3257, 0.6298, 1.113, 1.828, 2.828, 4.161, 5.870, 7.991, 10.55, 13.57, 17.06, 21.03, 25.47]) * 1000000. # MPa
self.Tmin = np.min(self.temperature.data)
self.Tmax = np.max(self.temperature.data)
self.TminPsat = self.Tmin
self.name = "LiqNa"
self.description = "LiqNa"
self.reference = "LiqNa"
self.reshapeAll()
class TherminolD12(PureData):
"""
Heat transfer fluid Therminol D12 by Solutia

View File

@@ -266,7 +266,7 @@ class SecCoolSolutionData(DigitalData):
for j in range(c):
nu = np.NAN
try:
nu = np.float(res[i, j])
nu = float(res[i, j])
if i == 0: nu *= 1e-2 # Percent to fraction
if j == 0: nu += 273.15 # Celsius to Kelvin
if not self.allowNegativeData and nu < 0:
@@ -493,7 +493,7 @@ class SecCoolIceData(SecCoolSolutionData):
for j in range(c):
nu = np.NAN
try:
nu = np.float(res[i, j])
nu = float(res[i, j])
if i == 0: nu *= 1e-2 # Percent to fraction
if not self.allowNegativeData and nu < 0:
nu = np.NAN # invalid entries
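The np.float -> float substitutions are a NumPy-compatibility fix rather than a behavioural change: np.float was only ever an alias for the Python builtin, was deprecated in NumPy 1.20 and removed in 1.24. A minimal sketch of the same parsing pattern with the builtin:

# Sketch of the cell-parsing pattern used above, with the builtin float replacing np.float.
import numpy as np

cell = "12.3"
try:
    nu = float(cell)      # np.float(cell) raises AttributeError on NumPy >= 1.24
except (TypeError, ValueError):
    nu = np.nan           # invalid entries fall back to NaN, as in the loops above
print(nu)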

View File

@@ -393,7 +393,7 @@ class SolutionDataWriter(object):
# Let the base class default method raise the TypeError
return json.JSONEncoder.default(self, obj)
dump = json.dumps(jobj, indent=2, sort_keys=True, cls=RoundingEncoder)
# print dump
hashes = self.load_hashes()

View File

@@ -0,0 +1,105 @@
{
"T_freeze": {
"NRMS": null,
"coeffs": "null",
"type": "notdefined"
},
"Tbase": 1450.0,
"Tmax": 2500.0,
"Tmin": 400.0,
"TminPsat": 400.0,
"conductivity": {
"NRMS": 2.703067851958419e-05,
"coeffs": [
[
39.652357954542296
],
[
-0.0283520543358315
],
[
3.7129446640339625e-06
],
[
-1.1838256708252004e-08
]
],
"type": "polynomial"
},
"density": {
"NRMS": 0.012510245084014686,
"coeffs": [
[
672.0767045454262
],
[
-0.2524942259559509
],
[
-6.885234330888945e-05
],
[
-5.679564524241343e-08
]
],
"type": "polynomial"
},
"description": "LiqNa",
"mass2input": {
"NRMS": null,
"coeffs": "null",
"type": "notdefined"
},
"mole2input": {
"NRMS": null,
"coeffs": "null",
"type": "notdefined"
},
"name": "LiqNa",
"reference": "LiqNa",
"saturation_pressure": {
"NRMS": 0.0007472008004592615,
"coeffs": [
-11758.328842732395,
-14.056138686870915,
-21.815692362749207
],
"type": "exponential"
},
"specific_heat": {
"NRMS": 0.0027059884319416837,
"coeffs": [
[
1376.6592261522192
],
[
0.5205059522205394
],
[
0.0005791666665749348
],
[
8.33333333449815e-08
]
],
"type": "polynomial"
},
"viscosity": {
"NRMS": 0.006087518657847591,
"coeffs": [
1477.5936204092418,
217.0383167635442,
9.835135835300713
],
"type": "exponential"
},
"volume2input": {
"NRMS": null,
"coeffs": "null",
"type": "notdefined"
},
"xbase": 0.0,
"xid": "pure",
"xmax": 1.0,
"xmin": 0.0
}
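The coefficients above come from fitting the LiquidSodium tables added earlier in this diff. A quick consistency check, assuming (as the Tbase/xbase entries suggest) that the incompressible polynomials are expanded about Tbase:

# Sketch: evaluate the fitted LiqNa density polynomial at 1000 K and compare with the
# tabulated 781 kg/m^3 from the LiquidSodium data class (assumes expansion about Tbase).
coeffs = [672.0767045454262, -0.2524942259559509,
          -6.885234330888945e-05, -5.679564524241343e-08]
Tbase = 1450.0

def rho(T):
    dT = T - Tbase
    return sum(c * dT**i for i, c in enumerate(coeffs))

print(rho(1000.0))   # ~777 kg/m^3, within the quoted ~1.25% NRMS of the table value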

View File

@@ -27,7 +27,7 @@
"n" : [-0.013073, 0.018259,0.0000081299,0.0078496],
"t" : [7.4, 0.35, 10.0, 5.3],
"d" : [1, 3, 11, 2],
"l" : [1, 1, 3, 2]
"l" : [1, 1, 2, 3]
},
{
"Name" : "GeneralizedAirComponents",

File diff suppressed because it is too large

File diff suppressed because it is too large

Some files were not shown because too many files have changed in this diff