# -*- python -*-
# ex: set syntax=python:

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
|
|
|
|
####### STATUS TARGETS
|
|
|
|
# 'status' is a list of Status Targets. The results of each build will be
|
|
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
|
|
# including web pages, email senders, and IRC bots.
|
|
|
|
c['status'] = []
|
|
|
|
from buildbot.status import html
|
|
from buildbot.status.web import authz, auth
|
|
from buildbot_private import web_auth
|
|
|
|
|
|
authz_cfg=authz.Authz(
|
|
# change any of these to True to enable; see the manual for more
|
|
# options
|
|
auth=auth.BasicAuth([(web_auth['user'], web_auth['pass'])]),
|
|
gracefulShutdown = False,
|
|
forceBuild = 'auth', # use this to test your slave once it is set up
|
|
forceAllBuilds = 'auth',
|
|
pingBuilder = False,
|
|
stopBuild = 'auth',
|
|
stopAllBuilds = 'auth',
|
|
cancelPendingBuild = 'auth',
|
|
)
|
|
c['status'].append(html.WebStatus(http_port=8010, authz=authz_cfg))
|
|
|
|
from buildbot.status import mail
|
|
from buildbot_private import email_auth
|
|
mn = mail.MailNotifier(fromaddr="buildbot@coolprop.dreamhosters.com",
|
|
sendToInterestedUsers=False,
|
|
mode=('problem'),
|
|
extraRecipients=["jowr@ipu.dk", "ian.h.bell@gmail.com"],
|
|
#useTls=True,
|
|
relayhost="smtp.dreamhost.com",
|
|
smtpPort=587, smtpUser=email_auth['user'],
|
|
smtpPassword=email_auth['pass'])
|
|
|
|
c['status'].append(mn)
|
|
|
|
|
|
####### HOUSEKEEPING AND DISK SPACE
|
|
|
|
# By default, buildbot stores information on all previous builds, but we
# rarely consult this data for debugging purposes - at least not the
# historical build logs. These settings help to save some disk space on
# the worker (slave) machines and on the master.
|
|
# http://docs.buildbot.net/current/manual/cfg-global.html#data-lifetime
|
|
c['changeHorizon'] = 50 # Number of change records on master, default 200
|
|
c['buildHorizon'] = 25 # Number of builds for each builder which should be kept on disk, default 100
|
|
c['eventHorizon'] = 12 # Number of events to keep (mostly connections and disconnections of slaves), default 50
|
|
c['logHorizon'] = 25 # Number of builds for which logs should be maintained, default 40
|
|
c['buildCacheSize'] = 6 # Number of builds for each builder which are cached in memory, default 15
|
|
# This number should be larger than the number of builds required for commonly-used status
|
|
# displays (the waterfall or grid views), so that those displays do not miss the cache on a refresh.
|
|
|
|
|
|
|
|
|
|
####### BUILDSLAVES
|
|
|
|
# The 'slaves' list defines the set of recognized buildslaves. Each element is
|
|
# a BuildSlave object, specifying a unique slave name and password. The same
|
|
# slave name and password must be configured on the slave.
|
|
from buildbot.buildslave import BuildSlave
|
|
from buildbot_private import pass_dict
|
|
slave_commons = dict(notify_on_missing=["ian.h.bell@gmail.com", "jowr@ipu.dk"], missing_timeout=900)
|
|
c['slaves'] = [
|
|
# BuildSlave("linux-slave", pass_dict["linux-slave"], **slave_commons),
|
|
BuildSlave("OSX-slave", pass_dict["OSX-slave"], max_builds = 1, **slave_commons),
|
|
# BuildSlave("windows-slave", pass_dict["windows-slave"], **slave_commons),
|
|
BuildSlave("OSX-IPU-worker", pass_dict["OSX-IPU-worker"], max_builds = 1, **slave_commons),
|
|
BuildSlave("LIN-IPU-worker", pass_dict["LIN-IPU-worker"], max_builds = 1, **slave_commons),
|
|
BuildSlave("WIN-IPU-worker", pass_dict["WIN-IPU-worker"], max_builds = 1, **slave_commons)
|
|
]
|
|
|
|
# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
|
|
# This must match the value configured into the buildslaves (with their
|
|
# --master option)
|
|
c['slavePortnum'] = 9989
|
|
|
|
####### CHANGESOURCES
|
|
|
|
# the 'change_source' setting tells the buildmaster how it should find out
|
|
# about source code changes. Here we point to the CoolProp source code.
|
|
|
|
from buildbot.changes.gitpoller import GitPoller
|
|
c['change_source'] = []
|
|
c['change_source'].append(GitPoller(
|
|
'https://github.com/CoolProp/CoolProp',
|
|
workdir='gitpoller-workdir', branch='master',
|
|
        pollinterval=300)) # Poll the repository for new commits every 300 seconds
|
|
|
|
####### BUILDERS
|
|
|
|
# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
|
|
# what steps, and which slaves can execute them. Note that any particular build will
|
|
# only take place on one slave.
|
|
|
|
from buildbot.process.factory import BuildFactory
|
|
from buildbot.steps.source.git import Git
|
|
from buildbot.steps.shell import ShellCommand
|
|
from buildbot.steps.master import MasterShellCommand
|
|
from buildbot.steps.slave import MakeDirectory, RemoveDirectory, CopyDirectory
|
|
from buildbot.steps.transfer import DirectoryUpload, FileDownload
|
|
from buildbot.process import properties
|
|
|
|
# Custom identifiers for platform, bitness and Python version
|
|
platformID = {
|
|
"windows" : 1,
|
|
"osx" : 2,
|
|
"linux" : 3
|
|
}
|
|
|
|
bitnessID = {
|
|
"32bit" : 1,
|
|
"64bit" : 2
|
|
}
|
|
|
|
pythonID = {
|
|
"py27" : 1,
|
|
"py32" : 2,
|
|
"py34" : 3,
|
|
"py35" : 4,
|
|
"py36" : 5,
|
|
"py37" : 6,
|
|
"py38" : 7,
|
|
}
|
|
|
|
# A couple of functions to make it simpler to use the shorthand notation for the
|
|
# different platform, bitness, python combinations. These functions are candidates
|
|
# for the most inefficient code written in 2014, but they make the rest of this
|
|
# config file so much nicer and shorter.
|
|
def getIDtuple(testID):
|
|
platformTEST = None
|
|
bitnessTEST = None
|
|
pythonTEST = None
|
|
#
|
|
TEST = str(testID)
|
|
i = len(TEST)
|
|
if i < 1: raise ValueError("The given testID \"{0}\" is too short.".format(TEST))
|
|
if i >= 1: platformTEST = TEST[0]
|
|
if i >= 2: bitnessTEST = TEST[1]
|
|
if i >= 3: pythonTEST = TEST[2]
|
|
if i > 3: raise ValueError("The given testID \"{0}\" is too long.".format(TEST))
|
|
#
|
|
return platformTEST, bitnessTEST, pythonTEST
|
|
|
|
def getIDstr(platform=None, bitness=None, python=None):
|
|
if platform is None and bitness is None and python is None:
|
|
raise ValueError("All given parameters are None, this does not work.")
|
|
#
|
|
def getIDstr_helper(input, alt):
|
|
if input is None: return str(0)
|
|
if str(input) == "0": return str(0)
|
|
if input in alt: return str(alt[input])
|
|
return str(int(input))
|
|
#
|
|
platform = getIDstr_helper(platform,platformID)
|
|
bitness = getIDstr_helper(bitness,bitnessID)
|
|
python = getIDstr_helper(python,pythonID)
|
|
#
|
|
return platform+bitness+python
|
|
|
|
def compareID(in1, in2):
|
|
"""Takes int or str data and compares it to the other input.
|
|
0 can be used ad a joker, which always compares to True.
|
|
"""
|
|
if in1 is None or in2 is None : return True
|
|
if int(in1) == 0 or int(in2) == 0 : return True
|
|
return int(in1) == int(in2)
|
|
|
|
def checkID(inID, teID=None, strict=True):
|
|
if inID is None: return False
|
|
platformIN, bitnessIN, pythonIN = getIDtuple(testID=inID)
|
|
platformTE, bitnessTE, pythonTE = getIDtuple(testID=teID)
|
|
#
|
|
result = compareID(platformTE, platformIN)
|
|
if not result and strict: raise ValueError("The given input for the platform \"{0}\" did not match \"{1}\"".format(platformIN,platformTE))
|
|
result = compareID(bitnessTE, bitnessIN) and result
|
|
if not result and strict: raise ValueError("The given input for the bitness \"{0}\" did not match \"{1}\"".format(bitnessIN,bitnessTE))
|
|
result = compareID(pythonTE, pythonIN) and result
|
|
if not result and strict: raise ValueError("The given input for the Python version \"{0}\" did not match \"{1}\"".format(pythonIN,pythonTE))
|
|
return result
|
|
|
|
def getFromDict(inDict, inID):
|
|
res = inDict.get(inID)
|
|
if res is None: # Try some more
|
|
for key in inDict:
|
|
if checkID(inID, key, strict=False):
|
|
if res is not None:
|
|
raise ValueError("Already found a matching entry \"{0}\" for \"{1}\", please specify more parameters.".format(res,inID))
|
|
else:
|
|
res = inDict[key]
|
|
if res is None:
|
|
raise ValueError("Could not find a matching entry for \"{0}\".".format(inID))
|
|
else:
|
|
return res
|
|
|
|
def getKeyFromVal(dic,val):
|
|
for key in dic:
|
|
if str(dic[key])==str(val):
|
|
return key
|
|
return None
|
|
|
|
def getJobName(inID):
|
|
platform, bitness, python = getIDtuple(inID)
|
|
platform = getKeyFromVal(platformID, platform)
|
|
bitness = getKeyFromVal( bitnessID, bitness )
|
|
python = getKeyFromVal( pythonID, python )
|
|
res = []
|
|
if platform is not None: res.append(platform)
|
|
if bitness is not None: res.append(bitness)
|
|
if python is not None: res.append(python)
|
|
return "-".join(res)
|
|
|
|
|
|
class PythonSlaveConfig(object):
|
|
def __init__(self,name):
|
|
self.name = name
|
|
self.pyact = {}
|
|
self.pyenv = {}
|
|
self.pyins = {}
|
|
|
|
def getPyact(self, inID):
|
|
return getFromDict(self.pyact, inID)
|
|
|
|
def getPyenv(self, inID):
|
|
return getFromDict(self.pyenv, inID)
|
|
|
|
def getPyins(self, inID):
|
|
return getFromDict(self.pyins, inID)
|
|
|
|
def getIDs(self):
|
|
IDs = []
|
|
for pl in platformID:
|
|
for bi in bitnessID:
|
|
for py in pythonID:
|
|
tmpID = getIDstr(pl, bi, py)
|
|
try:
|
|
#print "Testing for {0} in act".format(tmpID)
|
|
self.getPyact(tmpID)
|
|
#print "Testing for {0} in env".format(tmpID)
|
|
self.getPyenv(tmpID)
|
|
#print "Testing for {0} in ins".format(tmpID)
|
|
self.getPyins(tmpID)
|
|
#print "Appending {0}".format(tmpID)
|
|
IDs.append(tmpID)
|
|
except:
|
|
pass
|
|
return IDs
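# A minimal sketch of how a PythonSlaveConfig is meant to be filled in (the real
# worker configurations follow further down; the name here is hypothetical).
# Entries are keyed by partial IDs, and getIDs() only reports combinations for
# which an activator, an environment and installer options can all be resolved:
#
#   example_worker = PythonSlaveConfig("EXAMPLE-worker")
#   example_worker.pyact[getIDstr("linux", "64bit", 0)]  = "source activate"
#   example_worker.pyenv[getIDstr("linux", 0, "py37")]   = "CoolProp37"
#   example_worker.pyins[getIDstr("linux", 0, 0)]        = ['bdist_wheel']
#   example_worker.getIDs()   # -> ['326']  (linux / 64bit / py37)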
|
|
|
|
|
|
import os
|
|
|
|
@properties.renderer
|
|
def _branch(props): return props.getProperty('branch')
|
|
|
|
def _master_loc_rel(git_branch):
|
|
"""
|
|
If building from release branch, upload to public_html/release
|
|
If building from master branch, upload to normal public_html/binaries folder
|
|
If another branch, upload to public_html/unstable
|
|
"""
|
|
if git_branch == 'release': return os.path.join('public_html','release')
|
|
elif git_branch == 'master': return os.path.join('public_html','binaries')
|
|
else: return os.path.join('public_html','unstable')
|
|
|
|
@properties.renderer
|
|
def master_loc_rel(props):
|
|
"""
|
|
If building from release branch, upload to public_html/release
|
|
If building from master branch, upload to normal public_html/binaries folder
|
|
If another branch, upload to public_html/unstable
|
|
"""
|
|
return _master_loc_rel(props.getProperty('branch'))
|
|
|
|
def _master_loc_abs(git_branch):
|
|
server_dir = '/home/coolprop/buildbot/server-master'
|
|
server_dir = os.path.join(server_dir, _master_loc_rel(git_branch))
|
|
return os.path.abspath(server_dir)
|
|
|
|
@properties.renderer
|
|
def master_loc_abs(props):
|
|
return _master_loc_abs(props.getProperty('branch'))
|
|
|
|
def _master_loc_uri(git_branch):
|
|
server_uri = 'coolprop@coolprop.dreamhosters.com'
|
|
server_dir = _master_loc_abs(git_branch)
|
|
return "{0}:{1}".format(server_uri, server_dir)
|
|
|
|
@properties.renderer
|
|
def master_loc_uri(props):
|
|
return _master_loc_uri(props.getProperty('branch'))
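# In short, the branch decides where build artifacts end up on the master
# (as a relative path, an absolute path, or an rsync URI respectively), e.g.:
#   _master_loc_rel('release') -> 'public_html/release'
#   _master_loc_rel('master')  -> 'public_html/binaries'
#   _master_loc_rel('feature') -> 'public_html/unstable'   (any other branch)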
|
|
|
|
############# Upload folder permissions #######################
|
|
def fixPermissions(factory):
|
|
factory.addStep(MasterShellCommand(command = '${HOME}/scripts/binPerms.sh'))
|
|
|
|
from buildbot.plugins import util
|
|
upload_lock = util.MasterLock("upload_lock")
|
|
|
|
def upload_command(factory, slavesrc, masterdest=None, branch=None, platform=None, buildID=None, method=None):
|
|
"""Upload files to the master server. Avoids buildbot upload on platforms other than Windows."""
|
|
if (masterdest is not None and branch is not None):
|
|
raise ValueError("Unknown target, specify either \"masterdest\" or \"branch\".")
|
|
if (platform is not None and buildID is not None):
|
|
raise ValueError("Unknown target, specify either \"platform\" or \"buildID\".")
|
|
#
|
|
if masterdest is not None: target = masterdest
|
|
elif branch is not None: target = _master_loc_rel(branch)
|
|
else: target = master_loc_rel
|
|
#
|
|
if method is None:
|
|
if platform == 'windows' or checkID(buildID, teID=100, strict=False): method='buildbot' # method='winscp'
|
|
elif platform == 'osx' or checkID(buildID, teID=200, strict=False): method='rsync'
|
|
elif platform == 'linux' or checkID(buildID, teID=300, strict=False): method='buildbot'
|
|
else: method='buildbot'
|
|
|
|
#
|
|
if method=='buildbot':
|
|
factory.addStep(DirectoryUpload(slavesrc=slavesrc, masterdest=target, locks=[upload_lock.access('exclusive')]))
|
|
elif method=='winscp':
|
|
if masterdest is not None: target = masterdest
|
|
elif branch is not None: target = _master_loc_abs(branch)
|
|
else: target = master_loc_abs
|
|
winscpCommand = ['winscp.com', '/ini=nul', '/script=C:/CoolProp-windows-DTU-slave/_ssh/winscp.txt', '/parameter', slavesrc, target]
|
|
factory.addStep(ShellCommand(command=winscpCommand, haltOnFailure = True))
|
|
elif method=='rsync':
|
|
if masterdest is not None: target = masterdest
|
|
elif branch is not None: target = _master_loc_uri(branch)
|
|
else: target = master_loc_uri
|
|
rsyncCommand = ['rsync', '-aP', '--no-perms', '--no-owner', '--no-group', '--stats', '{0}/'.format(slavesrc), target]
|
|
factory.addStep(ShellCommand(command=rsyncCommand, haltOnFailure = True))
|
|
else:
|
|
raise ValueError("Upload method undefined - aborting.")
|
|
#
|
|
fixPermissions(factory)
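# Typical use inside a builder factory (a sketch; "some_factory" is hypothetical):
#   upload_command(some_factory, "install_root", platform='osx')   # -> rsync
#   upload_command(some_factory, "install_root",
#                  buildID=getIDstr("windows", "64bit", 0))        # -> buildbot transfer
# The destination defaults to the branch-dependent location rendered above.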
|
|
|
|
|
|
# A centralised method to provide the objects with some presets
|
|
@properties.renderer
|
|
def _git_mode(props):
|
|
""" If we are doing a full clean, this will tell it to clobber all the files """
|
|
if props.getProperty('fullclean',default=False) or props.getProperty('branch',default='master') == 'release':
|
|
return 'full'
|
|
else:
|
|
return 'incremental'
|
|
|
|
def getBaseFactory():
|
|
factory = BuildFactory()
|
|
factory.addStep(Git(
|
|
repourl= 'git://github.com/CoolProp/CoolProp',
|
|
mode = _git_mode,
|
|
method = 'clobber',
|
|
submodules = True,
|
|
progress=True,
|
|
haltOnFailure = True))
|
|
return factory
|
|
|
|
def docActivateCmd():
|
|
#return 'source activate CoolPropWeb &&'
|
|
return ''
|
|
|
|
@properties.renderer
|
|
def fullBuildCommand(props):
|
|
return ' '.join([docActivateCmd(), "python", "__init__.py", str(props.getProperty('fullBuild', default = False) or props.getProperty('branch') == 'release')])
|
|
|
|
@properties.renderer
|
|
def rsyncCommand(props):
|
|
"""
|
|
A renderable command that creates the rsync command to be run
|
|
"""
|
|
|
|
# Some basic preparations, make sure the machine has passwordless SSH access to the server
|
|
#
|
|
server_uri = 'coolprop@coolprop.dreamhosters.com'
|
|
server_dir = '/home/coolprop/buildbot/server-master'
|
|
server_des = "{0}:{1}".format(server_uri, server_dir)
|
|
#
|
|
local_build_dir = 'Web/_build/html'
|
|
|
|
if props.getProperty('branch') == 'release':
|
|
server_target_dir = '{0}/public_html/release/sphinx'.format(server_des)
|
|
elif props.getProperty('branch') == 'master':
|
|
server_target_dir = '{0}/public_html/binaries/sphinx'.format(server_des)
|
|
else:
|
|
server_target_dir = '{0}/public_html/unstable/sphinx'.format(server_des)
|
|
|
|
return ' '.join([docActivateCmd(), 'rsync', '-a', '--stats', '{0}/ {1}'.format(local_build_dir,server_target_dir)])
|
|
|
|
# Everything needed to create the website; it makes sense to run the
# nightly builds on the same machine, as this avoids extra data transfer.
|
|
def websiteFactory_old(platform, fullBuild=False):
|
|
if 'win' in platform.lower():
|
|
raise ValueError("The docs cannot be build on a Windows machine, we rely on rsync...")
|
|
#
|
|
# Create the factory to add the actions to
|
|
factory = getBaseFactory()
|
|
## Upgrade cython if needed
|
|
#factory.addStep(ShellCommand(command=' '.join([docActivateCmd(), "pip", "install", "--upgrade", "cython"]), workdir= 'build/wrappers/Python', haltOnFailure = True))
|
|
# Make a wheel - this is advantageous because it forces pip to uninstall coolprop, ensuring that all files installed are from this wheel
|
|
factory.addStep(ShellCommand(
|
|
command=' '.join([docActivateCmd(), "python", "setup.py", "bdist_wheel", '--dist-dir', 'dist']),
|
|
env={'MACOSX_DEPLOYMENT_TARGET': '10.9'}, # This removes backwards compatibility, but should be fine for a local build
|
|
workdir= 'build/wrappers/Python',
|
|
haltOnFailure = True))
|
|
# List the files in the dist directory
|
|
factory.addStep(ShellCommand(command=' '.join([docActivateCmd(), "ls", 'dist/*.*']), workdir= 'build/wrappers/Python', haltOnFailure = True))
|
|
# Install the wheel - this will uninstall the old version
|
|
factory.addStep(ShellCommand(command=' '.join([docActivateCmd(), "pip", "install", "-vvv", "--force-reinstall", "--ignore-installed", "--upgrade", "--no-index", "`ls dist/CoolProp*.whl`"]), workdir= 'build/wrappers/Python', haltOnFailure = True))
|
|
# Remove the generated wheel
|
|
factory.addStep(ShellCommand(command=' '.join([docActivateCmd(), "rm", 'dist/*.whl']), workdir= 'build/wrappers/Python', haltOnFailure = True))
|
|
# Test the compiled CoolProp
|
|
factory.addStep(ShellCommand(command=' '.join([docActivateCmd(), "python", "-c", "\"import CoolProp; print(CoolProp.__gitrevision__)\""]), workdir= 'build/wrappers', haltOnFailure = True))
|
|
# Test the compiled CoolProp
|
|
factory.addStep(ShellCommand(command=' '.join([docActivateCmd(), "python", "-c", "\"import CoolProp; print(CoolProp.__file__)\""]), workdir= 'build/wrappers', haltOnFailure = True))
|
|
# Create plots and fluid tables
|
|
factory.addStep(ShellCommand(command=fullBuildCommand, workdir= 'build/Web/scripts', haltOnFailure = True))
|
|
# Run doxygen build
|
|
factory.addStep(ShellCommand(command=' '.join([docActivateCmd(), "doxygen", "--version", "&&", "doxygen", "Doxyfile"]), workdir= 'build', haltOnFailure = True))
|
|
# Run sphinx apidoc
|
|
factory.addStep(ShellCommand(command=' '.join([docActivateCmd(), "sphinx-apidoc","-T","-f","-e","-o","apidoc","../wrappers/Python/CoolProp"]),workdir= 'build/Web', haltOnFailure = True))
|
|
# Run sphinx website builder
|
|
factory.addStep(ShellCommand(command=' '.join([docActivateCmd(), "make", "html"]), workdir= 'build/Web', haltOnFailure = True))
|
|
# Upload the generated files
|
|
factory.addStep(ShellCommand(command=rsyncCommand, haltOnFailure = True))
|
|
#
|
|
fixPermissions(factory)
|
|
#
|
|
return factory
|
|
|
|
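# Note: the renderer below shadows the fullBuildCommand defined earlier (which was
# written for websiteFactory_old); any factory created from this point on, including
# websiteFactory, picks up the docker-based command.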
@properties.renderer
|
|
def fullBuildCommand(props):
|
|
return ' '.join(["cat","docs_runner.sh","&&","chmod", "+x", "docs_runner.sh", "&&", "./docs_runner.sh", str(props.getProperty('fullBuild', default = False) or props.getProperty('branch') == 'release')])
|
|
|
|
def websiteFactory(platform, fullBuild=False):
|
|
if 'win' in platform.lower():
|
|
raise ValueError("The docs cannot be build on a Windows machine, we rely on rsync...")
|
|
#
|
|
# Create the factory to add the actions to
|
|
factory = getBaseFactory()
|
|
# Build the docs with docker, rendering a full build of docs as needed
|
|
factory.addStep(ShellCommand(command=fullBuildCommand, workdir= 'build/Web/docker', haltOnFailure = True))
|
|
# Upload the generated files
|
|
factory.addStep(ShellCommand(command=rsyncCommand, haltOnFailure = True))
|
|
#
|
|
fixPermissions(factory)
|
|
#
|
|
return factory
|
|
|
|
# The reworked Python builder factory. It relies on Miniconda and can handle both 32bit and 64bit builds.
# You have to have both CMake and Git available on your standard command line; pay attention to this
# when installing these two tools on your Windows machine.
|
|
def pythonFactory(pyID, pyCFG=PythonSlaveConfig("name")):
|
|
#
|
|
# Do you want me to install the required packages? Enable this for new slaves.
|
|
installPackages = False
|
|
buildPyPI = checkID(pyID, teID=100, strict=False) or checkID(pyID, teID=200, strict=False) # Only build and upload Windows and Mac wheels
|
|
buildConda = False
|
|
workingFolder = "build/wrappers/Python"
|
|
installFolder = "install_root"
|
|
defTimeout = 1800 # New timeout required for slow 64bit Linux builds: timeout=defTimeout
|
|
#
|
|
# Setting the appropriate virtual environment activator
|
|
pyact = pyCFG.getPyact(pyID)
|
|
# Setting the appropriate virtual environment
|
|
pyenv = pyCFG.getPyenv(pyID)
|
|
# Setting the appropriate installer options
|
|
pyins = pyCFG.getPyins(pyID)
|
|
# Getting the appropriate virtual environment activator
|
|
if pyact is None:
|
|
raise ValueError("Your selected Python installation \"{0}\" is not supported by this builder factory".format(pyID))
|
|
# Getting the appropriate virtual environment
|
|
if pyenv is None:
|
|
raise ValueError("Your selected Python environment \"{0}\" is not supported by this builder factory".format(pyID))
|
|
# Getting the appropriate installer options
|
|
if pyins is None:
|
|
raise ValueError("Your selected Python environment \"{0}\" does not have installer options in this builder factory".format(pyID))
|
|
|
|
#
|
|
pkgs = ["requests", "jinja2", "pyyaml"]
|
|
if checkID(pyID, teID=100, strict=False):
|
|
pkgs.extend(["unxutils","pywin32"]) # Add Windows-only dependency
|
|
if checkID(pyID, teID=101, strict=False):
|
|
pkgs.append("ndg-httpsclient") # Add Windows-only and python2 dependency
|
|
#
|
|
activateCMD = " ".join([pyact, pyenv]) # We always activate an environment, regardless of the host
|
|
def combinedCMD(cmd): return " && ".join([activateCMD, cmd])
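    # For example, with pyact = "conda activate" and pyenv = "CoolProp37",
    # combinedCMD("pip list") renders to "conda activate CoolProp37 && pip list".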
|
|
#
|
|
# Create the factory to add the actions to
|
|
factory = getBaseFactory()
|
|
## Upgrade cython
|
|
#factory.addStep(ShellCommand(command=combinedCMD('pip install --upgrade cython'), workdir=workingFolder, haltOnFailure=True))
|
|
#
|
|
# Install dependencies
|
|
if installPackages and False: # disabled
|
|
installCMD = " ".join(["pip", "install"]) + " " + " ".join(pkgs)
|
|
factory.addStep(ShellCommand(command=combinedCMD(installCMD), workdir=workingFolder, haltOnFailure=True))
|
|
installCMD = " ".join(["python", "generate_meta_info.py"])
|
|
factory.addStep(ShellCommand(command=combinedCMD(installCMD), workdir=workingFolder, haltOnFailure=True))
|
|
if installPackages and False: # disabled
|
|
installCMD = " ".join(["pip", "install","-r","requirements.txt"])
|
|
factory.addStep(ShellCommand(command=combinedCMD(installCMD), workdir=workingFolder, haltOnFailure=True))
|
|
# setuptools installation for PyPI packages
|
|
factory.addStep(RemoveDirectory(dir=os.path.join('build',installFolder), haltOnFailure = False))
|
|
installCMD = " ".join(["python", "setup.py"] + pyins)
|
|
factory.addStep(ShellCommand(command=combinedCMD(installCMD), workdir=workingFolder, haltOnFailure=True))
|
|
installCMD = " ".join(["python", "setup.py", "clean"])
|
|
factory.addStep(ShellCommand(command=combinedCMD(installCMD), workdir=workingFolder, haltOnFailure=True))
|
|
if buildPyPI:
|
|
upload_command(factory, installFolder, buildID=pyID)
|
|
|
|
if buildConda:
|
|
# Install dependencies
|
|
if installPackages:
|
|
installCMD = " ".join(["conda", "install", "-yq"]) + " " + " ".join(pkgs)
|
|
factory.addStep(ShellCommand(command=combinedCMD(installCMD), haltOnFailure=True))
|
|
installCMD = " ".join(["python", "generate_meta_info.py"])
|
|
factory.addStep(ShellCommand(command=combinedCMD(installCMD), workdir=workingFolder, haltOnFailure=True))
|
|
# conda installation for binstar packages
|
|
factory.addStep(RemoveDirectory(dir=os.path.join('build',installFolder), haltOnFailure = False))
|
|
factory.addStep(RemoveDirectory(dir=os.path.join(workingFolder,'build'), haltOnFailure = False))
|
|
factory.addStep(RemoveDirectory(dir=os.path.join(workingFolder,'src'), haltOnFailure = False))
|
|
installCMD = " ".join(["python", "runner.py"])
|
|
factory.addStep(ShellCommand(command=combinedCMD(installCMD), workdir='build', haltOnFailure=True, timeout=defTimeout))
|
|
upload_command(factory, installFolder, buildID=pyID)
|
|
|
|
return factory
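# pythonFactory is instantiated once per (platform, bitness, python) combination in
# the builder loop further down, e.g. pythonFactory('126', pyCFG=windowsIPUslave)
# for a 64-bit py37 wheel on the Windows worker.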
|
|
|
|
|
|
def cmakeFactory(mod_name = None, install = True, pre_cmd = [], cmake_args = [], build_args = [], ctest_args = [], cmake_env={}, test = True, platform = None, buildID = None):
|
|
"""
|
|
Parameters
|
|
----------
|
|
mod_name: string
|
|
        The module to be built, one of 'Octave', 'Csharp', etc.; this turns on the
        corresponding CMake option, e.g. -DCOOLPROP_OCTAVE_MODULE=ON if you pass 'Octave'
|
|
install: bool
|
|
True for install, False for just build
|
|
pre_cmd: list of strings
|
|
List of strings of commands to be executed before the cmake command, gets merged with &&
|
|
cmake_args: list of strings
|
|
List of strings of arguments to be passed to cmake (Makefile generating) step
|
|
cmake_env: dictionary (string->string)
|
|
A dictionary including keys that can be set in the build environment
|
|
build_args: list of strings
|
|
List of strings of arguments to be passed to cmake install or build command depending on value of install
|
|
ctest_args: list of strings
|
|
List of strings of arguments to be passed to ctest
|
|
"""
|
|
working_folder = "build/build"
|
|
|
|
# Create the factory to add the actions to
|
|
factory = getBaseFactory()
|
|
#
|
|
factory.addStep(MakeDirectory(dir=working_folder, haltOnFailure = True))
|
|
factory.addStep(RemoveDirectory(dir="build/install_root", haltOnFailure = False))
|
|
#
|
|
if len(pre_cmd)>0 and not pre_cmd[-1]=="&&": pre_cmd.append("&&")
|
|
if mod_name is not None:
|
|
cmake_args = ["-DCOOLPROP_"+mod_name.upper()+"_MODULE=ON","-DBUILD_TESTING=ON"]+cmake_args
|
|
|
|
cmakeCMD = pre_cmd + ["cmake", ".."]+cmake_args
|
|
factory.addStep(ShellCommand(
|
|
command=' '.join(cmakeCMD),
|
|
env = cmake_env,
|
|
workdir= working_folder,
|
|
haltOnFailure = True))
|
|
#
|
|
if install: installCMD = ["--target", "install"]
|
|
else: installCMD = []
|
|
factory.addStep(ShellCommand(
|
|
command=' '.join(pre_cmd+["cmake", "--build", "."]+installCMD+build_args),
|
|
workdir = working_folder,
|
|
haltOnFailure = True))
|
|
#
|
|
if test:
|
|
factory.addStep(ShellCommand(
|
|
command=["ctest", "--extra-verbose"] + ctest_args,
|
|
workdir = working_folder,
|
|
haltOnFailure = True))
|
|
#
|
|
if install:
|
|
#factory.addStep(DirectoryUpload(
|
|
# slavesrc="install_root",
|
|
# masterdest=master_loc_rel,
|
|
# url="binaries",
|
|
# compress="bz2"))
|
|
upload_command(factory, "install_root", platform = platform, buildID = buildID)
|
|
return factory
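# A minimal sketch of how cmakeFactory can be called (hypothetical arguments; the
# Fortran builder below is the actual in-tree user of this helper):
#   factory = cmakeFactory(mod_name='Octave',
#                          platform='linux',
#                          cmake_args=['-DCMAKE_BUILD_TYPE=Release'],
#                          build_args=['--config', 'Release'])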
|
|
|
|
|
|
def fortranFactory(buildID=None,pre=''):
|
|
working_folder = "build/build" # Same as in cmakeFactory
|
|
fortran_folder = "build/wrappers/Fortran/detailed_example" #
|
|
#
|
|
cmake_args = ["-DCOOLPROP_STATIC_LIBRARY=ON","-DCOOLPROP_EXTERNC_LIBRARY=ON"]
|
|
#
|
|
if checkID(buildID, teID="010", strict=False):
|
|
cmake_args.append('-DFORCE_BITNESS_32=ON')
|
|
bitflag = "-m32"
|
|
elif checkID(buildID, teID="020", strict=False):
|
|
cmake_args.append('-DFORCE_BITNESS_64=ON')
|
|
bitflag = "-m64"
|
|
else:
|
|
raise ValueError("Unknown Fortran bitness: {0}".format(buildID))
|
|
#
|
|
if checkID(buildID, teID=100, strict=False):
|
|
cmake_args += ["-G", "\"MinGW Makefiles\""]
|
|
cp_cmd = "copy /Y *.f90 ..\\..\\..\\build"
|
|
build_cmd = pre+"gfortran -c -Wall cpinterface.f90 cool_fortran_bind.f90 "+bitflag
|
|
link_cmd = pre+"gcc -o main cpinterface.o cool_fortran_bind.o libCoolProp.a -lquadmath -lstdc++ -lgfortran "+bitflag
|
|
exec_cmd = "main"
|
|
elif checkID(buildID, teID=200, strict=False):
|
|
cmake_args += ['-DCMAKE_C_COMPILER="gcc-7"','-DCMAKE_CXX_COMPILER="g++-7"']
|
|
cp_cmd = "cp *.f90 ../../../build/"
|
|
build_cmd = pre+"gfortran-7 -c -Wall cpinterface.f90 cool_fortran_bind.f90 "+bitflag
|
|
link_cmd = pre+"gcc-7 -o main cpinterface.o cool_fortran_bind.o libCoolProp.a -lquadmath -lstdc++ -ldl -lgfortran "+bitflag
|
|
#link_cmd = "gfortran-7 -o main cpinterface.o cool_fortran_bind.o libCoolProp.a -lstdc++ -ldl"
|
|
exec_cmd = "./main"
|
|
elif checkID(buildID, teID=300, strict=False):
|
|
cmake_args += []
|
|
cp_cmd = "cp *.f90 ../../../build/"
|
|
build_cmd = pre+"gfortran -c -Wall cpinterface.f90 cool_fortran_bind.f90 "+bitflag
|
|
link_cmd = pre+"gcc -o main cpinterface.o cool_fortran_bind.o libCoolProp.a -lquadmath -lstdc++ -ldl -lgfortran -lm "+bitflag
|
|
exec_cmd = "./main"
|
|
else:
|
|
raise ValueError("Unknown Fortran ID: {0}".format(buildID))
|
|
|
|
# Create the factory to add the actions to
|
|
factory = cmakeFactory(cmake_args=cmake_args,install=False,test=False,pre_cmd=[] if pre=='' else [pre.rstrip('&& ')])
|
|
#
|
|
#factory.addStep(CopyDirectory(src=fortran_folder, dest=working_folder))
|
|
factory.addStep(ShellCommand(command=cp_cmd, workdir = fortran_folder, haltOnFailure = True))
|
|
for cmd in [build_cmd,link_cmd,exec_cmd]:
|
|
factory.addStep(ShellCommand(command=cmd, workdir = working_folder, haltOnFailure = True))
|
|
return factory
|
|
|
|
|
|
def javascript_slave(platform, cmake_args = [], cmake_env = {}, build_args = []):
|
|
working_folder = "build"
|
|
# Create the factory to add the actions to
|
|
factory = getBaseFactory()
|
|
# Remove the temporary folder for installs
|
|
factory.addStep(RemoveDirectory(dir="build/install_root", haltOnFailure = False))
|
|
factory.addStep(ShellCommand(command="docker run --rm --user 996:994 -v $(pwd):/src:z trzeci/emscripten bash /src/wrappers/Javascript/docker_build.sh",
|
|
workdir= working_folder,
|
|
haltOnFailure = True))
|
|
#factory.addStep(DirectoryUpload(slavesrc="install_root", masterdest=master_loc_rel, url="binaries", compress="bz2"))
|
|
#fixPermissions(factory)
|
|
upload_command(factory, "install_root", platform=platform)
|
|
return factory
|
|
|
|
def python_source_slave(key, platform, conda_env, cmake_args = [], cmake_env = {}, build_args = []):
|
|
working_folder = "build/build"
|
|
|
|
# Create the factory to add the actions to
|
|
factory = getBaseFactory()
|
|
|
|
# Remove the temporary folder for installs
|
|
factory.addStep(RemoveDirectory(dir="build/install_root", haltOnFailure = False))
|
|
factory.addStep(MakeDirectory(dir=working_folder, haltOnFailure = True))
|
|
|
|
factory.addStep(ShellCommand(command = ' '.join(['source',"activate",conda_env,"&&","cmake", "..", "-DCOOLPROP_PYTHON_PYPI=ON","&&","cmake","--build",".","--target","CoolProp"]+cmake_args),
|
|
env = cmake_env,
|
|
workdir= working_folder,
|
|
haltOnFailure = True))
|
|
#factory.addStep(DirectoryUpload(slavesrc="install_root", masterdest=master_loc_rel, url="binaries", compress="bz2"))
|
|
#fixPermissions(factory)
|
|
upload_command(factory, "install_root", platform=platform)
|
|
return factory
|
|
|
|
def python_manylinux_builder(bitness="64"):
|
|
working_folder = "build/wrappers/Python/manylinux"
|
|
# Create the factory to add the actions to
|
|
factory = getBaseFactory()
|
|
# Remove the temporary folder for installs
|
|
factory.addStep(RemoveDirectory(dir="build/install_root", haltOnFailure = False))
|
|
factory.addStep(ShellCommand(command = ' '.join(['chmod',"+x","00_prepare_docker.sh","&&","./00_prepare_docker.sh",str(bitness)]),
|
|
workdir= working_folder,
|
|
haltOnFailure = True))
|
|
#factory.addStep(DirectoryUpload(slavesrc="install_root", masterdest=master_loc_rel, url="binaries", compress="bz2"))
|
|
pyID = getIDstr(platform="linux", bitness=str(bitness)+"bit")
|
|
upload_command(factory, "install_root", buildID=pyID)
|
|
return factory
|
|
|
|
|
|
def windows_installer_slave():
|
|
"""
|
|
"""
|
|
working_folder = "build/build"
|
|
output_folder = "build/build/InnoScript/deploy"
|
|
# Create the factory to add the actions to build the Windows installer
|
|
factory = getBaseFactory()
|
|
factory.addStep(MakeDirectory(dir=working_folder, haltOnFailure = True))
|
|
factory.addStep(RemoveDirectory(dir=output_folder, haltOnFailure = False))
|
|
factory.addStep(ShellCommand(command=["cmake", "..", "-DCOOLPROP_WINDOWS_PACKAGE=ON","-G", "Visual Studio 16 2019",'-A','Win32'],
|
|
workdir= working_folder,
|
|
haltOnFailure = True))
|
|
factory.addStep(ShellCommand(command=["cmake", "--build", ".", "--target", "COOLPROP_WINDOWS_PACKAGE_INSTALLER"],
|
|
workdir = working_folder,
|
|
haltOnFailure = True))
|
|
# Upload the files
|
|
#factory.addStep(DirectoryUpload(slavesrc="build/InnoScript/bin",masterdest=master_loc_rel,url="installer",compress="bz2"))
|
|
#fixPermissions(factory)
|
|
upload_command(factory, "build/InnoScript/bin", platform='windows')
|
|
return factory
|
|
|
|
def mathcad_slave():
|
|
"""
|
|
"""
|
|
working_folder = "build/build"
|
|
|
|
# Create the factory to add the actions to
|
|
factory = getBaseFactory()
|
|
#
|
|
factory.addStep(MakeDirectory(dir=working_folder+'/mathcadprime', haltOnFailure = True))
|
|
factory.addStep(MakeDirectory(dir=working_folder+'/mathcad15', haltOnFailure = True))
|
|
factory.addStep(RemoveDirectory(dir="build/install_root", haltOnFailure = False))
|
|
|
|
# ***************
|
|
# Build the MathCAD 15 dll
|
|
# ***************
|
|
factory.addStep(ShellCommand(command=' '.join(["cmake", "../..",
|
|
"-DCOOLPROP_MATHCAD15_MODULE=ON","-G", '"Visual Studio 16 2019"', '-A','Win32','-DCOOLPROP_MATHCAD15_ROOT="C:/Program Files (x86)/Mathcad/Mathcad 15"']),
|
|
workdir= working_folder+'/mathcad15',
|
|
haltOnFailure = True))
|
|
factory.addStep(ShellCommand(command=["cmake", "--build", ".", "--target", "install", "--config", "Release"], workdir = working_folder+'/mathcad15', haltOnFailure = True))
|
|
|
|
# *************************
|
|
# Build the MathCAD PRIME dll
|
|
# *************************
|
|
factory.addStep(ShellCommand(command=' '.join(["cmake", "../..",
|
|
"-DCOOLPROP_PRIME_MODULE=ON", "-G", '"Visual Studio 16 2019"', '-DCOOLPROP_PRIME_ROOT="C:/Program Files/PTC/Mathcad Prime 6.0.0.0"']),
|
|
workdir= working_folder+'/mathcadprime',
|
|
haltOnFailure = True))
|
|
factory.addStep(ShellCommand(command=["cmake", "--build", ".", "--target", "install", "--config", "Release"], workdir = working_folder+'/mathcadprime', haltOnFailure = True))
|
|
|
|
# Upload the files
|
|
#factory.addStep(DirectoryUpload(slavesrc="install_root",masterdest=master_loc_rel,url="binaries",compress="bz2"))
|
|
#fixPermissions(factory)
|
|
upload_command(factory, "install_root", platform='windows')
|
|
return factory
|
|
|
|
def smath_builder():
|
|
"""
|
|
"""
|
|
working_folder = "build/build"
|
|
|
|
# Create the factory to add the actions to
|
|
factory = getBaseFactory()
|
|
#
|
|
factory.addStep(MakeDirectory(dir='build/bin/SMath', haltOnFailure = True))
|
|
factory.addStep(MakeDirectory(dir='build/wrappers/SMath/coolprop_wrapper/bin/Release', haltOnFailure = True))
|
|
factory.addStep(MakeDirectory(dir=working_folder+'/32bitDLL', haltOnFailure = True))
|
|
factory.addStep(MakeDirectory(dir=working_folder+'/64bitDLL', haltOnFailure = True))
|
|
factory.addStep(RemoveDirectory(dir="build/install_root", haltOnFailure = False))
|
|
factory.addStep(ShellCommand(command=["cmake", "..","-DCOOLPROP_SMATH_MODULE=ON","-DCOOLPROP_SMATH_WORK_INPLACE=ON","-G","Visual Studio 16 2019"],
|
|
workdir= 'build/build',
|
|
haltOnFailure = True))
|
|
# Build the SMath C# class library using visual studio
|
|
factory.addStep(ShellCommand(command=["cmake", "--build", ".", "--config", "Release"],
|
|
workdir= 'build/build', haltOnFailure = True))
|
|
|
|
# *************************
|
|
# Make 32-bit __stdcall DLL
|
|
# *************************
|
|
factory.addStep(ShellCommand(command=["cmake", "../..", "-DCOOLPROP_SHARED_LIBRARY=ON","-DCOOLPROP_STDCALL_LIBRARY=ON","-G", "Visual Studio 16 2019", "-A", "Win32"],
|
|
workdir= working_folder+'/32bitDLL',
|
|
haltOnFailure = True))
|
|
factory.addStep(ShellCommand(command=["cmake", "--build", ".", "--target", "install", "--config", "Release"], workdir = working_folder+'/32bitDLL', haltOnFailure = True))
|
|
# Copy the created DLL
|
|
factory.addStep(ShellCommand(command=' '.join(["copy", "/Y", "install_root\\shared_library\\Windows\\32bit__stdcall\\CoolProp.dll", "wrappers\\SMath\\coolprop_wrapper\\bin\\Release\\CoolProp_x86.dll"]), workdir = 'build', haltOnFailure = True))
|
|
|
|
# ***************
|
|
# Make 64-bit DLL
|
|
# ***************
|
|
factory.addStep(ShellCommand(command=["cmake", "../..","-DCOOLPROP_SHARED_LIBRARY=ON","-G", "Visual Studio 16 2019"],
|
|
workdir= working_folder+'/64bitDLL',
|
|
haltOnFailure = True))
|
|
factory.addStep(ShellCommand(command=["cmake", "--build", ".", "--target", "install", "--config", "Release"], workdir = working_folder+'/64bitDLL', haltOnFailure = True))
|
|
factory.addStep(ShellCommand(command=' '.join(["copy", "/Y", "install_root\\shared_library\\Windows\\64bit\\CoolProp.dll", "wrappers\\SMath\\coolprop_wrapper\\bin\\Release\\CoolProp_x64.dll"]), workdir = 'build', haltOnFailure = True))
|
|
|
|
# Copy other files to a temporary directory
|
|
factory.addStep(ShellCommand(command=["build_zip"], workdir = 'build\\wrappers\\SMath\\coolprop_wrapper', haltOnFailure = True))
|
|
factory.addStep(ShellCommand(command=["copy", "/Y", "wrappers\\SMath\\coolprop_wrapper\\coolprop_wrapper.7z", "bin\\SMath"], workdir = 'build', haltOnFailure = True))
|
|
# Upload the files
|
|
#factory.addStep(DirectoryUpload(slavesrc="bin",masterdest=master_loc_rel,url="SMath",compress="bz2"))
|
|
#fixPermissions(factory)
|
|
upload_command(factory, "bin", platform='windows')
|
|
return factory
|
|
|
|
def julia_builder():
|
|
"""
|
|
"""
|
|
working_folder = "build/build"
|
|
|
|
# Create the factory to add the actions to
|
|
factory = getBaseFactory()
|
|
# Clean the install_root
|
|
factory.addStep(RemoveDirectory(dir="build/install_root/Julia", haltOnFailure = False))
|
|
factory.addStep(MakeDirectory(dir='build/install_root/Julia', haltOnFailure = True))
|
|
# Copy other files
|
|
factory.addStep(ShellCommand(command=["cp", "wrappers/Julia/CoolProp.jl", "install_root/Julia/"], workdir = 'build', haltOnFailure = True))
|
|
# Upload the files - TODO: Is this the correct directory?
|
|
#factory.addStep(DirectoryUpload(slavesrc="install_root",masterdest=master_loc_rel,url="binaries",compress="bz2"))
|
|
#fixPermissions(factory)
|
|
upload_command(factory, "install_root", platform='osx')
|
|
return factory
|
|
|
|
def cmake_slave(mod_name, platform, install = True, cmake_args = [], build_args = [], ctest_args = [], cmake_env={}, test = True):
|
|
"""
|
|
Parameters
|
|
----------
|
|
mod_name: string
|
|
        The module to be built, one of 'Octave', 'Csharp', etc.; this turns on the
        corresponding CMake option, e.g. -DCOOLPROP_OCTAVE_MODULE=ON if you pass 'Octave'
|
|
install: bool
|
|
True for install, False for just build
|
|
cmake_args: list of strings
|
|
List of strings of arguments to be passed to cmake (Makefile generating) step
|
|
cmake_env: dictionary (string->string)
|
|
A dictionary including keys that can be set in the build environment
|
|
build_args: list of strings
|
|
List of strings of arguments to be passed to cmake install or build command depending on value of install
|
|
ctest_args: list of strings
|
|
List of strings of arguments to be passed to ctest
|
|
"""
|
|
working_folder = "build/build"
|
|
|
|
# Create the factory to add the actions to
|
|
factory = getBaseFactory()
|
|
#
|
|
factory.addStep(MakeDirectory(dir=working_folder, haltOnFailure = True))
|
|
factory.addStep(RemoveDirectory(dir="build/install_root", haltOnFailure = True))
|
|
pre = []
|
|
if platform.lower() == 'windows':
|
|
if mod_name.lower() == 'octave':
|
|
pre = ['set','"PATH=%OCTAVE_ROOT%/mingw64/bin;%MINGW64_BIN%;%PATH%"','&&']
|
|
elif mod_name.lower() == 'java':
|
|
pre = ['set','"PATH=C:/Program Files/Java/jdk-14.0.1/bin;%MINGW64_BIN%;%PATH%"','&&']
|
|
elif mod_name.lower() == 'r':
|
|
pre = ['set','"PATH=%MINGW64_BIN%;%PATH%"','&&']
|
|
elif 'xtern' in mod_name.lower():
|
|
pre = ['set','"PATH=%MINGW64_BIN%;%PATH%"','&&'] ## GCC+C++
|
|
elif 'shared_library_mingw' in mod_name:
|
|
if '32' in mod_name:
|
|
pre = ['set','"PATH=%MINGW_BIN%;%PATH%"','&&']
|
|
else:
|
|
pre = ['set','"PATH=%MINGW64_BIN%;%PATH%"','&&']
|
|
|
|
factory.addStep(ShellCommand(command=' '.join(pre+["cmake", "..", "-DCOOLPROP_"+mod_name.upper()+"_MODULE=ON","-DBUILD_TESTING=ON"]+cmake_args),
|
|
env = cmake_env,
|
|
workdir= working_folder,
|
|
haltOnFailure = True))
|
|
if install:
|
|
factory.addStep(ShellCommand(command=' '.join(pre+["cmake", "--build", ".", "--target", "install"]+build_args), workdir = working_folder, haltOnFailure = True))
|
|
else:
|
|
factory.addStep(ShellCommand(command=' '.join(pre+["cmake", "--build", "."]+build_args), workdir = working_folder, haltOnFailure = True))
|
|
if test:
|
|
factory.addStep(ShellCommand(command=' '.join(pre+["ctest", "--extra-verbose"] + ctest_args), workdir = working_folder, haltOnFailure = True, env = cmake_env))
|
|
if install:
|
|
#factory.addStep(DirectoryUpload(slavesrc="install_root",masterdest=master_loc_rel,url="binaries",compress="bz2"))
|
|
#fixPermissions(factory)
|
|
upload_command(factory, "install_root", platform=platform)
|
|
return factory
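# cmake_slave is the workhorse behind most wrapper builders configured below; as a
# sketch (mirroring the Csharp-windows32 builder further down):
#   cmake_slave('Csharp', platform='windows',
#               cmake_args=['-G', '"Visual Studio 16 2019"', '-A', 'Win32'],
#               build_args=['--config', 'Release'], ctest_args=['-C', 'Release'])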
|
|
|
|
|
|
def memory_sanitizer_builder():
|
|
""" Run clang's memory sanitizer on the Catch tests to check for memory leaks """
|
|
|
|
commons = dict(workdir = 'build/dev/asan', haltOnFailure = True)
|
|
|
|
# Create the factory to add the actions to
|
|
factory = getBaseFactory()
|
|
factory.addStep(MakeDirectory(dir='build/dev/asan', haltOnFailure = True))
|
|
factory.addStep(ShellCommand(command='docker-compose up --build --exit-code-from worker', **commons))
|
|
return factory
|
|
|
|
from buildbot.config import BuilderConfig
|
|
|
|
c['builders'] = []
|
|
|
|
# c['builders'].append(
|
|
# BuilderConfig(name="sphinx pages",
|
|
# slavenames=["linux32-slave"],
|
|
# factory = website_factory(platform = 'linux')
|
|
# )
|
|
# )
|
|
|
|
# c['builders'].append(
|
|
# BuilderConfig(name="Debian package",
|
|
# slavenames=["linux-slave"],
|
|
# factory = deb_slave()
|
|
# )
|
|
# )
|
|
|
|
c['builders'].append(
|
|
BuilderConfig(name="Windows installer",
|
|
slavenames=["WIN-IPU-worker"],
|
|
factory = windows_installer_slave() ) )
|
|
c['builders'].append(
|
|
BuilderConfig(name="MathCAD",
|
|
slavenames=["WIN-IPU-worker"],
|
|
factory = mathcad_slave() ) )
|
|
# c['builders'].append(
|
|
# BuilderConfig(name="SMath",
|
|
# slavenames=["WIN-IPU-worker"],
|
|
# factory = smath_builder() ) )
|
|
|
|
|
|
c['builders'].append(
|
|
BuilderConfig(
|
|
name="Sphinx docs",
|
|
slavenames=["OSX-slave"],
|
|
factory = websiteFactory(platform = 'osx')
|
|
)
|
|
)
|
|
|
|
#c['builders'].append(
|
|
# BuilderConfig(
|
|
# name="Sphinx docs",
|
|
# slavenames=["LinuxWeb-IPU-worker"],
|
|
# factory = websiteFactory(platform = 'linux')
|
|
# )
|
|
#)
|
|
|
|
c['builders'].append(
|
|
BuilderConfig(name="Javascript-linux",
|
|
slavenames=["LIN-IPU-worker"],
|
|
factory = javascript_slave(platform = 'linux')
|
|
)
|
|
)
|
|
c['builders'].append(
|
|
BuilderConfig(name="Python-sdist",
|
|
slavenames=["OSX-IPU-worker"],
|
|
factory = python_source_slave("PYPI", platform = 'OSX', conda_env = 'CoolProp27')
|
|
)
|
|
)
|
|
c['builders'].append(
|
|
BuilderConfig(name="Python-binaries-linux-64bit",
|
|
slavenames=["LIN-IPU-worker"],
|
|
factory = python_manylinux_builder(bitness=64)
|
|
)
|
|
)
|
|
# c['builders'].append(
|
|
# BuilderConfig(name="Python-binaries-linux-32bit",
|
|
# slavenames=["LIN-IPU-worker"],
|
|
# factory = python_manylinux_builder(bitness=32)
|
|
# )
|
|
# )
|
|
c['builders'].append(
|
|
BuilderConfig(name="memory sanitizer",
|
|
slavenames=["OSX-slave"],
|
|
factory = memory_sanitizer_builder()
|
|
)
|
|
)
|
|
|
|
# We centralise the Python builder configuration here
|
|
#
|
|
# Setting the appropriate installer options
|
|
relinstFolder = "../../install_root" # relative path
|
|
baseins = ['bdist_wheel', '--dist-dir', relinstFolder+'/Python']
|
|
|
|
windowsIPUslave = PythonSlaveConfig("WIN-IPU-worker")
|
|
windowsIPUslave.pyact[getIDstr("windows", "32bit", 0 )] = "call \"C:\Program Files (x86)\Miniconda32_27\condabin\conda.bat\" activate"
|
|
windowsIPUslave.pyact[getIDstr("windows", "64bit", 0 )] = "conda activate"
|
|
windowsIPUslave.pyenv[getIDstr("windows", 0 , "py27")] = "CoolProp27"
|
|
windowsIPUslave.pyenv[getIDstr("windows", 0 , "py36")] = "CoolProp36"
|
|
windowsIPUslave.pyact[getIDstr("windows", "32bit", "py36")] = r'"C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x86 && call "C:\Program Files (x86)\Miniconda32_27\condabin\conda.bat" activate'
|
|
windowsIPUslave.pyact[getIDstr("windows", "64bit", "py36")] = r'"C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x64 && conda activate'
|
|
windowsIPUslave.pyenv[getIDstr("windows", 0 , "py37")] = "CoolProp37"
|
|
windowsIPUslave.pyact[getIDstr("windows", "32bit", "py37")] = r'"C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x86 && call "C:\Program Files (x86)\Miniconda32_27\condabin\conda.bat" activate'
|
|
windowsIPUslave.pyact[getIDstr("windows", "64bit", "py37")] = r'"C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x64 && conda activate'
|
|
windowsIPUslave.pyenv[getIDstr("windows", 0 , "py38")] = "CoolProp38"
|
|
windowsIPUslave.pyact[getIDstr("windows", "32bit", "py38")] = r'"C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x86 && call "C:\Program Files (x86)\Miniconda32_27\condabin\conda.bat" activate'
|
|
windowsIPUslave.pyact[getIDstr("windows", "64bit", "py38")] = r'"C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x64 && conda activate'
|
|
tmpins = baseins + ['bdist_wininst','--dist-dir', relinstFolder+'/Python']
|
|
windowsIPUslave.pyins[getIDstr("windows", "32bit", "py27")] = tmpins# + ['--cmake-compiler', 'vc9' , '--cmake-bitness', '32']
|
|
windowsIPUslave.pyins[getIDstr("windows", "32bit", "py36")] = tmpins
|
|
windowsIPUslave.pyins[getIDstr("windows", "32bit", "py37")] = tmpins
|
|
windowsIPUslave.pyins[getIDstr("windows", "32bit", "py38")] = tmpins
|
|
windowsIPUslave.pyins[getIDstr("windows", "64bit", "py27")] = tmpins# + ['--cmake-compiler', 'vc9' , '--cmake-bitness', '64']
|
|
windowsIPUslave.pyins[getIDstr("windows", "64bit", "py36")] = tmpins
|
|
windowsIPUslave.pyins[getIDstr("windows", "64bit", "py37")] = tmpins
|
|
windowsIPUslave.pyins[getIDstr("windows", "64bit", "py38")] = tmpins
|
|
|
|
# linuxDTUslave = PythonSlaveConfig("linux32-slave")
|
|
# linuxDTUslave.pyact[getIDstr( "linux" , "32bit", 0 )] = "source /home/jowr/miniconda/bin/activate"
|
|
# linuxDTUslave.pyenv[getIDstr( "linux" , 0 , "py27")] = "CoolProp27"
|
|
# linuxDTUslave.pyenv[getIDstr( "linux" , 0 , "py34")] = "CoolProp34"
|
|
# linuxDTUslave.pyins[getIDstr( "linux" , "32bit", 0 )] = baseins
|
|
#
|
|
# linuxJorritSlave = PythonSlaveConfig("linux64-slave")
|
|
# linuxJorritSlave.pyact[getIDstr( "linux" , "64bit", 0 )] = "source /home/jorrit/miniconda/bin/activate"
|
|
# linuxJorritSlave.pyenv[getIDstr( "linux" , 0 , "py27")] = "CoolProp27"
|
|
# linuxJorritSlave.pyenv[getIDstr( "linux" , 0 , "py34")] = "CoolProp34"
|
|
# linuxJorritSlave.pyins[getIDstr( "linux" , "64bit", 0 )] = baseins
|
|
|
|
osxIPUslave = PythonSlaveConfig("OSX-IPU-worker")
|
|
osxIPUslave.pyact[getIDstr( "osx" , "64bit", 0 )] = "source activate"
|
|
osxIPUslave.pyenv[getIDstr( "osx" , "64bit", "py27")] = "CoolProp27"
|
|
osxIPUslave.pyenv[getIDstr( "osx" , "64bit", "py36")] = "CoolProp36"
|
|
osxIPUslave.pyenv[getIDstr( "osx" , "64bit", "py37")] = "CoolProp37"
|
|
osxIPUslave.pyenv[getIDstr( "osx" , "64bit", "py38")] = "CoolProp38"
|
|
osxIPUslave.pyins[getIDstr( "osx" , 0 , 0 )] = baseins
|
|
|
|
#l64IPUslave = PythonSlaveConfig("Linux64-IPU-worker")
|
|
#l64IPUslave.pyact[getIDstr( "linux" , "64bit", 0 )] = "source activate"
|
|
#l64IPUslave.pyenv[getIDstr( "linux" , "64bit", "py27")] = "CoolProp27"
|
|
#l64IPUslave.pyenv[getIDstr( "linux" , "64bit", "py34")] = "CoolProp34"
|
|
#l64IPUslave.pyenv[getIDstr( "linux" , "64bit", "py35")] = "CoolProp35"
|
|
#l64IPUslave.pyins[getIDstr( "linux" , 0 , 0 )] = baseins
|
|
|
|
#l32IPUslave = PythonSlaveConfig("Linux32-IPU-worker")
|
|
#l32IPUslave.pyact[getIDstr( "linux" , "32bit", 0 )] = "source activate"
|
|
#l32IPUslave.pyenv[getIDstr( "linux" , "32bit", "py27")] = "CoolProp27"
|
|
#l32IPUslave.pyenv[getIDstr( "linux" , "32bit", "py34")] = "CoolProp34"
|
|
#l32IPUslave.pyenv[getIDstr( "linux" , "32bit", "py35")] = "CoolProp35"
|
|
#l32IPUslave.pyins[getIDstr( "linux" , 0 , 0 )] = baseins
|
|
|
|
pythonSlaves = [windowsIPUslave, osxIPUslave]#, l64IPUslave, l32IPUslave]
|
|
|
|
for slave in pythonSlaves:
|
|
for pyID in slave.getIDs():
|
|
c['builders'].append(
|
|
BuilderConfig(
|
|
name="-".join(["Python", "binaries", getJobName(pyID)]),
|
|
slavenames=[slave.name],
|
|
factory = pythonFactory(pyID, pyCFG=slave)
|
|
)
|
|
)
|
|
|
|
# # Enable multiple slaves for one build for load
|
|
# # balancing and backup purposes.
|
|
# # Note that the configuration has to be the same.
|
|
# pythonSlaves = [windowsIPUslave, windowsIANslave, linuxDTUslave]
|
|
# enabledIDs = {}
|
|
# for slave in pythonSlaves:
|
|
# for pyID in slave.getIDs():
|
|
# if pyID in enabledIDs:
|
|
# enabledIDs[pyID].append(slave.name)
|
|
# else:
|
|
# enabledIDs[pyID] = [slave.name]
|
|
#
|
|
# for pyID in enabledIDs:
|
|
# c['builders'].append(
|
|
# BuilderConfig(
|
|
# name="-".join(["Python", "binaries", getJobName(pyID)]),
|
|
# slavenames=enabledIDs[pyID],
|
|
# factory = pythonFactory(pyID, pyCFG=slave)
|
|
# )
|
|
# )
|
|
|
|
windowsIPUslave.pyact[getIDstr("windows", "32bit", 0 )] = "set \"PATH=C:\\Miniconda3_32\\Scripts;%PATH%\" && activate"
|
|
windowsIPUslave.pyact[getIDstr("windows", "64bit", 0 )] = "set \"PATH=C:\\Miniconda3\\Scripts;%PATH%\" && activate"
|
|
windowsIPUslave.pyenv[getIDstr("windows", 0 , "py27")] = "CoolProp27"
|
|
|
|
|
|
for pl in platformID:
|
|
for bi in [bitnessID["64bit"]]: # Skip 32 bit for now
|
|
pre = ''
|
|
tmp_id = getIDstr(platform=pl, bitness=bi)
|
|
if checkID(tmp_id, teID=100, strict=False):
|
|
tmp_slave = "WIN-IPU-worker"
|
|
pre = 'set PATH=%MINGW64_BIN%;%PATH% && '
|
|
elif checkID(tmp_id, teID=200, strict=False): tmp_slave = "OSX-IPU-worker"
|
|
elif checkID(tmp_id, teID=300, strict=False): tmp_slave = "LIN-IPU-worker"
|
|
else: raise ValueError("Unknown Fortran ID: {0}".format(tmp_id))
|
|
c['builders'].append(BuilderConfig(
|
|
name="Fortran-executable-"+str(pl)+"-"+str(bi)+"-GCC-ExternC",
|
|
slavenames=[tmp_slave],
|
|
factory = fortranFactory(buildID=tmp_id, pre=pre),
|
|
))
|
|
|
|
|
|
# trying to reorganise the cmake slaves, starting with the libraries
|
|
for pl in platformID:
|
|
for bi in bitnessID:
|
|
tmp_id = getIDstr(platform=pl, bitness=bi)
|
|
ctest_args = []
|
|
cmake_args = ['-DCMAKE_BUILD_TYPE=Release', '-DCMAKE_VERBOSE_MAKEFILE=ON']
|
|
build_args = ['--config','Release']
|
|
cmake_envs = {}
|
|
#
|
|
tmp_libs = []
|
|
tmp_slave = None
|
|
tmp_names = []
|
|
#
|
|
if checkID(tmp_id, teID=110, strict=False): # Windows 32bit
|
|
tmp_libs.append(['-DCOOLPROP_SHARED_LIBRARY=ON', '-DFORCE_BITNESS_32=ON', '-G','"MinGW Makefiles"'])
|
|
tmp_names.append('shared_library_mingw')
|
|
tmp_libs.append(['-DCOOLPROP_SHARED_LIBRARY=ON', '-DCOOLPROP_CDECL_LIBRARY=ON','-G','"Visual Studio 16 2019"','-A','Win32'])
|
|
tmp_names.append('shared_library_cdecl')
|
|
tmp_libs.append(['-DCOOLPROP_SHARED_LIBRARY=ON', '-DCOOLPROP_STDCALL_LIBRARY=ON', '-DCOOLPROP_LIBRARY_EXPORTS="src/CoolPropLib.def"', '-G','"Visual Studio 16 2019"','-A','Win32'])
|
|
tmp_names.append('shared_library_stdcall')
|
|
#tmp_libs.append(['-DCOOLPROP_STATIC_LIBRARY=ON', '-DCOOLPROP_EXTERNC_LIBRARY=ON', '-DFORCE_BITNESS_32=ON', '-G','"MinGW Makefiles"'])
|
|
#tmp_names.append('static_library_mingw')
|
|
#tmp_libs.append(['-DCOOLPROP_STATIC_LIBRARY=ON', '-DCOOLPROP_EXTERNC_LIBRARY=ON', '-G','"Visual Studio 16 2019"'])
|
|
#tmp_names.append('static_library')
|
|
tmp_slave = "WIN-IPU-worker"
|
|
elif checkID(tmp_id, teID=120, strict=False): # Windows 64bit
|
|
tmp_libs.append(['-DCOOLPROP_SHARED_LIBRARY=ON', '-DFORCE_BITNESS_64=ON','-G','"MinGW Makefiles"'])
|
|
tmp_names.append('shared_library_mingw')
|
|
tmp_libs.append(['-DCOOLPROP_SHARED_LIBRARY=ON', '-G','"Visual Studio 16 2019"'])
|
|
tmp_names.append('shared_library')
|
|
#tmp_libs.append(['-DCOOLPROP_STATIC_LIBRARY=ON', '-DCOOLPROP_EXTERNC_LIBRARY=ON', '-DFORCE_BITNESS_64=ON','-G','"MinGW Makefiles"'])
|
|
#tmp_names.append('static_library_mingw')
|
|
#tmp_libs.append(['-DCOOLPROP_STATIC_LIBRARY=ON', '-DCOOLPROP_EXTERNC_LIBRARY=ON', '-G','"Visual Studio 16 2019"'])
|
|
#tmp_names.append('static_library')
|
|
tmp_slave = "WIN-IPU-worker"
|
|
elif checkID(tmp_id, teID=210, strict=False): # OSX 32bit
|
|
continue #disabled 32-bit shared library (deprecated in OSX)
|
|
tmp_libs.append(['-DCOOLPROP_SHARED_LIBRARY=ON', '-DFORCE_BITNESS_32=ON'])
|
|
tmp_names.append('shared_library')
|
|
#tmp_libs.append(['-DCOOLPROP_STATIC_LIBRARY=ON', '-DCOOLPROP_EXTERNC_LIBRARY=ON', '-DFORCE_BITNESS_32=ON'])
|
|
#tmp_names.append('static_library')
|
|
tmp_slave = "OSX-IPU-worker"
|
|
elif checkID(tmp_id, teID=220, strict=False): # OSX 64bit
|
|
tmp_libs.append(['-DCOOLPROP_SHARED_LIBRARY=ON', '-DFORCE_BITNESS_64=ON'])
|
|
tmp_names.append('shared_library')
|
|
#tmp_libs.append(['-DCOOLPROP_STATIC_LIBRARY=ON', '-DCOOLPROP_EXTERNC_LIBRARY=ON', '-DFORCE_BITNESS_64=ON'])
|
|
#tmp_names.append('static_library')
|
|
tmp_slave = "OSX-IPU-worker"
|
|
elif checkID(tmp_id, teID=310, strict=False): # Linux 32bit
|
|
continue
|
|
# tmp_libs.append(['-DCOOLPROP_SHARED_LIBRARY=ON', '-DFORCE_BITNESS_32=ON'])
|
|
# tmp_names.append('shared_library')
|
|
# #tmp_libs.append(['-DCOOLPROP_STATIC_LIBRARY=ON', '-DCOOLPROP_EXTERNC_LIBRARY=ON', '-DFORCE_BITNESS_32=ON'])
|
|
# #tmp_names.append('static_library')
|
|
# tmp_slave = "LIN-IPU-worker"
|
|
elif checkID(tmp_id, teID=320, strict=False): # Linux 64bit
|
|
tmp_libs.append(['-DCOOLPROP_SHARED_LIBRARY=ON', '-DFORCE_BITNESS_64=ON'])
|
|
tmp_names.append('shared_library')
|
|
#tmp_libs.append(['-DCOOLPROP_STATIC_LIBRARY=ON', '-DCOOLPROP_EXTERNC_LIBRARY=ON', '-DFORCE_BITNESS_64=ON'])
|
|
#tmp_names.append('static_library')
|
|
tmp_slave = "LIN-IPU-worker"
|
|
else:
|
|
raise ValueError("Unknown ID: {0}".format(tmp_id))
|
|
|
|
for i_conf in range(len(tmp_libs)):
|
|
tmp_name = "-".join([tmp_names[i_conf], pl, bi])
|
|
tmp_args = cmake_args[:]
|
|
tmp_args.extend(tmp_libs[i_conf])
|
|
c['builders'].append(
|
|
BuilderConfig(
|
|
name = tmp_name,
|
|
slavenames = [tmp_slave],
|
|
factory = cmake_slave(
|
|
tmp_name,
|
|
platform = pl,
|
|
cmake_env = cmake_envs,
|
|
ctest_args = ctest_args,
|
|
cmake_args = tmp_args,
|
|
build_args = build_args
|
|
)
|
|
)
|
|
)
|
|
|
|
# Common boring 64-bit modules for windows, linux and OSX
|
|
for platform in ['OSX', 'linux', 'windows']:
|
|
for wrapper in ['Java','Csharp','Octave','PHP','VBDOTNET','R']:
|
|
new_slave = None
|
|
if wrapper == 'PHP' and platform != 'linux': continue # only build PHP on linux
|
|
if wrapper == 'VBDOTNET' and not platform.startswith('windows'): continue # only build VB.net on windows
|
|
if platform.startswith('windows') and wrapper in ['Java','Csharp','Octave','VBDOTNET','R']:
|
|
new_slave = 'WIN-IPU-worker' # overwrite the slave
|
|
if platform == 'linux': # No linux builds
|
|
continue
|
|
ctest_args, cmake_args, build_args = [], ['-DCMAKE_BUILD_TYPE=Release','-DCMAKE_VERBOSE_MAKEFILE=ON'], ['--config','Release']
|
|
cmake_env = {}
|
|
if wrapper == 'R':
|
|
if platform.startswith('windows'):
|
|
cmake_args += ['-DR_BIN="C:/Program Files/R/R-4.0.0/bin/x64"','-DFORCE_BITNESS_64=ON']
|
|
elif platform == 'OSX':
|
|
cmake_args += ['-DR_BIN="/usr/local/bin"']
|
|
cmake_env = {'DYLD_LIBRARY_PATH': '/opt/refprop'}
|
|
else:
|
|
cmake_args += ['-DR_BIN="/usr/bin"']
|
|
if platform.startswith('windows'):
|
|
ctest_args = ['-C', 'Release']
|
|
if wrapper in ['Octave', 'R']:
|
|
cmake_args += ['-G', '"MinGW Makefiles"']
|
|
elif wrapper == 'VBDOTNET':
|
|
cmake_args += ['-G', '"Visual Studio 16 2019"']
|
|
else:
|
|
cmake_args += ['-G', '"Visual Studio 16 2019"']
|
|
|
|
c['builders'].append(
|
|
BuilderConfig(
|
|
name = wrapper + "-" + platform,
|
|
slavenames = [platform + "-slave" if new_slave is None else new_slave],
|
|
factory = cmake_slave(
|
|
wrapper,
|
|
platform = platform,
|
|
cmake_env = cmake_env,
|
|
ctest_args = ctest_args,
|
|
cmake_args = cmake_args,
|
|
build_args = build_args
|
|
)
|
|
)
|
|
)
|
|
    # Run catch tests
    if platform == 'windows':
        slavenames = ["WIN-IPU-worker"]
        cmake_args = ['-G', '"Visual Studio 16 2019"']
        build_args = ['--config', 'Release']
    elif platform == 'OSX':
        slavenames = ["OSX-IPU-worker"]
        cmake_args = ['-DCMAKE_BUILD_TYPE=Release']
        build_args = []
    elif platform == 'linux':
        slavenames = ["LIN-IPU-worker"]
        cmake_args = ['-DCMAKE_BUILD_TYPE=Release']
        build_args = []
    for slavename in slavenames:
        c['builders'].append(
            BuilderConfig(
                name = "Catch-" + slavename.split("-")[0],
                slavenames = [slavename],
                factory = cmake_slave(
                    'Catch',
                    platform = platform,
                    install = False,
                    cmake_args = cmake_args,
                    build_args = build_args
                )
            )
        )

c['builders'].append(
    BuilderConfig(name="Julia-OSX",
                  slavenames=["OSX-IPU-worker"],
                  factory = julia_builder()
    )
)

#~ c['builders'].append(
#~     BuilderConfig(name="VxWorks-Linux",
#~                   slavenames=["linux-slave"],
#~                   factory = vxworks_module_builder()
#~     )
#~ )

c['builders'].append(
    BuilderConfig(name="Csharp-windows32",
                  slavenames=["WIN-IPU-worker"],
                  factory = cmake_slave('Csharp',
                                        test = True,
                                        platform = 'windows',
                                        cmake_args = ['-G', '"Visual Studio 16 2019"', '-A', 'Win32'],
                                        build_args = ['--config', 'Release'],
                                        ctest_args = ['-C', 'Release']
                                        )
    )
)

#c['builders'].append(
#    BuilderConfig(name="EES-windows",
#                  slavenames=["windows-slave"],
#                  factory = cmake_slave('EES',
#                                        platform = 'windows',
#                                        test = False,
#                                        cmake_args = ['-G', '"Visual Studio 10 2010"'],
#                                        build_args = ['--config','Release'])
#    )
#)

#for platform in ['windows', 'linux', 'OSX']:
# for platform in ['linux', 'OSX']:
#     cmake_args = ['-DBUILD_TESTING=ON']
#     ctest_args = []
#     build_args = []
#     if platform == 'windows':
#         build_args = ['--config','Release']
#         cmake_args += ['-G', '"Visual Studio 10 2010 Win64"']
#         ctest_args = ['-C','Release']
#     c['builders'].append(BuilderConfig(name="MATLAB-"+platform, slavenames=[platform+"-slave"], factory = swig_matlab_builder(platform, build_args = build_args, cmake_args = cmake_args, ctest_args = ctest_args)))
#     #if platform == 'linux':
#     #    c['builders'].append(BuilderConfig(name="Scilab-"+platform, slavenames=[platform+"-slave"], factory = swig_scilab_builder(platform)))
# c['builders'].append(BuilderConfig(name="MATLAB-windows32",
#                                    slavenames=["windows-slave"],
#                                    factory = swig_matlab_builder('windows',
#                                                                  cmake_env={'MATLAB_ROOT':'c:\Program Files (x86)\MATLAB\R2014a'},
#                                                                  cmake_args = ['-G', '"Visual Studio 10 2010"'],
#                                                                  build_args = ['--config','Release'],
#                                                                  ctest_args = ['-C','Release'])))

# c['builders'].append(BuilderConfig(
#     name="Matlab-binaries-windows-64bit-VS10",
#     slavenames = ["WIN-IPU-worker"],
#     factory = swig_matlab_builder("windows",
#                                   cmake_env = {'MATLAB_ROOT':'c:\Program Files\MatLab R2014a'},
#                                   cmake_args = ['-G', '"Visual Studio 10 2010 Win64"'],
#                                   build_args = ['--config','Release'],
#                                   ctest_args = ['-C','Release'])))

# c['builders'].append(BuilderConfig(
#     name="Matlab-binaries-OSX-10.10",
#     slavenames = ["OSX-IB-slave"],
#     factory = swig_matlab_builder("OSX",
#                                   cmake_args = ['-DBUILD_TESTING=OFF', '-DCOOLPROP_MATLAB_INSTALL_PREFIX=/Users/ian/buildbot-slave/Matlab-binaries-OSX-10_10/build/install_root/MATLAB/OSX10.10+'])))

# c['builders'].append(BuilderConfig(
#     name="Matlab-binaries-windows-32bit-VS10",
#     slavenames = ["windows-slave"],
#     factory = swig_matlab_builder("windows",
#                                   cmake_env = {'MATLAB_ROOT':'c:\Program Files (x86)\MATLAB\R2014a'},
#                                   cmake_args = ['-G', '"Visual Studio 10 2010"'],
#                                   build_args = ['--config','Release'],
#                                   ctest_args = ['-C','Release'])))

# Sort the list in place and store the names
c['builders'].sort(key=lambda x: x.name)
all_builder_names = [builder.name for builder in c['builders']]

####### NIGHTLY build ###############

swig_builders = []
for platform in ['windows', 'linux', 'OSX']:
    platname = platform
    if platform == 'windows':
        platform = 'linux'
        windows = True
    else:
        windows = False

    # c['builders'].append(
    #     BuilderConfig(name="nightlySWIG+MATLAB-" + platname,
    #                   slavenames=[platform + "-slave"],
    #                   factory = SWIG_MATLAB_bin_builder(platform, windows = windows)
    #     )
    # )
    # swig_builders.append(c['builders'][-1].name)
    #c['builders'].append(
    #    BuilderConfig(name="nightlySWIG+scilab-" + platname,
    #                  slavenames=[platform + "-slave"],
    #                  factory = SWIG_scilab_bin_builder(platform, windows = windows)
    #    )
    #)
    #swig_builders.append(c['builders'][-1].name)

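# Note: with the nightly SWIG builders above commented out, swig_builders stays
# empty, so the "force_swig_builders" scheduler defined further down has no
# builders attached to it.
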
############# RELEASE BUILDER #######################

@properties.renderer
def releaseCommand(props):
    if props.getProperty('dryrun', default = False):
        release = 'dryrun'
    else:
        release = 'release'

    version = props.getProperty('version')

    return ' '.join(['${HOME}/scripts/release.bsh', version, release])

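# For illustration only: with the defaults offered by the release scheduler below
# (version="0.0.0", dryrun=True), the renderer above produces the master-side
# command
#     ${HOME}/scripts/release.bsh 0.0.0 dryrun
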
def release_builder():
    f = BuildFactory()
    f.addStep(MasterShellCommand(command = releaseCommand))
    return f

c['builders'].append(
    BuilderConfig(name="release version",
                  slavenames=["OSX-slave"], # Slave is not used, all commands run on master
                  factory = release_builder()
    )
)

####### SCHEDULERS

from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.timed import Nightly
from buildbot.schedulers.forcesched import *
from buildbot.changes import filter

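# The wildcard import above supplies ForceScheduler and the parameter classes
# (StringParameter, BooleanParameter) used below; an equivalent explicit form
# would be (sketch):
#   from buildbot.schedulers.forcesched import ForceScheduler, StringParameter, BooleanParameter
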
c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
    name="all",
    change_filter=filter.ChangeFilter(branch='master'),
    treeStableTimer=None,
    builderNames=all_builder_names))
c['schedulers'].append(ForceScheduler(
    name="force",
    builderNames=all_builder_names,
    properties=[
        BooleanParameter(name="fullclean", label="Do a full clean", default=False),
        BooleanParameter(name="fullBuild", label="Do a full build of all the expensive docs", default=False)
    ]
))
c['schedulers'].append(ForceScheduler(
    name="release_scheduler",
    builderNames=['release version'],
    properties=[
        StringParameter(name="version", label="Version number", default="0.0.0"),
        BooleanParameter(name="dryrun", label="Do a dry-run of release", default=True)]
))
c['schedulers'].append(ForceScheduler(
    name="force_swig_builders",
    builderNames=swig_builders,
    properties=[
        BooleanParameter(name="fullclean",
                         label="Do a full clean", default=False)]))
c['schedulers'].append(ForceScheduler(
    name="force_sphinx_expensive",
    builderNames=['Sphinx docs'],
    properties=[
        BooleanParameter(name="fullBuild",
                         label="Do a full build of all the expensive docs", default=False)]))
c['schedulers'].append(Nightly(name='nightly',
                               branch='master',
                               builderNames=['Sphinx docs'],
                               hour=3, # 3am in Boulder = 11am in Copenhagen
                               minute=7,
                               onlyIfChanged=True))

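# onlyIfChanged=True means the nightly Sphinx build is skipped when no relevant
# changes have been committed since the previous nightly run.
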
#~ from buildbot.schedulers import basic

#~ swig_matlab_pre_scheduler = SingleBranchScheduler(
#~     name="swig-matlab-scheduler",
#~     change_filter=filter.ChangeFilter(branch='master'),
#~     treeStableTimer=None,
#~     builderNames=['SWIG-MATLAB-pre'])
#~ c['schedulers'].append(swig_matlab_pre_scheduler)

#~ c['schedulers'].append(basic.Dependent(name="swig-matlab-dependency",
#~                                        upstream=swig_matlab_pre_scheduler, # <- no quotes!
#~                                        builderNames=["MATLAB32-windows"])
#~ )

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = "CoolProp"
c['titleURL'] = "https://www.coolprop.org"

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = "http://www.coolprop.dreamhosters.com:8010/"

####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}
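
# For larger installations, db_url accepts any SQLAlchemy-style URL; a sketch
# with hypothetical credentials would be:
#   c['db'] = { 'db_url' : "mysql://buildbot:password@localhost/buildbot" }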