Compare commits
76 commits: v2.2.1...v2.3.2-old
Commits (SHA1):

ecb7e43660, 27f823139a, 88831439b1, 177dfd4615, 3bd2bb6dff, 393a470d05, 519b75aef2, e8b01bf1a0,
79cc560594, 32bdf84806, 070eab1473, 11e0a60e3b, 22d3a9db15, a3532d3c55, b502aa7049, d8e53e84fd,
16bb305d24, 6d0f34ac65, 2fd1f9ec16, ca74a8424b, ddf6b961b1, e5af5c2bfe, d02dda5775, 4a6a979f89,
81bf29b8ca, a8321aff92, 48832354da, dbfba732fd, 2f8f4e7fb1, e96f77d8b1, e14b97b18e, 5d4fdec627,
fc141f874a, a006293461, b7d505c2e2, eceeab66cf, e50271765f, d9e1df2367, 7881b3527c, 0ac6c96e2a,
eb5dd2a86c, b3efae2451, 9c58196b6d, 421fe54fe6, 11f5c94236, ff14d5ceb4, 11cb469fb9, 0ae628673c,
e11632798a, 24a70882d0, 430a699d7f, c056b5ad0f, a88300bdd7, 85f570ac09, d5c419bc8e, 08cdff9495,
d052350738, b53468e50e, e47e35bae4, 6071fdf198, 00f003afa5, c3918cdbaa, e2399dc7f3, 5d17fdf98d,
249db7db22, 4dcfbb2079, 9742c5f9c6, 2847f78ab2, 28aeda558b, 2dfa55420f, 22d0feaa05, cd0becff06,
355a5c2fb7, 139b491614, b15d9bb62e, 761152babe
.appveyor.yml (new file, 159 lines)

@@ -0,0 +1,159 @@
# .appveyor.yml for testing EPICS Base ci-scripts
# (see: https://github.com/epics-base/ci-scripts)

# Note:
# Paths to scripts are different in this test configuration
# (your module has one more directory level: .ci)

# Ralph Lange <ralph.lange@gmx.de>
# Copyright (c) 2020 ITER Organization

#---------------------------------#
#          build cache            #
#---------------------------------#

cache:
  - C:\Users\appveyor\.tools -> appveyor\do.py

#---------------------------------#
#        additional packages      #
#---------------------------------#

install:
  # for the sequencer
  - cinst re2c

#---------------------------------#
#        repository cloning       #
#---------------------------------#

# Called at very beginning, before repo cloning
init:
  # Set autocrlf to make batch files work
  - git config --global core.autocrlf true

# Set clone depth (do not fetch complete history)
clone_depth: 50

# Skipping commits affecting only specific files
skip_commits:
  files:
    - 'documentation/*'
    - 'templates/*'
    - '**/*.html'
    - '**/*.md'


#---------------------------------#
#    build matrix configuration   #
#---------------------------------#

# Build Configurations: dll/static, regular/debug
configuration:
  - dynamic
  - static
  - dynamic-debug
  - static-debug

# Environment variables: compiler toolchain, base version, setup file, ...
environment:
  # common / default variables for all jobs
  SETUP_PATH: .:.ci
  SET: test01
  BASE_RECURSIVE: NO
  VV: 1

  matrix:
    - CMP: vs2019
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
      SET: test00
    - CMP: mingw
    - CMP: vs2019
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
      VV: 0
    - CMP: vs2019
      BASE: 3.15
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
    - CMP: vs2019
      BASE: 3.14
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
    - CMP: vs2017
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
    - CMP: vs2015
    - CMP: vs2013
    - CMP: vs2012
    - CMP: vs2010
    - CMP: vs2008

# Platform: architecture
platform:
  - x86
  - x64

# Matrix configuration: allow specific failing jobs
matrix:
  exclude:
    # Run test00 only once: x64 dynamic
    - platform: x86
      SET: test00
    - configuration: static
      SET: test00
    - configuration: dynamic-debug
      SET: test00
    - configuration: static-debug
      SET: test00
    # VS2012 and older installs don't have the 64 bit compiler
    - platform: x64
      CMP: vs2012
    - platform: x64
      CMP: vs2010
    - platform: x64
      CMP: vs2008

# Run test script for unit tests (SET = test00)
for:
-
  matrix:
    only:
      - SET: test00
  build_script:
    - cmd: python appveyor-test.py
  test_script:
    - cmd: echo Tests have been run in the build phase

#---------------------------------#
#        building & testing       #
#---------------------------------#

build_script:
  - cmd: python appveyor/do.py prepare
  - cmd: python appveyor/do.py build

test_script:
  - cmd: python appveyor/do.py test

#---------------------------------#
#            debugging            #
#---------------------------------#

## if you want to connect by remote desktop to a failed build, uncomment these lines
## note that you will need to connect within the usual build timeout limit (60 minutes)
## so you may want to adjust the build matrix above to just build the one of interest

#on_failure:
#  - ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
#  - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))


#---------------------------------#
#          notifications          #
#---------------------------------#

notifications:

# - provider: Email
#   to:
#     - core-talk@aps.anl.gov
#   on_build_success: false

# - provider: GitHubPullRequest
README.md (20 lines changed)

@@ -1,5 +1,6 @@
<a target="_blank" href="http://semver.org">![Version][badge.version]</a>
<a target="_blank" href="https://travis-ci.org/epics-base/ci-scripts">![Travis status][badge.travis]</a>
<a target="_blank" href="https://ci.appveyor.com/project/epics-base/ci-scripts">![AppVeyor status][badge.appveyor]</a>

# Continuous Integration Scripts for EPICS Modules

@@ -56,11 +57,17 @@ example.
- Compile on MacOS
- Built dependencies are cached (for faster builds)

### AppVeyor
- Use different compilers (Visual Studio, MinGW)
- Use different Visual Studio versions: \
2008, 2010, 2012, 2013, 2015, 2017, 2019
- Compile for Windows 32bit and 64bit

## How to Use the CI-Scripts

1. Get an account on a supported CI service provider platform.
(e.g. [Travis-CI](https://travis-ci.org/),
AppVeyor, Azure Pipelines...)
[AppVeyor](https://www.appveyor.com/), Azure Pipelines...)

(More details in the specific README of the subdirectory.)

@@ -79,10 +86,10 @@ example.

BASE=3.15
ASYN=R4-34
SNCSEQ=R2-2-7
SNCSEQ=R2-2-8
```
will compile against the EPICS Base 3.15 branch, the Sequencer
release 2.2.7 and release 4.34 of asyn.
release 2.2.8 and release 4.34 of asyn.
(Any settings can be overridden from the specific job configuration
in e.g. `.travis.yml`.)

@@ -214,16 +221,16 @@ This will make all builds (not just for your module) verbose.

Update the submodule in `.ci` first, then change your CI configuration
(if needed) and commit both to your module. E.g., to update your Travis
setup to release 2.1.0 of ci-scripts:
setup to release 2.2.1 of ci-scripts:
```bash
cd .ci
git pull origin v2.1.0
git pull origin v2.2.1
cd -
git add .ci
# if needed:
edit .travis.yml
git add .travis.yml
git commit -m "Update ci-scripts submodule to v2.1.0"
git commit -m "Update ci-scripts submodule to v2.2.1"
```

Check the example configuration files inside ci-scripts (and their
@@ -266,6 +273,7 @@ in file LICENSE that is included with this distribution.
<!-- Links -->
[badge.version]: https://badge.fury.io/gh/epics-base%2Fci-scripts.svg
[badge.travis]: https://travis-ci.org/epics-base/ci-scripts.svg?branch=master
[badge.appveyor]: https://ci.appveyor.com/api/projects/status/xwdv8fpxu0byp3hn?svg=true

[reddit.bash]: https://www.reddit.com/r/bash/comments/393oqv/why_is_the_version_of_bash_included_in_os_x_so_old/
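The README notes above that any settings can be overridden from the specific job configuration. A minimal sketch of what such an override could look like on the AppVeyor side, in a module's `.appveyor.yml` environment matrix (the pinned `SNCSEQ` release is an illustrative assumption, not taken from this diff):

```yaml
# Hypothetical job entry in the environment matrix of a module's .appveyor.yml:
# it keeps the defaults from the settings files but pins a different Sequencer release.
environment:
  matrix:
    - CMP: vs2019
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
      SNCSEQ: R2-2-8
```

Environment variables set on a job take precedence over values from the settings files (the unit tests in appveyor-test.py below assert exactly this), so only this one job would build against the alternative Sequencer release.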
appveyor-test.py (new file, 394 lines)

@@ -0,0 +1,394 @@
|
||||
#!/usr/bin/env python
|
||||
"""Module ci-scripts AppVeyor unit tests
|
||||
"""
|
||||
|
||||
# SET=test00 in the environment (.appveyor.yml) runs the tests in this script
|
||||
# all other jobs are started as compile jobs
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import sys, os, shutil, fileinput
|
||||
import distutils.util
|
||||
import re
|
||||
import subprocess as sp
|
||||
import unittest
|
||||
import logging
|
||||
from argparse import Namespace
|
||||
|
||||
builddir = os.getcwd()
|
||||
|
||||
def find_in_file(regex, filename):
|
||||
file = open (filename, "r")
|
||||
for line in file:
|
||||
if re.search(regex, line):
|
||||
return True
|
||||
return False
|
||||
|
||||
def getStringIO():
|
||||
if (sys.version_info > (3, 0)):
|
||||
import io
|
||||
return io.StringIO()
|
||||
else:
|
||||
import StringIO
|
||||
return StringIO.StringIO()
|
||||
|
||||
sys.path.append('appveyor')
|
||||
import do
|
||||
|
||||
# we're working with tags (detached heads) a lot: suppress advice
|
||||
do.call_git(['config', '--global', 'advice.detachedHead', 'false'])
|
||||
|
||||
class TestSourceSet(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
os.environ['SETUP_PATH'] = '.:appveyor'
|
||||
if 'BASE' in os.environ:
|
||||
del os.environ['BASE']
|
||||
do.clear_lists()
|
||||
os.chdir(builddir)
|
||||
|
||||
def test_EmptySetupDirsPath(self):
|
||||
del os.environ['SETUP_PATH']
|
||||
self.assertRaisesRegexp(NameError, '\(SETUP_PATH\) is empty', do.source_set, 'test01')
|
||||
|
||||
def test_InvalidSetupName(self):
|
||||
self.assertRaisesRegexp(NameError, 'does not exist in SETUP_PATH', do.source_set, 'xxdoesnotexistxx')
|
||||
|
||||
def test_ValidSetupName(self):
|
||||
capturedOutput = getStringIO()
|
||||
sys.stdout = capturedOutput
|
||||
do.source_set('test01')
|
||||
sys.stdout = sys.__stdout__
|
||||
self.assertEqual(do.setup['BASE'], '7.0', 'BASE was not set to \'7.0\'')
|
||||
|
||||
def test_SetupDoesNotOverridePreset(self):
|
||||
os.environ['BASE'] = 'foo'
|
||||
capturedOutput = getStringIO()
|
||||
sys.stdout = capturedOutput
|
||||
do.source_set('test01')
|
||||
sys.stdout = sys.__stdout__
|
||||
self.assertEqual(do.setup['BASE'], 'foo',
|
||||
'Preset BASE was overridden by test01 setup (expected \'foo\' got {0})'
|
||||
.format(do.setup['BASE']))
|
||||
|
||||
def test_IncludeSetupFirstSetWins(self):
|
||||
capturedOutput = getStringIO()
|
||||
sys.stdout = capturedOutput
|
||||
do.source_set('test02')
|
||||
sys.stdout = sys.__stdout__
|
||||
self.assertEqual(do.setup['BASE'], 'foo',
|
||||
'BASE set in test02 was overridden by test01 setup (expected \'foo\' got {0})'
|
||||
.format(do.setup['BASE']))
|
||||
self.assertEqual(do.setup['FOO'], 'bar', 'Setting of single word does not work')
|
||||
self.assertEqual(do.setup['FOO2'], 'bar bar2', 'Setting of multiple words does not work')
|
||||
self.assertEqual(do.setup['FOO3'], 'bar bar2', 'Indented setting of multiple words does not work')
|
||||
self.assertEqual(do.setup['SNCSEQ'], 'R2-2-7', 'Setup test01 was not included')
|
||||
|
||||
def test_DoubleIncludeGetsIgnored(self):
|
||||
capturedOutput = getStringIO()
|
||||
sys.stdout = capturedOutput
|
||||
do.source_set('test03')
|
||||
sys.stdout = sys.__stdout__
|
||||
self.assertRegexpMatches(capturedOutput.getvalue(), 'Ignoring already included setup file')
|
||||
|
||||
class TestUpdateReleaseLocal(unittest.TestCase):
|
||||
|
||||
release_local = os.path.join(do.cachedir, 'RELEASE.local')
|
||||
|
||||
def setUp(self):
|
||||
if os.path.exists(self.release_local):
|
||||
os.remove(self.release_local)
|
||||
os.chdir(builddir)
|
||||
|
||||
def test_SetModule(self):
|
||||
do.update_release_local('MOD1', '/foo/bar')
|
||||
found = 0
|
||||
for line in fileinput.input(self.release_local, inplace=1):
|
||||
if 'MOD1=' in line:
|
||||
self.assertEqual(line.strip(), 'MOD1=/foo/bar', 'MOD1 not set correctly')
|
||||
found += 1
|
||||
fileinput.close()
|
||||
self.assertEqual(found, 1, 'MOD1 not written once to RELEASE.local (found {0})'.format(found))
|
||||
|
||||
def test_SetBaseAndMultipleModules(self):
|
||||
do.update_release_local('EPICS_BASE', '/bar/foo')
|
||||
do.update_release_local('MOD1', '/foo/bar')
|
||||
do.update_release_local('MOD2', '/foo/bar2')
|
||||
do.update_release_local('MOD1', '/foo/bar1')
|
||||
found = {}
|
||||
foundat = {}
|
||||
for line in fileinput.input(self.release_local, inplace=1):
|
||||
if 'MOD1=' in line:
|
||||
self.assertEqual(line.strip(), 'MOD1=/foo/bar1',
|
||||
'MOD1 not set correctly (expected \'MOD1=/foo/bar1\' found \'{0}\')'
|
||||
.format(line))
|
||||
if 'mod1' in found:
|
||||
found['mod1'] += 1
|
||||
else:
|
||||
found['mod1'] = 1
|
||||
foundat['mod1'] = fileinput.filelineno()
|
||||
if 'MOD2=' in line:
|
||||
self.assertEqual(line.strip(), 'MOD2=/foo/bar2',
|
||||
'MOD2 not set correctly (expected \'MOD2=/foo/bar2\' found \'{0}\')'
|
||||
.format(line))
|
||||
if 'mod2' in found:
|
||||
found['mod2'] += 1
|
||||
else:
|
||||
found['mod2'] = 1
|
||||
foundat['mod2'] = fileinput.filelineno()
|
||||
if 'EPICS_BASE=' in line:
|
||||
self.assertEqual(line.strip(), 'EPICS_BASE=/bar/foo',
|
||||
'EPICS_BASE not set correctly (expected \'EPICS_BASE=/bar/foo\' found \'{0}\')'
|
||||
.format(line))
|
||||
if 'base' in found:
|
||||
found['base'] += 1
|
||||
else:
|
||||
found['base'] = 1
|
||||
foundat['base'] = fileinput.filelineno()
|
||||
fileinput.close()
|
||||
self.assertEqual(found['mod1'], 1,
|
||||
'MOD1 does not appear once in RELEASE.local (found {0})'.format(found['mod1']))
|
||||
self.assertEqual(found['mod2'], 1,
|
||||
'MOD2 does not appear once in RELEASE.local (found {0})'.format(found['mod2']))
|
||||
self.assertEqual(found['base'], 1,
|
||||
'EPICS_BASE does not appear once in RELEASE.local (found {0})'.format(found['base']))
|
||||
self.assertGreater(foundat['base'], foundat['mod2'],
|
||||
'EPICS_BASE (line {0}) appears before MOD2 (line {1})'
|
||||
.format(foundat['base'], foundat['mod2']))
|
||||
self.assertGreater(foundat['mod2'], foundat['mod1'],
|
||||
'MOD2 (line {0}) appears before MOD1 (line {1})'.format(foundat['mod2'], foundat['mod1']))
|
||||
|
||||
class TestAddDependencyUpToDateCheck(unittest.TestCase):
|
||||
|
||||
hash_3_15_6 = "ce7943fb44beb22b453ddcc0bda5398fadf72096"
|
||||
location = os.path.join(do.cachedir, 'base-R3.15.6')
|
||||
licensefile = os.path.join(location, 'LICENSE')
|
||||
checked_file = os.path.join(location, 'checked_out')
|
||||
release_file = os.path.join(location, 'configure', 'RELEASE')
|
||||
|
||||
def setUp(self):
|
||||
os.environ['SETUP_PATH'] = '.:appveyor'
|
||||
if os.path.exists(self.location):
|
||||
shutil.rmtree(self.location, onerror=do.remove_readonly)
|
||||
do.clear_lists()
|
||||
os.chdir(builddir)
|
||||
do.source_set('defaults')
|
||||
do.complete_setup('BASE')
|
||||
|
||||
def test_MissingDependency(self):
|
||||
do.setup['BASE'] = 'R3.15.6'
|
||||
do.add_dependency('BASE')
|
||||
self.assertTrue(os.path.exists(self.licensefile), 'Missing dependency was not checked out')
|
||||
self.assertTrue(os.path.exists(self.checked_file), 'Checked-out commit marker was not written')
|
||||
with open(self.checked_file, 'r') as bfile:
|
||||
checked_out = bfile.read().strip()
|
||||
bfile.close()
|
||||
self.assertEqual(checked_out, self.hash_3_15_6,
|
||||
'Wrong commit of dependency checked out (expected=\"{0}\" found=\"{1}\")'
|
||||
.format(self.hash_3_15_6, checked_out))
|
||||
self.assertFalse(find_in_file('include \$\(TOP\)/../RELEASE.local', self.release_file),
|
||||
'RELEASE in Base includes TOP/../RELEASE.local')
|
||||
|
||||
def test_UpToDateDependency(self):
|
||||
do.setup['BASE'] = 'R3.15.6'
|
||||
do.add_dependency('BASE')
|
||||
os.remove(self.licensefile)
|
||||
do.add_dependency('BASE')
|
||||
self.assertFalse(os.path.exists(self.licensefile), 'Check out on top of existing up-to-date dependency')
|
||||
|
||||
def test_OutdatedDependency(self):
|
||||
do.setup['BASE'] = 'R3.15.6'
|
||||
do.add_dependency('BASE')
|
||||
os.remove(self.licensefile)
|
||||
with open(self.checked_file, "w") as fout:
|
||||
print('XXX not the right hash XXX', file=fout)
|
||||
fout.close()
|
||||
do.add_dependency('BASE')
|
||||
self.assertTrue(os.path.exists(self.licensefile), 'No check-out on top of out-of-date dependency')
|
||||
with open(self.checked_file, 'r') as bfile:
|
||||
checked_out = bfile.read().strip()
|
||||
bfile.close()
|
||||
self.assertEqual(checked_out, self.hash_3_15_6,
|
||||
"Wrong commit of dependency checked out (expected='{0}' found='{1}')"
|
||||
.format(self.hash_3_15_6, checked_out))
|
||||
|
||||
def is_shallow_repo(place):
|
||||
check = sp.check_output(['git', 'rev-parse', '--is-shallow-repository'], cwd=place).strip()
|
||||
if check == '--is-shallow-repository':
|
||||
if os.path.exists(os.path.join(place, '.git', 'shallow')):
|
||||
check = 'true'
|
||||
else:
|
||||
check = 'false'
|
||||
return check == 'true'
|
||||
|
||||
class TestAddDependencyOptions(unittest.TestCase):
|
||||
|
||||
location = os.path.join(do.cachedir, 'mcoreutils-master')
|
||||
testfile = os.path.join(location, '.ci', 'LICENSE')
|
||||
|
||||
def setUp(self):
|
||||
os.environ['SETUP_PATH'] = '.:appveyor'
|
||||
if os.path.exists(do.cachedir):
|
||||
shutil.rmtree(do.cachedir, onerror=do.remove_readonly)
|
||||
do.clear_lists()
|
||||
do.source_set('defaults')
|
||||
do.complete_setup('MCoreUtils')
|
||||
do.setup['MCoreUtils'] = 'master'
|
||||
|
||||
def test_Default(self):
|
||||
do.add_dependency('MCoreUtils')
|
||||
self.assertTrue(os.path.exists(self.testfile),
|
||||
'Submodule (.ci) not checked out recursively (requested: default=YES')
|
||||
self.assertTrue(is_shallow_repo(self.location),
|
||||
'Module not checked out shallow (requested: default=5)')
|
||||
|
||||
def test_SetRecursiveNo(self):
|
||||
do.setup['MCoreUtils_RECURSIVE'] = 'NO'
|
||||
do.add_dependency('MCoreUtils')
|
||||
self.assertFalse(os.path.exists(self.testfile), 'Submodule (.ci) checked out recursively')
|
||||
|
||||
def test_SetDepthZero(self):
|
||||
do.setup['MCoreUtils_DEPTH'] = '0'
|
||||
do.add_dependency('MCoreUtils')
|
||||
self.assertFalse(is_shallow_repo(self.location), 'Module checked out shallow (requested full)')
|
||||
|
||||
def test_SetDepthThree(self):
|
||||
do.setup['MCoreUtils_DEPTH'] = '3'
|
||||
do.add_dependency('MCoreUtils')
|
||||
self.assertTrue(is_shallow_repo(self.location),
|
||||
'Module not checked out shallow (requested: default=5)')
|
||||
|
||||
def test_AddMsiTo314(self):
|
||||
do.complete_setup('BASE')
|
||||
do.setup['BASE'] = 'R3.14.12.1'
|
||||
msifile = os.path.join(do.cachedir, 'base-R3.14.12.1', 'src', 'dbtools', 'msi.c')
|
||||
do.add_dependency('BASE')
|
||||
self.assertTrue(os.path.exists(msifile), 'MSI was not added to Base 3.14')
|
||||
|
||||
def repo_access(dep):
|
||||
do.set_setup_from_env(dep)
|
||||
do.setup.setdefault(dep + "_DIRNAME", dep.lower())
|
||||
do.setup.setdefault(dep + "_REPONAME", dep.lower())
|
||||
do.setup.setdefault('REPOOWNER', 'epics-modules')
|
||||
do.setup.setdefault(dep + "_REPOOWNER", do.setup['REPOOWNER'])
|
||||
do.setup.setdefault(dep + "_REPOURL", 'https://github.com/{0}/{1}.git'
|
||||
.format(do.setup[dep + '_REPOOWNER'], do.setup[dep + '_REPONAME']))
|
||||
with open(os.devnull, 'w') as devnull:
|
||||
return do.call_git(['ls-remote', '--quiet', '--heads', do.setup[dep + '_REPOURL']],
|
||||
stdout=devnull, stderr=devnull)
|
||||
|
||||
class TestDefaultModuleURLs(unittest.TestCase):
|
||||
|
||||
modules = ['BASE', 'PVDATA', 'PVACCESS', 'NTYPES',
|
||||
'SNCSEQ', 'STREAM', 'ASYN', 'STD',
|
||||
'CALC', 'AUTOSAVE', 'BUSY', 'SSCAN',
|
||||
'IOCSTATS', 'MOTOR', 'IPAC', ]
|
||||
|
||||
def setUp(self):
|
||||
os.environ['SETUP_PATH'] = '.:appveyor'
|
||||
do.clear_lists()
|
||||
os.chdir(builddir)
|
||||
do.source_set('defaults')
|
||||
|
||||
def test_Repos(self):
|
||||
for mod in self.modules:
|
||||
self.assertEqual(repo_access(mod), 0, 'Defaults for {0} do not point to a valid git repository at {1}'
|
||||
.format(mod, do.setup[mod + '_REPOURL']))
|
||||
|
||||
class TestVCVars(unittest.TestCase):
|
||||
def test_vcvars(self):
|
||||
if ('CMP' in os.environ and os.environ['CMP'] in ('mingw',)) \
|
||||
or distutils.util.get_platform() != "win32":
|
||||
raise unittest.SkipTest()
|
||||
|
||||
do.with_vcvars('env')
|
||||
|
||||
class TestSetupForBuild(unittest.TestCase):
|
||||
configuration = os.environ['CONFIGURATION']
|
||||
platform = os.environ['PLATFORM']
|
||||
cc = os.environ['CMP']
|
||||
args = Namespace(paths=[])
|
||||
|
||||
def setUp(self):
|
||||
os.environ.pop('EPICS_HOST_ARCH', None)
|
||||
|
||||
def tearDown(self):
|
||||
os.environ['CONFIGURATION'] = self.configuration
|
||||
os.environ['PLATFORM'] = self.platform
|
||||
os.environ['CMP'] = self.cc
|
||||
|
||||
def test_AddPathsOption(self):
|
||||
os.environ['FOOBAR'] = 'BAR'
|
||||
args = Namespace(paths=['/my/{FOOBAR}/dir', '/my/foobar'])
|
||||
do.setup_for_build(args)
|
||||
self.assertTrue(re.search('/my/BAR/dir', os.environ['PATH']), 'Expanded path not in PATH')
|
||||
self.assertTrue(re.search('/foobar', os.environ['PATH']), 'Plain path not in PATH')
|
||||
os.environ.pop('FOOBAR', None)
|
||||
|
||||
def test_HostArchConfiguration(self):
|
||||
for config in ['dynamic', 'dynamic-debug', 'static', 'static-debug']:
|
||||
os.environ['CONFIGURATION'] = config
|
||||
do.setup_for_build(self.args)
|
||||
self.assertTrue('EPICS_HOST_ARCH' in os.environ,
|
||||
'EPICS_HOST_ARCH is not set for Configuration={0}'.format(config))
|
||||
if re.search('static', config):
|
||||
self.assertTrue(re.search('-static$', os.environ['EPICS_HOST_ARCH']),
|
||||
'EPICS_HOST_ARCH is not -static for Configuration={0}'.format(config))
|
||||
self.assertFalse(re.search('debug', os.environ['EPICS_HOST_ARCH']),
|
||||
'EPICS_HOST_ARCH is -debug for Configuration={0}'.format(config))
|
||||
elif re.search('debug', config):
|
||||
self.assertFalse(re.search('static', os.environ['EPICS_HOST_ARCH']),
|
||||
'EPICS_HOST_ARCH is -static for Configuration={0}'.format(config))
|
||||
self.assertTrue(re.search('-debug$', os.environ['EPICS_HOST_ARCH']),
|
||||
'EPICS_HOST_ARCH is not -debug for Configuration={0}'.format(config))
|
||||
else:
|
||||
self.assertFalse(re.search('static', os.environ['EPICS_HOST_ARCH']),
|
||||
'EPICS_HOST_ARCH is -static for Configuration={0}'.format(config))
|
||||
self.assertFalse(re.search('debug', os.environ['EPICS_HOST_ARCH']),
|
||||
'EPICS_HOST_ARCH is -debug for Configuration={0}'.format(config))
|
||||
|
||||
def test_HostArchPlatform(self):
|
||||
for platform in ['x86', 'x64', 'X64']:
|
||||
for cc in ['vs2019', 'mingw']:
|
||||
os.environ['PLATFORM'] = platform
|
||||
os.environ['CMP'] = cc
|
||||
os.environ['CONFIGURATION'] = 'dynamic'
|
||||
do.setup_for_build(self.args)
|
||||
self.assertTrue('EPICS_HOST_ARCH' in os.environ,
|
||||
'EPICS_HOST_ARCH is not set for {0} / {1}'.format(cc, platform))
|
||||
if platform == 'x86':
|
||||
self.assertTrue(re.search('^win32-x86', os.environ['EPICS_HOST_ARCH']),
|
||||
'EPICS_HOST_ARCH is not win32-x86 for {0} / {1}'.format(cc, platform))
|
||||
else:
|
||||
self.assertTrue(re.search('^windows-x64', os.environ['EPICS_HOST_ARCH']),
|
||||
'EPICS_HOST_ARCH is not windows-x64 for {0} / {1}'.format(cc, platform))
|
||||
if cc == 'mingw':
|
||||
self.assertTrue(re.search('-mingw$', os.environ['EPICS_HOST_ARCH']),
|
||||
'EPICS_HOST_ARCH is not -mingw for {0} / {1}'.format(cc, platform))
|
||||
if platform == 'x86':
|
||||
pattern = 'mingw32'
|
||||
else:
|
||||
pattern = 'mingw64'
|
||||
self.assertTrue(re.search(pattern, os.environ['PATH']),
|
||||
'Binary location for {0} not in PATH'.format(pattern))
|
||||
self.assertTrue(re.search(pattern, os.environ['INCLUDE']),
|
||||
'Include location for {0} not in INCLUDE'.format(pattern))
|
||||
|
||||
def test_StrawberryInPath(self):
|
||||
os.environ['CMP'] = 'vs2019'
|
||||
do.setup_for_build(self.args)
|
||||
self.assertTrue(re.search('strawberry', os.environ['PATH'], flags=re.IGNORECASE),
|
||||
'Strawberry Perl location not in PATH for vs2019')
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if 'VV' in os.environ and os.environ['VV'] == '1':
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
do.silent_dep_builds = False
|
||||
|
||||
do.host_info()
|
||||
if sys.argv[1:]==['env']:
|
||||
# testing with_vcvars
|
||||
[print(K,'=',V) for K, V in os.environ.items()]
|
||||
else:
|
||||
unittest.main()
|
||||
appveyor/.appveyor.yml.example-full (new file, 151 lines)

@@ -0,0 +1,151 @@
# .appveyor.yml for use with EPICS Base ci-scripts
# (see: https://github.com/epics-base/ci-scripts)

# This is YAML - indentation levels are crucial

#---------------------------------#
#          build cache            #
#---------------------------------#
# The AppVeyor cache allowance is way too small (1GB per account across all projects, branches and jobs)
# to be used for the dependency builds.

cache:
  - C:\Users\appveyor\.tools

#---------------------------------#
#        additional packages      #
#---------------------------------#

install:
  # for the sequencer
  - cinst re2c

#---------------------------------#
#        repository cloning       #
#---------------------------------#

# Called at very beginning, before repo cloning
init:
  # Set autocrlf to make batch files work
  - git config --global core.autocrlf true

# Set clone depth (do not fetch complete history)
clone_depth: 50

# Skipping commits affecting only specific files
skip_commits:
  files:
    - 'documentation/*'
    - 'templates/*'
    - '**/*.html'
    - '**/*.md'

#---------------------------------#
#    build matrix configuration   #
#---------------------------------#

# Since dependencies cannot be cached and AppVeyor only grants a single builder VM, all jobs
# are executed sequentially, each one taking 10-15 minutes.
# Consider this when defining your build matrix. (A full matrix build takes more than 8 hours.)

# Build Configurations: dll/static, regular/debug
configuration:
  - dynamic
  - static
  - dynamic-debug
  - static-debug

# Environment variables: compiler toolchain, base version, setup file, ...
environment:
  # common / default variables for all jobs
  SETUP_PATH: .ci-local:.ci

  matrix:
    - CMP: vs2019
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
      SET: test00
    - CMP: mingw
    - CMP: vs2019
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
    - CMP: vs2019
      BASE: 3.15
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
    - CMP: vs2019
      BASE: 3.14
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
    - CMP: vs2017
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
    - CMP: vs2015
    - CMP: vs2013
    - CMP: vs2012
    - CMP: vs2010
    - CMP: vs2008

# Platform: processor architecture
platform:
  - x86
  - x64

# Matrix configuration: exclude sets of jobs
matrix:
  exclude:
    # VS2012 and older installs don't have the 64 bit compiler
    - platform: x64
      CMP: vs2012
    - platform: x64
      CMP: vs2010
    - platform: x64
      CMP: vs2008
    # Exclude more jobs to reduce build time
    # E.g., skip 32-bit for newer compilers
    #- platform: x86
    #  CMP: vs2019
    #- platform: x86
    #  CMP: vs2017

#---------------------------------#
#        building & testing       #
#---------------------------------#

install:
  - cmd: git submodule update --init --recursive
  - cmd: python .ci/appveyor/do.py prepare

build_script:
  - cmd: python .ci/appveyor/do.py build

test_script:
  - cmd: python .ci/appveyor/do.py test

on_finish:
  - ps: Get-ChildItem *.tap -Recurse -Force | % { Push-AppveyorArtifact $_.FullName -FileName $_.Name }
  - cmd: python .ci/appveyor/do.py build test-results -s

#---------------------------------#
#            debugging            #
#---------------------------------#

## if you want to connect by remote desktop to a failed build, uncomment these lines
## note that you will need to connect within the usual build timeout limit (60 minutes)
## so you may want to adjust the build matrix above to just build the one of interest

# print the connection info
#init:
#  - ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))

# block a failed build (until the watchdog barks)
#on_failure:
#  - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))

#---------------------------------#
#          notifications          #
#---------------------------------#

notifications:

  - provider: Email
    to:
      - me@example.com
    on_build_success: false

  - provider: GitHubPullRequest
appveyor/.appveyor.yml.example-mini (new file, 71 lines)

@@ -0,0 +1,71 @@
# .appveyor.yml for use with EPICS Base ci-scripts
# (see: https://github.com/epics-base/ci-scripts)

# This is YAML - indentation levels are crucial

cache:
  - C:\Users\appveyor\.tools

init:
  - git config --global core.autocrlf true

clone_depth: 50

skip_commits:
  files:
    - 'documentation/*'
    - 'templates/*'
    - '**/*.html'
    - '**/*.md'

# Build Configurations: dll/static, regular/debug
configuration:
  - dynamic
#  - static
  - dynamic-debug
#  - static-debug

environment:
  # common / default variables for all jobs
  SETUP_PATH: .ci-local:.ci

  matrix:
    - CMP: vs2019
      BASE: 7.0
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
    - CMP: vs2019
      BASE: 3.15
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019

# Platform: processor architecture
platform:
#  - x86
  - x64

# Matrix configuration: exclude sets of jobs
matrix:
  exclude:
    # VS2012 and older installs don't have the 64 bit compiler
    - platform: x64
      CMP: vs2012
    - platform: x64
      CMP: vs2010
    - platform: x64
      CMP: vs2008

install:
  - cmd: git submodule update --init --recursive
  - cmd: python .ci/appveyor/do.py prepare

build_script:
  - cmd: python .ci/appveyor/do.py build

test_script:
  - cmd: python .ci/appveyor/do.py test

on_finish:
  - ps: Get-ChildItem *.tap -Recurse -Force | % { Push-AppveyorArtifact $_.FullName -FileName $_.Name }
  - cmd: python .ci/appveyor/do.py build test-results -s

notifications:
  - provider: GitHubPullRequest
appveyor/README.md (new file, 55 lines)

@@ -0,0 +1,55 @@
# AppVeyor Scripts for EPICS Modules

## Features

- Use different compilers (Visual Studio, MinGW)
- Use different VS versions (2008, 2010, 2012, 2013, 2015, 2017, 2019)
- Compile for Windows 32bit and 64bit
- Create static libraries or DLLs (plus the matching executables)
- Create optimized or debug builds

## How to Use these Scripts

1. Get an account on [AppVeyor](https://www.appveyor.com/), connect
   it to your GitHub account and activate your support module's
   repository. For more details, please refer to the
   [AppVeyor documentation](https://www.appveyor.com/docs/).

2. Add the ci-scripts repository as a Git Submodule
   (see [README](../README.md) one level above).

3. Add settings files defining which dependencies in which versions
   you want to build against
   (see [README](../README.md) one level above).

4. Create an AppVeyor configuration by copying one of the examples into
   the root directory of your module.
   ```
   $ cp .ci/appveyor/.appveyor.yml.example-full .appveyor.yml
   ```

5. Edit the `.appveyor.yml` configuration to include the jobs you want
   AppVeyor to run.

   AppVeyor automatically creates a build matrix with the following axes:
   1. `configuration:` \
      Select static or dynamic (DLL) as well as regular or debug builds.
   2. `platform:` \
      Select 32bit or 64bit processor architecture.
   3. `environment: / matrix:` \
      List of environment variable settings. Each list element (starting with
      a dash) is one step on the axis of the build matrix. \
      Set `CMP` to select the compiler: `mingw` for the native
      [MinGW](http://mingw-w64.org/) GNU compiler, `vs2008` ...`vs2019`
      (options listed above) for the Microsoft Visual Studio compilers.

   Your builds will take a long time. \
   AppVeyor only grants a single worker VM - all jobs of the matrix are
   executed sequentially. Each job will take around 10 minutes.

   The `matrix: / exclude:` setting can be used to reduce the number of
   jobs, as illustrated in the sketch below. Check the
   [AppVeyor docs](https://www.appveyor.com/docs/build-configuration/#build-matrix)
   for more ways to reduce the build matrix size.

6. Push your changes and check
   [ci.appveyor.com](https://ci.appveyor.com/) for your build results.
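As a concrete illustration of the `matrix: / exclude:` advice in this README, a sketch that trims the 32-bit jobs for the newer Visual Studio toolchains, mirroring the commented-out exclusions in `.appveyor.yml.example-full` (which jobs to drop is, of course, a per-project choice):

```yaml
# Hypothetical additions to the matrix exclude list of a module's .appveyor.yml,
# removing the x86 jobs for the two newest Visual Studio toolchains.
matrix:
  exclude:
    - platform: x86
      CMP: vs2019
    - platform: x86
      CMP: vs2017
```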
appveyor/do.py (new file, 632 lines)

@@ -0,0 +1,632 @@
|
||||
#!/usr/bin/env python
|
||||
"""Windows (AppVeyor) ci build script
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import sys, os, stat, shutil
|
||||
import fileinput
|
||||
import logging
|
||||
import re
|
||||
import subprocess as sp
|
||||
import distutils.util
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Setup ANSI Colors
|
||||
ANSI_RED = "\033[31;1m"
|
||||
ANSI_GREEN = "\033[32;1m"
|
||||
ANSI_YELLOW = "\033[33;1m"
|
||||
ANSI_BLUE = "\033[34;1m"
|
||||
ANSI_MAGENTA = "\033[35;1m"
|
||||
ANSI_CYAN = "\033[36;1m"
|
||||
ANSI_RESET = "\033[0m"
|
||||
ANSI_CLEAR = "\033[0K"
|
||||
|
||||
seen_setups = []
|
||||
modules_to_compile = []
|
||||
setup = {}
|
||||
places = {}
|
||||
|
||||
if 'HomeDrive' in os.environ:
|
||||
cachedir = os.path.join(os.getenv('HomeDrive'), os.getenv('HomePath'), '.cache')
|
||||
toolsdir = os.path.join(os.getenv('HomeDrive'), os.getenv('HomePath'), '.tools')
|
||||
elif 'HOME' in os.environ:
|
||||
cachedir = os.path.join(os.getenv('HOME'), '.cache')
|
||||
toolsdir = os.path.join(os.getenv('HOME'), '.tools')
|
||||
else:
|
||||
cachedir = os.path.join('.', '.cache')
|
||||
toolsdir = os.path.join('.', '.tools')
|
||||
|
||||
if 'CACHEDIR' in os.environ:
|
||||
cachedir = os.environ['CACHEDIR']
|
||||
|
||||
vcvars_table = {
|
||||
# https://en.wikipedia.org/wiki/Microsoft_Visual_Studio#History
|
||||
'vs2019':r'C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Auxiliary\Build\vcvarsall.bat',
|
||||
'vs2017':r'C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvarsall.bat',
|
||||
'vs2015':r'C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat',
|
||||
'vs2013':r'C:\Program Files (x86)\Microsoft Visual Studio 12.0\VC\vcvarsall.bat',
|
||||
'vs2012':r'C:\Program Files (x86)\Microsoft Visual Studio 11.0\VC\vcvarsall.bat',
|
||||
'vs2010':r'C:\Program Files (x86)\Microsoft Visual Studio 10.0\VC\vcvarsall.bat',
|
||||
'vs2008':r'C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\vcvarsall.bat',
|
||||
}
|
||||
|
||||
ciscriptsdir = os.path.abspath(os.path.dirname(sys.argv[0]))
|
||||
if os.path.basename(ciscriptsdir) == 'appveyor':
|
||||
ciscriptsdir = ciscriptsdir.rstrip(os.pathsep+'appveyor')
|
||||
|
||||
if 'BASE' not in os.environ or os.environ['BASE'] == 'SELF':
|
||||
building_base = True
|
||||
places['EPICS_BASE'] = '.'
|
||||
else:
|
||||
building_base = False
|
||||
|
||||
def modlist():
|
||||
if building_base:
|
||||
ret = []
|
||||
else:
|
||||
for var in ['ADD_MODULES', 'MODULES']:
|
||||
setup.setdefault(var, '')
|
||||
if var in os.environ:
|
||||
setup[var] = os.environ[var]
|
||||
logger.debug('ENV assignment: %s = %s', var, setup[var])
|
||||
ret = ['BASE'] + setup['ADD_MODULES'].upper().split() + setup['MODULES'].upper().split()
|
||||
logger.debug('Effective module list: %s', ret)
|
||||
return ret
|
||||
|
||||
zip7 = r'C:\Program Files\7-Zip\7z'
|
||||
make = ''
|
||||
isbase314 = False
|
||||
silent_dep_builds = True
|
||||
|
||||
def host_info():
|
||||
print('{0}Python setup{1}'.format(ANSI_CYAN, ANSI_RESET))
|
||||
print(sys.version)
|
||||
print('PYTHONPATH')
|
||||
for dname in sys.path:
|
||||
print(' ', dname)
|
||||
print('platform =', distutils.util.get_platform())
|
||||
|
||||
print('{0}Available Visual Studio versions{1}'.format(ANSI_CYAN, ANSI_RESET))
|
||||
for key in vcvars_table:
|
||||
if os.path.exists(vcvars_table[key]):
|
||||
print('Found', key, 'in', vcvars_table[key])
|
||||
sys.stdout.flush()
|
||||
|
||||
# Used from unittests
|
||||
def clear_lists():
|
||||
del seen_setups[:]
|
||||
del modules_to_compile[:]
|
||||
setup.clear()
|
||||
places.clear()
|
||||
|
||||
# Error-handler to make shutil.rmtree delete read-only files on Windows
|
||||
def remove_readonly(func, path, excinfo):
|
||||
os.chmod(path, stat.S_IWRITE)
|
||||
func(path)
|
||||
|
||||
# source_set(setup)
|
||||
#
|
||||
# Source a settings file (extension .set) found in the setup_dirs path
|
||||
# May be called recursively (from within a setup file)
|
||||
def source_set(name):
|
||||
# allowed separators: colon or whitespace
|
||||
setup_dirs = os.getenv('SETUP_PATH', "").replace(':', ' ').split()
|
||||
if len(setup_dirs) == 0:
|
||||
raise NameError("{0}Search path for setup files (SETUP_PATH) is empty{1}".format(ANSI_RED,ANSI_RESET))
|
||||
|
||||
for set_dir in setup_dirs:
|
||||
set_file = os.path.join(set_dir, name) + ".set"
|
||||
|
||||
if set_file in seen_setups:
|
||||
print("Ignoring already included setup file {0}".format(set_file))
|
||||
return
|
||||
|
||||
if os.path.isfile(set_file):
|
||||
seen_setups.append(set_file)
|
||||
print("Loading setup file {0}".format(set_file))
|
||||
sys.stdout.flush()
|
||||
with open(set_file) as fp:
|
||||
for line in fp:
|
||||
logger.debug('Next line: %s', line.strip())
|
||||
if not line.strip() or line.strip()[0] == '#':
|
||||
continue
|
||||
if line.startswith("include"):
|
||||
logger.debug('Found an include, reading %s', line.split()[1])
|
||||
source_set(line.split()[1])
|
||||
continue
|
||||
assign = line.replace('"', '').strip().split("=", 1)
|
||||
logger.debug('Interpreting as assignment')
|
||||
setup.setdefault(assign[0], os.getenv(assign[0], ""))
|
||||
if not setup[assign[0]].strip():
|
||||
logger.debug('Doing assignment: %s = %s', assign[0], assign[1])
|
||||
setup[assign[0]] = assign[1]
|
||||
break
|
||||
else:
|
||||
raise NameError("{0}Setup file {1} does not exist in SETUP_PATH search path ({2}){3}"
|
||||
.format(ANSI_RED, name, setup_dirs, ANSI_RESET))
|
||||
|
||||
# update_release_local(var, location)
|
||||
# var name of the variable to set in RELEASE.local
|
||||
# location location (absolute path) of where variable should point to
|
||||
#
|
||||
# Manipulate RELEASE.local in the cache location:
|
||||
# - replace "$var=$location" line if it exists and has changed
|
||||
# - otherwise add "$var=$location" line and possibly move EPICS_BASE=... line to the end
|
||||
# Set places[var] = location
|
||||
def update_release_local(var, location):
|
||||
release_local = os.path.join(cachedir, 'RELEASE.local')
|
||||
updated_line = '{0}={1}'.format(var, location.replace('\\', '/'))
|
||||
places[var] = location
|
||||
|
||||
if not os.path.exists(release_local):
|
||||
logger.debug('RELEASE.local does not exist, creating it')
|
||||
try:
|
||||
os.makedirs(cachedir)
|
||||
except:
|
||||
pass
|
||||
fout = open(release_local, 'w')
|
||||
fout.close()
|
||||
base_line = ''
|
||||
found = False
|
||||
logger.debug("Opening RELEASE.local for adding '%s'", updated_line)
|
||||
for line in fileinput.input(release_local, inplace=1):
|
||||
outputline = line.strip()
|
||||
if 'EPICS_BASE=' in line:
|
||||
base_line = line.strip()
|
||||
logger.debug("Found EPICS_BASE line '%s', not writing it", base_line)
|
||||
continue
|
||||
elif '{0}='.format(var) in line:
|
||||
logger.debug("Found '%s=' line, replacing", var)
|
||||
found = True
|
||||
outputline = updated_line
|
||||
logger.debug("Writing line to RELEASE.local: '%s'", outputline)
|
||||
print(outputline)
|
||||
fileinput.close()
|
||||
fout = open(release_local,"a")
|
||||
if not found:
|
||||
logger.debug("Adding new definition: '%s'", updated_line)
|
||||
print(updated_line, file=fout)
|
||||
if base_line:
|
||||
logger.debug("Writing EPICS_BASE line: '%s'", base_line)
|
||||
print(base_line, file=fout)
|
||||
fout.close()
|
||||
|
||||
def set_setup_from_env(dep):
|
||||
for postf in ['', '_DIRNAME', '_REPONAME', '_REPOOWNER', '_REPOURL',
|
||||
'_VARNAME', '_RECURSIVE', '_DEPTH', '_HOOK']:
|
||||
if dep+postf in os.environ:
|
||||
setup[dep+postf] = os.environ[dep+postf]
|
||||
logger.debug('ENV assignment: %s = %s', dep+postf, setup[dep+postf])
|
||||
|
||||
def call_git(args, **kws):
|
||||
if 'cwd' in kws:
|
||||
place = kws['cwd']
|
||||
else:
|
||||
place = os.getcwd()
|
||||
logger.debug("EXEC '%s' in %s", ' '.join(['git'] + args), place)
|
||||
sys.stdout.flush()
|
||||
exitcode = sp.call(['git'] + args, **kws)
|
||||
logger.debug('EXEC DONE')
|
||||
return exitcode
|
||||
|
||||
def call_make(args=[], **kws):
|
||||
place = kws.get('cwd', os.getcwd())
|
||||
parallel = kws.pop('parallel', 2)
|
||||
silent = kws.pop('silent', False)
|
||||
# no parallel make for Base 3.14
|
||||
if parallel <= 0 or isbase314:
|
||||
makeargs = []
|
||||
else:
|
||||
makeargs = ['-j{0}'.format(parallel), '-Otarget']
|
||||
if silent:
|
||||
makeargs += ['-s']
|
||||
logger.debug("EXEC '%s' in %s", ' '.join([make] + makeargs + args), place)
|
||||
sys.stdout.flush()
|
||||
exitcode = sp.call([make] + makeargs + args, **kws)
|
||||
logger.debug('EXEC DONE')
|
||||
if exitcode != 0:
|
||||
sys.exit(exitcode)
|
||||
|
||||
def get_git_hash(place):
|
||||
logger.debug("EXEC 'git log -n1 --pretty=format:%%H' in %s", place)
|
||||
sys.stdout.flush()
|
||||
head = sp.check_output(['git', 'log', '-n1', '--pretty=format:%H'], cwd=place).decode()
|
||||
logger.debug('EXEC DONE')
|
||||
return head
|
||||
|
||||
def complete_setup(dep):
|
||||
set_setup_from_env(dep)
|
||||
setup.setdefault(dep, 'master')
|
||||
setup.setdefault(dep+"_DIRNAME", dep.lower())
|
||||
setup.setdefault(dep+"_REPONAME", dep.lower())
|
||||
setup.setdefault('REPOOWNER', 'epics-modules')
|
||||
setup.setdefault(dep+"_REPOOWNER", setup['REPOOWNER'])
|
||||
setup.setdefault(dep+"_REPOURL", 'https://github.com/{0}/{1}.git'
|
||||
.format(setup[dep+'_REPOOWNER'], setup[dep+'_REPONAME']))
|
||||
setup.setdefault(dep+"_VARNAME", dep)
|
||||
setup.setdefault(dep+"_RECURSIVE", 'YES')
|
||||
setup.setdefault(dep+"_DEPTH", -1)
|
||||
|
||||
# add_dependency(dep, tag)
|
||||
#
|
||||
# Add a dependency to the cache area:
|
||||
# - check out (recursive if configured) in the CACHE area unless it already exists and the
|
||||
# required commit has been built
|
||||
# - Defaults:
|
||||
# $dep_DIRNAME = lower case ($dep)
|
||||
# $dep_REPONAME = lower case ($dep)
|
||||
# $dep_REPOURL = GitHub / $dep_REPOOWNER (or $REPOOWNER or epics-modules) / $dep_REPONAME .git
|
||||
# $dep_VARNAME = $dep
|
||||
# $dep_DEPTH = 5
|
||||
# $dep_RECURSIVE = 1/YES (0/NO to for a flat clone)
|
||||
# - Add $dep_VARNAME line to the RELEASE.local file in the cache area (unless already there)
|
||||
# - Add full path to $modules_to_compile
|
||||
def add_dependency(dep):
|
||||
recurse = setup[dep+'_RECURSIVE'].lower()
|
||||
if recurse not in ['0', 'no']:
|
||||
recursearg = ["--recursive"]
|
||||
elif recurse not in ['1', 'yes']:
|
||||
recursearg = []
|
||||
else:
|
||||
raise RuntimeError("Invalid value for {}_RECURSIVE='{}' not 0/NO/1/YES".format(dep, recurse))
|
||||
deptharg = {
|
||||
'-1':['--depth', '5'],
|
||||
'0':[],
|
||||
}.get(str(setup[dep+'_DEPTH']), ['--depth', str(setup[dep+'_DEPTH'])])
|
||||
|
||||
tag = setup[dep]
|
||||
|
||||
logger.debug('Adding dependency %s with tag %s', dep, setup[dep])
|
||||
|
||||
# determine if dep points to a valid release or branch
|
||||
if call_git(['ls-remote', '--quiet', '--exit-code', '--refs', setup[dep+'_REPOURL'], tag]):
|
||||
raise RuntimeError("{0}{1} is neither a tag nor a branch name for {2} ({3}){4}"
|
||||
.format(ANSI_RED, tag, dep, setup[dep+'_REPOURL'], ANSI_RESET))
|
||||
|
||||
dirname = setup[dep+'_DIRNAME']+'-{0}'.format(tag)
|
||||
place = os.path.join(cachedir, dirname)
|
||||
checked_file = os.path.join(place, "checked_out")
|
||||
|
||||
if os.path.isdir(place):
|
||||
logger.debug('Dependency %s: directory %s exists, comparing checked-out commit', dep, place)
|
||||
# check HEAD commit against the hash in marker file
|
||||
if os.path.exists(checked_file):
|
||||
with open(checked_file, 'r') as bfile:
|
||||
checked_out = bfile.read().strip()
|
||||
bfile.close()
|
||||
else:
|
||||
checked_out = 'never'
|
||||
head = get_git_hash(place)
|
||||
logger.debug('Found checked_out commit %s, git head is %s', checked_out, head)
|
||||
if head != checked_out:
|
||||
logger.debug('Dependency %s out of date - removing', dep)
|
||||
shutil.rmtree(place, onerror=remove_readonly)
|
||||
else:
|
||||
print('Found {0} of dependency {1} up-to-date in {2}'.format(tag, dep, place))
|
||||
sys.stdout.flush()
|
||||
|
||||
if not os.path.isdir(place):
|
||||
if not os.path.isdir(cachedir):
|
||||
os.makedirs(cachedir)
|
||||
# clone dependency
|
||||
print('Cloning {0} of dependency {1} into {2}'
|
||||
.format(tag, dep, place))
|
||||
sys.stdout.flush()
|
||||
call_git(['clone', '--quiet'] + deptharg + recursearg + ['--branch', tag, setup[dep+'_REPOURL'], dirname], cwd=cachedir)
|
||||
|
||||
sp.check_call(['git', 'log', '-n1'], cwd=place)
|
||||
modules_to_compile.append(place)
|
||||
|
||||
if dep == 'BASE':
|
||||
# add MSI 1.7 to Base 3.14
|
||||
versionfile = os.path.join(place, 'configure', 'CONFIG_BASE_VERSION')
|
||||
if os.path.exists(versionfile):
|
||||
with open(versionfile) as f:
|
||||
if 'BASE_3_14=YES' in f.read():
|
||||
print('Adding MSI 1.7 to {0}'.format(place))
|
||||
sys.stdout.flush()
|
||||
sp.check_call(['patch', '-p0', '-i', os.path.join(ciscriptsdir, 'add-msi-to-314.patch')],
|
||||
cwd=place)
|
||||
else:
|
||||
# force including RELEASE.local for non-base modules by overwriting their configure/RELEASE
|
||||
release = os.path.join(place, "configure", "RELEASE")
|
||||
if os.path.exists(release):
|
||||
with open(release, 'w') as fout:
|
||||
print('-include $(TOP)/../RELEASE.local', file=fout)
|
||||
|
||||
# run hook if defined
|
||||
if dep+'_HOOK' in setup:
|
||||
hook = os.path.join(place, setup[dep+'_HOOK'])
|
||||
if os.path.exists(hook):
|
||||
print('Running hook {0} in {1}'.format(setup[dep+'_HOOK'], place))
|
||||
sys.stdout.flush()
|
||||
sp.check_call(hook, shell=True, cwd=place)
|
||||
|
||||
# write checked out commit hash to marker file
|
||||
head = get_git_hash(place)
|
||||
logger.debug('Writing hash of checked-out dependency (%s) to marker file', head)
|
||||
with open(checked_file, "w") as fout:
|
||||
print(head, file=fout)
|
||||
fout.close()
|
||||
|
||||
update_release_local(setup[dep+"_VARNAME"], place)
|
||||
|
||||
def setup_for_build(args):
|
||||
global make, isbase314
|
||||
dllpaths = []
|
||||
|
||||
# there is no combined static and debug EPICS_HOST_ARCH target,
|
||||
# so a combined debug and static target will appear to be just static
|
||||
# but debug will have been specified in CONFIG_SITE by prepare()
|
||||
hostarchsuffix=''
|
||||
if re.search('debug', os.environ['CONFIGURATION']):
|
||||
hostarchsuffix = '-debug'
|
||||
if re.search('static', os.environ['CONFIGURATION']):
|
||||
hostarchsuffix = '-static'
|
||||
|
||||
if os.environ['PLATFORM'].lower() == 'x86':
|
||||
os.environ['EPICS_HOST_ARCH'] = 'win32-x86' + hostarchsuffix
|
||||
elif os.environ['PLATFORM'].lower() == 'x64':
|
||||
os.environ['EPICS_HOST_ARCH'] = 'windows-x64' + hostarchsuffix
|
||||
|
||||
if os.environ['CMP'] == 'vs2019':
|
||||
# put strawberry perl in the PATH
|
||||
os.environ['PATH'] = os.pathsep.join([os.path.join(r'C:\Strawberry\perl\site\bin'),
|
||||
os.path.join(r'C:\Strawberry\perl\bin'),
|
||||
os.environ['PATH']])
|
||||
if os.environ['CMP'] == 'mingw':
|
||||
if 'INCLUDE' not in os.environ:
|
||||
os.environ['INCLUDE'] = ''
|
||||
if os.environ['PLATFORM'].lower() == 'x86':
|
||||
os.environ['EPICS_HOST_ARCH'] = 'win32-x86-mingw'
|
||||
os.environ['INCLUDE'] = os.pathsep.join([r'C:\mingw-w64\i686-6.3.0-posix-dwarf-rt_v5-rev1\mingw32\include',
|
||||
os.environ['INCLUDE']])
|
||||
os.environ['PATH'] = os.pathsep.join([r'C:\mingw-w64\i686-6.3.0-posix-dwarf-rt_v5-rev1\mingw32\bin',
|
||||
os.environ['PATH']])
|
||||
elif os.environ['PLATFORM'].lower() == 'x64':
|
||||
os.environ['EPICS_HOST_ARCH'] = 'windows-x64-mingw'
|
||||
os.environ['INCLUDE'] = os.pathsep.join([r'C:\mingw-w64\x86_64-8.1.0-posix-seh-rt_v6-rev0\mingw64\include',
|
||||
os.environ['INCLUDE']])
|
||||
os.environ['PATH'] = os.pathsep.join([r'C:\mingw-w64\x86_64-8.1.0-posix-seh-rt_v6-rev0\mingw64\bin',
|
||||
os.environ['PATH']])
|
||||
|
||||
make = os.path.join(toolsdir, 'make.exe')
|
||||
|
||||
base_place = '.'
|
||||
if not building_base:
|
||||
with open(os.path.join(cachedir, 'RELEASE.local'), 'r') as f:
|
||||
lines = f.readlines()
|
||||
for line in lines:
|
||||
(mod, place) = line.strip().split('=')
|
||||
bindir = os.path.join(place, 'bin', os.environ['EPICS_HOST_ARCH'])
|
||||
if os.path.isdir(bindir):
|
||||
dllpaths.append(bindir)
|
||||
if mod == 'EPICS_BASE':
|
||||
base_place = place
|
||||
|
||||
cfg_base_version = os.path.join(base_place, 'configure', 'CONFIG_BASE_VERSION')
|
||||
if os.path.exists(cfg_base_version):
|
||||
with open(cfg_base_version) as myfile:
|
||||
if 'BASE_3_14=YES' in myfile.read():
|
||||
isbase314 = True
|
||||
|
||||
bindir = os.path.join(os.getcwd(), 'bin', os.environ['EPICS_HOST_ARCH'])
|
||||
if os.path.isdir(bindir):
|
||||
dllpaths.append(bindir)
|
||||
|
||||
os.environ['PATH'] = os.pathsep.join(dllpaths + [os.environ['PATH']])
|
||||
|
||||
# apparently %CD% is handled automagically
|
||||
os.environ['TOP'] = os.getcwd()
|
||||
|
||||
addpaths = []
|
||||
for path in args.paths:
|
||||
try:
|
||||
addpaths.append(path.format(**os.environ))
|
||||
except KeyError:
|
||||
print('Environment')
|
||||
[print(' ',K,'=',repr(V)) for K,V in os.environ.items()]
|
||||
raise
|
||||
|
||||
os.environ['PATH'] = os.pathsep.join([os.environ['PATH']] + addpaths)
|
||||
|
||||
def prepare(args):
|
||||
host_info()
|
||||
|
||||
print('{0}Loading setup files{1}'.format(ANSI_YELLOW, ANSI_RESET))
|
||||
source_set('defaults')
|
||||
if 'SET' in os.environ:
|
||||
source_set(os.environ['SET'])
|
||||
|
||||
[complete_setup(mod) for mod in modlist()]
|
||||
|
||||
logger.debug('Loaded setup')
|
||||
kvs = list(setup.items())
|
||||
kvs.sort()
|
||||
[logger.debug(' %s = "%s"', *kv) for kv in kvs]
|
||||
|
||||
# we're working with tags (detached heads) a lot: suppress advice
|
||||
call_git(['config', '--global', 'advice.detachedHead', 'false'])
|
||||
|
||||
print('{0}Checking/cloning dependencies{1}'.format(ANSI_YELLOW, ANSI_RESET))
|
||||
sys.stdout.flush()
|
||||
|
||||
[add_dependency(mod) for mod in modlist()]
|
||||
|
||||
if not building_base:
|
||||
if os.path.isdir('configure'):
|
||||
targetdir = 'configure'
|
||||
else:
|
||||
targetdir = '.'
|
||||
shutil.copy(os.path.join(cachedir, 'RELEASE.local'), targetdir)
|
||||
|
||||
print('{0}Configuring EPICS build system{1}'.format(ANSI_YELLOW, ANSI_RESET))
|
||||
|
||||
with open(os.path.join(places['EPICS_BASE'], 'configure', 'CONFIG_SITE'), 'a') as config_site:
|
||||
if re.search('static', os.environ['CONFIGURATION']):
|
||||
config_site.write('SHARED_LIBRARIES=NO\n')
|
||||
config_site.write('STATIC_BUILD=YES\n')
|
||||
linktype = 'static'
|
||||
else:
|
||||
linktype = 'dynamic (DLL)'
|
||||
if re.search('debug', os.environ['CONFIGURATION']):
|
||||
config_site.write('HOST_OPT=NO\n')
|
||||
optitype = 'debug'
|
||||
else:
|
||||
optitype = 'optimized'
|
||||
|
||||
print('EPICS Base build system set up for {0} build with {1} linking'
|
||||
.format(optitype, linktype))
|
||||
|
||||
if not os.path.isdir(toolsdir):
|
||||
os.makedirs(toolsdir)
|
||||
|
||||
makever = '4.2.1'
|
||||
if not os.path.exists(os.path.join(toolsdir, 'make.exe')):
|
||||
print('Installing Make 4.2.1 from ANL web site')
|
||||
sys.stdout.flush()
|
||||
sp.check_call(['curl', '-fsS', '--retry', '3', '-o', 'make-{0}.zip'.format(makever),
|
||||
'https://epics.anl.gov/download/tools/make-{0}-win64.zip'.format(makever)],
|
||||
cwd=toolsdir)
|
||||
sp.check_call([zip7, 'e', 'make-{0}.zip'.format(makever)], cwd=toolsdir)
|
||||
os.remove(os.path.join(toolsdir, 'make-{0}.zip'.format(makever)))
|
||||
|
||||
setup_for_build(args)
|
||||
|
||||
print('{0}EPICS_HOST_ARCH = {1}{2}'.format(ANSI_CYAN, os.environ['EPICS_HOST_ARCH'], ANSI_RESET))
|
||||
print('{0}$ {1} --version{2}'.format(ANSI_CYAN, make, ANSI_RESET))
|
||||
sys.stdout.flush()
|
||||
call_make(['--version'], parallel=0)
|
||||
print('{0}$ perl --version{1}'.format(ANSI_CYAN, ANSI_RESET))
|
||||
sys.stdout.flush()
|
||||
sp.check_call(['perl', '--version'])
|
||||
|
||||
if os.environ['CMP'] == 'mingw':
|
||||
print('{0}$ gcc --version{1}'.format(ANSI_CYAN, ANSI_RESET))
|
||||
sys.stdout.flush()
|
||||
sp.check_call(['gcc', '--version'])
|
||||
else:
|
||||
print('{0}$ cl{1}'.format(ANSI_CYAN, ANSI_RESET))
|
||||
sys.stdout.flush()
|
||||
sp.check_call(['cl'])
|
||||
|
||||
if not building_base:
|
||||
for mod in modlist():
|
||||
place = places[setup[mod+"_VARNAME"]]
|
||||
print('{0}Building dependency {1} in {2}{3}'.format(ANSI_YELLOW, mod, place, ANSI_RESET))
|
||||
call_make(cwd=place, silent=silent_dep_builds)
|
||||
|
||||
print('{0}Dependency module information{1}'.format(ANSI_CYAN, ANSI_RESET))
|
||||
print('Module Tag Binaries Commit')
|
||||
print(100 * '-')
|
||||
for mod in modlist():
|
||||
commit = sp.check_output(['git', 'log', '-n1', '--oneline'], cwd=places[setup[mod+"_VARNAME"]]).strip()
|
||||
print("%-10s %-12s %-11s %s" % (mod, setup[mod], 'rebuilt', commit))
|
||||
|
||||
print('{0}Contents of RELEASE.local{1}'.format(ANSI_CYAN, ANSI_RESET))
|
||||
with open(os.path.join(cachedir, 'RELEASE.local'), 'r') as f:
|
||||
print(f.read().strip())
|
||||
|
||||
def build(args):
|
||||
setup_for_build(args)
|
||||
print('{0}Building the main module{1}'.format(ANSI_YELLOW, ANSI_RESET))
|
||||
call_make(args.makeargs)
|
||||
|
||||
def test(args):
|
||||
setup_for_build(args)
|
||||
print('{0}Running the main module tests{1}'.format(ANSI_YELLOW, ANSI_RESET))
|
||||
call_make(['tapfiles'], parallel=0)
|
||||
call_make(['test-results'], parallel=0, silent=True)
|
||||
|
||||
def doExec(args):
|
||||
'exec user command with vcvars'
|
||||
setup_for_build(args)
|
||||
os.environ['MAKE'] = make
|
||||
print('Execute command {}'.format(args.cmd))
|
||||
sys.stdout.flush()
|
||||
sp.check_call(' '.join(args.cmd), shell=True)
|
||||
|
||||
def with_vcvars(cmd):
|
||||
'''re-exec main script with a (hopefully different) command
|
||||
'''
|
||||
CC = os.environ['CMP']
|
||||
|
||||
# cf. https://docs.microsoft.com/en-us/cpp/build/building-on-the-command-line
|
||||
|
||||
info = {
|
||||
'python': sys.executable,
|
||||
'self': sys.argv[0],
|
||||
'cmd':cmd,
|
||||
}
|
||||
|
||||
info['arch'] = {
|
||||
'x86': 'x86', # 'amd64_x86' ??
|
||||
'x64': 'amd64',
|
||||
}[os.environ['PLATFORM'].lower()] # 'x86' or 'x64'
|
||||
|
||||
info['vcvars'] = vcvars_table[CC]
|
||||
|
||||
script='''
|
||||
call "{vcvars}" {arch}
|
||||
|
||||
"{python}" "{self}" {cmd}
|
||||
'''.format(**info)
|
||||
|
||||
logger.debug('----- Creating vcvars-trampoline.bat -----')
|
||||
for line in script.split('\n'):
|
||||
logger.debug(line)
|
||||
logger.debug('----- snip -----')
|
||||
|
||||
with open('vcvars-trampoline.bat', 'w') as F:
|
||||
F.write(script)
|
||||
|
||||
print('{0}Calling vcvars-trampoline.bat to set environment for {1} on {2}{3}'
|
||||
.format(ANSI_YELLOW, CC, os.environ['PLATFORM'], ANSI_RESET))
|
||||
sys.stdout.flush()
|
||||
returncode = sp.call('vcvars-trampoline.bat', shell=True)
|
||||
if returncode != 0:
|
||||
sys.exit(returncode)
|
||||
|
||||
def getargs():
|
||||
from argparse import ArgumentParser, REMAINDER
|
||||
P = ArgumentParser()
|
||||
P.add_argument('--no-vcvars', dest='vcvars', default=True, action='store_false',
|
||||
help='Assume vcvarsall.bat has already been run')
|
||||
P.add_argument('--add-path', dest='paths', default=[], action='append',
|
||||
help='Append directory to %PATH%. Expands {ENVVAR}')
|
||||
SP = P.add_subparsers()
|
||||
|
||||
CMD = SP.add_parser('prepare')
|
||||
CMD.set_defaults(func=prepare)
|
||||
|
||||
CMD = SP.add_parser('build')
|
||||
CMD.add_argument('makeargs', nargs=REMAINDER)
|
||||
CMD.set_defaults(func=build)
|
||||
|
||||
CMD = SP.add_parser('test')
|
||||
CMD.set_defaults(func=test)
|
||||
|
||||
CMD = SP.add_parser('exec')
|
||||
CMD.add_argument('cmd', nargs=REMAINDER)
|
||||
CMD.set_defaults(func=doExec)
|
||||
|
||||
return P
|
||||
|
||||
def main(raw):
|
||||
global silent_dep_builds
|
||||
args = getargs().parse_args(raw)
|
||||
if 'VV' in os.environ and os.environ['VV'] == '1':
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
silent_dep_builds = False
|
||||
|
||||
if args.vcvars and os.environ['CMP'].startswith('vs'):
|
||||
# re-exec with MSVC in PATH
|
||||
with_vcvars(' '.join(['--no-vcvars']+raw))
|
||||
|
||||
else:
|
||||
args.func(args)
|
||||
|
||||
if __name__=='__main__':
|
||||
main(sys.argv[1:])
|
||||
@@ -53,6 +53,7 @@ script:
# SET          source setup file
# ADD_MODULES  extra modules (for a specific job)
# EXTRA        content will be added to make command line
#              (embedded quotes must be escaped as \\\")
# STATIC       set to YES for static build (default: NO)
# TEST         set to NO to skip running the tests (default: YES)
# VV           set to make build scripts verbose (default: unset)

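For reference, a minimal sketch of how the variables documented in this hunk could be set per job in a module's `.travis.yml` environment matrix; the setup file names and module selection are illustrative assumptions only:

```yaml
# Hypothetical env matrix entries for a module's .travis.yml;
# variable names follow the list above, all values are examples only.
env:
  - SET=defaults VV=1
  - SET=defaults ADD_MODULES="ASYN" TEST=NO
  - SET=defaults STATIC=YES EXTRA="CMD_CXXFLAGS=-std=c++11"
```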
@@ -13,7 +13,7 @@ export EPICS_BASE
|
||||
[ -z "$EPICS_HOST_ARCH" -a -f $EPICS_BASE/startup/EpicsHostArch.pl ] && EPICS_HOST_ARCH=$(perl $EPICS_BASE/startup/EpicsHostArch.pl)
|
||||
export EPICS_HOST_ARCH
|
||||
|
||||
make -j2 $EXTRA
|
||||
[ -z "$EXTRA" ] && make -j2 || make -j2 "$EXTRA"
|
||||
|
||||
ret=0
|
||||
|
||||
@@ -21,7 +21,7 @@ if [ "$TEST" != "NO" ]
|
||||
then
|
||||
make tapfiles || ret=$?
|
||||
|
||||
make -s test-results
|
||||
make -sk test-results
|
||||
fi
|
||||
|
||||
exit $ret
|
||||
|
||||
@@ -29,6 +29,23 @@ CACHEDIR=${CACHEDIR:-${HOME}/.cache}
|
||||
|
||||
echo -e "${ANSI_YELLOW}Using bash version $BASH_VERSION${ANSI_RESET}"
|
||||
|
||||
if [ -f /etc/hosts ]
|
||||
then
|
||||
# The travis-ci "bionic" image throws us a curveball in /etc/hosts
|
||||
# by including two entries for localhost. The first for 127.0.1.1
|
||||
# which causes epicsSockResolveTest to fail.
|
||||
# cat /etc/hosts
|
||||
# ...
|
||||
# 127.0.1.1 localhost localhost ip4-loopback
|
||||
# 127.0.0.1 localhost nettuno travis vagrant travis-job-....
|
||||
|
||||
sudo sed -i -e '/^127\.0\.1\.1/ s|localhost\s*||g' /etc/hosts
|
||||
|
||||
echo "==== /etc/hosts"
|
||||
cat /etc/hosts
|
||||
echo "===="
|
||||
fi
|
||||
|
||||
# Load settings
|
||||
# -------------
|
||||
|
||||
@@ -45,23 +62,31 @@ fold_end load.settings
|
||||
# Check out dependencies
|
||||
# ----------------------
|
||||
|
||||
fold_start check.out.dependencies "Checking/cloning dependencies"
|
||||
if [ "$BASE" != "SELF" ]
|
||||
then
|
||||
fold_start check.out.dependencies "Checking/cloning dependencies"
|
||||
|
||||
for mod in BASE $ADD_MODULES $MODULES
|
||||
do
|
||||
mod_uc=${mod^^}
|
||||
eval add_dependency $mod_uc \${${mod_uc}:=master}
|
||||
done
|
||||
[ -e ./configure ] && cp ${CACHEDIR}/RELEASE.local ./configure/RELEASE.local
|
||||
for mod in BASE $ADD_MODULES $MODULES
|
||||
do
|
||||
mod_uc=${mod^^}
|
||||
eval add_dependency $mod_uc \${${mod_uc}:=master}
|
||||
done
|
||||
[ -e ./configure ] && cp ${CACHEDIR}/RELEASE.local ./configure/RELEASE.local
|
||||
|
||||
fold_end check.out.dependencies
|
||||
fold_end check.out.dependencies
|
||||
fi
|
||||
|
||||
# Set up compiler
|
||||
# ---------------
|
||||
|
||||
fold_start set.up.epics_build "Setting up EPICS build system"
|
||||
|
||||
eval $(grep "EPICS_BASE=" ${CACHEDIR}/RELEASE.local)
|
||||
if [ "$BASE" = "SELF" ]
|
||||
then
|
||||
EPICS_BASE=$CURDIR
|
||||
else
|
||||
eval $(grep "EPICS_BASE=" ${CACHEDIR}/RELEASE.local)
|
||||
fi
|
||||
export EPICS_BASE
|
||||
echo "EPICS_BASE=$EPICS_BASE"
|
||||
|
||||
@@ -70,7 +95,7 @@ echo "EPICS_BASE=$EPICS_BASE"
|
||||
export EPICS_HOST_ARCH
|
||||
echo "EPICS_HOST_ARCH=$EPICS_HOST_ARCH"
|
||||
|
||||
if echo ${modules_to_compile} | grep -q "$EPICS_BASE"
|
||||
if echo ${modules_to_compile} | grep -q "$EPICS_BASE" || [ "$BASE" = "SELF" ]
|
||||
then
|
||||
|
||||
# requires wine and g++-mingw-w64-i686
|
||||
@@ -180,6 +205,8 @@ fold_end set.up.compiler
|
||||
echo "\$ make --version"
|
||||
make --version
|
||||
|
||||
[ "$BASE" = "SELF" ] && exit 0
|
||||
|
||||
# Build required dependencies
|
||||
# ---------------------------
|
||||
|
||||
|
||||