133 Commits

Author SHA1 Message Date
Ralph Lange
98bc1d8f1f Readme: add links to lower level readme pages, fix badge URL 2020-04-30 15:23:56 +02:00
Ralph Lange
4484a9c302 appveyor: explicitly set default build worker image
- also update paragraph on worker images to AppVeyor README
2020-04-30 12:00:15 +02:00
Ralph Lange
ebc4b5ca2a Readme: update references; advise to clear cache after update 2020-04-30 11:21:50 +02:00
Ralph Lange
26c268eb28 Merge pull request #36 from mdavidsaver:msi314
- updated with a required change to the AppVeyor script
2020-04-30 09:41:51 +02:00
Ralph Lange
02c1c016c4 example: add host-side expanded substitutions file
- checks availability of a working MSI tool (see #20 and #36)
2020-04-30 09:34:04 +02:00
Michael Davidsaver
27a1224d6d Refresh MSI 3.14 patch to allow using it
- set MSI to $EPICS_BASE_BIN/msi
- make patch file more symmetrical (patch with -p1)
- closes #20 (again), closes #36
2020-04-30 09:19:03 +02:00
Ralph Lange
157a7af098 appveyor: move init part of debugging settings to init section 2020-04-29 16:58:12 +02:00
Ralph Lange
3c1e223dc7 appveyor: don't build for changes in .travis.yml 2020-04-28 15:31:59 +02:00
Ralph Lange
4f4f76a5db appveyor: log name of build worker image 2020-04-28 14:37:30 +02:00
Ralph Lange
e91a588370 travis: copy RELEASE.local to top of checkout
- closes #32
  (fix is in AppVeyor do.py script as per 88831439)
2020-04-27 11:34:01 +02:00
Ralph Lange
29e657d585 Run tests using parallel make (reducing build time) 2020-04-27 11:34:01 +02:00
Ralph Lange
4413c7d75e travis: fix build.sh (set EPICS_BASE correctly) for BASE=SELF 2020-04-24 19:16:12 +02:00
Ralph Lange
48b15417a6 travis: fix RTEMS cross builds for Base 3.15 2020-04-24 19:12:20 +02:00
Ralph Lange
1ac8bf7479 appveyor: fix behavior when BASE set in setup file 2020-04-24 16:16:50 +02:00
Ralph Lange
d0f93f1920 travis: fix for EXTRA arguments with spaces/quotes
- feed EXTRA variables into an array to be properly expanded
2020-04-24 12:27:30 +02:00
Ralph Lange
27f823139a appveyor: don't walk() through the file system in host_info()
- was taking ~3min on AppVeyor builders
2020-04-23 14:12:03 +02:00
Ralph Lange
88831439b1 appveyor: consider base build (BASE=SELF) 2020-04-23 14:11:18 +02:00
Ralph Lange
177dfd4615 travis: fix /etc/hosts issue on bionic image 2020-04-23 14:11:17 +02:00
Ralph Lange
3bd2bb6dff travis: consider base build (BASE=SELF) 2020-04-22 13:36:16 +02:00
Ralph Lange
393a470d05 appveyor: add CMP doc to README 2020-04-22 10:25:18 +02:00
Ralph Lange
519b75aef2 appveyor: use pre-installed strawberry perl on vs2019 image 2020-04-22 10:25:18 +02:00
Ralph Lange
e8b01bf1a0 Merge pull request #29 from ralphlange:devel/add-appveyor
- closes #29, closes #6
2020-04-21 18:32:12 +02:00
Ralph Lange
79cc560594 appveyor: add minimal example file 2020-04-21 17:48:26 +02:00
Ralph Lange
32bdf84806 appveyor: improve RDP debugging options 2020-04-21 17:40:29 +02:00
Ralph Lange
070eab1473 appveyor: re-add recursive submodule update 2020-04-21 17:31:24 +02:00
Ralph Lange
11e0a60e3b appveyor: update example and README files 2020-04-08 15:56:00 +02:00
Ralph Lange
22d3a9db15 appveyor: add MSI 1.7 to any Base 3.14 dependency
- add test for patch being applied
2020-04-08 15:14:43 +02:00
Ralph Lange
a3532d3c55 appveyor: make cachedir configurable 2020-04-08 15:14:43 +02:00
Ralph Lange
b502aa7049 appveyor: update .appveyor.yml.example-full 2020-04-08 15:14:43 +02:00
Ralph Lange
d8e53e84fd appveyor: CC -> CMP for compiler toolchain setting 2020-04-08 15:14:43 +02:00
Ralph Lange
16bb305d24 appveyor: add setup_for_build() tests 2020-04-08 15:14:43 +02:00
Michael Davidsaver
6d0f34ac65 appveyor: do --add-path 2020-04-08 15:14:43 +02:00
Michael Davidsaver
2fd1f9ec16 appveyor: show test-results after .tap upload 2020-04-08 15:14:43 +02:00
Ralph Lange
ca74a8424b appveyor: create jobs for base 7.0, 3.15, 3.14
- default Base 7.0 / recursive = no
- add base 3.15 and 3.14 on vs2019
2020-04-08 15:14:43 +02:00
Ralph Lange
ddf6b961b1 appveyor: use VV to set logging level and silence dependency builds 2020-04-08 15:14:43 +02:00
Ralph Lange
e5af5c2bfe appveyor: print dependency table and RELEASE.local 2020-04-08 15:14:43 +02:00
Ralph Lange
d02dda5775 appveyor: add keys to call_make(); make test-results 2020-04-08 15:14:43 +02:00
Ralph Lange
4a6a979f89 appveyor: choco install re2c (for the sequencer) 2020-04-08 15:14:43 +02:00
Ralph Lange
81bf29b8ca appveyor: fix modlist() to allow MODULES in setup files 2020-04-08 15:14:43 +02:00
Ralph Lange
a8321aff92 appveyor: add binary locations to PATH for tools and DLLs 2020-04-08 15:14:42 +02:00
Michael Davidsaver
48832354da appveyor: export MAKE executable to environment
Save user scripts from having to compute
2020-04-08 15:14:42 +02:00
Michael Davidsaver
dbfba732fd appveyor: build action accepts arguments (passed on to make) 2020-04-08 15:14:42 +02:00
Ralph Lange
2f8f4e7fb1 appveyor: add tests for _RECURSIVE and _DEPTH 2020-04-08 15:14:42 +02:00
Ralph Lange
e96f77d8b1 appveyor: fix _DEPTH option 2020-04-08 15:14:42 +02:00
Michael Davidsaver
e14b97b18e appveyor: fix _RECURSIVE option 2020-04-08 15:14:42 +02:00
Michael Davidsaver
5d4fdec627 appveyor: add newlines when writing to CONFIG_SITE 2020-04-08 15:14:42 +02:00
Ralph Lange
fc141f874a appveyor: run unit tests via matrix 'only' setting 2020-04-08 15:14:42 +02:00
Michael Davidsaver
a006293461 appveyor: fixup argument parsing 2020-04-08 15:14:42 +02:00
Ralph Lange
b7d505c2e2 appveyor: use '/' in RELEASE.local paths 2020-04-08 15:14:42 +02:00
Ralph Lange
eceeab66cf appveyor: test full matrix (no 64bit builds on <= vs2012) 2020-04-08 15:14:42 +02:00
Ralph Lange
e50271765f appveyor: add build cache for external tools 2020-04-08 15:14:42 +02:00
Ralph Lange
d9e1df2367 appveyor: add HOST_ARCH suffix for -debug and -static 2020-04-08 15:14:42 +02:00
Ralph Lange
7881b3527c appveyor: fix for older vcvars.bat manipulating PLATFORM 2020-04-08 15:14:42 +02:00
Ralph Lange
0ac6c96e2a appveyor: add call_make() wrapper, forward returncode 2020-04-08 15:14:41 +02:00
Ralph Lange
eb5dd2a86c appveyor: read Base location from RELEASE.local 2020-04-08 15:14:41 +02:00
Ralph Lange
b3efae2451 appveyor: improve logging / print formatting 2020-04-08 15:14:41 +02:00
Ralph Lange
9c58196b6d appveyor: use Base 3.15 for tests to speed up build 2020-04-08 15:14:41 +02:00
Ralph Lange
421fe54fe6 appveyor: move environment setting into setup_for_build() 2020-04-08 15:14:41 +02:00
Ralph Lange
11f5c94236 appveyor: use r'' string constants for all Windows paths 2020-04-08 15:14:41 +02:00
Ralph Lange
ff14d5ceb4 appveyor: adding make calls for 'build' and 'test' actions 2020-04-08 15:14:41 +02:00
Ralph Lange
11cb469fb9 appveyor: silence Perl relocation batch script 2020-04-08 15:14:41 +02:00
Ralph Lange
0ae628673c appveyor: reduce number of SET=test00 runs 2020-04-08 15:14:41 +02:00
Ralph Lange
e11632798a appveyor: print make version 2020-04-08 15:14:41 +02:00
Ralph Lange
24a70882d0 appveyor: use parallel make (except for Base 3.14) 2020-04-08 15:14:41 +02:00
Ralph Lange
430a699d7f appveyor: add mingw definitions 2020-04-08 15:14:41 +02:00
Ralph Lange
c056b5ad0f appveyor: only grep 'vcvarsall.bat' in VS install list 2020-04-08 15:14:41 +02:00
Ralph Lange
a88300bdd7 appveyor: put strawberry perl in the PATH 2020-04-08 15:14:41 +02:00
Ralph Lange
85f570ac09 appveyor: set EPICS_HOST_ARCH 2020-04-08 15:14:41 +02:00
Ralph Lange
d5c419bc8e appveyor: don't run TestVCVars unless on Windows 2020-04-08 15:14:40 +02:00
Ralph Lange
08cdff9495 appveyor: don't run host_info() twice for compile tests 2020-04-08 15:14:40 +02:00
Ralph Lange
d052350738 appveyor: add complete_setup() and do a complete log 2020-04-08 15:14:40 +02:00
Ralph Lange
b53468e50e appveyor: make modlist a function 2020-04-08 15:14:40 +02:00
Michael Davidsaver
e47e35bae4 appveyor: add do_exec() action; 'make' dependencies 2020-04-08 15:14:40 +02:00
Michael Davidsaver
6071fdf198 appveyor: minor fixes
- using 'place' as both a local and a global is confusing
- use "with open()" instead of "open() / close()"
2020-04-08 15:14:40 +02:00
Michael Davidsaver
00f003afa5 appveyor: add with_vcvars to read VS environment settings
- writes and calls a "trampoline" batch that calls the
  appropriate "vcvarsall" script, then calls back into python
2020-04-08 15:14:40 +02:00
Michael Davidsaver
c3918cdbaa appveyor: add printing host_info (python settings, VS versions) 2020-04-08 15:14:40 +02:00
Michael Davidsaver
e2399dc7f3 appveyor: avoid chdir, use 'cwd' key instead 2020-04-08 15:14:40 +02:00
Ralph Lange
5d17fdf98d appveyor: configure EPICS build; install make and Perl 2020-04-08 15:14:40 +02:00
Ralph Lange
249db7db22 appveyor: add cloning the dependency modules to 'prepare' action 2020-04-08 15:14:40 +02:00
Ralph Lange
4dcfbb2079 appveyor: add default repo URL test 2020-04-08 15:14:40 +02:00
Ralph Lange
9742c5f9c6 appveyor: use portable os.chdir() instead of "cd" 2020-04-08 15:14:40 +02:00
Ralph Lange
2847f78ab2 appveyor: add error handler to fix shutil.rmtree on Windows 2020-04-08 15:14:39 +02:00
Ralph Lange
28aeda558b appveyor: use decode() on git hashes 2020-04-08 15:14:39 +02:00
Ralph Lange
2dfa55420f appveyor-test: always chdir into builddir 2020-04-08 15:14:39 +02:00
Ralph Lange
22d0feaa05 appveyor: enable debugging 2020-04-08 15:14:39 +02:00
Ralph Lange
cd0becff06 appveyor: add add_dependency() 2020-04-08 15:14:39 +02:00
Ralph Lange
355a5c2fb7 appveyor: fix logging in do.py 2020-04-08 15:14:39 +02:00
Ralph Lange
139b491614 appveyor: improve tests (capture stdout; use dictionaries) 2020-04-08 15:14:39 +02:00
Ralph Lange
b15d9bb62e appveyor: first version (source_set, update_release_local) 2020-04-08 15:14:39 +02:00
Ralph Lange
761152babe appveyor: first version 2020-04-08 15:14:39 +02:00
Michael Davidsaver
a34bb7d2b2 travis: always show test-results
even if one of the tests crashes
2020-04-08 14:52:06 +02:00
Ralph Lange
94fdfbe802 travis: fix MSI patch file location 2020-04-08 14:50:06 +02:00
Ralph Lange
4cad610601 travis: add MacOS Bash 3.x workaround to travis-test.sh 2020-04-06 18:31:52 +02:00
Ralph Lange
25a60b8490 travis: fix broken test for detecting empty SETUP_PATH
- Test was broken by commit 487d8eb2
  this remained undetected due to name conflict between
  die() in test script and in utils.sh
2020-04-06 17:46:25 +02:00
Ralph Lange
953b2960da travis: refactor die() -> fail() in tests
- avoids name conflict with die() in utils that does not quit under test
2020-04-06 17:46:25 +02:00
Ralph Lange
a8bee0552d Add MSI 1.7 to any Base 3.14 dependency
- add test for patch being applied
- fixes #20
2020-04-06 17:46:25 +02:00
Ralph Lange
c8b0894cb6 Add setup files for synApps 6.0 and 6.1
(fixes #15)
2020-04-02 18:00:25 +02:00
Ralph Lange
47d3f0c0f3 example: add testing on RTEMS 2020-03-31 18:43:36 +02:00
Ralph Lange
1d430e1bfd example: add simple test to check correct linkage
- needs to be 3.14 compatible
- fixes #27
2020-03-31 18:43:36 +02:00
Ralph Lange
12fca1961f Remove specific setting for EPICS_BASE in configure/RELEASE 2020-03-31 18:43:36 +02:00
Ralph Lange
ee803fc38d example: use TYPED_RSET for xxxRecord.c 2020-03-31 18:43:36 +02:00
Ralph Lange
0b589770bf Add Python things to .gitignore 2020-03-31 09:34:37 +02:00
Ralph Lange
f92c1e716e Readme: add Bash4@Mac requirement, add ASYN to references 2020-03-20 10:18:06 +01:00
Ralph Lange
f5047a9e11 travis: fix test error message 2020-03-17 15:26:12 +01:00
Ralph Lange
eb471d9539 Recompile remaining MODULE list after a miss
(fixes #18, closes #21)
2020-03-02 18:22:43 +01:00
Ralph Lange
e3dace9ee3 Fix FOO_RECURSIVE behavior
(fixes #25, closes #24)
2020-03-02 15:12:28 +01:00
Ralph Lange
660c1c6773 Add test for FOO_RECURSIVE behavior
- regression test for issue #25
2020-03-02 15:07:03 +01:00
Ralph Lange
b0ab3bf333 Allow CACHEDIR to be set externally.
fixes #22, closes #23
2020-02-28 12:40:40 +01:00
Ralph Lange
17ce951e99 Merge pull request #17 from EuropeanSpallationSource/overwrite-configure-RELEASE
travis: overwrite configure/RELEASE instead of appending to it
2020-02-12 16:42:54 +00:00
Ralph Lange
7ae7054196 Make ADD_MODULES built before MODULES 2020-02-12 13:50:11 +00:00
Torsten Bögershausen
aee11f266b Overwrite configure/RELEASE
The current scripts append the line

-include $(TOP)/../RELEASE.local

at the end of the configure/RELEASE file (if needed).
While this works for most EPICS modules, there is one drawback:

Definitions like
SUPPORT=/myfavorite/build/server
are not always overwritten (better say undefined).

Fix this and create a configure/RELEASE file which is the same
for all EPICS modules.
Simply overwrite configure/RELEASE with what we want.

Thanks to the EPICS community for this suggestion
2020-02-12 13:08:56 +01:00
Ralph Lange
94744c9a8f Readme: small improvements and clarifications 2020-01-17 13:01:34 +01:00
Ralph Lange
741a293029 Add ipac to the list of tested modules 2020-01-17 13:01:34 +01:00
Ralph Lange
aa8f35f086 Support ADD_MODULES to add specific modules to jobs
(closes #14)
2020-01-17 13:01:34 +01:00
Ralph Lange
82685b0280 Readme: small fixes, add FAQ how to update ci-scripts 2020-01-15 13:27:57 +01:00
Ralph Lange
a39346bc78 Readme: add references, FAQ, Travis badge 2020-01-14 14:36:28 +01:00
Ralph Lange
5d76e1ff07 Add motor to the list of tested modules 2020-01-13 17:24:05 +01:00
Ralph Lange
c721b7ac32 Readme: fix heading levels 2020-01-13 11:10:40 +01:00
Ralph Lange
8233b9f81b travis: auto-fix missing inclusion of ../RELEASE.local 2020-01-10 16:27:33 +01:00
Ralph Lange
7de5a7edc3 travis: pre-install homebrew packages (in global addons)
(closes #13)
2019-12-18 17:24:19 +01:00
Ralph Lange
80ab30469e travis: avoid spawning 'tr' by using bash builtins 2019-12-18 17:22:11 +01:00
4dfd098545 travis: consistently use SETUP_PATH user variable 2019-12-18 17:00:46 +01:00
3929851deb travis: implement die() in utils.sh 2019-12-18 16:58:31 +01:00
e6f722914c travis: fix error message formats 2019-12-18 16:57:09 +01:00
d4ab170b3c avoid 'tr' calls to improve performance a bit 2019-12-16 14:37:32 +01:00
487d8eb287 refer to user variable, not internal variable in error message 2019-12-13 14:58:57 +01:00
c7aca7cd73 implement die function 2019-12-13 14:39:53 +01:00
bdcb2f3173 fix error message formats 2019-12-13 14:03:27 +01:00
Ralph Lange
e81ec3aa0c Merge pull request #12 from dirk-zimoch/formatfix
travis: fix formats for folders and colors
2019-12-13 11:45:20 +01:00
64e382b46e enable line feed after fold_start message 2019-12-12 10:44:51 +01:00
add7bbcf88 enable escape codes for color 2019-12-12 10:43:26 +01:00
Ralph Lange
377bd2a915 Readme: update, add version badge, mention License 2019-12-11 16:42:52 +01:00
Ralph Lange
7782f928c2 Add LICENSE 2019-12-11 16:40:37 +01:00
30 changed files with 3203 additions and 273 deletions

162
.appveyor.yml Normal file
View File

@@ -0,0 +1,162 @@
# .appveyor.yml for testing EPICS Base ci-scripts
# (see: https://github.com/epics-base/ci-scripts)
# Note:
# Paths to scripts are different in this test configuration
# (your module has one more directory level: .ci)
# Ralph Lange <ralph.lange@gmx.de>
# Copyright (c) 2020 ITER Organization
#---------------------------------#
# build cache #
#---------------------------------#
cache:
- C:\Users\appveyor\.tools -> appveyor\do.py
#---------------------------------#
# additional packages #
#---------------------------------#
install:
# for the sequencer
- cinst re2c
#---------------------------------#
# repository cloning #
#---------------------------------#
# Called at very beginning, before repo cloning
init:
# Set autocrlf to make batch files work
- git config --global core.autocrlf true
# Set clone depth (do not fetch complete history)
clone_depth: 50
# Skipping commits affecting only specific files
skip_commits:
files:
- 'documentation/*'
- 'templates/*'
- '**/*.html'
- '**/*.md'
- '.travis.yml'
#---------------------------------#
# build matrix configuration #
#---------------------------------#
# Default build worker image
image: Visual Studio 2015
# Build Configurations: dll/static, regular/debug
configuration:
- dynamic
- static
- dynamic-debug
- static-debug
# Environment variables: compiler toolchain, base version, setup file, ...
environment:
# common / default variables for all jobs
SETUP_PATH: .:.ci
SET: test01
BASE_RECURSIVE: NO
VV: 1
matrix:
- CMP: vs2019
SET: test00
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
- CMP: mingw
- CMP: vs2019
VV: 0
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
- CMP: vs2019
BASE: 3.15
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
- CMP: vs2019
BASE: 3.14
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
- CMP: vs2017
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
- CMP: vs2015
- CMP: vs2013
- CMP: vs2012
- CMP: vs2010
- CMP: vs2008
# Platform: architecture
platform:
- x86
- x64
# Matrix configuration: allow specific failing jobs
matrix:
exclude:
# Run test00 only once: x64 dynamic
- platform: x86
SET: test00
- configuration: static
SET: test00
- configuration: dynamic-debug
SET: test00
- configuration: static-debug
SET: test00
# VS2012 and older installs don't have the 64 bit compiler
- platform: x64
CMP: vs2012
- platform: x64
CMP: vs2010
- platform: x64
CMP: vs2008
# Run test script for unit tests (SET = test00)
for:
-
matrix:
only:
- SET: test00
build_script:
- cmd: python appveyor-test.py
test_script:
- cmd: echo Tests have been run in the build phase
#---------------------------------#
# building & testing #
#---------------------------------#
build_script:
- cmd: python appveyor/do.py prepare
- cmd: python appveyor/do.py build
test_script:
- cmd: python appveyor/do.py test
#---------------------------------#
# debugging #
#---------------------------------#
## if you want to connect by remote desktop to a failed build, uncomment these lines
## note that you will need to connect within the usual build timeout limit (60 minutes)
## so you may want to adjust the build matrix above to just build the one of interest
#on_failure:
# - ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
#---------------------------------#
# notifications #
#---------------------------------#
notifications:
# - provider: Email
# to:
# - core-talk@aps.anl.gov
# on_build_success: false
# - provider: GitHubPullRequest

5
.gitignore vendored
View File

@@ -1,5 +1,10 @@
/QtC-* /QtC-*
*.orig *.orig
*.log *.log
.*.swp .*.swp
*.autosave *.autosave
/.idea/
*.py[cod]
__pycache__/

View File

@@ -27,6 +27,11 @@ addons:
- g++-mingw-w64-i686 - g++-mingw-w64-i686
- g++-mingw-w64-x86-64 - g++-mingw-w64-x86-64
- qemu-system-x86 - qemu-system-x86
homebrew:
packages:
- re2c
- bash
update: true
install: install:
- ./travis/prepare.sh - ./travis/prepare.sh
@@ -69,7 +74,8 @@ jobs:
- env: SET=test01 - env: SET=test01
- env: SET=test01 # On the side: test ADD_MODULES
- env: SET=test01 ADD_MODULES=ipac
compiler: clang compiler: clang
- env: VV="" SET=test01 - env: VV="" SET=test01
@@ -106,9 +112,9 @@ jobs:
# Cross-compilation to RTEMS # Cross-compilation to RTEMS
# (needs EPICS Base >= 3.16.2) # (needs EPICS Base >= 3.16.2)
- env: SET=test01 RTEMS=4.10 TEST=NO - env: SET=test01 RTEMS=4.10
- env: SET=test01 RTEMS=4.9 TEST=NO - env: SET=test01 RTEMS=4.9
# Other gcc versions (adding as an extra package) # Other gcc versions (adding as an extra package)
@@ -126,7 +132,6 @@ jobs:
- env: SET=test01 SNCSEQ=master - env: SET=test01 SNCSEQ=master
os: osx os: osx
compiler: clang compiler: clang
addons: { homebrew: { packages: ["re2c"], update: true } }
# Base 3.15 builds # Base 3.15 builds
# ================ # ================
@@ -145,17 +150,16 @@ jobs:
# Cross-compilation to RTEMS # Cross-compilation to RTEMS
# (needs EPICS Base >= 3.16.2) # (needs EPICS Base >= 3.16.2)
- env: BASE=R3.16.2 SET=test01 RTEMS=4.10 TEST=NO - env: BASE=R3.16.2 SET=test01 RTEMS=4.10
dist: trusty dist: trusty
- env: BASE=R3.16.2 SET=test01 RTEMS=4.9 TEST=NO - env: BASE=R3.16.2 SET=test01 RTEMS=4.9
dist: trusty dist: trusty
# SNCSEQ 2.2.7 fails to build on MacOS; currently needs master # SNCSEQ 2.2.7 fails to build on MacOS; currently needs master
- env: BASE=R3.15.7 SET=test01 SNCSEQ=master - env: BASE=R3.15.7 SET=test01 SNCSEQ=master
os: osx os: osx
compiler: clang compiler: clang
addons: { homebrew: { packages: ["re2c"], update: true } }
# Base 3.14 builds # Base 3.14 builds
# ================ # ================
@@ -175,4 +179,3 @@ jobs:
- env: BASE=R3.14.12.8 SET=test01 SNCSEQ=master - env: BASE=R3.14.12.8 SET=test01 SNCSEQ=master
os: osx os: osx
compiler: clang compiler: clang
addons: { homebrew: { packages: ["re2c"], update: true } }

65
LICENSE Normal file
View File

@@ -0,0 +1,65 @@
Copyright (c) 2019 EPICS. All rights reserved.
EPICS CI-Scripts are distributed subject to the following
license conditions:
SOFTWARE LICENSE AGREEMENT
Software: EPICS CI-Scripts
1. The "Software", below, refers to EPICS CI-Scripts (in
either source code, or binary form and accompanying documentation).
Each licensee is addressed as "you" or "Licensee."
2. The copyright holders shown above and their third-party licensors
hereby grant Licensee a royalty-free nonexclusive license, subject to
the limitations stated herein and U.S. Government license rights.
3. You may modify and make a copy or copies of the Software for use
within your organization, if you meet the following conditions:
a. Copies in source code must include the copyright notice and this
Software License Agreement.
b. Copies in binary form must include the copyright notice and this
Software License Agreement in the documentation and/or other
materials provided with the copy.
4. You may modify a copy or copies of the Software or any portion of it,
thus forming a work based on the Software, and distribute copies of
such work outside your organization, if you meet all of the following
conditions:
a. Copies in source code must include the copyright notice and this
Software License Agreement;
b. Copies in binary form must include the copyright notice and this
Software License Agreement in the documentation and/or other
materials provided with the copy;
c. Modified copies and works based on the Software must carry
prominent notices stating that you changed specified portions of
the Software.
5. Portions of the Software resulted from work developed under a U.S.
Government contract and are subject to the following license: the
Government is granted for itself and others acting on its behalf a
paid-up, nonexclusive, irrevocable worldwide license in this computer
software to reproduce, prepare derivative works, and perform publicly
and display publicly.
6. WARRANTY DISCLAIMER. THE SOFTWARE IS SUPPLIED "AS IS" WITHOUT WARRANTY
OF ANY KIND. THE COPYRIGHT HOLDERS, THEIR THIRD PARTY LICENSORS, THE
UNITED STATES, THE UNITED STATES DEPARTMENT OF ENERGY, AND THEIR
EMPLOYEES: (1) DISCLAIM ANY WARRANTIES, EXPRESS OR IMPLIED, INCLUDING
BUT NOT LIMITED TO ANY IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE, TITLE OR NON-INFRINGEMENT, (2) DO NOT ASSUME
ANY LEGAL LIABILITY OR RESPONSIBILITY FOR THE ACCURACY, COMPLETENESS,
OR USEFULNESS OF THE SOFTWARE, (3) DO NOT REPRESENT THAT USE OF THE
SOFTWARE WOULD NOT INFRINGE PRIVATELY OWNED RIGHTS, (4) DO NOT WARRANT
THAT THE SOFTWARE WILL FUNCTION UNINTERRUPTED, THAT IT IS ERROR-FREE
OR THAT ANY ERRORS WILL BE CORRECTED.
7. LIMITATION OF LIABILITY. IN NO EVENT WILL THE COPYRIGHT HOLDERS, THEIR
THIRD PARTY LICENSORS, THE UNITED STATES, THE UNITED STATES DEPARTMENT
OF ENERGY, OR THEIR EMPLOYEES: BE LIABLE FOR ANY INDIRECT, INCIDENTAL,
CONSEQUENTIAL, SPECIAL OR PUNITIVE DAMAGES OF ANY KIND OR NATURE,
INCLUDING BUT NOT LIMITED TO LOSS OF PROFITS OR LOSS OF DATA, FOR ANY
REASON WHATSOEVER, WHETHER SUCH LIABILITY IS ASSERTED ON THE BASIS OF
CONTRACT, TORT (INCLUDING NEGLIGENCE OR STRICT LIABILITY), OR
OTHERWISE, EVEN IF ANY OF SAID PARTIES HAS BEEN WARNED OF THE
POSSIBILITY OF SUCH LOSS OR DAMAGES.

154
README.md
View File

@@ -1,3 +1,7 @@
<a target="_blank" href="http://semver.org">![Version][badge.version]</a>
<a target="_blank" href="https://travis-ci.org/epics-base/ci-scripts">![Travis status][badge.travis]</a>
<a target="_blank" href="https://ci.appveyor.com/project/epics-base/ci-scripts">![AppVeyor status][badge.appveyor]</a>
# Continuous Integration Scripts for EPICS Modules # Continuous Integration Scripts for EPICS Modules
The scripts inside this repository are intended to provide a common, The scripts inside this repository are intended to provide a common,
@@ -6,10 +10,10 @@ software modules, e.g. Device or Driver Support modules.
By including this repository as a Git Submodule, you will be able to By including this repository as a Git Submodule, you will be able to
use the same flexible, powerful CI setup that EPICS Bases uses, use the same flexible, powerful CI setup that EPICS Bases uses,
including a mechanism to specify sets of dependent modules including a way to specify sets of dependent modules
(with versions) that you want to compile your module against. (with versions) that you want to compile your module against.
By using the submodule mechnism, your module will always use an By using the submodule mechanism, your module will always use an
explicit commit, i.e. a fixed version of the scripts. explicit commit, i.e. a fixed version of the scripts.
This ensures that any further development of the ci-scripts will This ensures that any further development of the ci-scripts will
never break existing use. never break existing use.
@@ -45,7 +49,7 @@ example.
## Supported CI Services ## Supported CI Services
### Travis-CI ### [Travis-CI](https://travis-ci.org/)
- Use different compilers (gcc, clang) - Use different compilers (gcc, clang)
- Use different gcc versions - Use different gcc versions
- Cross-compile for Windows 32bit and 64bit using MinGW and WINE - Cross-compile for Windows 32bit and 64bit using MinGW and WINE
@@ -53,19 +57,30 @@ example.
- Compile on MacOS - Compile on MacOS
- Built dependencies are cached (for faster builds) - Built dependencies are cached (for faster builds)
### How to Use the CI-Scripts See specific **[ci-scripts on Travis-CI README](travis/README.md)** for more details.
### [AppVeyor](https://www.appveyor.com/)
- Use different compilers (Visual Studio, MinGW)
- Use different Visual Studio versions: \
2008, 2010, 2012, 2013, 2015, 2017, 2019
- Compile for Windows 32bit and 64bit
See specific **[ci-scripts on AppVeyor README](appveyor/README.md)** for more details.
## How to Use the CI-Scripts
1. Get an account on a supported CI service provider platform. 1. Get an account on a supported CI service provider platform.
(e.g. [Travis-CI](https://travis-ci.org/), (e.g. [Travis-CI](https://travis-ci.org/),
Appveyor, Azure Pipelines...) [AppVeyor](https://www.appveyor.com/), Azure Pipelines...)
(More details in the specific README of the subdirectory.) (More details in the specific README of the subdirectory.)
2. In your Support Module, add this ci-scripts respository 2. In your Support Module, add this ci-scripts repository
as a Git Submodule (name suggestion: `.ci`). as a Git Submodule (name suggestion: `.ci`).
```bash
git submodule add https://github.com/epics-base/ci-scripts .ci
``` ```
$ git submodule add https://github.com/epics-base/ci-scripts .ci
```
3. Create setup files for different sets of dependencies you 3. Create setup files for different sets of dependencies you
want to compile against. (See below.) want to compile against. (See below.)
@@ -73,13 +88,14 @@ example.
``` ```
MODULES=sncseq asyn MODULES=sncseq asyn
BASE=R3.15.6 BASE=3.15
ASYN=R4-34 ASYN=R4-34
SNCSEQ=R2-2-7 SNCSEQ=R2-2-8
``` ```
will compile against the EPICS Base release 3.15.6, the Sequencer will compile against the EPICS Base 3.15 branch, the Sequencer
release 2.2.7 and release 4.34 of asyn. release 2.2.8 and release 4.34 of asyn.
(Any settings can be overridden from `.travis.yml`.) (Any settings can be overridden from the specific job configuration
in e.g. `.travis.yml`.)
4. Create a configuration for the CI service by copying one of 4. Create a configuration for the CI service by copying one of
the examples provided in the service specific subdirectory the examples provided in the service specific subdirectory
@@ -106,7 +122,7 @@ latest released versions and one for the development branches.
## Setup File Syntax ## Setup File Syntax
Setup files are loaded by the bash scripts. They are found by searching Setup files are loaded by the build scripts. They are found by searching
the locations in `SETUP_PATH` (space or colon separated list of directories, the locations in `SETUP_PATH` (space or colon separated list of directories,
relative to your module's root directory). relative to your module's root directory).
@@ -115,15 +131,19 @@ Setup files can include other setup files by calling `include <setup>`
`SETUP_PATH` is searched for the include. `SETUP_PATH` is searched for the include.
Any `VAR=value` setting of a variable is only executed if `VAR` is unset or Any `VAR=value` setting of a variable is only executed if `VAR` is unset or
empty. That way any settings can be overridden by settings in `.travis.yml`. empty. That way any settings can be overridden by settings in the main
configuration (e.g., `.travis.yml`).
Empty lines or lines starting with `#` are ignored. Empty lines or lines starting with `#` are ignored.
`MODULES="<list of names>"` should list the dependencies (software modules) `MODULES=<list of names>` should list the dependencies (software modules)
by using their well-known slugs, separated by spaces. by using their well-known slugs, separated by spaces.
EPICS Base (slug: `base`) will always be a dependency and will be added and EPICS Base (slug: `base`) will always be a dependency and will be added and
compiled first. The other dependencies are added and compiled in the order compiled first. The other dependencies are added and compiled in the order
they are defined in `MODULES`. they are defined in `MODULES`.
Modules needed only for specific jobs (e.g., on specific architectures)
can be added in the main configuration file by setting `ADD_MODULES`
for the specific job(s).
`REPOOWNER=<name>` sets the default GitHub owner (or organization) for all `REPOOWNER=<name>` sets the default GitHub owner (or organization) for all
dependency modules. Useful if you want to compile against a complete set dependency modules. Useful if you want to compile against a complete set
@@ -133,9 +153,7 @@ For any module mentioned as `foo` in the `MODULES` setting (and for `BASE`),
the following settings can be configured: the following settings can be configured:
`FOO=<version>` Set version of the module that should be used. Must either `FOO=<version>` Set version of the module that should be used. Must either
be a *tag* name (in that case the module is checked out into Travis' cache be a *tag* name or a *branch* name. [default: `master`]
system) or a *branch* name (in that case the module is always checked out
and recompiled as part of the job). [default: `master`]
`FOO_REPONAME=<name>` Set the name of the remote repository as `<name>.git`. `FOO_REPONAME=<name>` Set the name of the remote repository as `<name>.git`.
[default is the slug in lower case: `foo`] [default is the slug in lower case: `foo`]
@@ -143,7 +161,8 @@ and recompiled as part of the job). [default: `master`]
`FOO_REPOOWNER=<name>` Set the name of the GitHub owner (or organization) `FOO_REPOOWNER=<name>` Set the name of the GitHub owner (or organization)
that the module repository can be found under. that the module repository can be found under.
`FOO_REPOURL="<url>"` Set the complete URL of the remote repository. `FOO_REPOURL="<url>"` Set the complete URL of the remote repository. Useful
for dependencies that are not hosted on GitHub.
The default URL for the repository is pointing to GitHub, under The default URL for the repository is pointing to GitHub, under
`$FOO_REPOOWNER` else `$REPOOWNER` else `epics-modules`, `$FOO_REPOOWNER` else `$REPOOWNER` else `epics-modules`,
@@ -174,12 +193,81 @@ Feel free to suggest more default settings using a Pull Request.
## Debugging ## Debugging
Setting `VV=1` in your `.travis.yml` configuration for a specific job Setting `VV=1` in your service configuration (e.g., `.travis.yml`) for a
will run the job with high verbosity, printing every command as it is being specific job will run the job with high verbosity,
executed and switching the dependency builds to higher verbosity. printing every command as it is being executed and switching the dependency
builds to higher verbosity.
For local debugging, you may set `CACHEDIR` to change the location for the
dependency builds. [default is `$HOME/.cache`]
## References: EPICS Modules Using ci-scripts
[EPICS Base](https://github.com/epics-base/epics-base) and its submodules
[pvData](https://github.com/epics-base/pvDataCPP),
[pvAccess](https://github.com/epics-base/pvAccessCPP),
[pva2pva](https://github.com/epics-base/pva2pva)
EPICS Modules:
[ASYN](https://github.com/epics-modules/asyn),
[devlib2](https://github.com/epics-modules/devlib2),
[ecmc](https://github.com/epics-modules/ecmc),
[ip](https://github.com/epics-modules/ip),
[lua](https://github.com/epics-modules/lua),
[MCoreUtils](https://github.com/epics-modules/MCoreUtils),
[modbus](https://github.com/epics-modules/modbus),
[motor](https://github.com/epics-modules/motor),
[PCAS](https://github.com/epics-modules/pcas),
[sscan](https://github.com/epics-modules/sscan),
[vac](https://github.com/epics-modules/vac)
ESS: [EtherCAT MC Motor Driver][ref.ethercatmc]
ITER: [OPC UA Device Support](https://github.com/ralphlange/opcua)
## Frequently Asked Questions
**How can I see what the dependency building jobs are actually doing?**
Set `VV=1` in the configuration line of the job you are interested in.
This will make all builds (not just for your module) verbose.
**How do I update my module to use a newer release of ci-scripts?**
Update the submodule in `.ci` first, then change your CI configuration
(if needed) and commit both to your module. E.g., to update your Travis
setup to release 2.3.4 of ci-scripts:
```bash
cd .ci
git pull origin v2.3.4
cd -
git add .ci
# if needed:
edit .travis.yml
git add .travis.yml
git commit -m "Update ci-scripts submodule to v2.3.4"
```
Check the example configuration files inside ci-scripts (and their
changes) to see what might be needed and/or interesting to change
in your configuration.
Depending on the changes contained in the ci-scripts update, it might
be advisable to clear the CI caches after updating ci-scripts. E.g.,
a change in setting up EPICS Base will not be applied if Base is found
in the cache.
**Why does running the scripts locally on my MacOS machine fail?**
The ci-scripts for Travis-CI require Bash version 4.
As Apple ships an older Bash for [political reasons][reddit.bash],
you need to install a more recent Bash, e.g. using MacPorts
or Homebrew.
## Release Numbering of this Module ## Release Numbering of this Module
The module tries to apply [Semantic Versioning](https://semver.org/).
Major release numbers refer to the API, which is more or less defined Major release numbers refer to the API, which is more or less defined
by the full configuration examples in the service specific by the full configuration examples in the service specific
subdirectories. subdirectories.
@@ -187,12 +275,26 @@ If one of these files has to be changed for the existing configuration
options or important new options are being added, a new major release options or important new options are being added, a new major release
is created. is created.
Minor release numbers refer to bugfixes that should not require the Minor release numbers refer to additions and enhancements that do not
configuration inside a user module to be changed. require the configuration inside an existing user module to be changed.
Again: using the git submodule mechanism to include these scripts means Again: using the git submodule mechanism to include these scripts means
that user modules always work with a fixed, frozen version. that user modules always work with a fixed, frozen version.
I.e., developments in the ci-scripts repository will never break an\ I.e., developments in the ci-scripts repository will never break an
existing application. existing application.
These release numbering considerations are just a hint to assess the These release numbering considerations are just a hint to assess the
risks when updating the submodule. risks when updating the submodule.
## License
This module is distributed subject to a Software License Agreement found
in file LICENSE that is included with this distribution.
<!-- Links -->
[badge.version]: https://badge.fury.io/gh/epics-base%2Fci-scripts.svg
[badge.travis]: https://travis-ci.org/epics-base/ci-scripts.svg?branch=master
[badge.appveyor]: https://ci.appveyor.com/api/projects/status/8b578alg974axvux?svg=true
[reddit.bash]: https://www.reddit.com/r/bash/comments/393oqv/why_is_the_version_of_bash_included_in_os_x_so_old/
[ref.ethercatmc]: https://github.com/EuropeanSpallationSource/m-epics-ethercatmc

851
add-msi-to-314.patch Normal file
View File

@@ -0,0 +1,851 @@
diff --git a/config/RULES.Db b/config/RULES.Db
index b4946c7aa..90b76ed08 100644
--- a/config/RULES.Db
+++ b/config/RULES.Db
@@ -12,11 +12,7 @@
#
MAKEBPT = $(EPICS_BASE_HOST_BIN)/makeBpt$(EXE)
-ifndef MSI
-# Tool from R3.14 extensions bin, R3.13 extensions bin, or user path
-MSI = $(firstword $(wildcard $(EPICS_EXTENSIONS_HOST_BIN)/msi$(HOSTEXE) \
- $(EPICS_EXTENSIONS)/bin/$(HOST_ARCH)/msi$(HOSTEXE)) msi$(HOSTEXE))
-endif
+MSI = $(EPICS_BASE_HOST_BIN)/msi$(HOSTEXE)
DBEXPAND = $(EPICS_BASE_HOST_BIN)/dbExpand$(EXE)
DBST = dbst
diff --git a/configure/CONFIG_BASE b/configure/CONFIG_BASE
index 7ee5a5b89..9a9793093 100644
--- a/configure/CONFIG_BASE
+++ b/configure/CONFIG_BASE
@@ -112,8 +112,5 @@ ifndef DBST
DBST = dbst
endif
-ifndef MSI
-MSI = msi
-endif
-
+MSI = $(EPICS_BASE_HOST_BIN)/msi$(HOSTEXE)
diff --git a/src/dbtools/Makefile b/src/dbtools/Makefile
index 38ed52c9e..8655a5337 100644
--- a/src/dbtools/Makefile
+++ b/src/dbtools/Makefile
@@ -11,6 +11,11 @@ TOP=../..
include $(TOP)/configure/CONFIG
+PROD_HOST += msi
+
+msi_SRCS = msi.c
+msi_LIBS += Com
+
INC += dbLoadTemplate.h
INC += dbtoolsIocRegister.h
diff --git a/src/dbtools/msi.c b/src/dbtools/msi.c
new file mode 100644
index 000000000..525d4f25b
--- /dev/null
+++ b/src/dbtools/msi.c
@@ -0,0 +1,798 @@
+/*************************************************************************\
+* Copyright (c) 2002 The University of Chicago, as Operator of Argonne
+* National Laboratory.
+* Copyright (c) 2002 The Regents of the University of California, as
+* Operator of Los Alamos National Laboratory.
+* This file is distributed subject to a Software License Agreement found
+* in the file LICENSE that is included with this distribution.
+\*************************************************************************/
+/*msi - macro sunstitutions and include */
+
+/*
+ * Modification Log:
+ * -----------------
+ * .01 08DEC97 mrk Original version
+ */
+
+#include <stdlib.h>
+#include <stddef.h>
+#include <stdio.h>
+#include <string.h>
+#include <ctype.h>
+#include <errno.h>
+
+#include <epicsVersion.h>
+#include <dbDefs.h>
+#include <macLib.h>
+#include <ellLib.h>
+#include <errlog.h>
+
+#define MAX_BUFFER_SIZE 4096
+
+#if ((EPICS_VERSION <= 3) && (EPICS_REVISION <= 13))
+#define macEnvExpand(x) strdup(x)
+#endif
+
+/*Forward references to local routines*/
+static void usageExit(void);
+static void addMacroReplacements(MAC_HANDLE *macPvt,char *pval);
+static void makeSubstitutions(void *inputPvt,void *macPvt,char *templateName);
+
+/*Routines that read the template files */
+static void inputConstruct(void **inputPvt);
+static void inputDestruct(void *inputPvt);
+static void inputAddPath(void *inputPvt, char *pval);
+static void inputBegin(void *inputPvt,char *fileName);
+static char *inputNextLine(void *inputPvt);
+static void inputNewIncludeFile(void *inputPvt,char *name);
+static void inputErrPrint(void *inputPvt);
+
+/*Routines that read the substitution file */
+static void substituteDestruct(void *substitutePvt);
+static void substituteOpen(void **substitutePvt,char *substitutionName);
+static int substituteGetNextSet(void *substitutePvt,char **filename);
+static char *substituteGetReplacements(void *substitutePvt);
+
+/*Exit status*/
+static int exitStatus = 0;
+
+int opt_V = 0;
+
+
+int main(int argc,char **argv)
+{
+ void *inputPvt;
+ MAC_HANDLE *macPvt;
+ char *pval;
+ int narg;
+ char *substitutionName=0;
+ char *templateName=0;
+ int i;
+
+ inputConstruct(&inputPvt);
+ macCreateHandle(&macPvt,0);
+ macSuppressWarning(macPvt,1);
+ while((argc>1) && (argv[1][0] == '-')) {
+ narg = (strlen(argv[1])==2) ? 2 : 1;
+ pval = (narg==1) ? (argv[1]+2) : argv[2];
+ if(strncmp(argv[1],"-I",2)==0) {
+ inputAddPath(inputPvt,pval);
+ } else if(strncmp(argv[1],"-o",2)==0) {
+ if(freopen(pval,"w",stdout)==NULL) {
+ fprintf(stderr,"Can't open %s for writing: %s\n", pval, strerror(errno));
+ exit(1);
+ }
+ } else if(strncmp(argv[1],"-M",2)==0) {
+ addMacroReplacements(macPvt,pval);
+ } else if(strncmp(argv[1],"-S",2)==0) {
+ substitutionName = calloc(strlen(pval)+1,sizeof(char));
+ strcpy(substitutionName,pval);
+ } else if(strncmp(argv[1],"-V",2)==0) {
+ macSuppressWarning(macPvt,0);
+ opt_V = 1;
+ narg = 1; /* no argument for this option */
+ } else {
+ usageExit();
+ }
+ argc -= narg;
+ for(i=1; i<argc; i++) argv[i] = argv[i + narg];
+ }
+ if(argc>2) {
+ fprintf(stderr,"too many filename arguments\n");
+ usageExit();
+ }
+ if(argc==2) {
+ templateName = calloc(strlen(argv[1])+1,sizeof(char));
+ strcpy(templateName,argv[1]);
+ }
+ if(!substitutionName) {
+ makeSubstitutions(inputPvt,macPvt,templateName);
+ } else {
+ void *substitutePvt;
+ char *filename = 0;
+
+ substituteOpen(&substitutePvt,substitutionName);
+ while(substituteGetNextSet(substitutePvt,&filename)) {
+ if(templateName) filename = templateName;
+ if(!filename) {
+ fprintf(stderr,"no template file\n");
+ usageExit();
+ }
+ macPushScope(macPvt);
+ while((pval = substituteGetReplacements(substitutePvt))){
+ addMacroReplacements(macPvt,pval);
+ makeSubstitutions(inputPvt,macPvt,filename);
+ }
+ macPopScope(macPvt);
+ }
+ substituteDestruct(substitutePvt);
+ }
+ errlogFlush();
+ inputDestruct(inputPvt);
+ free((void *)templateName);
+ free((void *)substitutionName);
+ return(exitStatus);
+}
+
+void usageExit(void)
+{
+ fprintf(stderr,"usage: msi -V -opath -Ipath ... -Msub ... -Ssubfile template\n");
+ fprintf(stderr," Specifying path will replace the default '.'\n");
+ fprintf(stderr," stdin is used if template is not given\n");
+ exit(1);
+}
+
+static void addMacroReplacements(MAC_HANDLE *macPvt,char *pval)
+{
+ char **pairs;
+ long status;
+
+ status = macParseDefns(macPvt,pval,&pairs);
+ if(!status) {
+ fprintf(stderr,"Error macParseDefns error\n");
+ usageExit();
+ }
+ status = macInstallMacros(macPvt,pairs);
+ if(!status) {
+ fprintf(stderr,"Error macInstallMacros error\n");
+ usageExit();
+ }
+ free((void *)pairs);
+}
+
+typedef enum {cmdInclude,cmdSubstitute} cmdType;
+static const char *cmdNames[] = {"include","substitute"};
+static void makeSubstitutions(void *inputPvt,void *macPvt,char *templateName)
+{
+ char *input;
+ static char buffer[MAX_BUFFER_SIZE];
+ int n;
+ static int unexpWarned = 0;
+
+ inputBegin(inputPvt,templateName);
+ while((input = inputNextLine(inputPvt))) {
+ int expand=1;
+ char *p;
+ char *command = 0;
+
+ p = input;
+ /*skip whitespace at beginning of line*/
+ while(*p && (isspace(*p))) ++p;
+ /*Look for i or s */
+ if(*p && (*p=='i' || *p=='s')) command = p;
+ if(command) {
+ char *pstart;
+ char *pend;
+ char *copy;
+ int cmdind=-1;
+ int i;
+
+ for(i=0; i< NELEMENTS(cmdNames); i++) {
+ if(strstr(command,cmdNames[i])) {
+ cmdind = i;
+ }
+ }
+ if(cmdind<0) goto endif;
+ p = command + strlen(cmdNames[cmdind]);
+ /*skip whitespace after command*/
+ while(*p && (isspace(*p))) ++p;
+ /*Next character must be quote*/
+ if((*p==0) || (*p!='"')) goto endif;
+ pstart = ++p;
+ /*Look for end quote*/
+ while(*p && (*p!='"')) {
+ /*allow escape for imbeded quote*/
+ if((*p=='\\') && *(p+1)=='"') {
+ p += 2; continue;
+ } else {
+ if(*p=='"') break;
+ }
+ ++p;
+ }
+ pend = p;
+ if(*p==0) goto endif;
+ /*skip quote and any trailing blanks*/
+ while(*++p==' ') ;
+ if(*p != '\n' && *p !=0) goto endif;
+ copy = calloc(pend-pstart+1,sizeof(char));
+ strncpy(copy,pstart,pend-pstart);
+ switch(cmdind) {
+ case cmdInclude:
+ inputNewIncludeFile(inputPvt,copy);
+ break;
+ case cmdSubstitute:
+ addMacroReplacements(macPvt,copy);
+ break;
+ default:
+ fprintf(stderr,"Logic Error: makeSubstitutions\n");
+ inputErrPrint(inputPvt);
+ exit(1);
+ }
+ free(copy);
+ expand = 0;
+ }
+endif:
+ if (expand) {
+ n = macExpandString(macPvt,input,buffer,MAX_BUFFER_SIZE-1);
+ fputs(buffer,stdout);
+ if (!unexpWarned && n<0) {
+ const char * pErrMsg = "Warning: Undefined macros present, use msi -V to list\n";
+ if ( opt_V ) {
+ exitStatus = 2;
+ pErrMsg = "Error: Undefined macros present\n";
+ }
+ fprintf( stderr, pErrMsg );
+ unexpWarned++;
+ }
+ }
+ }
+}
+
+typedef struct inputFile{
+ ELLNODE node;
+ char *filename;
+ FILE *fp;
+ int lineNum;
+}inputFile;
+
+typedef struct pathNode {
+ ELLNODE node;
+ char *directory;
+} pathNode;
+
+typedef struct inputData {
+ ELLLIST inputFileList;
+ ELLLIST pathList;
+ char inputBuffer[MAX_BUFFER_SIZE];
+}inputData;
+
+static void inputOpenFile(inputData *pinputData,char *filename);
+static void inputCloseFile(inputData *pinputData);
+static void inputCloseAllFiles(inputData *pinputData);
+
+static void inputConstruct(void **ppvt)
+{
+ inputData *pinputData;
+
+ pinputData = calloc(1,sizeof(inputData));
+ ellInit(&pinputData->inputFileList);
+ ellInit(&pinputData->pathList);
+ *ppvt = pinputData;
+}
+
+static void inputDestruct(void *pvt)
+{
+ inputData *pinputData = (inputData *)pvt;
+ pathNode *ppathNode;
+
+ inputCloseAllFiles(pinputData);
+ while((ppathNode = (pathNode *)ellFirst(&pinputData->pathList))) {
+ ellDelete(&pinputData->pathList,&ppathNode->node);
+ free((void *)ppathNode->directory);
+ free((void *)ppathNode);
+ }
+ free(pvt);
+}
+
+static void inputAddPath(void *pvt, char *path)
+{
+ inputData *pinputData = (inputData *)pvt;
+ ELLLIST *ppathList = &pinputData->pathList;
+ pathNode *ppathNode;
+ const char *pcolon;
+ const char *pdir;
+ int len;
+ int emptyName;
+
+ pdir = path;
+ /*an empty name at beginning, middle, or end means current directory*/
+ while(pdir && *pdir) {
+ emptyName = ((*pdir == ':') ? 1 : 0);
+ if(emptyName) ++pdir;
+ ppathNode = (pathNode *)calloc(1,sizeof(pathNode));
+ ellAdd(ppathList,&ppathNode->node);
+ if(!emptyName) {
+ pcolon = strchr(pdir,':');
+ len = (pcolon ? (pcolon - pdir) : strlen(pdir));
+ if(len>0) {
+ ppathNode->directory = (char *)calloc(len+1,sizeof(char));
+ strncpy(ppathNode->directory,pdir,len);
+ pdir = pcolon;
+ /*unless at end skip past first colon*/
+ if(pdir && *(pdir+1)!=0) ++pdir;
+ } else { /*must have been trailing : */
+ emptyName=1;
+ }
+ }
+ if(emptyName) {
+ ppathNode->directory = (char *)calloc(2,sizeof(char));
+ strcpy(ppathNode->directory,".");
+ }
+ }
+ return;
+}
+
+static void inputBegin(void *pvt,char *fileName)
+{
+ inputData *pinputData = (inputData *)pvt;
+
+ inputCloseAllFiles(pinputData);
+ inputOpenFile(pinputData,fileName);
+}
+
+static char *inputNextLine(void *pvt)
+{
+ inputData *pinputData = (inputData *)pvt;
+ inputFile *pinputFile;
+ char *pline;
+
+ while((pinputFile = (inputFile *)ellFirst(&pinputData->inputFileList))) {
+ pline = fgets(pinputData->inputBuffer,MAX_BUFFER_SIZE,pinputFile->fp);
+ if(pline) {
+ ++pinputFile->lineNum;
+ return(pline);
+ }
+ inputCloseFile(pinputData);
+ }
+ return(0);
+}
+
+static void inputNewIncludeFile(void *pvt,char *name)
+{
+ inputData *pinputData = (inputData *)pvt;
+
+ inputOpenFile(pinputData,name);
+}
+
+static void inputErrPrint(void *pvt)
+{
+ inputData *pinputData = (inputData *)pvt;
+ inputFile *pinputFile;
+
+ fprintf(stderr,"input: %s which is ",pinputData->inputBuffer);
+ pinputFile = (inputFile *)ellFirst(&pinputData->inputFileList);
+ while(pinputFile) {
+ fprintf(stderr,"line %d of ",pinputFile->lineNum);
+ if(pinputFile->filename) {
+ fprintf(stderr," file %s\n",pinputFile->filename);
+ } else {
+ fprintf(stderr,"stdin:\n");
+ }
+ pinputFile = (inputFile *)ellNext(&pinputFile->node);
+ if(pinputFile) {
+ fprintf(stderr," which is included from ");
+ } else {
+ fprintf(stderr,"\n");
+ }
+ }
+ fprintf(stderr,"\n");
+}
+
+static void inputOpenFile(inputData *pinputData,char *filename)
+{
+ ELLLIST *ppathList = &pinputData->pathList;
+ pathNode *ppathNode = 0;
+ inputFile *pinputFile;
+ char *fullname = 0;
+ FILE *fp = 0;
+
+ if(!filename) {
+ fp = stdin;
+ } else if((ellCount(ppathList)==0) || strchr(filename,'/')){
+ fp = fopen(filename,"r");
+ } else {
+ ppathNode = (pathNode *)ellFirst(ppathList);
+ while(ppathNode) {
+ fullname = calloc(strlen(filename)+strlen(ppathNode->directory) +2,
+ sizeof(char));
+ strcpy(fullname,ppathNode->directory);
+ strcat(fullname,"/");
+ strcat(fullname,filename);
+ fp = fopen(fullname,"r");
+ if(fp) break;
+ free((void *)fullname);
+ ppathNode = (pathNode *)ellNext(&ppathNode->node);
+ }
+ }
+ if(!fp) {
+ fprintf(stderr,"Could not open %s\n",filename);
+ inputErrPrint((void *)pinputData);
+ exit(1);
+ }
+ pinputFile = calloc(1,sizeof(inputFile));
+ if(ppathNode) {
+ pinputFile->filename = calloc(1,strlen(fullname)+1);
+ strcpy(pinputFile->filename,fullname);
+ free((void *)fullname);
+ } else if(filename) {
+ pinputFile->filename = calloc(1,strlen(filename)+1);
+ strcpy(pinputFile->filename,filename);
+ } else {
+ pinputFile->filename = calloc(1,strlen("stdin")+1);
+ strcpy(pinputFile->filename,"stdin");
+ }
+ pinputFile->fp = fp;
+ ellInsert(&pinputData->inputFileList,0,&pinputFile->node);
+}
+
+static void inputCloseFile(inputData *pinputData)
+{
+ inputFile *pinputFile;
+
+ pinputFile = (inputFile *)ellFirst(&pinputData->inputFileList);
+ if(!pinputFile) return;
+ ellDelete(&pinputData->inputFileList,&pinputFile->node);
+ if(fclose(pinputFile->fp))
+ fprintf(stderr,"fclose failed: file %s\n",pinputFile->filename);
+ free(pinputFile->filename);
+ free(pinputFile);
+}
+
+static void inputCloseAllFiles(inputData *pinputData)
+{
+ inputFile *pinputFile;
+
+ while((pinputFile=(inputFile *)ellFirst(&pinputData->inputFileList))){
+ inputCloseFile(pinputData);
+ }
+}
+
+/*start of code that handles substitution file*/
+typedef enum {
+ tokenLBrace,tokenRBrace,tokenSeparater,tokenString,tokenEOF
+}tokenType;
+
+typedef struct subFile {
+ char *substitutionName;
+ FILE *fp;
+ int lineNum;
+ char inputBuffer[MAX_BUFFER_SIZE];
+ char *pnextChar;
+ tokenType token;
+ char string[MAX_BUFFER_SIZE];
+} subFile;
+
+typedef struct patternNode {
+ ELLNODE node;
+ char *var;
+}patternNode;
+
+typedef struct subInfo {
+ subFile *psubFile;
+ int isFile;
+ char *filename;
+ int isPattern;
+ ELLLIST patternList;
+ size_t size;
+ size_t curLength;
+ char *macroReplacements;
+}subInfo;
+
+static char *subGetNextLine(subFile *psubFile);
+static tokenType subGetNextToken(subFile *psubFile);
+static void subFileErrPrint(subFile *psubFile,char * message);
+static void freeSubFile(subInfo *psubInfo);
+static void freePattern(subInfo *psubInfo);
+static void catMacroReplacements(subInfo *psubInfo,const char *value);
+
+void freeSubFile(subInfo *psubInfo)
+{
+ subFile *psubFile = psubInfo->psubFile;
+ if(psubFile->fp) {
+ if(fclose(psubFile->fp))
+ fprintf(stderr,"fclose failed on substitution file\n");
+ }
+ free((void *)psubFile);
+ free((void *)psubInfo->filename);
+ psubInfo->psubFile = 0;
+}
+
+void freePattern(subInfo *psubInfo)
+{
+ patternNode *ppatternNode;
+ while((ppatternNode = (patternNode *)ellFirst(&psubInfo->patternList))) {
+ ellDelete(&psubInfo->patternList,&ppatternNode->node);
+ free(ppatternNode->var);
+ free(ppatternNode);
+ }
+ psubInfo->isPattern = 0;
+}
+
+static void substituteDestruct(void *pvt)
+{
+ subInfo *psubInfo = (subInfo *)pvt;
+
+ freeSubFile(psubInfo);
+ freePattern(psubInfo);
+ free((void *)psubInfo);
+ return;
+}
+
+static void substituteOpen(void **ppvt,char *substitutionName)
+{
+ subInfo *psubInfo;
+ subFile *psubFile;
+ FILE *fp;
+
+ psubInfo = calloc(1,sizeof(subInfo));
+ *ppvt = (void *)psubInfo;
+ psubFile = calloc(1,sizeof(subFile));
+ psubInfo->psubFile = psubFile;
+ ellInit(&psubInfo->patternList);
+ fp = fopen(substitutionName,"r");
+ if(!fp) {
+ fprintf(stderr,"Could not open %s\n",substitutionName);
+ exit(1);
+ }
+ psubFile->substitutionName = substitutionName;
+ psubFile->fp = fp;
+ psubFile->lineNum = 0;
+ psubFile->inputBuffer[0] = 0;
+ psubFile->pnextChar = &psubFile->inputBuffer[0];
+ subGetNextToken(psubFile);
+ return;
+}
+
+static int substituteGetNextSet(void *pvt,char **filename)
+{
+ subInfo *psubInfo = (subInfo *)pvt;
+ subFile *psubFile = psubInfo->psubFile;
+ patternNode *ppatternNode;
+
+ *filename = 0;
+ while(psubFile->token==tokenSeparater) subGetNextToken(psubFile);
+ if(psubFile->token==tokenEOF) return(0);
+ if(psubFile->token==tokenString && strcmp(psubFile->string,"file")==0) {
+ psubInfo->isFile = 1;
+ if(subGetNextToken(psubFile)!=tokenString) {
+ subFileErrPrint(psubFile,"Expecting filename");
+ exit(1);
+ }
+ freePattern(psubInfo);
+ free((void *)psubInfo->filename);
+ if(psubFile->string[0]=='"'&&psubFile->string[strlen(psubFile->string)-1]=='"') {
+ psubFile->string[strlen(psubFile->string)-1]='\0';
+ psubInfo->filename = macEnvExpand(psubFile->string+1);
+ }
+ else {
+ psubInfo->filename = macEnvExpand(psubFile->string);
+ }
+ while(subGetNextToken(psubFile)==tokenSeparater);
+ if(psubFile->token!=tokenLBrace) {
+ subFileErrPrint(psubFile,"Expecting {");
+ exit(1);
+ }
+ subGetNextToken(psubFile);
+ }
+ *filename = psubInfo->filename;
+ while(psubFile->token==tokenSeparater) subGetNextToken(psubFile);
+ if(psubFile->token==tokenLBrace) return(1);
+ if(psubFile->token==tokenRBrace) return(0);
+ if(psubFile->token!=tokenString
+ || strcmp(psubFile->string,"pattern")!=0) {
+ subFileErrPrint(psubFile,"Expecting pattern");
+ exit(1);
+ }
+ freePattern(psubInfo);
+ psubInfo->isPattern = 1;
+ while(subGetNextToken(psubFile)==tokenSeparater);
+ if(psubFile->token!=tokenLBrace) {
+ subFileErrPrint(psubFile,"Expecting {");
+ exit(1);
+ }
+ while(1) {
+ while(subGetNextToken(psubFile)==tokenSeparater);
+ if(psubFile->token!=tokenString) break;
+ ppatternNode = calloc(1,sizeof(patternNode));
+ ellAdd(&psubInfo->patternList,&ppatternNode->node);
+ ppatternNode->var = calloc(strlen(psubFile->string)+1,sizeof(char));
+ strcpy(ppatternNode->var,psubFile->string);
+ }
+ if(psubFile->token!=tokenRBrace) {
+ subFileErrPrint(psubFile,"Expecting }");
+ exit(1);
+ }
+ subGetNextToken(psubFile);
+ return(1);
+}
+
+static char *substituteGetReplacements(void *pvt)
+{
+ subInfo *psubInfo = (subInfo *)pvt;
+ subFile *psubFile = psubInfo->psubFile;
+ patternNode *ppatternNode;
+
+ if(psubInfo->macroReplacements) psubInfo->macroReplacements[0] = 0;
+ psubInfo->curLength = 0;
+ while(psubFile->token==tokenSeparater) subGetNextToken(psubFile);
+ if(psubFile->token==tokenRBrace && psubInfo->isFile) {
+ psubInfo->isFile = 0;
+ free((void *)psubInfo->filename);
+ psubInfo->filename = 0;
+ freePattern(psubInfo);
+ subGetNextToken(psubFile);
+ return(0);
+ }
+ if(psubFile->token==tokenEOF) return(0);
+ if(psubFile->token!=tokenLBrace) return(0);
+ if(psubInfo->isPattern) {
+ int gotFirstPattern = 0;
+
+ while(subGetNextToken(psubFile)==tokenSeparater);
+ ppatternNode = (patternNode *)ellFirst(&psubInfo->patternList);
+ while(1) {
+ if(psubFile->token==tokenRBrace) {
+ if(ppatternNode)
+ subFileErrPrint(psubFile,"less values than patterns");
+ subGetNextToken(psubFile);
+ return(psubInfo->macroReplacements);
+ }
+ if(psubFile->token!=tokenString) {
+ subFileErrPrint(psubFile,"Illegal token");
+ exit(-1);
+ }
+ if(gotFirstPattern) catMacroReplacements(psubInfo,",");
+ gotFirstPattern = 1;
+ if(ppatternNode) {
+ catMacroReplacements(psubInfo,ppatternNode->var);
+ catMacroReplacements(psubInfo,"=");
+ catMacroReplacements(psubInfo,psubFile->string);
+ ppatternNode = (patternNode *)ellNext(&ppatternNode->node);
+ } else {
+ subFileErrPrint(psubFile,"more values than patterns");
+ }
+ while(subGetNextToken(psubFile)==tokenSeparater);
+ }
+ } else while(1) {
+ switch(subGetNextToken(psubFile)) {
+ case tokenRBrace:
+ subGetNextToken(psubFile);
+ return(psubInfo->macroReplacements);
+ case tokenSeparater:
+ catMacroReplacements(psubInfo,",");
+ break;
+ case tokenString:
+ catMacroReplacements(psubInfo,psubFile->string);
+ break;
+ default:
+ subFileErrPrint(psubFile,"Illegal token");
+ exit(1);
+ }
+ }
+}
+
+static char *subGetNextLine(subFile *psubFile)
+{
+ char *pline;
+
+ pline = fgets(psubFile->inputBuffer,MAX_BUFFER_SIZE,psubFile->fp);
+ ++psubFile->lineNum;
+ while(pline && psubFile->inputBuffer[0]=='#') {
+ pline = fgets(psubFile->inputBuffer,MAX_BUFFER_SIZE,psubFile->fp);
+ ++psubFile->lineNum;
+ }
+ if(!pline) {
+ psubFile->token = tokenEOF;
+ psubFile->inputBuffer[0] = 0;
+ psubFile->pnextChar = 0;
+ return(0);
+ }
+ psubFile->pnextChar = &psubFile->inputBuffer[0];
+ return(&psubFile->inputBuffer[0]);
+}
+
+static void subFileErrPrint(subFile *psubFile,char * message)
+{
+ fprintf(stderr,"substitution file %s line %d: %s",
+ psubFile->substitutionName,
+ psubFile->lineNum,psubFile->inputBuffer);
+ fprintf(stderr,"%s\n",message);
+}
+
+
+static tokenType subGetNextToken(subFile *psubFile)
+{
+ char *p;
+ char *pto;
+
+ p = psubFile->pnextChar;
+ if(!p) { psubFile->token = tokenEOF; return(tokenEOF);}
+ if(*p==0 || *p=='\n' || *p=='#') {
+ p = subGetNextLine(psubFile);
+ if(!p) { psubFile->token = tokenEOF; return(tokenEOF);}
+ else { psubFile->token = tokenSeparater; return(tokenSeparater);}
+ }
+ while(isspace(*p)) p++;
+ if(*p=='{') {
+ psubFile->token = tokenLBrace;
+ psubFile->pnextChar = ++p;
+ return(tokenLBrace);
+ }
+ if(*p=='}') {
+ psubFile->token = tokenRBrace;
+ psubFile->pnextChar = ++p;
+ return(tokenRBrace);
+ }
+ if(*p==0 || isspace(*p) || *p==',') {
+ while(isspace(*p) || *p==',') p++;
+ psubFile->token = tokenSeparater;
+ psubFile->pnextChar = p;
+ return(tokenSeparater);
+ }
+ /*now handle quoted strings*/
+ if(*p=='"') {
+ pto = &psubFile->string[0];
+ *pto++ = *p++;
+ while(*p!='"') {
+ if(*p==0 || *p=='\n') {
+ subFileErrPrint(psubFile,"Strings must be on single line\n");
+ exit(1);
+ }
+ /*allow escape for imbeded quote*/
+ if((*p=='\\') && *(p+1)=='"') {
+ *pto++ = *p++;
+ *pto++ = *p++;
+ continue;
+ }
+ *pto++ = *p++;
+ }
+ *pto++ = *p++;
+ psubFile->pnextChar = p;
+ *pto = 0;
+ psubFile->token = tokenString;
+ return(tokenString);
+ }
+ /*Now take anything up to next non String token and not space*/
+ pto = &psubFile->string[0];
+ while(!isspace(*p) && (strspn(p,"\",{}")==0)) *pto++ = *p++;
+ *pto = 0;
+ psubFile->pnextChar = p;
+ psubFile->token = tokenString;
+ return(tokenString);
+}
+
+static void catMacroReplacements(subInfo *psubInfo,const char *value)
+{
+ size_t len = strlen(value);
+
+ if(psubInfo->size <= (psubInfo->curLength + len)) {
+ size_t newsize = psubInfo->size + MAX_BUFFER_SIZE;
+ char *newbuf;
+
+ if(newsize <= psubInfo->curLength + len)
+ newsize = psubInfo->curLength + len + 1;
+ newbuf = calloc(1,newsize);
+ if(!newbuf) {
+ fprintf(stderr,"calloc failed for size %Zu\n",newsize);
+ exit(1);
+ }
+ if(psubInfo->macroReplacements) {
+ memcpy(newbuf,psubInfo->macroReplacements,psubInfo->curLength);
+ free(psubInfo->macroReplacements);
+ }
+ psubInfo->size = newsize;
+ psubInfo->macroReplacements = newbuf;
+ }
+ strcat(psubInfo->macroReplacements,value);
+ psubInfo->curLength += len;
+}

394
appveyor-test.py Normal file
View File

@@ -0,0 +1,394 @@
#!/usr/bin/env python
"""Module ci-scripts AppVeyor unit tests
"""
# SET=test00 in the environment (.appveyor.yml) runs the tests in this script
# all other jobs are started as compile jobs
from __future__ import print_function
import sys, os, shutil, fileinput
import distutils.util
import re
import subprocess as sp
import unittest
import logging
from argparse import Namespace
# Directory the test runner was started in; the TestCase setUp() methods
# os.chdir() back here so relative paths (e.g. SETUP_PATH entries like '.')
# keep resolving consistently between tests.
builddir = os.getcwd()
def find_in_file(regex, filename):
    """Return True if any line of *filename* matches *regex*, else False.

    Arguments:
      regex    -- pattern passed straight to re.search (string or compiled)
      filename -- path of the text file to scan line by line
    """
    # Use a context manager so the handle is closed on every path;
    # the original opened the file and never closed it (handle leak).
    with open(filename, "r") as fh:
        for line in fh:
            if re.search(regex, line):
                return True
    return False
def getStringIO():
    """Return a fresh in-memory text buffer for capturing printed output.

    Picks io.StringIO on Python 3 and the classic StringIO.StringIO on
    Python 2, so the buffer accepts whatever str type print() emits.
    """
    if sys.version_info[0] >= 3:
        import io
        return io.StringIO()
    else:
        import StringIO
        return StringIO.StringIO()
# Make the AppVeyor helper scripts importable, then load the module under test.
sys.path.append('appveyor')
import do

# we're working with tags (detached heads) a lot: suppress advice
do.call_git(['config', '--global', 'advice.detachedHead', 'false'])
class TestSourceSet(unittest.TestCase):
    """Tests for do.source_set(): locating setup files via SETUP_PATH and
    loading their variable definitions into do.setup."""

    def setUp(self):
        # Fixed search path for the test fixture setup files; remove any
        # BASE leaking in from the CI environment so presets are controlled
        # per-test.
        os.environ['SETUP_PATH'] = '.:appveyor'
        if 'BASE' in os.environ:
            del os.environ['BASE']
        do.clear_lists()
        os.chdir(builddir)

    def test_EmptySetupDirsPath(self):
        # Without SETUP_PATH, source_set() is expected to raise NameError.
        del os.environ['SETUP_PATH']
        self.assertRaisesRegexp(NameError, '\(SETUP_PATH\) is empty', do.source_set, 'test01')

    def test_InvalidSetupName(self):
        # A setup name not found in any SETUP_PATH directory raises NameError.
        self.assertRaisesRegexp(NameError, 'does not exist in SETUP_PATH', do.source_set, 'xxdoesnotexistxx')

    def test_ValidSetupName(self):
        # Capture stdout while sourcing (source_set presumably prints progress
        # messages -- see test_DoubleIncludeGetsIgnored below).
        capturedOutput = getStringIO()
        sys.stdout = capturedOutput
        do.source_set('test01')
        sys.stdout = sys.__stdout__
        self.assertEqual(do.setup['BASE'], '7.0', 'BASE was not set to \'7.0\'')

    def test_SetupDoesNotOverridePreset(self):
        # A value already present in the environment must win over the
        # setup-file value.
        os.environ['BASE'] = 'foo'
        capturedOutput = getStringIO()
        sys.stdout = capturedOutput
        do.source_set('test01')
        sys.stdout = sys.__stdout__
        self.assertEqual(do.setup['BASE'], 'foo',
                         'Preset BASE was overridden by test01 setup (expected \'foo\' got {0})'
                         .format(do.setup['BASE']))

    def test_IncludeSetupFirstSetWins(self):
        # test02 includes test01; the first definition encountered must win,
        # and definitions from the included file must still be visible.
        capturedOutput = getStringIO()
        sys.stdout = capturedOutput
        do.source_set('test02')
        sys.stdout = sys.__stdout__
        self.assertEqual(do.setup['BASE'], 'foo',
                         'BASE set in test02 was overridden by test01 setup (expected \'foo\' got {0})'
                         .format(do.setup['BASE']))
        self.assertEqual(do.setup['FOO'], 'bar', 'Setting of single word does not work')
        self.assertEqual(do.setup['FOO2'], 'bar bar2', 'Setting of multiple words does not work')
        self.assertEqual(do.setup['FOO3'], 'bar bar2', 'Indented setting of multiple words does not work')
        self.assertEqual(do.setup['SNCSEQ'], 'R2-2-7', 'Setup test01 was not included')

    def test_DoubleIncludeGetsIgnored(self):
        # Including the same setup file twice should print a notice and skip
        # the second include rather than re-processing it.
        capturedOutput = getStringIO()
        sys.stdout = capturedOutput
        do.source_set('test03')
        sys.stdout = sys.__stdout__
        self.assertRegexpMatches(capturedOutput.getvalue(), 'Ignoring already included setup file')
class TestUpdateReleaseLocal(unittest.TestCase):
    """Tests for do.update_release_local(): writing MODULE=path lines into
    the cached RELEASE.local, each module appearing exactly once and in
    first-written order, with later calls updating the path in place."""

    # Path of the RELEASE.local file inside the ci-scripts cache directory.
    release_local = os.path.join(do.cachedir, 'RELEASE.local')

    def setUp(self):
        # Start every test from a clean slate: no pre-existing RELEASE.local.
        if os.path.exists(self.release_local):
            os.remove(self.release_local)
        os.chdir(builddir)

    def test_SetModule(self):
        do.update_release_local('MOD1', '/foo/bar')
        found = 0
        # fileinput with inplace=1 rewrites the file while iterating; here it
        # is only used to scan the lines and count MOD1 occurrences.
        for line in fileinput.input(self.release_local, inplace=1):
            if 'MOD1=' in line:
                self.assertEqual(line.strip(), 'MOD1=/foo/bar', 'MOD1 not set correctly')
                found += 1
        fileinput.close()
        self.assertEqual(found, 1, 'MOD1 not written once to RELEASE.local (found {0})'.format(found))

    def test_SetBaseAndMultipleModules(self):
        # Write EPICS_BASE first, then two modules; the second MOD1 call must
        # update the existing line (to /foo/bar1), not append a duplicate.
        do.update_release_local('EPICS_BASE', '/bar/foo')
        do.update_release_local('MOD1', '/foo/bar')
        do.update_release_local('MOD2', '/foo/bar2')
        do.update_release_local('MOD1', '/foo/bar1')
        found = {}    # occurrence count per key
        foundat = {}  # line number where each key was (last) seen
        for line in fileinput.input(self.release_local, inplace=1):
            if 'MOD1=' in line:
                self.assertEqual(line.strip(), 'MOD1=/foo/bar1',
                                 'MOD1 not set correctly (expected \'MOD1=/foo/bar1\' found \'{0}\')'
                                 .format(line))
                if 'mod1' in found:
                    found['mod1'] += 1
                else:
                    found['mod1'] = 1
                foundat['mod1'] = fileinput.filelineno()
            if 'MOD2=' in line:
                self.assertEqual(line.strip(), 'MOD2=/foo/bar2',
                                 'MOD2 not set correctly (expected \'MOD2=/foo/bar2\' found \'{0}\')'
                                 .format(line))
                if 'mod2' in found:
                    found['mod2'] += 1
                else:
                    found['mod2'] = 1
                foundat['mod2'] = fileinput.filelineno()
            if 'EPICS_BASE=' in line:
                self.assertEqual(line.strip(), 'EPICS_BASE=/bar/foo',
                                 'EPICS_BASE not set correctly (expected \'EPICS_BASE=/bar/foo\' found \'{0}\')'
                                 .format(line))
                if 'base' in found:
                    found['base'] += 1
                else:
                    found['base'] = 1
                foundat['base'] = fileinput.filelineno()
        fileinput.close()
        # Each key must appear exactly once ...
        self.assertEqual(found['mod1'], 1,
                         'MOD1 does not appear once in RELEASE.local (found {0})'.format(found['mod1']))
        self.assertEqual(found['mod2'], 1,
                         'MOD2 does not appear once in RELEASE.local (found {0})'.format(found['mod2']))
        self.assertEqual(found['base'], 1,
                         'EPICS_BASE does not appear once in RELEASE.local (found {0})'.format(found['base']))
        # ... and dependency order must be preserved: modules before EPICS_BASE
        # (NOTE(review): EPICS_BASE is apparently kept last in the file --
        # confirmed here by asserting its line number is the greatest).
        self.assertGreater(foundat['base'], foundat['mod2'],
                           'EPICS_BASE (line {0}) appears before MOD2 (line {1})'
                           .format(foundat['base'], foundat['mod2']))
        self.assertGreater(foundat['mod2'], foundat['mod1'],
                           'MOD2 (line {0}) appears before MOD1 (line {1})'.format(foundat['mod2'], foundat['mod1']))
class TestAddDependencyUpToDateCheck(unittest.TestCase):
    """Tests of the 'checked_out' marker logic in do.add_dependency()."""

    hash_3_15_6 = "ce7943fb44beb22b453ddcc0bda5398fadf72096"
    location = os.path.join(do.cachedir, 'base-R3.15.6')
    licensefile = os.path.join(location, 'LICENSE')
    checked_file = os.path.join(location, 'checked_out')
    release_file = os.path.join(location, 'configure', 'RELEASE')

    def setUp(self):
        os.environ['SETUP_PATH'] = '.:appveyor'
        # start from a clean cache entry for Base
        if os.path.exists(self.location):
            shutil.rmtree(self.location, onerror=do.remove_readonly)
        do.clear_lists()
        os.chdir(builddir)
        do.source_set('defaults')
        do.complete_setup('BASE')

    def test_MissingDependency(self):
        do.setup['BASE'] = 'R3.15.6'
        do.add_dependency('BASE')
        self.assertTrue(os.path.exists(self.licensefile), 'Missing dependency was not checked out')
        self.assertTrue(os.path.exists(self.checked_file), 'Checked-out commit marker was not written')
        with open(self.checked_file, 'r') as marker:
            checked_out = marker.read().strip()
        self.assertEqual(checked_out, self.hash_3_15_6,
                         'Wrong commit of dependency checked out (expected=\"{0}\" found=\"{1}\")'
                         .format(self.hash_3_15_6, checked_out))
        # Base itself must NOT have its configure/RELEASE overwritten
        self.assertFalse(find_in_file('include \$\(TOP\)/../RELEASE.local', self.release_file),
                         'RELEASE in Base includes TOP/../RELEASE.local')

    def test_UpToDateDependency(self):
        do.setup['BASE'] = 'R3.15.6'
        do.add_dependency('BASE')
        os.remove(self.licensefile)
        do.add_dependency('BASE')
        self.assertFalse(os.path.exists(self.licensefile), 'Check out on top of existing up-to-date dependency')

    def test_OutdatedDependency(self):
        do.setup['BASE'] = 'R3.15.6'
        do.add_dependency('BASE')
        os.remove(self.licensefile)
        # invalidate the marker to simulate an outdated check-out
        with open(self.checked_file, "w") as fout:
            print('XXX not the right hash XXX', file=fout)
        do.add_dependency('BASE')
        self.assertTrue(os.path.exists(self.licensefile), 'No check-out on top of out-of-date dependency')
        with open(self.checked_file, 'r') as marker:
            checked_out = marker.read().strip()
        self.assertEqual(checked_out, self.hash_3_15_6,
                         "Wrong commit of dependency checked out (expected='{0}' found='{1}')"
                         .format(self.hash_3_15_6, checked_out))
def is_shallow_repo(place):
    """Return True if the git work tree at `place` is a shallow clone.

    Uses 'git rev-parse --is-shallow-repository'. Git versions that do not
    know the flag echo it back verbatim; in that case fall back to checking
    for the presence of .git/shallow.
    """
    # BUGFIX: check_output() returns bytes on Python 3; the str comparisons
    # below were always False without decoding (cf. get_git_hash in do.py)
    check = sp.check_output(['git', 'rev-parse', '--is-shallow-repository'],
                            cwd=place).decode().strip()
    if check == '--is-shallow-repository':
        # old git: flag unsupported and echoed back
        if os.path.exists(os.path.join(place, '.git', 'shallow')):
            check = 'true'
        else:
            check = 'false'
    return check == 'true'
class TestAddDependencyOptions(unittest.TestCase):
    """Tests of the _RECURSIVE / _DEPTH options of do.add_dependency(),
    plus the MSI patch applied to Base 3.14."""

    location = os.path.join(do.cachedir, 'mcoreutils-master')
    testfile = os.path.join(location, '.ci', 'LICENSE')

    def setUp(self):
        os.environ['SETUP_PATH'] = '.:appveyor'
        # wipe the whole cache so every test clones afresh
        if os.path.exists(do.cachedir):
            shutil.rmtree(do.cachedir, onerror=do.remove_readonly)
        do.clear_lists()
        do.source_set('defaults')
        do.complete_setup('MCoreUtils')
        do.setup['MCoreUtils'] = 'master'

    def test_Default(self):
        do.add_dependency('MCoreUtils')
        # message fix: closing parenthesis was missing
        self.assertTrue(os.path.exists(self.testfile),
                        'Submodule (.ci) not checked out recursively (requested: default=YES)')
        self.assertTrue(is_shallow_repo(self.location),
                        'Module not checked out shallow (requested: default=5)')

    def test_SetRecursiveNo(self):
        do.setup['MCoreUtils_RECURSIVE'] = 'NO'
        do.add_dependency('MCoreUtils')
        self.assertFalse(os.path.exists(self.testfile), 'Submodule (.ci) checked out recursively')

    def test_SetDepthZero(self):
        do.setup['MCoreUtils_DEPTH'] = '0'
        do.add_dependency('MCoreUtils')
        self.assertFalse(is_shallow_repo(self.location), 'Module checked out shallow (requested full)')

    def test_SetDepthThree(self):
        do.setup['MCoreUtils_DEPTH'] = '3'
        do.add_dependency('MCoreUtils')
        # message fix: this test requests depth 3, not the default 5
        self.assertTrue(is_shallow_repo(self.location),
                        'Module not checked out shallow (requested: depth=3)')

    def test_AddMsiTo314(self):
        do.complete_setup('BASE')
        do.setup['BASE'] = 'R3.14.12.1'
        msifile = os.path.join(do.cachedir, 'base-R3.14.12.1', 'src', 'dbtools', 'msi.c')
        do.add_dependency('BASE')
        self.assertTrue(os.path.exists(msifile), 'MSI was not added to Base 3.14')
def repo_access(dep):
    """Fill in default repository settings for `dep` and return the git
    exit code of an ls-remote against its repository URL (0 = reachable)."""
    do.set_setup_from_env(dep)
    # same defaulting chain as do.complete_setup(), minus version/options
    for key, value in ((dep + "_DIRNAME", dep.lower()),
                       (dep + "_REPONAME", dep.lower()),
                       ('REPOOWNER', 'epics-modules')):
        do.setup.setdefault(key, value)
    do.setup.setdefault(dep + "_REPOOWNER", do.setup['REPOOWNER'])
    do.setup.setdefault(dep + "_REPOURL", 'https://github.com/{0}/{1}.git'
                        .format(do.setup[dep + '_REPOOWNER'], do.setup[dep + '_REPONAME']))
    with open(os.devnull, 'w') as devnull:
        return do.call_git(['ls-remote', '--quiet', '--heads', do.setup[dep + '_REPOURL']],
                           stdout=devnull, stderr=devnull)
class TestDefaultModuleURLs(unittest.TestCase):
    """Verify the default repository URLs of all well-known modules exist."""

    modules = ['BASE', 'PVDATA', 'PVACCESS', 'NTYPES',
               'SNCSEQ', 'STREAM', 'ASYN', 'STD',
               'CALC', 'AUTOSAVE', 'BUSY', 'SSCAN',
               'IOCSTATS', 'MOTOR', 'IPAC', ]

    def setUp(self):
        os.environ['SETUP_PATH'] = '.:appveyor'
        do.clear_lists()
        os.chdir(builddir)
        do.source_set('defaults')

    def test_Repos(self):
        for mod in self.modules:
            self.assertEqual(repo_access(mod), 0,
                             'Defaults for {0} do not point to a valid git repository at {1}'
                             .format(mod, do.setup[mod + '_REPOURL']))
class TestVCVars(unittest.TestCase):
    def test_vcvars(self):
        """Smoke test: with_vcvars() only applies to MSVC on win32 Python."""
        using_mingw = 'CMP' in os.environ and os.environ['CMP'] in ('mingw',)
        if using_mingw or distutils.util.get_platform() != "win32":
            raise unittest.SkipTest()
        do.with_vcvars('env')
class TestSetupForBuild(unittest.TestCase):
    """Tests of do.setup_for_build(): EPICS_HOST_ARCH and PATH handling."""

    # remember the job's own settings so tearDown can restore them
    configuration = os.environ['CONFIGURATION']
    platform = os.environ['PLATFORM']
    cc = os.environ['CMP']
    args = Namespace(paths=[])

    def setUp(self):
        os.environ.pop('EPICS_HOST_ARCH', None)

    def tearDown(self):
        os.environ['CONFIGURATION'] = self.configuration
        os.environ['PLATFORM'] = self.platform
        os.environ['CMP'] = self.cc

    def test_AddPathsOption(self):
        os.environ['FOOBAR'] = 'BAR'
        args = Namespace(paths=['/my/{FOOBAR}/dir', '/my/foobar'])
        do.setup_for_build(args)
        self.assertTrue(re.search('/my/BAR/dir', os.environ['PATH']), 'Expanded path not in PATH')
        self.assertTrue(re.search('/foobar', os.environ['PATH']), 'Plain path not in PATH')
        os.environ.pop('FOOBAR', None)

    def test_HostArchConfiguration(self):
        for cfg in ('dynamic', 'dynamic-debug', 'static', 'static-debug'):
            os.environ['CONFIGURATION'] = cfg
            do.setup_for_build(self.args)
            self.assertTrue('EPICS_HOST_ARCH' in os.environ,
                            'EPICS_HOST_ARCH is not set for Configuration={0}'.format(cfg))
            hostarch = os.environ['EPICS_HOST_ARCH']
            if re.search('static', cfg):
                # static wins: no -debug suffix even for static-debug
                self.assertTrue(re.search('-static$', hostarch),
                                'EPICS_HOST_ARCH is not -static for Configuration={0}'.format(cfg))
                self.assertFalse(re.search('debug', hostarch),
                                 'EPICS_HOST_ARCH is -debug for Configuration={0}'.format(cfg))
            elif re.search('debug', cfg):
                self.assertFalse(re.search('static', hostarch),
                                 'EPICS_HOST_ARCH is -static for Configuration={0}'.format(cfg))
                self.assertTrue(re.search('-debug$', hostarch),
                                'EPICS_HOST_ARCH is not -debug for Configuration={0}'.format(cfg))
            else:
                self.assertFalse(re.search('static', hostarch),
                                 'EPICS_HOST_ARCH is -static for Configuration={0}'.format(cfg))
                self.assertFalse(re.search('debug', hostarch),
                                 'EPICS_HOST_ARCH is -debug for Configuration={0}'.format(cfg))

    def test_HostArchPlatform(self):
        for plat in ('x86', 'x64', 'X64'):
            for comp in ('vs2019', 'mingw'):
                os.environ['PLATFORM'] = plat
                os.environ['CMP'] = comp
                os.environ['CONFIGURATION'] = 'dynamic'
                do.setup_for_build(self.args)
                self.assertTrue('EPICS_HOST_ARCH' in os.environ,
                                'EPICS_HOST_ARCH is not set for {0} / {1}'.format(comp, plat))
                hostarch = os.environ['EPICS_HOST_ARCH']
                if plat == 'x86':
                    self.assertTrue(re.search('^win32-x86', hostarch),
                                    'EPICS_HOST_ARCH is not win32-x86 for {0} / {1}'.format(comp, plat))
                else:
                    self.assertTrue(re.search('^windows-x64', hostarch),
                                    'EPICS_HOST_ARCH is not windows-x64 for {0} / {1}'.format(comp, plat))
                if comp == 'mingw':
                    self.assertTrue(re.search('-mingw$', hostarch),
                                    'EPICS_HOST_ARCH is not -mingw for {0} / {1}'.format(comp, plat))
                    pattern = 'mingw32' if plat == 'x86' else 'mingw64'
                    self.assertTrue(re.search(pattern, os.environ['PATH']),
                                    'Binary location for {0} not in PATH'.format(pattern))
                    self.assertTrue(re.search(pattern, os.environ['INCLUDE']),
                                    'Include location for {0} not in INCLUDE'.format(pattern))

    def test_StrawberryInPath(self):
        os.environ['CMP'] = 'vs2019'
        do.setup_for_build(self.args)
        self.assertTrue(re.search('strawberry', os.environ['PATH'], flags=re.IGNORECASE),
                        'Strawberry Perl location not in PATH for vs2019')
if __name__ == "__main__":
    # VV=1 switches on debug logging and verbose dependency builds
    if os.environ.get('VV') == '1':
        logging.basicConfig(level=logging.DEBUG)
        do.silent_dep_builds = False
    do.host_info()
    if sys.argv[1:] == ['env']:
        # testing with_vcvars: just dump the environment
        for key, value in os.environ.items():
            print(key, '=', value)
    else:
        unittest.main()

View File

@@ -0,0 +1,156 @@
# .appveyor.yml for use with EPICS Base ci-scripts
# (see: https://github.com/epics-base/ci-scripts)
# This is YAML - indentation levels are crucial
# NOTE(review): indentation appears to have been stripped from this copy;
# nested keys and list items must be re-indented before this YAML is usable.
# NOTE(review): 'install:' seems to appear twice at top level (packages vs.
# build steps) - duplicate mapping keys; most YAML loaders keep only the
# last occurrence. Verify against the upstream example file.
#---------------------------------#
# build cache #
#---------------------------------#
# The AppVeyor cache allowance is way too small (1GB per account across all projects, branches and jobs)
# to be used for the dependency builds.
cache:
- C:\Users\appveyor\.tools
#---------------------------------#
# additional packages #
#---------------------------------#
install:
# for the sequencer
- cinst re2c
#---------------------------------#
# repository cloning #
#---------------------------------#
# Called at very beginning, before repo cloning
init:
# Set autocrlf to make batch files work
- git config --global core.autocrlf true
# print the connection info for RDP connections (see 'debugging' below)
#- ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
# Set clone depth (do not fetch complete history)
clone_depth: 50
# Skipping commits affecting only specific files
skip_commits:
files:
- 'documentation/*'
- 'templates/*'
- '**/*.html'
- '**/*.md'
- '.travis.yml'
#---------------------------------#
# build matrix configuration #
#---------------------------------#
# Since dependencies cannot be cached and AppVeyor only grants a single builder VM, all jobs
# are executed sequentially, each one taking 10-15 minutes.
# Consider this when defining your build matrix. (A full matrix build takes more than 8 hours.)
# Default build worker image
image: Visual Studio 2015
# Build Configurations: dll/static, regular/debug
configuration:
- dynamic
- static
- dynamic-debug
- static-debug
# Environment variables: compiler toolchain, base version, setup file, ...
environment:
# common / default variables for all jobs
SETUP_PATH: .ci-local:.ci
matrix:
- CMP: vs2019
SET: test00
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
- CMP: mingw
- CMP: vs2019
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
- CMP: vs2019
BASE: 3.15
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
- CMP: vs2019
BASE: 3.14
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
- CMP: vs2017
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
- CMP: vs2015
- CMP: vs2013
- CMP: vs2012
- CMP: vs2010
- CMP: vs2008
# Platform: processor architecture
platform:
- x86
- x64
# Matrix configuration: exclude sets of jobs
matrix:
exclude:
# VS2012 and older installs don't have the 64 bit compiler
- platform: x64
CMP: vs2012
- platform: x64
CMP: vs2010
- platform: x64
CMP: vs2008
# Exclude more jobs to reduce build time
# E.g., skip 32-bit for newer compilers
#- platform: x86
# CMP: vs2019
#- platform: x86
# CMP: vs2017
#---------------------------------#
# building & testing #
#---------------------------------#
install:
- cmd: git submodule update --init --recursive
- cmd: python .ci/appveyor/do.py prepare
build_script:
- cmd: python .ci/appveyor/do.py build
test_script:
- cmd: python .ci/appveyor/do.py test
on_finish:
- ps: Get-ChildItem *.tap -Recurse -Force | % { Push-AppveyorArtifact $_.FullName -FileName $_.Name }
- cmd: python .ci/appveyor/do.py build test-results -s
#---------------------------------#
# debugging #
#---------------------------------#
## if you want to connect by remote desktop to a failed build, uncomment these lines
## note that you will need to connect within the usual build timeout limit (60 minutes)
## so you may want to adjust the build matrix above to just build the one of interest
# to print the RDP connection info
# uncomment the appropriate line in the init: section above
# block a failed build (until the watchdog barks)
#on_failure:
# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
#---------------------------------#
# notifications #
#---------------------------------#
notifications:
- provider: Email
to:
- me@example.com
on_build_success: false
- provider: GitHubPullRequest

View File

@@ -0,0 +1,72 @@
# .appveyor.yml for use with EPICS Base ci-scripts
# (see: https://github.com/epics-base/ci-scripts)
# This is YAML - indentation levels are crucial
# NOTE(review): indentation appears to have been stripped from this copy;
# nested keys and list items must be re-indented before this YAML is usable.
cache:
- C:\Users\appveyor\.tools
init:
- git config --global core.autocrlf true
clone_depth: 50
skip_commits:
files:
- 'documentation/*'
- 'templates/*'
- '**/*.html'
- '**/*.md'
- '.travis.yml'
image: Visual Studio 2019
# Build Configurations: dll/static, regular/debug
configuration:
- dynamic
# - static
- dynamic-debug
# - static-debug
environment:
# common / default variables for all jobs
SETUP_PATH: .ci-local:.ci
matrix:
- CMP: vs2019
BASE: 7.0
- CMP: vs2019
BASE: 3.15
# Platform: processor architecture
platform:
# - x86
- x64
# Matrix configuration: exclude sets of jobs
matrix:
exclude:
# VS2012 and older installs don't have the 64 bit compiler
- platform: x64
CMP: vs2012
- platform: x64
CMP: vs2010
- platform: x64
CMP: vs2008
install:
- cmd: git submodule update --init --recursive
- cmd: python .ci/appveyor/do.py prepare
build_script:
- cmd: python .ci/appveyor/do.py build
test_script:
- cmd: python .ci/appveyor/do.py test
on_finish:
- ps: Get-ChildItem *.tap -Recurse -Force | % { Push-AppveyorArtifact $_.FullName -FileName $_.Name }
- cmd: python .ci/appveyor/do.py build test-results -s
notifications:
- provider: GitHubPullRequest

68
appveyor/README.md Normal file
View File

@@ -0,0 +1,68 @@
# AppVeyor Scripts for EPICS Modules
## Features
- Use different compilers (Visual Studio, MinGW)
- Use different VS versions (2008, 2010, 2012, 2013, 2015, 2017, 2019)
- Compile for Windows 32bit and 64bit
- Create static libraries or DLLs (plus the matching executables)
- Create optimized or debug builds
## How to Use these Scripts
1. Get an account on [AppVeyor](https://www.appveyor.com/), connect
it to your GitHub account and activate your support module's
repository. For more details, please refer to the
[AppVeyor documentation](https://www.appveyor.com/docs/).
2. Add the ci-scripts repository as a Git Submodule
(see [README](../README.md) one level above).
3. Add settings files defining which dependencies in which versions
you want to build against
(see [README](../README.md) one level above).
4. Create an AppVeyor configuration by copying one of the examples into
the root directory of your module.
```
$ cp .ci/appveyor/.appveyor.yml.example-full .appveyor.yml
```
5. Edit the `.appveyor.yml` configuration to include the jobs you want
AppVeyor to run.
AppVeyor automatically creates a build matrix with the following axes:
1. `configuration:` \
Select static or dynamic (DLL) as well as regular or debug builds.
2. `platform:` \
Select 32bit or 64bit processor architecture.
3. `environment: / matrix:` \
List of environment variable settings. Each list element (starting with
a dash) is one step on the axis of the build matrix. \
Set `CMP` to select the compiler: `mingw` for the native
[MinGW](http://mingw-w64.org/) GNU compiler, `vs2008` ...`vs2019`
(options listed above) for the Microsoft Visual Studio compilers.
Your builds will take a long time. \
AppVeyor only grants a single worker VM - all jobs of the matrix are
executed sequentially. Each job will take around 10 minutes.
The `matrix: / exclude:` setting can be used to reduce the number of
jobs. Check the [AppVeyor docs](https://www.appveyor.com/docs/build-configuration/#build-matrix)
for more ways to reduce the build matrix size.
6. Push your changes and check
[ci.appveyor.com](https://ci.appveyor.com/) for your build results.
## Known Issues
#### Build Worker Images
The AppVeyor documentation on build worker images doesn't seem to fully
describe the way things are handled internally.
The tested and suggested reproducible way of defining the build worker image
is shown in the example configuration files:
- Set the default image using the `image:` tag.
- Override the image for specific jobs by setting the
`APPVEYOR_BUILD_WORKER_IMAGE` environment variable.

635
appveyor/do.py Normal file
View File

@@ -0,0 +1,635 @@
#!/usr/bin/env python
"""Windows (AppVeyor) ci build script
"""
from __future__ import print_function
import sys, os, stat, shutil
import fileinput
import logging
import re
import subprocess as sp
import distutils.util
logger = logging.getLogger(__name__)
# Setup ANSI Colors
ANSI_RED = "\033[31;1m"
ANSI_GREEN = "\033[32;1m"
ANSI_YELLOW = "\033[33;1m"
ANSI_BLUE = "\033[34;1m"
ANSI_MAGENTA = "\033[35;1m"
ANSI_CYAN = "\033[36;1m"
ANSI_RESET = "\033[0m"
ANSI_CLEAR = "\033[0K"

# Global state: setup files already loaded, modules that still need to be
# compiled, effective settings, and resolved module locations
seen_setups = []
modules_to_compile = []
setup = {}
places = {}

# Cache and tools directories live in the user's home (fall back to CWD);
# CACHEDIR in the environment overrides the cache location
if 'HomeDrive' in os.environ:
    cachedir = os.path.join(os.getenv('HomeDrive'), os.getenv('HomePath'), '.cache')
    toolsdir = os.path.join(os.getenv('HomeDrive'), os.getenv('HomePath'), '.tools')
elif 'HOME' in os.environ:
    cachedir = os.path.join(os.getenv('HOME'), '.cache')
    toolsdir = os.path.join(os.getenv('HOME'), '.tools')
else:
    cachedir = os.path.join('.', '.cache')
    toolsdir = os.path.join('.', '.tools')
if 'CACHEDIR' in os.environ:
    cachedir = os.environ['CACHEDIR']

# Locations of the vcvarsall.bat scripts per Visual Studio version
vcvars_table = {
    # https://en.wikipedia.org/wiki/Microsoft_Visual_Studio#History
    'vs2019': r'C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Auxiliary\Build\vcvarsall.bat',
    'vs2017': r'C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvarsall.bat',
    'vs2015': r'C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat',
    'vs2013': r'C:\Program Files (x86)\Microsoft Visual Studio 12.0\VC\vcvarsall.bat',
    'vs2012': r'C:\Program Files (x86)\Microsoft Visual Studio 11.0\VC\vcvarsall.bat',
    'vs2010': r'C:\Program Files (x86)\Microsoft Visual Studio 10.0\VC\vcvarsall.bat',
    'vs2008': r'C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\vcvarsall.bat',
}
# Directory containing the ci-scripts (the parent of this script's directory)
ciscriptsdir = os.path.abspath(os.path.dirname(sys.argv[0]))
if os.path.basename(ciscriptsdir) == 'appveyor':
    # BUGFIX: the previous rstrip(os.pathsep+'appveyor') stripped a
    # *character set* (and used pathsep instead of sep), which could eat
    # arbitrary trailing characters of the parent path; dirname() is correct
    ciscriptsdir = os.path.dirname(ciscriptsdir)

# BASE=SELF means the module being built is EPICS Base itself
if 'BASE' in os.environ and os.environ['BASE'] == 'SELF':
    building_base = True
    places['EPICS_BASE'] = '.'
else:
    building_base = False
def modlist():
    """Return the effective list of module names to handle.

    Empty when building Base itself; otherwise 'BASE' followed by the
    (upper-cased) contents of ADD_MODULES and MODULES.
    """
    if building_base:
        ret = []
    else:
        for var in ('ADD_MODULES', 'MODULES'):
            setup.setdefault(var, '')
            # environment always overrides the setup files
            if var in os.environ:
                setup[var] = os.environ[var]
                logger.debug('ENV assignment: %s = %s', var, setup[var])
        ret = ['BASE'] + setup['ADD_MODULES'].upper().split() + setup['MODULES'].upper().split()
    logger.debug('Effective module list: %s', ret)
    return ret
zip7 = r'C:\Program Files\7-Zip\7z'  # path to the 7-Zip command line tool
make = ''  # set by setup_for_build() to the make.exe in toolsdir
isbase314 = False  # set by setup_for_build() when EPICS Base is a 3.14 release
silent_dep_builds = True  # dependency builds run with make -s unless disabled
def host_info():
    """Print build worker image, Python setup, and detected VS versions."""
    print('{0}AppVeyor Build Worker Image:{1} {2}'
          .format(ANSI_CYAN, ANSI_RESET, os.environ['APPVEYOR_BUILD_WORKER_IMAGE']))
    print('{0}Python setup{1}'.format(ANSI_CYAN, ANSI_RESET))
    print(sys.version)
    print('PYTHONPATH')
    for entry in sys.path:
        print(' ', entry)
    print('platform =', distutils.util.get_platform())
    print('{0}Available Visual Studio versions{1}'.format(ANSI_CYAN, ANSI_RESET))
    for name, batfile in vcvars_table.items():
        if os.path.exists(batfile):
            print('Found', name, 'in', batfile)
    sys.stdout.flush()
# Used from unittests
def clear_lists():
    """Reset all module-level state in place (used by the unit tests)."""
    seen_setups[:] = []
    modules_to_compile[:] = []
    setup.clear()
    places.clear()
# Error-handler to make shutil.rmtree delete read-only files on Windows
def remove_readonly(func, path, excinfo):
    """shutil.rmtree onerror hook: make `path` writable, then retry `func`."""
    os.chmod(path, stat.S_IWRITE)
    func(path)
# source_set(setup)
#
# Source a settings file (extension .set) found in the setup_dirs path
# May be called recursively (from within a setup file)
def source_set(name):
    """Load <name>.set from the SETUP_PATH search path into `setup`.

    Existing settings (and environment values) take precedence; 'include'
    lines recurse into further setup files.
    """
    # allowed separators: colon or whitespace
    search_dirs = os.getenv('SETUP_PATH', "").replace(':', ' ').split()
    if not search_dirs:
        raise NameError("{0}Search path for setup files (SETUP_PATH) is empty{1}".format(ANSI_RED, ANSI_RESET))
    for candidate in search_dirs:
        set_file = os.path.join(candidate, name) + ".set"
        if set_file in seen_setups:
            print("Ignoring already included setup file {0}".format(set_file))
            return
        if not os.path.isfile(set_file):
            continue
        seen_setups.append(set_file)
        print("Loading setup file {0}".format(set_file))
        sys.stdout.flush()
        with open(set_file) as fp:
            for line in fp:
                logger.debug('Next line: %s', line.strip())
                stripped = line.strip()
                if not stripped or stripped[0] == '#':
                    continue
                if line.startswith("include"):
                    logger.debug('Found an include, reading %s', line.split()[1])
                    source_set(line.split()[1])
                    continue
                assign = line.replace('"', '').strip().split("=", 1)
                logger.debug('Interpreting as assignment')
                # only assign if not already set (env/earlier files win)
                setup.setdefault(assign[0], os.getenv(assign[0], ""))
                if not setup[assign[0]].strip():
                    logger.debug('Doing assignment: %s = %s', assign[0], assign[1])
                    setup[assign[0]] = assign[1]
        break
    else:
        raise NameError("{0}Setup file {1} does not exist in SETUP_PATH search path ({2}){3}"
                        .format(ANSI_RED, name, search_dirs, ANSI_RESET))
# update_release_local(var, location)
#   var      name of the variable to set in RELEASE.local
#   location location (absolute path) of where variable should point to
#
# Manipulate RELEASE.local in the cache location:
# - replace "$var=$location" line if it exists and has changed
# - otherwise add "$var=$location" line and possibly move EPICS_BASE=... line to the end
# Set places[var] = location
def update_release_local(var, location):
    """Set `var` to `location` in the cached RELEASE.local, keeping the
    EPICS_BASE definition as the last line of the file."""
    release_local = os.path.join(cachedir, 'RELEASE.local')
    updated_line = '{0}={1}'.format(var, location.replace('\\', '/'))
    places[var] = location
    if not os.path.exists(release_local):
        logger.debug('RELEASE.local does not exist, creating it')
        try:
            os.makedirs(cachedir)
        except OSError:
            # cache dir may already exist; creating the file below is what matters
            pass
        open(release_local, 'w').close()
    base_line = ''
    found = False
    logger.debug("Opening RELEASE.local for adding '%s'", updated_line)
    for line in fileinput.input(release_local, inplace=1):
        outputline = line.strip()
        if 'EPICS_BASE=' in line:
            # remember EPICS_BASE and re-append it at the end below
            base_line = line.strip()
            logger.debug("Found EPICS_BASE line '%s', not writing it", base_line)
            continue
        elif '{0}='.format(var) in line:
            logger.debug("Found '%s=' line, replacing", var)
            found = True
            outputline = updated_line
        logger.debug("Writing line to RELEASE.local: '%s'", outputline)
        print(outputline)
    fileinput.close()
    with open(release_local, "a") as fout:
        if not found:
            logger.debug("Adding new definition: '%s'", updated_line)
            print(updated_line, file=fout)
        if base_line:
            logger.debug("Writing EPICS_BASE line: '%s'", base_line)
            print(base_line, file=fout)
def set_setup_from_env(dep):
    """Copy all environment overrides for dependency `dep` into `setup`."""
    suffixes = ['', '_DIRNAME', '_REPONAME', '_REPOOWNER', '_REPOURL',
                '_VARNAME', '_RECURSIVE', '_DEPTH', '_HOOK']
    for suffix in suffixes:
        key = dep + suffix
        if key in os.environ:
            setup[key] = os.environ[key]
            logger.debug('ENV assignment: %s = %s', key, setup[key])
def call_git(args, **kws):
    """Run 'git <args>' (kwargs go to subprocess.call); return the exit code."""
    workdir = kws.get('cwd', os.getcwd())
    logger.debug("EXEC '%s' in %s", ' '.join(['git'] + args), workdir)
    sys.stdout.flush()
    exitcode = sp.call(['git'] + args, **kws)
    logger.debug('EXEC DONE')
    return exitcode
def call_make(args=[], **kws):
    """Run make (default: parallel with 2 jobs); exit the script on failure.

    Extra keyword arguments: parallel (job count, <=0 = serial) and
    silent (adds -s); the rest is passed to subprocess.call.
    """
    workdir = kws.get('cwd', os.getcwd())
    parallel = kws.pop('parallel', 2)
    silent = kws.pop('silent', False)
    # no parallel make for Base 3.14
    if parallel <= 0 or isbase314:
        makeargs = []
    else:
        makeargs = ['-j{0}'.format(parallel), '-Otarget']
    if silent:
        makeargs += ['-s']
    logger.debug("EXEC '%s' in %s", ' '.join([make] + makeargs + args), workdir)
    sys.stdout.flush()
    exitcode = sp.call([make] + makeargs + args, **kws)
    logger.debug('EXEC DONE')
    if exitcode != 0:
        sys.exit(exitcode)
def get_git_hash(place):
    """Return the commit hash of HEAD in the git work tree at `place`."""
    logger.debug("EXEC 'git log -n1 --pretty=format:%%H' in %s", place)
    sys.stdout.flush()
    cmd = ['git', 'log', '-n1', '--pretty=format:%H']
    head = sp.check_output(cmd, cwd=place).decode()
    logger.debug('EXEC DONE')
    return head
def complete_setup(dep):
    """Fill in all default settings for dependency `dep`.

    Environment overrides are applied first; everything else only fills
    gaps (setdefault), so existing settings are never overwritten.
    """
    set_setup_from_env(dep)
    for key, value in ((dep, 'master'),
                       (dep + "_DIRNAME", dep.lower()),
                       (dep + "_REPONAME", dep.lower()),
                       ('REPOOWNER', 'epics-modules')):
        setup.setdefault(key, value)
    setup.setdefault(dep + "_REPOOWNER", setup['REPOOWNER'])
    setup.setdefault(dep + "_REPOURL", 'https://github.com/{0}/{1}.git'
                     .format(setup[dep + '_REPOOWNER'], setup[dep + '_REPONAME']))
    setup.setdefault(dep + "_VARNAME", dep)
    setup.setdefault(dep + "_RECURSIVE", 'YES')
    setup.setdefault(dep + "_DEPTH", -1)
# add_dependency(dep, tag)
#
# Add a dependency to the cache area:
# - check out (recursive if configured) in the CACHE area unless it already exists and the
#   required commit has been built
# - Defaults:
#   $dep_DIRNAME = lower case ($dep)
#   $dep_REPONAME = lower case ($dep)
#   $dep_REPOURL = GitHub / $dep_REPOOWNER (or $REPOOWNER or epics-modules) / $dep_REPONAME .git
#   $dep_VARNAME = $dep
#   $dep_DEPTH = 5
#   $dep_RECURSIVE = 1/YES (0/NO for a flat clone)
# - Add $dep_VARNAME line to the RELEASE.local file in the cache area (unless already there)
# - Add full path to $modules_to_compile
def add_dependency(dep):
    """Check out dependency `dep` into the cache area (if needed) and
    register it in RELEASE.local; raises RuntimeError on bad settings."""
    recurse = str(setup[dep + '_RECURSIVE']).lower()
    # BUGFIX: the original chained 'not in' tests made the RuntimeError
    # branch unreachable and silently treated any invalid value as recursive
    if recurse in ('0', 'no'):
        recursearg = []
    elif recurse in ('1', 'yes'):
        recursearg = ["--recursive"]
    else:
        raise RuntimeError("Invalid value for {}_RECURSIVE='{}' not 0/NO/1/YES".format(dep, recurse))
    deptharg = {
        '-1': ['--depth', '5'],   # default: shallow clone of 5 commits
        '0': [],                  # 0 = full clone
    }.get(str(setup[dep + '_DEPTH']), ['--depth', str(setup[dep + '_DEPTH'])])
    tag = setup[dep]
    logger.debug('Adding dependency %s with tag %s', dep, setup[dep])

    # determine if dep points to a valid release or branch
    if call_git(['ls-remote', '--quiet', '--exit-code', '--refs', setup[dep + '_REPOURL'], tag]):
        raise RuntimeError("{0}{1} is neither a tag nor a branch name for {2} ({3}){4}"
                           .format(ANSI_RED, tag, dep, setup[dep + '_REPOURL'], ANSI_RESET))
    dirname = setup[dep + '_DIRNAME'] + '-{0}'.format(tag)
    place = os.path.join(cachedir, dirname)
    checked_file = os.path.join(place, "checked_out")

    if os.path.isdir(place):
        logger.debug('Dependency %s: directory %s exists, comparing checked-out commit', dep, place)
        # check HEAD commit against the hash in marker file
        if os.path.exists(checked_file):
            with open(checked_file, 'r') as bfile:
                checked_out = bfile.read().strip()
        else:
            checked_out = 'never'
        head = get_git_hash(place)
        logger.debug('Found checked_out commit %s, git head is %s', checked_out, head)
        if head != checked_out:
            logger.debug('Dependency %s out of date - removing', dep)
            shutil.rmtree(place, onerror=remove_readonly)
        else:
            print('Found {0} of dependency {1} up-to-date in {2}'.format(tag, dep, place))
            sys.stdout.flush()

    if not os.path.isdir(place):
        if not os.path.isdir(cachedir):
            os.makedirs(cachedir)
        # clone dependency
        print('Cloning {0} of dependency {1} into {2}'
              .format(tag, dep, place))
        sys.stdout.flush()
        call_git(['clone', '--quiet'] + deptharg + recursearg + ['--branch', tag, setup[dep + '_REPOURL'], dirname], cwd=cachedir)
        sp.check_call(['git', 'log', '-n1'], cwd=place)
        modules_to_compile.append(place)
        if dep == 'BASE':
            # add MSI 1.7 to Base 3.14
            versionfile = os.path.join(place, 'configure', 'CONFIG_BASE_VERSION')
            if os.path.exists(versionfile):
                with open(versionfile) as f:
                    if 'BASE_3_14=YES' in f.read():
                        print('Adding MSI 1.7 to {0}'.format(place))
                        sys.stdout.flush()
                        sp.check_call(['patch', '-p1', '-i', os.path.join(ciscriptsdir, 'add-msi-to-314.patch')],
                                      cwd=place)
        else:
            # force including RELEASE.local for non-base modules by overwriting their configure/RELEASE
            release = os.path.join(place, "configure", "RELEASE")
            if os.path.exists(release):
                with open(release, 'w') as fout:
                    print('-include $(TOP)/../RELEASE.local', file=fout)
        # run hook if defined
        if dep + '_HOOK' in setup:
            hook = os.path.join(place, setup[dep + '_HOOK'])
            if os.path.exists(hook):
                print('Running hook {0} in {1}'.format(setup[dep + '_HOOK'], place))
                sys.stdout.flush()
                sp.check_call(hook, shell=True, cwd=place)
        # write checked out commit hash to marker file
        head = get_git_hash(place)
        logger.debug('Writing hash of checked-out dependency (%s) to marker file', head)
        with open(checked_file, "w") as fout:
            print(head, file=fout)
    update_release_local(setup[dep + "_VARNAME"], place)
def setup_for_build(args):
    """Prepare the environment for building: set EPICS_HOST_ARCH from
    PLATFORM/CONFIGURATION/CMP, extend PATH (compiler, module DLLs, extra
    paths from `args.paths`), and detect a Base 3.14 dependency."""
    global make, isbase314
    dllpaths = []
    # there is no combined static and debug EPICS_HOST_ARCH target,
    # so a combined debug and static target will appear to be just static
    # but debug will have been specified in CONFIG_SITE by prepare()
    hostarchsuffix = ''
    if re.search('debug', os.environ['CONFIGURATION']):
        hostarchsuffix = '-debug'
    if re.search('static', os.environ['CONFIGURATION']):
        hostarchsuffix = '-static'
    if os.environ['PLATFORM'].lower() == 'x86':
        os.environ['EPICS_HOST_ARCH'] = 'win32-x86' + hostarchsuffix
    elif os.environ['PLATFORM'].lower() == 'x64':
        os.environ['EPICS_HOST_ARCH'] = 'windows-x64' + hostarchsuffix
    if os.environ['CMP'] == 'vs2019':
        # put strawberry perl in the PATH
        os.environ['PATH'] = os.pathsep.join([os.path.join(r'C:\Strawberry\perl\site\bin'),
                                              os.path.join(r'C:\Strawberry\perl\bin'),
                                              os.environ['PATH']])
    if os.environ['CMP'] == 'mingw':
        if 'INCLUDE' not in os.environ:
            os.environ['INCLUDE'] = ''
        if os.environ['PLATFORM'].lower() == 'x86':
            os.environ['EPICS_HOST_ARCH'] = 'win32-x86-mingw'
            os.environ['INCLUDE'] = os.pathsep.join([r'C:\mingw-w64\i686-6.3.0-posix-dwarf-rt_v5-rev1\mingw32\include',
                                                     os.environ['INCLUDE']])
            os.environ['PATH'] = os.pathsep.join([r'C:\mingw-w64\i686-6.3.0-posix-dwarf-rt_v5-rev1\mingw32\bin',
                                                  os.environ['PATH']])
        elif os.environ['PLATFORM'].lower() == 'x64':
            os.environ['EPICS_HOST_ARCH'] = 'windows-x64-mingw'
            os.environ['INCLUDE'] = os.pathsep.join([r'C:\mingw-w64\x86_64-8.1.0-posix-seh-rt_v6-rev0\mingw64\include',
                                                     os.environ['INCLUDE']])
            os.environ['PATH'] = os.pathsep.join([r'C:\mingw-w64\x86_64-8.1.0-posix-seh-rt_v6-rev0\mingw64\bin',
                                                  os.environ['PATH']])
    make = os.path.join(toolsdir, 'make.exe')
    base_place = '.'
    if not building_base:
        with open(os.path.join(cachedir, 'RELEASE.local'), 'r') as f:
            lines = f.readlines()
        for line in lines:
            # BUGFIX: split on the first '=' only, so locations containing
            # '=' do not break the 2-tuple unpacking
            (mod, place) = line.strip().split('=', 1)
            bindir = os.path.join(place, 'bin', os.environ['EPICS_HOST_ARCH'])
            if os.path.isdir(bindir):
                dllpaths.append(bindir)
            if mod == 'EPICS_BASE':
                base_place = place
    cfg_base_version = os.path.join(base_place, 'configure', 'CONFIG_BASE_VERSION')
    if os.path.exists(cfg_base_version):
        with open(cfg_base_version) as myfile:
            if 'BASE_3_14=YES' in myfile.read():
                isbase314 = True
    bindir = os.path.join(os.getcwd(), 'bin', os.environ['EPICS_HOST_ARCH'])
    if os.path.isdir(bindir):
        dllpaths.append(bindir)
    os.environ['PATH'] = os.pathsep.join(dllpaths + [os.environ['PATH']])
    # apparently %CD% is handled automagically
    os.environ['TOP'] = os.getcwd()
    addpaths = []
    for path in args.paths:
        try:
            addpaths.append(path.format(**os.environ))
        except KeyError:
            print('Environment')
            for K, V in os.environ.items():
                print(' ', K, '=', repr(V))
            raise
    os.environ['PATH'] = os.pathsep.join([os.environ['PATH']] + addpaths)
def prepare(args):
    """'prepare' subcommand: load the setup files, clone and build the
    dependency modules, configure the EPICS build system and install
    the Make tool on the AppVeyor worker.

    NOTE(review): relies on module-level globals defined earlier in this
    file (building_base, cachedir, places, setup, toolsdir, zip7, make,
    silent_dep_builds, ANSI_* colors) -- confirm against full script.
    """
    host_info()
    print('{0}Loading setup files{1}'.format(ANSI_YELLOW, ANSI_RESET))
    # 'defaults' is always loaded first; an explicit SET file may override it
    source_set('defaults')
    if 'SET' in os.environ:
        source_set(os.environ['SET'])
    # fill in derived settings for every configured dependency module
    [complete_setup(mod) for mod in modlist()]
    logger.debug('Loaded setup')
    kvs = list(setup.items())
    kvs.sort()
    [logger.debug(' %s = "%s"', *kv) for kv in kvs]
    # we're working with tags (detached heads) a lot: suppress advice
    call_git(['config', '--global', 'advice.detachedHead', 'false'])
    print('{0}Checking/cloning dependencies{1}'.format(ANSI_YELLOW, ANSI_RESET))
    sys.stdout.flush()
    [add_dependency(mod) for mod in modlist()]
    if not building_base:
        # copy the generated RELEASE.local into the module under test
        # (into configure/ if present, else into the checkout top)
        if os.path.isdir('configure'):
            targetdir = 'configure'
        else:
            targetdir = '.'
        shutil.copy(os.path.join(cachedir, 'RELEASE.local'), targetdir)
    print('{0}Configuring EPICS build system{1}'.format(ANSI_YELLOW, ANSI_RESET))
    # append static/debug switches to Base's CONFIG_SITE as requested
    # through the CONFIGURATION environment variable
    with open(os.path.join(places['EPICS_BASE'], 'configure', 'CONFIG_SITE'), 'a') as config_site:
        if re.search('static', os.environ['CONFIGURATION']):
            config_site.write('SHARED_LIBRARIES=NO\n')
            config_site.write('STATIC_BUILD=YES\n')
            linktype = 'static'
        else:
            linktype = 'dynamic (DLL)'
        if re.search('debug', os.environ['CONFIGURATION']):
            config_site.write('HOST_OPT=NO\n')
            optitype = 'debug'
        else:
            optitype = 'optimized'
    print('EPICS Base build system set up for {0} build with {1} linking'
          .format(optitype, linktype))
    if not os.path.isdir(toolsdir):
        os.makedirs(toolsdir)
    makever = '4.2.1'
    # download and unpack GNU Make once; cached across builds in toolsdir
    if not os.path.exists(os.path.join(toolsdir, 'make.exe')):
        print('Installing Make 4.2.1 from ANL web site')
        sys.stdout.flush()
        sp.check_call(['curl', '-fsS', '--retry', '3', '-o', 'make-{0}.zip'.format(makever),
                       'https://epics.anl.gov/download/tools/make-{0}-win64.zip'.format(makever)],
                      cwd=toolsdir)
        sp.check_call([zip7, 'e', 'make-{0}.zip'.format(makever)], cwd=toolsdir)
        os.remove(os.path.join(toolsdir, 'make-{0}.zip'.format(makever)))
    setup_for_build(args)
    # log the toolchain versions for debugging build issues
    print('{0}EPICS_HOST_ARCH = {1}{2}'.format(ANSI_CYAN, os.environ['EPICS_HOST_ARCH'], ANSI_RESET))
    print('{0}$ {1} --version{2}'.format(ANSI_CYAN, make, ANSI_RESET))
    sys.stdout.flush()
    call_make(['--version'], parallel=0)
    print('{0}$ perl --version{1}'.format(ANSI_CYAN, ANSI_RESET))
    sys.stdout.flush()
    sp.check_call(['perl', '--version'])
    if os.environ['CMP'] == 'mingw':
        print('{0}$ gcc --version{1}'.format(ANSI_CYAN, ANSI_RESET))
        sys.stdout.flush()
        sp.check_call(['gcc', '--version'])
    else:
        print('{0}$ cl{1}'.format(ANSI_CYAN, ANSI_RESET))
        sys.stdout.flush()
        sp.check_call(['cl'])
    if not building_base:
        # build every dependency module, then print a summary table
        for mod in modlist():
            place = places[setup[mod+"_VARNAME"]]
            print('{0}Building dependency {1} in {2}{3}'.format(ANSI_YELLOW, mod, place, ANSI_RESET))
            call_make(cwd=place, silent=silent_dep_builds)
        print('{0}Dependency module information{1}'.format(ANSI_CYAN, ANSI_RESET))
        print('Module Tag Binaries Commit')
        print(100 * '-')
        for mod in modlist():
            commit = sp.check_output(['git', 'log', '-n1', '--oneline'], cwd=places[setup[mod+"_VARNAME"]]).strip()
            print("%-10s %-12s %-11s %s" % (mod, setup[mod], 'rebuilt', commit))
        print('{0}Contents of RELEASE.local{1}'.format(ANSI_CYAN, ANSI_RESET))
        with open(os.path.join(cachedir, 'RELEASE.local'), 'r') as f:
            print(f.read().strip())
def build(args):
    """'build' subcommand: run make on the main module."""
    setup_for_build(args)
    banner = '{0}Building the main module{1}'.format(ANSI_YELLOW, ANSI_RESET)
    print(banner)
    call_make(args.makeargs)
def test(args):
setup_for_build(args)
print('{0}Running the main module tests{1}'.format(ANSI_YELLOW, ANSI_RESET))
call_make(['tapfiles'])
call_make(['test-results'], parallel=0, silent=True)
def doExec(args):
    """'exec' subcommand: run a user command in the prepared (vcvars) environment."""
    setup_for_build(args)
    # expose the ci-managed make binary to the user command
    os.environ['MAKE'] = make
    print('Execute command {}'.format(args.cmd))
    sys.stdout.flush()
    command_line = ' '.join(args.cmd)
    sp.check_call(command_line, shell=True)
def with_vcvars(cmd):
    '''Re-execute this script through a generated trampoline batch file
    that first calls the matching vcvarsall.bat, so MSVC is on PATH.
    '''
    compiler = os.environ['CMP']
    # cf. https://docs.microsoft.com/en-us/cpp/build/building-on-the-command-line
    arch_map = {
        'x86': 'x86', # 'amd64_x86' ??
        'x64': 'amd64',
    }
    info = {
        'python': sys.executable,
        'self': sys.argv[0],
        'cmd': cmd,
        'arch': arch_map[os.environ['PLATFORM'].lower()], # 'x86' or 'x64'
        'vcvars': vcvars_table[compiler],
    }
    script = '''
call "{vcvars}" {arch}
"{python}" "{self}" {cmd}
'''.format(**info)
    logger.debug('----- Creating vcvars-trampoline.bat -----')
    for script_line in script.split('\n'):
        logger.debug(script_line)
    logger.debug('----- snip -----')
    with open('vcvars-trampoline.bat', 'w') as batfile:
        batfile.write(script)
    print('{0}Calling vcvars-trampoline.bat to set environment for {1} on {2}{3}'
          .format(ANSI_YELLOW, compiler, os.environ['PLATFORM'], ANSI_RESET))
    sys.stdout.flush()
    status = sp.call('vcvars-trampoline.bat', shell=True)
    if status != 0:
        sys.exit(status)
def getargs():
    '''Build the command line parser for this script.

    Returns an ArgumentParser with the global options --no-vcvars and
    --add-path and the subcommands prepare, build, test and exec.
    '''
    from argparse import ArgumentParser, REMAINDER
    P = ArgumentParser()
    P.add_argument('--no-vcvars', dest='vcvars', default=True, action='store_false',
                   help='Assume vcvarsall.bat has already been run')
    # '%%' because argparse applies %-formatting when rendering help text;
    # a literal '%PATH%' raises ValueError as soon as --help is printed
    P.add_argument('--add-path', dest='paths', default=[], action='append',
                   help='Append directory to %%PATH%%. Expands {ENVVAR}')
    SP = P.add_subparsers()
    CMD = SP.add_parser('prepare')
    CMD.set_defaults(func=prepare)
    CMD = SP.add_parser('build')
    CMD.add_argument('makeargs', nargs=REMAINDER)
    CMD.set_defaults(func=build)
    CMD = SP.add_parser('test')
    CMD.set_defaults(func=test)
    CMD = SP.add_parser('exec')
    CMD.add_argument('cmd', nargs=REMAINDER)
    CMD.set_defaults(func=doExec)
    return P
def main(raw):
    """Entry point: parse arguments and dispatch to the chosen subcommand,
    re-executing through vcvars first when an MSVC compiler is selected."""
    global silent_dep_builds
    args = getargs().parse_args(raw)
    if os.environ.get('VV') == '1':
        logging.basicConfig(level=logging.DEBUG)
        silent_dep_builds = False
    if args.vcvars and os.environ['CMP'].startswith('vs'):
        # re-exec with MSVC in PATH
        with_vcvars(' '.join(['--no-vcvars'] + raw))
    else:
        args.func(args)
if __name__=='__main__':
    # script entry point: pass the command line (without the script name)
    main(sys.argv[1:])

View File

@@ -30,7 +30,7 @@
#SNCSEQ = $(MODULES)/seq-ver #SNCSEQ = $(MODULES)/seq-ver
# EPICS_BASE should appear last so earlier modules can override stuff: # EPICS_BASE should appear last so earlier modules can override stuff:
EPICS_BASE = /home/ralph/work/EPICS/V3/base-3.15.6 EPICS_BASE = /path/to/base/must/be/set/in/a/RELEASE.local/file
# Set RULES here if you want to use build rules from somewhere # Set RULES here if you want to use build rules from somewhere
# other than EPICS_BASE: # other than EPICS_BASE:

View File

@@ -33,3 +33,5 @@ STREAM_REPOOWNER=paulscherrerinstitute
# busy # busy
# sscan # sscan
# iocStats # iocStats
# motor
# ipac

View File

@@ -9,6 +9,9 @@ DB += dbExample2.db
DB += dbSubExample.db DB += dbSubExample.db
DB += user.substitutions DB += user.substitutions
# Host-side expansion of substitutions file with MSI
DB += dbExample3.db
# If <anyname>.db template is not named <anyname>*.template add # If <anyname>.db template is not named <anyname>*.template add
# <anyname>_TEMPLATE = <templatename> # <anyname>_TEMPLATE = <templatename>

View File

@@ -0,0 +1,8 @@
# Example host-side substitutions file
file dbExample2.db {
pattern { user, no, scan }
{ "ralph", 4, "1 second" }
{ "ralph", 5, "2 second" }
{ "ralph", 6, "5 second" }
}

View File

@@ -4,5 +4,10 @@ DIRS := $(DIRS) $(filter-out $(DIRS), $(wildcard *src*))
DIRS := $(DIRS) $(filter-out $(DIRS), $(wildcard *Src*)) DIRS := $(DIRS) $(filter-out $(DIRS), $(wildcard *Src*))
DIRS := $(DIRS) $(filter-out $(DIRS), $(wildcard *db*)) DIRS := $(DIRS) $(filter-out $(DIRS), $(wildcard *db*))
DIRS := $(DIRS) $(filter-out $(DIRS), $(wildcard *Db*)) DIRS := $(DIRS) $(filter-out $(DIRS), $(wildcard *Db*))
DIRS := $(DIRS) test
test_DEPEND_DIRS += src
include $(TOP)/configure/RULES_DIRS include $(TOP)/configure/RULES_DIRS

View File

@@ -4,6 +4,9 @@ include $(TOP)/configure/CONFIG
#---------------------------------------- #----------------------------------------
# ADD MACRO DEFINITIONS BELOW HERE # ADD MACRO DEFINITIONS BELOW HERE
# use the new RSET definition
USR_CPPFLAGS += -DUSE_TYPED_RSET
# xxxRecord.h will be created from xxxRecord.dbd # xxxRecord.h will be created from xxxRecord.dbd
DBDINC += xxxRecord DBDINC += xxxRecord

View File

@@ -25,111 +25,111 @@
/* Create RSET - Record Support Entry Table */ /* Create RSET - Record Support Entry Table */
#define report NULL #define report NULL
#define initialize NULL #define initialize NULL
static long init_record(); static long init_record(struct dbCommon *, int);
static long process(); static long process(struct dbCommon *);
#define special NULL #define special NULL
#define get_value NULL #define get_value NULL
#define cvt_dbaddr NULL #define cvt_dbaddr NULL
#define get_array_info NULL #define get_array_info NULL
#define put_array_info NULL #define put_array_info NULL
static long get_units(); static long get_units(DBADDR *, char *);
static long get_precision(); static long get_precision(const DBADDR *, long *);
#define get_enum_str NULL #define get_enum_str NULL
#define get_enum_strs NULL #define get_enum_strs NULL
#define put_enum_str NULL #define put_enum_str NULL
static long get_graphic_double(); static long get_graphic_double(DBADDR *, struct dbr_grDouble *);
static long get_control_double(); static long get_control_double(DBADDR *, struct dbr_ctrlDouble *);
static long get_alarm_double(); static long get_alarm_double(DBADDR *, struct dbr_alDouble *);
rset xxxRSET={ rset xxxRSET={
RSETNUMBER, RSETNUMBER,
report, report,
initialize, initialize,
init_record, init_record,
process, process,
special, special,
get_value, get_value,
cvt_dbaddr, cvt_dbaddr,
get_array_info, get_array_info,
put_array_info, put_array_info,
get_units, get_units,
get_precision, get_precision,
get_enum_str, get_enum_str,
get_enum_strs, get_enum_strs,
put_enum_str, put_enum_str,
get_graphic_double, get_graphic_double,
get_control_double, get_control_double,
get_alarm_double get_alarm_double
}; };
epicsExportAddress(rset,xxxRSET); epicsExportAddress(rset,xxxRSET);
typedef struct xxxset { /* xxx input dset */ typedef struct xxxset { /* xxx input dset */
long number; long number;
DEVSUPFUN dev_report; DEVSUPFUN dev_report;
DEVSUPFUN init; DEVSUPFUN init;
DEVSUPFUN init_record; /*returns: (-1,0)=>(failure,success)*/ DEVSUPFUN init_record; /*returns: (-1,0)=>(failure,success)*/
DEVSUPFUN get_ioint_info; DEVSUPFUN get_ioint_info;
DEVSUPFUN read_xxx; DEVSUPFUN read_xxx;
}xxxdset; }xxxdset;
static void checkAlarms(xxxRecord *prec); static void checkAlarms(xxxRecord *prec);
static void monitor(xxxRecord *prec); static void monitor(xxxRecord *prec);
static long init_record(void *precord,int pass) static long init_record(struct dbCommon *pcommon, int pass)
{ {
xxxRecord *prec = (xxxRecord *)precord; xxxRecord *prec = (xxxRecord *)pcommon;
xxxdset *pdset; xxxdset *pdset;
long status; long status;
if (pass==0) return(0); if (pass==0) return(0);
if(!(pdset = (xxxdset *)(prec->dset))) { if(!(pdset = (xxxdset *)(prec->dset))) {
recGblRecordError(S_dev_noDSET,(void *)prec,"xxx: init_record"); recGblRecordError(S_dev_noDSET,(void *)prec,"xxx: init_record");
return(S_dev_noDSET); return(S_dev_noDSET);
} }
/* must have read_xxx function defined */ /* must have read_xxx function defined */
if( (pdset->number < 5) || (pdset->read_xxx == NULL) ) { if( (pdset->number < 5) || (pdset->read_xxx == NULL) ) {
recGblRecordError(S_dev_missingSup,(void *)prec,"xxx: init_record"); recGblRecordError(S_dev_missingSup,(void *)prec,"xxx: init_record");
return(S_dev_missingSup); return(S_dev_missingSup);
} }
if( pdset->init_record ) { if( pdset->init_record ) {
if((status=(*pdset->init_record)(prec))) return(status); if((status=(*pdset->init_record)(prec))) return(status);
} }
return(0); return(0);
} }
static long process(void *precord) static long process(struct dbCommon *pcommon)
{ {
xxxRecord *prec = (xxxRecord *)precord; xxxRecord *prec = (xxxRecord *)pcommon;
xxxdset *pdset = (xxxdset *)(prec->dset); xxxdset *pdset = (xxxdset *)(prec->dset);
long status; long status;
unsigned char pact=prec->pact; unsigned char pact=prec->pact;
if( (pdset==NULL) || (pdset->read_xxx==NULL) ) { if( (pdset==NULL) || (pdset->read_xxx==NULL) ) {
prec->pact=TRUE; prec->pact=TRUE;
recGblRecordError(S_dev_missingSup,(void *)prec,"read_xxx"); recGblRecordError(S_dev_missingSup,(void *)prec,"read_xxx");
return(S_dev_missingSup); return(S_dev_missingSup);
} }
/* pact must not be set until after calling device support */ /* pact must not be set until after calling device support */
status=(*pdset->read_xxx)(prec); status=(*pdset->read_xxx)(prec);
/* check if device support set pact */ /* check if device support set pact */
if ( !pact && prec->pact ) return(0); if ( !pact && prec->pact ) return(0);
prec->pact = TRUE; prec->pact = TRUE;
recGblGetTimeStamp(prec); recGblGetTimeStamp(prec);
/* check for alarms */ /* check for alarms */
checkAlarms(prec); checkAlarms(prec);
/* check event list */ /* check event list */
monitor(prec); monitor(prec);
/* process the forward scan link record */ /* process the forward scan link record */
recGblFwdLink(prec); recGblFwdLink(prec);
prec->pact=FALSE; prec->pact=FALSE;
return(status); return(status);
} }
static long get_units(DBADDR *paddr, char *units) static long get_units(DBADDR *paddr, char *units)
{ {
xxxRecord *prec=(xxxRecord *)paddr->precord; xxxRecord *prec=(xxxRecord *)paddr->precord;
@@ -138,7 +138,7 @@ static long get_units(DBADDR *paddr, char *units)
return(0); return(0);
} }
static long get_precision(DBADDR *paddr, long *precision) static long get_precision(const DBADDR *paddr, long *precision)
{ {
xxxRecord *prec=(xxxRecord *)paddr->precord; xxxRecord *prec=(xxxRecord *)paddr->precord;
@@ -176,8 +176,8 @@ static long get_control_double(DBADDR *paddr,struct dbr_ctrlDouble *pcd)
|| fieldIndex == xxxRecordHIGH || fieldIndex == xxxRecordHIGH
|| fieldIndex == xxxRecordLOW || fieldIndex == xxxRecordLOW
|| fieldIndex == xxxRecordLOLO) { || fieldIndex == xxxRecordLOLO) {
pcd->upper_ctrl_limit = prec->hopr; pcd->upper_ctrl_limit = prec->hopr;
pcd->lower_ctrl_limit = prec->lopr; pcd->lower_ctrl_limit = prec->lopr;
} else recGblGetControlDouble(paddr,pcd); } else recGblGetControlDouble(paddr,pcd);
return(0); return(0);
} }
@@ -195,79 +195,79 @@ static long get_alarm_double(DBADDR *paddr,struct dbr_alDouble *pad)
} else recGblGetAlarmDouble(paddr,pad); } else recGblGetAlarmDouble(paddr,pad);
return(0); return(0);
} }
static void checkAlarms(xxxRecord *prec) static void checkAlarms(xxxRecord *prec)
{ {
double val, hyst, lalm; double val, hyst, lalm;
float hihi, high, low, lolo; float hihi, high, low, lolo;
unsigned short hhsv, llsv, hsv, lsv; unsigned short hhsv, llsv, hsv, lsv;
if(prec->udf == TRUE ){ if(prec->udf == TRUE ){
recGblSetSevr(prec,UDF_ALARM,INVALID_ALARM); recGblSetSevr(prec,UDF_ALARM,INVALID_ALARM);
return; return;
} }
hihi = prec->hihi; lolo = prec->lolo; high = prec->high; low = prec->low; hihi = prec->hihi; lolo = prec->lolo; high = prec->high; low = prec->low;
hhsv = prec->hhsv; llsv = prec->llsv; hsv = prec->hsv; lsv = prec->lsv; hhsv = prec->hhsv; llsv = prec->llsv; hsv = prec->hsv; lsv = prec->lsv;
val = prec->val; hyst = prec->hyst; lalm = prec->lalm; val = prec->val; hyst = prec->hyst; lalm = prec->lalm;
/* alarm condition hihi */ /* alarm condition hihi */
if (hhsv && (val >= hihi || ((lalm==hihi) && (val >= hihi-hyst)))){ if (hhsv && (val >= hihi || ((lalm==hihi) && (val >= hihi-hyst)))){
if (recGblSetSevr(prec,HIHI_ALARM,prec->hhsv)) prec->lalm = hihi; if (recGblSetSevr(prec,HIHI_ALARM,prec->hhsv)) prec->lalm = hihi;
return; return;
} }
/* alarm condition lolo */ /* alarm condition lolo */
if (llsv && (val <= lolo || ((lalm==lolo) && (val <= lolo+hyst)))){ if (llsv && (val <= lolo || ((lalm==lolo) && (val <= lolo+hyst)))){
if (recGblSetSevr(prec,LOLO_ALARM,prec->llsv)) prec->lalm = lolo; if (recGblSetSevr(prec,LOLO_ALARM,prec->llsv)) prec->lalm = lolo;
return; return;
} }
/* alarm condition high */ /* alarm condition high */
if (hsv && (val >= high || ((lalm==high) && (val >= high-hyst)))){ if (hsv && (val >= high || ((lalm==high) && (val >= high-hyst)))){
if (recGblSetSevr(prec,HIGH_ALARM,prec->hsv)) prec->lalm = high; if (recGblSetSevr(prec,HIGH_ALARM,prec->hsv)) prec->lalm = high;
return; return;
} }
/* alarm condition low */ /* alarm condition low */
if (lsv && (val <= low || ((lalm==low) && (val <= low+hyst)))){ if (lsv && (val <= low || ((lalm==low) && (val <= low+hyst)))){
if (recGblSetSevr(prec,LOW_ALARM,prec->lsv)) prec->lalm = low; if (recGblSetSevr(prec,LOW_ALARM,prec->lsv)) prec->lalm = low;
return; return;
} }
/* we get here only if val is out of alarm by at least hyst */ /* we get here only if val is out of alarm by at least hyst */
prec->lalm = val; prec->lalm = val;
return; return;
} }
static void monitor(xxxRecord *prec) static void monitor(xxxRecord *prec)
{ {
unsigned short monitor_mask; unsigned short monitor_mask;
double delta; double delta;
monitor_mask = recGblResetAlarms(prec); monitor_mask = recGblResetAlarms(prec);
/* check for value change */ /* check for value change */
delta = prec->mlst - prec->val; delta = prec->mlst - prec->val;
if(delta<0.0) delta = -delta; if(delta<0.0) delta = -delta;
if (delta > prec->mdel) { if (delta > prec->mdel) {
/* post events for value change */ /* post events for value change */
monitor_mask |= DBE_VALUE; monitor_mask |= DBE_VALUE;
/* update last value monitored */ /* update last value monitored */
prec->mlst = prec->val; prec->mlst = prec->val;
} }
/* check for archive change */ /* check for archive change */
delta = prec->alst - prec->val; delta = prec->alst - prec->val;
if(delta<0.0) delta = -delta; if(delta<0.0) delta = -delta;
if (delta > prec->adel) { if (delta > prec->adel) {
/* post events on value field for archive change */ /* post events on value field for archive change */
monitor_mask |= DBE_LOG; monitor_mask |= DBE_LOG;
/* update last archive value monitored */ /* update last archive value monitored */
prec->alst = prec->val; prec->alst = prec->val;
} }
/* send out monitors connected to the value field */ /* send out monitors connected to the value field */
if (monitor_mask){ if (monitor_mask){
db_post_events(prec,&prec->val,monitor_mask); db_post_events(prec,&prec->val,monitor_mask);
} }
return; return;
} }

58
exampleApp/test/Makefile Normal file
View File

@@ -0,0 +1,58 @@
#*************************************************************************
# Copyright (c) 2020 ITER Organization.
# EPICS BASE is distributed subject to a Software License Agreement found
# in the file LICENSE that is included with this distribution.
#*************************************************************************
CURDIR := $(patsubst %/,%,$(dir $(lastword $(MAKEFILE_LIST))))
TOP = ../..
include $(TOP)/configure/CONFIG
# use the new RSET definition
USR_CPPFLAGS += -DUSE_TYPED_RSET
TARGETS += $(COMMON_DIR)/exampleTest.dbd
DBDDEPENDS_FILES += exampleTest.dbd$(DEP)
exampleTest_DBD += example.dbd
TESTFILES += $(COMMON_DIR)/exampleTest.dbd
testHarness_SRCS += exampleTest_registerRecordDeviceDriver.cpp
PROD_LIBS += exampleSupport
ifneq ($(SNCSEQ),)
PROD_LIBS += seq pv
endif
PROD_LIBS += $(EPICS_BASE_IOC_LIBS)
TESTPROD_HOST += exampleTest
exampleTest_SRCS += exampleTest.c
exampleTest_SRCS += exampleTest_registerRecordDeviceDriver.cpp
testHarness_SRCS += exampleTest.c
TESTFILES += ../../../db/dbExample1.db
TESTS += exampleTest
# This runs all the test programs in a known working order:
testHarness_SRCS += epicsRunExampleTests.c
exampleTestHarness_SRCS += $(testHarness_SRCS)
exampleTestHarness_SRCS_RTEMS += rtemsTestHarness.c
PROD_SRCS_RTEMS += rtemsTestData.c
PROD_vxWorks = exampleTestHarness
PROD_RTEMS = exampleTestHarness
TESTSPEC_vxWorks = exampleTestHarness.munch; epicsRunExampleTests
TESTSPEC_RTEMS = exampleTestHarness.boot; epicsRunExampleTests
TESTSCRIPTS_HOST += $(TESTS:%=%.t)
ifneq ($(filter $(T_A),$(CROSS_COMPILER_RUNTEST_ARCHS)),)
TESTPROD_RTEMS = $(TESTPROD_HOST)
TESTSCRIPTS_RTEMS += $(TESTS:%=%.t)
endif
include $(TOP)/configure/RULES
rtemsTestData.c : $(TESTFILES) $(TOOLS)/epicsMakeMemFs.pl
$(PERL) $(TOOLS)/epicsMakeMemFs.pl $@ epicsRtemsFSImage $(TESTFILES)

View File

@@ -0,0 +1,28 @@
/*************************************************************************\
* Copyright (c) 2011 UChicago Argonne LLC, as Operator of Argonne
* National Laboratory.
* EPICS BASE is distributed subject to a Software License Agreement found
* in file LICENSE that is included with this distribution.
\*************************************************************************/
/*
* Run Example tests as a batch.
*
*/
#include "epicsUnitTest.h"
#include "epicsExit.h"
#include "dbmf.h"
int exampleTest(void);
void epicsRunExampleTests(void)
{
testHarness();
runTest(exampleTest);
dbmfFreeChunks();
epicsExit(0); /* Trigger test harness */
}

View File

@@ -0,0 +1,58 @@
/*************************************************************************\
* Copyright (c) 2020 ITER Organization.
* EPICS BASE is distributed subject to a Software License Agreement found
* in file LICENSE that is included with this distribution.
\*************************************************************************/
/*
* Author: Ralph Lange <ralph.lange@gmx.de>
*/
#include <string.h>
#include <epicsUnitTest.h>
#include <testMain.h>
#include <dbAccess.h>
#include <dbStaticLib.h>
#include <errlog.h>
void exampleTest_registerRecordDeviceDriver(struct dbBase *);
static dbCommon *prec;
/* from Base 3.15 dbUnitTest.c */
static
dbCommon* testdbRecordPtr(const char* pv)
{
DBADDR addr;
if (dbNameToAddr(pv, &addr))
testAbort("Missing record \"%s\"", pv);
return addr.precord;
}
static void testOnce(void)
{
testDiag("check that tests work");
dbReadDatabase(&pdbbase, "exampleTest.dbd", "../O.Common", NULL);
exampleTest_registerRecordDeviceDriver(pdbbase);
dbReadDatabase(&pdbbase, "dbExample1.db", "../../../db", "user=test");
testDiag("Searching for records from example application");
prec = testdbRecordPtr("test:xxxExample");
testOk((prec != NULL), "record test:xxxExample");
prec = testdbRecordPtr("test:aiExample");
testOk((prec != NULL), "record test:aiExample");
}
MAIN(exampleTest)
{
testPlan(2);
testOnce();
return testDone();
}

View File

@@ -0,0 +1,14 @@
/*************************************************************************\
* Copyright (c) 2011 UChicago Argonne LLC, as Operator of Argonne
* National Laboratory.
* EPICS BASE is distributed subject to a Software License Agreement found
* in file LICENSE that is included with this distribution.
\*************************************************************************/
extern void epicsRunExampleTests(void);
int main(int argc, char **argv)
{
epicsRunExampleTests(); /* calls epicsExit(0) */
return 0;
}

43
synApps-6.0.set Normal file
View File

@@ -0,0 +1,43 @@
# Release tags for synApps modules as per synApps-6.0
# see https://github.com/EPICS-synApps/support/blob/21f7fcd0f33cef5d34aacbd4e33511b43398a6dc/assemble_synApps.sh
# also for additional configuration that could be done in hook scripts
ALLENBRADLEY=2.3
ALIVE=R1-1-0
AREA_DETECTOR=R3-3-1
ASYN=R4-33
AUTOSAVE=R5-9
BUSY=R1-7
CALC=R3-7-1
CAMAC=R2-7-1
CAPUTRECORDER=R1-7-1
DAC128V=R2-9
DELAYGEN=R1-2-0
DXP=R5-0
DXPSITORO=R1-1
DEVIOCSTATS=3.1.15
#GALIL=V3-6
IP=R2-19-1
IPAC=2.15
IP330=R2-9
IPUNIDIG=R2-11
LOVE=R3-2-6
LUA=R1-2-2
MCA=R7-7
MEASCOMP=R2-1
MODBUS=R2-11
MOTOR=R6-10-1
OPTICS=R2-13-1
QUADEM=R9-1
SNCSEQ=2.2.5
SOFTGLUE=R2-8-1
SOFTGLUEZYNQ=R2-0-1
SSCAN=R2-11-1
STD=R3-5
STREAM=R2-7-7c
VAC=R1-7
VME=R2-9
YOKOGAWA_DAS=R1-0-0
XXX=R6-0
include synApps-common

44
synApps-6.1.set Normal file
View File

@@ -0,0 +1,44 @@
# Release tags for synApps modules as per synApps-6.1
# see https://github.com/EPICS-synApps/support/blob/cc5adba5b8848c9cb98ab96768d668ae927d8859/assemble_synApps.sh
# also for additional configuration that could be done in hook scripts
#ALLENBRADLEY=2.3
ALIVE=R1-1-1
AREA_DETECTOR=R3-7
ASYN=R4-36
AUTOSAVE=R5-10
BUSY=R1-7-2
CALC=R3-7-3
CAMAC=R2-7-1
CAPUTRECORDER=R1-7-2
DAC128V=R2-9
DELAYGEN=R1-2-1
DXP=R6-0
DXPSITORO=R1-2
DEVIOCSTATS=3.1.16
#ETHERIP=ether_ip-3-1
#GALIL=V3-6
IP=R2-20-1
IPAC=2.15
IP330=R2-9
IPUNIDIG=R2-11
LOVE=R3-2-7
LUA=R2-0
MCA=R7-8
MEASCOMP=R2-3
MODBUS=R3-0
MOTOR=R7-1
OPTICS=R2-13-3
QUADEM=R9-2-1
SNCSEQ=2.2.6
SOFTGLUE=R2-8-2
SOFTGLUEZYNQ=R2-0-2
SSCAN=R2-11-3
STD=R3-6
STREAM=2.8.9
VAC=R1-9
VME=R2-9-2
YOKOGAWA_DAS=R2-0-1
XXX=R6-1
include synApps-common

7
synApps-common.set Normal file
View File

@@ -0,0 +1,7 @@
# Common settings for all synApps releases
DEVIOCSTATS_REPONAME=iocStats
ETHERIP_REPOOWNER=EPICSTools
GALIL_REPOOWNER=motorapp
GALIL_REPONAME=Galil-3-0
AREADETECTOR_REPOOWNER=areaDetector

View File

@@ -5,6 +5,18 @@
# SET=test00 in .travis.yml runs the tests in this script # SET=test00 in .travis.yml runs the tests in this script
# all other jobs are started as compile jobs # all other jobs are started as compile jobs
# The following if clause can be removed for ci-scripts major version 3
if [ "$TRAVIS_OS_NAME" == osx -a "$BASH_VERSINFO" -lt 4 ]
then
brew install bash
if [ $(/usr/local/bin/bash -c 'echo $BASH_VERSINFO') -lt 4 ]
then
echo "Failed to install a recent bash" >&2
exit 1
fi
exec /usr/local/bin/bash $0 "$@"
fi
# Set VV empty in .travis.yml to make scripts terse # Set VV empty in .travis.yml to make scripts terse
[ "${VV:-1}" ] && set -x [ "${VV:-1}" ] && set -x
@@ -16,8 +28,8 @@ UTILS_UNITTEST=1
readlinkf() { perl -MCwd -e 'print Cwd::abs_path shift' "$1"; } readlinkf() { perl -MCwd -e 'print Cwd::abs_path shift' "$1"; }
# test utilities # test utilities
die() { fail() {
echo "${ANSI_RED}$1${ANSI_RESET}" echo -e "${ANSI_RED}$1${ANSI_RESET}"
exit 1 exit 1
} }
@@ -27,7 +39,7 @@ fn_exists() {
repo_exists() { repo_exists() {
DEP=$1 DEP=$1
dep_lc=$(echo $DEP | tr 'A-Z' 'a-z') dep_lc=${DEP,,}
eval dirname=\${${DEP}_DIRNAME:=${dep_lc}} eval dirname=\${${DEP}_DIRNAME:=${dep_lc}}
eval reponame=\${${DEP}_REPONAME:=${dep_lc}} eval reponame=\${${DEP}_REPONAME:=${dep_lc}}
eval repourl=\${${DEP}_REPOURL:="https://github.com/\${${DEP}_REPOOWNER:=${REPOOWNER:-epics-modules}}/${reponame}.git"} eval repourl=\${${DEP}_REPOURL:="https://github.com/\${${DEP}_REPOOWNER:=${REPOOWNER:-epics-modules}}/${reponame}.git"}
@@ -35,69 +47,72 @@ repo_exists() {
git ls-remote --quiet --heads --exit-code $repourl > /dev/null 2>&1 git ls-remote --quiet --heads --exit-code $repourl > /dev/null 2>&1
} }
SETUP_DIRS=$(echo $SETUP_PATH | tr ":" "\n") SETUP_DIRS=${SETUP_PATH//:/ }
SCRIPTDIR=$(dirname $(readlinkf $0))/travis SCRIPTDIR=$(dirname $(readlinkf $0))/travis
CURDIR="$PWD" CURDIR="$PWD"
CACHEDIR="$HOME/.cache" CACHEDIR=${CACHEDIR:-${HOME}/.cache}
[ -e ${CACHEDIR} ] || mkdir -p ${CACHEDIR}
echo "Testing contents of utils.sh" echo "Testing contents of utils.sh"
[ -d "$SCRIPTDIR" ] || die "SCRIPTDIR does not exist" [ -d "$SCRIPTDIR" ] || fail "SCRIPTDIR does not exist"
[ -e "$SCRIPTDIR/utils.sh" ] || die "SCRIPTDIR/utils.sh does not exist" [ -e "$SCRIPTDIR/utils.sh" ] || fail "SCRIPTDIR/utils.sh does not exist"
# source functions # source functions
. $SCRIPTDIR/utils.sh . $SCRIPTDIR/utils.sh
# check for functions # check for functions
fn_exists fold_start || die "function fold_start missing from SCRIPTDIR/utils.sh" fn_exists fold_start || fail "function fold_start missing from SCRIPTDIR/utils.sh"
fn_exists fold_end || die "function fold_end missing from SCRIPTDIR/utils.sh" fn_exists fold_end || fail "function fold_end missing from SCRIPTDIR/utils.sh"
fn_exists source_set || die "function source_set missing from SCRIPTDIR/utils.sh" fn_exists source_set || fail "function source_set missing from SCRIPTDIR/utils.sh"
fn_exists update_release_local || die "function update_release_local missing from SCRIPTDIR/utils.sh" fn_exists update_release_local || fail "function update_release_local missing from SCRIPTDIR/utils.sh"
fn_exists add_dependency || die "function add_dependency missing from SCRIPTDIR/utils.sh" fn_exists add_dependency || fail "function add_dependency missing from SCRIPTDIR/utils.sh"
# test source_set() # test source_set()
###################################################################### ######################################################################
SETUP_DIRS= source_set test01 | grep -q "(SETUP_PATH) is empty" || die "empty search path not detected" SETUP_PATH= source_set test01 | grep -q "(SETUP_PATH) is empty" || fail "empty search path not detected"
source_set xxdoesnotexistxx | grep -q "does not exist" || die "missing setup file not detected" source_set xxdoesnotexistxx | grep -q "does not exist" || fail "missing setup file not detected"
source_set test01 | grep -q "Loading setup file" || die "test01 setup file not found" source_set test01 | grep -q "Loading setup file" || fail "test01 setup file not found"
unset SEEN_SETUPS unset SEEN_SETUPS
export BASE=foo export BASE=foo
source_set test01 source_set test01
[ "$BASE" = "foo" ] || die "preset module BASE version does not override test01 setup file (expected foo got $BASE)" [ "$BASE" = "foo" ] || fail "preset module BASE version does not override test01 setup file (expected foo got $BASE)"
unset SEEN_SETUPS unset SEEN_SETUPS
BASE= BASE=
source_set test02 source_set test02
[ "$BASE" = "foo" ] || die "BASE set in test02 does not override included test01 setup file (expected foo got $BASE)" [ "$BASE" = "foo" ] || fail "BASE set in test02 does not override included test01 setup file (expected foo got $BASE)"
[ "$FOO" = "bar" ] || die "Setting of single word does not work" [ "$FOO" = "bar" ] || fail "Setting of single word does not work"
[ "$FOO2" = "bar bar2" ] || die "Setting of multiple words does not work" [ "$FOO2" = "bar bar2" ] || fail "Setting of multiple words does not work"
[ "$FOO3" = "bar bar2" ] || die "Indented setting of multiple words does not work" [ "$FOO3" = "bar bar2" ] || fail "Indented setting of multiple words does not work"
[ "$SNCSEQ" = "R2-2-7" ] || die "Setup test01 was not included" [ "$SNCSEQ" = "R2-2-7" ] || fail "Setup test01 was not included"
unset SEEN_SETUPS unset SEEN_SETUPS
source_set test03 | grep -q "Ignoring already included setup file" || die "test01 setup file included twice" source_set test03 | grep -q "Ignoring already included setup file" || fail "test01 setup file included twice"
# test default settings file # test default settings file
###################################################################### ######################################################################
echo "Testing default settings for completeness and valid git repo settings" echo "Testing default settings for completeness and valid git repo settings"
[ -e ./defaults.set ] || die "defaults.set does not exist" [ -e ./defaults.set ] || fail "defaults.set does not exist"
source_set defaults source_set defaults
repo_exists BASE || die "Defaults for BASE do not point to a valid git repository at $repourl" repo_exists BASE || fail "Defaults for BASE do not point to a valid git repository at $repourl"
repo_exists PVDATA || die "Defaults for PVDATA do not point to a valid git repository at $repourl" repo_exists PVDATA || fail "Defaults for PVDATA do not point to a valid git repository at $repourl"
repo_exists PVACCESS || die "Defaults for PVACCESS do not point to a valid git repository at $repourl" repo_exists PVACCESS || fail "Defaults for PVACCESS do not point to a valid git repository at $repourl"
repo_exists NTYPES || die "Defaults for NTYPES do not point to a valid git repository at $repourl" repo_exists NTYPES || fail "Defaults for NTYPES do not point to a valid git repository at $repourl"
repo_exists SNCSEQ || die "Defaults for SNCSEQ do not point to a valid git repository at $repourl" repo_exists SNCSEQ || fail "Defaults for SNCSEQ do not point to a valid git repository at $repourl"
repo_exists STREAM || die "Defaults for STREAM do not point to a valid git repository at $repourl" repo_exists STREAM || fail "Defaults for STREAM do not point to a valid git repository at $repourl"
repo_exists ASYN || die "Defaults for STREAM do not point to a valid git repository at $repourl" repo_exists ASYN || fail "Defaults for ASYN do not point to a valid git repository at $repourl"
repo_exists STD || die "Defaults for STD do not point to a valid git repository at $repourl" repo_exists STD || fail "Defaults for STD do not point to a valid git repository at $repourl"
repo_exists CALC || die "Defaults for CALC do not point to a valid git repository at $repourl" repo_exists CALC || fail "Defaults for CALC do not point to a valid git repository at $repourl"
repo_exists AUTOSAVE || die "Defaults for AUTOSAVE do not point to a valid git repository at $repourl" repo_exists AUTOSAVE || fail "Defaults for AUTOSAVE do not point to a valid git repository at $repourl"
repo_exists BUSY || die "Defaults for BUSY do not point to a valid git repository at $repourl" repo_exists BUSY || fail "Defaults for BUSY do not point to a valid git repository at $repourl"
repo_exists SSCAN || die "Defaults for SSCAN do not point to a valid git repository at $repourl" repo_exists SSCAN || fail "Defaults for SSCAN do not point to a valid git repository at $repourl"
repo_exists IOCSTATS || die "Defaults for IOCSTATS do not point to a valid git repository at $repourl" repo_exists IOCSTATS || fail "Defaults for IOCSTATS do not point to a valid git repository at $repourl"
repo_exists MOTOR || fail "Defaults for MOTOR do not point to a valid git repository at $repourl"
repo_exists IPAC || fail "Defaults for IPAC do not point to a valid git repository at $repourl"
# test update_release_local() # test update_release_local()
###################################################################### ######################################################################
@@ -111,34 +126,34 @@ rm -f $release_local
# Set a module # Set a module
update_release_local MOD1 /tmp/mod1 update_release_local MOD1 /tmp/mod1
updated_line="MOD1=/tmp/mod1" updated_line="MOD1=/tmp/mod1"
grep -q "MOD1=" $release_local || die "Line for MOD1 not added to RELEASE.local" grep -q "MOD1=" $release_local || fail "Line for MOD1 not added to RELEASE.local"
existing_line=$(grep "MOD1=" $release_local) existing_line=$(grep "MOD1=" $release_local)
[ "${existing_line}" = "${updated_line}" ] || die "Wrong line for MOD1 in RELEASE.local (expected=\"$updated_line\" found=\"$existing_line\")" [ "${existing_line}" = "${updated_line}" ] || fail "Wrong line for MOD1 in RELEASE.local (expected=\"$updated_line\" found=\"$existing_line\")"
# Set base # Set base
update_release_local EPICS_BASE /tmp/base update_release_local EPICS_BASE /tmp/base
updated_line="EPICS_BASE=/tmp/base" updated_line="EPICS_BASE=/tmp/base"
grep -q "EPICS_BASE=" $release_local || die "Line for EPICS_BASE not added to RELEASE.local" grep -q "EPICS_BASE=" $release_local || fail "Line for EPICS_BASE not added to RELEASE.local"
# Set another module # Set another module
update_release_local MOD2 /tmp/mod2 update_release_local MOD2 /tmp/mod2
updated_line="MOD2=/tmp/mod2" updated_line="MOD2=/tmp/mod2"
grep -q "MOD2=" $release_local || die "Line for MOD2 not added to RELEASE.local" grep -q "MOD2=" $release_local || fail "Line for MOD2 not added to RELEASE.local"
existing_line=$(grep "MOD2=" $release_local) existing_line=$(grep "MOD2=" $release_local)
[ "${existing_line}" = "${updated_line}" ] || die "Wrong line for MOD2 in RELEASE.local (expected=\"$updated_line\" found=\"$existing_line\")" [ "${existing_line}" = "${updated_line}" ] || fail "Wrong line for MOD2 in RELEASE.local (expected=\"$updated_line\" found=\"$existing_line\")"
tail -n 1 $release_local | grep -q "EPICS_BASE=" || die "Line for EPICS_BASE not moved to the end of RELEASE.local" tail -n 1 $release_local | grep -q "EPICS_BASE=" || fail "Line for EPICS_BASE not moved to the end of RELEASE.local"
# Update a module # Update a module
update_release_local MOD1 /tmp/mod1b update_release_local MOD1 /tmp/mod1b
updated_line="MOD1=/tmp/mod1b" updated_line="MOD1=/tmp/mod1b"
grep -q "MOD1=" $release_local || die "Line for MOD1 not present in RELEASE.local" grep -q "MOD1=" $release_local || fail "Line for MOD1 not present in RELEASE.local"
existing_line=$(grep "MOD1=" $release_local) existing_line=$(grep "MOD1=" $release_local)
[ "${existing_line}" = "${updated_line}" ] || die "Wrong line for MOD1 in RELEASE.local (expected=\"$updated_line\" found=\"$existing_line\")" [ "${existing_line}" = "${updated_line}" ] || fail "Wrong line for MOD1 in RELEASE.local (expected=\"$updated_line\" found=\"$existing_line\")"
head -n 1 $release_local | grep -q "MOD1=" || die "Line for MOD1 not at the top of RELEASE.local" head -n 1 $release_local | grep -q "MOD1=" || fail "Line for MOD1 not at the top of RELEASE.local"
tail -n 1 $release_local | grep -q "EPICS_BASE=" || die "Line for EPICS_BASE not moved to the end of RELEASE.local" tail -n 1 $release_local | grep -q "EPICS_BASE=" || fail "Line for EPICS_BASE not moved to the end of RELEASE.local"
# Check that RELEASE.local only contains variable settings # Check that RELEASE.local only contains variable settings
[ $(grep -v -c '[^ =]*=.*' $release_local) -ne 0 ] && die "RELEASE.local contains invalid lines" [ $(grep -v -c '[^ =]*=.*' $release_local) -ne 0 ] && fail "RELEASE.local contains invalid lines"
rm -f $release_local rm -f $release_local
@@ -153,23 +168,65 @@ location=$CACHEDIR/base-R3.15.6
# CAREFUL: order of the following check matters (speeds up the test) # CAREFUL: order of the following check matters (speeds up the test)
# dependency does not exist in the cache # dependency does not exist in the cache
rm -fr $location rm -fr $location; modules_to_compile=
add_dependency BASE R3.15.6 add_dependency BASE R3.15.6
[ -e $location/LICENSE ] || die "Missing dependency was not checked out" [ -e $location/LICENSE ] || fail "Missing dependency was not checked out"
BUILT=$(cat "$location/built") BUILT=$(cat "$location/built")
[ "$BUILT" != "$hash_3_15_6" ] && die "Wrong commit of dependency checked out (expected=\"$hash_3_15_6\" found=\"$BUILT\")" [ "$BUILT" != "$hash_3_15_6" ] && fail "Wrong commit of dependency checked out (expected=\"$hash_3_15_6\" found=\"$BUILT\")"
grep -q "include \$(TOP)/../RELEASE.local" $location/configure/RELEASE && fail "RELEASE in Base includes RELEASE.local"
[ "$do_recompile" ] || fail "do_recompile flag was not set for missing dependency"
echo "$modules_to_compile" | grep -q "$location" || fail "Missing dependency was not set to compile"
# up-to-date dependency does exist in the cache # up-to-date dependency does exist in the cache
( cd $CACHEDIR; git clone --quiet --depth 5 --recursive --branch R3.15.6 https://github.com/epics-base/epics-base.git base-R3.15.6 ) ( cd $CACHEDIR; git clone --quiet --depth 5 --recursive --branch R3.15.6 https://github.com/epics-base/epics-base.git base-R3.15.6 )
rm -f $location/LICENSE rm -f $location/LICENSE
unset do_recompile; modules_to_compile=
add_dependency BASE R3.15.6 add_dependency BASE R3.15.6
[ -e $location/LICENSE ] && die "Existing correct dependency was checked out on top" [ -e $location/LICENSE ] && fail "Existing correct dependency was checked out on top"
[ "$do_recompile" ] && fail "do_recompile flag was set for up-to-date dependency"
echo "$modules_to_compile" | grep -q "$location" && fail "Up-to-date dependency was set to compile"
do_recompile=yes
add_dependency BASE R3.15.6
echo "$modules_to_compile" | grep -q "$location" || fail "Up-to-date module was not set to compile wile do_recompile=yes"
# dependency in the cache is outdated # dependency in the cache is outdated
echo "nottherighthash" > "$location/built" echo "nottherighthash" > "$location/built"
unset do_recompile
add_dependency BASE R3.15.6 add_dependency BASE R3.15.6
[ -e $location/LICENSE ] || die "Outdated dependency was not checked out" [ -e $location/LICENSE ] || fail "Outdated dependency was not checked out"
BUILT=$(cat "$location/built") BUILT=$(cat "$location/built")
[ "$BUILT" != "$hash_3_15_6" ] && die "Wrong commit of dependency checked out (expected=\"$hash_3_15_6\" found=\"$BUILT\")" [ "$BUILT" != "$hash_3_15_6" ] && fail "Wrong commit of dependency checked out (expected=\"$hash_3_15_6\" found=\"$BUILT\")"
[ "$do_recompile" ] || fail "do_recompile flag was not set for outdated dependency"
echo "$modules_to_compile" | grep -q "$location" || fail "Outdated dependency was not set to compile"
rm -fr $location # msi is automatically added to 3.14
rm -fr $location; modules_to_compile=
location=$CACHEDIR/base-R3.14.12.1
rm -fr $location;
add_dependency BASE R3.14.12.1
[ -e $location/src/dbtools/msi.c ] || fail "MSI was not added to Base 3.14"
rm -fr $CACHEDIR/*; modules_to_compile=
# missing inclusion of RELEASE.local in configure/RELEASE
location=$CACHEDIR/std-R3-4
add_dependency STD R3-4
grep -q "include \$(TOP)/../RELEASE.local" $location/configure/RELEASE || fail "Inclusion of RELEASE.local not added to configure/RELEASE"
rm -fr $location; modules_to_compile=
# correct handling of FOO_RECURSIVE setting (https://github.com/epics-base/ci-scripts/issues/25 regression)
export SSCAN_RECURSIVE=NO
add_dependency SSCAN master
add_dependency ASYN master
[ -e $CACHEDIR/sscan-master/.ci/README.md ] && fail "Sscan was checked out recursively despite SSCAN_RECURSIVE=NO"
[ -e $CACHEDIR/asyn-master/.ci/README.md ] || fail "Asyn was not checked out recursively"
rm -fr $CACHEDIR/*; modules_to_compile=
unset SSCAN_RECURSIVE
export ASYN_RECURSIVE=NO
add_dependency SSCAN master
add_dependency ASYN master
[ -e $CACHEDIR/sscan-master/.ci/README.md ] || fail "Sscan was not checked out recursively"
[ -e $CACHEDIR/asyn-master/.ci/README.md ] && fail "Asyn was checked out recursively despite ASYN_RECURSIVE=NO"
rm -fr $CACHEDIR/*

View File

@@ -29,6 +29,13 @@ addons:
- g++-mingw-w64-x86-64 - g++-mingw-w64-x86-64
# for RTEMS cross builds # for RTEMS cross builds
- qemu-system-x86 - qemu-system-x86
homebrew:
packages:
# for all EPICS builds
- bash
# for the sequencer
- re2c
update: true
install: install:
- ./.ci/travis/prepare.sh - ./.ci/travis/prepare.sh
@@ -43,18 +50,21 @@ script:
# Define build jobs # Define build jobs
# Well-known variables to use # Well-known variables to use
# SET source setup file # SET source setup file
# EXTRA content will be added to make command line # ADD_MODULES extra modules (for a specific job)
# STATIC set to YES for static build (default: NO) # EXTRA content will be added to make command line
# TEST set to NO to skip running the tests (default: YES) # EXTRA1..5 additional arguments for the make command
# VV set to make build scripts verbose (default: unset) # (one argument per variable)
# STATIC set to YES for static build (default: NO)
# TEST set to NO to skip running the tests (default: YES)
# VV set to make build scripts verbose (default: unset)
# Usually from setup files, but may be specified or overridden # Usually from setup files, but may be specified or overridden
# on a job line # on a job line
# MODULES list of dependency modules # MODULES list of dependency modules
# BASE branch or release tag name of the EPICS Base to use # BASE branch or release tag name of the EPICS Base to use
# <MODULE> branch or release tag for a specific module # <MODULE> branch or release tag for a specific module
# ... see README for setup file syntax description # ... see README for setup file syntax description
jobs: jobs:
include: include:
@@ -108,4 +118,3 @@ jobs:
- env: BASE=7.0 - env: BASE=7.0
os: osx os: osx
compiler: clang compiler: clang
addons: { homebrew: { packages: ["re2c"], update: true } }

View File

@@ -4,19 +4,40 @@ set -e
# Set VV in .travis.yml to make scripts verbose # Set VV in .travis.yml to make scripts verbose
[ "$VV" ] && set -x [ "$VV" ] && set -x
CACHEDIR="$HOME/.cache" CACHEDIR=${CACHEDIR:-${HOME}/.cache}
eval $(grep "EPICS_BASE=" ${CACHEDIR}/RELEASE.local) if [ "$BASE" = "SELF" ]
then
EPICS_BASE=$CURDIR
else
eval $(grep "EPICS_BASE=" ${CACHEDIR}/RELEASE.local)
fi
export EPICS_BASE export EPICS_BASE
[ -z "$EPICS_HOST_ARCH" -a -f $EPICS_BASE/src/tools/EpicsHostArch.pl ] && EPICS_HOST_ARCH=$(perl $EPICS_BASE/src/tools/EpicsHostArch.pl) [ -z "$EPICS_HOST_ARCH" -a -f $EPICS_BASE/src/tools/EpicsHostArch.pl ] && EPICS_HOST_ARCH=$(perl $EPICS_BASE/src/tools/EpicsHostArch.pl)
[ -z "$EPICS_HOST_ARCH" -a -f $EPICS_BASE/startup/EpicsHostArch.pl ] && EPICS_HOST_ARCH=$(perl $EPICS_BASE/startup/EpicsHostArch.pl) [ -z "$EPICS_HOST_ARCH" -a -f $EPICS_BASE/startup/EpicsHostArch.pl ] && EPICS_HOST_ARCH=$(perl $EPICS_BASE/startup/EpicsHostArch.pl)
export EPICS_HOST_ARCH export EPICS_HOST_ARCH
make -j2 $EXTRA # Base 3.15 doesn't have -qemu target architecture and needs an extra define
[ -e $EPICS_BASE/configure/os/CONFIG.Common.RTEMS-pc386-qemu ] || EXTRA_QEMU=RTEMS_QEMU_FIXUPS=YES
# use array variable to get the quoting right while using separate words for arguments
[ -n "$EXTRA0" ] && EXTRA[0]="$EXTRA0"
[ -n "$EXTRA1" ] && EXTRA[1]="$EXTRA1"
[ -n "$EXTRA2" ] && EXTRA[2]="$EXTRA2"
[ -n "$EXTRA3" ] && EXTRA[3]="$EXTRA3"
[ -n "$EXTRA4" ] && EXTRA[4]="$EXTRA4"
[ -n "$EXTRA5" ] && EXTRA[5]="$EXTRA5"
make -j2 $EXTRA_QEMU "${EXTRA[@]}"
ret=0
if [ "$TEST" != "NO" ] if [ "$TEST" != "NO" ]
then then
make tapfiles make -j2 tapfiles || ret=$?
make -s test-results
make -sk test-results
fi fi
exit $ret

View File

@@ -1,21 +1,51 @@
#!/bin/bash #!/bin/bash
set -e set -e
# The following if clause can be removed for ci-scripts major version 3
if [ "$TRAVIS_OS_NAME" == osx -a "$BASH_VERSINFO" -lt 4 ]
then
brew install bash
if [ $(/usr/local/bin/bash -c 'echo $BASH_VERSINFO') -lt 4 ]
then
echo "Failed to install a recent bash" >&2
exit 1
fi
exec /usr/local/bin/bash $0 "$@"
fi
# Set VV in .travis.yml to make scripts verbose # Set VV in .travis.yml to make scripts verbose
[ "$VV" ] && set -x [ "$VV" ] && set -x
# Perl version of "readlink -f" (which MacOS does not provide) # Perl version of "readlink -f" (which MacOS does not provide)
readlinkf() { perl -MCwd -e 'print Cwd::abs_path shift' "$1"; } readlinkf() { perl -MCwd -e 'print Cwd::abs_path shift' "$1"; }
SETUP_DIRS=$(echo $SETUP_PATH | tr ":" "\n")
SCRIPTDIR=$(dirname $(readlinkf $0)) SCRIPTDIR=$(dirname $(readlinkf $0))
CURDIR="$PWD" CURDIR="$PWD"
CACHEDIR="$HOME/.cache" CACHEDIR=${CACHEDIR:-${HOME}/.cache}
[ -e ${CACHEDIR} ] || mkdir -p ${CACHEDIR}
# source functions # source functions
. $SCRIPTDIR/utils.sh . $SCRIPTDIR/utils.sh
echo -e "${ANSI_YELLOW}Using bash version $BASH_VERSION${ANSI_RESET}"
if [ -f /etc/hosts ]
then
# The travis-ci "bionic" image throws us a curveball in /etc/hosts
# by including two entries for localhost. The first for 127.0.1.1
# which causes epicsSockResolveTest to fail.
# cat /etc/hosts
# ...
# 127.0.1.1 localhost localhost ip4-loopback
# 127.0.0.1 localhost nettuno travis vagrant travis-job-....
sudo sed -i -e '/^127\.0\.1\.1/ s|localhost\s*||g' /etc/hosts
echo "==== /etc/hosts"
cat /etc/hosts
echo "===="
fi
# Load settings # Load settings
# ------------- # -------------
@@ -32,23 +62,32 @@ fold_end load.settings
# Check out dependencies # Check out dependencies
# ---------------------- # ----------------------
fold_start check.out.dependencies "Checking/cloning dependencies" if [ "$BASE" != "SELF" ]
then
fold_start check.out.dependencies "Checking/cloning dependencies"
for mod in BASE $MODULES for mod in BASE $ADD_MODULES $MODULES
do do
mod_uc=$(echo $mod | tr 'a-z' 'A-Z') mod_uc=${mod^^}
eval add_dependency $mod_uc \${${mod_uc}:=master} eval add_dependency $mod_uc \${${mod_uc}:=master}
done done
[ -e ./configure ] && cp ${CACHEDIR}/RELEASE.local ./configure/RELEASE.local [ -d ./configure ] && target=./configure/RELEASE.local || target=./RELEASE.local
cp ${CACHEDIR}/RELEASE.local $target
fold_end check.out.dependencies fold_end check.out.dependencies
fi
# Set up compiler # Set up compiler
# --------------- # ---------------
fold_start set.up.epics_build "Setting up EPICS build system" fold_start set.up.epics_build "Setting up EPICS build system"
eval $(grep "EPICS_BASE=" ${CACHEDIR}/RELEASE.local) if [ "$BASE" = "SELF" ]
then
EPICS_BASE=$CURDIR
else
eval $(grep "EPICS_BASE=" ${CACHEDIR}/RELEASE.local)
fi
export EPICS_BASE export EPICS_BASE
echo "EPICS_BASE=$EPICS_BASE" echo "EPICS_BASE=$EPICS_BASE"
@@ -57,7 +96,7 @@ echo "EPICS_BASE=$EPICS_BASE"
export EPICS_HOST_ARCH export EPICS_HOST_ARCH
echo "EPICS_HOST_ARCH=$EPICS_HOST_ARCH" echo "EPICS_HOST_ARCH=$EPICS_HOST_ARCH"
if echo ${modules_to_compile} | grep -q "$EPICS_BASE" if echo ${modules_to_compile} | grep -q "$EPICS_BASE" || [ "$BASE" = "SELF" ]
then then
# requires wine and g++-mingw-w64-i686 # requires wine and g++-mingw-w64-i686
@@ -145,13 +184,15 @@ EOF
RTEMS_VERSION=$RTEMS RTEMS_VERSION=$RTEMS
RTEMS_BASE=$HOME/.rtems RTEMS_BASE=$HOME/.rtems
EOF EOF
# Base 3.15 doesn't have -qemu target architecture
[ -e $EPICS_BASE/configure/os/CONFIG.Common.RTEMS-pc386-qemu ] && QEMU=-qemu
cat << EOF >> $EPICS_BASE/configure/CONFIG_SITE cat << EOF >> $EPICS_BASE/configure/CONFIG_SITE
CROSS_COMPILER_TARGET_ARCHS += RTEMS-pc386-qemu CROSS_COMPILER_TARGET_ARCHS += RTEMS-pc386$QEMU
EOF EOF
fi fi
else else
echo "${ANSI_GREEN}EPICS build system already set up (Base was loaded from cache)${ANSI_RESET}" echo -e "${ANSI_GREEN}EPICS build system already set up (Base was loaded from cache)${ANSI_RESET}"
fi fi
# Download RTEMS cross compiler # Download RTEMS cross compiler
@@ -167,6 +208,8 @@ fold_end set.up.compiler
echo "\$ make --version" echo "\$ make --version"
make --version make --version
[ "$BASE" = "SELF" ] && exit 0
# Build required dependencies # Build required dependencies
# --------------------------- # ---------------------------
@@ -174,7 +217,7 @@ fold_start build.dependencies "Build missing/outdated dependencies"
[ "$VV" ] && silent="-s" || silent= [ "$VV" ] && silent="-s" || silent=
[ -z "$modules_to_compile" ] && echo "${ANSI_GREEN}All dependency modules are up-to-date (nothing to do)${ANSI_RESET}" [ -z "$modules_to_compile" ] && echo -e "${ANSI_GREEN}All dependency modules are up-to-date (nothing to do)${ANSI_RESET}"
for module in ${modules_to_compile} for module in ${modules_to_compile}
do do
@@ -186,13 +229,13 @@ done
fold_end build.dependencies fold_end build.dependencies
echo "${ANSI_BLUE}Dependency module information${ANSI_RESET}" echo -e "${ANSI_BLUE}Dependency module information${ANSI_RESET}"
echo "Module Tag Binaries Commit" echo "Module Tag Binaries Commit"
echo "-----------------------------------------------------------------------------------" echo "-----------------------------------------------------------------------------------"
for mod in base $MODULES for mod in base $MODULES $ADD_MODULES
do do
mod_uc=$(echo $mod | tr 'a-z' 'A-Z') mod_uc=${mod^^}
eval tag=\${${mod_uc}} eval tag=\${${mod_uc}}
eval dir=${CACHEDIR}/\${${mod_uc}_DIRNAME}-$tag eval dir=${CACHEDIR}/\${${mod_uc}_DIRNAME}-$tag
echo "$modules_to_compile" | grep -q "$dir" && stat="rebuilt" || stat="from cache" echo "$modules_to_compile" | grep -q "$dir" && stat="rebuilt" || stat="from cache"
@@ -200,5 +243,5 @@ do
printf "%-10s %-12s %-11s %s\n" "$mod" "$tag" "$stat" "$commit" printf "%-10s %-12s %-11s %s\n" "$mod" "$tag" "$stat" "$commit"
done done
echo "${ANSI_BLUE}Contents of RELEASE.local${ANSI_RESET}" echo -e "${ANSI_BLUE}Contents of RELEASE.local${ANSI_RESET}"
cat ${CACHEDIR}/RELEASE.local cat ${CACHEDIR}/RELEASE.local

View File

@@ -1,6 +1,7 @@
# Utility functions for Travis scripts in ci-scripts # Utility functions for Travis scripts in ci-scripts
# #
# This file is sourced by the executable scripts # This file is sourced by the executable scripts
# CACHEDIR must be defined and existing before calling these functions
# Portable version of 'sed -i' (that MacOS doesn't provide) # Portable version of 'sed -i' (that MacOS doesn't provide)
@@ -22,28 +23,29 @@ export ANSI_CLEAR="\033[0K"
# from https://github.com/travis-ci/travis-rubies/blob/build/build.sh # from https://github.com/travis-ci/travis-rubies/blob/build/build.sh
fold_start() { fold_start() {
echo -en "travis_fold:start:$1\\r${ANSI_YELLOW}$2${ANSI_RESET}" echo -e "travis_fold:start:$1\\r${ANSI_YELLOW}$2${ANSI_RESET}"
} }
fold_end() { fold_end() {
echo -en "travis_fold:end:$1\\r" echo -en "travis_fold:end:$1\\r"
} }
die() {
echo -e "${ANSI_RED}$1${ANSI_RESET}"
[ "$UTILS_UNITTEST" ] || exit 1
}
# source_set(settings) # source_set(settings)
# #
# Source a settings file (extension .set) found in the SETUP_DIRS path # Source a settings file (extension .set) found in SETUP_PATH
# May be called recursively (from within a settings file) # May be called recursively (from within a settings file)
declare -a SEEN_SETUPS declare -a SEEN_SETUPS
source_set() { source_set() {
local set_file=${1//[$'\r']} local set_file=${1//[$'\r']}
local set_dir local set_dir
local found=0 local found=0
if [ -z "${SETUP_DIRS}" ] [ "${SETUP_PATH}" ] || die "Search path for setup files (SETUP_PATH) is empty"
then for set_dir in ${SETUP_PATH//:/ }
echo "${ANSI_RED}Search path for setup files (SETUP_PATH) is empty${ANSI_RESET}"
[ "$UTILS_UNITTEST" ] || exit 1
fi
for set_dir in ${SETUP_DIRS}
do do
if [ -e $set_dir/$set_file.set ] if [ -e $set_dir/$set_file.set ]
then then
@@ -75,11 +77,7 @@ source_set() {
break break
fi fi
done done
if [ $found -eq 0 ] [ $found -ne 0 ] || die "Setup file $set_file.set does not exist in SETUP_PATH search path ($SETUP_PATH)"
then
echo "${ANSI_RED}Setup file $set_file.set does not exist in SETUP_DIRS search path ($SETUP_DIRS)${ANSI_RESET}"
[ "$UTILS_UNITTEST" ] || exit 1
fi
} }
# update_release_local(varname, place) # update_release_local(varname, place)
@@ -135,21 +133,19 @@ add_dependency() {
curdir="$PWD" curdir="$PWD"
DEP=$1 DEP=$1
TAG=$2 TAG=$2
dep_lc=$(echo $DEP | tr 'A-Z' 'a-z') dep_lc=${DEP,,}
eval dirname=\${${DEP}_DIRNAME:=${dep_lc}} eval dirname=\${${DEP}_DIRNAME:=${dep_lc}}
eval reponame=\${${DEP}_REPONAME:=${dep_lc}} eval reponame=\${${DEP}_REPONAME:=${dep_lc}}
eval repourl=\${${DEP}_REPOURL:="https://github.com/\${${DEP}_REPOOWNER:=${REPOOWNER:-epics-modules}}/${reponame}.git"} eval repourl=\${${DEP}_REPOURL:="https://github.com/\${${DEP}_REPOOWNER:=${REPOOWNER:-epics-modules}}/${reponame}.git"}
eval varname=\${${DEP}_VARNAME:=${DEP}} eval varname=\${${DEP}_VARNAME:=${DEP}}
eval recursive=\${${DEP}_RECURSIVE:=1} eval recursive=\${${DEP}_RECURSIVE:=1}
recursive=$(echo $recursive | tr 'A-Z' 'a-z') recursive=${recursive,,}
local recurse=""
[ "$recursive" != "0" -a "$recursive" != "no" ] && recurse="--recursive" [ "$recursive" != "0" -a "$recursive" != "no" ] && recurse="--recursive"
# determine if $DEP points to a valid release or branch # determine if $DEP points to a valid release or branch
if ! git ls-remote --quiet --exit-code --refs $repourl "$TAG" > /dev/null 2>&1 git ls-remote --quiet --exit-code --refs $repourl "$TAG" > /dev/null 2>&1 ||
then die "$TAG is neither a tag nor a branch name for $DEP ($repourl)"
echo "${ANSI_RED}$TAG is neither a tag nor a branch name for $DEP ($repourl)${ANSI_RESET}"
[ "$UTILS_UNITTEST" ] || exit 1
fi
if [ -e $CACHEDIR/$dirname-$TAG ] if [ -e $CACHEDIR/$dirname-$TAG ]
then then
@@ -181,7 +177,24 @@ add_dependency() {
echo "Cloning $TAG of dependency $DEP into $CACHEDIR/$dirname-$TAG" echo "Cloning $TAG of dependency $DEP into $CACHEDIR/$dirname-$TAG"
git clone --quiet $deptharg $recurse --branch "$TAG" $repourl $dirname-$TAG git clone --quiet $deptharg $recurse --branch "$TAG" $repourl $dirname-$TAG
( cd $dirname-$TAG && git log -n1 ) ( cd $dirname-$TAG && git log -n1 )
modules_to_compile="${modules_to_compile} $CACHEDIR/$dirname-$TAG" do_recompile=yes
# add MSI to Base 3.14
if [ $DEP == "BASE" ]
then
versionfile=$CACHEDIR/$dirname-$TAG/configure/CONFIG_BASE_VERSION
if [ -e ${versionfile} ] && grep -q "BASE_3_14=YES" ${versionfile}
then
echo "Adding MSI 1.7 to $CACHEDIR/$dirname-$TAG"
( cd $dirname-$TAG; patch -p1 < $SCRIPTDIR/../add-msi-to-314.patch )
fi
else
# fix non-base modules that do not include the .local files in configure/RELEASE
release=$CACHEDIR/$dirname-$TAG/configure/RELEASE
if [ -e $release ]
then
echo "-include \$(TOP)/../RELEASE.local" > $release
fi
fi
# run hook # run hook
eval hook="\${${DEP}_HOOK}" eval hook="\${${DEP}_HOOK}"
if [ "$hook" ] if [ "$hook" ]
@@ -191,8 +204,7 @@ add_dependency() {
echo "Running hook $hook in $CACHEDIR/$dirname-$TAG" echo "Running hook $hook in $CACHEDIR/$dirname-$TAG"
( cd $CACHEDIR/$dirname-$TAG; "$curdir/$hook" ) ( cd $CACHEDIR/$dirname-$TAG; "$curdir/$hook" )
else else
echo "${ANSI_RED}Hook script $hook is not executable or does not exist.${ANSI_RESET}" die "Hook script $hook is not executable or does not exist."
exit 1
fi fi
fi fi
HEAD=$(cd "$CACHEDIR/$dirname-$TAG" && git log -n1 --pretty=format:%H) HEAD=$(cd "$CACHEDIR/$dirname-$TAG" && git log -n1 --pretty=format:%H)
@@ -200,5 +212,7 @@ add_dependency() {
cd "$curdir" cd "$curdir"
fi fi
[ "${do_recompile}" ] && modules_to_compile="${modules_to_compile} $CACHEDIR/$dirname-$TAG"
update_release_local ${varname} $CACHEDIR/$dirname-$TAG update_release_local ${varname} $CACHEDIR/$dirname-$TAG
} }