Compare commits
185 Commits
SHA1:

ba5508b39e, d5eab412b4, fff4249771, 11e2f1852b, 53e23e3684, f0ce39129c, 1583d41197, 98bc1d8f1f, 4484a9c302, ebc4b5ca2a,
26c268eb28, 02c1c016c4, 27a1224d6d, 157a7af098, 3c1e223dc7, 4f4f76a5db, e91a588370, 29e657d585, 4413c7d75e, 48b15417a6,
1ac8bf7479, d0f93f1920, 27f823139a, 88831439b1, 177dfd4615, 3bd2bb6dff, 393a470d05, 519b75aef2, e8b01bf1a0, 79cc560594,
32bdf84806, 070eab1473, 11e0a60e3b, 22d3a9db15, a3532d3c55, b502aa7049, d8e53e84fd, 16bb305d24, 6d0f34ac65, 2fd1f9ec16,
ca74a8424b, ddf6b961b1, e5af5c2bfe, d02dda5775, 4a6a979f89, 81bf29b8ca, a8321aff92, 48832354da, dbfba732fd, 2f8f4e7fb1,
e96f77d8b1, e14b97b18e, 5d4fdec627, fc141f874a, a006293461, b7d505c2e2, eceeab66cf, e50271765f, d9e1df2367, 7881b3527c,
0ac6c96e2a, eb5dd2a86c, b3efae2451, 9c58196b6d, 421fe54fe6, 11f5c94236, ff14d5ceb4, 11cb469fb9, 0ae628673c, e11632798a,
24a70882d0, 430a699d7f, c056b5ad0f, a88300bdd7, 85f570ac09, d5c419bc8e, 08cdff9495, d052350738, b53468e50e, e47e35bae4,
6071fdf198, 00f003afa5, c3918cdbaa, e2399dc7f3, 5d17fdf98d, 249db7db22, 4dcfbb2079, 9742c5f9c6, 2847f78ab2, 28aeda558b,
2dfa55420f, 22d0feaa05, cd0becff06, 355a5c2fb7, 139b491614, b15d9bb62e, 761152babe, a34bb7d2b2, 94fdfbe802, 4cad610601,
25a60b8490, 953b2960da, a8bee0552d, c8b0894cb6, 47d3f0c0f3, 1d430e1bfd, 12fca1961f, ee803fc38d, 0b589770bf, f92c1e716e,
f5047a9e11, eb471d9539, e3dace9ee3, 660c1c6773, b0ab3bf333, 17ce951e99, 7ae7054196, aee11f266b, 94744c9a8f, 741a293029,
aa8f35f086, 82685b0280, a39346bc78, 5d76e1ff07, c721b7ac32, 8233b9f81b, 7de5a7edc3, 80ab30469e, 4dfd098545, 3929851deb,
e6f722914c, d4ab170b3c, 487d8eb287, c7aca7cd73, bdcb2f3173, e81ec3aa0c, 64e382b46e, add7bbcf88, 377bd2a915, 7782f928c2,
eff3d31687, 82d962639b, 1d4d6cc617, 40fe8f3852, de43213873, df8c0c09d7, fc68a595a8, ba03552b87, 9abc3fbbfe, 54a26f547f,
455ead3208, 9df85ff63e, 3a811924d0, 8c2cb12d9b, d8f2074e6a, f5a502301e, a3858715a2, c734ef9ce9, 064b4d3998, 7634107dfb,
8373ad65f7, 6353a7df04, 19695e2d45, d84eb6e857, be057792d2, 7e55616ece, a5880c4604, e586d8994d, 9cac7a951d, a673f94d8c,
d1fd40b52a, c3597fd901, f60fd0c33b, db93a1dfc8, 4a418e8681, 8232bccba2, 96ba1e2e4a, 7c7f16181c, fe7647545d, e6c3f6f016,
e7aff62c1b, 9b346c400a, 2a94ced97b, f1048da643, aac51b80a9
.appveyor.yml  (new file, 165 lines)
@@ -0,0 +1,165 @@
# .appveyor.yml for testing EPICS Base ci-scripts
# (see: https://github.com/epics-base/ci-scripts)

# Note:
# Paths to scripts are different in this test configuration
# (your module has one more directory level: .ci)

# Ralph Lange <ralph.lange@gmx.de>
# Copyright (c) 2020 ITER Organization

#---------------------------------#
#      build cache                #
#---------------------------------#

cache:
  - C:\Users\appveyor\.tools -> appveyor\do.py

#---------------------------------#
#      repository cloning         #
#---------------------------------#

# Called at very beginning, before repo cloning
init:
  # Set autocrlf to make batch files work
  - git config --global core.autocrlf true
  # print the connection info for RDP connections (see 'debugging' below)
  - ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))

# Set clone depth (do not fetch complete history)
clone_depth: 50

# Skipping commits affecting only specific files
skip_commits:
  files:
    - 'documentation/*'
    - 'templates/*'
    - '**/*.html'
    - '**/*.md'
    - '.travis.yml'

#---------------------------------#
#      additional packages        #
#---------------------------------#

install:
  # fetch submodules (like ci-scripts)
  - cmd: git submodule update --init --recursive
  # for the sequencer
  - cinst re2c

#---------------------------------#
#  build matrix configuration     #
#---------------------------------#

# Default build worker image
image: Visual Studio 2015

# Build Configurations: dll/static, regular/debug
configuration:
  - dynamic
  - static
  - dynamic-debug
  - static-debug

# Environment variables: compiler toolchain, base version, setup file, ...
environment:
  # common / default variables for all jobs
  SETUP_PATH: .:.ci
  SET: test01
  BASE_RECURSIVE: NO
  VV: 1

  matrix:
    - CMP: vs2019
      SET: test00
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
    - CMP: mingw
    - CMP: vs2019
      VV: 0
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
    - CMP: vs2019
      BASE: 3.15
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
    - CMP: vs2019
      BASE: 3.14
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
    - CMP: vs2017
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
    - CMP: vs2015
    - CMP: vs2013
    - CMP: vs2012
    - CMP: vs2010
    - CMP: vs2008

# Platform: architecture
platform:
  - x86
  - x64

# Matrix configuration: allow specific failing jobs
matrix:
  exclude:
    # Run test00 only once: x64 dynamic
    - platform: x86
      SET: test00
    - configuration: static
      SET: test00
    - configuration: dynamic-debug
      SET: test00
    - configuration: static-debug
      SET: test00
    # VS2012 and older installs don't have the 64 bit compiler
    - platform: x64
      CMP: vs2012
    - platform: x64
      CMP: vs2010
    - platform: x64
      CMP: vs2008

# Run test script for unit tests (SET = test00)
for:
-
  matrix:
    only:
      - SET: test00
  build_script:
    - cmd: python appveyor-test.py
  test_script:
    - cmd: echo Tests have been run in the build phase

#---------------------------------#
#      building & testing         #
#---------------------------------#

build_script:
  - cmd: python appveyor/do.py prepare
  - cmd: python appveyor/do.py build

test_script:
  - cmd: python appveyor/do.py test

#---------------------------------#
#      debugging                  #
#---------------------------------#

## if you want to connect by remote desktop to a failed build, uncomment these lines
## note that you will need to connect within the usual build timeout limit (60 minutes)
## so you may want to adjust the build matrix above to just build the one of interest

#on_failure:
#  - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))


#---------------------------------#
#      notifications              #
#---------------------------------#

notifications:

#  - provider: Email
#    to:
#      - core-talk@aps.anl.gov
#    on_build_success: false

#  - provider: GitHubPullRequest
.gitignore  (vendored, 6 changed lines)
@@ -1,4 +1,10 @@
/QtC-*

*.orig
*.log
.*.swp
*.autosave

/.idea/
*.py[cod]
__pycache__/
.travis.yml  (new file, 181 lines)
@@ -0,0 +1,181 @@
# .travis.yml for testing EPICS Base ci-scripts
# (see: https://github.com/epics-base/ci-scripts)

# Note:
# Paths to scripts are different in this test configuration
# (your module has one more directory level: .ci)

language: cpp
compiler: gcc
dist: xenial

cache:
  directories:
    - $HOME/.cache

env:
  global:
    - SETUP_PATH=.:.ci

addons:
  apt:
    packages:
      - libreadline6-dev
      - libncurses5-dev
      - perl
      - clang
      - g++-mingw-w64-i686
      - g++-mingw-w64-x86-64
      - qemu-system-x86
  homebrew:
    packages:
      - re2c
      - bash
    update: true

install:
  - ./travis/prepare.sh

script:
  - ./travis-test.sh

# If you need to do more during install and build,
# add a local directory to your module and do e.g.
#   - ./.ci-local/travis/install-extras.sh

# Define build jobs

# Well-known variables to use
#   SET      source setup file
#   EXTRA    content will be added to make command line
#   STATIC   set to YES for static build (default: NO)
#   TEST     set to NO to skip running the tests (default: YES)
#   VV       set to make build scripts verbose (default: unset)

# Usually from setup files, but may be specified or overridden
# on a job line
#   MODULES  list of dependency modules
#   BASE     branch or release tag name of the EPICS Base to use
#   <MODULE> branch or release tag for a specific module
#   ...      see README for setup file syntax description

jobs:
  include:

    # Run unit tests on Linux and Mac

    - env: SET=test00

    - env: SET=test00
      os: osx

    # Compile the example application
    # using the build configurations from full makeBaseApp example

    - env: SET=test01

    # On the side: test ADD_MODULES
    - env: SET=test01 ADD_MODULES=ipac
      compiler: clang

    - env: VV="" SET=test01

    - env: SET=test01 EXTRA="CMD_CXXFLAGS=-std=c++11"

    - env: SET=test01 EXTRA="CMD_CXXFLAGS=-std=c++11"
      compiler: clang

    # trusty is pretty close to RHEL7
    - env: SET=test01
      dist: trusty

    - env: SET=test01 EXTRA="CMD_CXXFLAGS=-std=c++11"
      dist: trusty

    # Cross-compilation to Windows using MinGW and WINE

    - env: SET=test01 WINE=32 TEST=NO STATIC=YES
      compiler: mingw

    - env: SET=test01 WINE=64 TEST=NO STATIC=YES
      compiler: mingw

    # dynamic (DLL) builds are broken on xenial
    - env: SET=test01 WINE=32 TEST=NO STATIC=NO
      dist: bionic
      compiler: mingw

    - env: SET=test01 WINE=64 TEST=NO STATIC=NO
      dist: bionic
      compiler: mingw

    # Cross-compilation to RTEMS
    # (needs EPICS Base >= 3.16.2)

    - env: SET=test01 RTEMS=4.10

    - env: SET=test01 RTEMS=4.9

    # Other gcc versions (adding as an extra package)

    - env: SET=test01
      compiler: gcc-6
      addons: { apt: { packages: ["g++-6"], sources: ["ubuntu-toolchain-r-test"] } }

    - env: SET=test01
      compiler: gcc-7
      addons: { apt: { packages: ["g++-7"], sources: ["ubuntu-toolchain-r-test"] } }

    # MacOS build

    # SNCSEQ 2.2.7 fails to build on MacOS; currently needs master
    - env: SET=test01 SNCSEQ=master
      os: osx
      compiler: clang

    # Base 3.15 builds
    # ================

    - env: BASE=R3.15.7 SET=test01

    - env: BASE=R3.15.7 SET=test01 WINE=64 TEST=NO STATIC=YES
      dist: bionic
      compiler: mingw

    # The DLL build for this Base version is known to fail
    # - env: BASE=R3.15.7 SET=test01 WINE=64 TEST=NO STATIC=NO
    #   dist: bionic
    #   compiler: mingw

    # Cross-compilation to RTEMS
    # (needs EPICS Base >= 3.16.2)

    - env: BASE=R3.16.2 SET=test01 RTEMS=4.10
      dist: trusty

    - env: BASE=R3.16.2 SET=test01 RTEMS=4.9
      dist: trusty

    # SNCSEQ 2.2.7 fails to build on MacOS; currently needs master
    - env: BASE=R3.15.7 SET=test01 SNCSEQ=master
      os: osx
      compiler: clang

    # Base 3.14 builds
    # ================

    - env: BASE=R3.14.12.8 SET=test01

    - env: BASE=R3.14.12.8 SET=test01 WINE=64 TEST=NO STATIC=YES
      dist: bionic
      compiler: mingw

    # The DLL build for this Base version is known to fail
    # - env: BASE=R3.14.12.8 SET=test01 WINE=64 TEST=NO STATIC=NO
    #   dist: bionic
    #   compiler: mingw

    # SNCSEQ 2.2.7 fails to build on MacOS; currently needs master
    - env: BASE=R3.14.12.8 SET=test01 SNCSEQ=master
      os: osx
      compiler: clang
@@ -1,92 +0,0 @@
# .travis.xml for use with EPICS Base ci-scripts
# (see: https://github.com/epics-base/ci-scripts)

language: cpp
compiler: gcc
dist: xenial

addons:
  apt:
    packages:
      - libreadline6-dev
      - libncurses5-dev
      - perl
      - clang
      - g++-mingw-w64-i686
      - qemu-system-x86

install:
  - ./.ci/travis-prepare.sh

script:
  - ./.ci/travis-build.sh

# If you need to do more during install and build,
# add a local directory to your module and do e.g.
#   - ./.ci-local/travis-install-extras.sh

# Define build jobs

# Well-known variables to use
#   BASE     branch or release tag name of the EPICS Base to use
#   EXTRA    content will be added to make command line
#   STATIC   set to YES for static build (default: NO)
#   TEST     set to NO to skip running the tests (default: YES)

matrix:
  include:

    # Different configurations of default gcc and clang

    - env: BASE=7.0

    - env: BASE=7.0
      compiler: clang

    - env: BASE=7.0 EXTRA="CMD_CXXFLAGS=-std=c++11"

    - env: BASE=7.0 EXTRA="CMD_CXXFLAGS=-std=c++11"
      compiler: clang

    # Trusty: compiler versions very close to RHEL 7

    - env: BASE=7.0
      dist: trusty

    - env: BASE=7.0 EXTRA="CMD_CXXFLAGS=-std=c++11"
      dist: trusty

    # Cross-compilation to Windows using MinGW and WINE

    - env: BASE=7.0 WINE=32 TEST=NO STATIC=YES
      compiler: mingw

    - env: BASE=7.0 WINE=32 TEST=NO STATIC=NO
      compiler: mingw

    # Cross-compilation to RTEMS

    - env: BASE=7.0 RTEMS=4.10 TEST=NO

    - env: BASE=7.0 RTEMS=4.9 TEST=NO

    # Other gcc versions (adding as an extra package)

    - env: BASE=7.0
      compiler: gcc-6
      addons: { apt: { packages: ["g++-6"], sources: ["ubuntu-toolchain-r-test"] } }

    - env: BASE=7.0
      compiler: gcc-7
      addons: { apt: { packages: ["g++-7"], sources: ["ubuntu-toolchain-r-test"] } }

    # MacOS build

    - env: BASE=7.0
      os: osx
      compiler: clang

# All above jobs can be defined for other branches or releases of EPICS Base
# by setting BASE to the branch name or release tag name, e.g.
#   BASE=3.15    (to use the 3.15 branch of Base)
#   BASE=R7.0.3  (to use the 7.0.3 release of Base)
LICENSE  (new file, 65 lines)
@@ -0,0 +1,65 @@
Copyright (c) 2019 EPICS. All rights reserved.

EPICS CI-Scripts are distributed subject to the following
license conditions:

SOFTWARE LICENSE AGREEMENT
Software: EPICS CI-Scripts

1. The "Software", below, refers to EPICS CI-Scripts (in
   either source code, or binary form and accompanying documentation).
   Each licensee is addressed as "you" or "Licensee."

2. The copyright holders shown above and their third-party licensors
   hereby grant Licensee a royalty-free nonexclusive license, subject to
   the limitations stated herein and U.S. Government license rights.

3. You may modify and make a copy or copies of the Software for use
   within your organization, if you meet the following conditions:
   a. Copies in source code must include the copyright notice and this
      Software License Agreement.
   b. Copies in binary form must include the copyright notice and this
      Software License Agreement in the documentation and/or other
      materials provided with the copy.

4. You may modify a copy or copies of the Software or any portion of it,
   thus forming a work based on the Software, and distribute copies of
   such work outside your organization, if you meet all of the following
   conditions:
   a. Copies in source code must include the copyright notice and this
      Software License Agreement;
   b. Copies in binary form must include the copyright notice and this
      Software License Agreement in the documentation and/or other
      materials provided with the copy;
   c. Modified copies and works based on the Software must carry
      prominent notices stating that you changed specified portions of
      the Software.

5. Portions of the Software resulted from work developed under a U.S.
   Government contract and are subject to the following license: the
   Government is granted for itself and others acting on its behalf a
   paid-up, nonexclusive, irrevocable worldwide license in this computer
   software to reproduce, prepare derivative works, and perform publicly
   and display publicly.

6. WARRANTY DISCLAIMER. THE SOFTWARE IS SUPPLIED "AS IS" WITHOUT WARRANTY
   OF ANY KIND. THE COPYRIGHT HOLDERS, THEIR THIRD PARTY LICENSORS, THE
   UNITED STATES, THE UNITED STATES DEPARTMENT OF ENERGY, AND THEIR
   EMPLOYEES: (1) DISCLAIM ANY WARRANTIES, EXPRESS OR IMPLIED, INCLUDING
   BUT NOT LIMITED TO ANY IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS
   FOR A PARTICULAR PURPOSE, TITLE OR NON-INFRINGEMENT, (2) DO NOT ASSUME
   ANY LEGAL LIABILITY OR RESPONSIBILITY FOR THE ACCURACY, COMPLETENESS,
   OR USEFULNESS OF THE SOFTWARE, (3) DO NOT REPRESENT THAT USE OF THE
   SOFTWARE WOULD NOT INFRINGE PRIVATELY OWNED RIGHTS, (4) DO NOT WARRANT
   THAT THE SOFTWARE WILL FUNCTION UNINTERRUPTED, THAT IT IS ERROR-FREE
   OR THAT ANY ERRORS WILL BE CORRECTED.

7. LIMITATION OF LIABILITY. IN NO EVENT WILL THE COPYRIGHT HOLDERS, THEIR
   THIRD PARTY LICENSORS, THE UNITED STATES, THE UNITED STATES DEPARTMENT
   OF ENERGY, OR THEIR EMPLOYEES: BE LIABLE FOR ANY INDIRECT, INCIDENTAL,
   CONSEQUENTIAL, SPECIAL OR PUNITIVE DAMAGES OF ANY KIND OR NATURE,
   INCLUDING BUT NOT LIMITED TO LOSS OF PROFITS OR LOSS OF DATA, FOR ANY
   REASON WHATSOEVER, WHETHER SUCH LIABILITY IS ASSERTED ON THE BASIS OF
   CONTRACT, TORT (INCLUDING NEGLIGENCE OR STRICT LIABILITY), OR
   OTHERWISE, EVEN IF ANY OF SAID PARTIES HAS BEEN WARNED OF THE
   POSSIBILITY OF SUCH LOSS OR DAMAGES.
Makefile  (new file, 31 lines)
@@ -0,0 +1,31 @@
# Makefile at top of application tree
TOP = .
include $(TOP)/configure/CONFIG

# Directories to build, any order
DIRS += configure
DIRS += $(wildcard *Sup)
DIRS += $(wildcard *App)
DIRS += $(wildcard *Top)
DIRS += $(wildcard iocBoot)

# The build order is controlled by these dependency rules:

# All dirs except configure depend on configure
$(foreach dir, $(filter-out configure, $(DIRS)), \
    $(eval $(dir)_DEPEND_DIRS += configure))

# Any *App dirs depend on all *Sup dirs
$(foreach dir, $(filter %App, $(DIRS)), \
    $(eval $(dir)_DEPEND_DIRS += $(filter %Sup, $(DIRS))))

# Any *Top dirs depend on all *Sup and *App dirs
$(foreach dir, $(filter %Top, $(DIRS)), \
    $(eval $(dir)_DEPEND_DIRS += $(filter %Sup %App, $(DIRS))))

# iocBoot depends on all *App dirs
iocBoot_DEPEND_DIRS += $(filter %App,$(DIRS))

# Add any additional dependency rules here:

include $(TOP)/configure/RULES_TOP
README.md  (330 changed lines)
@@ -1,62 +1,300 @@
<a target="_blank" href="http://semver.org">![Version][badge.version]</a>
<a target="_blank" href="https://travis-ci.org/epics-base/ci-scripts">![Travis status][badge.travis]</a>
<a target="_blank" href="https://ci.appveyor.com/project/epics-base/ci-scripts">![AppVeyor status][badge.appveyor]</a>

# Continuous Integration Scripts for EPICS Modules

The scripts in this repository are intended to provide a common,
The scripts inside this repository are intended to provide a common,
easy-to-use and flexible way to add Continuous Integration to EPICS
software modules, e.g. Device Support modules.
software modules, e.g. Device or Driver Support modules.

By including this repository as a Git Submodule, your module will
always use an explicit commit, i.e. a fixed version of the scripts.
That ensures that further development of these scripts cannot break
your setup.
By including this repository as a Git Submodule, you will be able to
use the same flexible, powerful CI setup that EPICS Base uses,
including a way to specify sets of dependent modules
(with versions) that you want to compile your module against.

## Travis-CI
By using the submodule mechanism, your module will always use an
explicit commit, i.e. a fixed version of the scripts.
This ensures that any further development of the ci-scripts will
never break existing use.

### Features
## This Repository

- Compile against different branches or releases of EPICS Base
- Use different versions of compilers (gcc, clang)
- Cross-compile for Windows 32bit using MinGW and WINE
- Cross-compile for RTEMS 4.9 and 4.10
- Compile on MacOS
In addition to the scripts themselves (in the subdirectories),
this repository contains the test suite that is used to verify
functionality and features of the ci-scripts.

You are welcome to use the test suite as a reference, but keep in
mind that in your module the path to the scripts has one level more
(e.g., `./travis/abc` here would be `./.ci/travis/abc` in your
module).
Also, a test suite might not show the same level of quality as an
example.

## Features

- Compile against different branches or releases of EPICS Base and
  additional dependencies (modules like asyn, std, etc.).

- Define settings files that declare sets of dependencies
  with their versions and locations.

- Define hook scripts for any dependency.
  Hooks are run on the dependency module before it is compiled, so
  the module can be patched or further configured.

- Define static or shared builds (executables, libraries).

- Run tests (using the EPICS unit test suite).

## Supported CI Services

### [Travis-CI](https://travis-ci.org/)
- Use different compilers (gcc, clang)
- Use different gcc versions
- Cross-compile for Windows 32bit and 64bit using MinGW and WINE
- Cross-compile for RTEMS 4.9 and 4.10 (Base >= 3.16.2)
- Compile on MacOS
- Built dependencies are cached (for faster builds)

### How to Use these Scripts
See specific **[ci-scripts on Travis-CI README](travis/README.md)** for more details.

1. Get an account on [Travis-CI](https://travis-ci.org/), connect
   it to your GitHub account and activate your support module's
   repository. For more details, please refer to the
   [Travis-CI Tutorial](https://docs.travis-ci.com/user/tutorial/).
   Make sure to use `travis-ci.org` and not their `.com` site.
### [AppVeyor](https://www.appveyor.com/)
- Use different compilers (Visual Studio, MinGW)
- Use different Visual Studio versions: \
  2008, 2010, 2012, 2013, 2015, 2017, 2019
- Compile for Windows 32bit and 64bit

1. In your Support Module, add this repository as a Git Submodule
   (name suggestion: `.ci`).
   ```
   $ git submodule add https://github.com/epics-base/ci-scripts .ci
   ```

1. Create a Travis configuration by copying one of the examples.
   ```
   $ cp .ci/.travis.yml.example-full .travis.yml
   ```

1. Edit the `.travis.yml` configuration to include the jobs you want
   Travis to run.

1. Push your changes and check
   [travis-ci.org](https://travis-ci.org/) for your build results.
See specific **[ci-scripts on AppVeyor README](appveyor/README.md)** for more details.

## Releases and Numbering
## How to Use the CI-Scripts

1. Get an account on a supported CI service provider platform.
   (e.g. [Travis-CI](https://travis-ci.org/),
   [AppVeyor](https://www.appveyor.com/), Azure Pipelines...)

   (More details in the specific README of the subdirectory.)

2. In your Support Module, add this ci-scripts repository
   as a Git Submodule (name suggestion: `.ci`).
   ```bash
   git submodule add https://github.com/epics-base/ci-scripts .ci
   ```

3. Create setup files for different sets of dependencies you
   want to compile against. (See below.)

   E.g., a setup file `stable.set` specifying
   ```
   MODULES=sncseq asyn

   BASE=3.15
   ASYN=R4-34
   SNCSEQ=R2-2-8
   ```
   will compile against the EPICS Base 3.15 branch, the Sequencer
   release 2.2.8 and release 4.34 of asyn.
   (Any settings can be overridden from the specific job configuration
   in e.g. `.travis.yml`.)

4. Create a configuration for the CI service by copying one of
   the examples provided in the service specific subdirectory
   and editing it to include the jobs you want the service to run.
   Use your setup by defining e.g. `SET=stable` in the environment of
   a job.

5. Push your changes and check the CI service for your build results.

## Setup Files

Your module might depend on EPICS Base and a few other support modules.
(E.g., a specific driver might need StreamDevice, ASYN and the Sequencer.)
In that case, building against every possible combination of released
versions of those dependencies is not possible:
Base (37) x StreamDevice (50) x ASYN (40) x Sequencer (51) would produce
more than 3.7 million different combinations, i.e. build jobs.

A more reasonable approach is to create a few setups, each being a
combination of dependency releases, that do a few scans of the available
"version space". One for the oldest versions you want to support, one or two
for stable versions that many of your users have in production, one for the
latest released versions and one for the development branches.

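As a quick check of the arithmetic behind that claim (using only the example release counts quoted in the paragraph above), the number of combinations is simply the product of the four counts:

```bash
# Product of the example release counts: Base x StreamDevice x ASYN x Sequencer
echo $((37 * 50 * 40 * 51))    # 3774000, i.e. more than 3.7 million build jobs
```
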
## Setup File Syntax

Setup files are loaded by the build scripts. They are found by searching
the locations in `SETUP_PATH` (space or colon separated list of directories,
relative to your module's root directory).

Setup files can include other setup files by calling `include <setup>`
(omitting the `.set` extension of the setup file). The configured
`SETUP_PATH` is searched for the include.

Any `VAR=value` setting of a variable is only executed if `VAR` is unset or
empty. That way any settings can be overridden by settings in the main
configuration (e.g., `.travis.yml`).

Empty lines or lines starting with `#` are ignored.

`MODULES=<list of names>` should list the dependencies (software modules)
by using their well-known slugs, separated by spaces.
EPICS Base (slug: `base`) will always be a dependency and will be added and
compiled first. The other dependencies are added and compiled in the order
they are defined in `MODULES`.
Modules needed only for specific jobs (e.g., on specific architectures)
can be added in the main configuration file by setting `ADD_MODULES`
for the specific job(s).

`REPOOWNER=<name>` sets the default GitHub owner (or organization) for all
dependency modules. Useful if you want to compile against a complete set
of dependencies forked into your private GitHub area.

For any module mentioned as `foo` in the `MODULES` setting (and for `BASE`),
the following settings can be configured:

`FOO=<version>` Set version of the module that should be used. Must either
be a *tag* name or a *branch* name. [default: `master`]

`FOO_REPONAME=<name>` Set the name of the remote repository as `<name>.git`.
[default is the slug in lower case: `foo`]

`FOO_REPOOWNER=<name>` Set the name of the GitHub owner (or organization)
that the module repository can be found under.

`FOO_REPOURL="<url>"` Set the complete URL of the remote repository. Useful
for dependencies that are not hosted on GitHub.

The default URL for the repository is pointing to GitHub, under
`$FOO_REPOOWNER` else `$REPOOWNER` else `epics-modules`,
using `$FOO_REPONAME` else `foo` and the extension `.git`.

`FOO_DEPTH=<number>` Set the depth of the git clone operation. Use 0 for a
full clone. [default: 5]

`FOO_RECURSIVE=YES/NO` Set to `NO` (or `0`) for a flat clone without
recursing into submodules. [default is including submodules: `YES`]

`FOO_DIRNAME=<name>` Set the local directory name for the checkout. This will
always be extended by the release or branch name as `<name>-<version>`.
[default is the slug in lower case: `foo`]

`FOO_HOOK=<script>` Set the name of a script that will be run after cloning
the module, before compiling it. Working directory when running the script
is the root of the targeted module (e.g. `.../.cache/foo-1.2`).
[default: no hooks are run]

`FOO_VARNAME=<name>` Set the name that is used for the module when creating
the `RELEASE.local` files. [default is the slug in upper case: `FOO`]

The ci-scripts module contains default settings for widely used modules, so
that usually it is sufficient to set `FOO=<version>`.
You can find the list of supported (and tested) modules in `defaults.set`.
Feel free to suggest more default settings using a Pull Request.

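To make the `FOO_HOOK` mechanism described above more concrete, here is a minimal sketch of such a hook script; the dependency slug `foo`, the patch file name and the `.ci-local` location are purely hypothetical, and `TRAVIS_BUILD_DIR` is only available on Travis-CI jobs:

```bash
#!/bin/bash
# Hypothetical hook script, referenced from a setup file as e.g. FOO_HOOK=foo-hook.sh
# ci-scripts runs it after cloning the dependency and before compiling it,
# with the working directory set to the dependency checkout (e.g. .../.cache/foo-1.2).
set -e
echo "Running hook for foo in $(pwd)"
# Apply a patch assumed to be shipped inside the user module (path and name are made up):
patch -p1 < "$TRAVIS_BUILD_DIR/.ci-local/fix-foo-build.patch"
```
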
## Debugging

Setting `VV=1` in your service configuration (e.g., `.travis.yml`) for a
specific job will run the job with high verbosity,
printing every command as it is being executed and switching the dependency
builds to higher verbosity.

For debugging on your local machine, you may set `CACHEDIR` to change the
location for the dependency builds. [default is `$HOME/.cache`]

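For a local run, both knobs are just environment variables; a minimal sketch follows (the script path is an assumption based on the note near the top of this README that, in a user module, the scripts live one level deeper under `.ci`):

```bash
# Run the dependency preparation step by hand with verbose output and a
# non-default cache location; SET, VV and CACHEDIR are described above,
# the prepare.sh path is an assumption based on the .ci submodule layout.
export SET=stable
export VV=1
export CACHEDIR="$HOME/ci-cache"
./.ci/travis/prepare.sh
```
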
## References: EPICS Modules Using ci-scripts

[EPICS Base](https://github.com/epics-base/epics-base) and its submodules
[pvData](https://github.com/epics-base/pvDataCPP),
[pvAccess](https://github.com/epics-base/pvAccessCPP),
[pva2pva](https://github.com/epics-base/pva2pva)

EPICS Modules:
[ASYN](https://github.com/epics-modules/asyn),
[devlib2](https://github.com/epics-modules/devlib2),
[ecmc](https://github.com/epics-modules/ecmc),
[ip](https://github.com/epics-modules/ip),
[lua](https://github.com/epics-modules/lua),
[MCoreUtils](https://github.com/epics-modules/MCoreUtils),
[modbus](https://github.com/epics-modules/modbus),
[motor](https://github.com/epics-modules/motor),
[PCAS](https://github.com/epics-modules/pcas),
[sscan](https://github.com/epics-modules/sscan),
[vac](https://github.com/epics-modules/vac)

ESS: [EtherCAT MC Motor Driver][ref.ethercatmc]

ITER: [OPC UA Device Support](https://github.com/ralphlange/opcua)

## Frequently Asked Questions

**How can I see what the dependency building jobs are actually doing?**

Set `VV=1` in the configuration line of the job you are interested in.
This will make all builds (not just for your module) verbose.

**How do I update my module to use a newer release of ci-scripts?**

Update the submodule in `.ci` first, then change your CI configuration
(if needed) and commit both to your module. E.g., to update your Travis
setup to release 2.3.5 of ci-scripts:
```bash
cd .ci
git pull origin v2.3.5
cd -
git add .ci
# if needed:
edit .travis.yml
git add .travis.yml
git commit -m "Update ci-scripts submodule to v2.3.5"
```

Check the example configuration files inside ci-scripts (and their
changes) to see what might be needed and/or interesting to change
in your configuration.

Depending on the changes contained in the ci-scripts update, it might
be advisable to clear the CI caches after updating ci-scripts. E.g.,
a change in setting up EPICS Base will not be applied if Base is found
in the cache.

**Why does running the scripts locally on my MacOS machine fail?**

The ci-scripts for Travis-CI require Bash version 4.
As Apple ships an older Bash for [political reasons][reddit.bash],
you need to install a more recent Bash, e.g. using MacPorts
or Homebrew.

## Release Numbering of this Module

The module tries to apply [Semantic Versioning](https://semver.org/).

Major release numbers refer to the API, which is more or less defined
by the `.travis.yml.example-full` configuration example.
Changing this file for the existing configuration options or to add
new configurations options will usually cause a new major release.
by the full configuration examples in the service specific
subdirectories.
If one of these files has to be changed for the existing configuration
options or important new options are being added, a new major release
is created.

Minor release numbers refer to bugfixes that should not require a user
module (i.e., its `.travis.yml`) to be changed.
Minor release numbers refer to additions and enhancements that do not
require the configuration inside an existing user module to be changed.

Again: using git submodule to include these scripts means that user
modules always work with a fixed, frozen version of these scripts.
I.e., developments in the ci-scripts will never break an existing
application.
Again: using the git submodule mechanism to include these scripts means
that user modules always work with a fixed, frozen version.
I.e., developments in the ci-scripts repository will never break an
existing application.
These release numbering considerations are just a hint to assess the
risk when updating the submodule.
risks when updating the submodule.

## License

This module is distributed subject to a Software License Agreement found
in file LICENSE that is included with this distribution.

<!-- Links -->
[badge.version]: https://badge.fury.io/gh/epics-base%2Fci-scripts.svg
[badge.travis]: https://travis-ci.org/epics-base/ci-scripts.svg?branch=master
[badge.appveyor]: https://ci.appveyor.com/api/projects/status/8b578alg974axvux?svg=true

[reddit.bash]: https://www.reddit.com/r/bash/comments/393oqv/why_is_the_version_of_bash_included_in_os_x_so_old/

[ref.ethercatmc]: https://github.com/EuropeanSpallationSource/m-epics-ethercatmc

add-msi-to-314.patch  (new file, 851 lines)
@@ -0,0 +1,851 @@
diff --git a/config/RULES.Db b/config/RULES.Db
index b4946c7aa..90b76ed08 100644
--- a/config/RULES.Db
+++ b/config/RULES.Db
@@ -12,11 +12,7 @@
#
MAKEBPT = $(EPICS_BASE_HOST_BIN)/makeBpt$(EXE)

-ifndef MSI
-# Tool from R3.14 extensions bin, R3.13 extensions bin, or user path
-MSI = $(firstword $(wildcard $(EPICS_EXTENSIONS_HOST_BIN)/msi$(HOSTEXE) \
- $(EPICS_EXTENSIONS)/bin/$(HOST_ARCH)/msi$(HOSTEXE)) msi$(HOSTEXE))
-endif
+MSI = $(EPICS_BASE_HOST_BIN)/msi$(HOSTEXE)

DBEXPAND = $(EPICS_BASE_HOST_BIN)/dbExpand$(EXE)
DBST = dbst
diff --git a/configure/CONFIG_BASE b/configure/CONFIG_BASE
index 7ee5a5b89..9a9793093 100644
--- a/configure/CONFIG_BASE
+++ b/configure/CONFIG_BASE
@@ -112,8 +112,5 @@ ifndef DBST
DBST = dbst
endif

-ifndef MSI
-MSI = msi
-endif
-
+MSI = $(EPICS_BASE_HOST_BIN)/msi$(HOSTEXE)

diff --git a/src/dbtools/Makefile b/src/dbtools/Makefile
index 38ed52c9e..8655a5337 100644
--- a/src/dbtools/Makefile
+++ b/src/dbtools/Makefile
@@ -11,6 +11,11 @@ TOP=../..

include $(TOP)/configure/CONFIG

+PROD_HOST += msi
+
+msi_SRCS = msi.c
+msi_LIBS += Com
+
INC += dbLoadTemplate.h
INC += dbtoolsIocRegister.h

diff --git a/src/dbtools/msi.c b/src/dbtools/msi.c
new file mode 100644
index 000000000..525d4f25b
--- /dev/null
+++ b/src/dbtools/msi.c
@@ -0,0 +1,798 @@
+/*************************************************************************\
+* Copyright (c) 2002 The University of Chicago, as Operator of Argonne
+* National Laboratory.
+* Copyright (c) 2002 The Regents of the University of California, as
+* Operator of Los Alamos National Laboratory.
+* This file is distributed subject to a Software License Agreement found
+* in the file LICENSE that is included with this distribution.
+\*************************************************************************/
+/*msi - macro sunstitutions and include */
+
+/*
+ * Modification Log:
+ * -----------------
+ * .01  08DEC97  mrk  Original version
+ */
+
+#include <stdlib.h>
+#include <stddef.h>
+#include <stdio.h>
+#include <string.h>
+#include <ctype.h>
+#include <errno.h>
+
+#include <epicsVersion.h>
+#include <dbDefs.h>
+#include <macLib.h>
+#include <ellLib.h>
+#include <errlog.h>
+
+#define MAX_BUFFER_SIZE 4096
+
+#if ((EPICS_VERSION <= 3) && (EPICS_REVISION <= 13))
+#define macEnvExpand(x) strdup(x)
+#endif
+
+/*Forward references to local routines*/
+static void usageExit(void);
+static void addMacroReplacements(MAC_HANDLE *macPvt,char *pval);
+static void makeSubstitutions(void *inputPvt,void *macPvt,char *templateName);
+
+/*Routines that read the template files */
+static void inputConstruct(void **inputPvt);
+static void inputDestruct(void *inputPvt);
+static void inputAddPath(void *inputPvt, char *pval);
+static void inputBegin(void *inputPvt,char *fileName);
+static char *inputNextLine(void *inputPvt);
+static void inputNewIncludeFile(void *inputPvt,char *name);
+static void inputErrPrint(void *inputPvt);
+
+/*Routines that read the substitution file */
+static void substituteDestruct(void *substitutePvt);
+static void substituteOpen(void **substitutePvt,char *substitutionName);
+static int substituteGetNextSet(void *substitutePvt,char **filename);
+static char *substituteGetReplacements(void *substitutePvt);
+
+/*Exit status*/
+static int exitStatus = 0;
+
+int opt_V = 0;
+
+
+int main(int argc,char **argv)
+{
+    void *inputPvt;
+    MAC_HANDLE *macPvt;
+    char *pval;
+    int narg;
+    char *substitutionName=0;
+    char *templateName=0;
+    int i;
+
+    inputConstruct(&inputPvt);
+    macCreateHandle(&macPvt,0);
+    macSuppressWarning(macPvt,1);
+    while((argc>1) && (argv[1][0] == '-')) {
+        narg = (strlen(argv[1])==2) ? 2 : 1;
+        pval = (narg==1) ? (argv[1]+2) : argv[2];
+        if(strncmp(argv[1],"-I",2)==0) {
+            inputAddPath(inputPvt,pval);
+        } else if(strncmp(argv[1],"-o",2)==0) {
+            if(freopen(pval,"w",stdout)==NULL) {
+                fprintf(stderr,"Can't open %s for writing: %s\n", pval, strerror(errno));
+                exit(1);
+            }
+        } else if(strncmp(argv[1],"-M",2)==0) {
+            addMacroReplacements(macPvt,pval);
+        } else if(strncmp(argv[1],"-S",2)==0) {
+            substitutionName = calloc(strlen(pval)+1,sizeof(char));
+            strcpy(substitutionName,pval);
+        } else if(strncmp(argv[1],"-V",2)==0) {
+            macSuppressWarning(macPvt,0);
+            opt_V = 1;
+            narg = 1; /* no argument for this option */
+        } else {
+            usageExit();
+        }
+        argc -= narg;
+        for(i=1; i<argc; i++) argv[i] = argv[i + narg];
+    }
+    if(argc>2) {
+        fprintf(stderr,"too many filename arguments\n");
+        usageExit();
+    }
+    if(argc==2) {
+        templateName = calloc(strlen(argv[1])+1,sizeof(char));
+        strcpy(templateName,argv[1]);
+    }
+    if(!substitutionName) {
+        makeSubstitutions(inputPvt,macPvt,templateName);
+    } else {
+        void *substitutePvt;
+        char *filename = 0;
+
+        substituteOpen(&substitutePvt,substitutionName);
+        while(substituteGetNextSet(substitutePvt,&filename)) {
+            if(templateName) filename = templateName;
+            if(!filename) {
+                fprintf(stderr,"no template file\n");
+                usageExit();
+            }
+            macPushScope(macPvt);
+            while((pval = substituteGetReplacements(substitutePvt))){
+                addMacroReplacements(macPvt,pval);
+                makeSubstitutions(inputPvt,macPvt,filename);
+            }
+            macPopScope(macPvt);
+        }
+        substituteDestruct(substitutePvt);
+    }
+    errlogFlush();
+    inputDestruct(inputPvt);
+    free((void *)templateName);
+    free((void *)substitutionName);
+    return(exitStatus);
+}
+
+void usageExit(void)
+{
+    fprintf(stderr,"usage: msi -V -opath -Ipath ... -Msub ... -Ssubfile template\n");
+    fprintf(stderr,"  Specifying path will replace the default '.'\n");
+    fprintf(stderr,"  stdin is used if template is not given\n");
+    exit(1);
+}
+
+static void addMacroReplacements(MAC_HANDLE *macPvt,char *pval)
+{
+    char **pairs;
+    long status;
+
+    status = macParseDefns(macPvt,pval,&pairs);
+    if(!status) {
+        fprintf(stderr,"Error macParseDefns error\n");
+        usageExit();
+    }
+    status = macInstallMacros(macPvt,pairs);
+    if(!status) {
+        fprintf(stderr,"Error macInstallMacros error\n");
+        usageExit();
+    }
+    free((void *)pairs);
+}
+
+typedef enum {cmdInclude,cmdSubstitute} cmdType;
+static const char *cmdNames[] = {"include","substitute"};
+static void makeSubstitutions(void *inputPvt,void *macPvt,char *templateName)
+{
+    char *input;
+    static char buffer[MAX_BUFFER_SIZE];
+    int n;
+    static int unexpWarned = 0;
+
+    inputBegin(inputPvt,templateName);
+    while((input = inputNextLine(inputPvt))) {
+        int expand=1;
+        char *p;
+        char *command = 0;
+
+        p = input;
+        /*skip whitespace at beginning of line*/
+        while(*p && (isspace(*p))) ++p;
+        /*Look for i or s */
+        if(*p && (*p=='i' || *p=='s')) command = p;
+        if(command) {
+            char *pstart;
+            char *pend;
+            char *copy;
+            int cmdind=-1;
+            int i;
+
+            for(i=0; i< NELEMENTS(cmdNames); i++) {
+                if(strstr(command,cmdNames[i])) {
+                    cmdind = i;
+                }
+            }
+            if(cmdind<0) goto endif;
+            p = command + strlen(cmdNames[cmdind]);
+            /*skip whitespace after command*/
+            while(*p && (isspace(*p))) ++p;
+            /*Next character must be quote*/
+            if((*p==0) || (*p!='"')) goto endif;
+            pstart = ++p;
+            /*Look for end quote*/
+            while(*p && (*p!='"')) {
+                /*allow escape for imbeded quote*/
+                if((*p=='\\') && *(p+1)=='"') {
+                    p += 2; continue;
+                } else {
+                    if(*p=='"') break;
+                }
+                ++p;
+            }
+            pend = p;
+            if(*p==0) goto endif;
+            /*skip quote and any trailing blanks*/
+            while(*++p==' ') ;
+            if(*p != '\n' && *p !=0) goto endif;
+            copy = calloc(pend-pstart+1,sizeof(char));
+            strncpy(copy,pstart,pend-pstart);
+            switch(cmdind) {
+                case cmdInclude:
+                    inputNewIncludeFile(inputPvt,copy);
+                    break;
+                case cmdSubstitute:
+                    addMacroReplacements(macPvt,copy);
+                    break;
+                default:
+                    fprintf(stderr,"Logic Error: makeSubstitutions\n");
+                    inputErrPrint(inputPvt);
+                    exit(1);
+            }
+            free(copy);
+            expand = 0;
+        }
+endif:
+        if (expand) {
+            n = macExpandString(macPvt,input,buffer,MAX_BUFFER_SIZE-1);
+            fputs(buffer,stdout);
+            if (!unexpWarned && n<0) {
+                const char * pErrMsg = "Warning: Undefined macros present, use msi -V to list\n";
+                if ( opt_V ) {
+                    exitStatus = 2;
+                    pErrMsg = "Error: Undefined macros present\n";
+                }
+                fprintf( stderr, pErrMsg );
+                unexpWarned++;
+            }
+        }
+    }
+}
+
+typedef struct inputFile{
+    ELLNODE node;
+    char *filename;
+    FILE *fp;
+    int lineNum;
+}inputFile;
+
+typedef struct pathNode {
+    ELLNODE node;
+    char *directory;
+} pathNode;
+
+typedef struct inputData {
+    ELLLIST inputFileList;
+    ELLLIST pathList;
+    char inputBuffer[MAX_BUFFER_SIZE];
+}inputData;
+
+static void inputOpenFile(inputData *pinputData,char *filename);
+static void inputCloseFile(inputData *pinputData);
+static void inputCloseAllFiles(inputData *pinputData);
+
+static void inputConstruct(void **ppvt)
+{
+    inputData *pinputData;
+
+    pinputData = calloc(1,sizeof(inputData));
+    ellInit(&pinputData->inputFileList);
+    ellInit(&pinputData->pathList);
+    *ppvt = pinputData;
+}
+
+static void inputDestruct(void *pvt)
+{
+    inputData *pinputData = (inputData *)pvt;
+    pathNode *ppathNode;
+
+    inputCloseAllFiles(pinputData);
+    while((ppathNode = (pathNode *)ellFirst(&pinputData->pathList))) {
+        ellDelete(&pinputData->pathList,&ppathNode->node);
+        free((void *)ppathNode->directory);
+        free((void *)ppathNode);
+    }
+    free(pvt);
+}
+
+static void inputAddPath(void *pvt, char *path)
+{
+    inputData *pinputData = (inputData *)pvt;
+    ELLLIST *ppathList = &pinputData->pathList;
+    pathNode *ppathNode;
+    const char *pcolon;
+    const char *pdir;
+    int len;
+    int emptyName;
+
+    pdir = path;
+    /*an empty name at beginning, middle, or end means current directory*/
+    while(pdir && *pdir) {
+        emptyName = ((*pdir == ':') ? 1 : 0);
+        if(emptyName) ++pdir;
+        ppathNode = (pathNode *)calloc(1,sizeof(pathNode));
+        ellAdd(ppathList,&ppathNode->node);
+        if(!emptyName) {
+            pcolon = strchr(pdir,':');
+            len = (pcolon ? (pcolon - pdir) : strlen(pdir));
+            if(len>0) {
+                ppathNode->directory = (char *)calloc(len+1,sizeof(char));
+                strncpy(ppathNode->directory,pdir,len);
+                pdir = pcolon;
+                /*unless at end skip past first colon*/
+                if(pdir && *(pdir+1)!=0) ++pdir;
+            } else { /*must have been trailing : */
+                emptyName=1;
+            }
+        }
+        if(emptyName) {
+            ppathNode->directory = (char *)calloc(2,sizeof(char));
+            strcpy(ppathNode->directory,".");
+        }
+    }
+    return;
+}
+
+static void inputBegin(void *pvt,char *fileName)
+{
+    inputData *pinputData = (inputData *)pvt;
+
+    inputCloseAllFiles(pinputData);
+    inputOpenFile(pinputData,fileName);
+}
+
+static char *inputNextLine(void *pvt)
+{
+    inputData *pinputData = (inputData *)pvt;
+    inputFile *pinputFile;
+    char *pline;
+
+    while((pinputFile = (inputFile *)ellFirst(&pinputData->inputFileList))) {
+        pline = fgets(pinputData->inputBuffer,MAX_BUFFER_SIZE,pinputFile->fp);
+        if(pline) {
+            ++pinputFile->lineNum;
+            return(pline);
+        }
+        inputCloseFile(pinputData);
+    }
+    return(0);
+}
+
+static void inputNewIncludeFile(void *pvt,char *name)
+{
+    inputData *pinputData = (inputData *)pvt;
+
+    inputOpenFile(pinputData,name);
+}
+
+static void inputErrPrint(void *pvt)
+{
+    inputData *pinputData = (inputData *)pvt;
+    inputFile *pinputFile;
+
+    fprintf(stderr,"input: %s which is ",pinputData->inputBuffer);
+    pinputFile = (inputFile *)ellFirst(&pinputData->inputFileList);
+    while(pinputFile) {
+        fprintf(stderr,"line %d of ",pinputFile->lineNum);
+        if(pinputFile->filename) {
+            fprintf(stderr," file %s\n",pinputFile->filename);
+        } else {
+            fprintf(stderr,"stdin:\n");
+        }
+        pinputFile = (inputFile *)ellNext(&pinputFile->node);
+        if(pinputFile) {
+            fprintf(stderr,"  which is included from ");
+        } else {
+            fprintf(stderr,"\n");
+        }
+    }
+    fprintf(stderr,"\n");
+}
+
+static void inputOpenFile(inputData *pinputData,char *filename)
+{
+    ELLLIST *ppathList = &pinputData->pathList;
+    pathNode *ppathNode = 0;
+    inputFile *pinputFile;
+    char *fullname = 0;
+    FILE *fp = 0;
+
+    if(!filename) {
+        fp = stdin;
+    } else if((ellCount(ppathList)==0) || strchr(filename,'/')){
+        fp = fopen(filename,"r");
+    } else {
+        ppathNode = (pathNode *)ellFirst(ppathList);
+        while(ppathNode) {
+            fullname = calloc(strlen(filename)+strlen(ppathNode->directory) +2,
+                sizeof(char));
+            strcpy(fullname,ppathNode->directory);
+            strcat(fullname,"/");
+            strcat(fullname,filename);
+            fp = fopen(fullname,"r");
+            if(fp) break;
+            free((void *)fullname);
+            ppathNode = (pathNode *)ellNext(&ppathNode->node);
+        }
+    }
+    if(!fp) {
+        fprintf(stderr,"Could not open %s\n",filename);
+        inputErrPrint((void *)pinputData);
+        exit(1);
+    }
+    pinputFile = calloc(1,sizeof(inputFile));
+    if(ppathNode) {
+        pinputFile->filename = calloc(1,strlen(fullname)+1);
+        strcpy(pinputFile->filename,fullname);
+        free((void *)fullname);
+    } else if(filename) {
+        pinputFile->filename = calloc(1,strlen(filename)+1);
+        strcpy(pinputFile->filename,filename);
+    } else {
+        pinputFile->filename = calloc(1,strlen("stdin")+1);
+        strcpy(pinputFile->filename,"stdin");
+    }
+    pinputFile->fp = fp;
+    ellInsert(&pinputData->inputFileList,0,&pinputFile->node);
+}
+
+static void inputCloseFile(inputData *pinputData)
+{
+    inputFile *pinputFile;
+
+    pinputFile = (inputFile *)ellFirst(&pinputData->inputFileList);
+    if(!pinputFile) return;
+    ellDelete(&pinputData->inputFileList,&pinputFile->node);
+    if(fclose(pinputFile->fp))
+        fprintf(stderr,"fclose failed: file %s\n",pinputFile->filename);
+    free(pinputFile->filename);
+    free(pinputFile);
+}
+
+static void inputCloseAllFiles(inputData *pinputData)
+{
+    inputFile *pinputFile;
+
+    while((pinputFile=(inputFile *)ellFirst(&pinputData->inputFileList))){
+        inputCloseFile(pinputData);
+    }
+}
+
+/*start of code that handles substitution file*/
+typedef enum {
+    tokenLBrace,tokenRBrace,tokenSeparater,tokenString,tokenEOF
+}tokenType;
+
+typedef struct subFile {
+    char *substitutionName;
+    FILE *fp;
+    int lineNum;
+    char inputBuffer[MAX_BUFFER_SIZE];
+    char *pnextChar;
+    tokenType token;
+    char string[MAX_BUFFER_SIZE];
+} subFile;
+
+typedef struct patternNode {
+    ELLNODE node;
+    char *var;
+}patternNode;
+
+typedef struct subInfo {
+    subFile *psubFile;
+    int isFile;
+    char *filename;
+    int isPattern;
+    ELLLIST patternList;
+    size_t size;
+    size_t curLength;
+    char *macroReplacements;
+}subInfo;
+
+static char *subGetNextLine(subFile *psubFile);
+static tokenType subGetNextToken(subFile *psubFile);
+static void subFileErrPrint(subFile *psubFile,char * message);
+static void freeSubFile(subInfo *psubInfo);
+static void freePattern(subInfo *psubInfo);
+static void catMacroReplacements(subInfo *psubInfo,const char *value);
+
+void freeSubFile(subInfo *psubInfo)
+{
+    subFile *psubFile = psubInfo->psubFile;
+    if(psubFile->fp) {
+        if(fclose(psubFile->fp))
+            fprintf(stderr,"fclose failed on substitution file\n");
+    }
+    free((void *)psubFile);
+    free((void *)psubInfo->filename);
+    psubInfo->psubFile = 0;
+}
+
+void freePattern(subInfo *psubInfo)
+{
+    patternNode *ppatternNode;
+    while((ppatternNode = (patternNode *)ellFirst(&psubInfo->patternList))) {
+        ellDelete(&psubInfo->patternList,&ppatternNode->node);
+        free(ppatternNode->var);
+        free(ppatternNode);
+    }
+    psubInfo->isPattern = 0;
+}
+
+static void substituteDestruct(void *pvt)
+{
+    subInfo *psubInfo = (subInfo *)pvt;
+
+    freeSubFile(psubInfo);
+    freePattern(psubInfo);
+    free((void *)psubInfo);
+    return;
+}
+
+static void substituteOpen(void **ppvt,char *substitutionName)
+{
+    subInfo *psubInfo;
+    subFile *psubFile;
+    FILE *fp;
+
+    psubInfo = calloc(1,sizeof(subInfo));
+    *ppvt = (void *)psubInfo;
+    psubFile = calloc(1,sizeof(subFile));
+    psubInfo->psubFile = psubFile;
+    ellInit(&psubInfo->patternList);
+    fp = fopen(substitutionName,"r");
+    if(!fp) {
+        fprintf(stderr,"Could not open %s\n",substitutionName);
+        exit(1);
+    }
+    psubFile->substitutionName = substitutionName;
+    psubFile->fp = fp;
+    psubFile->lineNum = 0;
+    psubFile->inputBuffer[0] = 0;
+    psubFile->pnextChar = &psubFile->inputBuffer[0];
+    subGetNextToken(psubFile);
+    return;
+}
+
+static int substituteGetNextSet(void *pvt,char **filename)
+{
+    subInfo *psubInfo = (subInfo *)pvt;
+    subFile *psubFile = psubInfo->psubFile;
+    patternNode *ppatternNode;
+
+    *filename = 0;
+    while(psubFile->token==tokenSeparater) subGetNextToken(psubFile);
+    if(psubFile->token==tokenEOF) return(0);
+    if(psubFile->token==tokenString && strcmp(psubFile->string,"file")==0) {
+        psubInfo->isFile = 1;
+        if(subGetNextToken(psubFile)!=tokenString) {
+            subFileErrPrint(psubFile,"Expecting filename");
+            exit(1);
+        }
+        freePattern(psubInfo);
+        free((void *)psubInfo->filename);
+        if(psubFile->string[0]=='"'&&psubFile->string[strlen(psubFile->string)-1]=='"') {
+            psubFile->string[strlen(psubFile->string)-1]='\0';
+            psubInfo->filename = macEnvExpand(psubFile->string+1);
+        }
+        else {
+            psubInfo->filename = macEnvExpand(psubFile->string);
+        }
+        while(subGetNextToken(psubFile)==tokenSeparater);
+        if(psubFile->token!=tokenLBrace) {
+            subFileErrPrint(psubFile,"Expecting {");
+            exit(1);
+        }
+        subGetNextToken(psubFile);
+    }
+    *filename = psubInfo->filename;
+    while(psubFile->token==tokenSeparater) subGetNextToken(psubFile);
+    if(psubFile->token==tokenLBrace) return(1);
+    if(psubFile->token==tokenRBrace) return(0);
+    if(psubFile->token!=tokenString
+    || strcmp(psubFile->string,"pattern")!=0) {
+        subFileErrPrint(psubFile,"Expecting pattern");
+        exit(1);
+    }
+    freePattern(psubInfo);
+    psubInfo->isPattern = 1;
+    while(subGetNextToken(psubFile)==tokenSeparater);
+    if(psubFile->token!=tokenLBrace) {
+        subFileErrPrint(psubFile,"Expecting {");
+        exit(1);
+    }
+    while(1) {
+        while(subGetNextToken(psubFile)==tokenSeparater);
+        if(psubFile->token!=tokenString) break;
+        ppatternNode = calloc(1,sizeof(patternNode));
+        ellAdd(&psubInfo->patternList,&ppatternNode->node);
+        ppatternNode->var = calloc(strlen(psubFile->string)+1,sizeof(char));
+        strcpy(ppatternNode->var,psubFile->string);
+    }
+    if(psubFile->token!=tokenRBrace) {
+        subFileErrPrint(psubFile,"Expecting }");
+        exit(1);
+    }
+    subGetNextToken(psubFile);
+    return(1);
+}
+
||||
+static char *substituteGetReplacements(void *pvt)
|
||||
+{
|
||||
+ subInfo *psubInfo = (subInfo *)pvt;
|
||||
+ subFile *psubFile = psubInfo->psubFile;
|
||||
+ patternNode *ppatternNode;
|
||||
+
|
||||
+ if(psubInfo->macroReplacements) psubInfo->macroReplacements[0] = 0;
|
||||
+ psubInfo->curLength = 0;
|
||||
+ while(psubFile->token==tokenSeparater) subGetNextToken(psubFile);
|
||||
+ if(psubFile->token==tokenRBrace && psubInfo->isFile) {
|
||||
+ psubInfo->isFile = 0;
|
||||
+ free((void *)psubInfo->filename);
|
||||
+ psubInfo->filename = 0;
|
||||
+ freePattern(psubInfo);
|
||||
+ subGetNextToken(psubFile);
|
||||
+ return(0);
|
||||
+ }
|
||||
+ if(psubFile->token==tokenEOF) return(0);
|
||||
+ if(psubFile->token!=tokenLBrace) return(0);
|
||||
+ if(psubInfo->isPattern) {
|
||||
+ int gotFirstPattern = 0;
|
||||
+
|
||||
+ while(subGetNextToken(psubFile)==tokenSeparater);
|
||||
+ ppatternNode = (patternNode *)ellFirst(&psubInfo->patternList);
|
||||
+ while(1) {
|
||||
+ if(psubFile->token==tokenRBrace) {
|
||||
+ if(ppatternNode)
|
||||
+ subFileErrPrint(psubFile,"less values than patterns");
|
||||
+ subGetNextToken(psubFile);
|
||||
+ return(psubInfo->macroReplacements);
|
||||
+ }
|
||||
+ if(psubFile->token!=tokenString) {
|
||||
+ subFileErrPrint(psubFile,"Illegal token");
|
||||
+ exit(-1);
|
||||
+ }
|
||||
+ if(gotFirstPattern) catMacroReplacements(psubInfo,",");
|
||||
+ gotFirstPattern = 1;
|
||||
+ if(ppatternNode) {
|
||||
+ catMacroReplacements(psubInfo,ppatternNode->var);
|
||||
+ catMacroReplacements(psubInfo,"=");
|
||||
+ catMacroReplacements(psubInfo,psubFile->string);
|
||||
+ ppatternNode = (patternNode *)ellNext(&ppatternNode->node);
|
||||
+ } else {
|
||||
+ subFileErrPrint(psubFile,"more values than patterns");
|
||||
+ }
|
||||
+ while(subGetNextToken(psubFile)==tokenSeparater);
|
||||
+ }
|
||||
+ } else while(1) {
|
||||
+ switch(subGetNextToken(psubFile)) {
|
||||
+ case tokenRBrace:
|
||||
+ subGetNextToken(psubFile);
|
||||
+ return(psubInfo->macroReplacements);
|
||||
+ case tokenSeparater:
|
||||
+ catMacroReplacements(psubInfo,",");
|
||||
+ break;
|
||||
+ case tokenString:
|
||||
+ catMacroReplacements(psubInfo,psubFile->string);
|
||||
+ break;
|
||||
+ default:
|
||||
+ subFileErrPrint(psubFile,"Illegal token");
|
||||
+ exit(1);
|
||||
+ }
|
||||
+ }
|
||||
+}
|
||||
+
|
||||
+static char *subGetNextLine(subFile *psubFile)
|
||||
+{
|
||||
+ char *pline;
|
||||
+
|
||||
+ pline = fgets(psubFile->inputBuffer,MAX_BUFFER_SIZE,psubFile->fp);
|
||||
+ ++psubFile->lineNum;
|
||||
+ while(pline && psubFile->inputBuffer[0]=='#') {
|
||||
+ pline = fgets(psubFile->inputBuffer,MAX_BUFFER_SIZE,psubFile->fp);
|
||||
+ ++psubFile->lineNum;
|
||||
+ }
|
||||
+ if(!pline) {
|
||||
+ psubFile->token = tokenEOF;
|
||||
+ psubFile->inputBuffer[0] = 0;
|
||||
+ psubFile->pnextChar = 0;
|
||||
+ return(0);
|
||||
+ }
|
||||
+ psubFile->pnextChar = &psubFile->inputBuffer[0];
|
||||
+ return(&psubFile->inputBuffer[0]);
|
||||
+}
|
||||
+
|
||||
+static void subFileErrPrint(subFile *psubFile,char * message)
|
||||
+{
|
||||
+ fprintf(stderr,"substitution file %s line %d: %s",
|
||||
+ psubFile->substitutionName,
|
||||
+ psubFile->lineNum,psubFile->inputBuffer);
|
||||
+ fprintf(stderr,"%s\n",message);
|
||||
+}
|
||||
+
|
||||
+
|
||||
+static tokenType subGetNextToken(subFile *psubFile)
|
||||
+{
|
||||
+ char *p;
|
||||
+ char *pto;
|
||||
+
|
||||
+ p = psubFile->pnextChar;
|
||||
+ if(!p) { psubFile->token = tokenEOF; return(tokenEOF);}
|
||||
+ if(*p==0 || *p=='\n' || *p=='#') {
|
||||
+ p = subGetNextLine(psubFile);
|
||||
+ if(!p) { psubFile->token = tokenEOF; return(tokenEOF);}
|
||||
+ else { psubFile->token = tokenSeparater; return(tokenSeparater);}
|
||||
+ }
|
||||
+ while(isspace(*p)) p++;
|
||||
+ if(*p=='{') {
|
||||
+ psubFile->token = tokenLBrace;
|
||||
+ psubFile->pnextChar = ++p;
|
||||
+ return(tokenLBrace);
|
||||
+ }
|
||||
+ if(*p=='}') {
|
||||
+ psubFile->token = tokenRBrace;
|
||||
+ psubFile->pnextChar = ++p;
|
||||
+ return(tokenRBrace);
|
||||
+ }
|
||||
+ if(*p==0 || isspace(*p) || *p==',') {
|
||||
+ while(isspace(*p) || *p==',') p++;
|
||||
+ psubFile->token = tokenSeparater;
|
||||
+ psubFile->pnextChar = p;
|
||||
+ return(tokenSeparater);
|
||||
+ }
|
||||
+ /*now handle quoted strings*/
|
||||
+ if(*p=='"') {
|
||||
+ pto = &psubFile->string[0];
|
||||
+ *pto++ = *p++;
|
||||
+ while(*p!='"') {
|
||||
+ if(*p==0 || *p=='\n') {
|
||||
+ subFileErrPrint(psubFile,"Strings must be on a single line\n");
|
||||
+ exit(1);
|
||||
+ }
|
||||
+ /*allow escape for embedded quote*/
|
||||
+ if((*p=='\\') && *(p+1)=='"') {
|
||||
+ *pto++ = *p++;
|
||||
+ *pto++ = *p++;
|
||||
+ continue;
|
||||
+ }
|
||||
+ *pto++ = *p++;
|
||||
+ }
|
||||
+ *pto++ = *p++;
|
||||
+ psubFile->pnextChar = p;
|
||||
+ *pto = 0;
|
||||
+ psubFile->token = tokenString;
|
||||
+ return(tokenString);
|
||||
+ }
|
||||
+ /*Now take anything up to the next non-string token or whitespace*/
|
||||
+ pto = &psubFile->string[0];
|
||||
+ while(!isspace(*p) && (strspn(p,"\",{}")==0)) *pto++ = *p++;
|
||||
+ *pto = 0;
|
||||
+ psubFile->pnextChar = p;
|
||||
+ psubFile->token = tokenString;
|
||||
+ return(tokenString);
|
||||
+}
|
||||
+
|
||||
+static void catMacroReplacements(subInfo *psubInfo,const char *value)
|
||||
+{
|
||||
+ size_t len = strlen(value);
|
||||
+
|
||||
+ if(psubInfo->size <= (psubInfo->curLength + len)) {
|
||||
+ size_t newsize = psubInfo->size + MAX_BUFFER_SIZE;
|
||||
+ char *newbuf;
|
||||
+
|
||||
+ if(newsize <= psubInfo->curLength + len)
|
||||
+ newsize = psubInfo->curLength + len + 1;
|
||||
+ newbuf = calloc(1,newsize);
|
||||
+ if(!newbuf) {
|
||||
+ fprintf(stderr,"calloc failed for size %Zu\n",newsize);
|
||||
+ exit(1);
|
||||
+ }
|
||||
+ if(psubInfo->macroReplacements) {
|
||||
+ memcpy(newbuf,psubInfo->macroReplacements,psubInfo->curLength);
|
||||
+ free(psubInfo->macroReplacements);
|
||||
+ }
|
||||
+ psubInfo->size = newsize;
|
||||
+ psubInfo->macroReplacements = newbuf;
|
||||
+ }
|
||||
+ strcat(psubInfo->macroReplacements,value);
|
||||
+ psubInfo->curLength += len;
|
||||
+}
|
||||
appveyor-test.py (new file, 437 lines)
@@ -0,0 +1,437 @@
|
||||
#!/usr/bin/env python
|
||||
"""Module ci-scripts AppVeyor unit tests
|
||||
"""
|
||||
|
||||
# SET=test00 in the environment (.appveyor.yml) runs the tests in this script
|
||||
# all other jobs are started as compile jobs
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import sys, os, shutil, fileinput
|
||||
import distutils.util
|
||||
import re
|
||||
import subprocess as sp
|
||||
import unittest
|
||||
import logging
|
||||
from argparse import Namespace
|
||||
|
||||
builddir = os.getcwd()
|
||||
|
||||
def find_in_file(regex, filename):
|
||||
file = open (filename, "r")
|
||||
for line in file:
|
||||
if re.search(regex, line):
|
||||
return True
|
||||
return False
|
||||
|
||||
def getStringIO():
|
||||
if (sys.version_info > (3, 0)):
|
||||
import io
|
||||
return io.StringIO()
|
||||
else:
|
||||
import StringIO
|
||||
return StringIO.StringIO()
|
||||
|
||||
sys.path.append('appveyor')
|
||||
import do
|
||||
|
||||
# we're working with tags (detached heads) a lot: suppress advice
|
||||
do.call_git(['config', '--global', 'advice.detachedHead', 'false'])
|
||||
|
||||
class TestSourceSet(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
os.environ['SETUP_PATH'] = '.:appveyor'
|
||||
if 'BASE' in os.environ:
|
||||
del os.environ['BASE']
|
||||
do.clear_lists()
|
||||
os.chdir(builddir)
|
||||
|
||||
def test_EmptySetupDirsPath(self):
|
||||
del os.environ['SETUP_PATH']
|
||||
self.assertRaisesRegexp(NameError, '\(SETUP_PATH\) is empty', do.source_set, 'test01')
|
||||
|
||||
def test_InvalidSetupName(self):
|
||||
self.assertRaisesRegexp(NameError, 'does not exist in SETUP_PATH', do.source_set, 'xxdoesnotexistxx')
|
||||
|
||||
def test_ValidSetupName(self):
|
||||
capturedOutput = getStringIO()
|
||||
sys.stdout = capturedOutput
|
||||
do.source_set('test01')
|
||||
sys.stdout = sys.__stdout__
|
||||
self.assertEqual(do.setup['BASE'], '7.0', 'BASE was not set to \'7.0\'')
|
||||
|
||||
def test_SetupDoesNotOverridePreset(self):
|
||||
os.environ['BASE'] = 'foo'
|
||||
capturedOutput = getStringIO()
|
||||
sys.stdout = capturedOutput
|
||||
do.source_set('test01')
|
||||
sys.stdout = sys.__stdout__
|
||||
self.assertEqual(do.setup['BASE'], 'foo',
|
||||
'Preset BASE was overridden by test01 setup (expected \'foo\' got {0})'
|
||||
.format(do.setup['BASE']))
|
||||
|
||||
def test_IncludeSetupFirstSetWins(self):
|
||||
capturedOutput = getStringIO()
|
||||
sys.stdout = capturedOutput
|
||||
do.source_set('test02')
|
||||
sys.stdout = sys.__stdout__
|
||||
self.assertEqual(do.setup['BASE'], 'foo',
|
||||
'BASE set in test02 was overridden by test01 setup (expected \'foo\' got {0})'
|
||||
.format(do.setup['BASE']))
|
||||
self.assertEqual(do.setup['FOO'], 'bar', 'Setting of single word does not work')
|
||||
self.assertEqual(do.setup['FOO2'], 'bar bar2', 'Setting of multiple words does not work')
|
||||
self.assertEqual(do.setup['FOO3'], 'bar bar2', 'Indented setting of multiple words does not work')
|
||||
self.assertEqual(do.setup['SNCSEQ'], 'R2-2-7', 'Setup test01 was not included')
|
||||
|
||||
def test_DoubleIncludeGetsIgnored(self):
|
||||
capturedOutput = getStringIO()
|
||||
sys.stdout = capturedOutput
|
||||
do.source_set('test03')
|
||||
sys.stdout = sys.__stdout__
|
||||
self.assertRegexpMatches(capturedOutput.getvalue(), 'Ignoring already included setup file')
|
||||
|
||||
class TestUpdateReleaseLocal(unittest.TestCase):
|
||||
|
||||
release_local = os.path.join(do.cachedir, 'RELEASE.local')
|
||||
|
||||
def setUp(self):
|
||||
if os.path.exists(self.release_local):
|
||||
os.remove(self.release_local)
|
||||
os.chdir(builddir)
|
||||
|
||||
def test_SetModule(self):
|
||||
do.update_release_local('MOD1', '/foo/bar')
|
||||
found = 0
|
||||
for line in fileinput.input(self.release_local, inplace=1):
|
||||
if 'MOD1=' in line:
|
||||
self.assertEqual(line.strip(), 'MOD1=/foo/bar', 'MOD1 not set correctly')
|
||||
found += 1
|
||||
fileinput.close()
|
||||
self.assertEqual(found, 1, 'MOD1 not written once to RELEASE.local (found {0})'.format(found))
|
||||
|
||||
def test_SetBaseAndMultipleModules(self):
|
||||
do.update_release_local('EPICS_BASE', '/bar/foo')
|
||||
do.update_release_local('MOD1', '/foo/bar')
|
||||
do.update_release_local('MOD2', '/foo/bar2')
|
||||
do.update_release_local('MOD1', '/foo/bar1')
|
||||
found = {}
|
||||
foundat = {}
|
||||
for line in fileinput.input(self.release_local, inplace=1):
|
||||
if 'MOD1=' in line:
|
||||
self.assertEqual(line.strip(), 'MOD1=/foo/bar1',
|
||||
'MOD1 not set correctly (expected \'MOD1=/foo/bar1\' found \'{0}\')'
|
||||
.format(line))
|
||||
if 'mod1' in found:
|
||||
found['mod1'] += 1
|
||||
else:
|
||||
found['mod1'] = 1
|
||||
foundat['mod1'] = fileinput.filelineno()
|
||||
if 'MOD2=' in line:
|
||||
self.assertEqual(line.strip(), 'MOD2=/foo/bar2',
|
||||
'MOD2 not set correctly (expected \'MOD2=/foo/bar2\' found \'{0}\')'
|
||||
.format(line))
|
||||
if 'mod2' in found:
|
||||
found['mod2'] += 1
|
||||
else:
|
||||
found['mod2'] = 1
|
||||
foundat['mod2'] = fileinput.filelineno()
|
||||
if 'EPICS_BASE=' in line:
|
||||
self.assertEqual(line.strip(), 'EPICS_BASE=/bar/foo',
|
||||
'EPICS_BASE not set correctly (expected \'EPICS_BASE=/bar/foo\' found \'{0}\')'
|
||||
.format(line))
|
||||
if 'base' in found:
|
||||
found['base'] += 1
|
||||
else:
|
||||
found['base'] = 1
|
||||
foundat['base'] = fileinput.filelineno()
|
||||
fileinput.close()
|
||||
self.assertEqual(found['mod1'], 1,
|
||||
'MOD1 does not appear once in RELEASE.local (found {0})'.format(found['mod1']))
|
||||
self.assertEqual(found['mod2'], 1,
|
||||
'MOD2 does not appear once in RELEASE.local (found {0})'.format(found['mod2']))
|
||||
self.assertEqual(found['base'], 1,
|
||||
'EPICS_BASE does not appear once in RELEASE.local (found {0})'.format(found['base']))
|
||||
self.assertGreater(foundat['base'], foundat['mod2'],
|
||||
'EPICS_BASE (line {0}) appears before MOD2 (line {1})'
|
||||
.format(foundat['base'], foundat['mod2']))
|
||||
self.assertGreater(foundat['mod2'], foundat['mod1'],
|
||||
'MOD2 (line {0}) appears before MOD1 (line {1})'.format(foundat['mod2'], foundat['mod1']))
|
||||
|
||||
class TestAddDependencyUpToDateCheck(unittest.TestCase):
|
||||
|
||||
hash_3_15_6 = "ce7943fb44beb22b453ddcc0bda5398fadf72096"
|
||||
location = os.path.join(do.cachedir, 'base-R3.15.6')
|
||||
licensefile = os.path.join(location, 'LICENSE')
|
||||
checked_file = os.path.join(location, 'checked_out')
|
||||
release_file = os.path.join(location, 'configure', 'RELEASE')
|
||||
|
||||
def setUp(self):
|
||||
os.environ['SETUP_PATH'] = '.:appveyor'
|
||||
if os.path.exists(self.location):
|
||||
shutil.rmtree(self.location, onerror=do.remove_readonly)
|
||||
do.clear_lists()
|
||||
os.chdir(builddir)
|
||||
do.source_set('defaults')
|
||||
do.complete_setup('BASE')
|
||||
|
||||
def test_MissingDependency(self):
|
||||
do.setup['BASE'] = 'R3.15.6'
|
||||
do.add_dependency('BASE')
|
||||
self.assertTrue(os.path.exists(self.licensefile), 'Missing dependency was not checked out')
|
||||
self.assertTrue(os.path.exists(self.checked_file), 'Checked-out commit marker was not written')
|
||||
with open(self.checked_file, 'r') as bfile:
|
||||
checked_out = bfile.read().strip()
|
||||
bfile.close()
|
||||
self.assertEqual(checked_out, self.hash_3_15_6,
|
||||
'Wrong commit of dependency checked out (expected=\"{0}\" found=\"{1}\")'
|
||||
.format(self.hash_3_15_6, checked_out))
|
||||
self.assertFalse(find_in_file('include \$\(TOP\)/../RELEASE.local', self.release_file),
|
||||
'RELEASE in Base includes TOP/../RELEASE.local')
|
||||
|
||||
def test_UpToDateDependency(self):
|
||||
do.setup['BASE'] = 'R3.15.6'
|
||||
do.add_dependency('BASE')
|
||||
os.remove(self.licensefile)
|
||||
do.add_dependency('BASE')
|
||||
self.assertFalse(os.path.exists(self.licensefile), 'Check out on top of existing up-to-date dependency')
|
||||
|
||||
def test_OutdatedDependency(self):
|
||||
do.setup['BASE'] = 'R3.15.6'
|
||||
do.add_dependency('BASE')
|
||||
os.remove(self.licensefile)
|
||||
with open(self.checked_file, "w") as fout:
|
||||
print('XXX not the right hash XXX', file=fout)
|
||||
fout.close()
|
||||
do.add_dependency('BASE')
|
||||
self.assertTrue(os.path.exists(self.licensefile), 'No check-out on top of out-of-date dependency')
|
||||
with open(self.checked_file, 'r') as bfile:
|
||||
checked_out = bfile.read().strip()
|
||||
bfile.close()
|
||||
self.assertEqual(checked_out, self.hash_3_15_6,
|
||||
"Wrong commit of dependency checked out (expected='{0}' found='{1}')"
|
||||
.format(self.hash_3_15_6, checked_out))
|
||||
|
||||
def is_shallow_repo(place):
|
||||
check = sp.check_output(['git', 'rev-parse', '--is-shallow-repository'], cwd=place).strip()
|
||||
if check == '--is-shallow-repository':
|
||||
if os.path.exists(os.path.join(place, '.git', 'shallow')):
|
||||
check = 'true'
|
||||
else:
|
||||
check = 'false'
|
||||
return check == 'true'
|
||||
|
||||
class TestAddDependencyOptions(unittest.TestCase):
|
||||
|
||||
location = os.path.join(do.cachedir, 'mcoreutils-master')
|
||||
testfile = os.path.join(location, '.ci', 'LICENSE')
|
||||
|
||||
def setUp(self):
|
||||
os.environ['SETUP_PATH'] = '.:appveyor'
|
||||
if os.path.exists(do.cachedir):
|
||||
shutil.rmtree(do.cachedir, onerror=do.remove_readonly)
|
||||
do.clear_lists()
|
||||
do.source_set('defaults')
|
||||
do.complete_setup('MCoreUtils')
|
||||
do.setup['MCoreUtils'] = 'master'
|
||||
|
||||
def test_Default(self):
|
||||
do.add_dependency('MCoreUtils')
|
||||
self.assertTrue(os.path.exists(self.testfile),
|
||||
'Submodule (.ci) not checked out recursively (requested: default=YES)')
|
||||
self.assertTrue(is_shallow_repo(self.location),
|
||||
'Module not checked out shallow (requested: default=5)')
|
||||
|
||||
def test_SetRecursiveNo(self):
|
||||
do.setup['MCoreUtils_RECURSIVE'] = 'NO'
|
||||
do.add_dependency('MCoreUtils')
|
||||
self.assertFalse(os.path.exists(self.testfile), 'Submodule (.ci) checked out recursively')
|
||||
|
||||
def test_SetDepthZero(self):
|
||||
do.setup['MCoreUtils_DEPTH'] = '0'
|
||||
do.add_dependency('MCoreUtils')
|
||||
self.assertFalse(is_shallow_repo(self.location), 'Module checked out shallow (requested full)')
|
||||
|
||||
def test_SetDepthThree(self):
|
||||
do.setup['MCoreUtils_DEPTH'] = '3'
|
||||
do.add_dependency('MCoreUtils')
|
||||
self.assertTrue(is_shallow_repo(self.location),
|
||||
'Module not checked out shallow (requested: default=5)')
|
||||
|
||||
def test_AddMsiTo314(self):
|
||||
do.complete_setup('BASE')
|
||||
do.setup['BASE'] = 'R3.14.12.1'
|
||||
msifile = os.path.join(do.cachedir, 'base-R3.14.12.1', 'src', 'dbtools', 'msi.c')
|
||||
do.add_dependency('BASE')
|
||||
self.assertTrue(os.path.exists(msifile), 'MSI was not added to Base 3.14')
|
||||
|
||||
def repo_access(dep):
|
||||
do.set_setup_from_env(dep)
|
||||
do.setup.setdefault(dep + "_DIRNAME", dep.lower())
|
||||
do.setup.setdefault(dep + "_REPONAME", dep.lower())
|
||||
do.setup.setdefault('REPOOWNER', 'epics-modules')
|
||||
do.setup.setdefault(dep + "_REPOOWNER", do.setup['REPOOWNER'])
|
||||
do.setup.setdefault(dep + "_REPOURL", 'https://github.com/{0}/{1}.git'
|
||||
.format(do.setup[dep + '_REPOOWNER'], do.setup[dep + '_REPONAME']))
|
||||
with open(os.devnull, 'w') as devnull:
|
||||
return do.call_git(['ls-remote', '--quiet', '--heads', do.setup[dep + '_REPOURL']],
|
||||
stdout=devnull, stderr=devnull)
|
||||
|
||||
class TestDefaultModuleURLs(unittest.TestCase):
|
||||
|
||||
modules = ['BASE', 'PVDATA', 'PVACCESS', 'NTYPES',
|
||||
'SNCSEQ', 'STREAM', 'ASYN', 'STD',
|
||||
'CALC', 'AUTOSAVE', 'BUSY', 'SSCAN',
|
||||
'IOCSTATS', 'MOTOR', 'IPAC', ]
|
||||
|
||||
def setUp(self):
|
||||
os.environ['SETUP_PATH'] = '.:appveyor'
|
||||
do.clear_lists()
|
||||
os.chdir(builddir)
|
||||
do.source_set('defaults')
|
||||
|
||||
def test_Repos(self):
|
||||
for mod in self.modules:
|
||||
self.assertEqual(repo_access(mod), 0, 'Defaults for {0} do not point to a valid git repository at {1}'
|
||||
.format(mod, do.setup[mod + '_REPOURL']))
|
||||
|
||||
class TestVCVars(unittest.TestCase):
|
||||
def test_vcvars(self):
|
||||
if ('CMP' in os.environ and os.environ['CMP'] in ('mingw',)) \
|
||||
or distutils.util.get_platform() != "win32":
|
||||
raise unittest.SkipTest()
|
||||
|
||||
do.with_vcvars('env')
|
||||
|
||||
class TestSetupForBuild(unittest.TestCase):
|
||||
configuration = os.environ['CONFIGURATION']
|
||||
platform = os.environ['PLATFORM']
|
||||
cc = os.environ['CMP']
|
||||
args = Namespace(paths=[])
|
||||
do.building_base = True
|
||||
|
||||
def setUp(self):
|
||||
os.environ.pop('EPICS_HOST_ARCH', None)
|
||||
do.clear_lists()
|
||||
|
||||
def tearDown(self):
|
||||
os.environ['CONFIGURATION'] = self.configuration
|
||||
os.environ['PLATFORM'] = self.platform
|
||||
os.environ['CMP'] = self.cc
|
||||
|
||||
def test_AddPathsOption(self):
|
||||
os.environ['FOOBAR'] = 'BAR'
|
||||
args = Namespace(paths=['/my/{FOOBAR}/dir', '/my/foobar'])
|
||||
do.setup_for_build(args)
|
||||
self.assertTrue(re.search('/my/BAR/dir', os.environ['PATH']), 'Expanded path not in PATH')
|
||||
self.assertTrue(re.search('/foobar', os.environ['PATH']), 'Plain path not in PATH')
|
||||
os.environ.pop('FOOBAR', None)
|
||||
|
||||
def test_HostArchConfiguration(self):
|
||||
for config in ['dynamic', 'dynamic-debug', 'static', 'static-debug']:
|
||||
os.environ['CONFIGURATION'] = config
|
||||
do.setup_for_build(self.args)
|
||||
self.assertTrue('EPICS_HOST_ARCH' in os.environ,
|
||||
'EPICS_HOST_ARCH is not set for Configuration={0}'.format(config))
|
||||
if re.search('static', config):
|
||||
self.assertTrue(re.search('-static$', os.environ['EPICS_HOST_ARCH']),
|
||||
'EPICS_HOST_ARCH is not -static for Configuration={0}'.format(config))
|
||||
self.assertFalse(re.search('debug', os.environ['EPICS_HOST_ARCH']),
|
||||
'EPICS_HOST_ARCH is -debug for Configuration={0}'.format(config))
|
||||
elif re.search('debug', config):
|
||||
self.assertFalse(re.search('static', os.environ['EPICS_HOST_ARCH']),
|
||||
'EPICS_HOST_ARCH is -static for Configuration={0}'.format(config))
|
||||
self.assertTrue(re.search('-debug$', os.environ['EPICS_HOST_ARCH']),
|
||||
'EPICS_HOST_ARCH is not -debug for Configuration={0}'.format(config))
|
||||
else:
|
||||
self.assertFalse(re.search('static', os.environ['EPICS_HOST_ARCH']),
|
||||
'EPICS_HOST_ARCH is -static for Configuration={0}'.format(config))
|
||||
self.assertFalse(re.search('debug', os.environ['EPICS_HOST_ARCH']),
|
||||
'EPICS_HOST_ARCH is -debug for Configuration={0}'.format(config))
|
||||
|
||||
def test_HostArchPlatform(self):
|
||||
for platform in ['x86', 'x64', 'X64']:
|
||||
for cc in ['vs2019', 'mingw']:
|
||||
os.environ['PLATFORM'] = platform
|
||||
os.environ['CMP'] = cc
|
||||
os.environ['CONFIGURATION'] = 'dynamic'
|
||||
do.setup_for_build(self.args)
|
||||
self.assertTrue('EPICS_HOST_ARCH' in os.environ,
|
||||
'EPICS_HOST_ARCH is not set for {0} / {1}'.format(cc, platform))
|
||||
if platform == 'x86':
|
||||
self.assertTrue(re.search('^win32-x86', os.environ['EPICS_HOST_ARCH']),
|
||||
'EPICS_HOST_ARCH is not win32-x86 for {0} / {1}'.format(cc, platform))
|
||||
else:
|
||||
self.assertTrue(re.search('^windows-x64', os.environ['EPICS_HOST_ARCH']),
|
||||
'EPICS_HOST_ARCH is not windows-x64 for {0} / {1}'.format(cc, platform))
|
||||
if cc == 'mingw':
|
||||
self.assertTrue(re.search('-mingw$', os.environ['EPICS_HOST_ARCH']),
|
||||
'EPICS_HOST_ARCH is not -mingw for {0} / {1}'.format(cc, platform))
|
||||
if platform == 'x86':
|
||||
pattern = 'mingw32'
|
||||
else:
|
||||
pattern = 'mingw64'
|
||||
self.assertTrue(re.search(pattern, os.environ['PATH']),
|
||||
'Binary location for {0} not in PATH'.format(pattern))
|
||||
self.assertTrue(re.search(pattern, os.environ['INCLUDE']),
|
||||
'Include location for {0} not in INCLUDE'.format(pattern))
|
||||
|
||||
def test_StrawberryInPath(self):
|
||||
os.environ['CMP'] = 'vs2019'
|
||||
do.setup_for_build(self.args)
|
||||
self.assertTrue(re.search('strawberry', os.environ['PATH'], flags=re.IGNORECASE),
|
||||
'Strawberry Perl location not in PATH for vs2019')
|
||||
|
||||
def setBase314(self, yesno):
|
||||
cfg_base_version = os.path.join('configure', 'CONFIG_BASE_VERSION')
|
||||
fout = open(cfg_base_version, 'w')
|
||||
print('# test file for base version detection', file=fout)
|
||||
print('BASE_3_14={0}'.format(yesno), file=fout)
|
||||
fout.close()
|
||||
|
||||
def setTestResultsTarget(self, target):
|
||||
rules_build = os.path.join('configure', 'RULES_BUILD')
|
||||
fout = open(rules_build, 'w')
|
||||
print('# test file for target detection', file=fout)
|
||||
print('{0}: something'.format(target), file=fout)
|
||||
fout.close()
|
||||
|
||||
def test_DetectionBase314No(self):
|
||||
self.setBase314('NO')
|
||||
do.setup_for_build(self.args)
|
||||
self.assertFalse(do.isbase314, 'Falsely detected Base 3.14')
|
||||
|
||||
def test_DetectionBase314Yes(self):
|
||||
self.setBase314('YES')
|
||||
do.setup_for_build(self.args)
|
||||
self.assertTrue(do.isbase314, 'Base 3.14 = YES not detected')
|
||||
|
||||
def test_DetectionTestResultsTarget314No(self):
|
||||
self.setBase314('YES')
|
||||
self.setTestResultsTarget('nottherighttarget')
|
||||
do.setup_for_build(self.args)
|
||||
self.assertFalse(do.has_test_results, 'Falsely detected test-results target')
|
||||
|
||||
def test_DetectionTestResultsTarget314Yes(self):
|
||||
self.setBase314('YES')
|
||||
self.setTestResultsTarget('test-results')
|
||||
do.setup_for_build(self.args)
|
||||
self.assertFalse(do.has_test_results, 'Falsely found test-results on Base 3.14')
|
||||
|
||||
def test_DetectionTestResultsTargetNot314Yes(self):
|
||||
self.setBase314('NO')
|
||||
self.setTestResultsTarget('test-results')
|
||||
do.setup_for_build(self.args)
|
||||
self.assertTrue(do.has_test_results, 'Target test-results not detected')
|
||||
|
||||
if __name__ == "__main__":
|
||||
if 'VV' in os.environ and os.environ['VV'] == '1':
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
do.silent_dep_builds = False
|
||||
|
||||
do.host_info()
|
||||
if sys.argv[1:]==['env']:
|
||||
# testing with_vcvars
|
||||
[print(K,'=',V) for K, V in os.environ.items()]
|
||||
else:
|
||||
unittest.main()
|
||||
appveyor/.appveyor.yml.example-full (new file, 155 lines)
@@ -0,0 +1,155 @@
|
||||
# .appveyor.yml for use with EPICS Base ci-scripts
|
||||
# (see: https://github.com/epics-base/ci-scripts)
|
||||
|
||||
# This is YAML - indentation levels are crucial
|
||||
|
||||
#---------------------------------#
|
||||
# build cache #
|
||||
#---------------------------------#
|
||||
# The AppVeyor cache allowance is way too small (1GB per account across all projects, branches and jobs)
|
||||
# to be used for the dependency builds.
|
||||
|
||||
cache:
|
||||
- C:\Users\appveyor\.tools
|
||||
|
||||
#---------------------------------#
|
||||
# repository cloning #
|
||||
#---------------------------------#
|
||||
|
||||
# Called at very beginning, before repo cloning
|
||||
init:
|
||||
# Set autocrlf to make batch files work
|
||||
- git config --global core.autocrlf true
|
||||
# print the connection info for RDP connections (see 'debugging' below)
|
||||
#- ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
|
||||
|
||||
# Set clone depth (do not fetch complete history)
|
||||
clone_depth: 50
|
||||
|
||||
# Skipping commits affecting only specific files
|
||||
skip_commits:
|
||||
files:
|
||||
- 'documentation/*'
|
||||
- 'templates/*'
|
||||
- '**/*.html'
|
||||
- '**/*.md'
|
||||
- '.travis.yml'
|
||||
|
||||
#---------------------------------#
|
||||
# additional packages #
|
||||
#---------------------------------#
|
||||
|
||||
install:
|
||||
# fetch submodules (like ci-scripts)
|
||||
- cmd: git submodule update --init --recursive
|
||||
# for the sequencer
|
||||
- cinst re2c
|
||||
|
||||
#---------------------------------#
|
||||
# build matrix configuration #
|
||||
#---------------------------------#
|
||||
|
||||
# Since dependencies cannot be cached and AppVeyor only grants a single builder VM, all jobs
|
||||
# are executed sequentially, each one taking 10-15 minutes.
|
||||
# Consider this when defining your build matrix. (A full matrix build takes more than 8 hours.)
|
||||
|
||||
# Default build worker image
|
||||
image: Visual Studio 2015
|
||||
|
||||
# Build Configurations: dll/static, regular/debug
|
||||
configuration:
|
||||
- dynamic
|
||||
- static
|
||||
- dynamic-debug
|
||||
- static-debug
|
||||
|
||||
# Environment variables: compiler toolchain, base version, setup file, ...
|
||||
environment:
|
||||
# common / default variables for all jobs
|
||||
SETUP_PATH: .ci-local:.ci
|
||||
|
||||
matrix:
|
||||
- CMP: vs2019
|
||||
SET: test00
|
||||
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
|
||||
- CMP: mingw
|
||||
- CMP: vs2019
|
||||
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
|
||||
- CMP: vs2019
|
||||
BASE: 3.15
|
||||
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
|
||||
- CMP: vs2019
|
||||
BASE: 3.14
|
||||
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
|
||||
- CMP: vs2017
|
||||
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
|
||||
- CMP: vs2015
|
||||
- CMP: vs2013
|
||||
- CMP: vs2012
|
||||
- CMP: vs2010
|
||||
- CMP: vs2008
|
||||
|
||||
# Platform: processor architecture
|
||||
platform:
|
||||
- x86
|
||||
- x64
|
||||
|
||||
# Matrix configuration: exclude sets of jobs
|
||||
matrix:
|
||||
exclude:
|
||||
# VS2012 and older installs don't have the 64 bit compiler
|
||||
- platform: x64
|
||||
CMP: vs2012
|
||||
- platform: x64
|
||||
CMP: vs2010
|
||||
- platform: x64
|
||||
CMP: vs2008
|
||||
# Exclude more jobs to reduce build time
|
||||
# E.g., skip 32-bit for newer compilers
|
||||
#- platform: x86
|
||||
# CMP: vs2019
|
||||
#- platform: x86
|
||||
# CMP: vs2017
|
||||
|
||||
#---------------------------------#
|
||||
# building & testing #
|
||||
#---------------------------------#
|
||||
|
||||
build_script:
|
||||
- cmd: python .ci/appveyor/do.py prepare
|
||||
- cmd: python .ci/appveyor/do.py build
|
||||
|
||||
test_script:
|
||||
- cmd: python .ci/appveyor/do.py test
|
||||
|
||||
on_finish:
|
||||
- ps: Get-ChildItem *.tap -Recurse -Force | % { Push-AppveyorArtifact $_.FullName -FileName $_.Name }
|
||||
- cmd: python .ci/appveyor/do.py build test-results -s
|
||||
|
||||
#---------------------------------#
|
||||
# debugging #
|
||||
#---------------------------------#
|
||||
|
||||
## if you want to connect by remote desktop to a failed build, uncomment these lines
|
||||
## note that you will need to connect within the usual build timeout limit (60 minutes)
|
||||
## so you may want to adjust the build matrix above to just build the one of interest
|
||||
|
||||
# to print the RDP connection info
|
||||
# uncomment the appropriate line in the init: section above
|
||||
|
||||
# block a failed build (until the watchdog barks)
|
||||
#on_failure:
|
||||
# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
|
||||
|
||||
#---------------------------------#
|
||||
# notifications #
|
||||
#---------------------------------#
|
||||
|
||||
notifications:
|
||||
|
||||
- provider: Email
|
||||
to:
|
||||
- me@example.com
|
||||
on_build_success: false
|
||||
|
||||
- provider: GitHubPullRequest
|
||||
appveyor/.appveyor.yml.example-mini (new file, 72 lines)
@@ -0,0 +1,72 @@
|
||||
# .appveyor.yml for use with EPICS Base ci-scripts
|
||||
# (see: https://github.com/epics-base/ci-scripts)
|
||||
|
||||
# This is YAML - indentation levels are crucial
|
||||
|
||||
cache:
|
||||
- C:\Users\appveyor\.tools
|
||||
|
||||
init:
|
||||
- git config --global core.autocrlf true
|
||||
|
||||
clone_depth: 50
|
||||
|
||||
skip_commits:
|
||||
files:
|
||||
- 'documentation/*'
|
||||
- 'templates/*'
|
||||
- '**/*.html'
|
||||
- '**/*.md'
|
||||
- '.travis.yml'
|
||||
|
||||
install:
|
||||
- cmd: git submodule update --init --recursive
|
||||
|
||||
image: Visual Studio 2019
|
||||
|
||||
# Build Configurations: dll/static, regular/debug
|
||||
configuration:
|
||||
- dynamic
|
||||
# - static
|
||||
- dynamic-debug
|
||||
# - static-debug
|
||||
|
||||
environment:
|
||||
# common / default variables for all jobs
|
||||
SETUP_PATH: .ci-local:.ci
|
||||
|
||||
matrix:
|
||||
- CMP: vs2019
|
||||
BASE: 7.0
|
||||
- CMP: vs2019
|
||||
BASE: 3.15
|
||||
|
||||
# Platform: processor architecture
|
||||
platform:
|
||||
# - x86
|
||||
- x64
|
||||
|
||||
# Matrix configuration: exclude sets of jobs
|
||||
matrix:
|
||||
exclude:
|
||||
# VS2012 and older installs don't have the 64 bit compiler
|
||||
- platform: x64
|
||||
CMP: vs2012
|
||||
- platform: x64
|
||||
CMP: vs2010
|
||||
- platform: x64
|
||||
CMP: vs2008
|
||||
|
||||
build_script:
|
||||
- cmd: python .ci/appveyor/do.py prepare
|
||||
- cmd: python .ci/appveyor/do.py build
|
||||
|
||||
test_script:
|
||||
- cmd: python .ci/appveyor/do.py test
|
||||
|
||||
on_finish:
|
||||
- ps: Get-ChildItem *.tap -Recurse -Force | % { Push-AppveyorArtifact $_.FullName -FileName $_.Name }
|
||||
- cmd: python .ci/appveyor/do.py build test-results -s
|
||||
|
||||
notifications:
|
||||
- provider: GitHubPullRequest
|
||||
appveyor/README.md (new file, 112 lines)
@@ -0,0 +1,112 @@
|
||||
# AppVeyor Scripts for EPICS Modules
|
||||
|
||||
## Features
|
||||
|
||||
- Use different compilers (Visual Studio, MinGW)
|
||||
- Use different VS versions (2008, 2010, 2012, 2013, 2015, 2017, 2019)
|
||||
- Compile for Windows 32bit and 64bit
|
||||
- Create static libraries or DLLs (plus the matching executables)
|
||||
- Create optimized or debug builds
|
||||
|
||||
## How to Use these Scripts
|
||||
|
||||
1. Get an account on [AppVeyor](https://www.appveyor.com/), connect
|
||||
it to your GitHub account and activate your support module's
|
||||
repository. For more details, please see below and refer to the
|
||||
[AppVeyor documentation](https://www.appveyor.com/docs/).
|
||||
|
||||
2. Add the ci-scripts repository as a Git Submodule
|
||||
(see [README](../README.md) one level above).
|
||||
|
||||
3. Add settings files defining which dependencies in which versions
|
||||
you want to build against
|
||||
(see [README](../README.md) one level above).
|
||||
|
||||
4. Create an AppVeyor configuration by copying one of the examples into
|
||||
the root directory of your module.
|
||||
```
|
||||
$ cp .ci/appveyor/.appveyor.yml.example-full .appveyor.yml
|
||||
```
|
||||
|
||||
5. Edit the `.appveyor.yml` configuration to include the jobs you want
|
||||
AppVeyor to run.
|
||||
|
||||
AppVeyor automatically creates a build matrix with the following axes:
|
||||
1. `configuration:` \
|
||||
Select static or dynamic (DLL) as well as regular or debug builds.
|
||||
2. `platform:` \
|
||||
Select 32bit or 64bit processor architecture.
|
||||
3. `environment: / matrix:` \
|
||||
List of environment variable settings. Each list element (starting with
|
||||
a dash) is one step on the axis of the build matrix. \
|
||||
Set `CMP` to select the compiler: `mingw` for the native
|
||||
[MinGW](http://mingw-w64.org/) GNU compiler, `vs2008` ...`vs2019`
|
||||
(options listed above) for the Microsoft Visual Studio compilers.
|
||||
|
||||
Your builds will take a long time. \
|
||||
AppVeyor only grants a single worker VM - all jobs of the matrix are
|
||||
executed sequentially. Each job will take between 6 and 15 minutes,
|
||||
plus testing time.
|
||||
|
||||
The `matrix: / exclude:` setting can be used to reduce the number of
|
||||
jobs. Check the [AppVeyor docs][appveyor.doc.matrix]
|
||||
for more ways to reduce the build matrix size.
|
||||
E.g., you can opt for not creating matrix axes for `configuration:`
|
||||
and `platform:` by moving these configurations into the job lines
|
||||
under `environment: / matrix:` (see the sketch after this list).
|
||||
|
||||
6. Push your changes and check
|
||||
[ci.appveyor.com](https://ci.appveyor.com/) for your build results.
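
As a minimal sketch for step 5 (hypothetical job values, not part of the shipped examples): folding `configuration:` and `platform:` into the job lines means each entry sets the `CONFIGURATION` and `PLATFORM` environment variables directly, so only the listed combinations are built:

```
environment:
  matrix:
    - CMP: vs2019
      CONFIGURATION: dynamic
      PLATFORM: x64
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
    - CMP: mingw
      CONFIGURATION: static
      PLATFORM: x86
```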
|
||||
|
||||
## GitHub / AppVeyor Integration and Authentication
|
||||
|
||||
### Security
|
||||
Enabling Two-Factor Authentication (2FA) is always a good idea for all
|
||||
your web-based services, including GitHub and AppVeyor. \
|
||||
Get an app for your phone (Authy works fine for me, but there are plenty),
|
||||
and your phone will generate one-time passwords to verify your identity
|
||||
to the service if required (e.g., when logging in from a new device).
|
||||
|
||||
### Authentication
|
||||
You can use different ways and services to authenticate when you log into
|
||||
your AppVeyor account. The easiest way - at least when you're using the
|
||||
service with repositories on GitHub - is to use GitHub authentication.
|
||||
|
||||
### GitHub Integration
|
||||
AppVeyor offers two ways to integrate with GitHub: through a GitHub
|
||||
application or through an OAuth application. GitHub applications use
the newer API, allow easier fine-grained tuning of access rights, and are
|
||||
preferred.
|
||||
|
||||
The differences are mostly visible when you work with repositories under
|
||||
organizational GitHub accounts: Using OAuth, AppVeyor always has the full
|
||||
rights of your personal GitHub account.
|
||||
GitHub applications, on the other hand, have separate instances and
|
||||
configuration for every organizational account you are using on GitHub.
|
||||
|
||||
### Enabling Builds for your Repository
|
||||
On the 'Projects' tab of your AppVeyor web interface, create a new project.
|
||||
If the repository is not listed on the project creation page,
|
||||
verify the Integration settings. Most of the relevant configuration
|
||||
is taken from GitHub and has to be set up there.
|
||||
|
||||
### AppVeyor Account Sharing
|
||||
You can always invite other AppVeyor users to have access to an AppVeyor
|
||||
account, forming a team. Such additional shared accounts are a way to make
|
||||
the AppVeyor limits (e.g., one parallel builder per account) more manageable.
|
||||
|
||||
## Known Issues
|
||||
|
||||
#### Build Worker Images
|
||||
The AppVeyor documentation on build worker images doesn't seem to fully
|
||||
describe the way things are handled internally.
|
||||
|
||||
The tested and suggested reproducible way of defining the build worker image
|
||||
is shown in the example configuration files:
|
||||
|
||||
- Set the default image using the `image:` tag.
|
||||
- Override the image for specific jobs by setting the
|
||||
`APPVEYOR_BUILD_WORKER_IMAGE` environment variable.
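
For illustration, this is the pattern used in `.appveyor.yml.example-full` above (a sketch; values copied from that example):

```
# default build worker image for all jobs
image: Visual Studio 2015

environment:
  matrix:
    # this job overrides the default image
    - CMP: vs2019
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
```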
|
||||
|
||||
<!-- Links -->
|
||||
[appveyor.doc.matrix]: https://www.appveyor.com/docs/build-configuration/#build-matrix
|
||||
appveyor/do.py (new file, 671 lines)
@@ -0,0 +1,671 @@
|
||||
#!/usr/bin/env python
|
||||
"""Windows (AppVeyor) ci build script
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import sys, os, stat, shutil
|
||||
import fileinput
|
||||
import logging
|
||||
import re
|
||||
import subprocess as sp
|
||||
import distutils.util
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Setup ANSI Colors
|
||||
ANSI_RED = "\033[31;1m"
|
||||
ANSI_GREEN = "\033[32;1m"
|
||||
ANSI_YELLOW = "\033[33;1m"
|
||||
ANSI_BLUE = "\033[34;1m"
|
||||
ANSI_MAGENTA = "\033[35;1m"
|
||||
ANSI_CYAN = "\033[36;1m"
|
||||
ANSI_RESET = "\033[0m"
|
||||
ANSI_CLEAR = "\033[0K"
|
||||
|
||||
seen_setups = []
|
||||
modules_to_compile = []
|
||||
setup = {}
|
||||
places = {}
|
||||
|
||||
if 'HomeDrive' in os.environ:
|
||||
cachedir = os.path.join(os.getenv('HomeDrive'), os.getenv('HomePath'), '.cache')
|
||||
toolsdir = os.path.join(os.getenv('HomeDrive'), os.getenv('HomePath'), '.tools')
|
||||
elif 'HOME' in os.environ:
|
||||
cachedir = os.path.join(os.getenv('HOME'), '.cache')
|
||||
toolsdir = os.path.join(os.getenv('HOME'), '.tools')
|
||||
else:
|
||||
cachedir = os.path.join('.', '.cache')
|
||||
toolsdir = os.path.join('.', '.tools')
|
||||
|
||||
if 'CACHEDIR' in os.environ:
|
||||
cachedir = os.environ['CACHEDIR']
|
||||
|
||||
vcvars_table = {
|
||||
# https://en.wikipedia.org/wiki/Microsoft_Visual_Studio#History
|
||||
'vs2019':r'C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Auxiliary\Build\vcvarsall.bat',
|
||||
'vs2017':r'C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvarsall.bat',
|
||||
'vs2015':r'C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat',
|
||||
'vs2013':r'C:\Program Files (x86)\Microsoft Visual Studio 12.0\VC\vcvarsall.bat',
|
||||
'vs2012':r'C:\Program Files (x86)\Microsoft Visual Studio 11.0\VC\vcvarsall.bat',
|
||||
'vs2010':r'C:\Program Files (x86)\Microsoft Visual Studio 10.0\VC\vcvarsall.bat',
|
||||
'vs2008':r'C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\vcvarsall.bat',
|
||||
}
|
||||
|
||||
ciscriptsdir = os.path.abspath(os.path.dirname(sys.argv[0]))
|
||||
if os.path.basename(ciscriptsdir) == 'appveyor':
|
||||
ciscriptsdir = ciscriptsdir.rstrip(os.pathsep+'appveyor')
|
||||
|
||||
if 'BASE' in os.environ and os.environ['BASE'] == 'SELF':
|
||||
building_base = True
|
||||
places['EPICS_BASE'] = '.'
|
||||
else:
|
||||
building_base = False
|
||||
|
||||
def modlist():
|
||||
if building_base:
|
||||
ret = []
|
||||
else:
|
||||
for var in ['ADD_MODULES', 'MODULES']:
|
||||
setup.setdefault(var, '')
|
||||
if var in os.environ:
|
||||
setup[var] = os.environ[var]
|
||||
logger.debug('ENV assignment: %s = %s', var, setup[var])
|
||||
ret = ['BASE'] + setup['ADD_MODULES'].upper().split() + setup['MODULES'].upper().split()
|
||||
logger.debug('Effective module list: %s', ret)
|
||||
return ret
|
||||
|
||||
zip7 = r'C:\Program Files\7-Zip\7z'
|
||||
make = ''
|
||||
isbase314 = False
|
||||
has_test_results = False
|
||||
silent_dep_builds = True
|
||||
|
||||
def host_info():
|
||||
print('{0}AppVeyor Build Worker Image:{1} {2}'
|
||||
.format(ANSI_CYAN, ANSI_RESET, os.environ['APPVEYOR_BUILD_WORKER_IMAGE']))
|
||||
|
||||
print('{0}Python setup{1}'.format(ANSI_CYAN, ANSI_RESET))
|
||||
print(sys.version)
|
||||
print('PYTHONPATH')
|
||||
for dname in sys.path:
|
||||
print(' ', dname)
|
||||
print('platform =', distutils.util.get_platform())
|
||||
|
||||
print('{0}Available Visual Studio versions{1}'.format(ANSI_CYAN, ANSI_RESET))
|
||||
for key in vcvars_table:
|
||||
if os.path.exists(vcvars_table[key]):
|
||||
print('Found', key, 'in', vcvars_table[key])
|
||||
sys.stdout.flush()
|
||||
|
||||
# Used from unittests
|
||||
def clear_lists():
|
||||
global isbase314, has_test_results
|
||||
del seen_setups[:]
|
||||
del modules_to_compile[:]
|
||||
setup.clear()
|
||||
places.clear()
|
||||
isbase314 = False
|
||||
has_test_results = False
|
||||
|
||||
# Error-handler to make shutil.rmtree delete read-only files on Windows
|
||||
def remove_readonly(func, path, excinfo):
|
||||
os.chmod(path, stat.S_IWRITE)
|
||||
func(path)
|
||||
|
||||
# source_set(setup)
|
||||
#
|
||||
# Source a settings file (extension .set) found in the setup_dirs path
|
||||
# May be called recursively (from within a setup file)
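# Example settings file (hypothetical 'mymodule.set' found via SETUP_PATH), in the syntax
# parsed below: blank lines and '#' comments are skipped, 'include <name>' sources another
# setup file recursively, and NAME=VALUE lines fill the setup dict (quotes are stripped;
# values already set, e.g. from the environment, are not overridden):
#
#   include defaults
#   MODULES="asyn sncseq"
#   SNCSEQ=R2-2-7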
|
||||
def source_set(name):
|
||||
# allowed separators: colon or whitespace
|
||||
setup_dirs = os.getenv('SETUP_PATH', "").replace(':', ' ').split()
|
||||
if len(setup_dirs) == 0:
|
||||
raise NameError("{0}Search path for setup files (SETUP_PATH) is empty{1}".format(ANSI_RED,ANSI_RESET))
|
||||
|
||||
for set_dir in setup_dirs:
|
||||
set_file = os.path.join(set_dir, name) + ".set"
|
||||
|
||||
if set_file in seen_setups:
|
||||
print("Ignoring already included setup file {0}".format(set_file))
|
||||
return
|
||||
|
||||
if os.path.isfile(set_file):
|
||||
seen_setups.append(set_file)
|
||||
print("Loading setup file {0}".format(set_file))
|
||||
sys.stdout.flush()
|
||||
with open(set_file) as fp:
|
||||
for line in fp:
|
||||
logger.debug('Next line: %s', line.strip())
|
||||
if not line.strip() or line.strip()[0] == '#':
|
||||
continue
|
||||
if line.startswith("include"):
|
||||
logger.debug('Found an include, reading %s', line.split()[1])
|
||||
source_set(line.split()[1])
|
||||
continue
|
||||
assign = line.replace('"', '').strip().split("=", 1)
|
||||
logger.debug('Interpreting as assignment')
|
||||
setup.setdefault(assign[0], os.getenv(assign[0], ""))
|
||||
if not setup[assign[0]].strip():
|
||||
logger.debug('Doing assignment: %s = %s', assign[0], assign[1])
|
||||
setup[assign[0]] = assign[1]
|
||||
break
|
||||
else:
|
||||
raise NameError("{0}Setup file {1} does not exist in SETUP_PATH search path ({2}){3}"
|
||||
.format(ANSI_RED, name, setup_dirs, ANSI_RESET))
|
||||
|
||||
# update_release_local(var, location)
|
||||
# var name of the variable to set in RELEASE.local
|
||||
# location location (absolute path) of where variable should point to
|
||||
#
|
||||
# Manipulate RELEASE.local in the cache location:
|
||||
# - replace "$var=$location" line if it exists and has changed
|
||||
# - otherwise add "$var=$location" line and possibly move EPICS_BASE=... line to the end
|
||||
# Set places[var] = location
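# Example (hypothetical module/path): update_release_local('ASYN', r'C:\Users\appveyor\.cache\asyn-R4-36')
# leaves RELEASE.local containing the line "ASYN=C:/Users/appveyor/.cache/asyn-R4-36" exactly once
# (backslashes are converted to forward slashes) and keeps any EPICS_BASE=... line at the end.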
|
||||
def update_release_local(var, location):
|
||||
release_local = os.path.join(cachedir, 'RELEASE.local')
|
||||
updated_line = '{0}={1}'.format(var, location.replace('\\', '/'))
|
||||
places[var] = location
|
||||
|
||||
if not os.path.exists(release_local):
|
||||
logger.debug('RELEASE.local does not exist, creating it')
|
||||
try:
|
||||
os.makedirs(cachedir)
|
||||
except:
|
||||
pass
|
||||
fout = open(release_local, 'w')
|
||||
fout.close()
|
||||
base_line = ''
|
||||
found = False
|
||||
logger.debug("Opening RELEASE.local for adding '%s'", updated_line)
|
||||
for line in fileinput.input(release_local, inplace=1):
|
||||
outputline = line.strip()
|
||||
if 'EPICS_BASE=' in line:
|
||||
base_line = line.strip()
|
||||
logger.debug("Found EPICS_BASE line '%s', not writing it", base_line)
|
||||
continue
|
||||
elif '{0}='.format(var) in line:
|
||||
logger.debug("Found '%s=' line, replacing", var)
|
||||
found = True
|
||||
outputline = updated_line
|
||||
logger.debug("Writing line to RELEASE.local: '%s'", outputline)
|
||||
print(outputline)
|
||||
fileinput.close()
|
||||
fout = open(release_local,"a")
|
||||
if not found:
|
||||
logger.debug("Adding new definition: '%s'", updated_line)
|
||||
print(updated_line, file=fout)
|
||||
if base_line:
|
||||
logger.debug("Writing EPICS_BASE line: '%s'", base_line)
|
||||
print(base_line, file=fout)
|
||||
fout.close()
|
||||
|
||||
def set_setup_from_env(dep):
|
||||
for postf in ['', '_DIRNAME', '_REPONAME', '_REPOOWNER', '_REPOURL',
|
||||
'_VARNAME', '_RECURSIVE', '_DEPTH', '_HOOK']:
|
||||
if dep+postf in os.environ:
|
||||
setup[dep+postf] = os.environ[dep+postf]
|
||||
logger.debug('ENV assignment: %s = %s', dep+postf, setup[dep+postf])
|
||||
|
||||
def call_git(args, **kws):
|
||||
if 'cwd' in kws:
|
||||
place = kws['cwd']
|
||||
else:
|
||||
place = os.getcwd()
|
||||
logger.debug("EXEC '%s' in %s", ' '.join(['git'] + args), place)
|
||||
sys.stdout.flush()
|
||||
exitcode = sp.call(['git'] + args, **kws)
|
||||
logger.debug('EXEC DONE')
|
||||
return exitcode
|
||||
|
||||
def call_make(args=[], **kws):
|
||||
place = kws.get('cwd', os.getcwd())
|
||||
parallel = kws.pop('parallel', 2)
|
||||
silent = kws.pop('silent', False)
|
||||
# no parallel make for Base 3.14
|
||||
if parallel <= 0 or isbase314:
|
||||
makeargs = []
|
||||
else:
|
||||
makeargs = ['-j{0}'.format(parallel), '-Otarget']
|
||||
if silent:
|
||||
makeargs += ['-s']
|
||||
logger.debug("EXEC '%s' in %s", ' '.join([make] + makeargs + args), place)
|
||||
sys.stdout.flush()
|
||||
exitcode = sp.call([make] + makeargs + args, **kws)
|
||||
logger.debug('EXEC DONE')
|
||||
if exitcode != 0:
|
||||
sys.exit(exitcode)
|
||||
|
||||
def get_git_hash(place):
|
||||
logger.debug("EXEC 'git log -n1 --pretty=format:%%H' in %s", place)
|
||||
sys.stdout.flush()
|
||||
head = sp.check_output(['git', 'log', '-n1', '--pretty=format:%H'], cwd=place).decode()
|
||||
logger.debug('EXEC DONE')
|
||||
return head
|
||||
|
||||
def complete_setup(dep):
|
||||
set_setup_from_env(dep)
|
||||
setup.setdefault(dep, 'master')
|
||||
setup.setdefault(dep+"_DIRNAME", dep.lower())
|
||||
setup.setdefault(dep+"_REPONAME", dep.lower())
|
||||
setup.setdefault('REPOOWNER', 'epics-modules')
|
||||
setup.setdefault(dep+"_REPOOWNER", setup['REPOOWNER'])
|
||||
setup.setdefault(dep+"_REPOURL", 'https://github.com/{0}/{1}.git'
|
||||
.format(setup[dep+'_REPOOWNER'], setup[dep+'_REPONAME']))
|
||||
setup.setdefault(dep+"_VARNAME", dep)
|
||||
setup.setdefault(dep+"_RECURSIVE", 'YES')
|
||||
setup.setdefault(dep+"_DEPTH", -1)
|
||||
|
||||
# add_dependency(dep, tag)
|
||||
#
|
||||
# Add a dependency to the cache area:
|
||||
# - check out (recursive if configured) in the CACHE area unless it already exists and the
|
||||
# required commit has been built
|
||||
# - Defaults:
|
||||
# $dep_DIRNAME = lower case ($dep)
|
||||
# $dep_REPONAME = lower case ($dep)
|
||||
# $dep_REPOURL = GitHub / $dep_REPOOWNER (or $REPOOWNER or epics-modules) / $dep_REPONAME .git
|
||||
# $dep_VARNAME = $dep
|
||||
# $dep_DEPTH = 5
|
||||
# $dep_RECURSIVE = 1/YES (0/NO for a flat clone)
|
||||
# - Add $dep_VARNAME line to the RELEASE.local file in the cache area (unless already there)
|
||||
# - Add full path to $modules_to_compile
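# Example (tag taken from the unit tests): with setup['BASE'] = 'R3.15.6', add_dependency('BASE')
# clones the configured BASE repository at tag R3.15.6 into <cachedir>/base-R3.15.6, writes the
# checked-out commit hash to the 'checked_out' marker file there, and records the location in
# RELEASE.local via update_release_local(); an existing clone whose marker matches HEAD is reused.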
|
||||
def add_dependency(dep):
|
||||
recurse = setup[dep+'_RECURSIVE'].lower()
|
||||
if recurse not in ['0', 'no']:
|
||||
recursearg = ["--recursive"]
|
||||
elif recurse not in ['1', 'yes']:
|
||||
recursearg = []
|
||||
else:
|
||||
raise RuntimeError("Invalid value for {}_RECURSIVE='{}' not 0/NO/1/YES".format(dep, recurse))
|
||||
deptharg = {
|
||||
'-1':['--depth', '5'],
|
||||
'0':[],
|
||||
}.get(str(setup[dep+'_DEPTH']), ['--depth', str(setup[dep+'_DEPTH'])])
|
||||
|
||||
tag = setup[dep]
|
||||
|
||||
logger.debug('Adding dependency %s with tag %s', dep, setup[dep])
|
||||
|
||||
# determine if dep points to a valid release or branch
|
||||
if call_git(['ls-remote', '--quiet', '--exit-code', '--refs', setup[dep+'_REPOURL'], tag]):
|
||||
raise RuntimeError("{0}{1} is neither a tag nor a branch name for {2} ({3}){4}"
|
||||
.format(ANSI_RED, tag, dep, setup[dep+'_REPOURL'], ANSI_RESET))
|
||||
|
||||
dirname = setup[dep+'_DIRNAME']+'-{0}'.format(tag)
|
||||
place = os.path.join(cachedir, dirname)
|
||||
checked_file = os.path.join(place, "checked_out")
|
||||
|
||||
if os.path.isdir(place):
|
||||
logger.debug('Dependency %s: directory %s exists, comparing checked-out commit', dep, place)
|
||||
# check HEAD commit against the hash in marker file
|
||||
if os.path.exists(checked_file):
|
||||
with open(checked_file, 'r') as bfile:
|
||||
checked_out = bfile.read().strip()
|
||||
bfile.close()
|
||||
else:
|
||||
checked_out = 'never'
|
||||
head = get_git_hash(place)
|
||||
logger.debug('Found checked_out commit %s, git head is %s', checked_out, head)
|
||||
if head != checked_out:
|
||||
logger.debug('Dependency %s out of date - removing', dep)
|
||||
shutil.rmtree(place, onerror=remove_readonly)
|
||||
else:
|
||||
print('Found {0} of dependency {1} up-to-date in {2}'.format(tag, dep, place))
|
||||
sys.stdout.flush()
|
||||
|
||||
if not os.path.isdir(place):
|
||||
if not os.path.isdir(cachedir):
|
||||
os.makedirs(cachedir)
|
||||
# clone dependency
|
||||
print('Cloning {0} of dependency {1} into {2}'
|
||||
.format(tag, dep, place))
|
||||
sys.stdout.flush()
|
||||
call_git(['clone', '--quiet'] + deptharg + recursearg + ['--branch', tag, setup[dep+'_REPOURL'], dirname], cwd=cachedir)
|
||||
|
||||
sp.check_call(['git', 'log', '-n1'], cwd=place)
|
||||
modules_to_compile.append(place)
|
||||
|
||||
if dep == 'BASE':
|
||||
# add MSI 1.7 to Base 3.14
|
||||
versionfile = os.path.join(place, 'configure', 'CONFIG_BASE_VERSION')
|
||||
if os.path.exists(versionfile):
|
||||
with open(versionfile) as f:
|
||||
if 'BASE_3_14=YES' in f.read():
|
||||
print('Adding MSI 1.7 to {0}'.format(place))
|
||||
sys.stdout.flush()
|
||||
sp.check_call(['patch', '-p1', '-i', os.path.join(ciscriptsdir, 'add-msi-to-314.patch')],
|
||||
cwd=place)
|
||||
else:
|
||||
# force including RELEASE.local for non-base modules by overwriting their configure/RELEASE
|
||||
release = os.path.join(place, "configure", "RELEASE")
|
||||
if os.path.exists(release):
|
||||
with open(release, 'w') as fout:
|
||||
print('-include $(TOP)/../RELEASE.local', file=fout)
|
||||
|
||||
# run hook if defined
|
||||
if dep+'_HOOK' in setup:
|
||||
hook = os.path.join(place, setup[dep+'_HOOK'])
|
||||
if os.path.exists(hook):
|
||||
print('Running hook {0} in {1}'.format(setup[dep+'_HOOK'], place))
|
||||
sys.stdout.flush()
|
||||
sp.check_call(hook, shell=True, cwd=place)
|
||||
|
||||
# write checked out commit hash to marker file
|
||||
head = get_git_hash(place)
|
||||
logger.debug('Writing hash of checked-out dependency (%s) to marker file', head)
|
||||
with open(checked_file, "w") as fout:
|
||||
print(head, file=fout)
|
||||
fout.close()
|
||||
|
||||
update_release_local(setup[dep+"_VARNAME"], place)
|
||||
|
||||
def setup_for_build(args):
    global make, isbase314, has_test_results
    dllpaths = []

    # there is no combined static and debug EPICS_HOST_ARCH target,
    # so a combined debug and static target will appear to be just static
    # but debug will have been specified in CONFIG_SITE by prepare()
    hostarchsuffix = ''
    if re.search('debug', os.environ['CONFIGURATION']):
        hostarchsuffix = '-debug'
    if re.search('static', os.environ['CONFIGURATION']):
        hostarchsuffix = '-static'

    if os.environ['PLATFORM'].lower() == 'x86':
        os.environ['EPICS_HOST_ARCH'] = 'win32-x86' + hostarchsuffix
    elif os.environ['PLATFORM'].lower() == 'x64':
        os.environ['EPICS_HOST_ARCH'] = 'windows-x64' + hostarchsuffix

    if os.environ['CMP'] == 'vs2019':
        # put strawberry perl in the PATH
        os.environ['PATH'] = os.pathsep.join([os.path.join(r'C:\Strawberry\perl\site\bin'),
                                              os.path.join(r'C:\Strawberry\perl\bin'),
                                              os.environ['PATH']])
    if os.environ['CMP'] == 'mingw':
        if 'INCLUDE' not in os.environ:
            os.environ['INCLUDE'] = ''
        if os.environ['PLATFORM'].lower() == 'x86':
            os.environ['EPICS_HOST_ARCH'] = 'win32-x86-mingw'
            os.environ['INCLUDE'] = os.pathsep.join([r'C:\mingw-w64\i686-6.3.0-posix-dwarf-rt_v5-rev1\mingw32\include',
                                                     os.environ['INCLUDE']])
            os.environ['PATH'] = os.pathsep.join([r'C:\mingw-w64\i686-6.3.0-posix-dwarf-rt_v5-rev1\mingw32\bin',
                                                  os.environ['PATH']])
        elif os.environ['PLATFORM'].lower() == 'x64':
            os.environ['EPICS_HOST_ARCH'] = 'windows-x64-mingw'
            os.environ['INCLUDE'] = os.pathsep.join([r'C:\mingw-w64\x86_64-8.1.0-posix-seh-rt_v6-rev0\mingw64\include',
                                                     os.environ['INCLUDE']])
            os.environ['PATH'] = os.pathsep.join([r'C:\mingw-w64\x86_64-8.1.0-posix-seh-rt_v6-rev0\mingw64\bin',
                                                  os.environ['PATH']])

    make = os.path.join(toolsdir, 'make.exe')

    base_place = '.'
    if not building_base:
        with open(os.path.join(cachedir, 'RELEASE.local'), 'r') as f:
            lines = f.readlines()
            for line in lines:
                (mod, place) = line.strip().split('=')
                bindir = os.path.join(place, 'bin', os.environ['EPICS_HOST_ARCH'])
                if os.path.isdir(bindir):
                    dllpaths.append(bindir)
                if mod == 'EPICS_BASE':
                    base_place = place

    cfg_base_version = os.path.join(base_place, 'configure', 'CONFIG_BASE_VERSION')
    if os.path.exists(cfg_base_version):
        with open(cfg_base_version) as myfile:
            if 'BASE_3_14=YES' in myfile.read():
                isbase314 = True

    if not isbase314:
        rules_build = os.path.join(base_place, 'configure', 'RULES_BUILD')
        if os.path.exists(rules_build):
            with open(rules_build) as myfile:
                for line in myfile:
                    if re.match('^test-results:', line):
                        has_test_results = True

    bindir = os.path.join(os.getcwd(), 'bin', os.environ['EPICS_HOST_ARCH'])
    if os.path.isdir(bindir):
        dllpaths.append(bindir)

    os.environ['PATH'] = os.pathsep.join(dllpaths + [os.environ['PATH']])

    # apparently %CD% is handled automagically
    os.environ['TOP'] = os.getcwd()

    addpaths = []
    for path in args.paths:
        try:
            addpaths.append(path.format(**os.environ))
        except KeyError:
            print('Environment')
            [print(' ',K,'=',repr(V)) for K,V in os.environ.items()]
            raise

    os.environ['PATH'] = os.pathsep.join([os.environ['PATH']] + addpaths)

def prepare(args):
    host_info()

    print('{0}Loading setup files{1}'.format(ANSI_YELLOW, ANSI_RESET))
    source_set('defaults')
    if 'SET' in os.environ:
        source_set(os.environ['SET'])

    [complete_setup(mod) for mod in modlist()]

    logger.debug('Loaded setup')
    kvs = list(setup.items())
    kvs.sort()
    [logger.debug(' %s = "%s"', *kv) for kv in kvs]

    # we're working with tags (detached heads) a lot: suppress advice
    call_git(['config', '--global', 'advice.detachedHead', 'false'])

    print('{0}Checking/cloning dependencies{1}'.format(ANSI_YELLOW, ANSI_RESET))
    sys.stdout.flush()

    [add_dependency(mod) for mod in modlist()]

    if not building_base:
        if os.path.isdir('configure'):
            targetdir = 'configure'
        else:
            targetdir = '.'
        shutil.copy(os.path.join(cachedir, 'RELEASE.local'), targetdir)

    print('{0}Configuring EPICS build system{1}'.format(ANSI_YELLOW, ANSI_RESET))

    with open(os.path.join(places['EPICS_BASE'], 'configure', 'CONFIG_SITE'), 'a') as config_site:
        if re.search('static', os.environ['CONFIGURATION']):
            config_site.write('SHARED_LIBRARIES=NO\n')
            config_site.write('STATIC_BUILD=YES\n')
            linktype = 'static'
        else:
            linktype = 'dynamic (DLL)'
        if re.search('debug', os.environ['CONFIGURATION']):
            config_site.write('HOST_OPT=NO\n')
            optitype = 'debug'
        else:
            optitype = 'optimized'

    # Enable/fix parallel build for VisualStudio compiler on older Base versions
    add_vs_fix = True
    config_win = os.path.join(places['EPICS_BASE'], 'configure', 'os', 'CONFIG.win32-x86.win32-x86')
    with open(config_win) as myfile:
        for line in myfile:
            if re.match(r'^ifneq \(\$\(VisualStudioVersion\),11\.0\)', line):
                add_vs_fix = False
    if add_vs_fix:
        with open(config_win, 'a') as myfile:
            myfile.write('''
# Fix parallel build for some VisualStudio versions
ifneq ($(VisualStudioVersion),)
ifneq ($(VisualStudioVersion),11.0)
ifeq ($(findstring -FS,$(OPT_CXXFLAGS_NO)),)
OPT_CXXFLAGS_NO += -FS
OPT_CFLAGS_NO += -FS
endif
else
OPT_CXXFLAGS_NO := $(filter-out -FS,$(OPT_CXXFLAGS_NO))
OPT_CFLAGS_NO := $(filter-out -FS,$(OPT_CFLAGS_NO))
endif
endif''')

    print('EPICS Base build system set up for {0} build with {1} linking'
          .format(optitype, linktype))

    if not os.path.isdir(toolsdir):
        os.makedirs(toolsdir)

    makever = '4.2.1'
    if not os.path.exists(os.path.join(toolsdir, 'make.exe')):
        print('Installing Make 4.2.1 from ANL web site')
        sys.stdout.flush()
        sp.check_call(['curl', '-fsS', '--retry', '3', '-o', 'make-{0}.zip'.format(makever),
                       'https://epics.anl.gov/download/tools/make-{0}-win64.zip'.format(makever)],
                      cwd=toolsdir)
        sp.check_call([zip7, 'e', 'make-{0}.zip'.format(makever)], cwd=toolsdir)
        os.remove(os.path.join(toolsdir, 'make-{0}.zip'.format(makever)))

    setup_for_build(args)

    print('{0}EPICS_HOST_ARCH = {1}{2}'.format(ANSI_CYAN, os.environ['EPICS_HOST_ARCH'], ANSI_RESET))
    print('{0}$ {1} --version{2}'.format(ANSI_CYAN, make, ANSI_RESET))
    sys.stdout.flush()
    call_make(['--version'], parallel=0)
    print('{0}$ perl --version{1}'.format(ANSI_CYAN, ANSI_RESET))
    sys.stdout.flush()
    sp.check_call(['perl', '--version'])

    if os.environ['CMP'] == 'mingw':
        print('{0}$ gcc --version{1}'.format(ANSI_CYAN, ANSI_RESET))
        sys.stdout.flush()
        sp.check_call(['gcc', '--version'])
    else:
        print('{0}$ cl{1}'.format(ANSI_CYAN, ANSI_RESET))
        sys.stdout.flush()
        sp.check_call(['cl'])

    if not building_base:
        for mod in modlist():
            place = places[setup[mod+"_VARNAME"]]
            print('{0}Building dependency {1} in {2}{3}'.format(ANSI_YELLOW, mod, place, ANSI_RESET))
            call_make(cwd=place, silent=silent_dep_builds)

        print('{0}Dependency module information{1}'.format(ANSI_CYAN, ANSI_RESET))
        print('Module     Tag          Binaries    Commit')
        print(100 * '-')
        for mod in modlist():
            commit = sp.check_output(['git', 'log', '-n1', '--oneline'], cwd=places[setup[mod+"_VARNAME"]]).strip()
            print("%-10s %-12s %-11s %s" % (mod, setup[mod], 'rebuilt', commit))

        print('{0}Contents of RELEASE.local{1}'.format(ANSI_CYAN, ANSI_RESET))
        with open(os.path.join(cachedir, 'RELEASE.local'), 'r') as f:
            print(f.read().strip())

def build(args):
    setup_for_build(args)
    print('{0}Building the main module{1}'.format(ANSI_YELLOW, ANSI_RESET))
    call_make(args.makeargs)

def test(args):
    setup_for_build(args)
    print('{0}Running the main module tests{1}'.format(ANSI_YELLOW, ANSI_RESET))
    call_make(['tapfiles'])
    if has_test_results:
        call_make(['test-results'], parallel=0, silent=True)

def doExec(args):
    'exec user command with vcvars'
    setup_for_build(args)
    os.environ['MAKE'] = make
    print('Execute command {}'.format(args.cmd))
    sys.stdout.flush()
    sp.check_call(' '.join(args.cmd), shell=True)

def with_vcvars(cmd):
    '''re-exec main script with a (hopefully different) command
    '''
    CC = os.environ['CMP']

    # cf. https://docs.microsoft.com/en-us/cpp/build/building-on-the-command-line

    info = {
        'python': sys.executable,
        'self': sys.argv[0],
        'cmd':cmd,
    }

    info['arch'] = {
        'x86': 'x86', # 'amd64_x86' ??
        'x64': 'amd64',
    }[os.environ['PLATFORM'].lower()] # 'x86' or 'x64'

    info['vcvars'] = vcvars_table[CC]

    script='''
call "{vcvars}" {arch}

"{python}" "{self}" {cmd}
'''.format(**info)

    logger.debug('----- Creating vcvars-trampoline.bat -----')
    for line in script.split('\n'):
        logger.debug(line)
    logger.debug('----- snip -----')

    with open('vcvars-trampoline.bat', 'w') as F:
        F.write(script)

    print('{0}Calling vcvars-trampoline.bat to set environment for {1} on {2}{3}'
          .format(ANSI_YELLOW, CC, os.environ['PLATFORM'], ANSI_RESET))
    sys.stdout.flush()
    returncode = sp.call('vcvars-trampoline.bat', shell=True)
    if returncode != 0:
        sys.exit(returncode)

def getargs():
    from argparse import ArgumentParser, REMAINDER
    P = ArgumentParser()
    P.add_argument('--no-vcvars', dest='vcvars', default=True, action='store_false',
                   help='Assume vcvarsall.bat has already been run')
    P.add_argument('--add-path', dest='paths', default=[], action='append',
                   help='Append directory to %PATH%. Expands {ENVVAR}')
    SP = P.add_subparsers()

    CMD = SP.add_parser('prepare')
    CMD.set_defaults(func=prepare)

    CMD = SP.add_parser('build')
    CMD.add_argument('makeargs', nargs=REMAINDER)
    CMD.set_defaults(func=build)

    CMD = SP.add_parser('test')
    CMD.set_defaults(func=test)

    CMD = SP.add_parser('exec')
    CMD.add_argument('cmd', nargs=REMAINDER)
    CMD.set_defaults(func=doExec)

    return P

def main(raw):
    global silent_dep_builds
    args = getargs().parse_args(raw)
    if 'VV' in os.environ and os.environ['VV'] == '1':
        logging.basicConfig(level=logging.DEBUG)
        silent_dep_builds = False

    if args.vcvars and os.environ['CMP'].startswith('vs'):
        # re-exec with MSVC in PATH
        with_vcvars(' '.join(['--no-vcvars']+raw))
    else:
        args.func(args)

if __name__=='__main__':
    main(sys.argv[1:])
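
For orientation, a CI job drives this script through the subcommands registered in getargs() above: 'prepare' clones and builds the dependencies, 'build' compiles the module itself, 'test' runs the regression tests, and 'exec' runs an arbitrary command inside the compiler environment (re-entered via the vcvars trampoline shown in with_vcvars()). A minimal sketch of such a job; the script path is a placeholder, since the actual file name and location depend on how a module embeds these ci-scripts:

    python <path-to-this-script> prepare
    python <path-to-this-script> build
    python <path-to-this-script> test
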
configure/CONFIG (new file, 29 lines)
@@ -0,0 +1,29 @@
|
||||
# CONFIG - Load build configuration data
|
||||
#
|
||||
# Do not make changes to this file!
|
||||
|
||||
# Allow user to override where the build rules come from
|
||||
RULES = $(EPICS_BASE)
|
||||
|
||||
# RELEASE files point to other application tops
|
||||
include $(TOP)/configure/RELEASE
|
||||
-include $(TOP)/configure/RELEASE.$(EPICS_HOST_ARCH).Common
|
||||
ifdef T_A
|
||||
-include $(TOP)/configure/RELEASE.Common.$(T_A)
|
||||
-include $(TOP)/configure/RELEASE.$(EPICS_HOST_ARCH).$(T_A)
|
||||
endif
|
||||
|
||||
CONFIG = $(RULES)/configure
|
||||
include $(CONFIG)/CONFIG
|
||||
|
||||
# Override the Base definition:
|
||||
INSTALL_LOCATION = $(TOP)
|
||||
|
||||
# CONFIG_SITE files contain other build configuration settings
|
||||
include $(TOP)/configure/CONFIG_SITE
|
||||
-include $(TOP)/configure/CONFIG_SITE.$(EPICS_HOST_ARCH).Common
|
||||
ifdef T_A
|
||||
-include $(TOP)/configure/CONFIG_SITE.Common.$(T_A)
|
||||
-include $(TOP)/configure/CONFIG_SITE.$(EPICS_HOST_ARCH).$(T_A)
|
||||
endif
|
||||
|
||||
configure/CONFIG_SITE (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
# CONFIG_SITE
|
||||
|
||||
# Make any application-specific changes to the EPICS build
|
||||
# configuration variables in this file.
|
||||
#
|
||||
# Host/target specific settings can be specified in files named
|
||||
# CONFIG_SITE.$(EPICS_HOST_ARCH).Common
|
||||
# CONFIG_SITE.Common.$(T_A)
|
||||
# CONFIG_SITE.$(EPICS_HOST_ARCH).$(T_A)
|
||||
|
||||
# CHECK_RELEASE controls the consistency checking of the support
|
||||
# applications pointed to by the RELEASE* files.
|
||||
# Normally CHECK_RELEASE should be set to YES.
|
||||
# Set CHECK_RELEASE to NO to disable checking completely.
|
||||
# Set CHECK_RELEASE to WARN to perform consistency checking but
|
||||
# continue building even if conflicts are found.
|
||||
CHECK_RELEASE = YES
|
||||
|
||||
# Set this when you only want to compile this application
|
||||
# for a subset of the cross-compiled target architectures
|
||||
# that Base is built for.
|
||||
#CROSS_COMPILER_TARGET_ARCHS = vxWorks-ppc32
|
||||
|
||||
# To install files into a location other than $(TOP) define
|
||||
# INSTALL_LOCATION here.
|
||||
#INSTALL_LOCATION=</absolute/path/to/install/top>
|
||||
|
||||
# Set this when the IOC and build host use different paths
|
||||
# to the install location. This may be needed to boot from
|
||||
# a Microsoft FTP server say, or on some NFS configurations.
|
||||
#IOCS_APPL_TOP = </IOC's/absolute/path/to/install/top>
|
||||
|
||||
# For application debugging purposes, override the HOST_OPT and/
|
||||
# or CROSS_OPT settings from base/configure/CONFIG_SITE
|
||||
#HOST_OPT = NO
|
||||
#CROSS_OPT = NO
|
||||
|
||||
# These allow developers to override the CONFIG_SITE variable
|
||||
# settings without having to modify the configure/CONFIG_SITE
|
||||
# file itself.
|
||||
-include $(TOP)/../CONFIG_SITE.local
|
||||
-include $(TOP)/configure/CONFIG_SITE.local
|
||||
|
||||
configure/Makefile (new file, 8 lines)
@@ -0,0 +1,8 @@
|
||||
TOP=..
|
||||
|
||||
include $(TOP)/configure/CONFIG
|
||||
|
||||
TARGETS = $(CONFIG_TARGETS)
|
||||
CONFIGS += $(subst ../,,$(wildcard $(CONFIG_INSTALLS)))
|
||||
|
||||
include $(TOP)/configure/RULES
|
||||
configure/RELEASE (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
# RELEASE - Location of external support modules
|
||||
#
|
||||
# IF YOU MAKE ANY CHANGES to this file you must subsequently
|
||||
# do a "gnumake rebuild" in this application's top level
|
||||
# directory.
|
||||
#
|
||||
# The build process does not check dependencies against files
|
||||
# that are outside this application, thus you should do a
|
||||
# "gnumake rebuild" in the top level directory after EPICS_BASE
|
||||
# or any other external module pointed to below is rebuilt.
|
||||
#
|
||||
# Host- or target-specific settings can be given in files named
|
||||
# RELEASE.$(EPICS_HOST_ARCH).Common
|
||||
# RELEASE.Common.$(T_A)
|
||||
# RELEASE.$(EPICS_HOST_ARCH).$(T_A)
|
||||
#
|
||||
# This file is parsed by both GNUmake and an EPICS Perl script,
|
||||
# so it can ONLY contain definitions of paths to other support
|
||||
# modules, variable definitions that are used in module paths,
|
||||
# and include statements that pull in other RELEASE files.
|
||||
# Variables may be used before their values have been set.
|
||||
# Build variables that are NOT used in paths should be set in
|
||||
# the CONFIG_SITE file.
|
||||
|
||||
# Variables and paths to dependent modules:
|
||||
#MODULES = /path/to/modules
|
||||
#MYMODULE = $(MODULES)/my-module
|
||||
|
||||
# If using the sequencer, point SNCSEQ at its top directory:
|
||||
#SNCSEQ = $(MODULES)/seq-ver
|
||||
|
||||
# EPICS_BASE should appear last so earlier modules can override stuff:
|
||||
EPICS_BASE = /path/to/base/must/be/set/in/a/RELEASE.local/file
|
||||
|
||||
# Set RULES here if you want to use build rules from somewhere
|
||||
# other than EPICS_BASE:
|
||||
#RULES = $(MODULES)/build-rules
|
||||
|
||||
# These allow developers to override the RELEASE variable settings
|
||||
# without having to modify the configure/RELEASE file itself.
|
||||
-include $(TOP)/../RELEASE.local
|
||||
-include $(TOP)/configure/RELEASE.local
|
||||
|
||||
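
For context, the RELEASE.local mechanism referenced in the comments above is what ties the module to the dependencies that the prepare step cloned: each line assigns one dependency variable to the directory holding that build, with EPICS_BASE last, as the template recommends. A hypothetical RELEASE.local; the paths and versions are placeholders only, not the actual cache layout used by the script:

    ASYN=C:\projects\.cache\asyn-R4-36
    EPICS_BASE=C:\projects\.cache\base-R7.0.3
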
configure/RULES (new file, 6 lines)
@@ -0,0 +1,6 @@
|
||||
# RULES
|
||||
|
||||
include $(CONFIG)/RULES
|
||||
|
||||
# Library should be rebuilt because LIBOBJS may have changed.
|
||||
$(LIBNAME): ../Makefile
|
||||
configure/RULES.ioc (new file, 2 lines)
@@ -0,0 +1,2 @@
|
||||
#RULES.ioc
|
||||
include $(CONFIG)/RULES.ioc
|
||||
configure/RULES_DIRS (new file, 2 lines)
@@ -0,0 +1,2 @@
|
||||
#RULES_DIRS
|
||||
include $(CONFIG)/RULES_DIRS
|
||||
configure/RULES_TOP (new file, 3 lines)
@@ -0,0 +1,3 @@
|
||||
#RULES_TOP
|
||||
include $(CONFIG)/RULES_TOP
|
||||
|
||||
defaults.set (new file, 37 lines)
@@ -0,0 +1,37 @@
|
||||
# EPICS Base
|
||||
BASE_DIRNAME=base
|
||||
BASE_REPONAME=epics-base
|
||||
BASE_REPOOWNER=epics-base
|
||||
BASE_VARNAME=EPICS_BASE
|
||||
|
||||
PVDATA_DIRNAME=pvData
|
||||
PVDATA_REPONAME=pvDataCPP
|
||||
PVDATA_REPOOWNER=epics-base
|
||||
|
||||
PVACCESS_DIRNAME=pvAccess
|
||||
PVACCESS_REPONAME=pvAccessCPP
|
||||
PVACCESS_REPOOWNER=epics-base
|
||||
|
||||
NTYPES_DIRNAME=normativeTypes
|
||||
NTYPES_REPONAME=normativeTypesCPP
|
||||
NTYPES_REPOOWNER=epics-base
|
||||
|
||||
# Sequencer
|
||||
SNCSEQ_REPOURL=https://www-csr.bessy.de/control/SoftDist/sequencer/repo/branch-2-2.git
|
||||
SNCSEQ_DEPTH=0
|
||||
SNCSEQ_DIRNAME=seq
|
||||
|
||||
# StreamDevice
|
||||
STREAM_REPONAME=StreamDevice
|
||||
STREAM_REPOOWNER=paulscherrerinstitute
|
||||
|
||||
# The default settings also work (and are tested) for:
|
||||
# asyn
|
||||
# std
|
||||
# calc
|
||||
# autosave
|
||||
# busy
|
||||
# sscan
|
||||
# iocStats
|
||||
# motor
|
||||
# ipac
|
||||
exampleApp/Db/Makefile (new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
TOP=../..
|
||||
include $(TOP)/configure/CONFIG
|
||||
#----------------------------------------
|
||||
# ADD MACRO DEFINITIONS BELOW HERE
|
||||
|
||||
# Install databases, templates & substitutions like this
|
||||
DB += dbExample1.db
|
||||
DB += dbExample2.db
|
||||
DB += dbSubExample.db
|
||||
DB += user.substitutions
|
||||
|
||||
# Host-side expansion of substitutions file with MSI
|
||||
DB += dbExample3.db
|
||||
|
||||
# If <anyname>.db template is not named <anyname>*.template add
|
||||
# <anyname>_TEMPLATE = <templatename>
|
||||
|
||||
include $(TOP)/configure/RULES
|
||||
#----------------------------------------
|
||||
# ADD EXTRA GNUMAKE RULES BELOW HERE
|
||||
|
||||
exampleApp/Db/dbExample1.db (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
record(ai, "$(user):aiExample")
|
||||
{
|
||||
field(DESC, "Analog input")
|
||||
field(INP, "$(user):calcExample.VAL NPP NMS")
|
||||
field(EGUF, "10")
|
||||
field(EGU, "Counts")
|
||||
field(HOPR, "10")
|
||||
field(LOPR, "0")
|
||||
field(HIHI, "8")
|
||||
field(HIGH, "6")
|
||||
field(LOW, "4")
|
||||
field(LOLO, "2")
|
||||
field(HHSV, "MAJOR")
|
||||
field(HSV, "MINOR")
|
||||
field(LSV, "MINOR")
|
||||
field(LLSV, "MAJOR")
|
||||
}
|
||||
record(calc, "$(user):calcExample")
|
||||
{
|
||||
field(DESC, "Counter")
|
||||
field(SCAN,"1 second")
|
||||
field(FLNK, "$(user):aiExample")
|
||||
field(CALC, "(A<B)?(A+C):D")
|
||||
field(INPA, "$(user):calcExample.VAL NPP NMS")
|
||||
field(INPB, "9")
|
||||
field(INPC, "1")
|
||||
field(INPD, "0")
|
||||
field(EGU, "Counts")
|
||||
field(HOPR, "10")
|
||||
field(HIHI, "8")
|
||||
field(HIGH, "6")
|
||||
field(LOW, "4")
|
||||
field(LOLO, "2")
|
||||
field(HHSV, "MAJOR")
|
||||
field(HSV, "MINOR")
|
||||
field(LSV, "MINOR")
|
||||
field(LLSV, "MAJOR")
|
||||
}
|
||||
record(xxx, "$(user):xxxExample")
|
||||
{
|
||||
field(DESC, "xxx record")
|
||||
field(EGU, "Counts")
|
||||
field(HOPR, "10")
|
||||
field(HIHI, "8")
|
||||
field(HIGH, "6")
|
||||
field(LOW, "4")
|
||||
field(LOLO, "2")
|
||||
field(HHSV, "MAJOR")
|
||||
field(HSV, "MINOR")
|
||||
field(LSV, "MINOR")
|
||||
field(LLSV, "MAJOR")
|
||||
}
|
||||
record(compress,"$(user):compressExample")
|
||||
{
|
||||
field(DESC, "Circular buffer")
|
||||
field(INP,"$(user):aiExample.VAL CP NMS")
|
||||
field(ALG,"Circular Buffer")
|
||||
field(NSAM,"10")
|
||||
field(HOPR,"10")
|
||||
field(EGU,"Counts")
|
||||
}
|
||||
|
||||
exampleApp/Db/dbExample2.db (new file, 40 lines)
@@ -0,0 +1,40 @@
|
||||
record(calc, "$(user):calcExample$(no)")
|
||||
{
|
||||
alias("$(user):calc$(no)")
|
||||
field(DESC, "Counter No. $(no)")
|
||||
field(SCAN,"$(scan)")
|
||||
field(FLNK, "$(user):aiExample$(no)")
|
||||
field(CALC, "(A<B)?(A+C):D")
|
||||
field(INPA, "$(user):calcExample$(no).VAL NPP NMS")
|
||||
field(INPB, "9")
|
||||
field(INPC, "1")
|
||||
field(INPD, "0")
|
||||
field(EGU, "Counts")
|
||||
field(HOPR, "10")
|
||||
field(HIHI, "8")
|
||||
field(HIGH, "6")
|
||||
field(LOW, "4")
|
||||
field(LOLO, "2")
|
||||
field(HHSV, "MAJOR")
|
||||
field(HSV, "MINOR")
|
||||
field(LSV, "MINOR")
|
||||
field(LLSV, "MAJOR")
|
||||
}
|
||||
record(ai, "$(user):aiExample$(no)")
|
||||
{
|
||||
field(DESC, "Analog input No. $(no)")
|
||||
field(INP, "$(user):calcExample$(no).VAL NPP NMS")
|
||||
field(EGUF, "10")
|
||||
field(EGU, "Counts")
|
||||
field(HOPR, "10")
|
||||
field(LOPR, "0")
|
||||
field(HIHI, "8")
|
||||
field(HIGH, "6")
|
||||
field(LOW, "4")
|
||||
field(LOLO, "2")
|
||||
field(HHSV, "MAJOR")
|
||||
field(HSV, "MINOR")
|
||||
field(LSV, "MINOR")
|
||||
field(LLSV, "MAJOR")
|
||||
}
|
||||
alias("$(user):aiExample$(no)","$(user):ai$(no)")
|
||||
exampleApp/Db/dbExample3.substitutions (new file, 8 lines)
@@ -0,0 +1,8 @@
|
||||
# Example host-side substitutions file
|
||||
|
||||
file dbExample2.db {
|
||||
pattern { user, no, scan }
|
||||
{ "ralph", 4, "1 second" }
|
||||
{ "ralph", 5, "2 second" }
|
||||
{ "ralph", 6, "5 second" }
|
||||
}
|
||||
exampleApp/Db/dbSubExample.db (new file, 13 lines)
@@ -0,0 +1,13 @@
|
||||
record(sub,"$(user):subExample")
|
||||
{
|
||||
field(INAM,"mySubInit")
|
||||
field(SNAM,"mySubProcess")
|
||||
}
|
||||
record(aSub,"$(user):aSubExample")
|
||||
{
|
||||
field(INAM,"myAsubInit")
|
||||
field(SNAM,"myAsubProcess")
|
||||
field(FTA,"DOUBLE")
|
||||
field(NOA,"10")
|
||||
field(INPA,"$(user):compressExample CPP")
|
||||
}
|
||||
exampleApp/Db/user.substitutions (new file, 12 lines)
@@ -0,0 +1,12 @@
|
||||
# Example substitutions file
|
||||
|
||||
file "db/dbExample1.db" {
|
||||
{ user = "ralph" }
|
||||
}
|
||||
|
||||
file db/dbExample2.db {
|
||||
pattern { user, no, scan }
|
||||
{ "ralph", 1, "1 second" }
|
||||
{ "ralph", 2, "2 second" }
|
||||
{ "ralph", 3, "5 second" }
|
||||
}
|
||||
exampleApp/Makefile (new file, 13 lines)
@@ -0,0 +1,13 @@
|
||||
TOP = ..
|
||||
include $(TOP)/configure/CONFIG
|
||||
DIRS := $(DIRS) $(filter-out $(DIRS), $(wildcard *src*))
|
||||
DIRS := $(DIRS) $(filter-out $(DIRS), $(wildcard *Src*))
|
||||
DIRS := $(DIRS) $(filter-out $(DIRS), $(wildcard *db*))
|
||||
DIRS := $(DIRS) $(filter-out $(DIRS), $(wildcard *Db*))
|
||||
|
||||
DIRS := $(DIRS) test
|
||||
|
||||
test_DEPEND_DIRS += src
|
||||
|
||||
include $(TOP)/configure/RULES_DIRS
|
||||
|
||||
exampleApp/src/Makefile (new file, 82 lines)
@@ -0,0 +1,82 @@
|
||||
TOP=../..
|
||||
|
||||
include $(TOP)/configure/CONFIG
|
||||
#----------------------------------------
|
||||
# ADD MACRO DEFINITIONS BELOW HERE
|
||||
|
||||
# use the new RSET definition
|
||||
USR_CPPFLAGS += -DUSE_TYPED_RSET
|
||||
|
||||
# xxxRecord.h will be created from xxxRecord.dbd
|
||||
DBDINC += xxxRecord
|
||||
|
||||
# Install xxxSupport.dbd into <top>/dbd
|
||||
DBD += xxxSupport.dbd
|
||||
|
||||
# Build an IOC support library
|
||||
LIBRARY_IOC += exampleSupport
|
||||
|
||||
# Compile and add the code to the support library
|
||||
exampleSupport_SRCS += xxxRecord.c
|
||||
exampleSupport_SRCS += devXxxSoft.c
|
||||
|
||||
# Link locally-provided code into the support library,
|
||||
# rather than directly into the IOC application.
|
||||
# This is required for Windows DLL builds.
|
||||
exampleSupport_SRCS += dbSubExample.c
|
||||
exampleSupport_SRCS += exampleHello.c
|
||||
exampleSupport_SRCS += initTrace.c
|
||||
|
||||
exampleSupport_LIBS += $(EPICS_BASE_IOC_LIBS)
|
||||
|
||||
|
||||
# Build the IOC application
|
||||
PROD_IOC = example
|
||||
|
||||
# example.dbd will be created and installed
|
||||
DBD += example.dbd
|
||||
|
||||
# example.dbd will include these files:
|
||||
example_DBD += base.dbd
|
||||
example_DBD += xxxSupport.dbd
|
||||
example_DBD += dbSubExample.dbd
|
||||
example_DBD += exampleHello.dbd
|
||||
example_DBD += initTrace.dbd
|
||||
|
||||
# example_registerRecordDeviceDriver.cpp derives from example.dbd
|
||||
example_SRCS += example_registerRecordDeviceDriver.cpp
|
||||
|
||||
# Build the main IOC entry point where needed
|
||||
example_SRCS_DEFAULT += exampleMain.cpp
|
||||
example_SRCS_vxWorks += -nil-
|
||||
|
||||
# Link in the code from our support library
|
||||
example_LIBS += exampleSupport
|
||||
|
||||
# To build SNL programs, SNCSEQ must be defined
|
||||
# in the <top>/configure/RELEASE file
|
||||
ifneq ($(SNCSEQ),)
|
||||
# Build sncExample into exampleSupport
|
||||
sncExample_SNCFLAGS += +r
|
||||
example_DBD += sncExample.dbd
|
||||
# A .stt sequence program is *not* pre-processed:
|
||||
exampleSupport_SRCS += sncExample.stt
|
||||
exampleSupport_LIBS += seq pv
|
||||
example_LIBS += seq pv
|
||||
|
||||
# Build sncProgram as a standalone program
|
||||
PROD_HOST += sncProgram
|
||||
sncProgram_SNCFLAGS += +m
|
||||
# A .st sequence program *is* pre-processed:
|
||||
sncProgram_SRCS += sncProgram.st
|
||||
sncProgram_LIBS += seq pv
|
||||
sncProgram_LIBS += $(EPICS_BASE_HOST_LIBS)
|
||||
endif
|
||||
|
||||
# Finally link IOC to the EPICS Base libraries
|
||||
example_LIBS += $(EPICS_BASE_IOC_LIBS)
|
||||
|
||||
include $(TOP)/configure/RULES
|
||||
#----------------------------------------
|
||||
# ADD EXTRA GNUMAKE RULES BELOW HERE
|
||||
|
||||
exampleApp/src/dbSubExample.c (new file, 49 lines)
@@ -0,0 +1,49 @@
|
||||
#include <stdio.h>
|
||||
|
||||
#include <dbDefs.h>
|
||||
#include <registryFunction.h>
|
||||
#include <subRecord.h>
|
||||
#include <aSubRecord.h>
|
||||
#include <epicsExport.h>
|
||||
|
||||
int mySubDebug;
|
||||
|
||||
static long mySubInit(subRecord *precord)
|
||||
{
|
||||
if (mySubDebug)
|
||||
printf("Record %s called mySubInit(%p)\n",
|
||||
precord->name, (void*) precord);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static long mySubProcess(subRecord *precord)
|
||||
{
|
||||
if (mySubDebug)
|
||||
printf("Record %s called mySubProcess(%p)\n",
|
||||
precord->name, (void*) precord);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static long myAsubInit(aSubRecord *precord)
|
||||
{
|
||||
if (mySubDebug)
|
||||
printf("Record %s called myAsubInit(%p)\n",
|
||||
precord->name, (void*) precord);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static long myAsubProcess(aSubRecord *precord)
|
||||
{
|
||||
if (mySubDebug)
|
||||
printf("Record %s called myAsubProcess(%p)\n",
|
||||
precord->name, (void*) precord);
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Register these symbols for use by IOC code: */
|
||||
|
||||
epicsExportAddress(int, mySubDebug);
|
||||
epicsRegisterFunction(mySubInit);
|
||||
epicsRegisterFunction(mySubProcess);
|
||||
epicsRegisterFunction(myAsubInit);
|
||||
epicsRegisterFunction(myAsubProcess);
|
||||
exampleApp/src/dbSubExample.dbd (new file, 5 lines)
@@ -0,0 +1,5 @@
|
||||
variable(mySubDebug)
|
||||
function(mySubInit)
|
||||
function(mySubProcess)
|
||||
function(myAsubInit)
|
||||
function(myAsubProcess)
|
||||
exampleApp/src/devXxxSoft.c (new file, 58 lines)
@@ -0,0 +1,58 @@
|
||||
/* devXxxSoft.c */
|
||||
/* Example device support module */
|
||||
|
||||
#include <stddef.h>
|
||||
#include <stdlib.h>
|
||||
#include <stdio.h>
|
||||
#include <string.h>
|
||||
|
||||
#include "alarm.h"
|
||||
#include "cvtTable.h"
|
||||
#include "dbDefs.h"
|
||||
#include "dbAccess.h"
|
||||
#include "recGbl.h"
|
||||
#include "recSup.h"
|
||||
#include "devSup.h"
|
||||
#include "link.h"
|
||||
#include "xxxRecord.h"
|
||||
#include "epicsExport.h"
|
||||
|
||||
/*Create the dset for devXxxSoft */
|
||||
static long init_record();
|
||||
static long read_xxx();
|
||||
struct {
|
||||
long number;
|
||||
DEVSUPFUN report;
|
||||
DEVSUPFUN init;
|
||||
DEVSUPFUN init_record;
|
||||
DEVSUPFUN get_ioint_info;
|
||||
DEVSUPFUN read_xxx;
|
||||
}devXxxSoft={
|
||||
5,
|
||||
NULL,
|
||||
NULL,
|
||||
init_record,
|
||||
NULL,
|
||||
read_xxx,
|
||||
};
|
||||
epicsExportAddress(dset,devXxxSoft);
|
||||
|
||||
|
||||
static long init_record(pxxx)
|
||||
struct xxxRecord *pxxx;
|
||||
{
|
||||
if(recGblInitConstantLink(&pxxx->inp,DBF_DOUBLE,&pxxx->val))
|
||||
pxxx->udf = FALSE;
|
||||
return(0);
|
||||
}
|
||||
|
||||
static long read_xxx(pxxx)
|
||||
struct xxxRecord *pxxx;
|
||||
{
|
||||
long status;
|
||||
|
||||
status = dbGetLink(&(pxxx->inp),DBF_DOUBLE, &(pxxx->val),0,0);
|
||||
/* If return was successful then set undefined false */
|
||||
if(!status) pxxx->udf = FALSE;
|
||||
return(0);
|
||||
}
|
||||
exampleApp/src/exampleHello.c (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
/* Example showing how to register a new command with iocsh */
|
||||
|
||||
#include <stdio.h>
|
||||
|
||||
#include <epicsExport.h>
|
||||
#include <iocsh.h>
|
||||
|
||||
/* This is the command, which the vxWorks shell will call directly */
|
||||
void hello(const char *name) {
|
||||
if (name) {
|
||||
printf("Hello %s, from example\n", name);
|
||||
} else {
|
||||
puts("Hello from example");
|
||||
}
|
||||
}
|
||||
|
||||
/* Information needed by iocsh */
|
||||
static const iocshArg helloArg0 = {"name", iocshArgString};
|
||||
static const iocshArg *helloArgs[] = {&helloArg0};
|
||||
static const iocshFuncDef helloFuncDef = {"hello", 1, helloArgs};
|
||||
|
||||
/* Wrapper called by iocsh, selects the argument types that hello needs */
|
||||
static void helloCallFunc(const iocshArgBuf *args) {
|
||||
hello(args[0].sval);
|
||||
}
|
||||
|
||||
/* Registration routine, runs at startup */
|
||||
static void helloRegister(void) {
|
||||
iocshRegister(&helloFuncDef, helloCallFunc);
|
||||
}
|
||||
epicsExportRegistrar(helloRegister);
|
||||
exampleApp/src/exampleHello.dbd (new file, 1 line)
@@ -0,0 +1 @@
|
||||
registrar(helloRegister)
|
||||
exampleApp/src/exampleMain.cpp (new file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
/* exampleMain.cpp */
|
||||
/* Author: Marty Kraimer Date: 17MAR2000 */
|
||||
|
||||
#include <stddef.h>
|
||||
#include <stdlib.h>
|
||||
#include <stddef.h>
|
||||
#include <string.h>
|
||||
#include <stdio.h>
|
||||
|
||||
#include "epicsExit.h"
|
||||
#include "epicsThread.h"
|
||||
#include "iocsh.h"
|
||||
|
||||
int main(int argc,char *argv[])
|
||||
{
|
||||
if(argc>=2) {
|
||||
iocsh(argv[1]);
|
||||
epicsThreadSleep(.2);
|
||||
}
|
||||
iocsh(NULL);
|
||||
epicsExit(0);
|
||||
return(0);
|
||||
}
|
||||
exampleApp/src/initTrace.c (new file, 39 lines)
@@ -0,0 +1,39 @@
|
||||
/* initTrace.c */
|
||||
|
||||
/*
|
||||
* An initHook routine to trace the iocInit() process.
|
||||
* Prints out the name of each state as it is reached.
|
||||
*/
|
||||
|
||||
#include <stdio.h>
|
||||
|
||||
#include "initHooks.h"
|
||||
#include "epicsExport.h"
|
||||
#include "iocsh.h"
|
||||
|
||||
|
||||
static void trace(initHookState state) {
|
||||
printf("iocInit: Reached %s\n", initHookName(state));
|
||||
}
|
||||
|
||||
int traceIocInit(void) {
|
||||
static int done = 0;
|
||||
if (done)
|
||||
return -1;
|
||||
done = 1;
|
||||
|
||||
initHookRegister(trace);
|
||||
puts("iocInit will be traced");
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
static const iocshFuncDef traceInitFuncDef = {"traceIocInit", 0, NULL};
|
||||
static void traceInitFunc(const iocshArgBuf *args) {
|
||||
traceIocInit();
|
||||
}
|
||||
|
||||
static void initTraceRegister(void) {
|
||||
iocshRegister(&traceInitFuncDef, traceInitFunc);
|
||||
}
|
||||
epicsExportRegistrar(initTraceRegister);
|
||||
exampleApp/src/initTrace.dbd (new file, 1 line)
@@ -0,0 +1 @@
|
||||
registrar(initTraceRegister)
|
||||
exampleApp/src/sncExample.dbd (new file, 1 line)
@@ -0,0 +1 @@
|
||||
registrar(sncExampleRegistrar)
|
||||
exampleApp/src/sncExample.stt (new file, 22 lines)
@@ -0,0 +1,22 @@
|
||||
program sncExample
|
||||
double v;
|
||||
assign v to "{user}:aiExample";
|
||||
monitor v;
|
||||
|
||||
ss ss1 {
|
||||
state init {
|
||||
when (delay(10)) {
|
||||
printf("sncExample: Startup delay over\n");
|
||||
} state low
|
||||
}
|
||||
state low {
|
||||
when (v > 5.0) {
|
||||
printf("sncExample: Changing to high\n");
|
||||
} state high
|
||||
}
|
||||
state high {
|
||||
when (v <= 5.0) {
|
||||
printf("sncExample: Changing to low\n");
|
||||
} state low
|
||||
}
|
||||
}
|
||||
exampleApp/src/sncProgram.st (new file, 1 line)
@@ -0,0 +1 @@
|
||||
#include "../sncExample.stt"
|
||||
exampleApp/src/xxxRecord.c (new file, 273 lines)
@@ -0,0 +1,273 @@
|
||||
/* xxxRecord.c */
|
||||
/* Example record support module */
|
||||
|
||||
#include <stddef.h>
|
||||
#include <stdlib.h>
|
||||
#include <stdio.h>
|
||||
#include <string.h>
|
||||
|
||||
#include "epicsMath.h"
|
||||
#include "alarm.h"
|
||||
#include "dbAccess.h"
|
||||
#include "recGbl.h"
|
||||
#include "dbEvent.h"
|
||||
#include "dbDefs.h"
|
||||
#include "dbAccess.h"
|
||||
#include "devSup.h"
|
||||
#include "errMdef.h"
|
||||
#include "recSup.h"
|
||||
#include "special.h"
|
||||
#define GEN_SIZE_OFFSET
|
||||
#include "xxxRecord.h"
|
||||
#undef GEN_SIZE_OFFSET
|
||||
#include "epicsExport.h"
|
||||
|
||||
/* Create RSET - Record Support Entry Table */
|
||||
#define report NULL
|
||||
#define initialize NULL
|
||||
static long init_record(struct dbCommon *, int);
|
||||
static long process(struct dbCommon *);
|
||||
#define special NULL
|
||||
#define get_value NULL
|
||||
#define cvt_dbaddr NULL
|
||||
#define get_array_info NULL
|
||||
#define put_array_info NULL
|
||||
static long get_units(DBADDR *, char *);
|
||||
static long get_precision(const DBADDR *, long *);
|
||||
#define get_enum_str NULL
|
||||
#define get_enum_strs NULL
|
||||
#define put_enum_str NULL
|
||||
static long get_graphic_double(DBADDR *, struct dbr_grDouble *);
|
||||
static long get_control_double(DBADDR *, struct dbr_ctrlDouble *);
|
||||
static long get_alarm_double(DBADDR *, struct dbr_alDouble *);
|
||||
|
||||
rset xxxRSET={
|
||||
RSETNUMBER,
|
||||
report,
|
||||
initialize,
|
||||
init_record,
|
||||
process,
|
||||
special,
|
||||
get_value,
|
||||
cvt_dbaddr,
|
||||
get_array_info,
|
||||
put_array_info,
|
||||
get_units,
|
||||
get_precision,
|
||||
get_enum_str,
|
||||
get_enum_strs,
|
||||
put_enum_str,
|
||||
get_graphic_double,
|
||||
get_control_double,
|
||||
get_alarm_double
|
||||
};
|
||||
epicsExportAddress(rset,xxxRSET);
|
||||
|
||||
typedef struct xxxset { /* xxx input dset */
|
||||
long number;
|
||||
DEVSUPFUN dev_report;
|
||||
DEVSUPFUN init;
|
||||
DEVSUPFUN init_record; /*returns: (-1,0)=>(failure,success)*/
|
||||
DEVSUPFUN get_ioint_info;
|
||||
DEVSUPFUN read_xxx;
|
||||
}xxxdset;
|
||||
|
||||
static void checkAlarms(xxxRecord *prec);
|
||||
static void monitor(xxxRecord *prec);
|
||||
|
||||
static long init_record(struct dbCommon *pcommon, int pass)
|
||||
{
|
||||
xxxRecord *prec = (xxxRecord *)pcommon;
|
||||
xxxdset *pdset;
|
||||
long status;
|
||||
|
||||
if (pass==0) return(0);
|
||||
|
||||
if(!(pdset = (xxxdset *)(prec->dset))) {
|
||||
recGblRecordError(S_dev_noDSET,(void *)prec,"xxx: init_record");
|
||||
return(S_dev_noDSET);
|
||||
}
|
||||
/* must have read_xxx function defined */
|
||||
if( (pdset->number < 5) || (pdset->read_xxx == NULL) ) {
|
||||
recGblRecordError(S_dev_missingSup,(void *)prec,"xxx: init_record");
|
||||
return(S_dev_missingSup);
|
||||
}
|
||||
|
||||
if( pdset->init_record ) {
|
||||
if((status=(*pdset->init_record)(prec))) return(status);
|
||||
}
|
||||
return(0);
|
||||
}
|
||||
|
||||
static long process(struct dbCommon *pcommon)
|
||||
{
|
||||
xxxRecord *prec = (xxxRecord *)pcommon;
|
||||
xxxdset *pdset = (xxxdset *)(prec->dset);
|
||||
long status;
|
||||
unsigned char pact=prec->pact;
|
||||
|
||||
if( (pdset==NULL) || (pdset->read_xxx==NULL) ) {
|
||||
prec->pact=TRUE;
|
||||
recGblRecordError(S_dev_missingSup,(void *)prec,"read_xxx");
|
||||
return(S_dev_missingSup);
|
||||
}
|
||||
|
||||
/* pact must not be set until after calling device support */
|
||||
status=(*pdset->read_xxx)(prec);
|
||||
/* check if device support set pact */
|
||||
if ( !pact && prec->pact ) return(0);
|
||||
prec->pact = TRUE;
|
||||
|
||||
recGblGetTimeStamp(prec);
|
||||
/* check for alarms */
|
||||
checkAlarms(prec);
|
||||
/* check event list */
|
||||
monitor(prec);
|
||||
/* process the forward scan link record */
|
||||
recGblFwdLink(prec);
|
||||
|
||||
prec->pact=FALSE;
|
||||
return(status);
|
||||
}
|
||||
|
||||
static long get_units(DBADDR *paddr, char *units)
|
||||
{
|
||||
xxxRecord *prec=(xxxRecord *)paddr->precord;
|
||||
|
||||
strncpy(units,prec->egu,DB_UNITS_SIZE);
|
||||
return(0);
|
||||
}
|
||||
|
||||
static long get_precision(const DBADDR *paddr, long *precision)
|
||||
{
|
||||
xxxRecord *prec=(xxxRecord *)paddr->precord;
|
||||
|
||||
*precision = prec->prec;
|
||||
if(paddr->pfield == (void *)&prec->val) return(0);
|
||||
recGblGetPrec(paddr,precision);
|
||||
return(0);
|
||||
}
|
||||
|
||||
static long get_graphic_double(DBADDR *paddr,struct dbr_grDouble *pgd)
|
||||
{
|
||||
xxxRecord *prec=(xxxRecord *)paddr->precord;
|
||||
int fieldIndex = dbGetFieldIndex(paddr);
|
||||
|
||||
if(fieldIndex == xxxRecordVAL
|
||||
|| fieldIndex == xxxRecordHIHI
|
||||
|| fieldIndex == xxxRecordHIGH
|
||||
|| fieldIndex == xxxRecordLOW
|
||||
|| fieldIndex == xxxRecordLOLO
|
||||
|| fieldIndex == xxxRecordHOPR
|
||||
|| fieldIndex == xxxRecordLOPR) {
|
||||
pgd->upper_disp_limit = prec->hopr;
|
||||
pgd->lower_disp_limit = prec->lopr;
|
||||
} else recGblGetGraphicDouble(paddr,pgd);
|
||||
return(0);
|
||||
}
|
||||
|
||||
static long get_control_double(DBADDR *paddr,struct dbr_ctrlDouble *pcd)
|
||||
{
|
||||
xxxRecord *prec=(xxxRecord *)paddr->precord;
|
||||
int fieldIndex = dbGetFieldIndex(paddr);
|
||||
|
||||
if(fieldIndex == xxxRecordVAL
|
||||
|| fieldIndex == xxxRecordHIHI
|
||||
|| fieldIndex == xxxRecordHIGH
|
||||
|| fieldIndex == xxxRecordLOW
|
||||
|| fieldIndex == xxxRecordLOLO) {
|
||||
pcd->upper_ctrl_limit = prec->hopr;
|
||||
pcd->lower_ctrl_limit = prec->lopr;
|
||||
} else recGblGetControlDouble(paddr,pcd);
|
||||
return(0);
|
||||
}
|
||||
|
||||
static long get_alarm_double(DBADDR *paddr,struct dbr_alDouble *pad)
|
||||
{
|
||||
xxxRecord *prec=(xxxRecord *)paddr->precord;
|
||||
int fieldIndex = dbGetFieldIndex(paddr);
|
||||
|
||||
if(fieldIndex == xxxRecordVAL) {
|
||||
pad->upper_alarm_limit = prec->hhsv ? prec->hihi : epicsNAN;
|
||||
pad->upper_warning_limit = prec->hsv ? prec->high : epicsNAN;
|
||||
pad->lower_warning_limit = prec->lsv ? prec->low : epicsNAN;
|
||||
pad->lower_alarm_limit = prec->llsv ? prec->lolo : epicsNAN;
|
||||
} else recGblGetAlarmDouble(paddr,pad);
|
||||
return(0);
|
||||
}
|
||||
|
||||
static void checkAlarms(xxxRecord *prec)
|
||||
{
|
||||
double val, hyst, lalm;
|
||||
float hihi, high, low, lolo;
|
||||
unsigned short hhsv, llsv, hsv, lsv;
|
||||
|
||||
if(prec->udf == TRUE ){
|
||||
recGblSetSevr(prec,UDF_ALARM,INVALID_ALARM);
|
||||
return;
|
||||
}
|
||||
hihi = prec->hihi; lolo = prec->lolo; high = prec->high; low = prec->low;
|
||||
hhsv = prec->hhsv; llsv = prec->llsv; hsv = prec->hsv; lsv = prec->lsv;
|
||||
val = prec->val; hyst = prec->hyst; lalm = prec->lalm;
|
||||
|
||||
/* alarm condition hihi */
|
||||
if (hhsv && (val >= hihi || ((lalm==hihi) && (val >= hihi-hyst)))){
|
||||
if (recGblSetSevr(prec,HIHI_ALARM,prec->hhsv)) prec->lalm = hihi;
|
||||
return;
|
||||
}
|
||||
|
||||
/* alarm condition lolo */
|
||||
if (llsv && (val <= lolo || ((lalm==lolo) && (val <= lolo+hyst)))){
|
||||
if (recGblSetSevr(prec,LOLO_ALARM,prec->llsv)) prec->lalm = lolo;
|
||||
return;
|
||||
}
|
||||
|
||||
/* alarm condition high */
|
||||
if (hsv && (val >= high || ((lalm==high) && (val >= high-hyst)))){
|
||||
if (recGblSetSevr(prec,HIGH_ALARM,prec->hsv)) prec->lalm = high;
|
||||
return;
|
||||
}
|
||||
|
||||
/* alarm condition low */
|
||||
if (lsv && (val <= low || ((lalm==low) && (val <= low+hyst)))){
|
||||
if (recGblSetSevr(prec,LOW_ALARM,prec->lsv)) prec->lalm = low;
|
||||
return;
|
||||
}
|
||||
|
||||
/* we get here only if val is out of alarm by at least hyst */
|
||||
prec->lalm = val;
|
||||
return;
|
||||
}
|
||||
|
||||
static void monitor(xxxRecord *prec)
|
||||
{
|
||||
unsigned short monitor_mask;
|
||||
double delta;
|
||||
|
||||
monitor_mask = recGblResetAlarms(prec);
|
||||
/* check for value change */
|
||||
delta = prec->mlst - prec->val;
|
||||
if(delta<0.0) delta = -delta;
|
||||
if (delta > prec->mdel) {
|
||||
/* post events for value change */
|
||||
monitor_mask |= DBE_VALUE;
|
||||
/* update last value monitored */
|
||||
prec->mlst = prec->val;
|
||||
}
|
||||
|
||||
/* check for archive change */
|
||||
delta = prec->alst - prec->val;
|
||||
if(delta<0.0) delta = -delta;
|
||||
if (delta > prec->adel) {
|
||||
/* post events on value field for archive change */
|
||||
monitor_mask |= DBE_LOG;
|
||||
/* update last archive value monitored */
|
||||
prec->alst = prec->val;
|
||||
}
|
||||
|
||||
/* send out monitors connected to the value field */
|
||||
if (monitor_mask){
|
||||
db_post_events(prec,&prec->val,monitor_mask);
|
||||
}
|
||||
return;
|
||||
}
|
||||
exampleApp/src/xxxRecord.dbd (new file, 117 lines)
@@ -0,0 +1,117 @@
|
||||
recordtype(xxx) {
|
||||
include "dbCommon.dbd"
|
||||
field(VAL,DBF_DOUBLE) {
|
||||
prompt("Current EGU Value")
|
||||
asl(ASL0)
|
||||
pp(TRUE)
|
||||
}
|
||||
field(INP,DBF_INLINK) {
|
||||
prompt("Input Specification")
|
||||
promptgroup(GUI_INPUTS)
|
||||
special(SPC_NOMOD)
|
||||
interest(1)
|
||||
}
|
||||
field(PREC,DBF_SHORT) {
|
||||
prompt("Display Precision")
|
||||
promptgroup(GUI_DISPLAY)
|
||||
interest(1)
|
||||
}
|
||||
field(EGU,DBF_STRING) {
|
||||
prompt("Engineering Units")
|
||||
promptgroup(GUI_DISPLAY)
|
||||
interest(1)
|
||||
size(16)
|
||||
}
|
||||
field(HOPR,DBF_FLOAT) {
|
||||
prompt("High Operating Range")
|
||||
promptgroup(GUI_DISPLAY)
|
||||
interest(1)
|
||||
}
|
||||
field(LOPR,DBF_FLOAT) {
|
||||
prompt("Low Operating Range")
|
||||
promptgroup(GUI_DISPLAY)
|
||||
interest(1)
|
||||
}
|
||||
field(HIHI,DBF_FLOAT) {
|
||||
prompt("Hihi Alarm Limit")
|
||||
promptgroup(GUI_ALARMS)
|
||||
pp(TRUE)
|
||||
interest(1)
|
||||
}
|
||||
field(LOLO,DBF_FLOAT) {
|
||||
prompt("Lolo Alarm Limit")
|
||||
promptgroup(GUI_ALARMS)
|
||||
pp(TRUE)
|
||||
interest(1)
|
||||
}
|
||||
field(HIGH,DBF_FLOAT) {
|
||||
prompt("High Alarm Limit")
|
||||
promptgroup(GUI_ALARMS)
|
||||
pp(TRUE)
|
||||
interest(1)
|
||||
}
|
||||
field(LOW,DBF_FLOAT) {
|
||||
prompt("Low Alarm Limit")
|
||||
promptgroup(GUI_ALARMS)
|
||||
pp(TRUE)
|
||||
interest(1)
|
||||
}
|
||||
field(HHSV,DBF_MENU) {
|
||||
prompt("Hihi Severity")
|
||||
promptgroup(GUI_ALARMS)
|
||||
pp(TRUE)
|
||||
interest(1)
|
||||
menu(menuAlarmSevr)
|
||||
}
|
||||
field(LLSV,DBF_MENU) {
|
||||
prompt("Lolo Severity")
|
||||
promptgroup(GUI_ALARMS)
|
||||
pp(TRUE)
|
||||
interest(1)
|
||||
menu(menuAlarmSevr)
|
||||
}
|
||||
field(HSV,DBF_MENU) {
|
||||
prompt("High Severity")
|
||||
promptgroup(GUI_ALARMS)
|
||||
pp(TRUE)
|
||||
interest(1)
|
||||
menu(menuAlarmSevr)
|
||||
}
|
||||
field(LSV,DBF_MENU) {
|
||||
prompt("Low Severity")
|
||||
promptgroup(GUI_ALARMS)
|
||||
pp(TRUE)
|
||||
interest(1)
|
||||
menu(menuAlarmSevr)
|
||||
}
|
||||
field(HYST,DBF_DOUBLE) {
|
||||
prompt("Alarm Deadband")
|
||||
promptgroup(GUI_ALARMS)
|
||||
interest(1)
|
||||
}
|
||||
field(ADEL,DBF_DOUBLE) {
|
||||
prompt("Archive Deadband")
|
||||
promptgroup(GUI_DISPLAY)
|
||||
interest(1)
|
||||
}
|
||||
field(MDEL,DBF_DOUBLE) {
|
||||
prompt("Monitor Deadband")
|
||||
promptgroup(GUI_DISPLAY)
|
||||
interest(1)
|
||||
}
|
||||
field(LALM,DBF_DOUBLE) {
|
||||
prompt("Last Value Alarmed")
|
||||
special(SPC_NOMOD)
|
||||
interest(3)
|
||||
}
|
||||
field(ALST,DBF_DOUBLE) {
|
||||
prompt("Last Value Archived")
|
||||
special(SPC_NOMOD)
|
||||
interest(3)
|
||||
}
|
||||
field(MLST,DBF_DOUBLE) {
|
||||
prompt("Last Val Monitored")
|
||||
special(SPC_NOMOD)
|
||||
interest(3)
|
||||
}
|
||||
}
|
||||
exampleApp/src/xxxSupport.dbd (new file, 2 lines)
@@ -0,0 +1,2 @@
|
||||
include "xxxRecord.dbd"
|
||||
device(xxx,CONSTANT,devXxxSoft,"SoftChannel")
|
||||
exampleApp/test/Makefile (new file, 58 lines)
@@ -0,0 +1,58 @@
|
||||
#*************************************************************************
|
||||
# Copyright (c) 2020 ITER Organization.
|
||||
# EPICS BASE is distributed subject to a Software License Agreement found
|
||||
# in the file LICENSE that is included with this distribution.
|
||||
#*************************************************************************
|
||||
CURDIR := $(patsubst %/,%,$(dir $(lastword $(MAKEFILE_LIST))))
|
||||
TOP = ../..
|
||||
|
||||
include $(TOP)/configure/CONFIG
|
||||
|
||||
# use the new RSET definition
|
||||
USR_CPPFLAGS += -DUSE_TYPED_RSET
|
||||
|
||||
TARGETS += $(COMMON_DIR)/exampleTest.dbd
|
||||
DBDDEPENDS_FILES += exampleTest.dbd$(DEP)
|
||||
exampleTest_DBD += example.dbd
|
||||
|
||||
TESTFILES += $(COMMON_DIR)/exampleTest.dbd
|
||||
|
||||
testHarness_SRCS += exampleTest_registerRecordDeviceDriver.cpp
|
||||
|
||||
PROD_LIBS += exampleSupport
|
||||
ifneq ($(SNCSEQ),)
|
||||
PROD_LIBS += seq pv
|
||||
endif
|
||||
PROD_LIBS += $(EPICS_BASE_IOC_LIBS)
|
||||
|
||||
TESTPROD_HOST += exampleTest
|
||||
exampleTest_SRCS += exampleTest.c
|
||||
exampleTest_SRCS += exampleTest_registerRecordDeviceDriver.cpp
|
||||
testHarness_SRCS += exampleTest.c
|
||||
TESTFILES += ../../../db/dbExample1.db
|
||||
TESTS += exampleTest
|
||||
|
||||
# This runs all the test programs in a known working order:
|
||||
testHarness_SRCS += epicsRunExampleTests.c
|
||||
|
||||
exampleTestHarness_SRCS += $(testHarness_SRCS)
|
||||
exampleTestHarness_SRCS_RTEMS += rtemsTestHarness.c
|
||||
|
||||
PROD_SRCS_RTEMS += rtemsTestData.c
|
||||
|
||||
PROD_vxWorks = exampleTestHarness
|
||||
PROD_RTEMS = exampleTestHarness
|
||||
|
||||
TESTSPEC_vxWorks = exampleTestHarness.munch; epicsRunExampleTests
|
||||
TESTSPEC_RTEMS = exampleTestHarness.boot; epicsRunExampleTests
|
||||
|
||||
TESTSCRIPTS_HOST += $(TESTS:%=%.t)
|
||||
ifneq ($(filter $(T_A),$(CROSS_COMPILER_RUNTEST_ARCHS)),)
|
||||
TESTPROD_RTEMS = $(TESTPROD_HOST)
|
||||
TESTSCRIPTS_RTEMS += $(TESTS:%=%.t)
|
||||
endif
|
||||
|
||||
include $(TOP)/configure/RULES
|
||||
|
||||
rtemsTestData.c : $(TESTFILES) $(TOOLS)/epicsMakeMemFs.pl
|
||||
$(PERL) $(TOOLS)/epicsMakeMemFs.pl $@ epicsRtemsFSImage $(TESTFILES)
|
||||
exampleApp/test/epicsRunExampleTests.c (new file, 28 lines)
@@ -0,0 +1,28 @@
|
||||
/*************************************************************************\
|
||||
* Copyright (c) 2011 UChicago Argonne LLC, as Operator of Argonne
|
||||
* National Laboratory.
|
||||
* EPICS BASE is distributed subject to a Software License Agreement found
|
||||
* in file LICENSE that is included with this distribution.
|
||||
\*************************************************************************/
|
||||
|
||||
/*
|
||||
* Run Example tests as a batch.
|
||||
*
|
||||
*/
|
||||
|
||||
#include "epicsUnitTest.h"
|
||||
#include "epicsExit.h"
|
||||
#include "dbmf.h"
|
||||
|
||||
int exampleTest(void);
|
||||
|
||||
void epicsRunExampleTests(void)
|
||||
{
|
||||
testHarness();
|
||||
|
||||
runTest(exampleTest);
|
||||
|
||||
dbmfFreeChunks();
|
||||
|
||||
epicsExit(0); /* Trigger test harness */
|
||||
}
|
||||
exampleApp/test/exampleTest.c (new file, 58 lines)
@@ -0,0 +1,58 @@
|
||||
/*************************************************************************\
|
||||
* Copyright (c) 2020 ITER Organization.
|
||||
* EPICS BASE is distributed subject to a Software License Agreement found
|
||||
* in file LICENSE that is included with this distribution.
|
||||
\*************************************************************************/
|
||||
|
||||
/*
|
||||
* Author: Ralph Lange <ralph.lange@gmx.de>
|
||||
*/
|
||||
|
||||
#include <string.h>
|
||||
|
||||
#include <epicsUnitTest.h>
|
||||
#include <testMain.h>
|
||||
|
||||
#include <dbAccess.h>
|
||||
#include <dbStaticLib.h>
|
||||
#include <errlog.h>
|
||||
|
||||
void exampleTest_registerRecordDeviceDriver(struct dbBase *);
|
||||
|
||||
static dbCommon *prec;
|
||||
|
||||
/* from Base 3.15 dbUnitTest.c */
|
||||
static
|
||||
dbCommon* testdbRecordPtr(const char* pv)
|
||||
{
|
||||
DBADDR addr;
|
||||
|
||||
if (dbNameToAddr(pv, &addr))
|
||||
testAbort("Missing record \"%s\"", pv);
|
||||
|
||||
return addr.precord;
|
||||
}
|
||||
|
||||
static void testOnce(void)
|
||||
{
|
||||
testDiag("check that tests work");
|
||||
|
||||
dbReadDatabase(&pdbbase, "exampleTest.dbd", "../O.Common", NULL);
|
||||
exampleTest_registerRecordDeviceDriver(pdbbase);
|
||||
dbReadDatabase(&pdbbase, "dbExample1.db", "../../../db", "user=test");
|
||||
|
||||
testDiag("Searching for records from example application");
|
||||
|
||||
prec = testdbRecordPtr("test:xxxExample");
|
||||
testOk((prec != NULL), "record test:xxxExample");
|
||||
|
||||
prec = testdbRecordPtr("test:aiExample");
|
||||
testOk((prec != NULL), "record test:aiExample");
|
||||
}
|
||||
|
||||
MAIN(exampleTest)
|
||||
{
|
||||
testPlan(2);
|
||||
testOnce();
|
||||
return testDone();
|
||||
}
|
||||
exampleApp/test/rtemsTestHarness.c (new file, 14 lines)
@@ -0,0 +1,14 @@
|
||||
/*************************************************************************\
|
||||
* Copyright (c) 2011 UChicago Argonne LLC, as Operator of Argonne
|
||||
* National Laboratory.
|
||||
* EPICS BASE is distributed subject to a Software License Agreement found
|
||||
* in file LICENSE that is included with this distribution.
|
||||
\*************************************************************************/
|
||||
|
||||
extern void epicsRunExampleTests(void);
|
||||
|
||||
int main(int argc, char **argv)
|
||||
{
|
||||
epicsRunExampleTests(); /* calls epicsExit(0) */
|
||||
return 0;
|
||||
}
|
||||
synApps-6.0.set (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
# Release tags for synApps modules as per synApps-6.0
|
||||
# see https://github.com/EPICS-synApps/support/blob/21f7fcd0f33cef5d34aacbd4e33511b43398a6dc/assemble_synApps.sh
|
||||
# also for additional configuration that could be done in hook scripts
|
||||
|
||||
ALLENBRADLEY=2.3
|
||||
ALIVE=R1-1-0
|
||||
AREA_DETECTOR=R3-3-1
|
||||
ASYN=R4-33
|
||||
AUTOSAVE=R5-9
|
||||
BUSY=R1-7
|
||||
CALC=R3-7-1
|
||||
CAMAC=R2-7-1
|
||||
CAPUTRECORDER=R1-7-1
|
||||
DAC128V=R2-9
|
||||
DELAYGEN=R1-2-0
|
||||
DXP=R5-0
|
||||
DXPSITORO=R1-1
|
||||
DEVIOCSTATS=3.1.15
|
||||
#GALIL=V3-6
|
||||
IP=R2-19-1
|
||||
IPAC=2.15
|
||||
IP330=R2-9
|
||||
IPUNIDIG=R2-11
|
||||
LOVE=R3-2-6
|
||||
LUA=R1-2-2
|
||||
MCA=R7-7
|
||||
MEASCOMP=R2-1
|
||||
MODBUS=R2-11
|
||||
MOTOR=R6-10-1
|
||||
OPTICS=R2-13-1
|
||||
QUADEM=R9-1
|
||||
SNCSEQ=2.2.5
|
||||
SOFTGLUE=R2-8-1
|
||||
SOFTGLUEZYNQ=R2-0-1
|
||||
SSCAN=R2-11-1
|
||||
STD=R3-5
|
||||
STREAM=R2-7-7c
|
||||
VAC=R1-7
|
||||
VME=R2-9
|
||||
YOKOGAWA_DAS=R1-0-0
|
||||
XXX=R6-0
|
||||
|
||||
include synApps-common
|
||||
synApps-6.1.set (new file, 44 lines)
@@ -0,0 +1,44 @@
|
||||
# Release tags for synApps modules as per synApps-6.1
|
||||
# see https://github.com/EPICS-synApps/support/blob/cc5adba5b8848c9cb98ab96768d668ae927d8859/assemble_synApps.sh
|
||||
# also for additional configuration that could be done in hook scripts
|
||||
|
||||
#ALLENBRADLEY=2.3
|
||||
ALIVE=R1-1-1
|
||||
AREA_DETECTOR=R3-7
|
||||
ASYN=R4-36
|
||||
AUTOSAVE=R5-10
|
||||
BUSY=R1-7-2
|
||||
CALC=R3-7-3
|
||||
CAMAC=R2-7-1
|
||||
CAPUTRECORDER=R1-7-2
|
||||
DAC128V=R2-9
|
||||
DELAYGEN=R1-2-1
|
||||
DXP=R6-0
|
||||
DXPSITORO=R1-2
|
||||
DEVIOCSTATS=3.1.16
|
||||
#ETHERIP=ether_ip-3-1
|
||||
#GALIL=V3-6
|
||||
IP=R2-20-1
|
||||
IPAC=2.15
|
||||
IP330=R2-9
|
||||
IPUNIDIG=R2-11
|
||||
LOVE=R3-2-7
|
||||
LUA=R2-0
|
||||
MCA=R7-8
|
||||
MEASCOMP=R2-3
|
||||
MODBUS=R3-0
|
||||
MOTOR=R7-1
|
||||
OPTICS=R2-13-3
|
||||
QUADEM=R9-2-1
|
||||
SNCSEQ=2.2.6
|
||||
SOFTGLUE=R2-8-2
|
||||
SOFTGLUEZYNQ=R2-0-2
|
||||
SSCAN=R2-11-3
|
||||
STD=R3-6
|
||||
STREAM=2.8.9
|
||||
VAC=R1-9
|
||||
VME=R2-9-2
|
||||
YOKOGAWA_DAS=R2-0-1
|
||||
XXX=R6-1
|
||||
|
||||
include synApps-common
|
||||
synApps-common.set (new file, 7 lines)
@@ -0,0 +1,7 @@
|
||||
# Common settings for all synApps releases
|
||||
|
||||
DEVIOCSTATS_REPONAME=iocStats
|
||||
ETHERIP_REPOOWNER=EPICSTools
|
||||
GALIL_REPOOWNER=motorapp
|
||||
GALIL_REPONAME=Galil-3-0
|
||||
AREADETECTOR_REPOOWNER=areaDetector
|
||||
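
These release-tag files are selected at run time: prepare() always sources defaults.set first and then, if the SET environment variable is defined, the file it names (see the source_set() calls in the script above), so one variable picks a whole coherent set of module tags. A hypothetical job setting, with an illustrative value only:

    SET=synApps-6.1
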
test00.set (new file, 3 lines)
@@ -0,0 +1,3 @@
|
||||
MODULES=
|
||||
|
||||
BASE=R3.15.6
|
||||
test01.set (new file, 4 lines)
@@ -0,0 +1,4 @@
|
||||
MODULES="sncseq"
|
||||
|
||||
BASE=7.0
|
||||
SNCSEQ=R2-2-7
|
||||
test02.set (new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
# a comment, then an empty line
|
||||
|
||||
# a comment that is indented
|
||||
BASE=foo
|
||||
# include an existing file
|
||||
include test01
|
||||
|
||||
FOO=bar
|
||||
FOO2=bar bar2
|
||||
FOO3=bar bar2
|
||||
test03.set (new file, 4 lines)
@@ -0,0 +1,4 @@
|
||||
# Check that multiple inclusions are avoided
|
||||
|
||||
include test01
|
||||
include test01
|
||||
@@ -1,10 +0,0 @@
|
||||
#!/bin/sh
|
||||
set -e -x
|
||||
|
||||
make -j2 $EXTRA
|
||||
|
||||
if [ "$TEST" != "NO" ]
|
||||
then
|
||||
make tapfiles
|
||||
make -s test-results
|
||||
fi
|
||||
@@ -1,148 +0,0 @@
#!/bin/sh
set -e -x

CURDIR="$PWD"

# determine if BASE is a release or a branch
git ls-remote --quiet --exit-code --tags https://github.com/${REPOBASE:-epics-base}/epics-base.git "$BASE" && BASE_RELEASE=YES
git ls-remote --quiet --exit-code --heads https://github.com/${REPOBASE:-epics-base}/epics-base.git "$BASE" && BASE_BRANCH=YES

if [ "$BASE_RELEASE" = "YES" ]
then
# TODO: use a cached location
BASE_LOCATION=$HOME/.source/epics-base
else
if [ "$BASE_BRANCH" = "YES" ]
then
BASE_LOCATION=$HOME/.source/epics-base
else
echo $BASE is neither a tag nor a branch name for BASE
exit 1
fi
fi

cat << EOF > $CURDIR/configure/RELEASE.local
EPICS_BASE=$BASE_LOCATION
EOF

install -d "$HOME/.source"
cd "$HOME/.source"

add_gh_flat() {
MODULE=$1
REPOOWNER=$2
REPONAME=$3
BRANCH=$4
MODULE_UC=$5
( git clone --quiet --depth 5 --branch $BRANCH https://github.com/$REPOOWNER/$REPONAME.git $MODULE && \
cd $MODULE && git log -n1 )
cat < $CURDIR/configure/RELEASE.local > $MODULE/configure/RELEASE.local
cat << EOF >> $CURDIR/configure/RELEASE.local
${MODULE_UC}=$HOME/.source/$MODULE
EOF
}

# not recursive
git clone --quiet --depth 5 --branch "$BASE" https://github.com/${REPOBASE:-epics-base}/epics-base.git epics-base
(cd epics-base && git log -n1 )
for modrepo in ${MODULES}
do
module=${modrepo%CPP}
module_uc=$(echo $module | tr 'a-z' 'A-Z')
eval add_gh_flat $module \${REPO${module_uc}:-epics-base} $modrepo \${BR${module_uc}:-master} $module_uc
done

if [ -e $CURDIR/configure/RELEASE.local ]
then
cat $CURDIR/configure/RELEASE.local
fi

EPICS_HOST_ARCH=`sh epics-base/startup/EpicsHostArch`

# requires wine and g++-mingw-w64-i686
if [ "$WINE" = "32" ]
then
echo "Cross mingw32"
sed -i -e '/CMPLR_PREFIX/d' epics-base/configure/os/CONFIG_SITE.linux-x86.win32-x86-mingw
cat << EOF >> epics-base/configure/os/CONFIG_SITE.linux-x86.win32-x86-mingw
CMPLR_PREFIX=i686-w64-mingw32-
EOF
cat << EOF >> epics-base/configure/CONFIG_SITE
CROSS_COMPILER_TARGET_ARCHS+=win32-x86-mingw
EOF
fi

if [ "$STATIC" = "YES" ]
then
echo "Build static libraries/executables"
cat << EOF >> epics-base/configure/CONFIG_SITE
SHARED_LIBRARIES=NO
STATIC_BUILD=YES
EOF
fi

HOST_CCMPLR_NAME=`echo "$TRAVIS_COMPILER" | sed -E 's/^([[:alpha:]][^-]*(-[[:alpha:]][^-]*)*)+(-[0-9\.]+)?$/\1/g'`
HOST_CMPLR_VER_SUFFIX=`echo "$TRAVIS_COMPILER" | sed -E 's/^([[:alpha:]][^-]*(-[[:alpha:]][^-]*)*)+(-[0-9\.]+)?$/\3/g'`
HOST_CMPLR_VER=`echo "$HOST_CMPLR_VER_SUFFIX" | cut -c 2-`

case "$HOST_CCMPLR_NAME" in
clang)
echo "Host compiler is clang"
HOST_CPPCMPLR_NAME=$(echo "$HOST_CCMPLR_NAME" | sed 's/clang/clang++/g')
cat << EOF >> epics-base/configure/os/CONFIG_SITE.Common.$EPICS_HOST_ARCH
GNU = NO
CMPLR_CLASS = clang
CC = ${HOST_CCMPLR_NAME}$HOST_CMPLR_VER_SUFFIX
CCC = ${HOST_CPPCMPLR_NAME}$HOST_CMPLR_VER_SUFFIX
EOF

# hack
sed -i -e 's/CMPLR_CLASS = gcc/CMPLR_CLASS = clang/' epics-base/configure/CONFIG.gnuCommon

${HOST_CCMPLR_NAME}$HOST_CMPLR_VER_SUFFIX --version
;;
gcc)
echo "Host compiler is GCC"
HOST_CPPCMPLR_NAME=$(echo "$HOST_CCMPLR_NAME" | sed 's/gcc/g++/g')
cat << EOF >> epics-base/configure/os/CONFIG_SITE.Common.$EPICS_HOST_ARCH
CC = ${HOST_CCMPLR_NAME}$HOST_CMPLR_VER_SUFFIX
CCC = ${HOST_CPPCMPLR_NAME}$HOST_CMPLR_VER_SUFFIX
EOF

${HOST_CCMPLR_NAME}$HOST_CMPLR_VER_SUFFIX --version
;;
*)
echo "Host compiler is default"
gcc --version
;;
esac

cat <<EOF >> epics-base/configure/CONFIG_SITE
USR_CPPFLAGS += $USR_CPPFLAGS
USR_CFLAGS += $USR_CFLAGS
USR_CXXFLAGS += $USR_CXXFLAGS
EOF

# set RTEMS to eg. "4.9" or "4.10"
# requires qemu, bison, flex, texinfo, install-info
if [ -n "$RTEMS" ]
then
echo "Cross RTEMS${RTEMS} for pc386"
curl -L "https://github.com/mdavidsaver/rsb/releases/download/20171203-${RTEMS}/i386-rtems${RTEMS}-trusty-20171203-${RTEMS}.tar.bz2" \
| tar -C / -xmj

sed -i -e '/^RTEMS_VERSION/d' -e '/^RTEMS_BASE/d' epics-base/configure/os/CONFIG_SITE.Common.RTEMS
cat << EOF >> epics-base/configure/os/CONFIG_SITE.Common.RTEMS
RTEMS_VERSION=$RTEMS
RTEMS_BASE=$HOME/.rtems
EOF
cat << EOF >> epics-base/configure/CONFIG_SITE
CROSS_COMPILER_TARGET_ARCHS += RTEMS-pc386-qemu
EOF
fi

for modrepo in epics-base ${MODULES}
do
module=${modrepo%CPP}
make -j2 -C $module $EXTRA
done
232 travis-test.sh Executable file
@@ -0,0 +1,232 @@
#!/bin/bash

# Module ci-scripts unit tests

# SET=test00 in .travis.yml runs the tests in this script
# all other jobs are started as compile jobs

# The following if clause can be removed for ci-scripts major version 3
if [ "$TRAVIS_OS_NAME" == osx -a "$BASH_VERSINFO" -lt 4 ]
then
brew install bash
if [ $(/usr/local/bin/bash -c 'echo $BASH_VERSINFO') -lt 4 ]
then
echo "Failed to install a recent bash" >&2
exit 1
fi
exec /usr/local/bin/bash $0 "$@"
fi

# Set VV empty in .travis.yml to make scripts terse
[ "${VV:-1}" ] && set -x

[ "$SET" != "test00" ] && exec ./travis/build.sh

UTILS_UNITTEST=1

# Perl version of "readlink -f" (which MacOS does not provide)
readlinkf() { perl -MCwd -e 'print Cwd::abs_path shift' "$1"; }

# test utilities
fail() {
echo -e "${ANSI_RED}$1${ANSI_RESET}"
exit 1
}

fn_exists() {
LC_ALL=C type -t $1 | grep -q function
}

repo_exists() {
DEP=$1
dep_lc=${DEP,,}
eval dirname=\${${DEP}_DIRNAME:=${dep_lc}}
eval reponame=\${${DEP}_REPONAME:=${dep_lc}}
eval repourl=\${${DEP}_REPOURL:="https://github.com/\${${DEP}_REPOOWNER:=${REPOOWNER:-epics-modules}}/${reponame}.git"}

git ls-remote --quiet --heads --exit-code $repourl > /dev/null 2>&1
}

SETUP_DIRS=${SETUP_PATH//:/ }

SCRIPTDIR=$(dirname $(readlinkf $0))/travis
CURDIR="$PWD"
CACHEDIR=${CACHEDIR:-${HOME}/.cache}
[ -e ${CACHEDIR} ] || mkdir -p ${CACHEDIR}

echo "Testing contents of utils.sh"

[ -d "$SCRIPTDIR" ] || fail "SCRIPTDIR does not exist"
[ -e "$SCRIPTDIR/utils.sh" ] || fail "SCRIPTDIR/utils.sh does not exist"

# source functions
. $SCRIPTDIR/utils.sh

# check for functions
fn_exists fold_start || fail "function fold_start missing from SCRIPTDIR/utils.sh"
fn_exists fold_end || fail "function fold_end missing from SCRIPTDIR/utils.sh"
fn_exists source_set || fail "function source_set missing from SCRIPTDIR/utils.sh"
fn_exists update_release_local || fail "function update_release_local missing from SCRIPTDIR/utils.sh"
fn_exists add_dependency || fail "function add_dependency missing from SCRIPTDIR/utils.sh"

# test source_set()
######################################################################

SETUP_PATH= source_set test01 | grep -q "(SETUP_PATH) is empty" || fail "empty search path not detected"
source_set xxdoesnotexistxx | grep -q "does not exist" || fail "missing setup file not detected"
source_set test01 | grep -q "Loading setup file" || fail "test01 setup file not found"
unset SEEN_SETUPS
export BASE=foo
source_set test01
[ "$BASE" = "foo" ] || fail "preset module BASE version does not override test01 setup file (expected foo got $BASE)"
unset SEEN_SETUPS
BASE=
source_set test02
[ "$BASE" = "foo" ] || fail "BASE set in test02 does not override included test01 setup file (expected foo got $BASE)"
[ "$FOO" = "bar" ] || fail "Setting of single word does not work"
[ "$FOO2" = "bar bar2" ] || fail "Setting of multiple words does not work"
[ "$FOO3" = "bar bar2" ] || fail "Indented setting of multiple words does not work"
[ "$SNCSEQ" = "R2-2-7" ] || fail "Setup test01 was not included"
unset SEEN_SETUPS
source_set test03 | grep -q "Ignoring already included setup file" || fail "test01 setup file included twice"

# test default settings file
######################################################################

echo "Testing default settings for completeness and valid git repo settings"

[ -e ./defaults.set ] || fail "defaults.set does not exist"
source_set defaults

repo_exists BASE || fail "Defaults for BASE do not point to a valid git repository at $repourl"
repo_exists PVDATA || fail "Defaults for PVDATA do not point to a valid git repository at $repourl"
repo_exists PVACCESS || fail "Defaults for PVACCESS do not point to a valid git repository at $repourl"
repo_exists NTYPES || fail "Defaults for NTYPES do not point to a valid git repository at $repourl"
repo_exists SNCSEQ || fail "Defaults for SNCSEQ do not point to a valid git repository at $repourl"
repo_exists STREAM || fail "Defaults for STREAM do not point to a valid git repository at $repourl"
repo_exists ASYN || fail "Defaults for ASYN do not point to a valid git repository at $repourl"
repo_exists STD || fail "Defaults for STD do not point to a valid git repository at $repourl"
repo_exists CALC || fail "Defaults for CALC do not point to a valid git repository at $repourl"
repo_exists AUTOSAVE || fail "Defaults for AUTOSAVE do not point to a valid git repository at $repourl"
repo_exists BUSY || fail "Defaults for BUSY do not point to a valid git repository at $repourl"
repo_exists SSCAN || fail "Defaults for SSCAN do not point to a valid git repository at $repourl"
repo_exists IOCSTATS || fail "Defaults for IOCSTATS do not point to a valid git repository at $repourl"
repo_exists MOTOR || fail "Defaults for MOTOR do not point to a valid git repository at $repourl"
repo_exists IPAC || fail "Defaults for IPAC do not point to a valid git repository at $repourl"

# test update_release_local()
######################################################################

echo "Testing updating the RELEASE.local file"

release_local=$CACHEDIR/RELEASE.local

rm -f $release_local

# Set a module
update_release_local MOD1 /tmp/mod1
updated_line="MOD1=/tmp/mod1"
grep -q "MOD1=" $release_local || fail "Line for MOD1 not added to RELEASE.local"
existing_line=$(grep "MOD1=" $release_local)
[ "${existing_line}" = "${updated_line}" ] || fail "Wrong line for MOD1 in RELEASE.local (expected=\"$updated_line\" found=\"$existing_line\")"

# Set base
update_release_local EPICS_BASE /tmp/base
updated_line="EPICS_BASE=/tmp/base"
grep -q "EPICS_BASE=" $release_local || fail "Line for EPICS_BASE not added to RELEASE.local"

# Set another module
update_release_local MOD2 /tmp/mod2
updated_line="MOD2=/tmp/mod2"
grep -q "MOD2=" $release_local || fail "Line for MOD2 not added to RELEASE.local"
existing_line=$(grep "MOD2=" $release_local)
[ "${existing_line}" = "${updated_line}" ] || fail "Wrong line for MOD2 in RELEASE.local (expected=\"$updated_line\" found=\"$existing_line\")"
tail -n 1 $release_local | grep -q "EPICS_BASE=" || fail "Line for EPICS_BASE not moved to the end of RELEASE.local"

# Update a module
update_release_local MOD1 /tmp/mod1b
updated_line="MOD1=/tmp/mod1b"
grep -q "MOD1=" $release_local || fail "Line for MOD1 not present in RELEASE.local"
existing_line=$(grep "MOD1=" $release_local)
[ "${existing_line}" = "${updated_line}" ] || fail "Wrong line for MOD1 in RELEASE.local (expected=\"$updated_line\" found=\"$existing_line\")"
head -n 1 $release_local | grep -q "MOD1=" || fail "Line for MOD1 not at the top of RELEASE.local"
tail -n 1 $release_local | grep -q "EPICS_BASE=" || fail "Line for EPICS_BASE not moved to the end of RELEASE.local"

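# Illustration (not part of the original script): after the calls above, the cached
# RELEASE.local is expected to contain one line per variable, updated in place,
# with EPICS_BASE kept as the last line:
#   MOD1=/tmp/mod1b
#   MOD2=/tmp/mod2
#   EPICS_BASE=/tmp/base
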
# Check that RELEASE.local only contains variable settings
[ $(grep -v -c '[^ =]*=.*' $release_local) -ne 0 ] && fail "RELEASE.local contains invalid lines"

rm -f $release_local

# test add_dependency()
######################################################################

echo "Testing adding a specific commit (branch or tag) of a dependency"

hash_3_15_6="ce7943fb44beb22b453ddcc0bda5398fadf72096"
location=$CACHEDIR/base-R3.15.6

# CAREFUL: order of the following check matters (speeds up the test)

# dependency does not exist in the cache
rm -fr $location; modules_to_compile=
add_dependency BASE R3.15.6
[ -e $location/LICENSE ] || fail "Missing dependency was not checked out"
BUILT=$(cat "$location/built")
[ "$BUILT" != "$hash_3_15_6" ] && fail "Wrong commit of dependency checked out (expected=\"$hash_3_15_6\" found=\"$BUILT\")"
grep -q "include \$(TOP)/../RELEASE.local" $location/configure/RELEASE && fail "RELEASE in Base includes RELEASE.local"
[ "$do_recompile" ] || fail "do_recompile flag was not set for missing dependency"
echo "$modules_to_compile" | grep -q "$location" || fail "Missing dependency was not set to compile"

# up-to-date dependency does exist in the cache
( cd $CACHEDIR; git clone --quiet --depth 5 --recursive --branch R3.15.6 https://github.com/epics-base/epics-base.git base-R3.15.6 )
rm -f $location/LICENSE
unset do_recompile; modules_to_compile=
add_dependency BASE R3.15.6
[ -e $location/LICENSE ] && fail "Existing correct dependency was checked out on top"
[ "$do_recompile" ] && fail "do_recompile flag was set for up-to-date dependency"
echo "$modules_to_compile" | grep -q "$location" && fail "Up-to-date dependency was set to compile"

do_recompile=yes
add_dependency BASE R3.15.6
echo "$modules_to_compile" | grep -q "$location" || fail "Up-to-date module was not set to compile while do_recompile=yes"

# dependency in the cache is outdated
echo "nottherighthash" > "$location/built"
unset do_recompile
add_dependency BASE R3.15.6
[ -e $location/LICENSE ] || fail "Outdated dependency was not checked out"
BUILT=$(cat "$location/built")
[ "$BUILT" != "$hash_3_15_6" ] && fail "Wrong commit of dependency checked out (expected=\"$hash_3_15_6\" found=\"$BUILT\")"
[ "$do_recompile" ] || fail "do_recompile flag was not set for outdated dependency"
echo "$modules_to_compile" | grep -q "$location" || fail "Outdated dependency was not set to compile"

# msi is automatically added to 3.14
rm -fr $location; modules_to_compile=
location=$CACHEDIR/base-R3.14.12.1
rm -fr $location;
add_dependency BASE R3.14.12.1
[ -e $location/src/dbtools/msi.c ] || fail "MSI was not added to Base 3.14"

rm -fr $CACHEDIR/*; modules_to_compile=

# missing inclusion of RELEASE.local in configure/RELEASE
location=$CACHEDIR/std-R3-4
add_dependency STD R3-4
grep -q "include \$(TOP)/../RELEASE.local" $location/configure/RELEASE || fail "Inclusion of RELEASE.local not added to configure/RELEASE"
rm -fr $location; modules_to_compile=

# correct handling of FOO_RECURSIVE setting (https://github.com/epics-base/ci-scripts/issues/25 regression)
export SSCAN_RECURSIVE=NO
add_dependency SSCAN master
add_dependency ASYN master
[ -e $CACHEDIR/sscan-master/.ci/README.md ] && fail "Sscan was checked out recursively despite SSCAN_RECURSIVE=NO"
[ -e $CACHEDIR/asyn-master/.ci/README.md ] || fail "Asyn was not checked out recursively"
rm -fr $CACHEDIR/*; modules_to_compile=

unset SSCAN_RECURSIVE
export ASYN_RECURSIVE=NO
add_dependency SSCAN master
add_dependency ASYN master
[ -e $CACHEDIR/sscan-master/.ci/README.md ] || fail "Sscan was not checked out recursively"
[ -e $CACHEDIR/asyn-master/.ci/README.md ] && fail "Asyn was checked out recursively despite ASYN_RECURSIVE=NO"
rm -fr $CACHEDIR/*
120 travis/.travis.yml.example-full Normal file
@@ -0,0 +1,120 @@
# .travis.yml for use with EPICS Base ci-scripts
# (see: https://github.com/epics-base/ci-scripts)

# This is YAML - indentation levels are crucial

language: cpp
compiler: gcc
dist: xenial

cache:
  directories:
  - $HOME/.cache

env:
  global:
  - SETUP_PATH=.ci-local:.ci

addons:
  apt:
    packages:
    # for all EPICS builds
    - libreadline6-dev
    - libncurses5-dev
    - perl
    # for clang compiler
    - clang
    # for mingw builds (32bit and 64bit)
    - g++-mingw-w64-i686
    - g++-mingw-w64-x86-64
    # for RTEMS cross builds
    - qemu-system-x86
  homebrew:
    packages:
    # for all EPICS builds
    - bash
    # for the sequencer
    - re2c
    update: true

install:
  - ./.ci/travis/prepare.sh

script:
  - ./.ci/travis/build.sh

# If you need to do more during install and build,
# add a local directory to your module and do e.g.
#   - ./.ci-local/travis/install-extras.sh

# Define build jobs

# Well-known variables to use
# SET           source setup file
# ADD_MODULES   extra modules (for a specific job)
# EXTRA         content will be added to make command line
# EXTRA1..5     additional arguments for the make command
#               (one argument per variable)
# STATIC        set to YES for static build (default: NO)
# TEST          set to NO to skip running the tests (default: YES)
# VV            set to make build scripts verbose (default: unset)

# Usually from setup files, but may be specified or overridden
# on a job line
# MODULES       list of dependency modules
# BASE          branch or release tag name of the EPICS Base to use
# <MODULE>      branch or release tag for a specific module
#               ... see README for setup file syntax description

jobs:
  include:

# Different configurations of default gcc and clang

  - env: BASE=7.0

  - env: BASE=7.0
    compiler: clang

  - env: BASE=7.0 EXTRA="CMD_CXXFLAGS=-std=c++11"

  - env: BASE=7.0 EXTRA="CMD_CXXFLAGS=-std=c++11"
    compiler: clang

# Trusty: compiler versions very close to RHEL 7

  - env: BASE=7.0
    dist: trusty

  - env: BASE=7.0 EXTRA="CMD_CXXFLAGS=-std=c++11"
    dist: trusty

# Cross-compilations to Windows using MinGW and WINE

  - env: BASE=7.0 WINE=32 TEST=NO STATIC=YES
    compiler: mingw

  - env: BASE=7.0 WINE=64 TEST=NO STATIC=NO
    compiler: mingw

# Cross-compilation to RTEMS

  - env: BASE=7.0 RTEMS=4.10 TEST=NO

  - env: BASE=7.0 RTEMS=4.9 TEST=NO

# Other gcc versions (added as an extra package)

  - env: BASE=7.0
    compiler: gcc-6
    addons: { apt: { packages: ["g++-6"], sources: ["ubuntu-toolchain-r-test"] } }

  - env: BASE=7.0
    compiler: gcc-7
    addons: { apt: { packages: ["g++-7"], sources: ["ubuntu-toolchain-r-test"] } }

# MacOS build

  - env: BASE=7.0
    os: osx
    compiler: clang
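The example above exercises BASE, EXTRA, WINE, RTEMS, STATIC and TEST, but not SET or ADD_MODULES. A job that loads a module-local setup file and pulls in one extra dependency could look like the following sketch (the setup file name `stable` and the added module are hypothetical; the module must be known from the defaults or your own settings):

```yaml
  # Load .ci-local/stable.set (or .ci/stable.set) and build with one extra module
  - env: SET=stable ADD_MODULES="iocstats"
```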
@@ -15,14 +15,14 @@ addons:
     - perl
 
 install:
-  - ./.ci/travis-prepare.sh
+  - ./.ci/travis/prepare.sh
 
 script:
-  - ./.ci/travis-build.sh
+  - ./.ci/travis/build.sh
 
 # Build using default gcc for Base branches 7.0 and 3.15
 
-matrix:
+jobs:
   include:
   - env: BASE=7.0
   - env: BASE=3.15
77 travis/Add-RTEMS-pc368-qemu-target.patch Normal file
@@ -0,0 +1,77 @@
From 00ee7bf7d3618c748491c88742c011a8353abeba Mon Sep 17 00:00:00 2001
From: Andrew Johnson <anj@anl.gov>
Date: Wed, 24 Oct 2018 14:27:15 -0500
Subject: [PATCH] Add RTEMS-pc368-qemu target, use in Travis-CI builds

---
configure/os/CONFIG.Common.RTEMS-pc386-qemu | 11 +++++++++++
configure/os/CONFIG_SITE.Common.RTEMS-pc386 | 5 -----
configure/os/CONFIG_SITE.Common.RTEMS-pc386-qemu | 9 +++++++++
src/libCom/RTEMS/Makefile | 2 +-
src/tools/makeTestfile.pl | 2 +-
6 files changed, 24 insertions(+), 10 deletions(-)
create mode 100644 configure/os/CONFIG.Common.RTEMS-pc386-qemu
delete mode 100644 configure/os/CONFIG_SITE.Common.RTEMS-pc386
create mode 100644 configure/os/CONFIG_SITE.Common.RTEMS-pc386-qemu

diff --git a/configure/os/CONFIG.Common.RTEMS-pc386-qemu b/configure/os/CONFIG.Common.RTEMS-pc386-qemu
new file mode 100644
index 000000000..684f01a19
--- /dev/null
+++ b/configure/os/CONFIG.Common.RTEMS-pc386-qemu
@@ -0,0 +1,11 @@
+# CONFIG.Common.RTEMS-pc386-qemu
+#
+# Definitions for the RTEMS-pc386-qemu target
+# Site-specific overrides go in CONFIG_SITE.Common.RTEMS-pc386-qemu
+#
+#-------------------------------------------------------
+
+# Include definitions from RTEMS-pc386
+include $(CONFIG)/os/CONFIG.Common.RTEMS-pc386
+
+RTEMS_QEMU_FIXUPS = YES
diff --git a/configure/os/CONFIG_SITE.Common.RTEMS-pc386-qemu b/configure/os/CONFIG_SITE.Common.RTEMS-pc386-qemu
new file mode 100644
index 000000000..027dcf4ab
--- /dev/null
+++ b/configure/os/CONFIG_SITE.Common.RTEMS-pc386-qemu
@@ -0,0 +1,9 @@
+# CONFIG_SITE.Common.RTEMS-pc386-qemu
+#
+# Site-specific overrides for the RTEMS-pc386-qemu target
+#
+
+# If you're building this architecture you _probably_ want to
+# run the tests for it under QEMU, but if not you can turn
+# them off here by commenting out this line:
+CROSS_COMPILER_RUNTEST_ARCHS += RTEMS-pc386-qemu
diff --git a/src/libCom/RTEMS/Makefile b/src/libCom/RTEMS/Makefile
index 2f12b7bf0..22a92733c 100644
--- a/src/libCom/RTEMS/Makefile
+++ b/src/libCom/RTEMS/Makefile
@@ -24,7 +24,7 @@ rtemsCom_SRCS += epicsRtemsInitHookPre.c
rtemsCom_SRCS += epicsRtemsInitHookPost.c
rtemsCom_SRCS += epicsMemFs.c

-ifeq ($(T_A),RTEMS-pc386)
+ifeq ($(RTEMS_BSP),pc386)
rtemsCom_SRCS += ne2kpci.c
endif

diff --git a/src/tools/makeTestfile.pl b/src/tools/makeTestfile.pl
index 73f522034..fb431fe7a 100644
--- a/src/tools/makeTestfile.pl
+++ b/src/tools/makeTestfile.pl
@@ -37,7 +37,7 @@ if( $TA =~ /^win32-x86/ && $HA !~ /^win/ ) {
$exec = "wine64 $exe";

# Run pc386 test harness w/ QEMU
-} elsif( $TA =~ /^RTEMS-pc386$/ ) {
+} elsif( $TA =~ /^RTEMS-pc386-qemu$/ ) {
$exec = "qemu-system-i386 -m 64 -no-reboot -serial stdio -display none -net nic,model=ne2k_pci -net user,restrict=yes -kernel $exe";

# Explicitly fail for other RTEMS targets
--
2.21.0.windows.1

60 travis/README.md Normal file
@@ -0,0 +1,60 @@
# Travis-CI Scripts for EPICS Modules

## Features

- Use different compilers (gcc, clang)
- Use different gcc versions
- Cross-compile for Windows 32bit and 64bit using MinGW and WINE
- Cross-compile for RTEMS 4.9 and 4.10
- Compile on MacOS
- Released versions of dependencies are cached (for faster builds)

## How to Use these Scripts

1. Get an account on [Travis-CI](https://travis-ci.org/), connect
   it to your GitHub account and activate your support module's
   repository. For more details, please refer to the
   [Travis-CI Tutorial](https://docs.travis-ci.com/user/tutorial/).
   Make sure to use `travis-ci.org` and not their `.com` site.

2. Add the ci-scripts repository as a Git Submodule
   (see [README](../README.md) one level above).

3. Add settings files defining which dependencies in which versions
   you want to build against
   (see [README](../README.md) one level above).
   An example settings file is shown after this list.

4. Create a Travis configuration by copying one of the examples into
   the root directory of your module.
   ```
   $ cp .ci/travis/.travis.yml.example-full .travis.yml
   ```

5. Edit the `.travis.yml` configuration to include the jobs you want
   Travis to run.

   Build jobs are declared in the list following the `jobs: include:`
   declaration. Each element (starting with `-` in column 3) defines the
   settings for one build job. `env:` controls the setting of environment
   variables, `dist:` specifies the Linux distribution,
   `os:` the operating system.
   Also see the comments in the examples for more hints, and the Travis-CI
   documentation for more options and more details.

6. Push your changes and check
   [travis-ci.org](https://travis-ci.org/) for your build results.

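As an illustration for step 3, a module-local settings file (e.g. `.ci-local/stable.set`; the file name and the module versions below are examples only, see the top-level README for the full syntax) could look like:

```
# dependencies and versions for the "stable" builds
MODULES="sncseq asyn"
BASE=R3.15.6
SNCSEQ=R2-2-7
ASYN=R4-34
```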

## Caches

Travis keeps the caches separate for different jobs. As soon as the job
description (in the `.travis.yml` configuration file) or its environment
settings change (adding a space character is enough), the cache is different
and will be rebuilt when the job runs.

This also means that changing a value inside a setup file will _not_
invalidate the cache - in that case you will have to manually delete the cache
through the Travis web interface. (Or add a space character in the job
configuration.)

Caches are automatically removed after approx. four weeks.
Your jobs will have to rebuild them once in a while.
46 travis/build.sh Executable file
@@ -0,0 +1,46 @@
#!/bin/bash
set -e

# Set VV in .travis.yml to make scripts verbose
[ "$VV" ] && set -x

CACHEDIR=${CACHEDIR:-${HOME}/.cache}

if [ "$BASE" = "SELF" ]
then
EPICS_BASE=$CURDIR
else
eval $(grep "EPICS_BASE=" ${CACHEDIR}/RELEASE.local)
fi
export EPICS_BASE

[ -z "$EPICS_HOST_ARCH" -a -f $EPICS_BASE/src/tools/EpicsHostArch.pl ] && EPICS_HOST_ARCH=$(perl $EPICS_BASE/src/tools/EpicsHostArch.pl)
[ -z "$EPICS_HOST_ARCH" -a -f $EPICS_BASE/startup/EpicsHostArch.pl ] && EPICS_HOST_ARCH=$(perl $EPICS_BASE/startup/EpicsHostArch.pl)
export EPICS_HOST_ARCH

# Base 3.15 doesn't have -qemu target architecture and needs an extra define
[ -e $EPICS_BASE/configure/os/CONFIG.Common.RTEMS-pc386-qemu ] || EXTRA_QEMU=RTEMS_QEMU_FIXUPS=YES

# use array variable to get the quoting right while using separate words for arguments
[ -n "$EXTRA0" ] && EXTRA[0]="$EXTRA0"
[ -n "$EXTRA1" ] && EXTRA[1]="$EXTRA1"
[ -n "$EXTRA2" ] && EXTRA[2]="$EXTRA2"
[ -n "$EXTRA3" ] && EXTRA[3]="$EXTRA3"
[ -n "$EXTRA4" ] && EXTRA[4]="$EXTRA4"
[ -n "$EXTRA5" ] && EXTRA[5]="$EXTRA5"

make -j2 $EXTRA_QEMU "${EXTRA[@]}"

ret=0

if [ "$TEST" != "NO" ]
then
make -j2 tapfiles || ret=$?

if grep -q "BASE_3_14=NO" $EPICS_BASE/configure/CONFIG_BASE_VERSION
then
grep -q "^test-results:" $EPICS_BASE/configure/RULES_BUILD && make -sk test-results
fi
fi

exit $ret
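The EXTRA handling in build.sh is only about quoting: each EXTRAn variable that is set becomes exactly one word on the make command line, even if its value contains spaces. A sketch with hypothetical job settings:

```sh
# Hypothetical job settings:
#   EXTRA1="CMD_CXXFLAGS=-std=c++11"
#   EXTRA2="USR_CPPFLAGS=-DFOO -DBAR"
# build.sh then effectively runs
#   make -j2 "CMD_CXXFLAGS=-std=c++11" "USR_CPPFLAGS=-DFOO -DBAR"
# i.e. each EXTRAn variable is passed to make as a single, intact argument.
```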
247 travis/prepare.sh Executable file
@@ -0,0 +1,247 @@
#!/bin/bash
set -e

# The following if clause can be removed for ci-scripts major version 3
if [ "$TRAVIS_OS_NAME" == osx -a "$BASH_VERSINFO" -lt 4 ]
then
brew install bash
if [ $(/usr/local/bin/bash -c 'echo $BASH_VERSINFO') -lt 4 ]
then
echo "Failed to install a recent bash" >&2
exit 1
fi
exec /usr/local/bin/bash $0 "$@"
fi

# Set VV in .travis.yml to make scripts verbose
[ "$VV" ] && set -x

# Perl version of "readlink -f" (which MacOS does not provide)
readlinkf() { perl -MCwd -e 'print Cwd::abs_path shift' "$1"; }

SCRIPTDIR=$(dirname $(readlinkf $0))
CURDIR="$PWD"
CACHEDIR=${CACHEDIR:-${HOME}/.cache}
[ -e ${CACHEDIR} ] || mkdir -p ${CACHEDIR}

# source functions
. $SCRIPTDIR/utils.sh

echo -e "${ANSI_YELLOW}Using bash version $BASH_VERSION${ANSI_RESET}"

if [ -f /etc/hosts ]
then
# The travis-ci "bionic" image throws us a curveball in /etc/hosts
# by including two entries for localhost. The first one, for 127.0.1.1,
# causes epicsSockResolveTest to fail.
# cat /etc/hosts
# ...
# 127.0.1.1 localhost localhost ip4-loopback
# 127.0.0.1 localhost nettuno travis vagrant travis-job-....

sudo sed -i -e '/^127\.0\.1\.1/ s|localhost\s*||g' /etc/hosts

echo "==== /etc/hosts"
cat /etc/hosts
echo "===="
fi

# Load settings
# -------------

fold_start load.settings "Loading settings"

# load default settings for well-known modules
source_set defaults

# source configured settings
[ -z "${SET+x}" ] || source_set $SET

fold_end load.settings

# Check out dependencies
# ----------------------

if [ "$BASE" != "SELF" ]
then
fold_start check.out.dependencies "Checking/cloning dependencies"

for mod in BASE $ADD_MODULES $MODULES
do
mod_uc=${mod^^}
eval add_dependency $mod_uc \${${mod_uc}:=master}
done
[ -d ./configure ] && target=./configure/RELEASE.local || target=./RELEASE.local
cp ${CACHEDIR}/RELEASE.local $target

fold_end check.out.dependencies
fi

# Set up compiler
# ---------------

fold_start set.up.epics_build "Setting up EPICS build system"

if [ "$BASE" = "SELF" ]
then
EPICS_BASE=$CURDIR
else
eval $(grep "EPICS_BASE=" ${CACHEDIR}/RELEASE.local)
fi
export EPICS_BASE
echo "EPICS_BASE=$EPICS_BASE"

[ -z "$EPICS_HOST_ARCH" -a -f $EPICS_BASE/src/tools/EpicsHostArch.pl ] && EPICS_HOST_ARCH=$(perl $EPICS_BASE/src/tools/EpicsHostArch.pl)
[ -z "$EPICS_HOST_ARCH" -a -f $EPICS_BASE/startup/EpicsHostArch.pl ] && EPICS_HOST_ARCH=$(perl $EPICS_BASE/startup/EpicsHostArch.pl)
export EPICS_HOST_ARCH
echo "EPICS_HOST_ARCH=$EPICS_HOST_ARCH"

if echo ${modules_to_compile} | grep -q "$EPICS_BASE" || [ "$BASE" = "SELF" ]
then

# requires wine and g++-mingw-w64-i686
if [ "$WINE" = "32" ]
then
echo "Cross mingw32"
sed -i -e '/CMPLR_PREFIX/d' $EPICS_BASE/configure/os/CONFIG_SITE.linux-x86.win32-x86-mingw
cat << EOF >> $EPICS_BASE/configure/os/CONFIG_SITE.linux-x86.win32-x86-mingw
CMPLR_PREFIX=i686-w64-mingw32-
EOF
cat << EOF >> $EPICS_BASE/configure/CONFIG_SITE
CROSS_COMPILER_TARGET_ARCHS+=win32-x86-mingw
EOF

elif [ "$WINE" = "64" ]
then
echo "Cross mingw64"
sed -i -e '/CMPLR_PREFIX/d' $EPICS_BASE/configure/os/CONFIG_SITE.linux-x86.windows-x64-mingw
cat << EOF >> $EPICS_BASE/configure/os/CONFIG_SITE.linux-x86.windows-x64-mingw
CMPLR_PREFIX=x86_64-w64-mingw32-
EOF
cat << EOF >> $EPICS_BASE/configure/CONFIG_SITE
CROSS_COMPILER_TARGET_ARCHS+=windows-x64-mingw
EOF
fi

if [ "$STATIC" = "YES" ]
then
echo "Build static libraries/executables"
cat << EOF >> $EPICS_BASE/configure/CONFIG_SITE
SHARED_LIBRARIES=NO
STATIC_BUILD=YES
EOF
fi

HOST_CCMPLR_NAME=`echo "$TRAVIS_COMPILER" | sed -E 's/^([[:alpha:]][^-]*(-[[:alpha:]][^-]*)*)+(-[0-9\.]+)?$/\1/g'`
HOST_CMPLR_VER_SUFFIX=`echo "$TRAVIS_COMPILER" | sed -E 's/^([[:alpha:]][^-]*(-[[:alpha:]][^-]*)*)+(-[0-9\.]+)?$/\3/g'`
HOST_CMPLR_VER=`echo "$HOST_CMPLR_VER_SUFFIX" | cut -c 2-`

case "$HOST_CCMPLR_NAME" in
clang)
echo "Host compiler is clang"
HOST_CPPCMPLR_NAME=$(echo "$HOST_CCMPLR_NAME" | sed 's/clang/clang++/g')
cat << EOF >> $EPICS_BASE/configure/os/CONFIG_SITE.Common.$EPICS_HOST_ARCH
GNU = NO
CMPLR_CLASS = clang
CC = ${HOST_CCMPLR_NAME}$HOST_CMPLR_VER_SUFFIX
CCC = ${HOST_CPPCMPLR_NAME}$HOST_CMPLR_VER_SUFFIX
EOF

# hack
sed -i -e 's/CMPLR_CLASS = gcc/CMPLR_CLASS = clang/' $EPICS_BASE/configure/CONFIG.gnuCommon

${HOST_CCMPLR_NAME}$HOST_CMPLR_VER_SUFFIX --version
;;
gcc)
echo "Host compiler is GCC"
HOST_CPPCMPLR_NAME=$(echo "$HOST_CCMPLR_NAME" | sed 's/gcc/g++/g')
cat << EOF >> $EPICS_BASE/configure/os/CONFIG_SITE.Common.$EPICS_HOST_ARCH
CC = ${HOST_CCMPLR_NAME}$HOST_CMPLR_VER_SUFFIX
CCC = ${HOST_CPPCMPLR_NAME}$HOST_CMPLR_VER_SUFFIX
EOF

${HOST_CCMPLR_NAME}$HOST_CMPLR_VER_SUFFIX --version
;;
*)
echo "Host compiler is default"
gcc --version
;;
esac

cat <<EOF >> $EPICS_BASE/configure/CONFIG_SITE
USR_CPPFLAGS += $USR_CPPFLAGS
USR_CFLAGS += $USR_CFLAGS
USR_CXXFLAGS += $USR_CXXFLAGS
EOF

# set RTEMS to eg. "4.9" or "4.10"
# requires qemu, bison, flex, texinfo, install-info
if [ -n "$RTEMS" ]
then
echo "Cross RTEMS${RTEMS} for pc386"
sed -i -e '/^RTEMS_VERSION/d' -e '/^RTEMS_BASE/d' $EPICS_BASE/configure/os/CONFIG_SITE.Common.RTEMS
cat << EOF >> $EPICS_BASE/configure/os/CONFIG_SITE.Common.RTEMS
RTEMS_VERSION=$RTEMS
RTEMS_BASE=$HOME/.rtems
EOF
# Base 3.15 doesn't have -qemu target architecture
[ -e $EPICS_BASE/configure/os/CONFIG.Common.RTEMS-pc386-qemu ] && QEMU=-qemu
cat << EOF >> $EPICS_BASE/configure/CONFIG_SITE
CROSS_COMPILER_TARGET_ARCHS += RTEMS-pc386$QEMU
EOF
fi

else
echo -e "${ANSI_GREEN}EPICS build system already set up (Base was loaded from cache)${ANSI_RESET}"
fi

# Download RTEMS cross compiler
if [ -n "$RTEMS" ]
then
echo "Downloading RTEMS${RTEMS} cross compiler for pc386"
curl -L "https://github.com/mdavidsaver/rsb/releases/download/20171203-${RTEMS}/i386-rtems${RTEMS}-trusty-20171203-${RTEMS}.tar.bz2" \
| tar -C / -xmj
fi

fold_end set.up.compiler

echo "\$ make --version"
make --version

[ "$BASE" = "SELF" ] && exit 0

# Build required dependencies
# ---------------------------

fold_start build.dependencies "Build missing/outdated dependencies"

[ "$VV" ] && silent="-s" || silent=

[ -z "$modules_to_compile" ] && echo -e "${ANSI_GREEN}All dependency modules are up-to-date (nothing to do)${ANSI_RESET}"

for module in ${modules_to_compile}
do
eval name=\${module#${CACHEDIR}/}
fold_start build.$name "Build $name"
make -j2 $silent -C $module $EXTRA
fold_end build.$name
done

fold_end build.dependencies

echo -e "${ANSI_BLUE}Dependency module information${ANSI_RESET}"

echo "Module Tag Binaries Commit"
echo "-----------------------------------------------------------------------------------"
for mod in base $MODULES $ADD_MODULES
do
mod_uc=${mod^^}
eval tag=\${${mod_uc}}
eval dir=${CACHEDIR}/\${${mod_uc}_DIRNAME}-$tag
echo "$modules_to_compile" | grep -q "$dir" && stat="rebuilt" || stat="from cache"
commit=$(git -C $dir log -n1 --oneline)
printf "%-10s %-12s %-11s %s\n" "$mod" "$tag" "$stat" "$commit"
done

echo -e "${ANSI_BLUE}Contents of RELEASE.local${ANSI_RESET}"
cat ${CACHEDIR}/RELEASE.local
218 travis/utils.sh Normal file
@@ -0,0 +1,218 @@
# Utility functions for Travis scripts in ci-scripts
#
# This file is sourced by the executable scripts
# CACHEDIR must be defined and existing before calling these functions

# Portable version of 'sed -i' (that MacOS doesn't provide)

# sedi (cmd, file)
# Do the equivalent of "sed -i cmd file"
sedi () {
cat $2 | sed "$1" > $2.tmp$$; mv -f $2.tmp$$ $2
}

# Setup ANSI Colors
export ANSI_RED="\033[31;1m"
export ANSI_GREEN="\033[32;1m"
export ANSI_YELLOW="\033[33;1m"
export ANSI_BLUE="\033[34;1m"
export ANSI_RESET="\033[0m"
export ANSI_CLEAR="\033[0K"

# Travis log fold control
# from https://github.com/travis-ci/travis-rubies/blob/build/build.sh

fold_start() {
echo -e "travis_fold:start:$1\\r${ANSI_YELLOW}$2${ANSI_RESET}"
}

fold_end() {
echo -en "travis_fold:end:$1\\r"
}

die() {
echo -e "${ANSI_RED}$1${ANSI_RESET}"
[ "$UTILS_UNITTEST" ] || exit 1
}

# source_set(settings)
#
# Source a settings file (extension .set) found in SETUP_PATH
# May be called recursively (from within a settings file)
declare -a SEEN_SETUPS
source_set() {
local set_file=${1//[$'\r']}
local set_dir
local found=0
[ "${SETUP_PATH}" ] || die "Search path for setup files (SETUP_PATH) is empty"
for set_dir in ${SETUP_PATH//:/ }
do
if [ -e $set_dir/$set_file.set ]
then
if [[ " ${SEEN_SETUPS[@]} " =~ " $set_dir/$set_file.set " ]]
then
echo "Ignoring already included setup file $set_dir/$set_file.set"
return
fi
SEEN_SETUPS+=($set_dir/$set_file.set)
echo "Loading setup file $set_dir/$set_file.set"
local line
while read -r line
do
[ -z "$line" ] && continue
echo $line | grep -q "^#" && continue
if echo $line | grep -q "^include\W"
then
source_set $(echo $line | awk '{ print $2 }')
continue
fi
if echo "$line" | grep -q "^\w\+="
then
IFS== read var value <<< "${line//[$'\r']}"
value=$(sed "s/^\(\"\)\(.*\)\1\$/\2/g" <<< "$value") # remove surrounding quotes
eval [ "\${$var}" ] || eval "$var=\$value"
fi
done < $set_dir/$set_file.set
found=1
break
fi
done
[ $found -ne 0 ] || die "Setup file $set_file.set does not exist in SETUP_PATH search path ($SETUP_PATH)"
}

# update_release_local(varname, place)
# varname name of the variable to set in RELEASE.local
# place place (absolute path) of where variable should point to
#
# Manipulate RELEASE.local in the cache location:
# - replace "$varname=$place" line if it exists and has changed
# - otherwise add "$varname=$place" line and possibly move EPICS_BASE=... line to the end
update_release_local() {
local var=$1
local place=$2
local release_local=${CACHEDIR}/RELEASE.local
local updated_line="${var}=${place}"

local ret=0
[ -e ${release_local} ] && grep -q "${var}=" ${release_local} || ret=$?
if [ $ret -eq 0 ]
then
existing_line=$(grep "${var}=" ${release_local})
if [ "${existing_line}" != "${updated_line}" ]
then
sedi "s|${var}=.*|${var}=${place}|g" ${release_local}
fi
else
echo "$var=$place" >> ${release_local}
ret=0
grep -q "EPICS_BASE=" ${release_local} || ret=$?
if [ $ret -eq 0 ]
then
base_line=$(grep "EPICS_BASE=" ${release_local})
sedi '\|EPICS_BASE=|d' ${release_local}
echo ${base_line} >> ${release_local}
fi
fi
}

# add_dependency(dep, tag)
#
# Add a dependency to the cache area:
# - check out (recursive if configured) in the CACHE area unless it already exists and the
#   required commit has been built
# - Defaults:
#   $dep_DIRNAME = lower case ($dep)
#   $dep_REPONAME = lower case ($dep)
#   $dep_REPOURL = GitHub / $dep_REPOOWNER (or $REPOOWNER or epics-modules) / $dep_REPONAME .git
#   $dep_VARNAME = $dep
#   $dep_DEPTH = 5
#   $dep_RECURSIVE = 1/YES (0/NO for a flat clone)
# - Add $dep_VARNAME line to the RELEASE.local file in the cache area (unless already there)
# - Add full path to $modules_to_compile
add_dependency() {
curdir="$PWD"
DEP=$1
TAG=$2
dep_lc=${DEP,,}
eval dirname=\${${DEP}_DIRNAME:=${dep_lc}}
eval reponame=\${${DEP}_REPONAME:=${dep_lc}}
eval repourl=\${${DEP}_REPOURL:="https://github.com/\${${DEP}_REPOOWNER:=${REPOOWNER:-epics-modules}}/${reponame}.git"}
eval varname=\${${DEP}_VARNAME:=${DEP}}
eval recursive=\${${DEP}_RECURSIVE:=1}
recursive=${recursive,,}
local recurse=""
[ "$recursive" != "0" -a "$recursive" != "no" ] && recurse="--recursive"

# determine if $DEP points to a valid release or branch
git ls-remote --quiet --exit-code --refs $repourl "$TAG" > /dev/null 2>&1 ||
die "$TAG is neither a tag nor a branch name for $DEP ($repourl)"

if [ -e $CACHEDIR/$dirname-$TAG ]
then
[ -e $CACHEDIR/$dirname-$TAG/built ] && BUILT=$(cat $CACHEDIR/$dirname-$TAG/built) || BUILT="never"
HEAD=$(cd "$CACHEDIR/$dirname-$TAG" && git log -n1 --pretty=format:%H)
if [ "$HEAD" != "$BUILT" ]
then
rm -fr $CACHEDIR/$dirname-$TAG
else
echo "Found $TAG of dependency $DEP in $CACHEDIR/$dirname-$TAG"
fi
fi

if [ ! -e $CACHEDIR/$dirname-$TAG ]
then
cd $CACHEDIR
eval depth=\${${DEP}_DEPTH:-"-1"}
case ${depth} in
-1 )
deptharg="--depth 5"
;;
0 )
deptharg=""
;;
* )
deptharg="--depth $depth"
;;
esac
echo "Cloning $TAG of dependency $DEP into $CACHEDIR/$dirname-$TAG"
git clone --quiet $deptharg $recurse --branch "$TAG" $repourl $dirname-$TAG
( cd $dirname-$TAG && git log -n1 )
do_recompile=yes
# add MSI to Base 3.14
if [ $DEP == "BASE" ]
then
versionfile=$CACHEDIR/$dirname-$TAG/configure/CONFIG_BASE_VERSION
if [ -e ${versionfile} ] && grep -q "BASE_3_14=YES" ${versionfile}
then
echo "Adding MSI 1.7 to $CACHEDIR/$dirname-$TAG"
( cd $dirname-$TAG; patch -p1 < $SCRIPTDIR/../add-msi-to-314.patch )
fi
else
# fix non-base modules that do not include the .local files in configure/RELEASE
release=$CACHEDIR/$dirname-$TAG/configure/RELEASE
if [ -e $release ]
then
echo "-include \$(TOP)/../RELEASE.local" > $release
fi
fi
# run hook
eval hook="\${${DEP}_HOOK}"
if [ "$hook" ]
then
if [ -x "$curdir/$hook" ]
then
echo "Running hook $hook in $CACHEDIR/$dirname-$TAG"
( cd $CACHEDIR/$dirname-$TAG; "$curdir/$hook" )
else
die "Hook script $hook is not executable or does not exist."
fi
fi
HEAD=$(cd "$CACHEDIR/$dirname-$TAG" && git log -n1 --pretty=format:%H)
echo "$HEAD" > "$CACHEDIR/$dirname-$TAG/built"
cd "$curdir"
fi

[ "${do_recompile}" ] && modules_to_compile="${modules_to_compile} $CACHEDIR/$dirname-$TAG"

update_release_local ${varname} $CACHEDIR/$dirname-$TAG
}
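To show how the per-dependency variables documented above fit together, a settings file (or job environment) might override the defaults for one module roughly like this; the tag, owner and hook path are hypothetical examples:

```sh
ASYN=R4-36                          # branch or tag passed to add_dependency()
ASYN_REPOOWNER=epics-modules        # -> https://github.com/epics-modules/asyn.git
ASYN_RECURSIVE=NO                   # flat clone, no submodules
ASYN_DEPTH=0                        # full clone instead of the default --depth 5
ASYN_HOOK=.ci-local/patch-asyn.sh   # executable run inside the fresh checkout
```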