200 Commits

Author SHA1 Message Date
Ralph Lange
4690a977df Update README: small fixes and corrections 2020-06-15 11:42:55 +02:00
Ralph Lange
cf1224524f Merge pull request #43 from ralphlange:devel/cue-3.0
closes #43
2020-06-15 11:13:05 +02:00
Ralph Lange
9c89437dac cue: support PARALLEL_MAKE to configure number of make jobs
fixes #46
2020-06-15 11:07:55 +02:00
Ralph Lange
3931ff04fc cue-test: clean up DetectContext tests 2020-06-15 10:42:55 +02:00
Ralph Lange
7585b573d3 cue: warnings for STATIC and misspelled configuration
fixes #47
2020-06-15 10:42:55 +02:00
Ralph Lange
22652da198 cue: support setting TEST=NO to skip tests 2020-06-15 09:30:47 +02:00
Ralph Lange
ce4ef0b577 Update README: document cue.py, add migration hints 2020-06-12 20:13:42 +02:00
Ralph Lange
769a8ad08d cue: properly decode() subprocess.check_output()
(breaks on Python 3)
2020-06-12 18:00:18 +02:00
Ralph Lange
3dff483254 travis: add unit test and test build jobs for Python 3.7 2020-06-12 18:00:18 +02:00
Ralph Lange
35f5befa9d cue: fix misleading error message (setup file not found) 2020-06-12 18:00:18 +02:00
Ralph Lange
4de8c6240d README: remove 'Bash on MacOS' from FAQ 2020-06-12 18:00:18 +02:00
Ralph Lange
fd4bc0397a Add EXTRA definitions to the example builds 2020-06-12 18:00:18 +02:00
Ralph Lange
590e0e4d26 cue: implement EXTRA..EXTRA5 to set additional make args; add test 2020-06-12 18:00:18 +02:00
Ralph Lange
99619b5b90 appveyor: update examples for v3.0 2020-06-12 18:00:18 +02:00
Ralph Lange
e3d6ee16a9 travis: update examples for v3.0 2020-06-12 18:00:18 +02:00
Ralph Lange
e867277f6e Update README files for v3.0 2020-06-12 18:00:18 +02:00
Ralph Lange
2ea3e36eda travis: remove bash scripts (v2.x implementation) 2020-06-12 18:00:17 +02:00
Ralph Lange
e4dd66fd5c cue-test: fix tests on AppVeyor (set CONFIGURATION) 2020-06-07 14:24:36 +02:00
Ralph Lange
d4d094e2bb cue-test: add DetectContext tests for AppVeyor 2020-06-07 14:24:36 +02:00
Ralph Lange
c53bfa090e cue: recompile all dependency modules after a cache-miss 2020-06-07 14:24:36 +02:00
Ralph Lange
1f8acec2ac appveyor: update build configurations and cue calls 2020-06-07 14:24:36 +02:00
Ralph Lange
6ce24870e5 travis: add test and test-results actions 2020-06-07 14:24:36 +02:00
Ralph Lange
be4674e641 example: RTEMS memory fs only on Base 7.0 and above 2020-06-06 15:27:21 +02:00
Ralph Lange
5f97600612 defaults: set default base branch to 7.0
- add test for default base branch
- fixes #40
2020-06-06 15:27:21 +02:00
Ralph Lange
158bacd256 cue: add separate action for 'test-results'
fixes #31
2020-06-06 15:27:21 +02:00
Ralph Lange
0a4a845ff1 cue: only rebuild out-of-date dependencies 2020-06-06 15:27:21 +02:00
Ralph Lange
424fae7d53 travis: add RTEMS cross compilation jobs 2020-06-06 15:27:21 +02:00
Ralph Lange
4ca9efe4cf cue: add RTEMS cross compilation 2020-06-06 15:27:21 +02:00
Ralph Lange
dd23c6a91f travis: set base recursive=NO
to speed up the test builds (PVA not needed)
2020-06-06 15:27:21 +02:00
Ralph Lange
64908b18ba cue: add additional USR_* flags to CONFIG_SITE 2020-06-06 15:27:21 +02:00
Ralph Lange
eaffc82e36 travis: add wine32, wine64 and osx builds 2020-06-06 15:27:21 +02:00
Ralph Lange
f57b063f68 cue: compiler setup for clang, gcc, wine32 and wine64
fixes #30
2020-06-06 15:27:21 +02:00
Ralph Lange
58cebdd7dd Check make version for "-O" support
fixes #33
2020-06-06 15:27:21 +02:00
Ralph Lange
a0f1c7e96e travis: add Linux builds - bionic, trusty, clang
fixes #30
2020-06-06 15:27:21 +02:00
Ralph Lange
cdfea8ec70 cue-test: add DetectContext tests for Travis 2020-06-06 15:27:20 +02:00
Ralph Lange
9ea849eed3 cue: make detect_context() a function and ci a dict
for better testability
2020-06-06 14:55:06 +02:00
Ralph Lange
f456e47904 travis: fix unit tests and run/skip them
fixes #10
2020-06-06 14:55:06 +02:00
Ralph Lange
772fc3a09a cue-test: refactoring for v3.0 structure and names 2020-06-06 14:55:06 +02:00
Ralph Lange
d3b36cac74 cue: run check for Visual Studio versions only on Windows 2020-06-06 14:55:06 +02:00
Ralph Lange
4f01e9039b cue: set EPICS_BASE = '.' for base builds 2020-06-06 14:55:06 +02:00
Ralph Lange
5c8d3c20e8 cue: refactoring to avoid warnings 2020-06-06 14:55:06 +02:00
Ralph Lange
28ef822dd5 cue: add Travis fold control and AppVeyor printed folds
fixes #42
2020-06-06 14:55:06 +02:00
Ralph Lange
03d1a5456a travis: add static/debug/static-debug builds 2020-06-06 14:02:31 +02:00
Ralph Lange
9bd635d058 Switch example to use sequencer R2.2.8 2020-06-06 14:02:31 +02:00
Ralph Lange
bb5c73ae08 travis: BCFG for build configuration (static/debug) 2020-06-06 14:02:31 +02:00
Ralph Lange
9f6995c441 cue: allow multiple possible locations for a VS compiler version 2020-06-06 14:02:31 +02:00
Ralph Lange
25517c35d1 windows: CHOCO for additional chocolatey packages
Install:
- make
- @travis: strawberryperl
  the pre-installed perl does not have cpan and lacks
  essentials like ExtUtils::Command
- packages from CHOCO env
2020-06-06 14:02:31 +02:00
Ralph Lange
b7d282fe3b cue: load 'defaults' setup file last 2020-06-06 14:02:31 +02:00
Ralph Lange
e1a52130a7 cue: improve logging 2020-06-06 14:02:30 +02:00
Ralph Lange
7d9b5985c6 cue: fix setting EPICS_BASE and EPICS_HOST_ARCH 2020-06-06 14:02:30 +02:00
Ralph Lange
5106e1d753 cue: add call to epicsHostArch.pl script 2020-06-06 14:02:30 +02:00
Ralph Lange
98cf1e709f cue: fix Windows setup_for_build() 2020-06-06 14:02:30 +02:00
Ralph Lange
91aba0df5c cue: assume 'make' in PATH by default 2020-06-06 14:02:30 +02:00
Ralph Lange
044ff8fa88 cue: use 'gcc' for native MinGW on Windows 2020-06-06 14:02:30 +02:00
Ralph Lange
488053cea4 cue: detect CI configuration and set up accordingly
- configuration: static/shared, debug/optimized
- compiler: gcc, mingw, vs....
- platform: x86/x64
- service and os: travis/appveyor, linux/windows/macos
2020-06-06 14:02:30 +02:00
Ralph Lange
c3920bc499 Minimal set of builds on Travis 2020-06-06 14:01:50 +02:00
Ralph Lange
44c441c436 Rename and move cue to top level 2020-05-31 12:10:24 +02:00
Ralph Lange
55038b7315 Improve test make-target logic
- make runtests if no valid test-results target exists
(closes #41)
2020-05-26 17:32:20 +02:00
Ralph Lange
bb9b59156c travis: set CURDIR in build.sh (fixes #41) 2020-05-26 09:36:00 +02:00
Ralph Lange
e66c1f748c appveyor: allow VS2019/3.14/64bit/static-debug job to fail
the MSI tool of that specific build segfaults for unknown reasons
(not reproducible on local build)
2020-05-25 18:08:33 +02:00
Ralph Lange
ba5508b39e travis: update README
(fixes #19)
2020-05-20 18:59:45 +02:00
Ralph Lange
d5eab412b4 Readme: small updates and fixes 2020-05-20 18:42:04 +02:00
Ralph Lange
fff4249771 appveyor: fix VisualStudio parallel build for older Base versions 2020-05-14 18:18:50 +02:00
Ralph Lange
11e2f1852b Do not run 'make test-results' on Base 3.14
- also add tests for this
2020-05-13 12:35:38 +02:00
Ralph Lange
53e23e3684 appveyor: update README
- fixes #37
2020-05-11 09:10:42 +02:00
Ralph Lange
f0ce39129c appveyor: clean-up install; move 'prepare' to 'build_script'
fixes #38
2020-05-07 18:47:35 +02:00
Ralph Lange
1583d41197 Skip running test-results target if not defined 2020-05-05 11:59:25 +02:00
Ralph Lange
98bc1d8f1f Readme: add links to lower level readme pages, fix badge URL 2020-04-30 15:23:56 +02:00
Ralph Lange
4484a9c302 appveyor: explicitly set default build worker image
- also update paragraph on worker images to AppVeyor README
2020-04-30 12:00:15 +02:00
Ralph Lange
ebc4b5ca2a Readme: update references; advise to clear cache after update 2020-04-30 11:21:50 +02:00
Ralph Lange
26c268eb28 Merge pull request #36 from mdavidsaver:msi314
- updated with a required change to the AppVeyor script
2020-04-30 09:41:51 +02:00
Ralph Lange
02c1c016c4 example: add host-side expanded substitutions file
- checks availability of a working MSI tool (see #20 and #36)
2020-04-30 09:34:04 +02:00
Michael Davidsaver
27a1224d6d Refresh MSI 3.14 patch to allow using it
- set MSI to $EPICS_BASE_BIN/msi
- make patch file more symmetrical (patch with -p1)
- closes #20 (again), closes #36
2020-04-30 09:19:03 +02:00
Ralph Lange
157a7af098 appveyor: move init part of debugging settings to init section 2020-04-29 16:58:12 +02:00
Ralph Lange
3c1e223dc7 appveyor: don't build for changes in .travis.yml 2020-04-28 15:31:59 +02:00
Ralph Lange
4f4f76a5db appveyor: log name of build worker image 2020-04-28 14:37:30 +02:00
Ralph Lange
e91a588370 travis: copy RELEASE.local to top of checkout
- closes #32
  (fix is in AppVeyor do.py script as per 88831439)
2020-04-27 11:34:01 +02:00
Ralph Lange
29e657d585 Run tests using parallel make (reducing build time) 2020-04-27 11:34:01 +02:00
Ralph Lange
4413c7d75e travis: fix build.sh (set EPICS_BASE correctly) for BASE=SELF 2020-04-24 19:16:12 +02:00
Ralph Lange
48b15417a6 travis: fix RTEMS cross builds for Base 3.15 2020-04-24 19:12:20 +02:00
Ralph Lange
1ac8bf7479 appveyor: fix behavior when BASE set in setup file 2020-04-24 16:16:50 +02:00
Ralph Lange
d0f93f1920 travis: fix for EXTRA arguments with spaces/quotes
- feed EXTRA variables into an array to be properly expanded
2020-04-24 12:27:30 +02:00
Ralph Lange
27f823139a appveyor: don't walk() through the file system in host_info()
- was taking ~3min on AppVeyor builders
2020-04-23 14:12:03 +02:00
Ralph Lange
88831439b1 appveyor: consider base build (BASE=SELF) 2020-04-23 14:11:18 +02:00
Ralph Lange
177dfd4615 travis: fix /etc/hosts issue on bionic image 2020-04-23 14:11:17 +02:00
Ralph Lange
3bd2bb6dff travis: consider base build (BASE=SELF) 2020-04-22 13:36:16 +02:00
Ralph Lange
393a470d05 appveyor: add CMP doc to README 2020-04-22 10:25:18 +02:00
Ralph Lange
519b75aef2 appveyor: use pre-installed strawberry perl on vs2019 image 2020-04-22 10:25:18 +02:00
Ralph Lange
e8b01bf1a0 Merge pull request #29 from ralphlange:devel/add-appveyor
- closes #29, closes #6
2020-04-21 18:32:12 +02:00
Ralph Lange
79cc560594 appveyor: add minimal example file 2020-04-21 17:48:26 +02:00
Ralph Lange
32bdf84806 appveyor: improve RDP debugging options 2020-04-21 17:40:29 +02:00
Ralph Lange
070eab1473 appveyor: re-add recursive submodule update 2020-04-21 17:31:24 +02:00
Ralph Lange
11e0a60e3b appveyor: update example and README files 2020-04-08 15:56:00 +02:00
Ralph Lange
22d3a9db15 appveyor: add MSI 1.7 to any Base 3.14 dependency
- add test for patch being applied
2020-04-08 15:14:43 +02:00
Ralph Lange
a3532d3c55 appveyor: make cachedir configurable 2020-04-08 15:14:43 +02:00
Ralph Lange
b502aa7049 appveyor: update .appveyor.yml.example-full 2020-04-08 15:14:43 +02:00
Ralph Lange
d8e53e84fd appveyor: CC -> CMP for compiler toolchain setting 2020-04-08 15:14:43 +02:00
Ralph Lange
16bb305d24 appveyor: add setup_for_build() tests 2020-04-08 15:14:43 +02:00
Michael Davidsaver
6d0f34ac65 appveyor: do --add-path 2020-04-08 15:14:43 +02:00
Michael Davidsaver
2fd1f9ec16 appveyor: show test-results after .tap upload 2020-04-08 15:14:43 +02:00
Ralph Lange
ca74a8424b appveyor: create jobs for base 7.0, 3.15, 3.14
- default Base 7.0 / recursive = no
- add base 3.15 and 3.14 on vs2019
2020-04-08 15:14:43 +02:00
Ralph Lange
ddf6b961b1 appveyor: use VV to set logging level and silence dependency builds 2020-04-08 15:14:43 +02:00
Ralph Lange
e5af5c2bfe appveyor: print dependency table and RELEASE.local 2020-04-08 15:14:43 +02:00
Ralph Lange
d02dda5775 appveyor: add keys to call_make(); make test-results 2020-04-08 15:14:43 +02:00
Ralph Lange
4a6a979f89 appveyor: choco install re2c (for the sequencer) 2020-04-08 15:14:43 +02:00
Ralph Lange
81bf29b8ca appveyor: fix modlist() to allow MODULES in setup files 2020-04-08 15:14:43 +02:00
Ralph Lange
a8321aff92 appveyor: add binary locations to PATH for tools and DLLs 2020-04-08 15:14:42 +02:00
Michael Davidsaver
48832354da appveyor: export MAKE executable to environment
Save user scripts from having to compute
2020-04-08 15:14:42 +02:00
Michael Davidsaver
dbfba732fd appveyor: build action accepts arguments (passed on to make) 2020-04-08 15:14:42 +02:00
Ralph Lange
2f8f4e7fb1 appveyor: add tests for _RECURSIVE and _DEPTH 2020-04-08 15:14:42 +02:00
Ralph Lange
e96f77d8b1 appveyor: fix _DEPTH option 2020-04-08 15:14:42 +02:00
Michael Davidsaver
e14b97b18e appveyor: fix _RECURSIVE option 2020-04-08 15:14:42 +02:00
Michael Davidsaver
5d4fdec627 appveyor: add newlines when writing to CONFIG_SITE 2020-04-08 15:14:42 +02:00
Ralph Lange
fc141f874a appveyor: run unit tests via matrix 'only' setting 2020-04-08 15:14:42 +02:00
Michael Davidsaver
a006293461 appveyor: fixup argument parsing 2020-04-08 15:14:42 +02:00
Ralph Lange
b7d505c2e2 appveyor: use '/' in RELEASE.local paths 2020-04-08 15:14:42 +02:00
Ralph Lange
eceeab66cf appveyor: test full matrix (no 64bit builds on <= vs2012) 2020-04-08 15:14:42 +02:00
Ralph Lange
e50271765f appveyor: add build cache for external tools 2020-04-08 15:14:42 +02:00
Ralph Lange
d9e1df2367 appveyor: add HOST_ARCH suffix for -debug and -static 2020-04-08 15:14:42 +02:00
Ralph Lange
7881b3527c appveyor: fix for older vcvars.bat manipulating PLATFORM 2020-04-08 15:14:42 +02:00
Ralph Lange
0ac6c96e2a appveyor: add call_make() wrapper, forward returncode 2020-04-08 15:14:41 +02:00
Ralph Lange
eb5dd2a86c appveyor: read Base location from RELEASE.local 2020-04-08 15:14:41 +02:00
Ralph Lange
b3efae2451 appveyor: improve logging / print formatting 2020-04-08 15:14:41 +02:00
Ralph Lange
9c58196b6d appveyor: use Base 3.15 for tests to speed up build 2020-04-08 15:14:41 +02:00
Ralph Lange
421fe54fe6 appveyor: move environment setting into setup_for_build() 2020-04-08 15:14:41 +02:00
Ralph Lange
11f5c94236 appveyor: use r'' string constants for all Windows paths 2020-04-08 15:14:41 +02:00
Ralph Lange
ff14d5ceb4 appveyor: adding make calls for 'build' and 'test' actions 2020-04-08 15:14:41 +02:00
Ralph Lange
11cb469fb9 appveyor: silence Perl relocation batch script 2020-04-08 15:14:41 +02:00
Ralph Lange
0ae628673c appveyor: reduce number of SET=test00 runs 2020-04-08 15:14:41 +02:00
Ralph Lange
e11632798a appveyor: print make version 2020-04-08 15:14:41 +02:00
Ralph Lange
24a70882d0 appveyor: use parallel make (except for Base 3.14) 2020-04-08 15:14:41 +02:00
Ralph Lange
430a699d7f appveyor: add mingw definitions 2020-04-08 15:14:41 +02:00
Ralph Lange
c056b5ad0f appveyor: only grep 'vcvarsall.bat' in VS install list 2020-04-08 15:14:41 +02:00
Ralph Lange
a88300bdd7 appveyor: put strawberry perl in the PATH 2020-04-08 15:14:41 +02:00
Ralph Lange
85f570ac09 appveyor: set EPICS_HOST_ARCH 2020-04-08 15:14:41 +02:00
Ralph Lange
d5c419bc8e appveyor: don't run TestVCVars unless on Windows 2020-04-08 15:14:40 +02:00
Ralph Lange
08cdff9495 appveyor: don't run host_info() twice for compile tests 2020-04-08 15:14:40 +02:00
Ralph Lange
d052350738 appveyor: add complete_setup() and do a complete log 2020-04-08 15:14:40 +02:00
Ralph Lange
b53468e50e appveyor: make modlist a function 2020-04-08 15:14:40 +02:00
Michael Davidsaver
e47e35bae4 appveyor: add do_exec() action; 'make' dependencies 2020-04-08 15:14:40 +02:00
Michael Davidsaver
6071fdf198 appveyor: minor fixes
- using 'place' as both a local and a global is confusing
- use "with open()" instead of "open() / close()"
2020-04-08 15:14:40 +02:00
Michael Davidsaver
00f003afa5 appveyor: add with_vcvars to read VS environment settings
- writes and calls a "trampoline" batch that calls the
  appropriate "vcvarsall" script, then calls back into python
2020-04-08 15:14:40 +02:00
Michael Davidsaver
c3918cdbaa appveyor: add printing host_info (python settings, VS versions) 2020-04-08 15:14:40 +02:00
Michael Davidsaver
e2399dc7f3 appveyor: avoid chdir, use 'cwd' key instead 2020-04-08 15:14:40 +02:00
Ralph Lange
5d17fdf98d appveyor: configure EPICS build; install make and Perl 2020-04-08 15:14:40 +02:00
Ralph Lange
249db7db22 appveyor: add cloning the dependency modules to 'prepare' action 2020-04-08 15:14:40 +02:00
Ralph Lange
4dcfbb2079 appveyor: add default repo URL test 2020-04-08 15:14:40 +02:00
Ralph Lange
9742c5f9c6 appveyor: use portable os.chdir() instead of "cd" 2020-04-08 15:14:40 +02:00
Ralph Lange
2847f78ab2 appveyor: add error handler to fix shutil.rmtree on Windows 2020-04-08 15:14:39 +02:00
Ralph Lange
28aeda558b appveyor: use decode() on git hashes 2020-04-08 15:14:39 +02:00
Ralph Lange
2dfa55420f appveyor-test: always chdir into builddir 2020-04-08 15:14:39 +02:00
Ralph Lange
22d0feaa05 appveyor: enable debugging 2020-04-08 15:14:39 +02:00
Ralph Lange
cd0becff06 appveyor: add add_dependency() 2020-04-08 15:14:39 +02:00
Ralph Lange
355a5c2fb7 appveyor: fix logging in do.py 2020-04-08 15:14:39 +02:00
Ralph Lange
139b491614 appveyor: improve tests (capture stdout; use dictionaries) 2020-04-08 15:14:39 +02:00
Ralph Lange
b15d9bb62e appveyor: first version (source_set, update_release_local) 2020-04-08 15:14:39 +02:00
Ralph Lange
761152babe appveyor: first version 2020-04-08 15:14:39 +02:00
Michael Davidsaver
a34bb7d2b2 travis: always show test-results
even if one of the tests crashes
2020-04-08 14:52:06 +02:00
Ralph Lange
94fdfbe802 travis: fix MSI patch file location 2020-04-08 14:50:06 +02:00
Ralph Lange
4cad610601 travis: add MacOS Bash 3.x workaround to travis-test.sh 2020-04-06 18:31:52 +02:00
Ralph Lange
25a60b8490 travis: fix broken test for detecting empty SETUP_PATH
- Test was broken by commit 487d8eb2;
  this remained undetected due to a name conflict between
  die() in the test script and die() in utils.sh
2020-04-06 17:46:25 +02:00
Ralph Lange
953b2960da travis: refactor die() -> fail() in tests
- avoids name conflict with die() in utils that does not quit under test
2020-04-06 17:46:25 +02:00
Ralph Lange
a8bee0552d Add MSI 1.7 to any Base 3.14 dependency
- add test for patch being applied
- fixes #20
2020-04-06 17:46:25 +02:00
Ralph Lange
c8b0894cb6 Add setup files for synApps 6.0 and 6.1
(fixes #15)
2020-04-02 18:00:25 +02:00
Ralph Lange
47d3f0c0f3 example: add testing on RTEMS 2020-03-31 18:43:36 +02:00
Ralph Lange
1d430e1bfd example: add simple test to check correct linkage
- needs to be 3.14 compatible
- fixes #27
2020-03-31 18:43:36 +02:00
Ralph Lange
12fca1961f Remove specific setting for EPICS_BASE in configure/RELEASE 2020-03-31 18:43:36 +02:00
Ralph Lange
ee803fc38d example: use TYPED_RSET for xxxRecord.c 2020-03-31 18:43:36 +02:00
Ralph Lange
0b589770bf Add Python things to .gitignore 2020-03-31 09:34:37 +02:00
Ralph Lange
f92c1e716e Readme: add Bash4@Mac requirement, add ASYN to references 2020-03-20 10:18:06 +01:00
Ralph Lange
f5047a9e11 travis: fix test error message 2020-03-17 15:26:12 +01:00
Ralph Lange
eb471d9539 Recompile remaining MODULE list after a miss
(fixes #18, closes #21)
2020-03-02 18:22:43 +01:00
Ralph Lange
e3dace9ee3 Fix FOO_RECURSIVE behavior
(fixes #25, closes #24)
2020-03-02 15:12:28 +01:00
Ralph Lange
660c1c6773 Add test for FOO_RECURSIVE behavior
- regression test for issue #25
2020-03-02 15:07:03 +01:00
Ralph Lange
b0ab3bf333 Allow CACHEDIR to be set externally.
fixes #22, closes #23
2020-02-28 12:40:40 +01:00
Ralph Lange
17ce951e99 Merge pull request #17 from EuropeanSpallationSource/overwrite-configure-RELEASE
travis: overwrite configure/RELEASE instead of appending to it
2020-02-12 16:42:54 +00:00
Ralph Lange
7ae7054196 Make ADD_MODULES be built before MODULES 2020-02-12 13:50:11 +00:00
Torsten Bögershausen
aee11f266b Overwrite configure/RELEASE
The current scripts append the line

-include $(TOP)/../RELEASE.local

at the end of the configure/RELEASE file (if needed).
While this works for most EPICS modules, there is one drawback:

Definitions like
SUPPORT=/myfavorite/build/server
are not allways overwritten (better say undefined).

Fix this and create a configure/RELEASE file which is the same
for all EPICS modules.
Simply overwrite configure/RELEASE with what we want.

Thanks to the EPICS community for this suggestion
2020-02-12 13:08:56 +01:00
Ralph Lange
94744c9a8f Readme: small improvements and clarifications 2020-01-17 13:01:34 +01:00
Ralph Lange
741a293029 Add ipac to the list of tested modules 2020-01-17 13:01:34 +01:00
Ralph Lange
aa8f35f086 Support ADD_MODULES to add specific modules to jobs
(closes #14)
2020-01-17 13:01:34 +01:00
Ralph Lange
82685b0280 Readme: small fixes, add FAQ how to update ci-scripts 2020-01-15 13:27:57 +01:00
Ralph Lange
a39346bc78 Readme: add references, FAQ, Travis badge 2020-01-14 14:36:28 +01:00
Ralph Lange
5d76e1ff07 Add motor to the list of tested modules 2020-01-13 17:24:05 +01:00
Ralph Lange
c721b7ac32 Readme: fix heading levels 2020-01-13 11:10:40 +01:00
Ralph Lange
8233b9f81b travis: auto-fix missing inclusion of ../RELEASE.local 2020-01-10 16:27:33 +01:00
Ralph Lange
7de5a7edc3 travis: pre-install homebrew packages (in global addons)
(closes #13)
2019-12-18 17:24:19 +01:00
Ralph Lange
80ab30469e travis: avoid spawning 'tr' by using bash builtins 2019-12-18 17:22:11 +01:00
4dfd098545 travis: consistently use SETUP_PATH user variable 2019-12-18 17:00:46 +01:00
3929851deb travis: implement die() in utils.sh 2019-12-18 16:58:31 +01:00
e6f722914c travis: fix error message formats 2019-12-18 16:57:09 +01:00
d4ab170b3c avoid 'tr' calls to improve performance a bit 2019-12-16 14:37:32 +01:00
487d8eb287 refer to user variable, not internal variable in error message 2019-12-13 14:58:57 +01:00
c7aca7cd73 implement die function 2019-12-13 14:39:53 +01:00
bdcb2f3173 fix error message formats 2019-12-13 14:03:27 +01:00
Ralph Lange
e81ec3aa0c Merge pull request #12 from dirk-zimoch/formatfix
travis: fix formats for folders and colors
2019-12-13 11:45:20 +01:00
64e382b46e enable line feed after fold_start message 2019-12-12 10:44:51 +01:00
add7bbcf88 enable escape codes for color 2019-12-12 10:43:26 +01:00
Ralph Lange
377bd2a915 Readme: update, add version badge, mention License 2019-12-11 16:42:52 +01:00
Ralph Lange
7782f928c2 Add LICENSE 2019-12-11 16:40:37 +01:00
34 changed files with 4248 additions and 878 deletions

.appveyor.yml (new file, 175 lines)

@@ -0,0 +1,175 @@
# .appveyor.yml for testing EPICS Base ci-scripts
# (see: https://github.com/epics-base/ci-scripts)
# Note:
# Paths to scripts are different in this test configuration
# (your module has one more directory level: .ci)
# Ralph Lange <ralph.lange@gmx.de>
# Copyright (c) 2020 ITER Organization
#---------------------------------#
# build cache #
#---------------------------------#
cache:
- C:\Users\appveyor\.tools -> appveyor\do.py
#---------------------------------#
# repository cloning #
#---------------------------------#
# Called at very beginning, before repo cloning
init:
# Set autocrlf to make batch files work
- git config --global core.autocrlf true
# print the connection info for RDP connections (see 'debugging' below)
- ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
# Set clone depth (do not fetch complete history)
clone_depth: 50
# Skipping commits affecting only specific files
skip_commits:
files:
- 'documentation/*'
- 'templates/*'
- '**/*.html'
- '**/*.md'
- '.travis.yml'
#---------------------------------#
# additional packages #
#---------------------------------#
install:
# fetch submodules (like ci-scripts)
- cmd: git submodule update --init --recursive
# for the sequencer
- cinst re2c
#---------------------------------#
# build matrix configuration #
#---------------------------------#
# Default build worker image
image: Visual Studio 2015
# Build Configurations: shared/static, optimized/debug
configuration:
- default
- static
- debug
- static-debug
# Environment variables: compiler toolchain, base version, setup file, ...
environment:
# common / default variables for all jobs
SETUP_PATH: .:.ci
SET: test01
BASE_RECURSIVE: NO
VV: 1
matrix:
- CMP: vs2019
SET: test00
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
- CMP: gcc
- CMP: vs2019
VV: 0
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
- CMP: vs2019
BASE: 3.15
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
- CMP: vs2019
BASE: 3.14
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
- CMP: vs2017
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
EXTRA: EXTRA_DEF="extra"
EXTRA1: EXTRA1_DEF="extra 1"
EXTRA2: EXTRA2_DEF="extra 2"
- CMP: vs2015
- CMP: vs2013
- CMP: vs2012
- CMP: vs2010
- CMP: vs2008
# Platform: architecture
platform:
- x86
- x64
# Matrix configuration: allow/exclude specific failing jobs
matrix:
# The MSI tool generated by that build segfaults (*shrug*)
allow_failures:
- platform: x64
configuration: static-debug
CMP: vs2019
BASE: 3.14
exclude:
# Run test00 only once: x64 dynamic
- platform: x86
SET: test00
- configuration: static
SET: test00
- configuration: debug
SET: test00
- configuration: static-debug
SET: test00
# VS2012 and older installs don't have the 64 bit compiler
- platform: x64
CMP: vs2012
- platform: x64
CMP: vs2010
- platform: x64
CMP: vs2008
# Run test script for unit tests (SET = test00)
for:
-
matrix:
only:
- SET: test00
build_script:
- cmd: python cue-test.py
test_script:
- cmd: echo Tests have been run in the build phase
#---------------------------------#
# building & testing #
#---------------------------------#
build_script:
- cmd: python cue.py prepare
- cmd: python cue.py build
test_script:
- cmd: python cue.py test
- cmd: python cue.py test-results
#---------------------------------#
# debugging #
#---------------------------------#
## if you want to connect by remote desktop to a failed build, uncomment these lines
## note that you will need to connect within the usual build timeout limit (60 minutes)
## so you may want to adjust the build matrix above to just build the one of interest
#on_failure:
# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
#---------------------------------#
# notifications #
#---------------------------------#
notifications:
# - provider: Email
# to:
# - core-talk@aps.anl.gov
# on_build_success: false
# - provider: GitHubPullRequest

.gitignore (vendored, 5 additions)

@@ -1,5 +1,10 @@
/QtC-*
*.orig
*.log
.*.swp
*.autosave
/.idea/
*.py[cod]
__pycache__/

.travis.yml

@@ -16,6 +16,9 @@ cache:
env:
global:
- SETUP_PATH=.:.ci
- VV=1
- CHOCO=re2c
- BASE_RECURSIVE=NO
addons:
apt:
@@ -27,12 +30,18 @@ addons:
- g++-mingw-w64-i686
- g++-mingw-w64-x86-64
- qemu-system-x86
homebrew:
packages:
- re2c
update: true
install:
- ./travis/prepare.sh
- python cue.py prepare
script:
- ./travis-test.sh
- python cue.py build
- python cue.py test
- python cue.py test-results
# If you need to do more during install and build,
# add a local directory to your module and do e.g.
@@ -41,138 +50,125 @@ script:
# Define build jobs
# Well-known variables to use
# SET source setup file
# EXTRA content will be added to make command line
# STATIC set to YES for static build (default: NO)
# TEST set to NO to skip running the tests (default: YES)
# VV set to make build scripts verbose (default: unset)
# SET source setup file
# ADD_MODULES extra modules (for a specific job)
# EXTRA content will be added to make command line
# BCFG build configuration (static/debug/static-debug;
# default: shared-optimized)
# TEST set to NO to skip running the tests (default: YES)
# VV set to make build scripts verbose (default: unset)
# Usually from setup files, but may be specified or overridden
# on a job line
# MODULES list of dependency modules
# BASE branch or release tag name of the EPICS Base to use
# <MODULE> branch or release tag for a specific module
# ... see README for setup file syntax description
# MODULES list of dependency modules
# BASE branch or release tag name of the EPICS Base to use
# <MODULE> branch or release tag for a specific module
# ... see README for setup file syntax description
jobs:
include:
# Run unit tests on Linux and Mac
# Run unit tests
- env: SET=test00
install: python cue-test.py env
script: python cue-test.py
- env: SET=test00 TRAVIS_COMPILER=gcc
language: python
python: "3.7"
install: python cue-test.py env
script: python cue-test.py
- env: SET=test00
os: osx
install: python cue-test.py env
script: python cue-test.py
# Compile the example application
# using the build configurations from full makeBaseApp example
- env: SET=test00
os: windows
install: python cue-test.py env
script: python cue-test.py
# Compile example
- env: SET=test01
dist: bionic
- env: SET=test01 BCFG=static-debug
dist: bionic
- env: SET=test01
compiler: clang
dist: trusty
- env: VV="" SET=test01
- env: SET=test01 BCFG=static-debug
dist: trusty
- env: SET=test01 TRAVIS_COMPILER=gcc
language: python
python: "3.7"
- env: SET=test01 EXTRA="CMD_CXXFLAGS=-std=c++11"
- env: SET=test01 EXTRA="CMD_CXXFLAGS=-std=c++11"
compiler: clang
# trusty is pretty close to RHEL7
- env: SET=test01
dist: trusty
- env: SET=test01 EXTRA="CMD_CXXFLAGS=-std=c++11"
dist: trusty
# Cross-compilation to Windows using MinGW and WINE
- env: SET=test01 WINE=32 TEST=NO STATIC=YES
compiler: mingw
- env: SET=test01 WINE=64 TEST=NO STATIC=YES
compiler: mingw
# dynamic (DLL) builds are broken on xenial
- env: SET=test01 WINE=32 TEST=NO STATIC=NO
dist: bionic
compiler: mingw
- env: SET=test01 WINE=64 TEST=NO STATIC=NO
dist: bionic
compiler: mingw
# Cross-compilation to RTEMS
# (needs EPICS Base >= 3.16.2)
- env: SET=test01 RTEMS=4.10 TEST=NO
- env: SET=test01 RTEMS=4.9 TEST=NO
# Other gcc versions (adding as an extra package)
- env: SET=test01 BCFG=static
- env: SET=test01 BCFG=debug
- env: SET=test01 BCFG=static-debug
- env: SET=test01
compiler: gcc-6
addons: { apt: { packages: ["g++-6"], sources: ["ubuntu-toolchain-r-test"] } }
compiler: clang
- env: SET=test01 BCFG=static-debug
compiler: clang
- os: osx
env:
- SET=test01
- EXTRA=CMD_CFLAGS="-mmacosx-version-min=10.7"
- EXTRA1=CMD_CXXFLAGS="-mmacosx-version-min=10.7 -std=c++11 -stdlib=libc++"
- EXTRA2=CMD_LDXFLAGS="-mmacosx-version-min=10.7 -std=c++11 -stdlib=libc++"
compiler: clang
- env: SET=test01 BCFG=debug
os: osx
compiler: clang
- env: SET=test01 WINE=32 TEST=NO
- env: SET=test01 WINE=32 TEST=NO BCFG=static
- env: SET=test01 WINE=32 TEST=NO BCFG=debug
- env: SET=test01 WINE=32 TEST=NO BCFG=static-debug
- env: SET=test01 WINE=64 TEST=NO
- env: SET=test01 WINE=64 TEST=NO BCFG=static
- env: SET=test01 WINE=64 TEST=NO BCFG=debug
- env: SET=test01 WINE=64 TEST=NO BCFG=static-debug
- env: SET=test01 RTEMS=4.9 BASE=3.15 TEST=NO
- env: SET=test01 RTEMS=4.9 BASE=3.15 TEST=NO BCFG=static
- env: SET=test01 RTEMS=4.9 BASE=3.15 TEST=NO BCFG=debug
- env: SET=test01 RTEMS=4.9 BASE=3.15 TEST=NO BCFG=static-debug
- env: SET=test01 RTEMS=4.10
- env: SET=test01 RTEMS=4.10 BCFG=static
- env: SET=test01 RTEMS=4.10 BCFG=debug
- env: SET=test01 RTEMS=4.10 BCFG=static-debug
- env: SET=test01
compiler: gcc-7
addons: { apt: { packages: ["g++-7"], sources: ["ubuntu-toolchain-r-test"] } }
os: windows
- env: SET=test01 BCFG=static
os: windows
- env: SET=test01 BCFG=debug
os: windows
- env: SET=test01 BCFG=static-debug
os: windows
# MacOS build
# SNCSEQ 2.2.7 fails to build on MacOS; currently needs master
- env: SET=test01 SNCSEQ=master
os: osx
compiler: clang
addons: { homebrew: { packages: ["re2c"], update: true } }
# Base 3.15 builds
# ================
- env: BASE=R3.15.7 SET=test01
- env: BASE=R3.15.7 SET=test01 WINE=64 TEST=NO STATIC=YES
dist: bionic
compiler: mingw
# The DLL build for this Base version is known to fail
# - env: BASE=R3.15.7 SET=test01 WINE=64 TEST=NO STATIC=NO
# dist: bionic
# compiler: mingw
# Cross-compilation to RTEMS
# (needs EPICS Base >= 3.16.2)
- env: BASE=R3.16.2 SET=test01 RTEMS=4.10 TEST=NO
dist: trusty
- env: BASE=R3.16.2 SET=test01 RTEMS=4.9 TEST=NO
dist: trusty
# SNCSEQ 2.2.7 fails to build on MacOS; currently needs master
- env: BASE=R3.15.7 SET=test01 SNCSEQ=master
os: osx
compiler: clang
addons: { homebrew: { packages: ["re2c"], update: true } }
# Base 3.14 builds
# ================
- env: BASE=R3.14.12.8 SET=test01
- env: BASE=R3.14.12.8 SET=test01 WINE=64 TEST=NO STATIC=YES
dist: bionic
compiler: mingw
# The DLL build for this Base version is known to fail
# - env: BASE=R3.14.12.8 SET=test01 WINE=64 TEST=NO STATIC=NO
# dist: bionic
# compiler: mingw
# SNCSEQ 2.2.7 fails to build on MacOS; currently needs master
- env: BASE=R3.14.12.8 SET=test01 SNCSEQ=master
os: osx
compiler: clang
addons: { homebrew: { packages: ["re2c"], update: true } }
- env: SET=test01
compiler: vs2017
os: windows
- env: SET=test01 BCFG=static
compiler: vs2017
os: windows
- env: SET=test01 BCFG=debug
compiler: vs2017
os: windows
- env: SET=test01 BCFG=static-debug
compiler: vs2017
os: windows

LICENSE (new file, 65 lines)

@@ -0,0 +1,65 @@
Copyright (c) 2019 EPICS. All rights reserved.
EPICS CI-Scripts are distributed subject to the following
license conditions:
SOFTWARE LICENSE AGREEMENT
Software: EPICS CI-Scripts
1. The "Software", below, refers to EPICS CI-Scripts (in
either source code, or binary form and accompanying documentation).
Each licensee is addressed as "you" or "Licensee."
2. The copyright holders shown above and their third-party licensors
hereby grant Licensee a royalty-free nonexclusive license, subject to
the limitations stated herein and U.S. Government license rights.
3. You may modify and make a copy or copies of the Software for use
within your organization, if you meet the following conditions:
a. Copies in source code must include the copyright notice and this
Software License Agreement.
b. Copies in binary form must include the copyright notice and this
Software License Agreement in the documentation and/or other
materials provided with the copy.
4. You may modify a copy or copies of the Software or any portion of it,
thus forming a work based on the Software, and distribute copies of
such work outside your organization, if you meet all of the following
conditions:
a. Copies in source code must include the copyright notice and this
Software License Agreement;
b. Copies in binary form must include the copyright notice and this
Software License Agreement in the documentation and/or other
materials provided with the copy;
c. Modified copies and works based on the Software must carry
prominent notices stating that you changed specified portions of
the Software.
5. Portions of the Software resulted from work developed under a U.S.
Government contract and are subject to the following license: the
Government is granted for itself and others acting on its behalf a
paid-up, nonexclusive, irrevocable worldwide license in this computer
software to reproduce, prepare derivative works, and perform publicly
and display publicly.
6. WARRANTY DISCLAIMER. THE SOFTWARE IS SUPPLIED "AS IS" WITHOUT WARRANTY
OF ANY KIND. THE COPYRIGHT HOLDERS, THEIR THIRD PARTY LICENSORS, THE
UNITED STATES, THE UNITED STATES DEPARTMENT OF ENERGY, AND THEIR
EMPLOYEES: (1) DISCLAIM ANY WARRANTIES, EXPRESS OR IMPLIED, INCLUDING
BUT NOT LIMITED TO ANY IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE, TITLE OR NON-INFRINGEMENT, (2) DO NOT ASSUME
ANY LEGAL LIABILITY OR RESPONSIBILITY FOR THE ACCURACY, COMPLETENESS,
OR USEFULNESS OF THE SOFTWARE, (3) DO NOT REPRESENT THAT USE OF THE
SOFTWARE WOULD NOT INFRINGE PRIVATELY OWNED RIGHTS, (4) DO NOT WARRANT
THAT THE SOFTWARE WILL FUNCTION UNINTERRUPTED, THAT IT IS ERROR-FREE
OR THAT ANY ERRORS WILL BE CORRECTED.
7. LIMITATION OF LIABILITY. IN NO EVENT WILL THE COPYRIGHT HOLDERS, THEIR
THIRD PARTY LICENSORS, THE UNITED STATES, THE UNITED STATES DEPARTMENT
OF ENERGY, OR THEIR EMPLOYEES: BE LIABLE FOR ANY INDIRECT, INCIDENTAL,
CONSEQUENTIAL, SPECIAL OR PUNITIVE DAMAGES OF ANY KIND OR NATURE,
INCLUDING BUT NOT LIMITED TO LOSS OF PROFITS OR LOSS OF DATA, FOR ANY
REASON WHATSOEVER, WHETHER SUCH LIABILITY IS ASSERTED ON THE BASIS OF
CONTRACT, TORT (INCLUDING NEGLIGENCE OR STRICT LIABILITY), OR
OTHERWISE, EVEN IF ANY OF SAID PARTIES HAS BEEN WARNED OF THE
POSSIBILITY OF SUCH LOSS OR DAMAGES.

README.md (307 changed lines)

@@ -1,4 +1,8 @@
# Continuous Integration Scripts for EPICS Modules
<a target="_blank" href="http://semver.org">![Version][badge.version]</a>
<a target="_blank" href="https://travis-ci.org/epics-base/ci-scripts">![Travis status][badge.travis]</a>
<a target="_blank" href="https://ci.appveyor.com/project/epics-base/ci-scripts">![AppVeyor status][badge.appveyor]</a>
# Continuous Integration for EPICS Modules
The scripts inside this repository are intended to provide a common,
easy-to-use and flexible way to add Continuous Integration to EPICS
@@ -6,66 +10,92 @@ software modules, e.g. Device or Driver Support modules.
By including this repository as a Git Submodule, you will be able to
use the same flexible, powerful CI setup that EPICS Base uses,
including a mechanism to specify sets of dependent modules
including a way to specify sets of dependent modules
(with versions) that you want to compile your module against.
By using the submodule mechnism, your module will always use an
By using the submodule mechanism, your module will always use an
explicit commit, i.e. a fixed version of the scripts.
This ensures that any further development of the ci-scripts will
never break existing use.
never break your existing use.
## This Repository
In addition to the scripts themselves (in the subdirectories),
this repository contains the test suite that is used to verify
functionality and features of the ci-scripts.
In addition to the script that runs the builds and tests, this repository
contains service specific documentation and example configuration files
(in the subdirectories), and a small test suite that is used to verify
functionality and features of the ci-scripts module itself.
You are welcome to use the test suite as a reference, but keep in
mind that in your module the path to the scripts has one level more
(e.g., `./travis/abc` here would be `./.ci/travis/abc` in your
The example files are your best reference. They are kept up-to-date and
show a fully-featured and a minimal setup.
You are welcome to use the test suite as a secondary reference, but keep in
mind that in your main module the path to the scripts has one level more
(e.g., `./abc` here would be `./.ci/abc` in your
module).
Also, a test suite might not show the same level of quality as an
example.
Also, the test suite does not show the same quality and documentation
levels as the example files.
## Features
- Compile against different branches or releases of EPICS Base and
additional dependencies (modules like asyn, std, etc.).
additional dependencies (modules like asyn, std, sequencer, etc.).
- Define settings files that declare sets of dependencies
with their versions and locations.
- Define setup files that declare sets of dependencies with their
versions and locations.
- Define hook scripts for any dependency.
- Define hooks for any dependency.
Hooks are run on the dependency module before it is compiled, so
the module can be patched or further configured.
- Define static or shared builds (executables, libraries).
- Define shared (default) or static builds (for executables and libraries).
- Define optimized (default) or debug builds.
- Run tests (using the EPICS unit test suite).
- Run tests (using the EPICS build system, i.e., `make runtests`
and friends).
## Supported CI Services
### Travis-CI
### [Travis-CI](https://travis-ci.org/)
- Five parallel runners on Linux/Windows (one runner on MacOS)
- Use different compilers (gcc, clang)
- Use different gcc versions
- Cross-compile for Windows 32bit and 64bit using MinGW and WINE
- Cross-compile for RTEMS 4.9 and 4.10 (Base >= 3.16.2)
- Compile on MacOS
- Built dependencies are cached (for faster builds)
- Cross-compile for RTEMS 4.9 and 4.10 (Base >= 3.15)
- Compile natively on MacOS (clang)
- Compile natively on Windows (gcc/MinGW, Visual Studio 2017)
- Built dependencies are cached (for faster builds).
### How to Use the CI-Scripts
See specific
**[ci-scripts on Travis-CI README](travis/README.md)**
for more details.
### [AppVeyor](https://www.appveyor.com/)
- One parallel runner (all builds are sequential)
- Use different compilers (Visual Studio, gcc/MinGW)
- Use different Visual Studio versions: \
2008, 2010, 2012, 2013, 2015, 2017, 2019
- Compile for Windows 32bit and 64bit
- No useful caching available.
See specific
**[ci-scripts on AppVeyor README](appveyor/README.md)**
for more details.
## How to Use the CI-Scripts
1. Get an account on a supported CI service provider platform.
(e.g. [Travis-CI](https://travis-ci.org/),
Appveyor, Azure Pipelines...)
[AppVeyor](https://www.appveyor.com/), ...)
(More details in the specific README of the subdirectory.)
2. In your Support Module, add this ci-scripts respository
2. In your Support Module, add this ci-scripts repository
as a Git Submodule (name suggestion: `.ci`).
```bash
git submodule add https://github.com/epics-base/ci-scripts .ci
```
$ git submodule add https://github.com/epics-base/ci-scripts .ci
```
3. Create setup files for different sets of dependencies you
want to compile against. (See below.)
@@ -73,13 +103,14 @@ example.
```
MODULES=sncseq asyn
BASE=R3.15.6
BASE=3.15
ASYN=R4-34
SNCSEQ=R2-2-7
SNCSEQ=R2-2-8
```
will compile against the EPICS Base release 3.15.6, the Sequencer
release 2.2.7 and release 4.34 of asyn.
(Any settings can be overridden from `.travis.yml`.)
will compile against the EPICS Base 3.15 branch, the Sequencer
release 2.2.8 and release 4.34 of asyn.
(Any settings can be overridden from the specific job line
in the service configuration, e.g., `.travis.yml`.)
4. Create a configuration for the CI service by copying one of
the examples provided in the service specific subdirectory
@@ -89,14 +120,41 @@ example.
5. Push your changes and check the CI service for your build results.
## Calling the cue.py Script
Independent of CI service and platform, the runner script is called
from your main configuration as:
`python .ci/cue.py <action>`
where `<action>` is one of:
`prepare`\
Prepare the build: clone Base and the configured dependency modules,
set up the EPICS build system, then
compile Base and these modules in the order they appear in the `MODULES`
setting.
`build`\
Build your main module.
`test`\
Run the tests of your main module.
`test-results`\
Collect the results of your tests and print a summary.
`exec`\
Execute the remainder of the line using the default command shell.
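
For illustration, a user module wires these actions into its CI configuration
much like this repository's own `.travis.yml` shown above; the only difference
is the submodule prefix (a minimal sketch, assuming the submodule was added
as `.ci`):

```yaml
install:
  - python .ci/cue.py prepare
script:
  - python .ci/cue.py build
  - python .ci/cue.py test
  - python .ci/cue.py test-results
```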
## Setup Files
Your module might depend on EPICS Base and a few other support modules.
(E.g., a specific driver might need StreamDevice, ASYN and the Sequencer.)
In that case, building against every possible combination of released
versions of those dependencies is not possible:
Base (37) x StreamDevice (50) x ASYN (40) x Sequencer (51) would produce
more than 3.7 million different combinations, i.e. build jobs.
Base (39) x StreamDevice (50) x ASYN (40) x Sequencer (52) would produce
more than 4 million different combinations, i.e. build jobs.
A more reasonable approach is to create a few setups, each being a
combination of dependency releases, that do a few scans of the available
@@ -104,27 +162,36 @@ combination of dependency releases, that do a few scans of the available
for stable versions that many of your users have in production, one for the
latest released versions and one for the development branches.
A job uses a setup file if `SET=<setup>` (without the `.set` extension
of the setup file) is set for the job in the main configuration file.
## Setup File Syntax
Setup files are loaded by the bash scripts. They are found by searching
Setup files are loaded by the build script. They are found by searching
the locations in `SETUP_PATH` (space or colon separated list of directories,
relative to your module's root directory).
Setup files can include other setup files by calling `include <setup>`
(omitting the `.set` extension of the setup file). The configured
(again omitting the `.set` extension of the setup file). The configured
`SETUP_PATH` is searched for the include.
Any `VAR=value` setting of a variable is only executed if `VAR` is unset or
empty. That way any settings can be overridden by settings in `.travis.yml`.
Any `VAR=value` setting of a variable in a setup file is only executed if
`VAR` is unset or empty.
That way any settings can be overridden by setting them in the job
description inside the main configuration file (e.g., `.travis.yml`).
Empty lines or lines starting with `#` are ignored.
`MODULES="<list of names>"` should list the dependencies (software modules)
`MODULES=<list of names>` should list the dependencies (software modules)
by using their well-known slugs, separated by spaces.
EPICS Base (slug: `base`) will always be a dependency and will be added and
compiled first. The other dependencies are added and compiled in the order
they are defined in `MODULES`.
Modules needed only for specific jobs (e.g., on specific architectures)
can be added from the main configuration file by setting `ADD_MODULES`
for the specific job(s).
`REPOOWNER=<name>` sets the default GitHub owner (or organization) for all
dependency modules. Useful if you want to compile against a complete set
of dependencies forked into your private GitHub area.
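
A hypothetical setup file pulling these pieces together might look like the
following sketch (the included file `common` and the `REPOOWNER` value are
made up for illustration; the module versions are taken from the example
above):

```
# stable.set -- illustrative sketch only
# pull in a shared setup file found via SETUP_PATH
include common

MODULES=sncseq asyn

BASE=R3.15.7
ASYN=R4-34
SNCSEQ=R2-2-8

# build against forks under one GitHub owner/organization
REPOOWNER=my-github-org
```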
@@ -133,9 +200,7 @@ For any module mentioned as `foo` in the `MODULES` setting (and for `BASE`),
the following settings can be configured:
`FOO=<version>` Set version of the module that should be used. Must either
be a *tag* name (in that case the module is checked out into Travis' cache
system) or a *branch* name (in that case the module is always checked out
and recompiled as part of the job). [default: `master`]
be a *tag* name or a *branch* name. [default: `master`]
`FOO_REPONAME=<name>` Set the name of the remote repository as `<name>.git`.
[default is the slug in lower case: `foo`]
@@ -143,7 +208,8 @@ and recompiled as part of the job). [default: `master`]
`FOO_REPOOWNER=<name>` Set the name of the GitHub owner (or organization)
that the module repository can be found under.
`FOO_REPOURL="<url>"` Set the complete URL of the remote repository.
`FOO_REPOURL="<url>"` Set the complete URL of the remote repository. Useful
for dependencies that are not hosted on GitHub.
The default URL for the repository is pointing to GitHub, under
`$FOO_REPOOWNER` else `$REPOOWNER` else `epics-modules`,
@@ -174,12 +240,140 @@ Feel free to suggest more default settings using a Pull Request.
## Debugging
Setting `VV=1` in your `.travis.yml` configuration for a specific job
will run the job with high verbosity, printing every command as it is being
executed and switching the dependency builds to higher verbosity.
Setting `VV=1` in your service configuration (e.g., `.travis.yml`) for a
specific job will run the job with high verbosity,
printing every command as it is being executed and switching the dependency
builds to higher verbosity.
For debugging on your local machine, you may set `CACHEDIR` to change the
location for the dependency builds. [default is `$HOME/.cache`]
Set `PARALLEL_MAKE` to the number of parallel make jobs that you want your
build to use. [default is the number of CPUs on the runner]
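
For instance, a local debugging run could look roughly like this (cache
location and job count are arbitrary examples; whether a run outside a CI
service works depends on what `cue.py` detects on your machine):

```bash
# rough local-run sketch; values are examples only
export CACHEDIR=$HOME/ci-cache   # where dependency builds are kept
export PARALLEL_MAKE=4           # number of parallel make jobs
export VV=1                      # verbose, including dependency builds
python .ci/cue.py prepare
python .ci/cue.py build
python .ci/cue.py test
```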
Service specific options are described in the README files
in the service specific subdirectories:
- [Travis-CI README](travis/README.md)
- [AppVeyor README](appveyor/README.md)
## References: EPICS Modules Using ci-scripts
[EPICS Base](https://github.com/epics-base/epics-base) and its submodules
[pvData](https://github.com/epics-base/pvDataCPP),
[pvAccess](https://github.com/epics-base/pvAccessCPP),
[pva2pva](https://github.com/epics-base/pva2pva),
[PVXS](https://github.com/mdavidsaver/pvxs)
EPICS Modules:
[ASYN](https://github.com/epics-modules/asyn),
[devlib2](https://github.com/epics-modules/devlib2),
[ecmc](https://github.com/epics-modules/ecmc),
[gtest](https://github.com/epics-modules/gtest),
[ip](https://github.com/epics-modules/ip),
[lua](https://github.com/epics-modules/lua),
[MCoreUtils](https://github.com/epics-modules/MCoreUtils),
[modbus](https://github.com/epics-modules/modbus),
[motor](https://github.com/epics-modules/motor),
[OPCUA](https://github.com/ralphlange/opcua),
[PCAS](https://github.com/epics-modules/pcas),
[sscan](https://github.com/epics-modules/sscan),
[vac](https://github.com/epics-modules/vac)
ESS: [EtherCAT MC Motor Driver][ref.ethercatmc]
## Migration Hints
Look for changes in the example configuration files, and check how they
apply to your module.
If comments in the example have changed, copy them to your configuration
to always have up-to-date documentation in your file.
### 2.x to 3.x Migration
Update the script and test settings in your configuration to call the
new script, following the example file.
`python .ci/cue.py <action>`
#### AppVeyor
The `configuration:` setting options have changed; they are now
`default`, `static`, `debug` and `static-debug`.
MinGW builds are now using the `CMP: gcc` compiler setting.
Adding arguments to make is supported through the `EXTRA` .. `EXTRA5`
variables. Each variable value will be passed as one argument.
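
The test `.appveyor.yml` above shows the pattern; as a sketch, one matrix job
passing extra make arguments could look like:

```yaml
environment:
  matrix:
    - CMP: vs2017
      EXTRA: EXTRA_DEF="extra"
      EXTRA1: EXTRA1_DEF="extra 1"
```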
#### Travis
The new `BCFG` (build configuration) variable accepts the same options as
the AppVeyor `configuration:` setting. Replace any `STATIC=YES` settings with
`BCFG=static`.
Remove `bash` in the `homebrew:` section of `addons:`. There are no more
bash scripts.
MinGW builds (cross-builds using WINE as well as native builds on Windows)
are now using the `gcc` compiler setting.
Since `gcc` is the default, you can simply remove `compiler: mingw` lines.
For Windows, Travis offers native MinGW and Visual Studio 2017 compilers.
Use `os: windows` and set `compiler:` to `gcc` or `vs2017`
for those builds.
Chocolatey packages to be installed for the Windows jobs are set by adding
them to the environment variable `CHOCO`.
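
As a before/after sketch of a single cross-compile job line (taken from the
example diff above):

```yaml
# 2.x style
- env: SET=test01 WINE=32 TEST=NO STATIC=YES
  compiler: mingw

# 3.x style (gcc is the default compiler)
- env: SET=test01 WINE=32 TEST=NO BCFG=static
```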
## Frequently Asked Questions
##### How can I see what the dependency building jobs are actually doing?
Set `VV=1` in the configuration line of the job you are interested in.
This will make all builds (not just for your module) verbose.
##### How do I update my module to use a newer minor release of ci-scripts?
Update the submodule in `.ci` first, then change your CI configuration
(if needed) and commit both to your module. E.g., to update your Travis
setup to release 3.0.1 of ci-scripts:
```bash
cd .ci
git pull origin v3.0.1
cd -
git add .ci
# if needed:
edit .travis.yml # and/or .appveyor.yml
git add .travis.yml
git commit -m "Update ci-scripts submodule to v3.0.1"
```
Check the example configuration files inside ci-scripts (and their
changes) to see what might be needed and/or interesting to change
in your configuration.
Depending on the changes contained in the ci-scripts update, it might
be advisable to clear the CI caches after updating ci-scripts. E.g.,
a change in setting up EPICS Base will not be applied if Base is found
in the cache.
##### How do I add a dependency module only for a specific job?
Add the additional dependency in the main configuration file by setting
`ADD_MODULES` for the specific job(s).
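
For example, a single Travis job line could add one extra dependency (the
module chosen here, `ipac`, is just an illustration):

```yaml
- env: SET=test01 ADD_MODULES=ipac
```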
##### Why the name _cue_?
The noun _cue_ is defined as "_a signal (such as a word, phrase, or bit of
stage business) to a performer to begin a specific speech or action_".
(Merriam-Webster)
## Release Numbering of this Module
The module tries to apply [Semantic Versioning](https://semver.org/).
Major release numbers refer to the API, which is more or less defined
by the full configuration examples in the service specific
subdirectories.
@@ -187,12 +381,27 @@ If one of these files has to be changed for the existing configuration
options or important new options are being added, a new major release
is created.
Minor release numbers refer to bugfixes that should not require the
configuration inside a user module to be changed.
Minor release numbers refer to additions and enhancements that do not
require the configuration inside an existing user module to be changed.
(Unless for using a new feature.)
Again: using the git submodule mechanism to include these scripts means
that user modules always work with a fixed, frozen version.
I.e., developments in the ci-scripts repository will never break an\
I.e., developments in the ci-scripts repository will never break an
existing application.
These release numbering considerations are just a hint to assess the
risks when updating the submodule.
## License
This module is distributed subject to a Software License Agreement found
in file LICENSE that is included with this distribution.
<!-- Links -->
[badge.version]: https://badge.fury.io/gh/epics-base%2Fci-scripts.svg
[badge.travis]: https://travis-ci.org/epics-base/ci-scripts.svg?branch=master
[badge.appveyor]: https://ci.appveyor.com/api/projects/status/8b578alg974axvux?svg=true
[reddit.bash]: https://www.reddit.com/r/bash/comments/393oqv/why_is_the_version_of_bash_included_in_os_x_so_old/
[ref.ethercatmc]: https://github.com/EuropeanSpallationSource/m-epics-ethercatmc

add-msi-to-314.patch (new file, 851 lines)

@@ -0,0 +1,851 @@
diff --git a/config/RULES.Db b/config/RULES.Db
index b4946c7aa..90b76ed08 100644
--- a/config/RULES.Db
+++ b/config/RULES.Db
@@ -12,11 +12,7 @@
#
MAKEBPT = $(EPICS_BASE_HOST_BIN)/makeBpt$(EXE)
-ifndef MSI
-# Tool from R3.14 extensions bin, R3.13 extensions bin, or user path
-MSI = $(firstword $(wildcard $(EPICS_EXTENSIONS_HOST_BIN)/msi$(HOSTEXE) \
- $(EPICS_EXTENSIONS)/bin/$(HOST_ARCH)/msi$(HOSTEXE)) msi$(HOSTEXE))
-endif
+MSI = $(EPICS_BASE_HOST_BIN)/msi$(HOSTEXE)
DBEXPAND = $(EPICS_BASE_HOST_BIN)/dbExpand$(EXE)
DBST = dbst
diff --git a/configure/CONFIG_BASE b/configure/CONFIG_BASE
index 7ee5a5b89..9a9793093 100644
--- a/configure/CONFIG_BASE
+++ b/configure/CONFIG_BASE
@@ -112,8 +112,5 @@ ifndef DBST
DBST = dbst
endif
-ifndef MSI
-MSI = msi
-endif
-
+MSI = $(EPICS_BASE_HOST_BIN)/msi$(HOSTEXE)
diff --git a/src/dbtools/Makefile b/src/dbtools/Makefile
index 38ed52c9e..8655a5337 100644
--- a/src/dbtools/Makefile
+++ b/src/dbtools/Makefile
@@ -11,6 +11,11 @@ TOP=../..
include $(TOP)/configure/CONFIG
+PROD_HOST += msi
+
+msi_SRCS = msi.c
+msi_LIBS += Com
+
INC += dbLoadTemplate.h
INC += dbtoolsIocRegister.h
diff --git a/src/dbtools/msi.c b/src/dbtools/msi.c
new file mode 100644
index 000000000..525d4f25b
--- /dev/null
+++ b/src/dbtools/msi.c
@@ -0,0 +1,798 @@
+/*************************************************************************\
+* Copyright (c) 2002 The University of Chicago, as Operator of Argonne
+* National Laboratory.
+* Copyright (c) 2002 The Regents of the University of California, as
+* Operator of Los Alamos National Laboratory.
+* This file is distributed subject to a Software License Agreement found
+* in the file LICENSE that is included with this distribution.
+\*************************************************************************/
+/*msi - macro sunstitutions and include */
+
+/*
+ * Modification Log:
+ * -----------------
+ * .01 08DEC97 mrk Original version
+ */
+
+#include <stdlib.h>
+#include <stddef.h>
+#include <stdio.h>
+#include <string.h>
+#include <ctype.h>
+#include <errno.h>
+
+#include <epicsVersion.h>
+#include <dbDefs.h>
+#include <macLib.h>
+#include <ellLib.h>
+#include <errlog.h>
+
+#define MAX_BUFFER_SIZE 4096
+
+#if ((EPICS_VERSION <= 3) && (EPICS_REVISION <= 13))
+#define macEnvExpand(x) strdup(x)
+#endif
+
+/*Forward references to local routines*/
+static void usageExit(void);
+static void addMacroReplacements(MAC_HANDLE *macPvt,char *pval);
+static void makeSubstitutions(void *inputPvt,void *macPvt,char *templateName);
+
+/*Routines that read the template files */
+static void inputConstruct(void **inputPvt);
+static void inputDestruct(void *inputPvt);
+static void inputAddPath(void *inputPvt, char *pval);
+static void inputBegin(void *inputPvt,char *fileName);
+static char *inputNextLine(void *inputPvt);
+static void inputNewIncludeFile(void *inputPvt,char *name);
+static void inputErrPrint(void *inputPvt);
+
+/*Routines that read the substitution file */
+static void substituteDestruct(void *substitutePvt);
+static void substituteOpen(void **substitutePvt,char *substitutionName);
+static int substituteGetNextSet(void *substitutePvt,char **filename);
+static char *substituteGetReplacements(void *substitutePvt);
+
+/*Exit status*/
+static int exitStatus = 0;
+
+int opt_V = 0;
+
+
+int main(int argc,char **argv)
+{
+ void *inputPvt;
+ MAC_HANDLE *macPvt;
+ char *pval;
+ int narg;
+ char *substitutionName=0;
+ char *templateName=0;
+ int i;
+
+ inputConstruct(&inputPvt);
+ macCreateHandle(&macPvt,0);
+ macSuppressWarning(macPvt,1);
+ while((argc>1) && (argv[1][0] == '-')) {
+ narg = (strlen(argv[1])==2) ? 2 : 1;
+ pval = (narg==1) ? (argv[1]+2) : argv[2];
+ if(strncmp(argv[1],"-I",2)==0) {
+ inputAddPath(inputPvt,pval);
+ } else if(strncmp(argv[1],"-o",2)==0) {
+ if(freopen(pval,"w",stdout)==NULL) {
+ fprintf(stderr,"Can't open %s for writing: %s\n", pval, strerror(errno));
+ exit(1);
+ }
+ } else if(strncmp(argv[1],"-M",2)==0) {
+ addMacroReplacements(macPvt,pval);
+ } else if(strncmp(argv[1],"-S",2)==0) {
+ substitutionName = calloc(strlen(pval)+1,sizeof(char));
+ strcpy(substitutionName,pval);
+ } else if(strncmp(argv[1],"-V",2)==0) {
+ macSuppressWarning(macPvt,0);
+ opt_V = 1;
+ narg = 1; /* no argument for this option */
+ } else {
+ usageExit();
+ }
+ argc -= narg;
+ for(i=1; i<argc; i++) argv[i] = argv[i + narg];
+ }
+ if(argc>2) {
+ fprintf(stderr,"too many filename arguments\n");
+ usageExit();
+ }
+ if(argc==2) {
+ templateName = calloc(strlen(argv[1])+1,sizeof(char));
+ strcpy(templateName,argv[1]);
+ }
+ if(!substitutionName) {
+ makeSubstitutions(inputPvt,macPvt,templateName);
+ } else {
+ void *substitutePvt;
+ char *filename = 0;
+
+ substituteOpen(&substitutePvt,substitutionName);
+ while(substituteGetNextSet(substitutePvt,&filename)) {
+ if(templateName) filename = templateName;
+ if(!filename) {
+ fprintf(stderr,"no template file\n");
+ usageExit();
+ }
+ macPushScope(macPvt);
+ while((pval = substituteGetReplacements(substitutePvt))){
+ addMacroReplacements(macPvt,pval);
+ makeSubstitutions(inputPvt,macPvt,filename);
+ }
+ macPopScope(macPvt);
+ }
+ substituteDestruct(substitutePvt);
+ }
+ errlogFlush();
+ inputDestruct(inputPvt);
+ free((void *)templateName);
+ free((void *)substitutionName);
+ return(exitStatus);
+}
+
+void usageExit(void)
+{
+ fprintf(stderr,"usage: msi -V -opath -Ipath ... -Msub ... -Ssubfile template\n");
+ fprintf(stderr," Specifying path will replace the default '.'\n");
+ fprintf(stderr," stdin is used if template is not given\n");
+ exit(1);
+}
+
+static void addMacroReplacements(MAC_HANDLE *macPvt,char *pval)
+{
+ char **pairs;
+ long status;
+
+ status = macParseDefns(macPvt,pval,&pairs);
+ if(!status) {
+ fprintf(stderr,"Error macParseDefns error\n");
+ usageExit();
+ }
+ status = macInstallMacros(macPvt,pairs);
+ if(!status) {
+ fprintf(stderr,"Error macInstallMacros error\n");
+ usageExit();
+ }
+ free((void *)pairs);
+}
+
+typedef enum {cmdInclude,cmdSubstitute} cmdType;
+static const char *cmdNames[] = {"include","substitute"};
+static void makeSubstitutions(void *inputPvt,void *macPvt,char *templateName)
+{
+ char *input;
+ static char buffer[MAX_BUFFER_SIZE];
+ int n;
+ static int unexpWarned = 0;
+
+ inputBegin(inputPvt,templateName);
+ while((input = inputNextLine(inputPvt))) {
+ int expand=1;
+ char *p;
+ char *command = 0;
+
+ p = input;
+ /*skip whitespace at beginning of line*/
+ while(*p && (isspace(*p))) ++p;
+ /*Look for i or s */
+ if(*p && (*p=='i' || *p=='s')) command = p;
+ if(command) {
+ char *pstart;
+ char *pend;
+ char *copy;
+ int cmdind=-1;
+ int i;
+
+ for(i=0; i< NELEMENTS(cmdNames); i++) {
+ if(strstr(command,cmdNames[i])) {
+ cmdind = i;
+ }
+ }
+ if(cmdind<0) goto endif;
+ p = command + strlen(cmdNames[cmdind]);
+ /*skip whitespace after command*/
+ while(*p && (isspace(*p))) ++p;
+ /*Next character must be quote*/
+ if((*p==0) || (*p!='"')) goto endif;
+ pstart = ++p;
+ /*Look for end quote*/
+ while(*p && (*p!='"')) {
+ /*allow escape for embedded quote*/
+ if((*p=='\\') && *(p+1)=='"') {
+ p += 2; continue;
+ } else {
+ if(*p=='"') break;
+ }
+ ++p;
+ }
+ pend = p;
+ if(*p==0) goto endif;
+ /*skip quote and any trailing blanks*/
+ while(*++p==' ') ;
+ if(*p != '\n' && *p !=0) goto endif;
+ copy = calloc(pend-pstart+1,sizeof(char));
+ strncpy(copy,pstart,pend-pstart);
+ switch(cmdind) {
+ case cmdInclude:
+ inputNewIncludeFile(inputPvt,copy);
+ break;
+ case cmdSubstitute:
+ addMacroReplacements(macPvt,copy);
+ break;
+ default:
+ fprintf(stderr,"Logic Error: makeSubstitutions\n");
+ inputErrPrint(inputPvt);
+ exit(1);
+ }
+ free(copy);
+ expand = 0;
+ }
+endif:
+ if (expand) {
+ n = macExpandString(macPvt,input,buffer,MAX_BUFFER_SIZE-1);
+ fputs(buffer,stdout);
+ if (!unexpWarned && n<0) {
+ const char * pErrMsg = "Warning: Undefined macros present, use msi -V to list\n";
+ if ( opt_V ) {
+ exitStatus = 2;
+ pErrMsg = "Error: Undefined macros present\n";
+ }
+ fprintf( stderr, pErrMsg );
+ unexpWarned++;
+ }
+ }
+ }
+}
+
+typedef struct inputFile{
+ ELLNODE node;
+ char *filename;
+ FILE *fp;
+ int lineNum;
+}inputFile;
+
+typedef struct pathNode {
+ ELLNODE node;
+ char *directory;
+} pathNode;
+
+typedef struct inputData {
+ ELLLIST inputFileList;
+ ELLLIST pathList;
+ char inputBuffer[MAX_BUFFER_SIZE];
+}inputData;
+
+static void inputOpenFile(inputData *pinputData,char *filename);
+static void inputCloseFile(inputData *pinputData);
+static void inputCloseAllFiles(inputData *pinputData);
+
+static void inputConstruct(void **ppvt)
+{
+ inputData *pinputData;
+
+ pinputData = calloc(1,sizeof(inputData));
+ ellInit(&pinputData->inputFileList);
+ ellInit(&pinputData->pathList);
+ *ppvt = pinputData;
+}
+
+static void inputDestruct(void *pvt)
+{
+ inputData *pinputData = (inputData *)pvt;
+ pathNode *ppathNode;
+
+ inputCloseAllFiles(pinputData);
+ while((ppathNode = (pathNode *)ellFirst(&pinputData->pathList))) {
+ ellDelete(&pinputData->pathList,&ppathNode->node);
+ free((void *)ppathNode->directory);
+ free((void *)ppathNode);
+ }
+ free(pvt);
+}
+
+static void inputAddPath(void *pvt, char *path)
+{
+ inputData *pinputData = (inputData *)pvt;
+ ELLLIST *ppathList = &pinputData->pathList;
+ pathNode *ppathNode;
+ const char *pcolon;
+ const char *pdir;
+ int len;
+ int emptyName;
+
+ pdir = path;
+ /*an empty name at beginning, middle, or end means current directory*/
+ while(pdir && *pdir) {
+ emptyName = ((*pdir == ':') ? 1 : 0);
+ if(emptyName) ++pdir;
+ ppathNode = (pathNode *)calloc(1,sizeof(pathNode));
+ ellAdd(ppathList,&ppathNode->node);
+ if(!emptyName) {
+ pcolon = strchr(pdir,':');
+ len = (pcolon ? (pcolon - pdir) : strlen(pdir));
+ if(len>0) {
+ ppathNode->directory = (char *)calloc(len+1,sizeof(char));
+ strncpy(ppathNode->directory,pdir,len);
+ pdir = pcolon;
+ /*unless at end skip past first colon*/
+ if(pdir && *(pdir+1)!=0) ++pdir;
+ } else { /*must have been trailing : */
+ emptyName=1;
+ }
+ }
+ if(emptyName) {
+ ppathNode->directory = (char *)calloc(2,sizeof(char));
+ strcpy(ppathNode->directory,".");
+ }
+ }
+ return;
+}
+
+static void inputBegin(void *pvt,char *fileName)
+{
+ inputData *pinputData = (inputData *)pvt;
+
+ inputCloseAllFiles(pinputData);
+ inputOpenFile(pinputData,fileName);
+}
+
+static char *inputNextLine(void *pvt)
+{
+ inputData *pinputData = (inputData *)pvt;
+ inputFile *pinputFile;
+ char *pline;
+
+ while((pinputFile = (inputFile *)ellFirst(&pinputData->inputFileList))) {
+ pline = fgets(pinputData->inputBuffer,MAX_BUFFER_SIZE,pinputFile->fp);
+ if(pline) {
+ ++pinputFile->lineNum;
+ return(pline);
+ }
+ inputCloseFile(pinputData);
+ }
+ return(0);
+}
+
+static void inputNewIncludeFile(void *pvt,char *name)
+{
+ inputData *pinputData = (inputData *)pvt;
+
+ inputOpenFile(pinputData,name);
+}
+
+static void inputErrPrint(void *pvt)
+{
+ inputData *pinputData = (inputData *)pvt;
+ inputFile *pinputFile;
+
+ fprintf(stderr,"input: %s which is ",pinputData->inputBuffer);
+ pinputFile = (inputFile *)ellFirst(&pinputData->inputFileList);
+ while(pinputFile) {
+ fprintf(stderr,"line %d of ",pinputFile->lineNum);
+ if(pinputFile->filename) {
+ fprintf(stderr," file %s\n",pinputFile->filename);
+ } else {
+ fprintf(stderr,"stdin:\n");
+ }
+ pinputFile = (inputFile *)ellNext(&pinputFile->node);
+ if(pinputFile) {
+ fprintf(stderr," which is included from ");
+ } else {
+ fprintf(stderr,"\n");
+ }
+ }
+ fprintf(stderr,"\n");
+}
+
+static void inputOpenFile(inputData *pinputData,char *filename)
+{
+ ELLLIST *ppathList = &pinputData->pathList;
+ pathNode *ppathNode = 0;
+ inputFile *pinputFile;
+ char *fullname = 0;
+ FILE *fp = 0;
+
+ if(!filename) {
+ fp = stdin;
+ } else if((ellCount(ppathList)==0) || strchr(filename,'/')){
+ fp = fopen(filename,"r");
+ } else {
+ ppathNode = (pathNode *)ellFirst(ppathList);
+ while(ppathNode) {
+ fullname = calloc(strlen(filename)+strlen(ppathNode->directory) +2,
+ sizeof(char));
+ strcpy(fullname,ppathNode->directory);
+ strcat(fullname,"/");
+ strcat(fullname,filename);
+ fp = fopen(fullname,"r");
+ if(fp) break;
+ free((void *)fullname);
+ ppathNode = (pathNode *)ellNext(&ppathNode->node);
+ }
+ }
+ if(!fp) {
+ fprintf(stderr,"Could not open %s\n",filename);
+ inputErrPrint((void *)pinputData);
+ exit(1);
+ }
+ pinputFile = calloc(1,sizeof(inputFile));
+ if(ppathNode) {
+ pinputFile->filename = calloc(1,strlen(fullname)+1);
+ strcpy(pinputFile->filename,fullname);
+ free((void *)fullname);
+ } else if(filename) {
+ pinputFile->filename = calloc(1,strlen(filename)+1);
+ strcpy(pinputFile->filename,filename);
+ } else {
+ pinputFile->filename = calloc(1,strlen("stdin")+1);
+ strcpy(pinputFile->filename,"stdin");
+ }
+ pinputFile->fp = fp;
+ ellInsert(&pinputData->inputFileList,0,&pinputFile->node);
+}
+
+static void inputCloseFile(inputData *pinputData)
+{
+ inputFile *pinputFile;
+
+ pinputFile = (inputFile *)ellFirst(&pinputData->inputFileList);
+ if(!pinputFile) return;
+ ellDelete(&pinputData->inputFileList,&pinputFile->node);
+ if(fclose(pinputFile->fp))
+ fprintf(stderr,"fclose failed: file %s\n",pinputFile->filename);
+ free(pinputFile->filename);
+ free(pinputFile);
+}
+
+static void inputCloseAllFiles(inputData *pinputData)
+{
+ inputFile *pinputFile;
+
+ while((pinputFile=(inputFile *)ellFirst(&pinputData->inputFileList))){
+ inputCloseFile(pinputData);
+ }
+}
+
+/*start of code that handles substitution file*/
+typedef enum {
+ tokenLBrace,tokenRBrace,tokenSeparater,tokenString,tokenEOF
+}tokenType;
+
+typedef struct subFile {
+ char *substitutionName;
+ FILE *fp;
+ int lineNum;
+ char inputBuffer[MAX_BUFFER_SIZE];
+ char *pnextChar;
+ tokenType token;
+ char string[MAX_BUFFER_SIZE];
+} subFile;
+
+typedef struct patternNode {
+ ELLNODE node;
+ char *var;
+}patternNode;
+
+typedef struct subInfo {
+ subFile *psubFile;
+ int isFile;
+ char *filename;
+ int isPattern;
+ ELLLIST patternList;
+ size_t size;
+ size_t curLength;
+ char *macroReplacements;
+}subInfo;
+
+static char *subGetNextLine(subFile *psubFile);
+static tokenType subGetNextToken(subFile *psubFile);
+static void subFileErrPrint(subFile *psubFile,char * message);
+static void freeSubFile(subInfo *psubInfo);
+static void freePattern(subInfo *psubInfo);
+static void catMacroReplacements(subInfo *psubInfo,const char *value);
+
+void freeSubFile(subInfo *psubInfo)
+{
+ subFile *psubFile = psubInfo->psubFile;
+ if(psubFile->fp) {
+ if(fclose(psubFile->fp))
+ fprintf(stderr,"fclose failed on substitution file\n");
+ }
+ free((void *)psubFile);
+ free((void *)psubInfo->filename);
+ psubInfo->psubFile = 0;
+}
+
+void freePattern(subInfo *psubInfo)
+{
+ patternNode *ppatternNode;
+ while((ppatternNode = (patternNode *)ellFirst(&psubInfo->patternList))) {
+ ellDelete(&psubInfo->patternList,&ppatternNode->node);
+ free(ppatternNode->var);
+ free(ppatternNode);
+ }
+ psubInfo->isPattern = 0;
+}
+
+static void substituteDestruct(void *pvt)
+{
+ subInfo *psubInfo = (subInfo *)pvt;
+
+ freeSubFile(psubInfo);
+ freePattern(psubInfo);
+ free((void *)psubInfo);
+ return;
+}
+
+static void substituteOpen(void **ppvt,char *substitutionName)
+{
+ subInfo *psubInfo;
+ subFile *psubFile;
+ FILE *fp;
+
+ psubInfo = calloc(1,sizeof(subInfo));
+ *ppvt = (void *)psubInfo;
+ psubFile = calloc(1,sizeof(subFile));
+ psubInfo->psubFile = psubFile;
+ ellInit(&psubInfo->patternList);
+ fp = fopen(substitutionName,"r");
+ if(!fp) {
+ fprintf(stderr,"Could not open %s\n",substitutionName);
+ exit(1);
+ }
+ psubFile->substitutionName = substitutionName;
+ psubFile->fp = fp;
+ psubFile->lineNum = 0;
+ psubFile->inputBuffer[0] = 0;
+ psubFile->pnextChar = &psubFile->inputBuffer[0];
+ subGetNextToken(psubFile);
+ return;
+}
+
+static int substituteGetNextSet(void *pvt,char **filename)
+{
+ subInfo *psubInfo = (subInfo *)pvt;
+ subFile *psubFile = psubInfo->psubFile;
+ patternNode *ppatternNode;
+
+ *filename = 0;
+ while(psubFile->token==tokenSeparater) subGetNextToken(psubFile);
+ if(psubFile->token==tokenEOF) return(0);
+ if(psubFile->token==tokenString && strcmp(psubFile->string,"file")==0) {
+ psubInfo->isFile = 1;
+ if(subGetNextToken(psubFile)!=tokenString) {
+ subFileErrPrint(psubFile,"Expecting filename");
+ exit(1);
+ }
+ freePattern(psubInfo);
+ free((void *)psubInfo->filename);
+ if(psubFile->string[0]=='"'&&psubFile->string[strlen(psubFile->string)-1]=='"') {
+ psubFile->string[strlen(psubFile->string)-1]='\0';
+ psubInfo->filename = macEnvExpand(psubFile->string+1);
+ }
+ else {
+ psubInfo->filename = macEnvExpand(psubFile->string);
+ }
+ while(subGetNextToken(psubFile)==tokenSeparater);
+ if(psubFile->token!=tokenLBrace) {
+ subFileErrPrint(psubFile,"Expecting {");
+ exit(1);
+ }
+ subGetNextToken(psubFile);
+ }
+ *filename = psubInfo->filename;
+ while(psubFile->token==tokenSeparater) subGetNextToken(psubFile);
+ if(psubFile->token==tokenLBrace) return(1);
+ if(psubFile->token==tokenRBrace) return(0);
+ if(psubFile->token!=tokenString
+ || strcmp(psubFile->string,"pattern")!=0) {
+ subFileErrPrint(psubFile,"Expecting pattern");
+ exit(1);
+ }
+ freePattern(psubInfo);
+ psubInfo->isPattern = 1;
+ while(subGetNextToken(psubFile)==tokenSeparater);
+ if(psubFile->token!=tokenLBrace) {
+ subFileErrPrint(psubFile,"Expecting {");
+ exit(1);
+ }
+ while(1) {
+ while(subGetNextToken(psubFile)==tokenSeparater);
+ if(psubFile->token!=tokenString) break;
+ ppatternNode = calloc(1,sizeof(patternNode));
+ ellAdd(&psubInfo->patternList,&ppatternNode->node);
+ ppatternNode->var = calloc(strlen(psubFile->string)+1,sizeof(char));
+ strcpy(ppatternNode->var,psubFile->string);
+ }
+ if(psubFile->token!=tokenRBrace) {
+ subFileErrPrint(psubFile,"Expecting }");
+ exit(1);
+ }
+ subGetNextToken(psubFile);
+ return(1);
+}
+
+static char *substituteGetReplacements(void *pvt)
+{
+ subInfo *psubInfo = (subInfo *)pvt;
+ subFile *psubFile = psubInfo->psubFile;
+ patternNode *ppatternNode;
+
+ if(psubInfo->macroReplacements) psubInfo->macroReplacements[0] = 0;
+ psubInfo->curLength = 0;
+ while(psubFile->token==tokenSeparater) subGetNextToken(psubFile);
+ if(psubFile->token==tokenRBrace && psubInfo->isFile) {
+ psubInfo->isFile = 0;
+ free((void *)psubInfo->filename);
+ psubInfo->filename = 0;
+ freePattern(psubInfo);
+ subGetNextToken(psubFile);
+ return(0);
+ }
+ if(psubFile->token==tokenEOF) return(0);
+ if(psubFile->token!=tokenLBrace) return(0);
+ if(psubInfo->isPattern) {
+ int gotFirstPattern = 0;
+
+ while(subGetNextToken(psubFile)==tokenSeparater);
+ ppatternNode = (patternNode *)ellFirst(&psubInfo->patternList);
+ while(1) {
+ if(psubFile->token==tokenRBrace) {
+ if(ppatternNode)
+ subFileErrPrint(psubFile,"less values than patterns");
+ subGetNextToken(psubFile);
+ return(psubInfo->macroReplacements);
+ }
+ if(psubFile->token!=tokenString) {
+ subFileErrPrint(psubFile,"Illegal token");
+ exit(-1);
+ }
+ if(gotFirstPattern) catMacroReplacements(psubInfo,",");
+ gotFirstPattern = 1;
+ if(ppatternNode) {
+ catMacroReplacements(psubInfo,ppatternNode->var);
+ catMacroReplacements(psubInfo,"=");
+ catMacroReplacements(psubInfo,psubFile->string);
+ ppatternNode = (patternNode *)ellNext(&ppatternNode->node);
+ } else {
+ subFileErrPrint(psubFile,"more values than patterns");
+ }
+ while(subGetNextToken(psubFile)==tokenSeparater);
+ }
+ } else while(1) {
+ switch(subGetNextToken(psubFile)) {
+ case tokenRBrace:
+ subGetNextToken(psubFile);
+ return(psubInfo->macroReplacements);
+ case tokenSeparater:
+ catMacroReplacements(psubInfo,",");
+ break;
+ case tokenString:
+ catMacroReplacements(psubInfo,psubFile->string);
+ break;
+ default:
+ subFileErrPrint(psubFile,"Illegal token");
+ exit(1);
+ }
+ }
+}
+
+static char *subGetNextLine(subFile *psubFile)
+{
+ char *pline;
+
+ pline = fgets(psubFile->inputBuffer,MAX_BUFFER_SIZE,psubFile->fp);
+ ++psubFile->lineNum;
+ while(pline && psubFile->inputBuffer[0]=='#') {
+ pline = fgets(psubFile->inputBuffer,MAX_BUFFER_SIZE,psubFile->fp);
+ ++psubFile->lineNum;
+ }
+ if(!pline) {
+ psubFile->token = tokenEOF;
+ psubFile->inputBuffer[0] = 0;
+ psubFile->pnextChar = 0;
+ return(0);
+ }
+ psubFile->pnextChar = &psubFile->inputBuffer[0];
+ return(&psubFile->inputBuffer[0]);
+}
+
+static void subFileErrPrint(subFile *psubFile,char * message)
+{
+ fprintf(stderr,"substitution file %s line %d: %s",
+ psubFile->substitutionName,
+ psubFile->lineNum,psubFile->inputBuffer);
+ fprintf(stderr,"%s\n",message);
+}
+
+
+static tokenType subGetNextToken(subFile *psubFile)
+{
+ char *p;
+ char *pto;
+
+ p = psubFile->pnextChar;
+ if(!p) { psubFile->token = tokenEOF; return(tokenEOF);}
+ if(*p==0 || *p=='\n' || *p=='#') {
+ p = subGetNextLine(psubFile);
+ if(!p) { psubFile->token = tokenEOF; return(tokenEOF);}
+ else { psubFile->token = tokenSeparater; return(tokenSeparater);}
+ }
+ while(isspace(*p)) p++;
+ if(*p=='{') {
+ psubFile->token = tokenLBrace;
+ psubFile->pnextChar = ++p;
+ return(tokenLBrace);
+ }
+ if(*p=='}') {
+ psubFile->token = tokenRBrace;
+ psubFile->pnextChar = ++p;
+ return(tokenRBrace);
+ }
+ if(*p==0 || isspace(*p) || *p==',') {
+ while(isspace(*p) || *p==',') p++;
+ psubFile->token = tokenSeparater;
+ psubFile->pnextChar = p;
+ return(tokenSeparater);
+ }
+ /*now handle quoted strings*/
+ if(*p=='"') {
+ pto = &psubFile->string[0];
+ *pto++ = *p++;
+ while(*p!='"') {
+ if(*p==0 || *p=='\n') {
+ subFileErrPrint(psubFile,"Strings must be on single line\n");
+ exit(1);
+ }
+ /*allow escape for embedded quote*/
+ if((*p=='\\') && *(p+1)=='"') {
+ *pto++ = *p++;
+ *pto++ = *p++;
+ continue;
+ }
+ *pto++ = *p++;
+ }
+ *pto++ = *p++;
+ psubFile->pnextChar = p;
+ *pto = 0;
+ psubFile->token = tokenString;
+ return(tokenString);
+ }
+ /*Now take anything up to next non String token and not space*/
+ pto = &psubFile->string[0];
+ while(!isspace(*p) && (strspn(p,"\",{}")==0)) *pto++ = *p++;
+ *pto = 0;
+ psubFile->pnextChar = p;
+ psubFile->token = tokenString;
+ return(tokenString);
+}
+
+static void catMacroReplacements(subInfo *psubInfo,const char *value)
+{
+ size_t len = strlen(value);
+
+ if(psubInfo->size <= (psubInfo->curLength + len)) {
+ size_t newsize = psubInfo->size + MAX_BUFFER_SIZE;
+ char *newbuf;
+
+ if(newsize <= psubInfo->curLength + len)
+ newsize = psubInfo->curLength + len + 1;
+ newbuf = calloc(1,newsize);
+ if(!newbuf) {
+ fprintf(stderr,"calloc failed for size %Zu\n",newsize);
+ exit(1);
+ }
+ if(psubInfo->macroReplacements) {
+ memcpy(newbuf,psubInfo->macroReplacements,psubInfo->curLength);
+ free(psubInfo->macroReplacements);
+ }
+ psubInfo->size = newsize;
+ psubInfo->macroReplacements = newbuf;
+ }
+ strcat(psubInfo->macroReplacements,value);
+ psubInfo->curLength += len;
+}

@@ -0,0 +1,177 @@
# .appveyor.yml for use with EPICS Base ci-scripts
# (see: https://github.com/epics-base/ci-scripts)
# This is YAML - indentation levels are crucial
#---------------------------------#
# build cache #
#---------------------------------#
# The AppVeyor cache allowance is way too small (1GB per account across all projects, branches and jobs)
# to be used for the dependency builds.
cache:
- C:\Users\appveyor\.tools
#---------------------------------#
# repository cloning #
#---------------------------------#
# Called at very beginning, before repo cloning
init:
# Set autocrlf to make batch files work
- git config --global core.autocrlf true
# print the connection info for RDP connections (see 'debugging' below)
#- ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
# Set clone depth (do not fetch complete history)
clone_depth: 50
# Skipping commits affecting only specific files
skip_commits:
files:
- 'documentation/*'
- 'templates/*'
- '**/*.html'
- '**/*.md'
- '.travis.yml'
#---------------------------------#
# additional packages #
#---------------------------------#
install:
# fetch submodules (like ci-scripts)
- cmd: git submodule update --init --recursive
# for the sequencer
- cinst re2c
#---------------------------------#
# build matrix configuration #
#---------------------------------#
# Since dependencies cannot be cached and AppVeyor only grants a single builder VM, all jobs
# are executed sequentially, each one taking 10-15 minutes.
# Consider this when defining your build matrix. (A full matrix build takes more than 8 hours.)
# Default build worker image
image: Visual Studio 2015
# Build Configurations: shared/static, optimized/debug
configuration:
- default
- static
- debug
- static-debug
# Environment variables
# Well-known variables to use
# CMP compiler to use ('gcc' for native MinGW, 'vs...' for Visual Studio)
# SET source setup file
# ADD_MODULES extra modules (for a specific job)
# TEST set to NO to skip running the tests (default: YES)
# VV set VV=1 to make build scripts verbose (default: unset)
# EXTRA content will be added to make command line
# EXTRA1..5 more additional arguments for the make command
# (one argument per variable)
# Usually from setup files, but may be specified or overridden
# on a job line
# MODULES list of dependency modules
# BASE branch or release tag name of the EPICS Base to use
# <MODULE> branch or release tag for a specific module
# ... see README for setup file syntax description
# AppVeyor specific
# APPVEYOR_BUILD_WORKER_IMAGE run job using specified VM image
# (not the one from the image: line above)
environment:
# common / default variables for all jobs
SETUP_PATH: .ci-local:.ci
BASE: 7.0
matrix:
- CMP: vs2019
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
- CMP: gcc
- CMP: vs2019
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
- CMP: vs2019
BASE: 3.15
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
- CMP: vs2019
BASE: 3.14
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
- CMP: vs2017
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
- CMP: vs2015
- CMP: vs2013
- CMP: vs2012
- CMP: vs2010
- CMP: vs2008
# Platform: processor architecture
platform:
- x86
- x64
# Matrix configuration: exclude sets of jobs
matrix:
exclude:
# VS2012 and older installs don't have the 64 bit compiler
- platform: x64
CMP: vs2012
- platform: x64
CMP: vs2010
- platform: x64
CMP: vs2008
# Exclude more jobs to reduce build time
# E.g., skip 32-bit for newer compilers
#- platform: x86
# CMP: vs2019
#- platform: x86
# CMP: vs2017
#---------------------------------#
# building & testing #
#---------------------------------#
build_script:
- cmd: python .ci/cue.py prepare
- cmd: python .ci/cue.py build
test_script:
- cmd: python .ci/cue.py test
on_finish:
- ps: Get-ChildItem *.tap -Recurse -Force | % { Push-AppveyorArtifact $_.FullName -FileName $_.Name }
- cmd: python .ci/cue.py test-results
#---------------------------------#
# debugging #
#---------------------------------#
## if you want to connect by remote desktop to a failed build, uncomment these lines
## note that you will need to connect within the usual build timeout limit (60 minutes)
## so you may want to adjust the build matrix above to just build the one of interest
# to print the RDP connection info
# uncomment the appropriate line in the init: section above
# block a failed build (until the watchdog barks)
#on_failure:
# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
#---------------------------------#
# notifications #
#---------------------------------#
notifications:
- provider: Email
to:
- me@example.com
on_build_success: false
- provider: GitHubPullRequest

@@ -0,0 +1,70 @@
# .appveyor.yml for use with EPICS Base ci-scripts
# (see: https://github.com/epics-base/ci-scripts)
cache:
- C:\Users\appveyor\.tools
init:
- git config --global core.autocrlf true
clone_depth: 50
skip_commits:
files:
- 'documentation/*'
- 'templates/*'
- '**/*.html'
- '**/*.md'
- '.travis.yml'
install:
- cmd: git submodule update --init --recursive
image: Visual Studio 2019
# Build Configurations: shared/static, optimized/debug
configuration:
- default
# - static
- debug
# - static-debug
environment:
# common / default variables for all jobs
SETUP_PATH: .ci-local:.ci
matrix:
- CMP: vs2019
BASE: 7.0
- CMP: vs2019
BASE: 3.15
# Platform: processor architecture
platform:
# - x86
- x64
# Matrix configuration: exclude sets of jobs
matrix:
exclude:
# VS2012 and older installs don't have the 64 bit compiler
- platform: x64
CMP: vs2012
- platform: x64
CMP: vs2010
- platform: x64
CMP: vs2008
build_script:
- cmd: python .ci/cue.py prepare
- cmd: python .ci/cue.py build
test_script:
- cmd: python .ci/cue.py test
on_finish:
- ps: Get-ChildItem *.tap -Recurse -Force | % { Push-AppveyorArtifact $_.FullName -FileName $_.Name }
- cmd: python .ci/cue.py test-results
notifications:
- provider: GitHubPullRequest

appveyor/README.md

@@ -0,0 +1,115 @@
# AppVeyor Scripts for EPICS Modules
## Features
- One parallel runner (all builds are sequential)
- Use different compilers (Visual Studio, gcc/MinGW)
- Use different Visual Studio versions: \
2008, 2010, 2012, 2013, 2015, 2017, 2019
- Compile for Windows 32bit and 64bit
- No useful caching available.
## How to Use these Scripts
1. Get an account on [AppVeyor](https://www.appveyor.com/), connect
it to your GitHub account and activate your support module's
repository. For more details, please see below and refer to the
[AppVeyor documentation](https://www.appveyor.com/docs/).
2. Add the ci-scripts repository as a Git Submodule
(see [README](../README.md) one level above).
3. Add settings files defining which dependencies in which versions
you want to build against
(see [README](../README.md) one level above).
4. Create an AppVeyor configuration by copying one of the examples into
the root directory of your module.
```
$ cp .ci/appveyor/.appveyor.yml.example-full .appveyor.yml
```
5. Edit the `.appveyor.yml` configuration to include the jobs you want
AppVeyor to run.
AppVeyor automatically creates a build matrix with the following axes:
1. `configuration:` \
Select shared (DLL) or static as well as optimized or debug builds. \
Default: `shared-optimized`
2. `platform:` \
Select 32bit or 64bit processor architecture.
3. `environment: / matrix:` \
List of environment variable settings. Each list element (starting with
a dash) is one step on the axis of the build matrix. \
Set `CMP` to select the compiler: `gcc` for the native
[MinGW](http://mingw-w64.org/) GNU compiler, `vs2008` ...`vs2019`
(options listed above) for the Microsoft Visual Studio compilers.
Your builds will take a long time. \
AppVeyor only grants a single parallel runner VM - all jobs of the matrix
are executed sequentially. AppVeyor also does not provide a usable cache
mechanism to retain dependency artifacts across builds.
Each job will take between 6 and 15 minutes, plus testing time, on every build.
The `matrix: / exclude:` setting can be used to reduce the number of
jobs. Check the [AppVeyor docs][appveyor.doc.matrix]
for more ways to reduce the build matrix size. \
E.g., you can avoid creating matrix axes for `configuration:`
and `platform:` by moving these settings into the job lines
under `environment: / matrix:` (see the sketch after this list).
6. Push your changes and check
[ci.appveyor.com](https://ci.appveyor.com/) for your build results.
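As a sketch of the matrix-trimming idea from step 5 (an illustration, not one of
the shipped example files): AppVeyor exposes the `configuration:` and `platform:`
axes to the build as the `CONFIGURATION` and `PLATFORM` environment variables
(which is also what the cue.py context detection reads), so a job line under
`environment: / matrix:` can pin them directly instead of multiplying the matrix.
The concrete values below are illustrative.
```
# hypothetical trimmed matrix: no separate configuration/platform axes
environment:
  SETUP_PATH: .ci-local:.ci
  BASE: 7.0
  matrix:
    - CMP: vs2019
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
      CONFIGURATION: default
      PLATFORM: x64
    - CMP: gcc
      CONFIGURATION: static
      PLATFORM: x86
```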
## GitHub / AppVeyor Integration and Authentication
### Security
Enabling Two-Factor-Authentication (2FA) is always a good idea, for all
your web based services, including GitHub and AppVeyor. \
Get an app for your phone (Authy works fine for me, but there are plenty of alternatives),
and your phone will generate one-time passwords to verify your identity
to the service if required (e.g., when logging in from a new device).
### Authentication
You can use different ways and services to authenticate when you log into
your AppVeyor account. The easiest way - at least when you're using the
service with repositories on GitHub - is to use GitHub authentication.
### GitHub Integration
AppVeyor offers two ways to integrate with GitHub: through a GitHub
application or through an OAuth application. GitHub applications use
the newer API, allow finer-grained tuning of access rights, and are
preferred.
The differences are mostly visible when you work with repositories under
organizational GitHub accounts: Using OAuth, AppVeyor always has the full
rights of your personal GitHub account.
GitHub applications, on the other hand, have a separate instance and
configuration for every organizational account you use on GitHub.
### Enabling Builds for your Repository
On the 'Projects' tab of your AppVeyor web interface, create a new project.
If the repository is not listed on the project creation page,
verify the Integration settings. Most of the relevant configuration
is taken from GitHub and has to be set up there.
### AppVeyor Account Sharing
You can always invite other AppVeyor users to have access to an AppVeyor
account, forming a team. Such additional shared accounts are a way to make
the AppVeyor limits (e.g., one parallel builder per account) more manageable.
## Known Issues
#### Build Worker Images
The AppVeyor documentation on build worker images doesn't seem to fully
describe the way things are handled internally.
The tested and recommended way of defining the build worker image reproducibly
is shown in the example configuration files (and sketched below):
- Set the default image using the `image:` tag.
- Override the image for specific jobs by setting the
`APPVEYOR_BUILD_WORKER_IMAGE` environment variable.
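A minimal fragment following that pattern (mirroring the example configurations
above; the image names and compilers are illustrative):
```
image: Visual Studio 2015        # default worker image for all jobs
environment:
  matrix:
    - CMP: vs2019                # this job overrides the default image
      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
    - CMP: vs2015                # this job runs on the default image
```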
<!-- Links -->
[appveyor.doc.matrix]: https://www.appveyor.com/docs/build-configuration/#build-matrix

@@ -30,7 +30,7 @@
#SNCSEQ = $(MODULES)/seq-ver
# EPICS_BASE should appear last so earlier modules can override stuff:
EPICS_BASE = /home/ralph/work/EPICS/V3/base-3.15.6
EPICS_BASE = /path/to/base/must/be/set/in/a/RELEASE.local/file
# Set RULES here if you want to use build rules from somewhere
# other than EPICS_BASE:

cue-test.py

@@ -0,0 +1,831 @@
#!/usr/bin/env python
"""Module ci-scripts unit tests
"""
# Set SET=test00 in the environment to run the tests in this script;
# all other jobs are started as compile jobs
from __future__ import print_function
import sys, os, shutil, fileinput
import distutils.util
import re
import subprocess as sp
import unittest
import logging
from argparse import Namespace
builddir = os.getcwd()
# Detect basic context (service, os)
if 'TRAVIS' in os.environ:
ci_service = 'travis'
ci_os = os.environ['TRAVIS_OS_NAME']
if 'APPVEYOR' in os.environ:
ci_service = 'appveyor'
if re.match(r'^Visual', os.environ['APPVEYOR_BUILD_WORKER_IMAGE']):
ci_os = 'windows'
elif re.match(r'^Ubuntu', os.environ['APPVEYOR_BUILD_WORKER_IMAGE']):
ci_os = 'linux'
elif re.match(r'^macOS', os.environ['APPVEYOR_BUILD_WORKER_IMAGE']):
ci_os = 'osx'
def find_in_file(regex, filename):
file = open(filename, "r")
for line in file:
if re.search(regex, line):
return True
return False
def getStringIO():
if sys.version_info > (3, 0):
import io
return io.StringIO()
else:
import StringIO
return StringIO.StringIO()
sys.path.append('.')
import cue
# we're working with tags (detached heads) a lot: suppress advice
cue.call_git(['config', '--global', 'advice.detachedHead', 'false'])
class TestSourceSet(unittest.TestCase):
def setUp(self):
os.environ['SETUP_PATH'] = '.:appveyor'
if 'BASE' in os.environ:
del os.environ['BASE']
cue.clear_lists()
os.chdir(builddir)
def test_EmptySetupDirsPath(self):
del os.environ['SETUP_PATH']
self.assertRaisesRegexp(NameError, '\(SETUP_PATH\) is empty', cue.source_set, 'test01')
def test_InvalidSetupName(self):
self.assertRaisesRegexp(NameError, 'does not exist in SETUP_PATH', cue.source_set, 'xxdoesnotexistxx')
def test_ValidSetupName(self):
capturedOutput = getStringIO()
sys.stdout = capturedOutput
cue.source_set('test01')
sys.stdout = sys.__stdout__
self.assertEqual(cue.setup['BASE'], '7.0', 'BASE was not set to \'7.0\'')
def test_SetupDoesNotOverridePreset(self):
os.environ['BASE'] = 'foo'
capturedOutput = getStringIO()
sys.stdout = capturedOutput
cue.source_set('test01')
sys.stdout = sys.__stdout__
self.assertEqual(cue.setup['BASE'], 'foo',
'Preset BASE was overridden by test01 setup (expected \'foo\' got {0})'
.format(cue.setup['BASE']))
def test_IncludeSetupFirstSetWins(self):
captured_output = getStringIO()
sys.stdout = captured_output
cue.source_set('test02')
sys.stdout = sys.__stdout__
self.assertEqual(cue.setup['BASE'], 'foo',
'BASE set in test02 was overridden by test01 setup (expected \'foo\' got {0})'
.format(cue.setup['BASE']))
self.assertEqual(cue.setup['FOO'], 'bar', 'Setting of single word does not work')
self.assertEqual(cue.setup['FOO2'], 'bar bar2', 'Setting of multiple words does not work')
self.assertEqual(cue.setup['FOO3'], 'bar bar2', 'Indented setting of multiple words does not work')
self.assertEqual(cue.setup['SNCSEQ'], 'R2-2-8', 'Setup test01 was not included')
def test_DoubleIncludeGetsIgnored(self):
capturedOutput = getStringIO()
sys.stdout = capturedOutput
cue.source_set('test03')
sys.stdout = sys.__stdout__
self.assertRegexpMatches(capturedOutput.getvalue(), 'Ignoring already included setup file')
class TestUpdateReleaseLocal(unittest.TestCase):
release_local = os.path.join(cue.cachedir, 'RELEASE.local')
def setUp(self):
if os.path.exists(self.release_local):
os.remove(self.release_local)
os.chdir(builddir)
def test_SetModule(self):
cue.update_release_local('MOD1', '/foo/bar')
found = 0
for line in fileinput.input(self.release_local, inplace=1):
if 'MOD1=' in line:
self.assertEqual(line.strip(), 'MOD1=/foo/bar', 'MOD1 not set correctly')
found += 1
fileinput.close()
self.assertEqual(found, 1, 'MOD1 not written once to RELEASE.local (found {0})'.format(found))
def test_SetBaseAndMultipleModules(self):
cue.update_release_local('EPICS_BASE', '/bar/foo')
cue.update_release_local('MOD1', '/foo/bar')
cue.update_release_local('MOD2', '/foo/bar2')
cue.update_release_local('MOD1', '/foo/bar1')
found = {}
foundat = {}
for line in fileinput.input(self.release_local, inplace=1):
if 'MOD1=' in line:
self.assertEqual(line.strip(), 'MOD1=/foo/bar1',
'MOD1 not set correctly (expected \'MOD1=/foo/bar1\' found \'{0}\')'
.format(line))
if 'mod1' in found:
found['mod1'] += 1
else:
found['mod1'] = 1
foundat['mod1'] = fileinput.filelineno()
if 'MOD2=' in line:
self.assertEqual(line.strip(), 'MOD2=/foo/bar2',
'MOD2 not set correctly (expected \'MOD2=/foo/bar2\' found \'{0}\')'
.format(line))
if 'mod2' in found:
found['mod2'] += 1
else:
found['mod2'] = 1
foundat['mod2'] = fileinput.filelineno()
if 'EPICS_BASE=' in line:
self.assertEqual(line.strip(), 'EPICS_BASE=/bar/foo',
'EPICS_BASE not set correctly (expected \'EPICS_BASE=/bar/foo\' found \'{0}\')'
.format(line))
if 'base' in found:
found['base'] += 1
else:
found['base'] = 1
foundat['base'] = fileinput.filelineno()
fileinput.close()
self.assertEqual(found['mod1'], 1,
'MOD1 does not appear once in RELEASE.local (found {0})'.format(found['mod1']))
self.assertEqual(found['mod2'], 1,
'MOD2 does not appear once in RELEASE.local (found {0})'.format(found['mod2']))
self.assertEqual(found['base'], 1,
'EPICS_BASE does not appear once in RELEASE.local (found {0})'.format(found['base']))
self.assertGreater(foundat['base'], foundat['mod2'],
'EPICS_BASE (line {0}) appears before MOD2 (line {1})'
.format(foundat['base'], foundat['mod2']))
self.assertGreater(foundat['mod2'], foundat['mod1'],
'MOD2 (line {0}) appears before MOD1 (line {1})'.format(foundat['mod2'], foundat['mod1']))
class TestAddDependencyUpToDateCheck(unittest.TestCase):
hash_3_15_6 = "ce7943fb44beb22b453ddcc0bda5398fadf72096"
location = os.path.join(cue.cachedir, 'base-R3.15.6')
licensefile = os.path.join(location, 'LICENSE')
checked_file = os.path.join(location, 'checked_out')
release_file = os.path.join(location, 'configure', 'RELEASE')
def setUp(self):
os.environ['SETUP_PATH'] = '.:appveyor'
if os.path.exists(self.location):
shutil.rmtree(self.location, onerror=cue.remove_readonly)
cue.clear_lists()
os.chdir(builddir)
cue.source_set('defaults')
cue.complete_setup('BASE')
def test_MissingDependency(self):
cue.setup['BASE'] = 'R3.15.6'
cue.add_dependency('BASE')
self.assertTrue(os.path.exists(self.licensefile), 'Missing dependency was not checked out')
self.assertTrue(os.path.exists(self.checked_file), 'Checked-out commit marker was not written')
with open(self.checked_file, 'r') as bfile:
checked_out = bfile.read().strip()
bfile.close()
self.assertEqual(checked_out, self.hash_3_15_6,
'Wrong commit of dependency checked out (expected=\"{0}\" found=\"{1}\")'
.format(self.hash_3_15_6, checked_out))
self.assertFalse(find_in_file('include \$\(TOP\)/../RELEASE.local', self.release_file),
'RELEASE in Base includes TOP/../RELEASE.local')
def test_UpToDateDependency(self):
cue.setup['BASE'] = 'R3.15.6'
cue.add_dependency('BASE')
os.remove(self.licensefile)
cue.add_dependency('BASE')
self.assertFalse(os.path.exists(self.licensefile), 'Check out on top of existing up-to-date dependency')
def test_OutdatedDependency(self):
cue.setup['BASE'] = 'R3.15.6'
cue.add_dependency('BASE')
os.remove(self.licensefile)
with open(self.checked_file, "w") as fout:
print('XXX not the right hash XXX', file=fout)
fout.close()
cue.add_dependency('BASE')
self.assertTrue(os.path.exists(self.licensefile), 'No check-out on top of out-of-date dependency')
with open(self.checked_file, 'r') as bfile:
checked_out = bfile.read().strip()
bfile.close()
self.assertEqual(checked_out, self.hash_3_15_6,
"Wrong commit of dependency checked out (expected='{0}' found='{1}')"
.format(self.hash_3_15_6, checked_out))
def is_shallow_repo(place):
check = sp.check_output(['git', 'rev-parse', '--is-shallow-repository'], cwd=place).strip().decode('ascii')
if check == '--is-shallow-repository':
if os.path.exists(os.path.join(place, '.git', 'shallow')):
check = 'true'
else:
check = 'false'
return check == 'true'
class TestAddDependencyOptions(unittest.TestCase):
location = os.path.join(cue.cachedir, 'mcoreutils-master')
testfile = os.path.join(location, '.ci', 'LICENSE')
def setUp(self):
os.environ['SETUP_PATH'] = '.'
if os.path.exists(cue.cachedir):
shutil.rmtree(cue.cachedir, onerror=cue.remove_readonly)
cue.clear_lists()
cue.detect_context()
cue.source_set('defaults')
cue.complete_setup('MCoreUtils')
cue.setup['MCoreUtils'] = 'master'
def test_Default(self):
cue.add_dependency('MCoreUtils')
self.assertTrue(os.path.exists(self.testfile),
'Submodule (.ci) not checked out recursively (requested: default=YES)')
self.assertTrue(is_shallow_repo(self.location),
'Module not checked out shallow (requested: default=5)')
def test_SetRecursiveNo(self):
cue.setup['MCoreUtils_RECURSIVE'] = 'NO'
cue.add_dependency('MCoreUtils')
self.assertFalse(os.path.exists(self.testfile), 'Submodule (.ci) checked out recursively')
def test_SetDepthZero(self):
cue.setup['MCoreUtils_DEPTH'] = '0'
cue.add_dependency('MCoreUtils')
self.assertFalse(is_shallow_repo(self.location), 'Module checked out shallow (requested full)')
def test_SetDepthThree(self):
cue.setup['MCoreUtils_DEPTH'] = '3'
cue.add_dependency('MCoreUtils')
self.assertTrue(is_shallow_repo(self.location),
'Module not checked out shallow (requested: depth=3)')
def test_AddMsiTo314(self):
cue.complete_setup('BASE')
cue.setup['BASE'] = 'R3.14.12.1'
msifile = os.path.join(cue.cachedir, 'base-R3.14.12.1', 'src', 'dbtools', 'msi.c')
cue.add_dependency('BASE')
self.assertTrue(os.path.exists(msifile), 'MSI was not added to Base 3.14')
def test_DefaultBaseBranch(self):
cue.complete_setup('BASE')
self.assertEqual(cue.setup['BASE'], '7.0',
'Default Base branch is not 7.0 (found {0})'.format(cue.setup['BASE']))
def repo_access(dep):
cue.set_setup_from_env(dep)
cue.setup.setdefault(dep + "_DIRNAME", dep.lower())
cue.setup.setdefault(dep + "_REPONAME", dep.lower())
cue.setup.setdefault('REPOOWNER', 'epics-modules')
cue.setup.setdefault(dep + "_REPOOWNER", cue.setup['REPOOWNER'])
cue.setup.setdefault(dep + "_REPOURL", 'https://github.com/{0}/{1}.git'
.format(cue.setup[dep + '_REPOOWNER'], cue.setup[dep + '_REPONAME']))
with open(os.devnull, 'w') as devnull:
return cue.call_git(['ls-remote', '--quiet', '--heads', cue.setup[dep + '_REPOURL']],
stdout=devnull, stderr=devnull)
class TestDefaultModuleURLs(unittest.TestCase):
modules = ['BASE', 'PVDATA', 'PVACCESS', 'NTYPES',
'SNCSEQ', 'STREAM', 'ASYN', 'STD',
'CALC', 'AUTOSAVE', 'BUSY', 'SSCAN',
'IOCSTATS', 'MOTOR', 'IPAC', ]
def setUp(self):
os.environ['SETUP_PATH'] = '.:appveyor'
cue.clear_lists()
os.chdir(builddir)
cue.source_set('defaults')
def test_Repos(self):
for mod in self.modules:
self.assertEqual(repo_access(mod), 0, 'Defaults for {0} do not point to a valid git repository at {1}'
.format(mod, cue.setup[mod + '_REPOURL']))
@unittest.skipIf(ci_os != 'windows', 'VCVars test only applies to windows')
class TestVCVars(unittest.TestCase):
def test_vcvars(self):
if ci_service == 'appveyor':
os.environ['CONFIGURATION'] = 'default'
cue.detect_context()
cue.with_vcvars('env')
@unittest.skipIf(ci_service != 'travis', 'Run travis tests only on travis')
class TestTravisDetectContext(unittest.TestCase):
def setUp(self):
os.environ['TRAVIS'] = 'true'
os.environ['TRAVIS_OS_NAME'] = 'linux'
os.environ['TRAVIS_COMPILER'] = 'gcc'
def tearDown(self):
cue.clear_lists()
os.environ.pop('BCFG', None)
os.environ.pop('TEST', None)
os.environ.pop('STATIC', None)
def test_LinuxGccNone(self):
cue.detect_context()
self.assertEqual(cue.ci['service'], 'travis', "ci['service'] is {0} (expected: travis)"
.format(cue.ci['service']))
self.assertEqual(cue.ci['os'], 'linux', "ci['os'] is {0} (expected: linux)"
.format(cue.ci['os']))
self.assertEqual(cue.ci['compiler'], 'gcc', "ci['compiler'] is {0} (expected: gcc)"
.format(cue.ci['compiler']))
self.assertEqual(cue.ci['platform'], 'x64', "ci['platform'] is {0} (expected: x64)"
.format(cue.ci['platform']))
self.assertFalse(cue.ci['static'], "ci['static'] is True (expected: False)")
self.assertFalse(cue.ci['debug'], "ci['debug'] is True (expected: False)")
self.assertEqual(cue.ci['configuration'], 'shared-optimized',
"ci['configuration'] is {0} (expected: shared-optimized)"
.format(cue.ci['configuration']))
def test_LinuxClangNone(self):
os.environ['TRAVIS_COMPILER'] = 'clang'
cue.detect_context()
self.assertEqual(cue.ci['service'], 'travis', "ci['service'] is {0} (expected: travis)"
.format(cue.ci['service']))
self.assertEqual(cue.ci['os'], 'linux', "ci['os'] is {0} (expected: linux)"
.format(cue.ci['os']))
self.assertEqual(cue.ci['compiler'], 'clang', "ci['compiler'] is {0} (expected: clang)"
.format(cue.ci['compiler']))
self.assertEqual(cue.ci['platform'], 'x64', "ci['platform'] is {0} (expected: x64)"
.format(cue.ci['platform']))
self.assertFalse(cue.ci['static'], "ci['static'] is True (expected: False)")
self.assertFalse(cue.ci['debug'], "ci['debug'] is True (expected: False)")
self.assertEqual(cue.ci['configuration'], 'shared-optimized',
"ci['configuration'] is {0} (expected: shared-optimized)"
.format(cue.ci['configuration']))
def test_BcfgShared(self):
os.environ['BCFG'] = 'shared'
cue.detect_context()
self.assertFalse(cue.ci['static'], "ci['static'] is True (expected: False)")
self.assertFalse(cue.ci['debug'], "ci['debug'] is True (expected: False)")
self.assertEqual(cue.ci['configuration'], 'shared-optimized',
"ci['configuration'] is {0} (expected: shared-optimized)"
.format(cue.ci['configuration']))
def test_BcfgStatic(self):
os.environ['BCFG'] = 'static'
cue.detect_context()
self.assertTrue(cue.ci['static'], "ci['static'] is False (expected: True)")
self.assertFalse(cue.ci['debug'], "ci['debug'] is True (expected: False)")
self.assertEqual(cue.ci['configuration'], 'static-optimized',
"ci['configuration'] is {0} (expected: static-optimized)"
.format(cue.ci['configuration']))
def test_BcfgDebug(self):
os.environ['BCFG'] = 'debug'
cue.detect_context()
self.assertFalse(cue.ci['static'], "ci['static'] is True (expected: False)")
self.assertTrue(cue.ci['debug'], "ci['debug'] is False (expected: True)")
self.assertEqual(cue.ci['configuration'], 'shared-debug',
"ci['configuration'] is {0} (expected: shared-debug)"
.format(cue.ci['configuration']))
def test_BcfgStaticDebug(self):
os.environ['BCFG'] = 'static-debug'
cue.detect_context()
self.assertTrue(cue.ci['static'], "ci['static'] is False (expected: True)")
self.assertTrue(cue.ci['debug'], "ci['debug'] is False (expected: True)")
self.assertEqual(cue.ci['configuration'], 'static-debug',
"ci['configuration'] is {0} (expected: static-debug)"
.format(cue.ci['configuration']))
def test_TestNo(self):
os.environ['TEST'] = 'NO'
cue.detect_context()
self.assertFalse(cue.ci['test'], "ci['test'] is True (expected: False)")
def test_WindowsGccNone(self):
os.environ['TRAVIS_OS_NAME'] = 'windows'
cue.detect_context()
self.assertEqual(cue.ci['service'], 'travis', "ci['service'] is {0} (expected: travis)"
.format(cue.ci['service']))
self.assertEqual(cue.ci['os'], 'windows', "ci['os'] is {0} (expected: windows)"
.format(cue.ci['os']))
self.assertEqual(cue.ci['compiler'], 'gcc', "ci['compiler'] is {0} (expected: gcc)"
.format(cue.ci['compiler']))
self.assertEqual(cue.ci['platform'], 'x64', "ci['platform'] is {0} (expected: x64)"
.format(cue.ci['platform']))
self.assertFalse(cue.ci['static'], "ci['static'] is True (expected: False)")
self.assertFalse(cue.ci['debug'], "ci['debug'] is True (expected: False)")
self.assertEqual(cue.ci['configuration'], 'shared-optimized',
"ci['configuration'] is {0} (expected: shared-optimized)"
.format(cue.ci['configuration']))
self.assertIn('strawberryperl', cue.ci['choco'], "'strawberryperl' is not in ci['choco']")
self.assertIn('make', cue.ci['choco'], "'make' is not in ci['choco']")
def test_WindowsVs2017None(self):
os.environ['TRAVIS_OS_NAME'] = 'windows'
os.environ['TRAVIS_COMPILER'] = 'vs2017'
cue.detect_context()
self.assertEqual(cue.ci['service'], 'travis', "ci['service'] is {0} (expected: travis)"
.format(cue.ci['service']))
self.assertEqual(cue.ci['os'], 'windows', "ci['os'] is {0} (expected: windows)"
.format(cue.ci['os']))
self.assertEqual(cue.ci['compiler'], 'vs2017', "ci['compiler'] is {0} (expected: vs2017)"
.format(cue.ci['compiler']))
self.assertEqual(cue.ci['platform'], 'x64', "ci['platform'] is {0} (expected: x64)"
.format(cue.ci['platform']))
self.assertFalse(cue.ci['static'], "ci['static'] is True (expected: False)")
self.assertFalse(cue.ci['debug'], "ci['debug'] is True (expected: False)")
self.assertEqual(cue.ci['configuration'], 'shared-optimized',
"ci['configuration'] is {0} (expected: shared-optimized)"
.format(cue.ci['configuration']))
self.assertIn('strawberryperl', cue.ci['choco'], "'strawberryperl' is not in ci['choco']")
self.assertIn('make', cue.ci['choco'], "'make' is not in ci['choco']")
def test_WindowsVs2019None(self):
os.environ['TRAVIS_OS_NAME'] = 'windows'
os.environ['TRAVIS_COMPILER'] = 'vs2019'
cue.detect_context()
self.assertEqual(cue.ci['service'], 'travis', "ci['service'] is {0} (expected: travis)"
.format(cue.ci['service']))
self.assertEqual(cue.ci['os'], 'windows', "ci['os'] is {0} (expected: windows)"
.format(cue.ci['os']))
self.assertEqual(cue.ci['compiler'], 'vs2017', "ci['compiler'] is {0} (expected: vs2017)"
.format(cue.ci['compiler']))
self.assertEqual(cue.ci['platform'], 'x64', "ci['platform'] is {0} (expected: x64)"
.format(cue.ci['platform']))
self.assertFalse(cue.ci['static'], "ci['static'] is True (expected: False)")
self.assertFalse(cue.ci['debug'], "ci['debug'] is True (expected: False)")
self.assertEqual(cue.ci['configuration'], 'shared-optimized',
"ci['configuration'] is {0} (expected: shared-optimized)"
.format(cue.ci['configuration']))
self.assertIn('strawberryperl', cue.ci['choco'], "'strawberryperl' is not in ci['choco']")
self.assertIn('make', cue.ci['choco'], "'make' is not in ci['choco']")
def test_OsxClangNone(self):
os.environ['TRAVIS_OS_NAME'] = 'osx'
os.environ['TRAVIS_COMPILER'] = 'clang'
cue.detect_context()
self.assertEqual(cue.ci['service'], 'travis', "ci['service'] is {0} (expected: travis)"
.format(cue.ci['service']))
self.assertEqual(cue.ci['os'], 'osx', "ci['os'] is {0} (expected: osx)"
.format(cue.ci['os']))
self.assertEqual(cue.ci['compiler'], 'clang', "ci['compiler'] is {0} (expected: clang)"
.format(cue.ci['compiler']))
self.assertEqual(cue.ci['platform'], 'x64', "ci['platform'] is {0} (expected: x64)"
.format(cue.ci['platform']))
self.assertFalse(cue.ci['static'], "ci['static'] is True (expected: False)")
self.assertFalse(cue.ci['debug'], "ci['debug'] is True (expected: False)")
self.assertEqual(cue.ci['configuration'], 'shared-optimized',
"ci['configuration'] is {0} (expected: shared-optimized)"
.format(cue.ci['configuration']))
def test_StaticGetsWarning(self):
os.environ['STATIC'] = 'YES'
capturedOutput = getStringIO()
sys.stdout = capturedOutput
cue.detect_context()
sys.stdout = sys.__stdout__
self.assertRegexpMatches(capturedOutput.getvalue(), "Variable 'STATIC' not supported anymore")
def test_MisspelledBcfgGetsWarning(self):
os.environ['BCFG'] = 'static-dubug'
capturedOutput = getStringIO()
sys.stdout = capturedOutput
cue.detect_context()
sys.stdout = sys.__stdout__
self.assertRegexpMatches(capturedOutput.getvalue(), "Unrecognized build configuration setting")
@unittest.skipIf(ci_service != 'appveyor', 'Run appveyor tests only on appveyor')
class TestAppveyorDetectContext(unittest.TestCase):
def setUp(self):
os.environ['APPVEYOR'] = 'True'
os.environ['APPVEYOR_BUILD_WORKER_IMAGE'] = 'Visual Studio 2019'
os.environ['CMP'] = 'vs2019'
os.environ['CONFIGURATION'] = 'default'
os.environ['PLATFORM'] = 'x64'
def tearDown(self):
cue.clear_lists()
os.environ.pop('STATIC', None)
os.environ.pop('TEST', None)
def test_Platform32(self):
os.environ['PLATFORM'] = 'x86'
cue.detect_context()
self.assertFalse(cue.ci['static'], "ci['static'] is True (expected: False)")
self.assertFalse(cue.ci['debug'], "ci['debug'] is True (expected: False)")
self.assertEqual(cue.ci['configuration'], 'shared-optimized',
"ci['configuration'] is {0} (expected: shared-optimized)"
.format(cue.ci['configuration']))
self.assertEqual(cue.ci['platform'], 'x86',
"ci['platform'] is {0} (expected: x86)"
.format(cue.ci['platform']))
def test_Platform64(self):
cue.detect_context()
self.assertFalse(cue.ci['static'], "ci['static'] is True (expected: False)")
self.assertFalse(cue.ci['debug'], "ci['debug'] is True (expected: False)")
self.assertEqual(cue.ci['configuration'], 'shared-optimized',
"ci['configuration'] is {0} (expected: shared-optimized)"
.format(cue.ci['configuration']))
self.assertEqual(cue.ci['platform'], 'x64',
"ci['platform'] is {0} (expected: x64)"
.format(cue.ci['platform']))
def test_PlatformX64(self):
os.environ['PLATFORM'] = 'X64'
cue.detect_context()
self.assertFalse(cue.ci['static'], "ci['static'] is True (expected: False)")
self.assertFalse(cue.ci['debug'], "ci['debug'] is True (expected: False)")
self.assertEqual(cue.ci['configuration'], 'shared-optimized',
"ci['configuration'] is {0} (expected: shared-optimized)"
.format(cue.ci['configuration']))
self.assertEqual(cue.ci['platform'], 'x64',
"ci['platform'] is {0} (expected: x64)"
.format(cue.ci['platform']))
def test_ConfigDefault(self):
cue.detect_context()
self.assertFalse(cue.ci['static'], "ci['static'] is True (expected: False)")
self.assertFalse(cue.ci['debug'], "ci['debug'] is True (expected: False)")
self.assertEqual(cue.ci['configuration'], 'shared-optimized',
"ci['configuration'] is {0} (expected: shared-optimized)"
.format(cue.ci['configuration']))
def test_ConfigStatic(self):
os.environ['CONFIGURATION'] = 'static'
cue.detect_context()
self.assertTrue(cue.ci['static'], "ci['static'] is False (expected: True)")
self.assertFalse(cue.ci['debug'], "ci['debug'] is True (expected: False)")
self.assertEqual(cue.ci['configuration'], 'static-optimized',
"ci['configuration'] is {0} (expected: static-optimized)"
.format(cue.ci['configuration']))
def test_ConfigDebug(self):
os.environ['CONFIGURATION'] = 'debug'
cue.detect_context()
self.assertFalse(cue.ci['static'], "ci['static'] is True (expected: False)")
self.assertTrue(cue.ci['debug'], "ci['debug'] is False (expected: True)")
self.assertEqual(cue.ci['configuration'], 'shared-debug',
"ci['configuration'] is {0} (expected: shared-debug)"
.format(cue.ci['configuration']))
def test_ConfigStaticDebug(self):
os.environ['CONFIGURATION'] = 'static-debug'
cue.detect_context()
self.assertTrue(cue.ci['static'], "ci['static'] is False (expected: True)")
self.assertTrue(cue.ci['debug'], "ci['debug'] is False (expected: True)")
self.assertEqual(cue.ci['configuration'], 'static-debug',
"ci['configuration'] is {0} (expected: static-debug)"
.format(cue.ci['configuration']))
def test_TestNo(self):
os.environ['TEST'] = 'NO'
cue.detect_context()
self.assertFalse(cue.ci['test'], "ci['test'] is True (expected: False)")
def test_WindowsGccNone(self):
os.environ['CMP'] = 'gcc'
cue.detect_context()
self.assertEqual(cue.ci['service'], 'appveyor', "ci['service'] is {0} (expected: appveyor)"
.format(cue.ci['service']))
self.assertEqual(cue.ci['os'], 'windows', "ci['os'] is {0} (expected: windows)"
.format(cue.ci['os']))
self.assertEqual(cue.ci['compiler'], 'gcc', "ci['compiler'] is {0} (expected: gcc)"
.format(cue.ci['compiler']))
self.assertEqual(cue.ci['platform'], 'x64', "ci['platform'] is {0} (expected: x64)"
.format(cue.ci['platform']))
self.assertFalse(cue.ci['static'], "ci['static'] is True (expected: False)")
self.assertFalse(cue.ci['debug'], "ci['debug'] is True (expected: False)")
self.assertEqual(cue.ci['configuration'], 'shared-optimized',
"ci['configuration'] is {0} (expected: shared-optimized)"
.format(cue.ci['configuration']))
self.assertIn('make', cue.ci['choco'], "'make' is not in ci['choco']")
def test_WindowsVs2017None(self):
os.environ['APPVEYOR_BUILD_WORKER_IMAGE'] = 'Visual Studio 2017'
os.environ['CMP'] = 'vs2017'
os.environ['PLATFORM'] = 'x86'
cue.detect_context()
self.assertEqual(cue.ci['service'], 'appveyor', "ci['service'] is {0} (expected: appveyor)"
.format(cue.ci['service']))
self.assertEqual(cue.ci['os'], 'windows', "ci['os'] is {0} (expected: windows)"
.format(cue.ci['os']))
self.assertEqual(cue.ci['compiler'], 'vs2017', "ci['compiler'] is {0} (expected: vs2017)"
.format(cue.ci['compiler']))
self.assertEqual(cue.ci['platform'], 'x86', "ci['platform'] is {0} (expected: x86)"
.format(cue.ci['platform']))
self.assertFalse(cue.ci['static'], "ci['static'] is True (expected: False)")
self.assertFalse(cue.ci['debug'], "ci['debug'] is True (expected: False)")
self.assertEqual(cue.ci['configuration'], 'shared-optimized',
"ci['configuration'] is {0} (expected: shared-optimized)"
.format(cue.ci['configuration']))
self.assertIn('make', cue.ci['choco'], "'make' is not in ci['choco']")
def test_WindowsVs2019None(self):
cue.detect_context()
self.assertEqual(cue.ci['service'], 'appveyor', "ci['service'] is {0} (expected: appveyor)"
.format(cue.ci['service']))
self.assertEqual(cue.ci['os'], 'windows', "ci['os'] is {0} (expected: windows)"
.format(cue.ci['os']))
self.assertEqual(cue.ci['compiler'], 'vs2019', "ci['compiler'] is {0} (expected: vs2019)"
.format(cue.ci['compiler']))
self.assertEqual(cue.ci['platform'], 'x64', "ci['platform'] is {0} (expected: x64)"
.format(cue.ci['platform']))
self.assertFalse(cue.ci['static'], "ci['static'] is True (expected: False)")
self.assertFalse(cue.ci['debug'], "ci['debug'] is True (expected: False)")
self.assertEqual(cue.ci['configuration'], 'shared-optimized',
"ci['configuration'] is {0} (expected: shared-optimized)"
.format(cue.ci['configuration']))
self.assertIn('make', cue.ci['choco'], "'make' is not in ci['choco']")
def test_StaticGetsWarning(self):
os.environ['STATIC'] = 'YES'
capturedOutput = getStringIO()
sys.stdout = capturedOutput
cue.detect_context()
sys.stdout = sys.__stdout__
self.assertRegexpMatches(capturedOutput.getvalue(), "Variable 'STATIC' not supported anymore")
def test_MisspelledConfigurationGetsWarning(self):
os.environ['CONFIGURATION'] = 'static-dubug'
capturedOutput = getStringIO()
sys.stdout = capturedOutput
cue.detect_context()
sys.stdout = sys.__stdout__
self.assertRegexpMatches(capturedOutput.getvalue(), "Unrecognized build configuration setting")
class TestSetupForBuild(unittest.TestCase):
args = Namespace(paths=[])
cue.building_base = True
if ci_os == 'windows':
sp.check_call(['choco', 'install', 'make'])
def setUp(self):
if ci_service == 'appveyor':
os.environ['CONFIGURATION'] = 'default'
cue.detect_context()
def tearDown(self):
os.environ.pop('EPICS_HOST_ARCH', None)
cue.clear_lists()
def test_AddPathsOption(self):
os.environ['FOOBAR'] = 'BAR'
args = Namespace(paths=['/my/{FOOBAR}/dir', '/my/foobar'])
cue.setup_for_build(args)
self.assertTrue(re.search('/my/BAR/dir', os.environ['PATH']), 'Expanded path not in PATH')
self.assertTrue(re.search('/foobar', os.environ['PATH']), 'Plain path not in PATH')
os.environ.pop('FOOBAR', None)
@unittest.skipIf(ci_os != 'windows', 'HostArchConfiguration test only applies to windows')
def test_HostArchConfiguration(self):
cue.ci['compiler'] = 'vs2017'
for cue.ci['debug'] in [True, False]:
for cue.ci['static'] in [True, False]:
config_st = {True: 'static', False: 'shared'}
config_db = {True: '-debug', False: '-optimized'}
config = config_st[cue.ci['static']] + config_db[cue.ci['debug']]
cue.setup_for_build(self.args)
self.assertTrue('EPICS_HOST_ARCH' in os.environ,
'EPICS_HOST_ARCH is not set for Configuration={0}'.format(config))
if cue.ci['static']:
self.assertTrue(re.search('-static$', os.environ['EPICS_HOST_ARCH']),
'EPICS_HOST_ARCH is not -static for Configuration={0}'.format(config))
self.assertFalse(re.search('debug', os.environ['EPICS_HOST_ARCH']),
'EPICS_HOST_ARCH is -debug for Configuration={0}'.format(config))
elif cue.ci['debug']:
self.assertFalse(re.search('static', os.environ['EPICS_HOST_ARCH']),
'EPICS_HOST_ARCH (found {0}) is -static for Configuration={1}'
.format(os.environ['EPICS_HOST_ARCH'], config))
self.assertTrue(re.search('-debug$', os.environ['EPICS_HOST_ARCH']),
'EPICS_HOST_ARCH (found {0}) is not -debug for Configuration={1}'
.format(os.environ['EPICS_HOST_ARCH'], config))
else:
self.assertFalse(re.search('static', os.environ['EPICS_HOST_ARCH']),
'EPICS_HOST_ARCH is -static for Configuration={0}'.format(config))
self.assertFalse(re.search('debug', os.environ['EPICS_HOST_ARCH']),
'EPICS_HOST_ARCH is -debug for Configuration={0}'.format(config))
@unittest.skipIf(ci_os != 'windows', 'HostArchPlatform test only applies to windows')
def test_HostArchPlatform(self):
if ci_service == 'travis':
platforms = ['x64']
else:
platforms = ['x86', 'x64']
for platform in platforms:
for cc in ['vs2019', 'gcc']:
cue.ci['platform'] = platform
cue.ci['compiler'] = cc
cue.setup_for_build(self.args)
self.assertTrue('EPICS_HOST_ARCH' in os.environ,
'EPICS_HOST_ARCH is not set for {0} / {1}'
.format(cc, cue.ci['platform']))
if platform == 'x86':
self.assertTrue(re.search('^win32-x86', os.environ['EPICS_HOST_ARCH']),
'EPICS_HOST_ARCH (found {0}) is not win32-x86 for {1} / {2}'
.format(os.environ['EPICS_HOST_ARCH'], cc, platform))
else:
self.assertTrue(re.search('^windows-x64', os.environ['EPICS_HOST_ARCH']),
'EPICS_HOST_ARCH (found {0}) is not windows-x64 for {1} / {2}'
.format(os.environ['EPICS_HOST_ARCH'], cc, platform))
if cc == 'gcc':
self.assertTrue(re.search('-mingw$', os.environ['EPICS_HOST_ARCH']),
'EPICS_HOST_ARCH (found {0}) is not -mingw for {1} / {2}'
.format(os.environ['EPICS_HOST_ARCH'], cc, platform))
pattern = {'x86': 'mingw32', 'x64': 'mingw64'}
self.assertTrue(re.search(pattern[platform], os.environ['PATH']),
'Binary location for {0} not in PATH (found {1})'
.format(pattern[platform], os.environ['PATH']))
@unittest.skipIf(ci_os != 'windows', 'Strawberry perl test only applies to windows')
def test_StrawberryInPathVS2019(self):
if 'APPVEYOR' in os.environ:
os.environ['CMP'] = 'vs2019'
cue.setup_for_build(self.args)
self.assertTrue(re.search('strawberry', os.environ['PATH'], flags=re.IGNORECASE),
'Strawberry Perl installed but location not in PATH (found {0})'
.format(os.environ['PATH']))
def setBase314(self, yesno):
cfg_base_version = os.path.join('configure', 'CONFIG_BASE_VERSION')
fout = open(cfg_base_version, 'w')
print('# test file for base version detection', file=fout)
print('BASE_3_14={0}'.format(yesno), file=fout)
fout.close()
def setTestResultsTarget(self, target):
rules_build = os.path.join('configure', 'RULES_BUILD')
fout = open(rules_build, 'w')
print('# test file for target detection', file=fout)
print('{0}: something'.format(target), file=fout)
fout.close()
def test_DetectionBase314No(self):
self.setBase314('NO')
cue.setup_for_build(self.args)
self.assertFalse(cue.is_base314, 'Falsely detected Base 3.14')
def test_DetectionBase314Yes(self):
self.setBase314('YES')
cue.setup_for_build(self.args)
self.assertTrue(cue.is_base314, 'Base 3.14 = YES not detected')
def test_DetectionTestResultsTarget314No(self):
self.setBase314('YES')
self.setTestResultsTarget('nottherighttarget')
cue.setup_for_build(self.args)
self.assertFalse(cue.has_test_results, 'Falsely detected test-results target')
def test_DetectionTestResultsTarget314Yes(self):
self.setBase314('YES')
self.setTestResultsTarget('test-results')
cue.setup_for_build(self.args)
self.assertFalse(cue.has_test_results, 'Falsely found test-results on Base 3.14')
def test_DetectionTestResultsTargetNot314Yes(self):
self.setBase314('NO')
self.setTestResultsTarget('test-results')
cue.setup_for_build(self.args)
self.assertTrue(cue.has_test_results, 'Target test-results not detected')
def test_ExtraMakeArgs(self):
os.environ['EXTRA'] = 'bla'
for ind in range(1,5):
os.environ['EXTRA{0}'.format(ind)] = 'bla {0}'.format(ind)
cue.setup_for_build(self.args)
self.assertTrue(cue.extra_makeargs[0] == 'bla', 'Extra make arg [0] not set')
for ind in range(1,5):
self.assertTrue(cue.extra_makeargs[ind] == 'bla {0}'.format(ind),
'Extra make arg [{0}] not set (expected "bla {0}", found "{1}")'
.format(ind, cue.extra_makeargs[ind]))
if __name__ == "__main__":
if 'VV' in os.environ and os.environ['VV'] == '1':
logging.basicConfig(level=logging.DEBUG)
cue.silent_dep_builds = False
cue.detect_context()
cue.host_info()
if sys.argv[1:] == ['env']:
# testing with_vcvars
[print(K, '=', V) for K, V in os.environ.items()]
else:
unittest.main()

1010
cue.py Normal file

File diff suppressed because it is too large


@@ -1,4 +1,5 @@
# EPICS Base
BASE=7.0
BASE_DIRNAME=base
BASE_REPONAME=epics-base
BASE_REPOOWNER=epics-base
@@ -33,3 +34,5 @@ STREAM_REPOOWNER=paulscherrerinstitute
# busy
# sscan
# iocStats
# motor
# ipac


@@ -9,6 +9,9 @@ DB += dbExample2.db
DB += dbSubExample.db
DB += user.substitutions
# Host-side expansion of substitutions file with MSI
DB += dbExample3.db
# If <anyname>.db template is not named <anyname>*.template add
# <anyname>_TEMPLATE = <templatename>


@@ -0,0 +1,8 @@
# Example host-side substitutions file
file dbExample2.db {
pattern { user, no, scan }
{ "ralph", 4, "1 second" }
{ "ralph", 5, "2 second" }
{ "ralph", 6, "5 second" }
}


@@ -4,5 +4,10 @@ DIRS := $(DIRS) $(filter-out $(DIRS), $(wildcard *src*))
DIRS := $(DIRS) $(filter-out $(DIRS), $(wildcard *Src*))
DIRS := $(DIRS) $(filter-out $(DIRS), $(wildcard *db*))
DIRS := $(DIRS) $(filter-out $(DIRS), $(wildcard *Db*))
DIRS := $(DIRS) test
test_DEPEND_DIRS += src
include $(TOP)/configure/RULES_DIRS


@@ -4,6 +4,9 @@ include $(TOP)/configure/CONFIG
#----------------------------------------
# ADD MACRO DEFINITIONS BELOW HERE
# use the new RSET definition
USR_CPPFLAGS += -DUSE_TYPED_RSET
# xxxRecord.h will be created from xxxRecord.dbd
DBDINC += xxxRecord


@@ -1,6 +1,6 @@
/* xxxRecord.c */
/* Example record support module */
#include <stddef.h>
#include <stdlib.h>
#include <stdio.h>
@@ -25,111 +25,111 @@
/* Create RSET - Record Support Entry Table */
#define report NULL
#define initialize NULL
static long init_record();
static long process();
static long init_record(struct dbCommon *, int);
static long process(struct dbCommon *);
#define special NULL
#define get_value NULL
#define cvt_dbaddr NULL
#define get_array_info NULL
#define put_array_info NULL
static long get_units();
static long get_precision();
static long get_units(DBADDR *, char *);
static long get_precision(const DBADDR *, long *);
#define get_enum_str NULL
#define get_enum_strs NULL
#define put_enum_str NULL
static long get_graphic_double();
static long get_control_double();
static long get_alarm_double();
static long get_graphic_double(DBADDR *, struct dbr_grDouble *);
static long get_control_double(DBADDR *, struct dbr_ctrlDouble *);
static long get_alarm_double(DBADDR *, struct dbr_alDouble *);
rset xxxRSET={
RSETNUMBER,
report,
initialize,
init_record,
process,
special,
get_value,
cvt_dbaddr,
get_array_info,
put_array_info,
get_units,
get_precision,
get_enum_str,
get_enum_strs,
put_enum_str,
get_graphic_double,
get_control_double,
get_alarm_double
RSETNUMBER,
report,
initialize,
init_record,
process,
special,
get_value,
cvt_dbaddr,
get_array_info,
put_array_info,
get_units,
get_precision,
get_enum_str,
get_enum_strs,
put_enum_str,
get_graphic_double,
get_control_double,
get_alarm_double
};
epicsExportAddress(rset,xxxRSET);
typedef struct xxxset { /* xxx input dset */
long number;
DEVSUPFUN dev_report;
DEVSUPFUN init;
DEVSUPFUN init_record; /*returns: (-1,0)=>(failure,success)*/
DEVSUPFUN get_ioint_info;
DEVSUPFUN read_xxx;
long number;
DEVSUPFUN dev_report;
DEVSUPFUN init;
DEVSUPFUN init_record; /*returns: (-1,0)=>(failure,success)*/
DEVSUPFUN get_ioint_info;
DEVSUPFUN read_xxx;
}xxxdset;
static void checkAlarms(xxxRecord *prec);
static void monitor(xxxRecord *prec);
static long init_record(void *precord,int pass)
static long init_record(struct dbCommon *pcommon, int pass)
{
xxxRecord *prec = (xxxRecord *)precord;
xxxRecord *prec = (xxxRecord *)pcommon;
xxxdset *pdset;
long status;
if (pass==0) return(0);
if(!(pdset = (xxxdset *)(prec->dset))) {
recGblRecordError(S_dev_noDSET,(void *)prec,"xxx: init_record");
return(S_dev_noDSET);
recGblRecordError(S_dev_noDSET,(void *)prec,"xxx: init_record");
return(S_dev_noDSET);
}
/* must have read_xxx function defined */
if( (pdset->number < 5) || (pdset->read_xxx == NULL) ) {
recGblRecordError(S_dev_missingSup,(void *)prec,"xxx: init_record");
return(S_dev_missingSup);
recGblRecordError(S_dev_missingSup,(void *)prec,"xxx: init_record");
return(S_dev_missingSup);
}
if( pdset->init_record ) {
if((status=(*pdset->init_record)(prec))) return(status);
if((status=(*pdset->init_record)(prec))) return(status);
}
return(0);
}
static long process(void *precord)
static long process(struct dbCommon *pcommon)
{
xxxRecord *prec = (xxxRecord *)precord;
xxxdset *pdset = (xxxdset *)(prec->dset);
long status;
unsigned char pact=prec->pact;
xxxRecord *prec = (xxxRecord *)pcommon;
xxxdset *pdset = (xxxdset *)(prec->dset);
long status;
unsigned char pact=prec->pact;
if( (pdset==NULL) || (pdset->read_xxx==NULL) ) {
prec->pact=TRUE;
recGblRecordError(S_dev_missingSup,(void *)prec,"read_xxx");
return(S_dev_missingSup);
}
if( (pdset==NULL) || (pdset->read_xxx==NULL) ) {
prec->pact=TRUE;
recGblRecordError(S_dev_missingSup,(void *)prec,"read_xxx");
return(S_dev_missingSup);
}
/* pact must not be set until after calling device support */
status=(*pdset->read_xxx)(prec);
/* check if device support set pact */
if ( !pact && prec->pact ) return(0);
prec->pact = TRUE;
/* pact must not be set until after calling device support */
status=(*pdset->read_xxx)(prec);
/* check if device support set pact */
if ( !pact && prec->pact ) return(0);
prec->pact = TRUE;
recGblGetTimeStamp(prec);
/* check for alarms */
checkAlarms(prec);
/* check event list */
monitor(prec);
/* process the forward scan link record */
recGblGetTimeStamp(prec);
/* check for alarms */
checkAlarms(prec);
/* check event list */
monitor(prec);
/* process the forward scan link record */
recGblFwdLink(prec);
prec->pact=FALSE;
return(status);
prec->pact=FALSE;
return(status);
}
static long get_units(DBADDR *paddr, char *units)
{
xxxRecord *prec=(xxxRecord *)paddr->precord;
@@ -138,7 +138,7 @@ static long get_units(DBADDR *paddr, char *units)
return(0);
}
static long get_precision(DBADDR *paddr, long *precision)
static long get_precision(const DBADDR *paddr, long *precision)
{
xxxRecord *prec=(xxxRecord *)paddr->precord;
@@ -176,8 +176,8 @@ static long get_control_double(DBADDR *paddr,struct dbr_ctrlDouble *pcd)
|| fieldIndex == xxxRecordHIGH
|| fieldIndex == xxxRecordLOW
|| fieldIndex == xxxRecordLOLO) {
pcd->upper_ctrl_limit = prec->hopr;
pcd->lower_ctrl_limit = prec->lopr;
pcd->upper_ctrl_limit = prec->hopr;
pcd->lower_ctrl_limit = prec->lopr;
} else recGblGetControlDouble(paddr,pcd);
return(0);
}
@@ -195,79 +195,79 @@ static long get_alarm_double(DBADDR *paddr,struct dbr_alDouble *pad)
} else recGblGetAlarmDouble(paddr,pad);
return(0);
}
static void checkAlarms(xxxRecord *prec)
{
double val, hyst, lalm;
float hihi, high, low, lolo;
unsigned short hhsv, llsv, hsv, lsv;
unsigned short hhsv, llsv, hsv, lsv;
if(prec->udf == TRUE ){
if(prec->udf == TRUE ){
recGblSetSevr(prec,UDF_ALARM,INVALID_ALARM);
return;
}
hihi = prec->hihi; lolo = prec->lolo; high = prec->high; low = prec->low;
hhsv = prec->hhsv; llsv = prec->llsv; hsv = prec->hsv; lsv = prec->lsv;
val = prec->val; hyst = prec->hyst; lalm = prec->lalm;
return;
}
hihi = prec->hihi; lolo = prec->lolo; high = prec->high; low = prec->low;
hhsv = prec->hhsv; llsv = prec->llsv; hsv = prec->hsv; lsv = prec->lsv;
val = prec->val; hyst = prec->hyst; lalm = prec->lalm;
/* alarm condition hihi */
if (hhsv && (val >= hihi || ((lalm==hihi) && (val >= hihi-hyst)))){
if (recGblSetSevr(prec,HIHI_ALARM,prec->hhsv)) prec->lalm = hihi;
return;
}
/* alarm condition hihi */
if (hhsv && (val >= hihi || ((lalm==hihi) && (val >= hihi-hyst)))){
if (recGblSetSevr(prec,HIHI_ALARM,prec->hhsv)) prec->lalm = hihi;
return;
}
/* alarm condition lolo */
if (llsv && (val <= lolo || ((lalm==lolo) && (val <= lolo+hyst)))){
if (recGblSetSevr(prec,LOLO_ALARM,prec->llsv)) prec->lalm = lolo;
return;
}
/* alarm condition lolo */
if (llsv && (val <= lolo || ((lalm==lolo) && (val <= lolo+hyst)))){
if (recGblSetSevr(prec,LOLO_ALARM,prec->llsv)) prec->lalm = lolo;
return;
}
/* alarm condition high */
if (hsv && (val >= high || ((lalm==high) && (val >= high-hyst)))){
if (recGblSetSevr(prec,HIGH_ALARM,prec->hsv)) prec->lalm = high;
return;
}
/* alarm condition high */
if (hsv && (val >= high || ((lalm==high) && (val >= high-hyst)))){
if (recGblSetSevr(prec,HIGH_ALARM,prec->hsv)) prec->lalm = high;
return;
}
/* alarm condition low */
if (lsv && (val <= low || ((lalm==low) && (val <= low+hyst)))){
if (recGblSetSevr(prec,LOW_ALARM,prec->lsv)) prec->lalm = low;
return;
}
/* alarm condition low */
if (lsv && (val <= low || ((lalm==low) && (val <= low+hyst)))){
if (recGblSetSevr(prec,LOW_ALARM,prec->lsv)) prec->lalm = low;
return;
}
/* we get here only if val is out of alarm by at least hyst */
prec->lalm = val;
return;
/* we get here only if val is out of alarm by at least hyst */
prec->lalm = val;
return;
}
static void monitor(xxxRecord *prec)
{
unsigned short monitor_mask;
double delta;
unsigned short monitor_mask;
double delta;
monitor_mask = recGblResetAlarms(prec);
/* check for value change */
delta = prec->mlst - prec->val;
if(delta<0.0) delta = -delta;
if (delta > prec->mdel) {
/* post events for value change */
monitor_mask |= DBE_VALUE;
/* update last value monitored */
prec->mlst = prec->val;
}
/* check for value change */
delta = prec->mlst - prec->val;
if(delta<0.0) delta = -delta;
if (delta > prec->mdel) {
/* post events for value change */
monitor_mask |= DBE_VALUE;
/* update last value monitored */
prec->mlst = prec->val;
}
/* check for archive change */
delta = prec->alst - prec->val;
if(delta<0.0) delta = -delta;
if (delta > prec->adel) {
/* post events on value field for archive change */
monitor_mask |= DBE_LOG;
/* update last archive value monitored */
prec->alst = prec->val;
}
/* check for archive change */
delta = prec->alst - prec->val;
if(delta<0.0) delta = -delta;
if (delta > prec->adel) {
/* post events on value field for archive change */
monitor_mask |= DBE_LOG;
/* update last archive value monitored */
prec->alst = prec->val;
}
/* send out monitors connected to the value field */
if (monitor_mask){
db_post_events(prec,&prec->val,monitor_mask);
}
return;
/* send out monitors connected to the value field */
if (monitor_mask){
db_post_events(prec,&prec->val,monitor_mask);
}
return;
}

62
exampleApp/test/Makefile Normal file

@@ -0,0 +1,62 @@
#*************************************************************************
# Copyright (c) 2020 ITER Organization.
# EPICS BASE is distributed subject to a Software License Agreement found
# in the file LICENSE that is included with this distribution.
#*************************************************************************
CURDIR := $(patsubst %/,%,$(dir $(lastword $(MAKEFILE_LIST))))
TOP = ../..
include $(TOP)/configure/CONFIG
# use the new RSET definition
USR_CPPFLAGS += -DUSE_TYPED_RSET
TARGETS += $(COMMON_DIR)/exampleTest.dbd
DBDDEPENDS_FILES += exampleTest.dbd$(DEP)
exampleTest_DBD += example.dbd
TESTFILES += $(COMMON_DIR)/exampleTest.dbd
testHarness_SRCS += exampleTest_registerRecordDeviceDriver.cpp
PROD_LIBS += exampleSupport
ifneq ($(SNCSEQ),)
PROD_LIBS += seq pv
endif
PROD_LIBS += $(EPICS_BASE_IOC_LIBS)
TESTPROD_HOST += exampleTest
exampleTest_SRCS += exampleTest.c
exampleTest_SRCS += exampleTest_registerRecordDeviceDriver.cpp
testHarness_SRCS += exampleTest.c
TESTFILES += ../../../db/dbExample1.db
TESTS += exampleTest
# This runs all the test programs in a known working order:
testHarness_SRCS += epicsRunExampleTests.c
exampleTestHarness_SRCS += $(testHarness_SRCS)
exampleTestHarness_SRCS_RTEMS += rtemsTestHarness.c
ifdef BASE_7_0
PROD_SRCS_RTEMS += rtemsTestData.c
endif
PROD_vxWorks = exampleTestHarness
PROD_RTEMS = exampleTestHarness
TESTSPEC_vxWorks = exampleTestHarness.munch; epicsRunExampleTests
TESTSPEC_RTEMS = exampleTestHarness.boot; epicsRunExampleTests
TESTSCRIPTS_HOST += $(TESTS:%=%.t)
ifneq ($(filter $(T_A),$(CROSS_COMPILER_RUNTEST_ARCHS)),)
TESTPROD_RTEMS = $(TESTPROD_HOST)
TESTSCRIPTS_RTEMS += $(TESTS:%=%.t)
endif
include $(TOP)/configure/RULES
ifdef BASE_7_0
rtemsTestData.c : $(TESTFILES) $(TOOLS)/epicsMakeMemFs.pl
$(PERL) $(TOOLS)/epicsMakeMemFs.pl $@ epicsRtemsFSImage $(TESTFILES)
endif


@@ -0,0 +1,28 @@
/*************************************************************************\
* Copyright (c) 2011 UChicago Argonne LLC, as Operator of Argonne
* National Laboratory.
* EPICS BASE is distributed subject to a Software License Agreement found
* in file LICENSE that is included with this distribution.
\*************************************************************************/
/*
* Run Example tests as a batch.
*
*/
#include "epicsUnitTest.h"
#include "epicsExit.h"
#include "dbmf.h"
int exampleTest(void);
void epicsRunExampleTests(void)
{
testHarness();
runTest(exampleTest);
dbmfFreeChunks();
epicsExit(0); /* Trigger test harness */
}


@@ -0,0 +1,58 @@
/*************************************************************************\
* Copyright (c) 2020 ITER Organization.
* EPICS BASE is distributed subject to a Software License Agreement found
* in file LICENSE that is included with this distribution.
\*************************************************************************/
/*
* Author: Ralph Lange <ralph.lange@gmx.de>
*/
#include <string.h>
#include <epicsUnitTest.h>
#include <testMain.h>
#include <dbAccess.h>
#include <dbStaticLib.h>
#include <errlog.h>
void exampleTest_registerRecordDeviceDriver(struct dbBase *);
static dbCommon *prec;
/* from Base 3.15 dbUnitTest.c */
static
dbCommon* testdbRecordPtr(const char* pv)
{
DBADDR addr;
if (dbNameToAddr(pv, &addr))
testAbort("Missing record \"%s\"", pv);
return addr.precord;
}
static void testOnce(void)
{
testDiag("check that tests work");
dbReadDatabase(&pdbbase, "exampleTest.dbd", "../O.Common", NULL);
exampleTest_registerRecordDeviceDriver(pdbbase);
dbReadDatabase(&pdbbase, "dbExample1.db", "../../../db", "user=test");
testDiag("Searching for records from example application");
prec = testdbRecordPtr("test:xxxExample");
testOk((prec != NULL), "record test:xxxExample");
prec = testdbRecordPtr("test:aiExample");
testOk((prec != NULL), "record test:aiExample");
}
MAIN(exampleTest)
{
testPlan(2);
testOnce();
return testDone();
}


@@ -0,0 +1,14 @@
/*************************************************************************\
* Copyright (c) 2011 UChicago Argonne LLC, as Operator of Argonne
* National Laboratory.
* EPICS BASE is distributed subject to a Software License Agreement found
* in file LICENSE that is included with this distribution.
\*************************************************************************/
extern void epicsRunExampleTests(void);
int main(int argc, char **argv)
{
epicsRunExampleTests(); /* calls epicsExit(0) */
return 0;
}

43
synApps-6.0.set Normal file

@@ -0,0 +1,43 @@
# Release tags for synApps modules as per synApps-6.0
# see https://github.com/EPICS-synApps/support/blob/21f7fcd0f33cef5d34aacbd4e33511b43398a6dc/assemble_synApps.sh
# also for additional configuration that could be done in hook scripts
ALLENBRADLEY=2.3
ALIVE=R1-1-0
AREA_DETECTOR=R3-3-1
ASYN=R4-33
AUTOSAVE=R5-9
BUSY=R1-7
CALC=R3-7-1
CAMAC=R2-7-1
CAPUTRECORDER=R1-7-1
DAC128V=R2-9
DELAYGEN=R1-2-0
DXP=R5-0
DXPSITORO=R1-1
DEVIOCSTATS=3.1.15
#GALIL=V3-6
IP=R2-19-1
IPAC=2.15
IP330=R2-9
IPUNIDIG=R2-11
LOVE=R3-2-6
LUA=R1-2-2
MCA=R7-7
MEASCOMP=R2-1
MODBUS=R2-11
MOTOR=R6-10-1
OPTICS=R2-13-1
QUADEM=R9-1
SNCSEQ=2.2.5
SOFTGLUE=R2-8-1
SOFTGLUEZYNQ=R2-0-1
SSCAN=R2-11-1
STD=R3-5
STREAM=R2-7-7c
VAC=R1-7
VME=R2-9
YOKOGAWA_DAS=R1-0-0
XXX=R6-0
include synApps-common

44
synApps-6.1.set Normal file

@@ -0,0 +1,44 @@
# Release tags for synApps modules as per synApps-6.1
# see https://github.com/EPICS-synApps/support/blob/cc5adba5b8848c9cb98ab96768d668ae927d8859/assemble_synApps.sh
# also for additional configuration that could be done in hook scripts
#ALLENBRADLEY=2.3
ALIVE=R1-1-1
AREA_DETECTOR=R3-7
ASYN=R4-36
AUTOSAVE=R5-10
BUSY=R1-7-2
CALC=R3-7-3
CAMAC=R2-7-1
CAPUTRECORDER=R1-7-2
DAC128V=R2-9
DELAYGEN=R1-2-1
DXP=R6-0
DXPSITORO=R1-2
DEVIOCSTATS=3.1.16
#ETHERIP=ether_ip-3-1
#GALIL=V3-6
IP=R2-20-1
IPAC=2.15
IP330=R2-9
IPUNIDIG=R2-11
LOVE=R3-2-7
LUA=R2-0
MCA=R7-8
MEASCOMP=R2-3
MODBUS=R3-0
MOTOR=R7-1
OPTICS=R2-13-3
QUADEM=R9-2-1
SNCSEQ=2.2.6
SOFTGLUE=R2-8-2
SOFTGLUEZYNQ=R2-0-2
SSCAN=R2-11-3
STD=R3-6
STREAM=2.8.9
VAC=R1-9
VME=R2-9-2
YOKOGAWA_DAS=R2-0-1
XXX=R6-1
include synApps-common

7
synApps-common.set Normal file

@@ -0,0 +1,7 @@
# Common settings for all synApps releases
DEVIOCSTATS_REPONAME=iocStats
ETHERIP_REPOOWNER=EPICSTools
GALIL_REPOOWNER=motorapp
GALIL_REPONAME=Galil-3-0
AREADETECTOR_REPOOWNER=areaDetector


@@ -1,4 +1,4 @@
MODULES="sncseq"
BASE=7.0
SNCSEQ=R2-2-7
SNCSEQ=R2-2-8


@@ -5,6 +5,18 @@
# SET=test00 in .travis.yml runs the tests in this script
# all other jobs are started as compile jobs
# The following if clause can be removed for ci-scripts major version 3
if [ "$TRAVIS_OS_NAME" == osx -a "$BASH_VERSINFO" -lt 4 ]
then
brew install bash
if [ $(/usr/local/bin/bash -c 'echo $BASH_VERSINFO') -lt 4 ]
then
echo "Failed to install a recent bash" >&2
exit 1
fi
exec /usr/local/bin/bash $0 "$@"
fi
# Set VV empty in .travis.yml to make scripts terse
[ "${VV:-1}" ] && set -x
@@ -16,8 +28,8 @@ UTILS_UNITTEST=1
readlinkf() { perl -MCwd -e 'print Cwd::abs_path shift' "$1"; }
# test utilities
die() {
echo "${ANSI_RED}$1${ANSI_RESET}"
fail() {
echo -e "${ANSI_RED}$1${ANSI_RESET}"
exit 1
}
@@ -27,7 +39,7 @@ fn_exists() {
repo_exists() {
DEP=$1
dep_lc=$(echo $DEP | tr 'A-Z' 'a-z')
dep_lc=${DEP,,}
eval dirname=\${${DEP}_DIRNAME:=${dep_lc}}
eval reponame=\${${DEP}_REPONAME:=${dep_lc}}
eval repourl=\${${DEP}_REPOURL:="https://github.com/\${${DEP}_REPOOWNER:=${REPOOWNER:-epics-modules}}/${reponame}.git"}
@@ -35,69 +47,72 @@ repo_exists() {
git ls-remote --quiet --heads --exit-code $repourl > /dev/null 2>&1
}
SETUP_DIRS=$(echo $SETUP_PATH | tr ":" "\n")
SETUP_DIRS=${SETUP_PATH//:/ }
SCRIPTDIR=$(dirname $(readlinkf $0))/travis
CURDIR="$PWD"
CACHEDIR="$HOME/.cache"
CACHEDIR=${CACHEDIR:-${HOME}/.cache}
[ -e ${CACHEDIR} ] || mkdir -p ${CACHEDIR}
echo "Testing contents of utils.sh"
[ -d "$SCRIPTDIR" ] || die "SCRIPTDIR does not exist"
[ -e "$SCRIPTDIR/utils.sh" ] || die "SCRIPTDIR/utils.sh does not exist"
[ -d "$SCRIPTDIR" ] || fail "SCRIPTDIR does not exist"
[ -e "$SCRIPTDIR/utils.sh" ] || fail "SCRIPTDIR/utils.sh does not exist"
# source functions
. $SCRIPTDIR/utils.sh
# check for functions
fn_exists fold_start || die "function fold_start missing from SCRIPTDIR/utils.sh"
fn_exists fold_end || die "function fold_end missing from SCRIPTDIR/utils.sh"
fn_exists source_set || die "function source_set missing from SCRIPTDIR/utils.sh"
fn_exists update_release_local || die "function update_release_local missing from SCRIPTDIR/utils.sh"
fn_exists add_dependency || die "function add_dependency missing from SCRIPTDIR/utils.sh"
fn_exists fold_start || fail "function fold_start missing from SCRIPTDIR/utils.sh"
fn_exists fold_end || fail "function fold_end missing from SCRIPTDIR/utils.sh"
fn_exists source_set || fail "function source_set missing from SCRIPTDIR/utils.sh"
fn_exists update_release_local || fail "function update_release_local missing from SCRIPTDIR/utils.sh"
fn_exists add_dependency || fail "function add_dependency missing from SCRIPTDIR/utils.sh"
# test source_set()
######################################################################
SETUP_DIRS= source_set test01 | grep -q "(SETUP_PATH) is empty" || die "empty search path not detected"
source_set xxdoesnotexistxx | grep -q "does not exist" || die "missing setup file not detected"
source_set test01 | grep -q "Loading setup file" || die "test01 setup file not found"
SETUP_PATH= source_set test01 | grep -q "(SETUP_PATH) is empty" || fail "empty search path not detected"
source_set xxdoesnotexistxx | grep -q "does not exist" || fail "missing setup file not detected"
source_set test01 | grep -q "Loading setup file" || fail "test01 setup file not found"
unset SEEN_SETUPS
export BASE=foo
source_set test01
[ "$BASE" = "foo" ] || die "preset module BASE version does not override test01 setup file (expected foo got $BASE)"
[ "$BASE" = "foo" ] || fail "preset module BASE version does not override test01 setup file (expected foo got $BASE)"
unset SEEN_SETUPS
BASE=
source_set test02
[ "$BASE" = "foo" ] || die "BASE set in test02 does not override included test01 setup file (expected foo got $BASE)"
[ "$FOO" = "bar" ] || die "Setting of single word does not work"
[ "$FOO2" = "bar bar2" ] || die "Setting of multiple words does not work"
[ "$FOO3" = "bar bar2" ] || die "Indented setting of multiple words does not work"
[ "$SNCSEQ" = "R2-2-7" ] || die "Setup test01 was not included"
[ "$BASE" = "foo" ] || fail "BASE set in test02 does not override included test01 setup file (expected foo got $BASE)"
[ "$FOO" = "bar" ] || fail "Setting of single word does not work"
[ "$FOO2" = "bar bar2" ] || fail "Setting of multiple words does not work"
[ "$FOO3" = "bar bar2" ] || fail "Indented setting of multiple words does not work"
[ "$SNCSEQ" = "R2-2-7" ] || fail "Setup test01 was not included"
unset SEEN_SETUPS
source_set test03 | grep -q "Ignoring already included setup file" || die "test01 setup file included twice"
source_set test03 | grep -q "Ignoring already included setup file" || fail "test01 setup file included twice"
# test default settings file
######################################################################
echo "Testing default settings for completeness and valid git repo settings"
[ -e ./defaults.set ] || die "defaults.set does not exist"
[ -e ./defaults.set ] || fail "defaults.set does not exist"
source_set defaults
repo_exists BASE || die "Defaults for BASE do not point to a valid git repository at $repourl"
repo_exists PVDATA || die "Defaults for PVDATA do not point to a valid git repository at $repourl"
repo_exists PVACCESS || die "Defaults for PVACCESS do not point to a valid git repository at $repourl"
repo_exists NTYPES || die "Defaults for NTYPES do not point to a valid git repository at $repourl"
repo_exists SNCSEQ || die "Defaults for SNCSEQ do not point to a valid git repository at $repourl"
repo_exists STREAM || die "Defaults for STREAM do not point to a valid git repository at $repourl"
repo_exists ASYN || die "Defaults for STREAM do not point to a valid git repository at $repourl"
repo_exists STD || die "Defaults for STD do not point to a valid git repository at $repourl"
repo_exists CALC || die "Defaults for CALC do not point to a valid git repository at $repourl"
repo_exists AUTOSAVE || die "Defaults for AUTOSAVE do not point to a valid git repository at $repourl"
repo_exists BUSY || die "Defaults for BUSY do not point to a valid git repository at $repourl"
repo_exists SSCAN || die "Defaults for SSCAN do not point to a valid git repository at $repourl"
repo_exists IOCSTATS || die "Defaults for IOCSTATS do not point to a valid git repository at $repourl"
repo_exists BASE || fail "Defaults for BASE do not point to a valid git repository at $repourl"
repo_exists PVDATA || fail "Defaults for PVDATA do not point to a valid git repository at $repourl"
repo_exists PVACCESS || fail "Defaults for PVACCESS do not point to a valid git repository at $repourl"
repo_exists NTYPES || fail "Defaults for NTYPES do not point to a valid git repository at $repourl"
repo_exists SNCSEQ || fail "Defaults for SNCSEQ do not point to a valid git repository at $repourl"
repo_exists STREAM || fail "Defaults for STREAM do not point to a valid git repository at $repourl"
repo_exists ASYN || fail "Defaults for ASYN do not point to a valid git repository at $repourl"
repo_exists STD || fail "Defaults for STD do not point to a valid git repository at $repourl"
repo_exists CALC || fail "Defaults for CALC do not point to a valid git repository at $repourl"
repo_exists AUTOSAVE || fail "Defaults for AUTOSAVE do not point to a valid git repository at $repourl"
repo_exists BUSY || fail "Defaults for BUSY do not point to a valid git repository at $repourl"
repo_exists SSCAN || fail "Defaults for SSCAN do not point to a valid git repository at $repourl"
repo_exists IOCSTATS || fail "Defaults for IOCSTATS do not point to a valid git repository at $repourl"
repo_exists MOTOR || fail "Defaults for MOTOR do not point to a valid git repository at $repourl"
repo_exists IPAC || fail "Defaults for IPAC do not point to a valid git repository at $repourl"
# test update_release_local()
######################################################################
@@ -111,34 +126,34 @@ rm -f $release_local
# Set a module
update_release_local MOD1 /tmp/mod1
updated_line="MOD1=/tmp/mod1"
grep -q "MOD1=" $release_local || die "Line for MOD1 not added to RELEASE.local"
grep -q "MOD1=" $release_local || fail "Line for MOD1 not added to RELEASE.local"
existing_line=$(grep "MOD1=" $release_local)
[ "${existing_line}" = "${updated_line}" ] || die "Wrong line for MOD1 in RELEASE.local (expected=\"$updated_line\" found=\"$existing_line\")"
[ "${existing_line}" = "${updated_line}" ] || fail "Wrong line for MOD1 in RELEASE.local (expected=\"$updated_line\" found=\"$existing_line\")"
# Set base
update_release_local EPICS_BASE /tmp/base
updated_line="EPICS_BASE=/tmp/base"
grep -q "EPICS_BASE=" $release_local || die "Line for EPICS_BASE not added to RELEASE.local"
grep -q "EPICS_BASE=" $release_local || fail "Line for EPICS_BASE not added to RELEASE.local"
# Set another module
update_release_local MOD2 /tmp/mod2
updated_line="MOD2=/tmp/mod2"
grep -q "MOD2=" $release_local || die "Line for MOD2 not added to RELEASE.local"
grep -q "MOD2=" $release_local || fail "Line for MOD2 not added to RELEASE.local"
existing_line=$(grep "MOD2=" $release_local)
[ "${existing_line}" = "${updated_line}" ] || die "Wrong line for MOD2 in RELEASE.local (expected=\"$updated_line\" found=\"$existing_line\")"
tail -n 1 $release_local | grep -q "EPICS_BASE=" || die "Line for EPICS_BASE not moved to the end of RELEASE.local"
[ "${existing_line}" = "${updated_line}" ] || fail "Wrong line for MOD2 in RELEASE.local (expected=\"$updated_line\" found=\"$existing_line\")"
tail -n 1 $release_local | grep -q "EPICS_BASE=" || fail "Line for EPICS_BASE not moved to the end of RELEASE.local"
# Update a module
update_release_local MOD1 /tmp/mod1b
updated_line="MOD1=/tmp/mod1b"
grep -q "MOD1=" $release_local || die "Line for MOD1 not present in RELEASE.local"
grep -q "MOD1=" $release_local || fail "Line for MOD1 not present in RELEASE.local"
existing_line=$(grep "MOD1=" $release_local)
[ "${existing_line}" = "${updated_line}" ] || die "Wrong line for MOD1 in RELEASE.local (expected=\"$updated_line\" found=\"$existing_line\")"
head -n 1 $release_local | grep -q "MOD1=" || die "Line for MOD1 not at the top of RELEASE.local"
tail -n 1 $release_local | grep -q "EPICS_BASE=" || die "Line for EPICS_BASE not moved to the end of RELEASE.local"
[ "${existing_line}" = "${updated_line}" ] || fail "Wrong line for MOD1 in RELEASE.local (expected=\"$updated_line\" found=\"$existing_line\")"
head -n 1 $release_local | grep -q "MOD1=" || fail "Line for MOD1 not at the top of RELEASE.local"
tail -n 1 $release_local | grep -q "EPICS_BASE=" || fail "Line for EPICS_BASE not moved to the end of RELEASE.local"
# Check that RELEASE.local only contains variable settings
[ $(grep -v -c '[^ =]*=.*' $release_local) -ne 0 ] && die "RELEASE.local contains invalid lines"
[ $(grep -v -c '[^ =]*=.*' $release_local) -ne 0 ] && fail "RELEASE.local contains invalid lines"
rm -f $release_local
@@ -153,23 +168,65 @@ location=$CACHEDIR/base-R3.15.6
# CAREFUL: order of the following check matters (speeds up the test)
# dependency does not exist in the cache
rm -fr $location
rm -fr $location; modules_to_compile=
add_dependency BASE R3.15.6
[ -e $location/LICENSE ] || die "Missing dependency was not checked out"
[ -e $location/LICENSE ] || fail "Missing dependency was not checked out"
BUILT=$(cat "$location/built")
[ "$BUILT" != "$hash_3_15_6" ] && die "Wrong commit of dependency checked out (expected=\"$hash_3_15_6\" found=\"$BUILT\")"
[ "$BUILT" != "$hash_3_15_6" ] && fail "Wrong commit of dependency checked out (expected=\"$hash_3_15_6\" found=\"$BUILT\")"
grep -q "include \$(TOP)/../RELEASE.local" $location/configure/RELEASE && fail "RELEASE in Base includes RELEASE.local"
[ "$do_recompile" ] || fail "do_recompile flag was not set for missing dependency"
echo "$modules_to_compile" | grep -q "$location" || fail "Missing dependency was not set to compile"
# up-to-date dependency does exist in the cache
( cd $CACHEDIR; git clone --quiet --depth 5 --recursive --branch R3.15.6 https://github.com/epics-base/epics-base.git base-R3.15.6 )
rm -f $location/LICENSE
unset do_recompile; modules_to_compile=
add_dependency BASE R3.15.6
[ -e $location/LICENSE ] && die "Existing correct dependency was checked out on top"
[ -e $location/LICENSE ] && fail "Existing correct dependency was checked out on top"
[ "$do_recompile" ] && fail "do_recompile flag was set for up-to-date dependency"
echo "$modules_to_compile" | grep -q "$location" && fail "Up-to-date dependency was set to compile"
do_recompile=yes
add_dependency BASE R3.15.6
echo "$modules_to_compile" | grep -q "$location" || fail "Up-to-date module was not set to compile wile do_recompile=yes"
# dependency in the cache is outdated
echo "nottherighthash" > "$location/built"
unset do_recompile
add_dependency BASE R3.15.6
[ -e $location/LICENSE ] || die "Outdated dependency was not checked out"
[ -e $location/LICENSE ] || fail "Outdated dependency was not checked out"
BUILT=$(cat "$location/built")
[ "$BUILT" != "$hash_3_15_6" ] && die "Wrong commit of dependency checked out (expected=\"$hash_3_15_6\" found=\"$BUILT\")"
[ "$BUILT" != "$hash_3_15_6" ] && fail "Wrong commit of dependency checked out (expected=\"$hash_3_15_6\" found=\"$BUILT\")"
[ "$do_recompile" ] || fail "do_recompile flag was not set for outdated dependency"
echo "$modules_to_compile" | grep -q "$location" || fail "Outdated dependency was not set to compile"
rm -fr $location
# msi is automatically added to 3.14
rm -fr $location; modules_to_compile=
location=$CACHEDIR/base-R3.14.12.1
rm -fr $location;
add_dependency BASE R3.14.12.1
[ -e $location/src/dbtools/msi.c ] || fail "MSI was not added to Base 3.14"
rm -fr $CACHEDIR/*; modules_to_compile=
# missing inclusion of RELEASE.local in configure/RELEASE
location=$CACHEDIR/std-R3-4
add_dependency STD R3-4
grep -q "include \$(TOP)/../RELEASE.local" $location/configure/RELEASE || fail "Inclusion of RELEASE.local not added to configure/RELEASE"
rm -fr $location; modules_to_compile=
# correct handling of FOO_RECURSIVE setting (https://github.com/epics-base/ci-scripts/issues/25 regression)
export SSCAN_RECURSIVE=NO
add_dependency SSCAN master
add_dependency ASYN master
[ -e $CACHEDIR/sscan-master/.ci/README.md ] && fail "Sscan was checked out recursively despite SSCAN_RECURSIVE=NO"
[ -e $CACHEDIR/asyn-master/.ci/README.md ] || fail "Asyn was not checked out recursively"
rm -fr $CACHEDIR/*; modules_to_compile=
unset SSCAN_RECURSIVE
export ASYN_RECURSIVE=NO
add_dependency SSCAN master
add_dependency ASYN master
[ -e $CACHEDIR/sscan-master/.ci/README.md ] || fail "Sscan was not checked out recursively"
[ -e $CACHEDIR/asyn-master/.ci/README.md ] && fail "Asyn was checked out recursively despite ASYN_RECURSIVE=NO"
rm -fr $CACHEDIR/*


@@ -14,6 +14,8 @@ cache:
env:
global:
- SETUP_PATH=.ci-local:.ci
# for the sequencer on Windows
- CHOCO=re2c
addons:
apt:
@@ -29,12 +31,19 @@ addons:
- g++-mingw-w64-x86-64
# for RTEMS cross builds
- qemu-system-x86
homebrew:
packages:
# for the sequencer
- re2c
update: true
install:
- ./.ci/travis/prepare.sh
- python .ci/cue.py prepare
script:
- ./.ci/travis/build.sh
- python .ci/cue.py build
- python .ci/cue.py test
- python .ci/cue.py test-results
# If you need to do more during install and build,
# add a local directory to your module and do e.g.
@@ -43,18 +52,22 @@ script:
# Define build jobs
# Well-known variables to use
# SET source setup file
# EXTRA content will be added to make command line
# STATIC set to YES for static build (default: NO)
# TEST set to NO to skip running the tests (default: YES)
# VV set to make build scripts verbose (default: unset)
# SET source setup file
# ADD_MODULES extra modules (for a specific job)
# BCFG build configuration (static/debug/static-debug;
# default: shared-optimized)
# TEST set to NO to skip running the tests (default: YES)
# VV set to make build scripts verbose (default: unset)
# EXTRA content will be added to make command line
# EXTRA1..5 more additional arguments for the make command
# (one argument per variable)
# Usually from setup files, but may be specified or overridden
# on a job line
# MODULES list of dependency modules
# BASE branch or release tag name of the EPICS Base to use
# <MODULE> branch or release tag for a specific module
# ... see README for setup file syntax description
# MODULES list of dependency modules
# BASE branch or release tag name of the EPICS Base to use
# <MODULE> branch or release tag for a specific module
# ... see README for setup file syntax description
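# For illustration only: a single job entry combining several of these
# variables might read
#   - env: SET=stable BCFG=static-debug EXTRA="CMD_CXXFLAGS=-std=c++11"
# where "stable" is a hypothetical setup file name, not one shipped here.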
jobs:
include:
@@ -66,7 +79,7 @@ jobs:
- env: BASE=7.0
compiler: clang
- env: BASE=7.0 EXTRA="CMD_CXXFLAGS=-std=c++11"
- env: BASE=7.0
- env: BASE=7.0 EXTRA="CMD_CXXFLAGS=-std=c++11"
compiler: clang
@@ -79,19 +92,17 @@ jobs:
- env: BASE=7.0 EXTRA="CMD_CXXFLAGS=-std=c++11"
dist: trusty
# Cross-compilations to Windows using MinGW and WINE
- env: BASE=7.0 WINE=32 TEST=NO STATIC=YES
compiler: mingw
# Cross-compilations to Windows using gcc/MinGW and WINE
- env: BASE=7.0 WINE=64 TEST=NO STATIC=NO
compiler: mingw
- env: BASE=7.0 WINE=32 TEST=NO BCFG=static
- env: BASE=7.0 WINE=64 TEST=NO
# Cross-compilation to RTEMS
- env: BASE=7.0 RTEMS=4.10 TEST=NO
- env: BASE=7.0 RTEMS=4.10
- env: BASE=7.0 RTEMS=4.9 TEST=NO
- env: BASE=7.0 RTEMS=4.9
# Other gcc versions (added as an extra package)
@@ -108,4 +119,12 @@ jobs:
- env: BASE=7.0
os: osx
compiler: clang
addons: { homebrew: { packages: ["re2c"], update: true } }
# Windows builds
- env: BASE=7.0
os: windows
compiler: vs2017
- env: BASE=7.0
os: windows


@@ -15,12 +15,14 @@ addons:
- perl
install:
- ./.ci/travis/prepare.sh
- python .ci/cue.py prepare
script:
- ./.ci/travis/build.sh
- python .ci/cue.py build
- python .ci/cue.py test
- python .ci/cue.py test-results
# Build using default gcc for Base branches 7.0 and 3.15
# Build on Linux using default gcc for Base branches 7.0 and 3.15
jobs:
include:


@@ -1,77 +0,0 @@
From 00ee7bf7d3618c748491c88742c011a8353abeba Mon Sep 17 00:00:00 2001
From: Andrew Johnson <anj@anl.gov>
Date: Wed, 24 Oct 2018 14:27:15 -0500
Subject: [PATCH] Add RTEMS-pc386-qemu target, use in Travis-CI builds
---
configure/os/CONFIG.Common.RTEMS-pc386-qemu | 11 +++++++++++
configure/os/CONFIG_SITE.Common.RTEMS-pc386 | 5 -----
configure/os/CONFIG_SITE.Common.RTEMS-pc386-qemu | 9 +++++++++
src/libCom/RTEMS/Makefile | 2 +-
src/tools/makeTestfile.pl | 2 +-
6 files changed, 24 insertions(+), 10 deletions(-)
create mode 100644 configure/os/CONFIG.Common.RTEMS-pc386-qemu
delete mode 100644 configure/os/CONFIG_SITE.Common.RTEMS-pc386
create mode 100644 configure/os/CONFIG_SITE.Common.RTEMS-pc386-qemu
diff --git a/configure/os/CONFIG.Common.RTEMS-pc386-qemu b/configure/os/CONFIG.Common.RTEMS-pc386-qemu
new file mode 100644
index 000000000..684f01a19
--- /dev/null
+++ b/configure/os/CONFIG.Common.RTEMS-pc386-qemu
@@ -0,0 +1,11 @@
+# CONFIG.Common.RTEMS-pc386-qemu
+#
+# Definitions for the RTEMS-pc386-qemu target
+# Site-specific overrides go in CONFIG_SITE.Common.RTEMS-pc386-qemu
+#
+#-------------------------------------------------------
+
+# Include definitions from RTEMS-pc386
+include $(CONFIG)/os/CONFIG.Common.RTEMS-pc386
+
+RTEMS_QEMU_FIXUPS = YES
diff --git a/configure/os/CONFIG_SITE.Common.RTEMS-pc386-qemu b/configure/os/CONFIG_SITE.Common.RTEMS-pc386-qemu
new file mode 100644
index 000000000..027dcf4ab
--- /dev/null
+++ b/configure/os/CONFIG_SITE.Common.RTEMS-pc386-qemu
@@ -0,0 +1,9 @@
+# CONFIG_SITE.Common.RTEMS-pc386-qemu
+#
+# Site-specific overrides for the RTEMS-pc386-qemu target
+#
+
+# If you're building this architecture you _probably_ want to
+# run the tests for it under QEMU, but if not you can turn
+# them off here by commenting out this line:
+CROSS_COMPILER_RUNTEST_ARCHS += RTEMS-pc386-qemu
diff --git a/src/libCom/RTEMS/Makefile b/src/libCom/RTEMS/Makefile
index 2f12b7bf0..22a92733c 100644
--- a/src/libCom/RTEMS/Makefile
+++ b/src/libCom/RTEMS/Makefile
@@ -24,7 +24,7 @@ rtemsCom_SRCS += epicsRtemsInitHookPre.c
rtemsCom_SRCS += epicsRtemsInitHookPost.c
rtemsCom_SRCS += epicsMemFs.c
-ifeq ($(T_A),RTEMS-pc386)
+ifeq ($(RTEMS_BSP),pc386)
rtemsCom_SRCS += ne2kpci.c
endif
diff --git a/src/tools/makeTestfile.pl b/src/tools/makeTestfile.pl
index 73f522034..fb431fe7a 100644
--- a/src/tools/makeTestfile.pl
+++ b/src/tools/makeTestfile.pl
@@ -37,7 +37,7 @@ if( $TA =~ /^win32-x86/ && $HA !~ /^win/ ) {
$exec = "wine64 $exe";
# Run pc386 test harness w/ QEMU
-} elsif( $TA =~ /^RTEMS-pc386$/ ) {
+} elsif( $TA =~ /^RTEMS-pc386-qemu$/ ) {
$exec = "qemu-system-i386 -m 64 -no-reboot -serial stdio -display none -net nic,model=ne2k_pci -net user,restrict=yes -kernel $exe";
# Explicitly fail for other RTEMS targets
--
2.21.0.windows.1


@@ -2,12 +2,14 @@
## Features
- Five parallel runners on Linux/Windows (one runner on MacOS)
- Use different compilers (gcc, clang)
- Use different gcc versions
- Cross-compile for Windows 32bit and 64bit using MinGW and WINE
- Cross-compile for RTEMS 4.9 and 4.10
- Compile on MacOS
- Released versions of dependencies are cached (for faster builds)
- Cross-compile for RTEMS 4.9 and 4.10 (Base >= 3.15)
- Compile natively on MacOS (clang)
- Compile natively on Windows (gcc/MinGW, Visual Studio 2017)
- Built dependencies are cached (for faster builds).
## How to Use these Scripts
@@ -34,7 +36,7 @@
Travis to run.
Build jobs are declared in the list following the `jobs: include:`
declaration. Each element (starting with `-` in column 3) defines the
declaration. Each element (starting with a dash) defines the
settings for one build job. `env:` controls the setting of environment
variables, `dist:` specifies the Linux distribution,
`os:` the operating system.
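
For illustration only (the values are hypothetical, not taken from this
repository's configuration), one such element might look like:

  - env: BASE=3.15 SET=stable TEST=NO
    dist: trusty
    compiler: clang

where `stable` would be the name of a local setup file.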
@@ -43,3 +45,18 @@
6. Push your changes and check
[travis-ci.org](https://travis-ci.org/) for your build results.
## Caches
Travis keeps the caches separate for different jobs. As soon as the job
description (in the `.travis.yml` configuration file) or its environment
settings change (adding a space character is enough), the cache is different
and will be rebuilt when the job runs.
This also means that changing a value inside a setup file will _not_
invalidate the cache - in that case you will have to manually delete the cache
through the Travis web interface. (Or add a space character in the job
configuration.)
Caches are automatically removed after approx. four weeks.
Your jobs will have to rebuild them once in a while.


@@ -1,22 +0,0 @@
#!/bin/bash
set -e
# Set VV in .travis.yml to make scripts verbose
[ "$VV" ] && set -x
CACHEDIR="$HOME/.cache"
eval $(grep "EPICS_BASE=" ${CACHEDIR}/RELEASE.local)
export EPICS_BASE
[ -z "$EPICS_HOST_ARCH" -a -f $EPICS_BASE/src/tools/EpicsHostArch.pl ] && EPICS_HOST_ARCH=$(perl $EPICS_BASE/src/tools/EpicsHostArch.pl)
[ -z "$EPICS_HOST_ARCH" -a -f $EPICS_BASE/startup/EpicsHostArch.pl ] && EPICS_HOST_ARCH=$(perl $EPICS_BASE/startup/EpicsHostArch.pl)
export EPICS_HOST_ARCH
make -j2 $EXTRA
if [ "$TEST" != "NO" ]
then
make tapfiles
make -s test-results
fi


@@ -1,204 +0,0 @@
#!/bin/bash
set -e
# Set VV in .travis.yml to make scripts verbose
[ "$VV" ] && set -x
# Perl version of "readlink -f" (which MacOS does not provide)
readlinkf() { perl -MCwd -e 'print Cwd::abs_path shift' "$1"; }
SETUP_DIRS=$(echo $SETUP_PATH | tr ":" "\n")
SCRIPTDIR=$(dirname $(readlinkf $0))
CURDIR="$PWD"
CACHEDIR="$HOME/.cache"
# source functions
. $SCRIPTDIR/utils.sh
# Load settings
# -------------
fold_start load.settings "Loading settings"
# load default settings for well-known modules
source_set defaults
# source configured settings
[ -z "${SET+x}" ] || source_set $SET
fold_end load.settings
# Check out dependencies
# ----------------------
fold_start check.out.dependencies "Checking/cloning dependencies"
for mod in BASE $MODULES
do
mod_uc=$(echo $mod | tr 'a-z' 'A-Z')
eval add_dependency $mod_uc \${${mod_uc}:=master}
done
[ -e ./configure ] && cp ${CACHEDIR}/RELEASE.local ./configure/RELEASE.local
fold_end check.out.dependencies
# Set up compiler
# ---------------
fold_start set.up.epics_build "Setting up EPICS build system"
eval $(grep "EPICS_BASE=" ${CACHEDIR}/RELEASE.local)
export EPICS_BASE
echo "EPICS_BASE=$EPICS_BASE"
[ -z "$EPICS_HOST_ARCH" -a -f $EPICS_BASE/src/tools/EpicsHostArch.pl ] && EPICS_HOST_ARCH=$(perl $EPICS_BASE/src/tools/EpicsHostArch.pl)
[ -z "$EPICS_HOST_ARCH" -a -f $EPICS_BASE/startup/EpicsHostArch.pl ] && EPICS_HOST_ARCH=$(perl $EPICS_BASE/startup/EpicsHostArch.pl)
export EPICS_HOST_ARCH
echo "EPICS_HOST_ARCH=$EPICS_HOST_ARCH"
if echo ${modules_to_compile} | grep -q "$EPICS_BASE"
then
# requires wine and g++-mingw-w64-i686
if [ "$WINE" = "32" ]
then
echo "Cross mingw32"
sed -i -e '/CMPLR_PREFIX/d' $EPICS_BASE/configure/os/CONFIG_SITE.linux-x86.win32-x86-mingw
cat << EOF >> $EPICS_BASE/configure/os/CONFIG_SITE.linux-x86.win32-x86-mingw
CMPLR_PREFIX=i686-w64-mingw32-
EOF
cat << EOF >> $EPICS_BASE/configure/CONFIG_SITE
CROSS_COMPILER_TARGET_ARCHS+=win32-x86-mingw
EOF
elif [ "$WINE" = "64" ]
then
echo "Cross mingw64"
sed -i -e '/CMPLR_PREFIX/d' $EPICS_BASE/configure/os/CONFIG_SITE.linux-x86.windows-x64-mingw
cat << EOF >> $EPICS_BASE/configure/os/CONFIG_SITE.linux-x86.windows-x64-mingw
CMPLR_PREFIX=x86_64-w64-mingw32-
EOF
cat << EOF >> $EPICS_BASE/configure/CONFIG_SITE
CROSS_COMPILER_TARGET_ARCHS+=windows-x64-mingw
EOF
fi
if [ "$STATIC" = "YES" ]
then
echo "Build static libraries/executables"
cat << EOF >> $EPICS_BASE/configure/CONFIG_SITE
SHARED_LIBRARIES=NO
STATIC_BUILD=YES
EOF
fi
HOST_CCMPLR_NAME=`echo "$TRAVIS_COMPILER" | sed -E 's/^([[:alpha:]][^-]*(-[[:alpha:]][^-]*)*)+(-[0-9\.]+)?$/\1/g'`
HOST_CMPLR_VER_SUFFIX=`echo "$TRAVIS_COMPILER" | sed -E 's/^([[:alpha:]][^-]*(-[[:alpha:]][^-]*)*)+(-[0-9\.]+)?$/\3/g'`
HOST_CMPLR_VER=`echo "$HOST_CMPLR_VER_SUFFIX" | cut -c 2-`
case "$HOST_CCMPLR_NAME" in
clang)
echo "Host compiler is clang"
HOST_CPPCMPLR_NAME=$(echo "$HOST_CCMPLR_NAME" | sed 's/clang/clang++/g')
cat << EOF >> $EPICS_BASE/configure/os/CONFIG_SITE.Common.$EPICS_HOST_ARCH
GNU = NO
CMPLR_CLASS = clang
CC = ${HOST_CCMPLR_NAME}$HOST_CMPLR_VER_SUFFIX
CCC = ${HOST_CPPCMPLR_NAME}$HOST_CMPLR_VER_SUFFIX
EOF
# hack
sed -i -e 's/CMPLR_CLASS = gcc/CMPLR_CLASS = clang/' $EPICS_BASE/configure/CONFIG.gnuCommon
${HOST_CCMPLR_NAME}$HOST_CMPLR_VER_SUFFIX --version
;;
gcc)
echo "Host compiler is GCC"
HOST_CPPCMPLR_NAME=$(echo "$HOST_CCMPLR_NAME" | sed 's/gcc/g++/g')
cat << EOF >> $EPICS_BASE/configure/os/CONFIG_SITE.Common.$EPICS_HOST_ARCH
CC = ${HOST_CCMPLR_NAME}$HOST_CMPLR_VER_SUFFIX
CCC = ${HOST_CPPCMPLR_NAME}$HOST_CMPLR_VER_SUFFIX
EOF
${HOST_CCMPLR_NAME}$HOST_CMPLR_VER_SUFFIX --version
;;
*)
echo "Host compiler is default"
gcc --version
;;
esac
cat <<EOF >> $EPICS_BASE/configure/CONFIG_SITE
USR_CPPFLAGS += $USR_CPPFLAGS
USR_CFLAGS += $USR_CFLAGS
USR_CXXFLAGS += $USR_CXXFLAGS
EOF
# set RTEMS to eg. "4.9" or "4.10"
# requires qemu, bison, flex, texinfo, install-info
if [ -n "$RTEMS" ]
then
echo "Cross RTEMS${RTEMS} for pc386"
sed -i -e '/^RTEMS_VERSION/d' -e '/^RTEMS_BASE/d' $EPICS_BASE/configure/os/CONFIG_SITE.Common.RTEMS
cat << EOF >> $EPICS_BASE/configure/os/CONFIG_SITE.Common.RTEMS
RTEMS_VERSION=$RTEMS
RTEMS_BASE=$HOME/.rtems
EOF
cat << EOF >> $EPICS_BASE/configure/CONFIG_SITE
CROSS_COMPILER_TARGET_ARCHS += RTEMS-pc386-qemu
EOF
fi
else
echo "${ANSI_GREEN}EPICS build system already set up (Base was loaded from cache)${ANSI_RESET}"
fi
# Download RTEMS cross compiler
if [ -n "$RTEMS" ]
then
echo "Downloading RTEMS${RTEMS} cross compiler for pc386"
curl -L "https://github.com/mdavidsaver/rsb/releases/download/20171203-${RTEMS}/i386-rtems${RTEMS}-trusty-20171203-${RTEMS}.tar.bz2" \
| tar -C / -xmj
fi
fold_end set.up.compiler
echo "\$ make --version"
make --version
# Build required dependencies
# ---------------------------
fold_start build.dependencies "Build missing/outdated dependencies"
[ "$VV" ] && silent="-s" || silent=
[ -z "$modules_to_compile" ] && echo "${ANSI_GREEN}All dependency modules are up-to-date (nothing to do)${ANSI_RESET}"
for module in ${modules_to_compile}
do
eval name=\${module#${CACHEDIR}/}
fold_start build.$name "Build $name"
make -j2 $silent -C $module $EXTRA
fold_end build.$name
done
fold_end build.dependencies
echo "${ANSI_BLUE}Dependency module information${ANSI_RESET}"
echo "Module Tag Binaries Commit"
echo "-----------------------------------------------------------------------------------"
for mod in base $MODULES
do
mod_uc=$(echo $mod | tr 'a-z' 'A-Z')
eval tag=\${${mod_uc}}
eval dir=${CACHEDIR}/\${${mod_uc}_DIRNAME}-$tag
echo "$modules_to_compile" | grep -q "$dir" && stat="rebuilt" || stat="from cache"
commit=$(git -C $dir log -n1 --oneline)
printf "%-10s %-12s %-11s %s\n" "$mod" "$tag" "$stat" "$commit"
done
echo "${ANSI_BLUE}Contents of RELEASE.local${ANSI_RESET}"
cat ${CACHEDIR}/RELEASE.local


@@ -1,204 +0,0 @@
# Utility functions for Travis scripts in ci-scripts
#
# This file is sourced by the executable scripts
# Portable version of 'sed -i' (that MacOS doesn't provide)
# sedi (cmd, file)
# Do the equivalent of "sed -i cmd file"
sedi () {
cat $2 | sed "$1" > $2.tmp$$; mv -f $2.tmp$$ $2
}
# Setup ANSI Colors
export ANSI_RED="\033[31;1m"
export ANSI_GREEN="\033[32;1m"
export ANSI_YELLOW="\033[33;1m"
export ANSI_BLUE="\033[34;1m"
export ANSI_RESET="\033[0m"
export ANSI_CLEAR="\033[0K"
# Travis log fold control
# from https://github.com/travis-ci/travis-rubies/blob/build/build.sh
fold_start() {
echo -en "travis_fold:start:$1\\r${ANSI_YELLOW}$2${ANSI_RESET}"
}
fold_end() {
echo -en "travis_fold:end:$1\\r"
}
# source_set(settings)
#
# Source a settings file (extension .set) found in the SETUP_DIRS path
# May be called recursively (from within a settings file)
declare -a SEEN_SETUPS
source_set() {
local set_file=${1//[$'\r']}
local set_dir
local found=0
if [ -z "${SETUP_DIRS}" ]
then
echo "${ANSI_RED}Search path for setup files (SETUP_PATH) is empty${ANSI_RESET}"
[ "$UTILS_UNITTEST" ] || exit 1
fi
for set_dir in ${SETUP_DIRS}
do
if [ -e $set_dir/$set_file.set ]
then
if [[ " ${SEEN_SETUPS[@]} " =~ " $set_dir/$set_file.set " ]]
then
echo "Ignoring already included setup file $set_dir/$set_file.set"
return
fi
SEEN_SETUPS+=($set_dir/$set_file.set)
echo "Loading setup file $set_dir/$set_file.set"
local line
while read -r line
do
[ -z "$line" ] && continue
echo $line | grep -q "^#" && continue
if echo $line | grep -q "^include\W"
then
source_set $(echo $line | awk '{ print $2 }')
continue
fi
if echo "$line" | grep -q "^\w\+="
then
IFS== read var value <<< "${line//[$'\r']}"
value=$(sed "s/^\(\"\)\(.*\)\1\$/\2/g" <<< "$value") # remove surrounding quotes
eval [ "\${$var}" ] || eval "$var=\$value"
fi
done < $set_dir/$set_file.set
found=1
break
fi
done
if [ $found -eq 0 ]
then
echo "${ANSI_RED}Setup file $set_file.set does not exist in SETUP_DIRS search path ($SETUP_DIRS)${ANSI_RESET}"
[ "$UTILS_UNITTEST" ] || exit 1
fi
}
# update_release_local(varname, place)
# varname name of the variable to set in RELEASE.local
# place place (absolute path) of where variable should point to
#
# Manipulate RELEASE.local in the cache location:
# - replace "$varname=$place" line if it exists and has changed
# - otherwise add "$varname=$place" line and possibly move EPICS_BASE=... line to the end
update_release_local() {
local var=$1
local place=$2
local release_local=${CACHEDIR}/RELEASE.local
local updated_line="${var}=${place}"
local ret=0
[ -e ${release_local} ] && grep -q "${var}=" ${release_local} || ret=$?
if [ $ret -eq 0 ]
then
existing_line=$(grep "${var}=" ${release_local})
if [ "${existing_line}" != "${updated_line}" ]
then
sedi "s|${var}=.*|${var}=${place}|g" ${release_local}
fi
else
echo "$var=$place" >> ${release_local}
ret=0
grep -q "EPICS_BASE=" ${release_local} || ret=$?
if [ $ret -eq 0 ]
then
base_line=$(grep "EPICS_BASE=" ${release_local})
sedi '\|EPICS_BASE=|d' ${release_local}
echo ${base_line} >> ${release_local}
fi
fi
}
# add_dependency(dep, tag)
#
# Add a dependency to the cache area:
# - check out (recursive if configured) in the CACHE area unless it already exists and the
# required commit has been built
# - Defaults:
# $dep_DIRNAME = lower case ($dep)
# $dep_REPONAME = lower case ($dep)
# $dep_REPOURL = GitHub / $dep_REPOOWNER (or $REPOOWNER or epics-modules) / $dep_REPONAME .git
# $dep_VARNAME = $dep
# $dep_DEPTH = 5
# $dep_RECURSIVE = 1/YES (0/NO to for a flat clone)
# - Add $dep_VARNAME line to the RELEASE.local file in the cache area (unless already there)
# - Add full path to $modules_to_compile
add_dependency() {
curdir="$PWD"
DEP=$1
TAG=$2
dep_lc=$(echo $DEP | tr 'A-Z' 'a-z')
eval dirname=\${${DEP}_DIRNAME:=${dep_lc}}
eval reponame=\${${DEP}_REPONAME:=${dep_lc}}
eval repourl=\${${DEP}_REPOURL:="https://github.com/\${${DEP}_REPOOWNER:=${REPOOWNER:-epics-modules}}/${reponame}.git"}
eval varname=\${${DEP}_VARNAME:=${DEP}}
eval recursive=\${${DEP}_RECURSIVE:=1}
recursive=$(echo $recursive | tr 'A-Z' 'a-z')
[ "$recursive" != "0" -a "$recursive" != "no" ] && recurse="--recursive"
# determine if $DEP points to a valid release or branch
if ! git ls-remote --quiet --exit-code --refs $repourl "$TAG" > /dev/null 2>&1
then
echo "${ANSI_RED}$TAG is neither a tag nor a branch name for $DEP ($repourl)${ANSI_RESET}"
[ "$UTILS_UNITTEST" ] || exit 1
fi
if [ -e $CACHEDIR/$dirname-$TAG ]
then
[ -e $CACHEDIR/$dirname-$TAG/built ] && BUILT=$(cat $CACHEDIR/$dirname-$TAG/built) || BUILT="never"
HEAD=$(cd "$CACHEDIR/$dirname-$TAG" && git log -n1 --pretty=format:%H)
if [ "$HEAD" != "$BUILT" ]
then
rm -fr $CACHEDIR/$dirname-$TAG
else
echo "Found $TAG of dependency $DEP in $CACHEDIR/$dirname-$TAG"
fi
fi
if [ ! -e $CACHEDIR/$dirname-$TAG ]
then
cd $CACHEDIR
eval depth=\${${DEP}_DEPTH:-"-1"}
case ${depth} in
-1 )
deptharg="--depth 5"
;;
0 )
deptharg=""
;;
* )
deptharg="--depth $depth"
;;
esac
echo "Cloning $TAG of dependency $DEP into $CACHEDIR/$dirname-$TAG"
git clone --quiet $deptharg $recurse --branch "$TAG" $repourl $dirname-$TAG
( cd $dirname-$TAG && git log -n1 )
modules_to_compile="${modules_to_compile} $CACHEDIR/$dirname-$TAG"
# run hook
eval hook="\${${DEP}_HOOK}"
if [ "$hook" ]
then
if [ -x "$curdir/$hook" ]
then
echo "Running hook $hook in $CACHEDIR/$dirname-$TAG"
( cd $CACHEDIR/$dirname-$TAG; "$curdir/$hook" )
else
echo "${ANSI_RED}Hook script $hook is not executable or does not exist.${ANSI_RESET}"
exit 1
fi
fi
HEAD=$(cd "$CACHEDIR/$dirname-$TAG" && git log -n1 --pretty=format:%H)
echo "$HEAD" > "$CACHEDIR/$dirname-$TAG/built"
cd "$curdir"
fi
update_release_local ${varname} $CACHEDIR/$dirname-$TAG
}