-
Notifications
You must be signed in to change notification settings - Fork 8
/
Copy pathrun_atest
344 lines (291 loc) · 8.12 KB
/
run_atest
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
#!/bin/sh
#
# $Progeny$
#
# Copyright 2005 Progeny Linux Systems, Inc.
#
# This file is part of PDK.
#
# PDK is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# PDK is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PDK; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
########################################################################
# run_atest
#
# Execute the acceptance test suite
# run_atest [<test> [<test> ...]]   (no arguments runs all tests)
#
# All test results (along with a great deal of other output) go to stdout.
# NOTES
# * if you start a process, put the pid in a file in a dir called
# 'run' under your current directory, to ensure that it is killed
# off in cleanup.
# * set $snapshot if you want a tar file created, otherwise unset it.
# * The $log is critical, it's how we tally success/failure among the
# tests.
# Set up colors for the pass/fail banners.  tput fails (and prints to
# stderr) when TERM is unset or unsupported; in that case fall back to
# empty strings so output is simply uncolored instead of noisy.
pass="$(tput setaf 7 2>/dev/null; tput setab 2 2>/dev/null; tput bold 2>/dev/null)" || true
fail="$(tput setaf 7 2>/dev/null; tput setab 1 2>/dev/null; tput bold 2>/dev/null)" || true
reset="$(tput sgr0 2>/dev/null)" || true
# Abort the run on any unhandled error from here on.
set -e
# Non-empty: build and install PDK into each sandbox (cleared by -I).
install_in_sandbox=1
# setup_initial <dir>
# Build PDK and install it into <dir> (skipped when -I cleared
# $install_in_sandbox), link in the shared test data, and create the
# run/ directory used to track background-process pids.
# All expansions are quoted so temp paths with unusual characters work.
setup_initial() {
    tdir=$1
    if [ -n "$install_in_sandbox" ]; then
        rm -rf build/
        python setup.py --quiet build --build-base="${tdir}/tmp-build" \
            install --home="${tdir}"
        # Pre-compile the installed modules so per-test timing is not
        # skewed by first-use byte-compilation.
        echo "import compileall; " \
            "compileall.compile_dir('${tdir}/lib', quiet=True)" \
            | python
        rm -rf "${tdir}/tmp-build"
    fi
    ln -s "${dev_dir}/atest/packages" "${tdir}"
    ln -s "${dev_dir}/atest" "${tdir}"
    mkdir "${tdir}/run"
}
# setup <dir>
# Populate a fresh per-test temp dir from the prebuilt setup template.
# The template path is quoted (the glob stays outside the quotes so it
# still expands) to survive unusual characters in $setup_dir.
setup() {
    tdir=$1
    cp -r "${setup_dir}"/* "${tdir}"
}
# cleanup [-s] [-r <result>] [-m <message>]
# Tear down one test run and record its outcome, then exit the test
# subshell.  -s requests a snapshot tarball, -r logs a result row,
# -m attaches a message to that row.
cleanup() {
# Cancel the "unexpected exit" trap: from here, exiting is deliberate.
trap - 0
message=''
args=$(getopt -o sr:m: -- "$@")
eval set -- "$args"
while true; do
case "$1" in
-s) shift; snapshot=1;;
-r) shift; result="$1"; shift;;
-m) shift; message="$1"; shift;;
--) shift; break;;
esac
done
# Kill all processes identified in the run/ dir
# under the tmp.
cd ${tmp_dir}
if ls run/* 2>/dev/null; then
for pid in $(cat run/*); do
kill -TERM ${pid} || true
done
fi
wait
# Create a snapshot, if desired
if [ -n "${snapshot}" ]; then
tar zcfC ${dev_dir}/${test_name}.snap.tar.gz ${tmp_dir} .
fi
if [ -n "${tmp_dir}" ]; then
rm -rf ${tmp_dir}
fi
# Append a tab-separated "<test>\t<result>\t<message>" row to the tally
# log that the final report reads back.
# NOTE(review): "echo -e" is a bashism; a strict POSIX /bin/sh (e.g.
# dash) would print a literal "-e" -- confirm the intended shell.
if [ -n "${result}" ]; then
echo -e "${test_name}\t${result}\t${message}" >>${log}
fi
# Colorized results are nice as work scrolls past.
if [ $report_colorized_results ]; then
if [ "${result}" = "pass" ]; then
echo -e "${pass}${test_name}${reset} ${result} ${message} " >&2
else
echo -e "${fail}${test_name}${reset} ${result} ${message} " >&2
fi
echo
fi
exit
}
# Record a test failure: stop command tracing, default the message if
# none was given, and hand off to cleanup with a snapshot requested.
fail () {
    set +x
    message=${1:-'no message given'}
    cleanup -s -r fail -m "${message}"
}
# Record a test success: stop command tracing and clean up without
# requesting a snapshot.
pass () {
    set +x
    cleanup -r pass
}
# Signal handler: flag the batch runner to halt (via $halt_file), then
# record this test as a failure.
stop () {
    set +x
    printf '1\n' > "$halt_file"
    fail 'unexpected termination signal'
}
# run_test
# Execute a single acceptance test ($test) inside an isolated subshell
# with its own temp dir, traps, and sandboxed PATH/PYTHONPATH.
run_test() {
# Begin with a subshell
(
# Any exit not routed through pass/fail is an unexpected failure;
# termination signals halt the whole batch via stop().
trap "fail \"unexpected exit \$?\"" 0
trap "stop" 1 2 3 15
test_name=$(basename $test)
tmp_dir=$(mktemp -dt ${test_name}.XXXXXX)
echo "--------------------------------------------------------"
echo " ${test_name} running in ${tmp_dir}"
setup ${tmp_dir}
export PACKAGES=${tmp_dir}/packages
mkdir -p ${tmp_dir}/bin
# Prefer the sandboxed install over anything on the host.
PATH=${tmp_dir}/bin:$PATH
PYTHONPATH=${tmp_dir}/lib/python:$PYTHONPATH
export PYTHONPATH
# A test that unexpectedly spawns an editor should fail fast.
EDITOR=false
export EDITOR
cd ${tmp_dir}
echo "------------------------------------"
if [ -z "${quiet}" ]; then
set -x
fi
# Source the test so it runs under the traps/env above; reaching the
# end of the sourced file means success.
. $test
pass
# Swallow the subshell's status so one failing test does not abort
# the whole batch under set -e.
) || true
}
# Parse command-line flags:
#   -d  regenerate tests from the docs, then exit
#   -I  skip installing PDK into the sandbox
#   -q  suppress command tracing while tests run
future_tests=""
args=$(getopt -o dqI -- "$@")
eval set -- "$args"
while true; do
    opt=$1
    shift
    case "$opt" in
        -d) docs_only=1 ;;
        -I) unset install_in_sandbox ;;
        -q) quiet=1 ;;
        --) break ;;
    esac
done
# Freshen the generated acceptance tests from the literate docs.
rm -f atest/*.fw.sh
# Iterate the glob directly instead of parsing ls output; the guard
# skips the literal pattern when doc/ has no .fw files (sh leaves an
# unmatched glob unexpanded).
for test in doc/*.fw; do
    [ -e "$test" ] || continue
    fw +Q +L -T +U "$test"
done
if [ -n "$docs_only" ]; then
    exit 0
fi
if [ -z "$*" ]; then
    # No tests named: run everything under atest/.  The "|| true" keeps
    # a failing ls (no matches) from aborting under set -e before the
    # friendly "No tests found!" diagnostic can print.
    tests=$(ls atest/*.sh 2>/dev/null) || true
    if [ -z "$tests" ]; then
        echo >&2 "No tests found!"
        exit 1
    fi
    future_tests=$(ls atest/future/*.sh 2>/dev/null) || true
    report_colorized_results="colorize"
else
    # Explicit list: run exactly what was asked for, serially and
    # uncolored.  $* joins the args into one well-defined string
    # (assigning "$@" to a scalar is ill-specified).
    tests=$*
    report_colorized_results=""
    no_parallel="just serial please"
fi
# Create a fresh report file; $log is the tab-separated tally that the
# final pass/fail accounting reads back.
dev_dir=$(pwd)
log=${dev_dir}/atest.log
rm -f "${log}"
rm -rf "${dev_dir}/atest.log.d"
mkdir -p "${dev_dir}/atest.log.d"
# Build the shared setup template once; each test copies it (see setup).
setup_dir=$(mktemp -dt setup-template.XXXXXX)
setup_initial "${setup_dir}"
run_test_batch() {
for test in $*; do
halt_file="$(mktemp -t atest.halt.XXXXXX)"
run_test $test
halt_contents="$(cat $halt_file)"
rm $halt_file
if [ -n "$halt_contents" ]; then
break
fi
done
}
# Emit a small pseudo-random integer, used to jitter the semaphore
# retry sleep in run_test_batch_in_parallel.  Reads 4 bytes of
# entropy, has od decode the first 2, and strips everything before the
# value on od's first output line (the sed keeps only the last field).
# NOTE(review): this pipeline is sensitive to od's exact output
# format; presumably it yields 0..65535 from the 2 decoded bytes --
# verify before restyling.
random() {
head -c4 /dev/urandom | od -N2 -tu4 | sed -ne '1s/.* //p'
}
# run_test_batch_in_parallel <batchname> <test>...
# Fan the tests out to $job_count background workers.  Workers pull
# test names one at a time from a shared list file opened on fd 3,
# serialized by a hardlink-based lock; each test writes a private log
# that is merged back into the main $log after all workers finish.
run_test_batch_in_parallel() {
batchname=$1
shift
list=$dev_dir/atest.log.d/list.${batchname}
for test in $*; do
echo "$test" >>$list
done
# fd 3 is inherited by every worker; each read consumes one name.
exec 3<$list
semaphore_file="$(mktemp -t atest.semaphore.XXXXXX)"
rm $semaphore_file
job_count=4
save_log="$log"
for i in $(seq $job_count); do
(
child_holder="$(mktemp -t atest.child.XXXXXX)"
echo $$ >$child_holder
while true; do
# try to acquire lock
# (ln fails if the target exists, so only one worker reads fd 3 at a
# time; losers back off a random fraction of a second to avoid
# lockstep retries)
if ! ln $child_holder $semaphore_file 2>/dev/null; then
sleep 0.$(( $(random) % 10 ))
continue
fi
read <&3 test || true
# release lock
rm $semaphore_file
# Empty read means the list is exhausted; this worker is done.
if [ -z "$test" ]; then
break
fi
test_key=$(basename $test)
# Redirect this worker's tally to a per-test log so workers don't
# interleave writes in the shared log.
log=$dev_dir/atest.log.d/$test_key.log.$batchname
run_test_batch "$test" >atest.log.d/$test_key.output 2>&1
cat $log
done
) &
done
wait
# Restore the shared log and merge the per-test tallies into it,
# stripping the batch suffix from each per-test log filename.
log="$save_log"
for logfn in $dev_dir/atest.log.d/*.log.$batchname; do
cat $logfn >>$log || true
newlogfn=$(echo "$logfn" | sed s/\\.$batchname//)
mv $logfn $newlogfn
done
}
# Run the tests, serially (explicit list) or in parallel (full suite).
# $tests is intentionally unquoted: it is a space-separated list.
if [ -n "${no_parallel}" ]; then
    run_test_batch $tests
else
    run_test_batch_in_parallel std $tests
fi
# Tally the results: pull every "fail" row out of the tab-separated log.
fail_file="$(mktemp -t atest.fail.XXXXXX)"
awk -v FS='\t' -v OFS='\t' '$2 == "fail" { print $1, $3 }' "${log}" >"${fail_file}"
fail_count="$(wc -l <"${fail_file}")"
# Run future (not-yet-expected-to-pass) tests into their own log; they
# never affect the exit status.
if [ -n "${future_tests}" ]; then
    log=${dev_dir}/future.log
    test -f "${log}" && rm "${log}"
    unset report_colorized_results
    if [ -n "${no_parallel}" ]; then
        run_test_batch ${future_tests}
    else
        run_test_batch_in_parallel future ${future_tests}
    fi
fi
# Clean up our initial setup dir
if [ -n "${setup_dir}" ]; then
    rm -rf "${setup_dir}"
fi
# Print a simple and colorful report.  printf replaces "echo -e",
# which is not portable under #!/bin/sh (dash prints a literal -e).
printf '\n============================================================\n'
if [ "${fail_count}" = 0 ]; then
    echo "${pass}> pass <${reset}"
    status=0
else
    echo "${fail}> fail <${reset}"
    awk -v FS='\t' '{ printf "%-25s %s\n", $1, $2 }' "${fail_file}"
    status=1
fi
# Report on future tests
if [ -n "${future_tests}" ]; then
    printf '\nFuture tests -------------------------------------------\n'
    column -t "${log}"
fi
# Remove the scratch tally unconditionally (it previously leaked
# whenever no future tests ran).
rm -f "${fail_file}"
$DONE_ACTION
exit $status
# vim:set ai et sw=4 ts=4 tw=75: