0023372: use of "diffimage" in test commands
[occt.git] / src / DrawResources / TestCommands.tcl
40093367 1# Copyright (c) 2012 OPEN CASCADE SAS
2#
3# The content of this file is subject to the Open CASCADE Technology Public
4# License Version 6.5 (the "License"). You may not use the content of this file
5# except in compliance with the License. Please obtain a copy of the License
6# at http://www.opencascade.org and read it completely before using this file.
7#
8# The Initial Developer of the Original Code is Open CASCADE S.A.S., having its
9# main offices at: 1, place des Freres Montgolfier, 78280 Guyancourt, France.
10#
11# The Original Code and all software distributed under the License is
12# distributed on an "AS IS" basis, without warranty of any kind, and the
13# Initial Developer hereby disclaims all such warranties, including without
14# limitation, any warranties of merchantability, fitness for a particular
15# purpose or non-infringement. Please see the License for the specific terms
16# and conditions governing the rights and limitations under the License.
17
18############################################################################
19# This file defines scripts for execution of OCCT tests.
20# It should be loaded automatically when DRAW is started, and provides
cc6a292d 21# four top-level commands: 'test', 'testgrid', 'testsummarize', and 'testdiff'.
40093367 22# See OCCT Tests User Guide for description of the test system.
23#
24# Note: procedures with names starting with underscore are for internal use
25# inside the test system.
26############################################################################
27
28# Default verbose level for command _run_test
29set _tests_verbose 0
30
31# regexp for parsing test case results in summary log
32set _test_case_regexp {^CASE\s+([\w.-]+)\s+([\w.-]+)\s+([\w.-]+)\s*:\s*([\w]+)(.*)}
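# Such summary lines are produced by _check_log, e.g. a (hypothetical) line
# "CASE bugs caf bug100: OK" or "CASE bugs caf bug200: FAILED (no final message is found)";
# the regexp captures group, grid, case name, status word, and the rest of the message.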
33
34# Basic command to run indicated test case in DRAW
b725d7c5 35help test {
36 Run specified test case
37 Use: test group grid casename [echo=0]
38 - If echo is set to 0 (default), log is stored in memory and only summary
39 is output (the log can be obtained with command "dlog get")
40 - If echo is set to 1 or "-echo", all commands and results are echoed
41 immediately, but log is not saved and summary is not produced
42}
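# Examples (hypothetical group/grid/case names):
#   test bugs caf bug100          ;# run silently, only the summary is printed
#   test bugs caf bug100 -echo    ;# echo all commands and results immediately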
5df3a117 43proc test {group grid casename {echo 0}} {
40093367 44 # get test case paths (will raise error if input is invalid)
45 _get_test $group $grid $casename dir gridname casefile
46
b725d7c5 47 # if echo specified as "-echo", convert it to bool
48 if { "$echo" == "-echo" } { set echo t }
49
40093367 50 # run test
5df3a117 51 uplevel _run_test $dir $group $gridname $casefile $echo
40093367 52
53 # check log
5df3a117 54 if { ! $echo } {
55 _check_log $dir $group $gridname $casename [dlog get]
56 }
40093367 57
58 return
59}
60
 61# Command to run a set of tests (all groups, one group, or one grid) in DRAW
b725d7c5 62help testgrid {
63 Run all tests, or specified group, or one grid
64 Use: testgrid [group [grid]] [options...]
65 Allowed options are:
66 -parallel N: run N parallel processes (default is number of CPUs, 0 to disable)
67 -refresh N: save summary logs every N seconds (default 60, minimal 1, 0 to disable)
68 -outdir dirname: set log directory (should be empty or non-existing)
69 -overwrite: force writing logs in existing non-empty directory
70 -xml filename: write XML report for Jenkins (in JUnit-like format)
40093367 71}
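# Examples (hypothetical group/grid names and output locations):
#   testgrid                                   ;# run all tests, results in auto-named directory
#   testgrid bugs caf -outdir results/caf      ;# run one grid only, logs under results/caf
#   testgrid -parallel 0 -xml summary.xml      ;# sequential run with JUnit-style XML report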
b725d7c5 72proc testgrid {args} {
40093367 73 global env tcl_platform _tests_verbose
74
75 ######################################################
76 # check arguments
77 ######################################################
78
79 # check that environment variable defining paths to test scripts is defined
80 if { ! [info exists env(CSF_TestScriptsPath)] ||
81 [llength $env(CSF_TestScriptsPath)] <= 0 } {
82 error "Error: Environment variable CSF_TestScriptsPath is not defined"
83 }
84
85 # treat options
b725d7c5 86 set parallel [_get_nb_cpus]
40093367 87 set refresh 60
b725d7c5 88 set logdir ""
40093367 89 set overwrite 0
90 set xmlfile ""
91 for {set narg 0} {$narg < [llength $args]} {incr narg} {
92 set arg [lindex $args $narg]
93
94 # parallel execution
95 if { $arg == "-parallel" } {
96 incr narg
b725d7c5 97 if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
40093367 98 set parallel [expr [lindex $args $narg]]
99 } else {
b725d7c5 100 error "Option -parallel requires argument"
40093367 101 }
102 continue
103 }
104
105 # refresh logs time
106 if { $arg == "-refresh" } {
107 incr narg
b725d7c5 108 if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
40093367 109 set refresh [expr [lindex $args $narg]]
110 } else {
b725d7c5 111 error "Option -refresh requires argument"
112 }
113 continue
114 }
115
116 # output directory
117 if { $arg == "-outdir" } {
118 incr narg
119 if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
120 set logdir [lindex $args $narg]
121 } else {
122 error "Option -outdir requires argument"
40093367 123 }
124 continue
125 }
126
127 # allow overwrite logs
128 if { $arg == "-overwrite" } {
129 set overwrite 1
130 continue
131 }
132
 133 # XML report file name
134 if { $arg == "-xml" } {
135 incr narg
b725d7c5 136 if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
40093367 137 set xmlfile [lindex $args $narg]
 138 } else {
 # next argument is another option, not a file name; step back so it is not skipped
 incr narg -1
 }
139 if { $xmlfile == "" } {
140 set xmlfile TESTS-summary.xml
141 }
142 continue
143 }
144
145 # unsupported option
146 if { [regexp {^-} $arg] } {
147 error "Error: unsupported option \"$arg\""
148 }
149
150 # treat arguments not recognized as options as group and grid names
151 if { ! [info exists groupname] } {
152 set groupname $arg
153 } elseif { ! [info exists gridname] } {
154 set gridname $arg
155 } else {
156 error "Error: cannot interpret argument $narg ($arg): both group and grid names are already defined by previous args!"
157 }
158 }
159
160 # check that target log directory is empty or does not exist
161 set logdir [file normalize [string trim $logdir]]
162 if { $logdir == "" } {
b725d7c5 163 # if specified logdir is empty string, generate unique name like
164 # results_<branch>_<timestamp>
165 set prefix "results"
166 if { ! [catch {exec git branch} gitout] &&
167 [regexp {[*] ([\w]+)} $gitout res branch] } {
168 set prefix "${prefix}_$branch"
169 }
170 set logdir "${prefix}_[clock format [clock seconds] -format {%Y-%m-%dT%H%M}]"
40093367 171 set logdir [file normalize $logdir]
172 }
173 if { [file isdirectory $logdir] && ! $overwrite && ! [catch {glob -directory $logdir *}] } {
174 error "Error: Specified log directory \"$logdir\" is not empty; please clean it before running tests"
175 }
176 if { [catch {file mkdir $logdir}] || ! [file writable $logdir] } {
177 error "Error: Cannot create directory \"$logdir\", or it is not writable"
178 }
179
180 ######################################################
181 # prepare list of tests to be performed
182 ######################################################
183
184 # list of tests, each defined by a list of:
185 # test scripts directory
186 # group (subfolder) name
187 # grid (subfolder) name
188 # test case name
189 # path to test case file
190 set tests_list {}
191
192 # iterate by all script paths
193 foreach dir [_split_path $env(CSF_TestScriptsPath)] {
194 # protection against empty paths
195 set dir [string trim $dir]
196 if { $dir == "" } { continue }
197
198 if { $_tests_verbose > 0 } { _log_and_puts log "Examining tests directory $dir" }
199
200 # check that directory exists
201 if { ! [file isdirectory $dir] } {
202 _log_and_puts log "Warning: directory $dir listed in CSF_TestScriptsPath does not exist, skipped"
203 continue
204 }
205
206 # if test group is specified, check that directory with given name exists in this dir
207 # if not, continue to the next test dir
208 if { [info exists groupname] && $groupname != "" } {
209 if { [file isdirectory $dir/$groupname] } {
210 set groups $groupname
211 } else {
212 continue
213 }
214 } else {
215 # else search all directories in the current dir
216 if [catch {glob -directory $dir -tail -types d *} groups] { continue }
217 }
218
219 # iterate by groups
220 if { $_tests_verbose > 0 } { _log_and_puts log "Groups to be executed: $groups" }
221 foreach group [lsort -dictionary $groups] {
222 if { $_tests_verbose > 0 } { _log_and_puts log "Examining group directory $group" }
223
224 # file grids.list must exist: it defines sequence of grids in the group
225 if { ! [file exists $dir/$group/grids.list] } {
226 _log_and_puts log "Warning: directory $dir/$group does not contain file grids.list, skipped"
227 continue
228 }
229
230 # read grids.list file and make a list of grids to be executed
231 set gridlist {}
232 set fd [open $dir/$group/grids.list]
233 set nline 0
234 while { [gets $fd line] >= 0 } {
235 incr nline
236
237 # skip comments and empty lines
238 if { [regexp "\[ \t\]*\#.*" $line] } { continue }
239 if { [string trim $line] == "" } { continue }
240
241 # get grid id and name
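        # (each line of grids.list is expected to look like "gridid gridname",
        # e.g. a hypothetical entry "001 basic")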
242 if { ! [regexp "^\(\[0-9\]+\)\[ \t\]*\(\[A-Za-z0-9_.-\]+\)\$" $line res gridid grid] } {
243 _log_and_puts log "Warning: cannot recognize line $nline in file $dir/$group/grids.list as \"gridid gridname\"; ignored"
244 continue
245 }
246
247 # if specific grid is requested, check that it is present; otherwise make complete list
248 if { ! [info exists gridname] || $gridname == "" || $gridname == $gridid || $gridname == $grid } {
249 lappend gridlist $grid
250 }
251 }
252 close $fd
253
254 # iterate by all grids
255 foreach grid $gridlist {
256
257 # check if this grid is aliased to another one
258 set griddir $dir/$group/$grid
259 if { [file exists $griddir/cases.list] } {
260 set fd [open $griddir/cases.list]
261 if { [gets $fd line] >= 0 } {
262 set griddir [file normalize $dir/$group/$grid/[string trim $line]]
263 }
264 close $fd
265 }
266
267 # check if grid directory actually exists
268 if { ! [file isdirectory $griddir] } {
269 _log_and_puts log "Error: tests directory for grid $grid ($griddir) is missing; skipped"
270 continue
271 }
272
273 # create directory for logging test results
274 if { $logdir != "" } { file mkdir $logdir/$group/$grid }
275
276 # iterate by all tests in the grid directory
277 if { [catch {glob -directory $griddir -type f *} testfiles] } { continue }
278 foreach casefile [lsort -dictionary $testfiles] {
279 # filter out begin and end files
280 set casename [file tail $casefile]
281 if { $casename == "begin" || $casename == "end" } { continue }
282
283 lappend tests_list [list $dir $group $grid $casename $casefile]
284 }
285 }
286 }
287 }
288 if { [llength $tests_list] < 1 } {
 289 error "Error: no tests are found; check your input arguments and the variable CSF_TestScriptsPath!"
290 }
291
292 ######################################################
293 # run tests
294 ######################################################
295
296 # log command arguments and environment
297 set log "Command: testgrid $args\nHost: [info hostname]\nStarted on: [clock format [clock seconds] -format {%Y-%m-%d %H:%M:%S}]\n"
298 set log "$log\nEnvironment:\n"
299 foreach envar [array names env] {
300 set log "$log$envar=\"$env($envar)\"\n"
301 }
302 set log "$log\n"
303
304 set refresh_timer [clock seconds]
305 uplevel dchrono _timer reset
306 uplevel dchrono _timer start
307
308 # if parallel execution is requested, allocate thread pool
309 if { $parallel > 0 } {
310 if { ! [info exists tcl_platform(threaded)] || [catch {package require Thread}] } {
311 _log_and_puts log "Warning: Tcl package Thread is not available, running in sequential mode"
312 set parallel 0
313 } else {
314 set worker [tpool::create -minworkers $parallel -maxworkers $parallel]
315 # suspend the pool until all jobs are posted, to prevent blocking of the process
316 # of starting / processing jobs by running threads
b725d7c5 317 catch {tpool::suspend $worker}
40093367 318 if { $_tests_verbose > 0 } { _log_and_puts log "Executing tests in (up to) $parallel threads" }
319 }
320 }
321
322 # start test cases
323 foreach test_def $tests_list {
324 set dir [lindex $test_def 0]
325 set group [lindex $test_def 1]
326 set grid [lindex $test_def 2]
327 set casename [lindex $test_def 3]
328 set casefile [lindex $test_def 4]
329
 330 # command making the test generate its images in the results directory
331 set imgdir_cmd ""
332 if { $logdir != "" } { set imgdir_cmd "set imagedir $logdir/$group/$grid" }
333
334 # prepare command file for running test case in separate instance of DRAW
335 set fd_cmd [open $logdir/$group/$grid/${casename}.tcl w]
336 puts $fd_cmd "$imgdir_cmd"
337 puts $fd_cmd "set test_image $casename"
b725d7c5 338 puts $fd_cmd "_run_test $dir $group $grid $casefile t"
5df3a117 339
40093367 340 # use dlog command to obtain complete output of the test when it is available (i.e. since OCCT 6.6.0)
5df3a117 341 # note: this is not needed if echo is set to 1 in call to _run_test above
40093367 342 if { ! [catch {dlog get}] } {
343 puts $fd_cmd "puts \[dlog get\]"
344 } else {
345 # else try to use old-style QA_ variables to get more output...
346 set env(QA_DUMP) 1
347 set env(QA_DUP) 1
348 set env(QA_print_command) 1
349 }
5df3a117 350
40093367 351 # final 'exit' is needed when running on Linux under VirtualGL
352 puts $fd_cmd "exit"
353 close $fd_cmd
8418c617 354
 355 # command to run DRAW with a command file;
356 # note that empty string is passed as standard input to avoid possible
357 # hang-ups due to waiting for stdin of the launching process
358 set command "exec <<{} DRAWEXE -f $logdir/$group/$grid/${casename}.tcl"
359
40093367 360 # alternative method to run without temporary file; disabled as it needs too many backslashes
361# else {
8418c617 362# set command "exec <<\"\" DRAWEXE -c $imgdir_cmd\\\; set test_image $casename\\\; \
40093367 363# _run_test $dir $group $grid $casefile\\\; \
364# puts \\\[dlog get\\\]\\\; exit"
365# }
366
367 # run test case, either in parallel or sequentially
368 if { $parallel > 0 } {
369 # parallel execution
370 set job [tpool::post -nowait $worker "catch \"$command\" output; return \$output"]
371 set job_def($job) [list $logdir $dir $group $grid $casename]
372 } else {
373 # sequential execution
374 catch {eval $command} output
375 _log_test_case $output $logdir $dir $group $grid $casename log
376
377 # update summary log with requested period
378 if { $logdir != "" && $refresh > 0 && [expr [clock seconds] - $refresh_timer > $refresh] } {
379 # update and dump summary
380 _log_summarize $logdir $log
381 set refresh_timer [clock seconds]
382 }
383 }
384 }
385
386 # get results of started threads
387 if { $parallel > 0 } {
b725d7c5 388 catch {tpool::resume $worker}
40093367 389 while { [llength [array names job_def]] > 0 } {
390 foreach job [tpool::wait $worker [array names job_def]] {
391 eval _log_test_case \[tpool::get $worker $job\] $job_def($job) log
392 unset job_def($job)
393 }
394
395 # update summary log with requested period
396 if { $logdir != "" && $refresh > 0 && [clock seconds] > $refresh_timer + $refresh } {
397 _log_summarize $logdir $log
398 set refresh_timer [clock seconds]
399 }
400 }
401 # release thread pool
402 tpool::release $worker
403 }
404
405 uplevel dchrono _timer stop
406 set time [lindex [split [uplevel dchrono _timer show] "\n"] 0]
407
408 ######################################################
409 # output summary logs and exit
410 ######################################################
411
412 _log_summarize $logdir $log $time
413 if { $logdir != "" } {
414 puts "Detailed logs are saved in $logdir"
415 }
416 if { $logdir != "" && $xmlfile != "" } {
417 # XML output file is assumed relative to log dir unless it is absolute
418 if { [ file pathtype $xmlfile] == "relative" } {
419 set xmlfile [file normalize $logdir/$xmlfile]
420 }
421 _log_xml_summary $logdir $xmlfile $log 0
422 puts "XML summary is saved to $xmlfile"
423 }
424
425 return
426}
427
22db40eb 428# Procedure to regenerate summary log from logs of test cases
429help testsummarize {
430 Regenerate summary log in the test directory from logs of test cases.
431 This can be necessary if test grids are executed separately (e.g. on
432 different stations) or some grids have been re-executed.
433 Use: testsummarize dir
434}
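# Example (hypothetical results directory of a previous testgrid run):
#   testsummarize results_master_2013-01-15T1230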
435proc testsummarize {dir} {
436 global _test_case_regexp
437
438 if { ! [file isdirectory $dir] } {
439 error "Error: \"$dir\" is not a directory"
440 }
441
442 # get summary statements from all test cases in one log
443 set log ""
444
445 # to avoid huge listing of logs, first find all subdirectories and iterate
446 # by them, parsing log files in each subdirectory independently
447 foreach grid [glob -directory $dir -types d -tails */*] {
448 foreach caselog [glob -nocomplain -directory [file join $dir $grid] -types f -tails *.log] {
449 set file [file join $dir $grid $caselog]
450 set nbfound 0
451 set fd [open $file r]
452 while { [gets $fd line] >= 0 } {
453 if { [regexp $_test_case_regexp $line res grp grd cas status message] } {
454 if { "[file join $grid $caselog]" != "[file join $grp $grd ${cas}.log]" } {
455 puts "Error: $file contains status line for another test case ($line)"
456 }
457 set log "$log$line\n"
458 incr nbfound
459 }
460 }
461 close $fd
462
463 if { $nbfound != 1 } {
464 puts "Error: $file contains $nbfound status lines, expected 1"
465 }
466 }
467 }
468
469 _log_summarize $dir $log "Summary regenerated from logs at [clock format [clock seconds]]"
470 return
471}
472
cc6a292d 473# Procedure to compare results of two runs of test cases
b725d7c5 474help testdiff {
475 Compare results of two executions of tests (CPU times, ...)
22db40eb 476 Use: testdiff dir1 dir2 [groupname [gridname]] [options...]
b725d7c5 477 Where dir1 and dir2 are directories containing logs of two test runs.
478 Allowed options are:
22db40eb 479 -save filename: save resulting log in specified file (default name is
480 \$dir1/diff-\$dir2.log); HTML log is saved with same name
481 and extension .html
b725d7c5 482 -status {same|ok|all}: filter cases for comparing by their status:
483 same - only cases with same status are compared (default)
484 ok - only cases with OK status in both logs are compared
485 all - results are compared regardless of status
486 -verbose level:
487 1 - output only differences
22db40eb 488 2 - output also list of logs and directories present in one of dirs only
489 3 - (default) output also progress messages
cc6a292d 490}
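# Example (hypothetical result directories of two runs):
#   testdiff results_master_2013-01-15T1230 results_fix_2013-01-16T0900 -status ok -verbose 2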
491proc testdiff {dir1 dir2 args} {
492 if { "$dir1" == "$dir2" } {
493 error "Input directories are the same"
494 }
495
496 ######################################################
497 # check arguments
498 ######################################################
499
500 # treat options
22db40eb 501 set logfile [file join $dir1 "diff-[file tail $dir2].log"]
cc6a292d 502 set basename ""
503 set status "same"
504 set verbose 3
505 for {set narg 0} {$narg < [llength $args]} {incr narg} {
506 set arg [lindex $args $narg]
507
508 # log file name
509 if { $arg == "-save" } {
510 incr narg
22db40eb 511 if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
cc6a292d 512 set logfile [lindex $args $narg]
513 } else {
514 error "Error: Option -save must be followed by log file name"
515 }
516 continue
517 }
518
cc6a292d 519 # status filter
520 if { $arg == "-status" } {
521 incr narg
22db40eb 522 if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
cc6a292d 523 set status [lindex $args $narg]
524 } else { set status "" }
525 if { "$status" != "same" && "$status" != "all" && "$status" != "ok" } {
526 error "Error: Option -status must be followed by one of \"same\", \"all\", or \"ok\""
527 }
528 continue
529 }
530
531 # verbose level
532 if { $arg == "-verbose" } {
533 incr narg
22db40eb 534 if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
cc6a292d 535 set verbose [expr [lindex $args $narg]]
22db40eb 536 } else {
537 error "Error: Option -verbose must be followed by integer verbose level"
cc6a292d 538 }
539 continue
540 }
541
22db40eb 542 if { [regexp {^-} $arg] } {
cc6a292d 543 error "Error: unsupported option \"$arg\""
22db40eb 544 }
545
546 # non-option arguments form a subdirectory path
547 set basename [file join $basename $arg]
cc6a292d 548 }
549
550 # run diff procedure (recursive)
551 _test_diff $dir1 $dir2 $basename $status $verbose log
552
553 # save result to log file
554 if { "$logfile" != "" } {
555 _log_save $logfile $log
22db40eb 556 _log_html_diff "[file rootname $logfile].html" $log $dir1 $dir2
557 puts "Log is saved to $logfile (and .html)"
cc6a292d 558 }
559
560 return
561}
562
22db40eb 563# Procedure to locate data file for test given its name.
564# The search is performed assuming that the function is called
565# from the test case script; the search order is:
566# - subdirectory "data" of the test script (grid) folder
567# - subdirectories in environment variable CSF_TestDataPath
 568# - directory set by the datadir command
569# If file is not found, raises Tcl error.
570proc locate_data_file {filename} {
571 global env groupname gridname casename
572
573 # check if the file is located in the subdirectory data of the script dir
574 set scriptfile [info script]
575 if { $scriptfile != "" } {
576 set path [file join [file dirname $scriptfile] data $filename]
577 if { [file exists $path] } {
578 return [file normalize $path]
579 }
580 }
581
582 # check sub-directories in paths indicated by CSF_TestDataPath
583 if { [info exists env(CSF_TestDataPath)] } {
584 foreach dir [_split_path $env(CSF_TestDataPath)] {
585 while {[llength $dir] != 0} {
586 set name [lindex $dir 0]
587 set dir [lrange $dir 1 end]
588 eval lappend dir [glob -nocomplain -directory $name -type d *]
589 if { [file exists $name/$filename] } {
590 return [file normalize $name/$filename]
591 }
592 }
593 }
594 }
595
596 # check current datadir
597 if { [file exists [uplevel datadir]/$filename] } {
598 return [file normalize [uplevel datadir]/$filename]
599 }
600
601 # raise error
602 error [join [list "Error: file $filename could not be found" \
603 "(should be in paths indicated by CSF_TestDataPath environment variable, " \
604 "or in subfolder data in the script directory)"] "\n"]
605}
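# Example of use inside a test case script (hypothetical data file name):
#   restore [locate_data_file box.brep] b
# If the file is not found in any of the searched locations, a Tcl error is raised
# (missing data files are normally reported with SKIPPED status, cf. the summary legend).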
606
40093367 607# Internal procedure to find test case indicated by group, grid, and test case names;
608# returns:
609# - dir: path to the base directory of the tests group
610# - gridname: actual name of the grid
611# - casefile: path to the test case script
612# if no such test is found, raises error with appropriate message
613proc _get_test {group grid casename _dir _gridname _casefile} {
614 upvar $_dir dir
615 upvar $_gridname gridname
616 upvar $_casefile casefile
617
618 global env
619
620 # check that environment variable defining paths to test scripts is defined
621 if { ! [info exists env(CSF_TestScriptsPath)] ||
622 [llength $env(CSF_TestScriptsPath)] <= 0 } {
623 error "Error: Environment variable CSF_TestScriptsPath is not defined"
624 }
625
626 # iterate by all script paths
627 foreach dir [_split_path $env(CSF_TestScriptsPath)] {
628 # protection against empty paths
629 set dir [string trim $dir]
630 if { $dir == "" } { continue }
631
632 # check that directory exists
633 if { ! [file isdirectory $dir] } {
634 puts "Warning: directory $dir listed in CSF_TestScriptsPath does not exist, skipped"
635 continue
636 }
637
638 # check if test group with given name exists in this dir
639 # if not, continue to the next test dir
640 if { ! [file isdirectory $dir/$group] } { continue }
641
642 # check that grid with given name (possibly alias) exists; stop otherwise
643 set gridname $grid
644 if { ! [file isdirectory $dir/$group/$gridname] } {
645 # check if grid is named by alias rather than by actual name
646 if { [file exists $dir/$group/grids.list] } {
647 set fd [open $dir/$group/grids.list]
648 while { [gets $fd line] >= 0 } {
649 if { [regexp "\[ \t\]*\#.*" $line] } { continue }
650 if { [regexp "^$grid\[ \t\]*\(\[A-Za-z0-9_.-\]+\)\$" $line res gridname] } {
651 break
652 }
653 }
654 close $fd
655 }
656 }
657 if { ! [file isdirectory $dir/$group/$gridname] } { continue }
658
659 # get actual file name of the script; stop if it cannot be found
660 set casefile $dir/$group/$gridname/$casename
661 if { ! [file exists $casefile] } {
662 # check if this grid is aliased to another one
663 if { [file exists $dir/$group/$gridname/cases.list] } {
664 set fd [open $dir/$group/$gridname/cases.list]
665 if { [gets $fd line] >= 0 } {
666 set casefile [file normalize $dir/$group/$gridname/[string trim $line]/$casename]
667 }
668 close $fd
669 }
670 }
671 if { [file exists $casefile] } {
672 # normal return
673 return
674 }
675 }
676
677 # coming here means specified test is not found; report error
678 error [join [list "Error: test case $group / $grid / $casename is not found in paths listed in variable" \
679 "CSF_TestScriptsPath (current value is \"$env(CSF_TestScriptsPath)\")"] "\n"]
680}
681
682# Internal procedure to run test case indicated by base directory,
 683# group and grid names, and test case file path.
684# The log can be obtained by command "dlog get".
5df3a117 685proc _run_test {scriptsdir group gridname casefile echo} {
40093367 686 global env
687
688 # start timer
689 uplevel dchrono _timer reset
690 uplevel dchrono _timer start
22db40eb 691 catch {uplevel meminfo w} membase
40093367 692
693 # enable commands logging; switch to old-style mode if dlog command is not present
694 set dlog_exists 1
695 if { [catch {dlog reset}] } {
696 set dlog_exists 0
5df3a117 697 } elseif { $echo } {
698 decho on
40093367 699 } else {
700 dlog reset
701 dlog on
702 rename puts puts-saved
703 proc puts args {
704 global _tests_verbose
705
706 # log only output to stdout and stderr, not to file!
707 if {[llength $args] > 1} {
d33dea30 708 set optarg [lindex $args end-1]
709 if { $optarg == "stdout" || $optarg == "stderr" || $optarg == "-newline" } {
40093367 710 dlog add [lindex $args end]
711 }
712 } else {
713 dlog add [lindex $args end]
714 }
715
716 # reproduce original puts
717 if { $_tests_verbose } {
718 eval puts-saved $args
719 }
720 }
721 }
722
40093367 723 # evaluate test case
724 if [catch {
725 uplevel set casename [file tail $casefile]
8418c617 726 uplevel set groupname $group
727 uplevel set gridname $gridname
40093367 728
729 if { [file exists $scriptsdir/$group/begin] } {
730 puts "Executing $scriptsdir/$group/begin..."; flush stdout
731 uplevel source $scriptsdir/$group/begin
732 }
733 if { [file exists $scriptsdir/$group/$gridname/begin] } {
734 puts "Executing $scriptsdir/$group/$gridname/begin..."; flush stdout
735 uplevel source $scriptsdir/$group/$gridname/begin
736 }
737
738 puts "Executing $casefile..."; flush stdout
739 uplevel source $casefile
740
741 if { [file exists $scriptsdir/$group/$gridname/end] } {
742 puts "Executing $scriptsdir/$group/$gridname/end..."; flush stdout
743 uplevel source $scriptsdir/$group/$gridname/end
744 }
745 if { [file exists $scriptsdir/$group/end] } {
746 puts "Executing $scriptsdir/$group/end..."; flush stdout
747 uplevel source $scriptsdir/$group/end
748 }
749 } res] {
750 puts "Tcl Exception: $res"
751 }
752
40093367 753 # stop logging
754 if { $dlog_exists } {
5df3a117 755 if { $echo } {
756 decho off
757 } else {
758 rename puts {}
759 rename puts-saved puts
760 dlog off
761 }
40093367 762 }
763
8418c617 764 # stop cpulimit killer if armed by the test
765 cpulimit
766
22db40eb 767 # add memory and timing info
768 set stats ""
769 if { ! [catch {uplevel meminfo w} memuse] } {
770 set stats "MEMORY DELTA: [expr ($memuse - $membase) / 1024] KiB\n"
771 }
40093367 772 uplevel dchrono _timer stop
773 set time [uplevel dchrono _timer show]
774 if [regexp -nocase {CPU user time:[ \t]*([0-9.e-]+)} $time res cpu] {
22db40eb 775 set stats "${stats}TOTAL CPU TIME: $cpu sec\n"
776 }
777 if { $dlog_exists && ! $echo } {
778 dlog add $stats
779 } else {
780 puts $stats
40093367 781 }
782}
783
784# Internal procedure to check log of test execution and decide if it passed or failed
785proc _check_log {dir group gridname casename log {_summary {}} {_html_log {}}} {
786 global env
787 if { $_summary != "" } { upvar $_summary summary }
788 if { $_html_log != "" } { upvar $_html_log html_log }
789 set summary ""
790 set html_log ""
791
792if [catch {
793
794 # load definition of 'bad words' indicating test failure
795 # note that rules are loaded in the order of decreasing priority (grid - group - common),
796 # thus grid rules will override group ones
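    # Each line of parse.rules has the form "status/regexp/comment"; a hypothetical
    # example is "FAILED /\bException\b/ unhandled exception", which marks every log
    # line matching the (Perl-style) regexp as a failure.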
797 set badwords {}
798 foreach rulesfile [list $dir/$group/$gridname/parse.rules $dir/$group/parse.rules $dir/parse.rules] {
799 if [catch {set fd [open $rulesfile r]}] { continue }
800 while { [gets $fd line] >= 0 } {
801 # skip comments and empty lines
802 if { [regexp "\[ \t\]*\#.*" $line] } { continue }
803 if { [string trim $line] == "" } { continue }
804 # extract regexp
805 if { ! [regexp {^([^/]*)/([^/]*)/(.*)$} $line res status rexp comment] } {
806 puts "Warning: cannot recognize parsing rule \"$line\" in file $rulesfile"
807 continue
808 }
809 set status [string trim $status]
810 if { $comment != "" } { set status "$status ([string trim $comment])" }
811 set rexp [regsub -all {\\b} $rexp {\\y}] ;# convert regexp from Perl to Tcl style
812 lappend badwords [list $status $rexp]
813 }
814 close $fd
815 }
816 if { [llength $badwords] <= 0 } {
817 puts "Warning: no definition of error indicators found (check files parse.rules)"
818 }
819
820 # analyse log line-by-line
821 set todos {}
822 set status ""
823 foreach line [split $log "\n"] {
824 # check if line defines specific treatment of some messages
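        # (a TODO statement looks like "TODO <platforms>: <pattern>", e.g. a
        # hypothetical "TODO Linux: Error : 2 differences"; "All" matches any
        # platform, and a "?" among the platforms marks the case as unstable)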
825 if [regexp -nocase {^[ \t]*TODO ([^:]*):(.*)$} $line res platforms pattern] {
826 if { ! [regexp -nocase {\mAll\M} $platforms] &&
827 ! [regexp -nocase "\\m$env(os_type)\\M" $platforms] } {
828 set html_log "$html_log\n$line"
829 continue ;# TODO statement is for another platform
830 }
831
832 # record TODOs that mark unstable cases
833 if { [regexp {[\?]} $platforms] } {
834 set todos_unstable([llength $todos]) 1
835 }
836
837 lappend todos [regsub -all {\\b} [string trim $pattern] {\\y}] ;# convert regexp from Perl to Tcl style
838 set html_log "$html_log\n[_html_highlight BAD $line]"
839 continue
840 }
841
842 # check for presence of messages indicating test result
843 set ismarked 0
844 foreach bw $badwords {
845 if { [regexp [lindex $bw 1] $line] } {
846 # check if this is known bad case
847 set is_known 0
848 for {set i 0} {$i < [llength $todos]} {incr i} {
849 if { [regexp [lindex $todos $i] $line] } {
850 set is_known 1
851 incr todo_count($i)
852 set html_log "$html_log\n[_html_highlight BAD $line]"
853 break
854 }
855 }
856
857 # if it is not in todo, define status
858 if { ! $is_known } {
859 set stat [lindex $bw 0 0]
860 set html_log "$html_log\n[_html_highlight $stat $line]"
861 if { $status == "" && $stat != "OK" && ! [regexp -nocase {^IGNOR} $stat] } {
862 set status [lindex $bw 0]
863 }
864 }
865 set ismarked 1
866 break
867 }
868 }
869 if { ! $ismarked } {
870 set html_log "$html_log\n$line"
871 }
872 }
873
874 # check for presence of TEST COMPLETED statement
875 if { $status == "" && ! [regexp {TEST COMPLETED} $log] } {
876 # check whether absence of TEST COMPLETED is known problem
877 set i [lsearch $todos "TEST INCOMPLETE"]
878 if { $i >= 0 } {
879 incr todo_count($i)
880 } else {
881 set status "FAILED (no final message is found)"
882 }
883 }
884
885 # check declared bad cases and diagnose possible improvement
886 # (bad case declared but not detected).
887 # Note that absence of the problem marked by TODO with question mark
888 # (unstable) is not reported as improvement.
889 if { $status == "" } {
890 for {set i 0} {$i < [llength $todos]} {incr i} {
891 if { ! [info exists todos_unstable($i)] &&
892 (! [info exists todo_count($i)] || $todo_count($i) <= 0) } {
893 set status "IMPROVEMENT (expected problem TODO no. [expr $i + 1] is not detected)"
894 break;
895 }
896 }
897 }
898
899 # report test as known bad if at least one of expected problems is found
900 if { $status == "" && [llength [array names todo_count]] > 0 } {
901 set status "BAD (known problem)"
902 }
903
904 # report normal OK
905 if { $status == "" } {set status "OK" }
906
907} res] {
908 set status "FAILED ($res)"
909}
910
911 # put final message
912 _log_and_puts summary "CASE $group $gridname $casename: $status"
913 set html_log "[_html_highlight [lindex $status 0] $summary]\n$html_log"
914}
915
 916# Auxiliary procedure putting a message both to stdout and to the log variable (list)
917proc _log_and_puts {logvar message} {
918 if { $logvar != "" } {
919 upvar $logvar log
920 if [info exists log] {
921 set log "$log$message\n"
922 } else {
923 set log "$message\n"
924 }
925 }
926 puts $message
927}
928
 929# Auxiliary procedure to log the result of a single test case
930proc _log_test_case {output logdir dir group grid casename logvar} {
931 upvar $logvar log
932
933 # check result and make HTML log
934 _check_log $dir $group $grid $casename $output summary html_log
935 set log "$log$summary"
936
937 # save log to file
938 if { $logdir != "" } {
939 _log_html $logdir/$group/$grid/$casename.html $html_log "Test $group $grid $casename"
940 _log_save $logdir/$group/$grid/$casename.log "$output\n$summary" "Test $group $grid $casename"
941 }
942}
943
944# Auxiliary procedure to save log to file
945proc _log_save {file log {title {}}} {
946 # create missing directories as needed
947 catch {file mkdir [file dirname $file]}
948
949 # try to open a file
950 if [catch {set fd [open $file w]} res] {
951 error "Error saving log file $file: $res"
952 }
953
954 # dump log and close
955 puts $fd "$title\n"
956 puts $fd $log
957 close $fd
958 return
959}
960
22db40eb 961# Auxiliary procedure to make a (relative if possible) URL to a file for
 962# inclusion as a reference in the HTML log
963proc _make_url {htmldir file} {
964 set htmlpath [file split [file normalize $htmldir]]
965 set filepath [file split [file normalize $file]]
966 for {set i 0} {$i < [llength $htmlpath]} {incr i} {
967 if { "[lindex $htmlpath $i]" != "[lindex $filepath $i]" } {
968 if { $i == 0 } { break }
969 return "[string repeat "../" [expr [llength $htmlpath] - $i - 1]][eval file join [lrange $filepath $i end]]"
970 }
971 }
972
973 # if relative path could not be made, return full file URL
974 return "file://[file normalize $file]"
975}
976
40093367 977# Auxiliary procedure to save HTML log to file
978proc _log_html {file log {title {}}} {
979 # create missing directories as needed
980 catch {file mkdir [file dirname $file]}
981
982 # try to open a file
983 if [catch {set fd [open $file w]} res] {
984 error "Error saving log file $file: $res"
985 }
986
987 # print header
22db40eb 988 puts $fd "<html><head><title>$title</title></head><body><h1>$title</h1>"
40093367 989
990 # add images if present
991 set imgbasename [file rootname [file tail $file]]
992 foreach img [lsort [glob -nocomplain -directory [file dirname $file] -tails ${imgbasename}*.gif ${imgbasename}*.png ${imgbasename}*.jpg]] {
993 puts $fd "<p><img src=\"$img\"/><p>"
994 }
995
b725d7c5 996 # print log body, trying to add HTML links to script files on lines like
997 # "Executing <filename>..."
40093367 998 puts $fd "<pre>"
b725d7c5 999 foreach line [split $log "\n"] {
1000 if { [regexp {Executing[ \t]+([a-zA-Z0-9._/:-]+[^.])} $line res script] &&
1001 [file exists $script] } {
22db40eb 1002 set line [regsub $script $line "<a href=\"[_make_url $file $script]\">$script</a>"]
b725d7c5 1003 }
1004 puts $fd $line
1005 }
40093367 1006 puts $fd "</pre></body></html>"
1007
1008 close $fd
1009 return
1010}
1011
1012# Auxiliary method to make text with HTML highlighting according to status
1013proc _html_color {status} {
1014 # choose a color for the cell according to result
1015 if { $status == "OK" } {
1016 return lightgreen
1017 } elseif { [regexp -nocase {^FAIL} $status] } {
1018 return red
1019 } elseif { [regexp -nocase {^BAD} $status] } {
1020 return yellow
1021 } elseif { [regexp -nocase {^IMP} $status] } {
1022 return orange
1023 } elseif { [regexp -nocase {^SKIP} $status] } {
1024 return gray
1025 } elseif { [regexp -nocase {^IGNOR} $status] } {
1026 return gray
1027 } else {
1028 puts "Warning: no color defined for status $status, using red as if FAILED"
1029 return red
1030 }
1031}
1032
1033# Format text line in HTML to be colored according to the status
1034proc _html_highlight {status line} {
1035 return "<table><tr><td bgcolor=\"[_html_color $status]\">$line</td></tr></table>"
1036}
1037
1038# Internal procedure to generate HTML page presenting log of the tests
1039# execution in tabular form, with links to reports on individual cases
1040proc _log_html_summary {logdir log totals regressions improvements total_time} {
1041 global _test_case_regexp
1042
1043 # create missing directories as needed
1044 catch {file mkdir $logdir}
1045
1046 # try to open a file and start HTML
1047 if [catch {set fd [open $logdir/summary.html w]} res] {
1048 error "Error creating log file: $res"
1049 }
1050
 1051 # write HTML header, including directive to refresh the page while tests are still in progress
1052 puts $fd "<html><head>"
1053 puts $fd "<title>Tests summary</title>"
1054 if { $total_time == "" } {
1055 puts $fd "<meta http-equiv=\"refresh\" content=\"10\">"
1056 }
1057 puts $fd "<meta http-equiv=\"pragma\" content=\"NO-CACHE\">"
1058 puts $fd "</head><body>"
1059
1060 # put summary
1061 set legend(OK) "Test passed OK"
1062 set legend(FAILED) "Test failed (regression)"
1063 set legend(BAD) "Known problem"
1064 set legend(IMPROVEMENT) "Possible improvement (expected problem not detected)"
1065 set legend(SKIPPED) "Test skipped due to lack of data file"
1066 puts $fd "<h1>Summary</h1><table>"
1067 foreach nbstat $totals {
1068 set status [lindex $nbstat 1]
1069 if { [info exists legend($status)] } {
1070 set comment $legend($status)
1071 } else {
1072 set comment "User-defined status"
1073 }
1074 puts $fd "<tr><td align=\"right\">[lindex $nbstat 0]</td><td bgcolor=\"[_html_color $status]\">$status</td><td>$comment</td></tr>"
1075 }
1076 puts $fd "</table>"
1077
1078 # time stamp and elapsed time info
1079 if { $total_time != "" } {
1080 puts $fd "<p>Generated on [clock format [clock seconds] -format {%Y-%m-%d %H:%M:%S}] on [info hostname] <p> $total_time"
1081 } else {
1082 puts $fd "<p>NOTE: This is intermediate summary; the tests are still running! This page will refresh automatically until tests are finished."
1083 }
1084
1085 # print regressions and improvements
1086 foreach featured [list $regressions $improvements] {
1087 if { [llength $featured] <= 1 } { continue }
1088 set status [string trim [lindex $featured 0] { :}]
1089 puts $fd "<h2>$status</h2>"
1090 puts $fd "<table>"
1091 set groupgrid ""
1092 foreach test [lrange $featured 1 end] {
1093 if { ! [regexp {^(.*)\s+([\w.]+)$} $test res gg name] } {
1094 set gg UNKNOWN
1095 set name "Error building short list; check details"
1096 }
1097 if { $gg != $groupgrid } {
1098 if { $groupgrid != "" } { puts $fd "</tr>" }
1099 set groupgrid $gg
1100 puts $fd "<tr><td>$gg</td>"
1101 }
1102 puts $fd "<td bgcolor=\"[_html_color $status]\"><a href=\"[regsub -all { } $gg /]/${name}.html\">$name</a></td>"
1103 }
1104 if { $groupgrid != "" } { puts $fd "</tr>" }
1105 puts $fd "</table>"
1106 }
1107
1108 # put detailed log
1109 puts $fd "<h1>Details</h1>"
1110
1111 # process log line-by-line
1112 set group {}
1113 set letter {}
1114 foreach line [lsort -dictionary [split $log "\n"]] {
1115 # check that the line is case report in the form "CASE group grid name: result (explanation)"
1116 if { ! [regexp $_test_case_regexp $line res grp grd casename result message] } {
1117 continue
1118 }
1119
1120 # start new group
1121 if { $grp != $group } {
1122 if { $letter != "" } { puts $fd "</tr></table>" }
1123 set letter {}
1124 set group $grp
1125 set grid {}
1126 puts $fd "<h2>Group $group</h2>"
1127 }
1128
1129 # start new grid
1130 if { $grd != $grid } {
1131 if { $letter != "" } { puts $fd "</tr></table>" }
1132 set letter {}
1133 set grid $grd
1134 puts $fd "<h3>Grid $grid</h3>"
1135 }
1136
1137 # check if test case name is <letter><digit>;
1138 # if not, set alnum to period "." to recognize non-standard test name
1139 if { ! [regexp {([A-Za-z]+)([0-9]+)} $casename res alnum number] } {
1140 set alnum .
1141 }
1142
1143 # start new row when letter changes or for non-standard names
1144 if { $alnum != $letter || $alnum == "." } {
1145 if { $letter != "" } {
1146 puts $fd "</tr><tr>"
1147 } else {
1148 puts $fd "<table><tr>"
1149 }
1150 set letter $alnum
1151 }
1152
1153 puts $fd "<td bgcolor=\"[_html_color $result]\"><a href=\"$group/$grid/${casename}.html\">$casename</a></td>"
1154 }
1155 puts $fd "</tr></table>"
1156
1157 # add remaining lines of log as plain text
1158 puts $fd "<h2>Plain text messages</h2>\n<pre>"
1159 foreach line [split $log "\n"] {
1160 if { ! [regexp $_test_case_regexp $line] } {
1161 puts $fd "$line"
1162 }
1163 }
1164 puts $fd "</pre>"
1165
1166 # close file and exit
1167 puts $fd "</body>"
1168 close $fd
1169 return
1170}
1171
1172# Procedure to dump summary logs of tests
1173proc _log_summarize {logdir log {total_time {}}} {
1174
1175 # sort log records alphabetically to have the same behavior on Linux and Windows
1176 # (also needed if tests are run in parallel)
1177 set loglist [lsort -dictionary [split $log "\n"]]
1178
1179 # classify test cases by status
1180 foreach line $loglist {
1181 if { [regexp {^CASE ([^:]*): ([[:alnum:]]+).*$} $line res caseid status] } {
1182 lappend stat($status) $caseid
1183 }
1184 }
1185 set totals {}
1186 set improvements {Improvements:}
1187 set regressions {Failed:}
1188 if { [info exists stat] } {
1189 foreach status [lsort [array names stat]] {
1190 lappend totals [list [llength $stat($status)] $status]
1191
1192 # separately count improvements (status starting with IMP) and regressions (all except IMP, OK, BAD, and SKIP)
1193 if { [regexp -nocase {^IMP} $status] } {
1194 eval lappend improvements $stat($status)
1195 } elseif { $status != "OK" && ! [regexp -nocase {^BAD} $status] && ! [regexp -nocase {^SKIP} $status] } {
1196 eval lappend regressions $stat($status)
1197 }
1198 }
1199 }
1200
1201 # if time is specified, add totals
1202 if { $total_time != "" } {
1203 if { [llength $improvements] > 1 } {
1204 _log_and_puts log [join $improvements "\n "]
1205 }
1206 if { [llength $regressions] > 1 } {
1207 _log_and_puts log [join $regressions "\n "]
1208 }
1209 if { [llength $improvements] == 1 && [llength $regressions] == 1 } {
1210 _log_and_puts log "No regressions"
1211 }
1212 _log_and_puts log "Total cases: [join $totals {, }]"
1213 _log_and_puts log $total_time
1214 }
1215
1216 # save log to files
1217 if { $logdir != "" } {
1218 _log_html_summary $logdir $log $totals $regressions $improvements $total_time
1219 _log_save $logdir/tests.log $log "Tests summary"
1220 }
1221
1222 return
1223}
1224
1225# Internal procedure to generate XML log in JUnit style, for further
1226# consumption by Jenkins or similar systems.
1227#
1228# The output is intended to conform to XML schema supported by Jenkins found at
1229# https://svn.jenkins-ci.org/trunk/hudson/dtkit/dtkit-format/dtkit-junit-model/src/main/resources/com/thalesgroup/dtkit/junit/model/xsd/junit-4.xsd
1230#
1231# The mapping of the fields is inspired by annotated schema of Apache Ant JUnit XML format found at
1232# http://windyroad.org/dl/Open%20Source/JUnit.xsd
1233proc _log_xml_summary {logdir filename log include_cout} {
1234 global _test_case_regexp
1235
1236 catch {file mkdir [file dirname $filename]}
1237
1238 # try to open a file and start XML
1239 if [catch {set fd [open $filename w]} res] {
1240 error "Error creating XML summary file $filename: $res"
1241 }
1242 puts $fd "<?xml version='1.0' encoding='utf-8'?>"
1243 puts $fd "<testsuites>"
1244
1245 # prototype for command to generate test suite tag
1246 set time_and_host "timestamp=\"[clock format [clock seconds] -format {%Y-%m-%dT%H:%M:%S}]\" hostname=\"[info hostname]\""
1247 set cmd_testsuite {puts $fd "<testsuite name=\"$group $grid\" tests=\"$nbtests\" failures=\"$nbfail\" errors=\"$nberr\" time=\"$time\" skipped=\"$nbskip\" $time_and_host>\n$testcases\n</testsuite>\n"}
1248
1249 # sort log and process it line-by-line
1250 set group {}
1251 foreach line [lsort -dictionary [split $log "\n"]] {
1252 # check that the line is case report in the form "CASE group grid name: result (explanation)"
1253 if { ! [regexp $_test_case_regexp $line res grp grd casename result message] } {
1254 continue
1255 }
1256 set message [string trim $message " \t\r\n()"]
1257
1258 # start new testsuite for each grid
1259 if { $grp != $group || $grd != $grid } {
1260
1261 # write previous test suite
1262 if [info exists testcases] { eval $cmd_testsuite }
1263
1264 set testcases {}
1265 set nbtests 0
1266 set nberr 0
1267 set nbfail 0
1268 set nbskip 0
1269 set time 0.
1270
1271 set group $grp
1272 set grid $grd
1273 }
1274
1275 incr nbtests
1276
1277 # parse test log and get its CPU time
1278 set testout {}
1279 set add_cpu {}
1280 if { [catch {set fdlog [open $logdir/$group/$grid/${casename}.log r]} ret] } {
1281 puts "Error: cannot open $logdir/$group/$grid/${casename}.log: $ret"
1282 } else {
1283 while { [gets $fdlog logline] >= 0 } {
1284 if { $include_cout } {
1285 set testout "$testout$logline\n"
1286 }
1287 if [regexp -nocase {TOTAL CPU TIME:\s*([\d.]+)\s*sec} $logline res cpu] {
1288 set add_cpu " time=\"$cpu\""
1289 set time [expr $time + $cpu]
1290 }
1291 }
1292 close $fdlog
1293 }
1294 if { ! $include_cout } {
1295 set testout "$line\n"
1296 }
1297
1298 # record test case with its output and status
 1299 # Mapping: OK is reported as passed; SKIPPED as <error>, BAD (known problem) as <skipped>, all others as <failure>
1300 set testcases "$testcases\n <testcase name=\"$casename\"$add_cpu status=\"$result\">\n"
1301 set testcases "$testcases\n <system-out>\n$testout </system-out>"
1302 if { $result != "OK" } {
1303 if { [regexp -nocase {^SKIP} $result] } {
1304 incr nberr
1305 set testcases "$testcases\n <error name=\"$result\" message=\"$message\"/>"
1306 } elseif { [regexp -nocase {^BAD} $result] } {
1307 incr nbskip
1308 set testcases "$testcases\n <skipped>$message</skipped>"
1309 } else {
1310 incr nbfail
1311 set testcases "$testcases\n <failure name=\"$result\" message=\"$message\"/>"
1312 }
1313 }
1314 set testcases "$testcases\n </testcase>"
1315 }
1316
1317 # write last test suite
1318 if [info exists testcases] { eval $cmd_testsuite }
1319
1320 # the end
1321 puts $fd "</testsuites>"
1322 close $fd
1323 return
1324}
1325
1326# define custom platform name
1327proc _tests_platform_def {} {
1328 global env tcl_platform
1329
1330 if [info exists env(os_type)] { return }
1331
1332 set env(os_type) $tcl_platform(platform)
1333
 1334 # use detailed mapping for various versions of Linux
1335 # (note that mapping is rather non-uniform, for historical reasons)
1336 if { $env(os_type) == "unix" && ! [catch {exec cat /etc/issue} issue] } {
1337 if { [regexp {Mandriva[ \tA-Za-z]+([0-9]+)} $issue res num] } {
1338 set env(os_type) Mandriva$num
1339 } elseif { [regexp {Red Hat[ \tA-Za-z]+([0-9]+)} $issue res num] } {
1340 set env(os_type) RedHat$num
1341 } elseif { [regexp {Debian[ \tA-Za-z/]+([0-9]+)[.]([0-9]+)} $issue res num subnum] } {
1342 set env(os_type) Debian$num$subnum
1343 } elseif { [regexp {CentOS[ \tA-Za-z]+([0-9]+)[.]([0-9]+)} $issue res num subnum] } {
1344 set env(os_type) CentOS$num$subnum
1345 } elseif { [regexp {Scientific[ \tA-Za-z]+([0-9]+)[.]([0-9]+)} $issue res num subnum] } {
1346 set env(os_type) SL$num$subnum
1347 } elseif { [regexp {Fedora Core[ \tA-Za-z]+([0-9]+)} $issue res num] } {
1348 set env(os_type) FedoraCore$num
1349 }
1350 if { [exec uname -m] == "x86_64" } {
1351 set env(os_type) "$env(os_type)-64"
1352 }
1353 }
1354}
1355_tests_platform_def
1356
1357# Auxiliary procedure to split path specification (usually defined by
1358# environment variable) into list of directories or files
1359proc _split_path {pathspec} {
1360 global tcl_platform
1361
1362 # first replace all \ (which might occur on Windows) by /
1363 regsub -all "\\\\" $pathspec "/" pathspec
1364
1365 # split path by platform-specific separator
1366 return [split $pathspec [_path_separator]]
1367}
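# For example (hypothetical paths), on Windows
#   _split_path {D:\occt\tests;E:\extra\tests}
# returns {D:/occt/tests E:/extra/tests}; on other platforms the separator
# is ":" (see _path_separator below).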
1368
1369# Auxiliary procedure to define platform-specific separator for directories in
1370# path specification
1371proc _path_separator {} {
1372 global tcl_platform
1373
1374 # split path by platform-specific separator
1375 if { $tcl_platform(platform) == "windows" } {
1376 return ";"
1377 } else {
1378 return ":"
1379 }
1380}
1381
cc6a292d 1382# Procedure to make a diff and common of two lists
1383proc _list_diff {list1 list2 _in1 _in2 _common} {
1384 upvar $_in1 in1
1385 upvar $_in2 in2
1386 upvar $_common common
1387
1388 set in1 {}
1389 set in2 {}
1390 set common {}
1391 foreach item $list1 {
1392 if { [lsearch -exact $list2 $item] >= 0 } {
1393 lappend common $item
1394 } else {
1395 lappend in1 $item
1396 }
1397 }
1398 foreach item $list2 {
1399 if { [lsearch -exact $common $item] < 0 } {
1400 lappend in2 $item
1401 }
1402 }
1403 return
1404}
1405
1406# procedure to load a file to Tcl string
1407proc _read_file {filename} {
1408 set fd [open $filename r]
1409 set result [read -nonewline $fd]
1410 close $fd
1411 return $result
1412}
1413
22db40eb 1414# procedure to construct the name of the image diff file
1415proc _diff_img_name {dir1 dir2 casepath imgfile} {
1416 return [file join $dir1 $casepath "diff-[file tail $dir2]-$imgfile"]
1417}
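# e.g. (hypothetical arguments) _diff_img_name run1 run2 bugs/vis image1.png
# returns run1/bugs/vis/diff-run2-image1.png, i.e. the diff image is stored
# next to the images of the first run.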
1418
cc6a292d 1419# Procedure to compare results of two runs of test cases
1420proc _test_diff {dir1 dir2 basename status verbose _logvar {_statvar ""}} {
1421 upvar $_logvar log
1422
22db40eb 1423 # make sure to load diffimage command
1424 uplevel pload VISUALIZATION
1425
cc6a292d 1426 # prepare variable (array) for collecting statistics
1427 if { "$_statvar" != "" } {
1428 upvar $_statvar stat
1429 } else {
1430 set stat(cpu1) 0
1431 set stat(cpu2) 0
22db40eb 1432 set stat(mem1) 0
1433 set stat(mem2) 0
cc6a292d 1434 set log {}
1435 }
1436
1437 # first check subdirectories
1438 set path1 [file join $dir1 $basename]
1439 set path2 [file join $dir2 $basename]
1440 set list1 [glob -directory $path1 -types d -tails -nocomplain *]
1441 set list2 [glob -directory $path2 -types d -tails -nocomplain *]
1442 if { [llength $list1] >0 || [llength $list2] > 0 } {
1443 _list_diff $list1 $list2 in1 in2 common
1444 if { "$verbose" > 1 } {
1445 if { [llength $in1] > 0 } { _log_and_puts log "Only in $path1: $in1" }
1446 if { [llength $in2] > 0 } { _log_and_puts log "Only in $path2: $in2" }
1447 }
1448 foreach subdir $common {
1449 if { "$verbose" > 2 } {
1450 _log_and_puts log "Checking [file join $basename $subdir]"
1451 }
1452 _test_diff $dir1 $dir2 [file join $basename $subdir] $status $verbose log stat
1453 }
1454 } else {
1455 # check log files (only if directory has no subdirs)
1456 set list1 [glob -directory $path1 -types f -tails -nocomplain *.log]
1457 set list2 [glob -directory $path2 -types f -tails -nocomplain *.log]
1458 _list_diff $list1 $list2 in1 in2 common
1459 if { "$verbose" > 1 } {
1460 if { [llength $in1] > 0 } { _log_and_puts log "Only in $path1: $in1" }
1461 if { [llength $in2] > 0 } { _log_and_puts log "Only in $path2: $in2" }
1462 }
1463 foreach logfile $common {
1464 # load two logs
1465 set log1 [_read_file [file join $dir1 $basename $logfile]]
1466 set log2 [_read_file [file join $dir2 $basename $logfile]]
22db40eb 1467 set casename [file rootname $logfile]
cc6a292d 1468
1469 # check execution statuses
1470 set status1 UNDEFINED
1471 set status2 UNDEFINED
1472 if { ! [regexp {CASE [^:]*:\s*([\w]+)} $log1 res1 status1] ||
1473 ! [regexp {CASE [^:]*:\s*([\w]+)} $log2 res2 status2] ||
1474 "$status1" != "$status2" } {
22db40eb 1475 _log_and_puts log "STATUS [split $basename /] $casename: $status1 / $status2"
cc6a292d 1476
1477 # if test statuses are different, further comparison makes
1478 # no sense unless explicitly requested
1479 if { "$status" != "all" } {
1480 continue
1481 }
1482 }
1483 if { "$status" == "ok" && "$status1" != "OK" } {
1484 continue
1485 }
1486
1487 # check CPU times
1488 set cpu1 UNDEFINED
1489 set cpu2 UNDEFINED
1490 if { [regexp {TOTAL CPU TIME:\s*([\d.]+)} $log1 res1 cpu1] &&
1491 [regexp {TOTAL CPU TIME:\s*([\d.]+)} $log2 res1 cpu2] } {
1492 set stat(cpu1) [expr $stat(cpu1) + $cpu1]
1493 set stat(cpu2) [expr $stat(cpu2) + $cpu2]
1494
 1495 # compare CPU times with 10% tolerance (but not less than 0.5 sec)
1496 if { [expr abs ($cpu1 - $cpu2) > 0.5 + 0.05 * abs ($cpu1 + $cpu2)] } {
22db40eb 1497 _log_and_puts log "CPU [split $basename /] $casename: $cpu1 / $cpu2"
cc6a292d 1498 }
1499 }
22db40eb 1500
1501 # check memory delta
1502 set mem1 UNDEFINED
1503 set mem2 UNDEFINED
1504 if { [regexp {MEMORY DELTA:\s*([\d.]+)} $log1 res1 mem1] &&
1505 [regexp {MEMORY DELTA:\s*([\d.]+)} $log2 res1 mem2] } {
1506 set stat(mem1) [expr $stat(mem1) + $mem1]
1507 set stat(mem2) [expr $stat(mem2) + $mem2]
1508
 1509 # compare memory usage with 10% tolerance (but not less than 16 KiB)
1510 if { [expr abs ($mem1 - $mem2) > 16 + 0.05 * abs ($mem1 + $mem2)] } {
1511 _log_and_puts log "MEMORY [split $basename /] $casename: $mem1 / $mem2"
1512 }
1513 }
1514
1515 # check images
1516 set imglist1 [glob -directory $path1 -types f -tails -nocomplain $casename*.{png,gif}]
1517 set imglist2 [glob -directory $path2 -types f -tails -nocomplain $casename*.{png,gif}]
1518 _list_diff $imglist1 $imglist2 imgin1 imgin2 imgcommon
1519 if { "$verbose" > 1 } {
1520 if { [llength $imgin1] > 0 } { _log_and_puts log "Only in $path1: $imgin1" }
1521 if { [llength $imgin2] > 0 } { _log_and_puts log "Only in $path2: $imgin2" }
1522 }
1523 foreach imgfile $imgcommon {
1524# if { $verbose > 1 } { _log_and_puts log "Checking [split basename /] $casename: $imgfile" }
1525 set diffile [_diff_img_name $dir1 $dir2 $basename $imgfile]
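            # compare the two images using the diffimage command (provided by the
            # VISUALIZATION plugin loaded above); zero result means identical images,
            # a non-zero result or an error is reported below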
1526 if { [catch {diffimage [file join $dir1 $basename $imgfile] \
1527 [file join $dir2 $basename $imgfile] \
1528 0 0 0 $diffile} diff] } {
1529 _log_and_puts log "IMAGE [split $basename /] $casename: $imgfile cannot be compared"
1530 file delete -force $diffile ;# clean possible previous result of diffimage
1531 } elseif { $diff != 0 } {
1532 _log_and_puts log "IMAGE [split $basename /] $casename: $imgfile differs"
1533 } else {
1534 file delete -force $diffile ;# clean useless artifact of diffimage
1535 }
1536 }
cc6a292d 1537 }
1538 }
1539
1540 if { "$_statvar" == "" } {
22db40eb 1541 _log_and_puts log "Total MEMORY difference: $stat(mem1) / $stat(mem2)"
cc6a292d 1542 _log_and_puts log "Total CPU difference: $stat(cpu1) / $stat(cpu2)"
1543 }
1544}
b725d7c5 1545
22db40eb 1546# Auxiliary procedure to save log of results comparison to file
1547proc _log_html_diff {file log dir1 dir2} {
1548 # create missing directories as needed
1549 catch {file mkdir [file dirname $file]}
1550
1551 # try to open a file
1552 if [catch {set fd [open $file w]} res] {
1553 error "Error saving log file $file: $res"
1554 }
1555
1556 # print header
1557 puts $fd "<html><head><title>Diff $dir1 vs. $dir2</title></head><body>"
1558 puts $fd "<h1>Comparison of test results: $dir1 vs. $dir2</h1>"
1559
1560 # print log body, trying to add HTML links to script files on lines like
1561 # "Executing <filename>..."
1562 puts $fd "<pre>"
1563 set logpath [file split [file normalize $file]]
1564 foreach line [split $log "\n"] {
1565 puts $fd $line
1566
1567 if { [regexp {IMAGE[ \t]+([^:]+):[ \t]+([A-Za-z0-9_.-]+)} $line res case img] } {
1568 if { [catch {eval file join "" [lrange $case 0 end-1]} gridpath] } {
 1569 # note: special handling for the case when test grid directories are compared directly
1570 set gridpath ""
1571 }
1572 set img1 "<img src=\"[_make_url $file [file join $dir1 $gridpath $img]]\">"
1573 set img2 "<img src=\"[_make_url $file [file join $dir2 $gridpath $img]]\">"
1574
1575 set difffile [_diff_img_name $dir1 $dir2 $gridpath $img]
1576 if { [file exists $difffile] } {
1577 set imgd "<img src=\"[_make_url $file $difffile]\">"
1578 } else {
1579 set imgd "N/A"
1580 }
1581
1582 puts $fd "<table><tr><th>[file tail $dir1]</th><th>[file tail $dir2]</th><th>Different pixels</th></tr>"
1583 puts $fd "<tr><td>$img1</td><td>$img2</td><td>$imgd</td></tr></table>"
1584 }
1585 }
1586 puts $fd "</pre></body></html>"
1587
1588 close $fd
1589 return
1590}
1591
b725d7c5 1592# get number of CPUs on the system
1593proc _get_nb_cpus {} {
1594 global tcl_platform env
1595
1596 if { "$tcl_platform(platform)" == "windows" } {
1597 # on Windows, take the value of the environment variable
1598 if { [info exists env(NUMBER_OF_PROCESSORS)] &&
1599 ! [catch {expr $env(NUMBER_OF_PROCESSORS) > 0} res] && $res >= 0 } {
1600 return $env(NUMBER_OF_PROCESSORS)
1601 }
1602 } elseif { "$tcl_platform(os)" == "Linux" } {
1603 # on Linux, take number of logical processors listed in /proc/cpuinfo
1604 if { [catch {open "/proc/cpuinfo" r} fd] } {
1605 return 0 ;# should never happen, but...
1606 }
1607 set nb 0
1608 while { [gets $fd line] >= 0 } {
1609 if { [regexp {^processor[ \t]*:} $line] } {
1610 incr nb
1611 }
1612 }
1613 close $fd
1614 return $nb
1615 } elseif { "$tcl_platform(os)" == "Darwin" } {
1616 # on MacOS X, call sysctl command
1617 if { ! [catch {exec sysctl hw.ncpu} ret] &&
1618 [regexp {^hw[.]ncpu[ \t]*:[ \t]*([0-9]+)} $ret res nb] } {
1619 return $nb
1620 }
1621 }
1622
1623 # if cannot get good value, return 0 as default
1624 return 0
1625}