1 # Copyright (c) 2012 OPEN CASCADE SAS
3 # The content of this file is subject to the Open CASCADE Technology Public
4 # License Version 6.5 (the "License"). You may not use the content of this file
5 # except in compliance with the License. Please obtain a copy of the License
6 # at http://www.opencascade.org and read it completely before using this file.
8 # The Initial Developer of the Original Code is Open CASCADE S.A.S., having its
9 # main offices at: 1, place des Freres Montgolfier, 78280 Guyancourt, France.
11 # The Original Code and all software distributed under the License is
12 # distributed on an "AS IS" basis, without warranty of any kind, and the
13 # Initial Developer hereby disclaims all such warranties, including without
14 # limitation, any warranties of merchantability, fitness for a particular
15 # purpose or non-infringement. Please see the License for the specific terms
16 # and conditions governing the rights and limitations under the License.
18 ############################################################################
19 # This file defines scripts for execution of OCCT tests.
20 # It should be loaded automatically when DRAW is started, and provides
21 # top-level commands starting with 'test'. Type 'help test' to get their synopsis and description.
23 # See OCCT Tests User Guide for description of the test system.
25 # Note: procedures with names starting with underscore are for internal use
26 # inside the test system.
27 ############################################################################
# Default verbose level for command _run_test
# (0 = quiet; testgrid and _check_log consult this global)
set _tests_verbose 0

# regexp for parsing test case results in summary log;
# captures: group, grid, casename, status word, trailing message
set _test_case_regexp {^CASE\s+([\w.-]+)\s+([\w.-]+)\s+([\w.-]+)\s*:\s*([\w]+)(.*)}
# Basic command to run indicated test case in DRAW
# NOTE(review): 'help test {' opener and closing brace restored; they were lost in extraction
help test {
  Run specified test case
  Use: test group grid casename [echo=0]
  - If echo is set to 0 (default), log is stored in memory and only summary
    is output (the log can be obtained with command "dlog get")
  - If echo is set to 1 or "-echo", all commands and results are echoed
    immediately, but log is not saved and summary is not produced
}
# Run one test case: group/grid/casename; echo=1 (or "-echo") echoes output
# immediately instead of collecting a log and producing a summary.
# NOTE(review): reconstructed from a garbled extract — confirm against upstream TestCommands.tcl
proc test {group grid casename {echo 0}} {
    # get test case paths (will raise error if input is invalid)
    _get_test $group $grid $casename dir gridname casefile

    # if echo specified as "-echo", convert it to bool
    if { "$echo" == "-echo" } { set echo t }

    # run the test case; log is collected by dlog unless echo is enabled
    uplevel _run_test $dir $group $gridname $casefile $echo

    # check log and print summary (only possible when the log was collected)
    if { ! $echo } {
        _check_log $dir $group $gridname $casename [dlog get]
    }

    return
}
# Command to run a grid of tests, a whole group, or all tests
# (original comment wrongly duplicated the one for 'test')
help testgrid {
  Run all tests, or specified group, or one grid
  Use: testgrid [group [grid]] [options...]
  Allowed options are:
  -parallel N: run N parallel processes (default is number of CPUs, 0 to disable)
  -refresh N: save summary logs every N seconds (default 600, minimal 1, 0 to disable)
  -outdir dirname: set log directory (should be empty or non-existing)
  -overwrite: force writing logs in existing non-empty directory
  -xml filename: write XML report for Jenkins (in JUnit-like format)
}
73 proc testgrid {args} {
74 global env tcl_platform _tests_verbose
76 ######################################################
78 ######################################################
80 # check that environment variable defining paths to test scripts is defined
81 if { ! [info exists env(CSF_TestScriptsPath)] ||
82 [llength $env(CSF_TestScriptsPath)] <= 0 } {
83 error "Error: Environment variable CSF_TestScriptsPath is not defined"
87 set parallel [_get_nb_cpus]
92 for {set narg 0} {$narg < [llength $args]} {incr narg} {
93 set arg [lindex $args $narg]
96 if { $arg == "-parallel" } {
98 if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
99 set parallel [expr [lindex $args $narg]]
101 error "Option -parallel requires argument"
107 if { $arg == "-refresh" } {
109 if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
110 set refresh [expr [lindex $args $narg]]
112 error "Option -refresh requires argument"
118 if { $arg == "-outdir" } {
120 if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
121 set logdir [lindex $args $narg]
123 error "Option -outdir requires argument"
128 # allow overwrite logs
129 if { $arg == "-overwrite" } {
135 if { $arg == "-xml" } {
137 if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
138 set xmlfile [lindex $args $narg]
140 if { $xmlfile == "" } {
141 set xmlfile TESTS-summary.xml
147 if { [regexp {^-} $arg] } {
148 error "Error: unsupported option \"$arg\""
151 # treat arguments not recognized as options as group and grid names
152 if { ! [info exists groupname] } {
154 } elseif { ! [info exists gridname] } {
157 error "Error: cannot interpret argument $narg ($arg): both group and grid names are already defined by previous args!"
161 # check that target log directory is empty or does not exist
162 set logdir [file normalize [string trim $logdir]]
163 if { $logdir == "" } {
164 # if specified logdir is empty string, generate unique name like
165 # results_<branch>_<timestamp>
167 if { ! [catch {exec git branch} gitout] &&
168 [regexp {[*] ([\w]+)} $gitout res branch] } {
169 set prefix "${prefix}_$branch"
171 set logdir "${prefix}_[clock format [clock seconds] -format {%Y-%m-%dT%H%M}]"
172 set logdir [file normalize $logdir]
174 if { [file isdirectory $logdir] && ! $overwrite && ! [catch {glob -directory $logdir *}] } {
175 error "Error: Specified log directory \"$logdir\" is not empty; please clean it before running tests"
177 if { [catch {file mkdir $logdir}] || ! [file writable $logdir] } {
178 error "Error: Cannot create directory \"$logdir\", or it is not writable"
181 ######################################################
182 # prepare list of tests to be performed
183 ######################################################
185 # list of tests, each defined by a list of:
186 # test scripts directory
187 # group (subfolder) name
188 # grid (subfolder) name
190 # path to test case file
193 # iterate by all script paths
194 foreach dir [_split_path $env(CSF_TestScriptsPath)] {
195 # protection against empty paths
196 set dir [string trim $dir]
197 if { $dir == "" } { continue }
199 if { $_tests_verbose > 0 } { _log_and_puts log "Examining tests directory $dir" }
201 # check that directory exists
202 if { ! [file isdirectory $dir] } {
203 _log_and_puts log "Warning: directory $dir listed in CSF_TestScriptsPath does not exist, skipped"
207 # if test group is specified, check that directory with given name exists in this dir
208 # if not, continue to the next test dir
209 if { [info exists groupname] && $groupname != "" } {
210 if { [file isdirectory $dir/$groupname] } {
211 set groups $groupname
216 # else search all directories in the current dir
217 if [catch {glob -directory $dir -tail -types d *} groups] { continue }
221 if { $_tests_verbose > 0 } { _log_and_puts log "Groups to be executed: $groups" }
222 foreach group [lsort -dictionary $groups] {
223 if { $_tests_verbose > 0 } { _log_and_puts log "Examining group directory $group" }
225 # file grids.list must exist: it defines sequence of grids in the group
226 if { ! [file exists $dir/$group/grids.list] } {
227 _log_and_puts log "Warning: directory $dir/$group does not contain file grids.list, skipped"
231 # read grids.list file and make a list of grids to be executed
233 set fd [open $dir/$group/grids.list]
235 while { [gets $fd line] >= 0 } {
238 # skip comments and empty lines
239 if { [regexp "\[ \t\]*\#.*" $line] } { continue }
240 if { [string trim $line] == "" } { continue }
242 # get grid id and name
243 if { ! [regexp "^\(\[0-9\]+\)\[ \t\]*\(\[A-Za-z0-9_.-\]+\)\$" $line res gridid grid] } {
244 _log_and_puts log "Warning: cannot recognize line $nline in file $dir/$group/grids.list as \"gridid gridname\"; ignored"
248 # if specific grid is requested, check that it is present; otherwise make complete list
249 if { ! [info exists gridname] || $gridname == "" || $gridname == $gridid || $gridname == $grid } {
250 lappend gridlist $grid
255 # iterate by all grids
256 foreach grid $gridlist {
258 # check if this grid is aliased to another one
259 set griddir $dir/$group/$grid
260 if { [file exists $griddir/cases.list] } {
261 set fd [open $griddir/cases.list]
262 if { [gets $fd line] >= 0 } {
263 set griddir [file normalize $dir/$group/$grid/[string trim $line]]
268 # check if grid directory actually exists
269 if { ! [file isdirectory $griddir] } {
270 _log_and_puts log "Error: tests directory for grid $grid ($griddir) is missing; skipped"
274 # create directory for logging test results
275 if { $logdir != "" } { file mkdir $logdir/$group/$grid }
277 # iterate by all tests in the grid directory
278 if { [catch {glob -directory $griddir -type f *} testfiles] } { continue }
279 foreach casefile [lsort -dictionary $testfiles] {
280 # filter out begin and end files
281 set casename [file tail $casefile]
282 if { $casename == "begin" || $casename == "end" } { continue }
284 lappend tests_list [list $dir $group $grid $casename $casefile]
289 if { [llength $tests_list] < 1 } {
290 error "Error: no tests are found, check you input arguments and variable CSF_TestScriptsPath!"
293 ######################################################
295 ######################################################
297 # log command arguments and environment
298 lappend log "Command: testgrid $args"
299 lappend log "Host: [info hostname]"
300 lappend log "Started on: [clock format [clock seconds] -format {%Y-%m-%d %H:%M:%S}]"
301 catch {lappend log "DRAW build:\n[dversion]" }
302 lappend log "Environment:"
303 foreach envar [lsort [array names env]] {
304 lappend log "$envar=\"$env($envar)\""
308 set refresh_timer [clock seconds]
309 uplevel dchrono _timer reset
310 uplevel dchrono _timer start
312 # if parallel execution is requested, allocate thread pool
313 if { $parallel > 0 } {
314 if { ! [info exists tcl_platform(threaded)] || [catch {package require Thread}] } {
315 _log_and_puts log "Warning: Tcl package Thread is not available, running in sequential mode"
318 set worker [tpool::create -minworkers $parallel -maxworkers $parallel]
319 # suspend the pool until all jobs are posted, to prevent blocking of the process
320 # of starting / processing jobs by running threads
321 catch {tpool::suspend $worker}
322 if { $_tests_verbose > 0 } { _log_and_puts log "Executing tests in (up to) $parallel threads" }
323 # limit number of jobs in the queue by reasonable value
324 # to prevent slowdown due to unnecessary queue processing
326 set nbpooled_max [expr 10 * $parallel]
327 set nbpooled_ok [expr 5 * $parallel]
333 foreach test_def $tests_list {
334 # check for user break
335 if { $userbreak || "[info commands dbreak]" == "dbreak" && [catch dbreak] } {
340 set dir [lindex $test_def 0]
341 set group [lindex $test_def 1]
342 set grid [lindex $test_def 2]
343 set casename [lindex $test_def 3]
344 set casefile [lindex $test_def 4]
346 # command to set tests for generation of image in results directory
348 if { $logdir != "" } { set imgdir_cmd "set imagedir $logdir/$group/$grid" }
350 # prepare command file for running test case in separate instance of DRAW
351 set fd_cmd [open $logdir/$group/$grid/${casename}.tcl w]
352 puts $fd_cmd "$imgdir_cmd"
353 puts $fd_cmd "set test_image $casename"
354 puts $fd_cmd "_run_test $dir $group $grid $casefile t"
356 # use dlog command to obtain complete output of the test when it is absent (i.e. since OCCT 6.6.0)
357 # note: this is not needed if echo is set to 1 in call to _run_test above
358 if { ! [catch {dlog get}] } {
359 puts $fd_cmd "puts \[dlog get\]"
361 # else try to use old-style QA_ variables to get more output...
364 set env(QA_print_command) 1
367 # final 'exit' is needed when running on Linux under VirtualGl
371 # commant to run DRAW with a command file;
372 # note that empty string is passed as standard input to avoid possible
373 # hang-ups due to waiting for stdin of the launching process
374 set command "exec <<{} DRAWEXE -f $logdir/$group/$grid/${casename}.tcl"
376 # alternative method to run without temporary file; disabled as it needs too many backslashes
378 # set command "exec <<\"\" DRAWEXE -c $imgdir_cmd\\\; set test_image $casename\\\; \
379 # _run_test $dir $group $grid $casefile\\\; \
380 # puts \\\[dlog get\\\]\\\; exit"
383 # run test case, either in parallel or sequentially
384 if { $parallel > 0 } {
386 set job [tpool::post -nowait $worker "catch \"$command\" output; return \$output"]
387 set job_def($job) [list $logdir $dir $group $grid $casename]
389 if { $nbpooled > $nbpooled_max } {
390 _testgrid_process_jobs $worker $nbpooled_ok
393 # sequential execution
394 catch {eval $command} output
395 _log_test_case $output $logdir $dir $group $grid $casename log
397 # update summary log with requested period
398 if { $logdir != "" && $refresh > 0 && [expr [clock seconds] - $refresh_timer > $refresh] } {
399 # update and dump summary
400 _log_summarize $logdir $log
401 set refresh_timer [clock seconds]
406 # get results of started threads
407 if { $parallel > 0 } {
408 _testgrid_process_jobs $worker
409 # release thread pool
410 if { $nbpooled > 0 } {
411 tpool::cancel $worker [array names job_def]
413 catch {tpool::resume $worker}
414 tpool::release $worker
417 uplevel dchrono _timer stop
418 set time [lindex [split [uplevel dchrono _timer show] "\n"] 0]
421 _log_and_puts log "*********** Stopped by user break ***********"
422 set time "${time} \nNote: the process is not finished, stopped by user break!"
425 ######################################################
426 # output summary logs and exit
427 ######################################################
429 _log_summarize $logdir $log $time
430 if { $logdir != "" } {
431 puts "Detailed logs are saved in $logdir"
433 if { $logdir != "" && $xmlfile != "" } {
434 # XML output file is assumed relative to log dir unless it is absolute
435 if { [ file pathtype $xmlfile] == "relative" } {
436 set xmlfile [file normalize $logdir/$xmlfile]
438 _log_xml_summary $logdir $xmlfile $log 0
439 puts "XML summary is saved to $xmlfile"
# Procedure to regenerate summary log from logs of test cases
# NOTE(review): 'help testsummarize {' opener and closing brace restored; lost in extraction
help testsummarize {
  Regenerate summary log in the test directory from logs of test cases.
  This can be necessary if test grids are executed separately (e.g. on
  different stations) or some grids have been re-executed.
  Use: testsummarize dir
}
# Rebuild the summary log of a completed test run from per-case *.log files
# found under dir/<group>/<grid>/. Each case log is expected to contain exactly
# one CASE status line (matching _test_case_regexp); deviations are reported.
# NOTE(review): reconstructed from a garbled extract — confirm against upstream TestCommands.tcl
proc testsummarize {dir} {
    global _test_case_regexp

    if { ! [file isdirectory $dir] } {
        error "Error: \"$dir\" is not a directory"
    }

    # get summary statements from all test cases in one log
    set log {}

    # to avoid huge listing of logs, first find all subdirectories and iterate
    # by them, parsing log files in each subdirectory independently
    foreach grid [glob -directory $dir -types d -tails */*] {
        foreach caselog [glob -nocomplain -directory [file join $dir $grid] -types f -tails *.log] {
            set file [file join $dir $grid $caselog]
            set nbfound 0
            set fd [open $file r]
            while { [gets $fd line] >= 0 } {
                if { [regexp $_test_case_regexp $line res grp grd cas status message] } {
                    # cross-check that the status line belongs to this very case
                    if { "[file join $grid $caselog]" != "[file join $grp $grd ${cas}.log]" } {
                        puts "Error: $file contains status line for another test case ($line)"
                    }
                    lappend log $line
                    incr nbfound
                }
            }
            close $fd

            if { $nbfound != 1 } {
                puts "Error: $file contains $nbfound status lines, expected 1"
            }
        }
    }

    _log_summarize $dir $log "Summary regenerated from logs at [clock format [clock seconds]]"
    return
}
# Procedure to compare results of two runs of test cases
# NOTE(review): 'help testdiff {' opener, '-verbose' header and closing brace restored; lost in extraction
help testdiff {
  Compare results of two executions of tests (CPU times, ...)
  Use: testdiff dir1 dir2 [groupname [gridname]] [options...]
  Where dir1 and dir2 are directories containing logs of two test runs.
  Allowed options are:
  -save filename: save resulting log in specified file (default name is
          <dir1>/diff-<dir2>.log); HTML log is saved with same name
          and extension .html
  -status {same|ok|all}: filter cases for comparing by their status:
          same - only cases with same status are compared (default)
          ok   - only cases with OK status in both logs are compared
          all  - results are compared regardless of status
  -verbose level:
          1 - output only differences
          2 - output also list of logs and directories present in one of dirs only
          3 - (default) output also progress messages
}
# Compare two test run log directories (CPU times, images, statuses) and save
# a text + HTML diff report. Non-option args narrow the comparison to a
# group/grid subdirectory. See 'help testdiff' for option semantics.
# NOTE(review): reconstructed from a garbled extract (defaults, 'incr narg' and
# else-branches restored by analogy with the testgrid option parser) — confirm
# against upstream TestCommands.tcl
proc testdiff {dir1 dir2 args} {
    if { "$dir1" == "$dir2" } {
        error "Input directories are the same"
    }

    ######################################################
    # get option values
    ######################################################

    # default values
    set logfile [file join $dir1 "diff-[file tail $dir2].log"]
    set basename ""
    set status "same"
    set verbose 3

    # decode options
    for {set narg 0} {$narg < [llength $args]} {incr narg} {
        set arg [lindex $args $narg]

        # log file name
        if { $arg == "-save" } {
            incr narg
            if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
                set logfile [lindex $args $narg]
            } else {
                error "Error: Option -save must be followed by log file name"
            }
            continue
        }

        # status filter
        if { $arg == "-status" } {
            incr narg
            if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
                set status [lindex $args $narg]
            } else { set status "" }
            if { "$status" != "same" && "$status" != "all" && "$status" != "ok" } {
                error "Error: Option -status must be followed by one of \"same\", \"all\", or \"ok\""
            }
            continue
        }

        # verbose level
        if { $arg == "-verbose" } {
            incr narg
            if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
                set verbose [expr [lindex $args $narg]]
            } else {
                error "Error: Option -verbose must be followed by integer verbose level"
            }
            continue
        }

        # unknown option
        if { [regexp {^-} $arg] } {
            error "Error: unsupported option \"$arg\""
        }

        # non-option arguments form a subdirectory path
        set basename [file join $basename $arg]
    }

    # run diff procedure (recursive); it appends its report to 'log'
    set log {}
    _test_diff $dir1 $dir2 $basename $status $verbose log

    # save result to log file
    if { "$logfile" != "" } {
        _log_save $logfile [join $log "\n"]
        _log_html_diff "[file rootname $logfile].html" $log $dir1 $dir2
        puts "Log is saved to $logfile (and .html)"
    }

    return
}
# Procedure to check data file before adding it to repository
# NOTE(review): 'help testfile {' opener, list header and closing lines restored; lost in extraction
help testfile {
  Check data file and prepare it for putting to test data files repository.
  Use: testfile [filelist]

  Checks specified files for typical problems:
  - data file (non-binary) is in DOS encoding (CR/LF)
  - same data file (with same or another name) already exists in the repository
  - another file with the same name already exists
  Note that names are assumed to be case-insensitive (for Windows).

  Unless the file is already in the repository, tries to load it, reports
  the recognized file format, file size, number of faces and edges in the
  loaded shape (if any), and makes snapshot (in the temporary directory).
  Finally it advises whether the file should be put to public section of the
  repository.
}
597 proc testfile {filelist} {
600 # check that CSF_TestDataPath is defined
601 if { ! [info exists env(CSF_TestDataPath)] } {
602 error "Environment variable CSF_TestDataPath must be defined!"
605 # build registry of existing data files (name -> path) and (size -> path)
606 puts "Checking available test data files..."
607 foreach dir [_split_path $env(CSF_TestDataPath)] {
608 while {[llength $dir] != 0} {
609 set curr [lindex $dir 0]
610 set dir [lrange $dir 1 end]
611 eval lappend dir [glob -nocomplain -directory $curr -type d *]
612 foreach file [glob -nocomplain -directory $curr -type f *] {
613 set name [file tail $file]
614 set name_lower [string tolower $name]
616 # check that the file is not in DOS encoding
617 if { [_check_dos_encoding $file] } {
618 puts "Warning: file $file is in DOS encoding; was this intended?"
620 _check_file_format $file
622 # check if file with the same name is present twice or more
623 if { [info exists names($name_lower)] } {
624 puts "Error: more than one file with name $name is present in the repository:"
625 if { [_diff_files $file $names($name_lower)] } {
626 puts "(files are different by content)"
628 puts "(files are same by content)"
631 puts "--> $names($name_lower)"
635 # check if file with the same content exists
636 set size [file size $file]
637 if { [info exists sizes($size)] } {
638 foreach other $sizes($size) {
639 if { ! [_diff_files $file $other] } {
640 puts "Warning: two files with the same content found:"
647 # add the file to the registry
648 set names($name_lower) $file
649 lappend sizes($size) $file
653 if { [llength $filelist] <= 0 } { return }
655 # check the new files
657 puts "Checking new file(s)..."
658 foreach file $filelist {
659 # check for DOS encoding
660 if { [_check_dos_encoding $file] } {
661 puts "$file: Warning: DOS encoding detected"
664 set name [file tail $file]
665 set name_lower [string tolower $name]
667 # check for presence of the file with same name
668 if { [info exists names($name_lower)] } {
669 if { [_diff_files $file $names($name_lower)] } {
670 puts "$file: Error: name is already used by existing file\n--> $names($name_lower)"
672 puts "$file: OK: already in the repository \n--> $names($name_lower)"
677 # check if file with the same content exists
678 set size [file size $file]
679 if { [info exists sizes($size)] } {
681 foreach other $sizes($size) {
682 if { ! [_diff_files $file $other] } {
683 puts "$file: OK: the same file is already present under name [file tail $other]\n--> $other"
688 if { $found } { continue }
691 # try to read the file
692 set format [_check_file_format $file]
693 if { [catch {uplevel load_data_file $file $format a}] } {
694 puts "$file: Error: Cannot read as $format file"
698 # get number of faces and edges
701 set nbs [uplevel nbshapes a]
702 regexp {EDGE[ \t:]*([0-9]+)} $nbs res edges
703 regexp {FACE[ \t:]*([0-9]+)} $nbs res faces
705 # classify; first check file size and number of faces and edges
706 if { $size < 95000 && $faces < 20 && $edges < 100 } {
710 # check if one of names of that file corresponds to typical name for
711 # MDTV bugs or has extension .rle, this should be old model
712 if { [regexp -nocase {.*(cts|ats|pro|buc|ger|fra|usa|uki)[0-9]+.*} $name] ||
713 [regexp -nocase {[.]rle\y} $name] } {
719 puts "$file: $format size=[expr $size / 1024] KiB, nbfaces=$faces, nbedges=$edges -> $dir"
721 set tmpdir [_get_temp_dir]
722 file mkdir $tmpdir/$dir
729 uplevel vdump $tmpdir/$dir/[file rootname [file tail $file]].png
733 puts "Snapshots are saved in subdirectory [_get_temp_dir]"
737 # Procedure to locate data file for test given its name.
738 # The search is performed assuming that the function is called
739 # from the test case script; the search order is:
740 # - subdirectory "data" of the test script (grid) folder
741 # - subdirectories in environment variable CSF_TestDataPath
742 # - subdirectory set by datadir command
743 # If file is not found, raises Tcl error.
# NOTE(review): the parameter list had been clobbered by leaked metadata
# text "(unknown)"; restored to {filename}, which the body uses throughout.
proc locate_data_file {filename} {
    global env groupname gridname casename

    # check if the file is located in the subdirectory data of the script dir
    set scriptfile [info script]
    if { $scriptfile != "" } {
        set path [file join [file dirname $scriptfile] data $filename]
        if { [file exists $path] } {
            return [file normalize $path]
        }
    }

    # check sub-directories in paths indicated by CSF_TestDataPath
    # (breadth-first walk: pop head of the list, push its subdirectories)
    if { [info exists env(CSF_TestDataPath)] } {
        foreach dir [_split_path $env(CSF_TestDataPath)] {
            while {[llength $dir] != 0} {
                set name [lindex $dir 0]
                set dir [lrange $dir 1 end]
                # skip directories starting with dot
                if { [regexp {^[.]} $name] } { continue }
                if { [file exists $name/$filename] } {
                    return [file normalize $name/$filename]
                }
                eval lappend dir [glob -nocomplain -directory $name -type d *]
            }
        }
    }

    # check current datadir
    if { [file exists [uplevel datadir]/$filename] } {
        return [file normalize [uplevel datadir]/$filename]
    }

    # raise error
    error [join [list "File $filename could not be found" \
                     "(should be in paths indicated by CSF_TestDataPath environment variable, " \
                     "or in subfolder data in the script directory)"] "\n"]
}
783 # Internal procedure to find test case indicated by group, grid, and test case names;
785 # - dir: path to the base directory of the tests group
786 # - gridname: actual name of the grid
787 # - casefile: path to the test case script
788 # if no such test is found, raises error with appropriate message
# Resolve group/grid/casename to concrete paths, writing results into the
# caller's variables named by _dir, _gridname, _casefile. Handles grid
# aliases via grids.list and case redirection via cases.list.
# Raises an error if the test cannot be located.
# NOTE(review): reconstructed from a garbled extract — confirm against upstream TestCommands.tcl
proc _get_test {group grid casename _dir _gridname _casefile} {
    upvar $_dir dir
    upvar $_gridname gridname
    upvar $_casefile casefile

    global env

    # check that environment variable defining paths to test scripts is defined
    if { ! [info exists env(CSF_TestScriptsPath)] ||
         [llength $env(CSF_TestScriptsPath)] <= 0 } {
        error "Error: Environment variable CSF_TestScriptsPath is not defined"
    }

    # iterate by all script paths
    foreach dir [_split_path $env(CSF_TestScriptsPath)] {
        # protection against empty paths
        set dir [string trim $dir]
        if { $dir == "" } { continue }

        # check that directory exists
        if { ! [file isdirectory $dir] } {
            puts "Warning: directory $dir listed in CSF_TestScriptsPath does not exist, skipped"
            continue
        }

        # check if test group with given name exists in this dir
        # if not, continue to the next test dir
        if { ! [file isdirectory $dir/$group] } { continue }

        # check that grid with given name (possibly alias) exists; stop otherwise
        set gridname $grid
        if { ! [file isdirectory $dir/$group/$gridname] } {
            # check if grid is named by alias rather than by actual name
            if { [file exists $dir/$group/grids.list] } {
                set fd [open $dir/$group/grids.list]
                while { [gets $fd line] >= 0 } {
                    if { [regexp "\[ \t\]*\#.*" $line] } { continue }
                    if { [regexp "^$grid\[ \t\]*\(\[A-Za-z0-9_.-\]+\)\$" $line res gridname] } {
                        break
                    }
                }
                close $fd
            }
            if { ! [file isdirectory $dir/$group/$gridname] } { continue }
        }

        # get actual file name of the script; stop if it cannot be found
        set casefile $dir/$group/$gridname/$casename
        if { ! [file exists $casefile] } {
            # check if this grid is aliased to another one
            if { [file exists $dir/$group/$gridname/cases.list] } {
                set fd [open $dir/$group/$gridname/cases.list]
                if { [gets $fd line] >= 0 } {
                    set casefile [file normalize $dir/$group/$gridname/[string trim $line]/$casename]
                }
                close $fd
            }
        }
        if { [file exists $casefile] } {
            # normal return: dir, gridname and casefile are set in the caller
            return
        }
    }

    # coming here means specified test is not found; report error
    error [join [list "Error: test case $group / $grid / $casename is not found in paths listed in variable" \
                     "CSF_TestScriptsPath (current value is \"$env(CSF_TestScriptsPath)\")"] "\n"]
}
858 # Internal procedure to run test case indicated by base directory,
859 # grid and grid names, and test case file path.
860 # The log can be obtained by command "dlog get".
861 proc _run_test {scriptsdir group gridname casefile echo} {
865 uplevel dchrono _timer reset
866 uplevel dchrono _timer start
867 catch {uplevel meminfo w} membase
869 # enable commands logging; switch to old-style mode if dlog command is not present
871 if { [catch {dlog reset}] } {
878 rename puts puts-saved
880 global _tests_verbose
882 # log only output to stdout and stderr, not to file!
883 if {[llength $args] > 1} {
884 set optarg [lindex $args end-1]
885 if { $optarg == "stdout" || $optarg == "stderr" || $optarg == "-newline" } {
886 dlog add [lindex $args end]
888 eval puts-saved $args
891 dlog add [lindex $args end]
898 # set variables identifying test case
899 uplevel set casename [file tail $casefile]
900 uplevel set groupname $group
901 uplevel set gridname $gridname
902 uplevel set dirname $scriptsdir
904 # set variables for saving of images if not yet set
905 if { ! [uplevel info exists imagedir] } {
906 uplevel set imagedir [_get_temp_dir]
907 uplevel set test_image \$casename
910 # execute test scripts
911 if { [file exists $scriptsdir/$group/begin] } {
912 puts "Executing $scriptsdir/$group/begin..."; flush stdout
913 uplevel source $scriptsdir/$group/begin
915 if { [file exists $scriptsdir/$group/$gridname/begin] } {
916 puts "Executing $scriptsdir/$group/$gridname/begin..."; flush stdout
917 uplevel source $scriptsdir/$group/$gridname/begin
920 puts "Executing $casefile..."; flush stdout
921 uplevel source $casefile
923 if { [file exists $scriptsdir/$group/$gridname/end] } {
924 puts "Executing $scriptsdir/$group/$gridname/end..."; flush stdout
925 uplevel source $scriptsdir/$group/$gridname/end
927 if { [file exists $scriptsdir/$group/end] } {
928 puts "Executing $scriptsdir/$group/end..."; flush stdout
929 uplevel source $scriptsdir/$group/end
932 puts "Tcl Exception: $res"
936 if { $dlog_exists } {
941 rename puts-saved puts
946 # stop cpulimit killer if armed by the test
949 # add memory and timing info
951 if { ! [catch {uplevel meminfo w} memuse] } {
952 set stats "MEMORY DELTA: [expr ($memuse - $membase) / 1024] KiB\n"
954 uplevel dchrono _timer stop
955 set time [uplevel dchrono _timer show]
956 if [regexp -nocase {CPU user time:[ \t]*([0-9.e-]+)} $time res cpu] {
957 set stats "${stats}TOTAL CPU TIME: $cpu sec\n"
959 if { $dlog_exists && ! $echo } {
966 # Internal procedure to check log of test execution and decide if it passed or failed
967 proc _check_log {dir group gridname casename log {_summary {}} {_html_log {}}} {
969 if { $_summary != "" } { upvar $_summary summary }
970 if { $_html_log != "" } { upvar $_html_log html_log }
976 # load definition of 'bad words' indicating test failure
977 # note that rules are loaded in the order of decreasing priority (grid - group - common),
978 # thus grid rules will override group ones
980 foreach rulesfile [list $dir/$group/$gridname/parse.rules $dir/$group/parse.rules $dir/parse.rules] {
981 if [catch {set fd [open $rulesfile r]}] { continue }
982 while { [gets $fd line] >= 0 } {
983 # skip comments and empty lines
984 if { [regexp "\[ \t\]*\#.*" $line] } { continue }
985 if { [string trim $line] == "" } { continue }
987 if { ! [regexp {^([^/]*)/([^/]*)/(.*)$} $line res status rexp comment] } {
988 puts "Warning: cannot recognize parsing rule \"$line\" in file $rulesfile"
991 set status [string trim $status]
992 if { $comment != "" } { set status "$status ([string trim $comment])" }
993 set rexp [regsub -all {\\b} $rexp {\\y}] ;# convert regexp from Perl to Tcl style
994 lappend badwords [list $status $rexp]
998 if { [llength $badwords] <= 0 } {
999 puts "Warning: no definition of error indicators found (check files parse.rules)"
1002 # analyse log line-by-line
1005 foreach line [split $log "\n"] {
1006 # check if line defines specific treatment of some messages
1007 set deb_info [dversion]
1008 if [regexp -nocase {^[ \s]*TODO ([^:]*):(.*)$} $line res platforms pattern] {
1009 if { [regexp {DEBUG_} $platforms] != 1 } {
1010 if { ! [regexp -nocase {\mAll\M} $platforms] &&
1011 ! [regexp -nocase "\\m$env(os_type)\\M" $platforms] } {
1012 lappend html_log $line
1013 continue ;# TODO statement is for another platform
1016 # record TODOs that mark unstable cases
1017 if { [regexp {[\?]} $platforms] } {
1018 set todos_unstable([llength $todos]) 1
1021 lappend todos [regsub -all {\\b} [string trim $pattern] {\\y}] ;# convert regexp from Perl to Tcl style
1022 lappend html_log [_html_highlight BAD $line]
1026 if { [regexp "Debug mode" $deb_info] != 1 && [regexp {DEBUG_} $platforms] == 1 } {
1030 if { [regexp "Debug mode" $deb_info] == 1 && [regexp {DEBUG_} $platforms] == 1 } {
1031 if { ! [regexp -nocase {\mAll\M} $platforms] &&
1032 ! [regexp -nocase "\\m$env(os_type)\\M" $platforms] } {
1033 lappend html_log $line
1034 continue ;# TODO statement is for another platform
1037 # record TODOs that mark unstable cases
1038 if { [regexp {[\?]} $platforms] } {
1039 set todos_unstable([llength $todos]) 1
1042 lappend todos [regsub -all {\\b} [string trim $pattern] {\\y}] ;# convert regexp from Perl to Tcl style
1043 lappend html_log [_html_highlight BAD $line]
1048 # check for presence of messages indicating test result
1050 foreach bw $badwords {
1051 if { [regexp [lindex $bw 1] $line] } {
1052 # check if this is known bad case
1054 for {set i 0} {$i < [llength $todos]} {incr i} {
1055 if { [regexp [lindex $todos $i] $line] } {
1058 lappend html_log [_html_highlight BAD $line]
1063 # if it is not in todo, define status
1064 if { ! $is_known } {
1065 set stat [lindex $bw 0 0]
1066 lappend html_log [_html_highlight $stat $line]
1067 if { $status == "" && $stat != "OK" && ! [regexp -nocase {^IGNOR} $stat] } {
1068 set status [lindex $bw 0]
1075 if { ! $ismarked } {
1076 lappend html_log $line
1080 # check for presence of TEST COMPLETED statement
1081 if { $status == "" && ! [regexp {TEST COMPLETED} $log] } {
1082 # check whether absence of TEST COMPLETED is known problem
1083 set i [lsearch $todos "TEST INCOMPLETE"]
1087 set status "FAILED (no final message is found)"
1091 # check declared bad cases and diagnose possible improvement
1092 # (bad case declared but not detected).
1093 # Note that absence of the problem marked by TODO with question mark
1094 # (unstable) is not reported as improvement.
1095 if { $status == "" } {
1096 for {set i 0} {$i < [llength $todos]} {incr i} {
1097 if { ! [info exists todos_unstable($i)] &&
1098 (! [info exists todo_count($i)] || $todo_count($i) <= 0) } {
1099 set status "IMPROVEMENT (expected problem TODO no. [expr $i + 1] is not detected)"
1105 # report test as known bad if at least one of expected problems is found
1106 if { $status == "" && [llength [array names todo_count]] > 0 } {
1107 set status "BAD (known problem)"
1111 if { $status == "" } {set status "OK" }
1114 set status "FAILED ($res)"
1118 _log_and_puts summary "CASE $group $gridname $casename: $status"
1119 set summary [join $summary "\n"]
1120 set html_log "[_html_highlight [lindex $status 0] $summary]\n[join $html_log \n]"
1123 # Auxiliary procedure putting message to both cout and log variable (list)
# logvar is the NAME of a list variable in the caller's scope ("" disables
# appending); the local "log" is presumably bound to $logvar via an upvar
# statement not visible here -- TODO confirm against the full source
1124 proc _log_and_puts {logvar message} {
1125 if { $logvar != "" } {
# append the message as a single element of the caller's log list
1127 lappend log $message
1132 # Auxiliary procedure to log result on single test case
# output  - full text output of the test case execution
# logdir  - root directory for log files ("" disables writing files)
# dir, group, grid, casename - identification of the test case
# logvar  - name of the caller's summary-log list variable; the local "log"
#           is presumably bound to it via upvar (not visible here) -- verify
1133 proc _log_test_case {output logdir dir group grid casename logvar} {
1136 # check result and make HTML log
1137 _check_log $dir $group $grid $casename $output summary html_log
1138 lappend log $summary
1141 if { $logdir != "" } {
# write per-case HTML and plain-text logs under <logdir>/<group>/<grid>/
1142 _log_html $logdir/$group/$grid/$casename.html $html_log "Test $group $grid $casename"
1143 _log_save $logdir/$group/$grid/$casename.log "$output\n$summary" "Test $group $grid $casename"
1147 # Auxiliary procedure to save log to a plain text file
# file  - destination path (missing parent directories are created)
# log   - text to dump; title - optional title (use not visible here -- verify)
1148 proc _log_save {file log {title {}}} {
1149 # create missing directories as needed
1150 catch {file mkdir [file dirname $file]}
1152 # try to open a file
1153 if [catch {set fd [open $file w]} res] {
1154 error "Error saving log file $file: $res"
1157 # dump log and close
1164 # Auxiliary procedure to make a (relative if possible) URL to a file for
1165 # inclusion of a reference in HTML log
1166 proc _make_url {htmldir file} {
1167 set htmlpath [file split [file normalize $htmldir]]
1168 set filepath [file split [file normalize $file]]
# walk the two normalized paths while their leading components coincide
1169 for {set i 0} {$i < [llength $htmlpath]} {incr i} {
1170 if { "[lindex $htmlpath $i]" != "[lindex $filepath $i]" } {
# no common prefix at all (e.g. different drives) -- relative URL impossible
1171 if { $i == 0 } { break }
# climb up from htmldir with "../" repeats, then descend into the file path
1172 return "[string repeat "../" [expr [llength $htmlpath] - $i - 1]][eval file join [lrange $filepath $i end]]"
1176 # if relative path could not be made, return full file URL
1177 return "file://[file normalize $file]"
1180 # Auxiliary procedure to save log to an HTML file, embedding any images
# found next to the file and hyperlinking "Executing <script>" lines
1181 proc _log_html {file log {title {}}} {
1182 # create missing directories as needed
1183 catch {file mkdir [file dirname $file]}
1185 # try to open a file
1186 if [catch {set fd [open $file w]} res] {
1187 error "Error saving log file $file: $res"
1191 puts $fd "<html><head><title>$title</title></head><body><h1>$title</h1>"
1193 # add images if present
# images are matched by the log file's own base name (<case>*.gif/png/jpg)
1194 set imgbasename [file rootname [file tail $file]]
1195 foreach img [lsort [glob -nocomplain -directory [file dirname $file] -tails ${imgbasename}*.gif ${imgbasename}*.png ${imgbasename}*.jpg]] {
1196 puts $fd "<p>[file tail $img]<br><img src=\"$img\"/><p>"
1199 # print log body, trying to add HTML links to script files on lines like
1200 # "Executing <filename>..."
1202 foreach line [split $log "\n"] {
1203 if { [regexp {Executing[ \t]+([a-zA-Z0-9._/:-]+[^.])} $line res script] &&
1204 [file exists $script] } {
# replace the script path by an <a href> pointing to it (relative if possible)
1205 set line [regsub $script $line "<a href=\"[_make_url $file $script]\">$script</a>"]
1209 puts $fd "</pre></body></html>"
1215 # Auxiliary method to make text with HTML highlighting according to status
# Maps a test status word to a bgcolor name for HTML tables; matching is by
# case-insensitive prefix, so e.g. "FAILED (reason)" matches ^FAIL
1216 proc _html_color {status} {
1217 # choose a color for the cell according to result
1218 if { $status == "OK" } {
1220 } elseif { [regexp -nocase {^FAIL} $status] } {
1222 } elseif { [regexp -nocase {^BAD} $status] } {
1224 } elseif { [regexp -nocase {^IMP} $status] } {
1226 } elseif { [regexp -nocase {^SKIP} $status] } {
1228 } elseif { [regexp -nocase {^IGNOR} $status] } {
# unknown statuses are reported and presumably colored as failures
1231 puts "Warning: no color defined for status $status, using red as if FAILED"
1236 # Format text line in HTML to be colored according to the status
# Wraps the line into a one-cell table whose background color is chosen
# by _html_color from the given status word
1237 proc _html_highlight {status line} {
1238 return "<table><tr><td bgcolor=\"[_html_color $status]\">$line</td></tr></table>"
1241 # Internal procedure to generate HTML page presenting log of the tests
1242 # execution in tabular form, with links to reports on individual cases
# logdir: destination directory (summary.html is created there)
# log: list of per-case summary lines matching $_test_case_regexp
# totals/regressions/improvements: statistics collected by _log_summarize
# total_time: elapsed-time text; empty string means tests are still running
1243 proc _log_html_summary {logdir log totals regressions improvements total_time} {
1244 global _test_case_regexp
1246 # create missing directories as needed
1249 # try to open a file and start HTML
1250 if [catch {set fd [open $logdir/summary.html w]} res] {
1251 error "Error creating log file: $res"
1254 # write HTML header, including command to refresh log if still in progress
1255 puts $fd "<html><head>"
1256 puts $fd "<title>Tests summary</title>"
1257 if { $total_time == "" } {
# no total time yet: auto-refresh the page every 10 s while tests run
1258 puts $fd "<meta http-equiv=\"refresh\" content=\"10\">"
1260 puts $fd "<meta http-equiv=\"pragma\" content=\"NO-CACHE\">"
1261 puts $fd "</head><body>"
# legend explaining the standard statuses shown in the summary table
1264 set legend(OK) "Test passed OK"
1265 set legend(FAILED) "Test failed (regression)"
1266 set legend(BAD) "Known problem"
1267 set legend(IMPROVEMENT) "Possible improvement (expected problem not detected)"
1268 set legend(SKIPPED) "Test skipped due to lack of data file"
1269 puts $fd "<h1>Summary</h1><table>"
1270 foreach nbstat $totals {
1271 set status [lindex $nbstat 1]
1272 if { [info exists legend($status)] } {
1273 set comment $legend($status)
1275 set comment "User-defined status"
1277 puts $fd "<tr><td align=\"right\">[lindex $nbstat 0]</td><td bgcolor=\"[_html_color $status]\">$status</td><td>$comment</td></tr>"
1281 # time stamp and elapsed time info
1282 if { $total_time != "" } {
1283 puts $fd "<p>Generated on [clock format [clock seconds] -format {%Y-%m-%d %H:%M:%S}] on [info hostname]\n<p>"
1284 puts $fd [join [split $total_time "\n"] "<p>"]
1286 puts $fd "<p>NOTE: This is intermediate summary; the tests are still running! This page will refresh automatically until tests are finished."
1289 # print regressions and improvements
1290 foreach featured [list $regressions $improvements] {
# first element is a header like "Failed:"; length 1 thus means "empty list"
1291 if { [llength $featured] <= 1 } { continue }
1292 set status [string trim [lindex $featured 0] { :}]
1293 puts $fd "<h2>$status</h2>"
1296 foreach test [lrange $featured 1 end] {
# each entry is "<group> <grid> <casename>"; split off the trailing case name
1297 if { ! [regexp {^(.*)\s+([\w.]+)$} $test res gg name] } {
1299 set name "Error building short list; check details"
1301 if { $gg != $groupgrid } {
# start a new table row for each new group/grid combination
1302 if { $groupgrid != "" } { puts $fd "</tr>" }
1304 puts $fd "<tr><td>$gg</td>"
1306 puts $fd "<td bgcolor=\"[_html_color $status]\"><a href=\"[regsub -all { } $gg /]/${name}.html\">$name</a></td>"
1308 if { $groupgrid != "" } { puts $fd "</tr>" }
1312 # put detailed log with TOC
1313 puts $fd "<hr><h1>Details</h1>"
1314 puts $fd "<div style=\"float:right; padding: 10px; border-style: solid; border-color: blue; border-width: 2px;\">"
1316 # process log line-by-line
1320 foreach line [lsort -dictionary $log] {
1321 # check that the line is case report in the form "CASE group grid name: result (explanation)"
1322 if { ! [regexp $_test_case_regexp $line res grp grd casename result message] } {
# begin a new TOC entry and body section when the group changes
1327 if { $grp != $group } {
1328 if { $letter != "" } { lappend body "</tr></table>" }
1332 puts $fd "<a href=\"#$group\">$group</a><br>"
1333 lappend body "<h2><a name=\"$group\">Group $group</a></h2>"
# likewise when the grid changes within a group
1337 if { $grd != $grid } {
1338 if { $letter != "" } { lappend body "</tr></table>" }
1341 puts $fd " <a href=\"#$group-$grid\">$grid</a><br>"
1342 lappend body "<h2><a name=\"$group-$grid\">Grid $group $grid</a></h2>"
1345 # check if test case name is <letter><digit>;
1346 # if not, set alnum to period "." to recognize non-standard test name
1347 if { ! [regexp {\A([A-Za-z]{1,2})([0-9]{1,2})\Z} $casename res alnum number] &&
1348 ! [regexp {\A([A-Za-z0-9]+)_([0-9]+)\Z} $casename res alnum number] } {
1352 # start new row when letter changes or for non-standard names
1353 if { $alnum != $letter || $alnum == "." } {
1354 if { $letter != "" } {
1355 lappend body "</tr><tr>"
1357 lappend body "<table><tr>"
# one colored cell per test case, linking to its individual HTML log
1362 lappend body "<td bgcolor=\"[_html_color $result]\"><a href=\"$group/$grid/${casename}.html\">$casename</a></td>"
1364 puts $fd "</div>\n[join $body "\n"]</tr></table>"
1366 # add remaining lines of log as plain text
1367 puts $fd "<h2>Plain text messages</h2>\n<pre>"
# only lines that are NOT per-case reports are echoed here
1369 if { ! [regexp $_test_case_regexp $line] } {
1375 # close file and exit
1381 # Procedure to dump summary logs of tests
# logdir: directory for summary.html / tests.log ("" disables file output)
# log: list of per-case result lines; total_time: elapsed-time text or ""
1382 proc _log_summarize {logdir log {total_time {}}} {
1384 # sort log records alphabetically to have the same behavior on Linux and Windows
1385 # (also needed if tests are run in parallel)
1386 set loglist [lsort -dictionary $log]
1388 # classify test cases by status
1389 foreach line $loglist {
# stat(<status>) accumulates the case ids having that status
1390 if { [regexp {^CASE ([^:]*): ([[:alnum:]]+).*$} $line res caseid status] } {
1391 lappend stat($status) $caseid
# both lists carry a header element; a length of 1 therefore means "empty"
1395 set improvements {Improvements:}
1396 set regressions {Failed:}
1397 if { [info exists stat] } {
1398 foreach status [lsort [array names stat]] {
1399 lappend totals [list [llength $stat($status)] $status]
1401 # separately count improvements (status starting with IMP) and regressions (all except IMP, OK, BAD, and SKIP)
1402 if { [regexp -nocase {^IMP} $status] } {
1403 eval lappend improvements $stat($status)
1404 } elseif { $status != "OK" && ! [regexp -nocase {^BAD} $status] && ! [regexp -nocase {^SKIP} $status] } {
1405 eval lappend regressions $stat($status)
1410 # if time is specified, add totals
1411 if { $total_time != "" } {
1412 if { [llength $improvements] > 1 } {
1413 _log_and_puts log [join $improvements "\n "]
1415 if { [llength $regressions] > 1 } {
1416 _log_and_puts log [join $regressions "\n "]
1418 if { [llength $improvements] == 1 && [llength $regressions] == 1 } {
1419 _log_and_puts log "No regressions"
1421 _log_and_puts log "Total cases: [join $totals {, }]"
1422 _log_and_puts log $total_time
# write HTML summary and plain-text log if a log directory is given
1426 if { $logdir != "" } {
1427 _log_html_summary $logdir $log $totals $regressions $improvements $total_time
1428 _log_save $logdir/tests.log [join $log "\n"] "Tests summary"
1434 # Internal procedure to generate XML log in JUnit style, for further
1435 # consumption by Jenkins or similar systems.
1437 # The output is intended to conform to XML schema supported by Jenkins found at
1438 # https://svn.jenkins-ci.org/trunk/hudson/dtkit/dtkit-format/dtkit-junit-model/src/main/resources/com/thalesgroup/dtkit/junit/model/xsd/junit-4.xsd
1440 # The mapping of the fields is inspired by annotated schema of Apache Ant JUnit XML format found at
1441 # http://windyroad.org/dl/Open%20Source/JUnit.xsd
# include_cout: when true, the full case log is embedded in <system-out>
1442 proc _log_xml_summary {logdir filename log include_cout} {
1443 global _test_case_regexp
1445 catch {file mkdir [file dirname $filename]}
1447 # try to open a file and start XML
1448 if [catch {set fd [open $filename w]} res] {
1449 error "Error creating XML summary file $filename: $res"
1451 puts $fd "<?xml version='1.0' encoding='utf-8'?>"
1452 puts $fd "<testsuites>"
1454 # prototype for command to generate test suite tag
# cmd_testsuite is eval'ed later, when the suite variables are filled in
1455 set time_and_host "timestamp=\"[clock format [clock seconds] -format {%Y-%m-%dT%H:%M:%S}]\" hostname=\"[info hostname]\""
1456 set cmd_testsuite {puts $fd "<testsuite name=\"$group $grid\" tests=\"$nbtests\" failures=\"$nbfail\" errors=\"$nberr\" time=\"$time\" skipped=\"$nbskip\" $time_and_host>\n$testcases\n</testsuite>\n"}
1458 # sort log and process it line-by-line
1460 foreach line [lsort -dictionary $log] {
1461 # check that the line is case report in the form "CASE group grid name: result (explanation)"
1462 if { ! [regexp $_test_case_regexp $line res grp grd casename result message] } {
1465 set message [string trim $message " \t\r\n()"]
1467 # start new testsuite for each grid
1468 if { $grp != $group || $grd != $grid } {
1470 # write previous test suite
1471 if [info exists testcases] { eval $cmd_testsuite }
1486 # parse test log and get its CPU time
1489 if { [catch {set fdlog [open $logdir/$group/$grid/${casename}.log r]} ret] } {
1490 puts "Error: cannot open $logdir/$group/$grid/${casename}.log: $ret"
1492 while { [gets $fdlog logline] >= 0 } {
1493 if { $include_cout } {
1494 set testout "$testout$logline\n"
1496 if [regexp -nocase {TOTAL CPU TIME:\s*([\d.]+)\s*sec} $logline res cpu] {
# record per-case CPU time and accumulate the suite total
1497 set add_cpu " time=\"$cpu\""
1498 set time [expr $time + $cpu]
1503 if { ! $include_cout } {
1504 set testout "$line\n"
1507 # record test case with its output and status
1508 # Mapping is: SKIP* -> <error>, BAD* -> <skipped>, other non-OK -> <failure>
1509 set testcases "$testcases\n <testcase name=\"$casename\"$add_cpu status=\"$result\">\n"
1510 set testcases "$testcases\n <system-out>\n$testout </system-out>"
1511 if { $result != "OK" } {
1512 if { [regexp -nocase {^SKIP} $result] } {
# skipped case (lack of data) is reported as <error> for visibility
1514 set testcases "$testcases\n <error name=\"$result\" message=\"$message\"/>"
1515 } elseif { [regexp -nocase {^BAD} $result] } {
# known problem is mapped to <skipped> so it does not fail the build
1517 set testcases "$testcases\n <skipped>$message</skipped>"
1520 set testcases "$testcases\n <failure name=\"$result\" message=\"$message\"/>"
1523 set testcases "$testcases\n </testcase>"
1526 # write last test suite
1527 if [info exists testcases] { eval $cmd_testsuite }
1530 puts $fd "</testsuites>"
1535 # define custom platform name
# Sets env(os_type) if it is not yet defined: the base value is
# tcl_platform(platform), refined for known Linux distributions and MacOS
1536 proc _tests_platform_def {} {
1537 global env tcl_platform
# respect a value already provided by the environment
1539 if [info exists env(os_type)] { return }
1541 set env(os_type) $tcl_platform(platform)
1543 # use detailed mapping for various versions of Linux
1544 # (note that mapping is rather non-uniform, for historical reasons)
1545 if { $tcl_platform(os) == "Linux" && ! [catch {exec cat /etc/issue} issue] } {
1546 if { [regexp {Mandriva[ \tA-Za-z]+([0-9]+)} $issue res num] } {
1547 set env(os_type) Mandriva$num
1548 } elseif { [regexp {Red Hat[ \tA-Za-z]+([0-9]+)} $issue res num] } {
1549 set env(os_type) RedHat$num
1550 } elseif { [regexp {Debian[ \tA-Za-z/]+([0-9]+)[.]([0-9]+)} $issue res num subnum] } {
1551 set env(os_type) Debian$num$subnum
1552 } elseif { [regexp {CentOS[ \tA-Za-z]+([0-9]+)[.]([0-9]+)} $issue res num subnum] } {
1553 set env(os_type) CentOS$num$subnum
1554 } elseif { [regexp {Scientific[ \tA-Za-z]+([0-9]+)[.]([0-9]+)} $issue res num subnum] } {
1555 set env(os_type) SL$num$subnum
1556 } elseif { [regexp {Fedora Core[ \tA-Za-z]+([0-9]+)} $issue res num] } {
1557 set env(os_type) FedoraCore$num
# add -64 suffix on 64-bit Linux systems
1559 if { [exec uname -m] == "x86_64" } {
1560 set env(os_type) "$env(os_type)-64"
1562 } elseif { $tcl_platform(os) == "Darwin" } {
1563 set env(os_type) MacOS
1568 # Auxiliary procedure to split path specification (usually defined by
1569 # environment variable) into list of directories or files
1570 proc _split_path {pathspec} {
1573 # first replace all \ (which might occur on Windows) by /
1574 regsub -all "\\\\" $pathspec "/" pathspec
1576 # split path by platform-specific separator
1577 return [split $pathspec [_path_separator]]
1580 # Auxiliary procedure to define platform-specific separator for directories in
1581 # path specification
1582 proc _path_separator {} {
1585 # select the separator for the current platform
1586 if { $tcl_platform(platform) == "windows" } {
1593 # Procedure to make a diff and common of two lists
# Computes set-like differences via linear lsearch scans:
# _in1 gets items only in list1, _in2 items only in list2, _common the rest
1594 proc _list_diff {list1 list2 _in1 _in2 _common} {
1597 upvar $_common common
# classify items of list1 as common or list1-only
1602 foreach item $list1 {
1603 if { [lsearch -exact $list2 $item] >= 0 } {
1604 lappend common $item
# whatever from list2 is not common must be list2-only
1609 foreach item $list2 {
1610 if { [lsearch -exact $common $item] < 0 } {
1617 # procedure to load a file to Tcl string
# Opens $filename for reading and returns its whole content as a string
# with the trailing newline stripped (read -nonewline)
1618 proc _read_file {filename} {
1619 set fd [open $filename r]
1620 set result [read -nonewline $fd]
1625 # procedure to construct name for the image diff file
# The diff image is placed under the first run's directory tree, with a
# name encoding the second run's directory tail and the image file name
1626 proc _diff_img_name {dir1 dir2 casepath imgfile} {
1627 return [file join $dir1 $casepath "diff-[file tail $dir2]-$imgfile"]
1630 # Procedure to compare results of two runs of test cases
# dir1/dir2: root directories of the two runs; basename: relative path of the
# (sub)grid being compared; status: filter ("all" compares regardless of
# status; "ok" only cases that passed in the first run); verbose: verbosity
# level; _logvar/_statvar: names of caller's log list / statistics array
1631 proc _test_diff {dir1 dir2 basename status verbose _logvar {_statvar ""}} {
1634 # make sure to load diffimage command
1635 uplevel pload VISUALIZATION
1637 # prepare variable (array) for collecting statistics
1638 if { "$_statvar" != "" } {
1639 upvar $_statvar stat
1648 # first check subdirectories
1649 set path1 [file join $dir1 $basename]
1650 set path2 [file join $dir2 $basename]
1651 set list1 [glob -directory $path1 -types d -tails -nocomplain *]
1652 set list2 [glob -directory $path2 -types d -tails -nocomplain *]
1653 if { [llength $list1] >0 || [llength $list2] > 0 } {
1654 _list_diff $list1 $list2 in1 in2 common
1655 if { "$verbose" > 1 } {
1656 if { [llength $in1] > 0 } { _log_and_puts log "Only in $path1: $in1" }
1657 if { [llength $in2] > 0 } { _log_and_puts log "Only in $path2: $in2" }
# recurse into subdirectories present in both runs
1659 foreach subdir $common {
1660 if { "$verbose" > 2 } {
1661 _log_and_puts log "Checking [file join $basename $subdir]"
1663 _test_diff $dir1 $dir2 [file join $basename $subdir] $status $verbose log stat
1666 # check log files (only if directory has no subdirs)
1667 set list1 [glob -directory $path1 -types f -tails -nocomplain *.log]
1668 set list2 [glob -directory $path2 -types f -tails -nocomplain *.log]
1669 _list_diff $list1 $list2 in1 in2 common
1670 if { "$verbose" > 1 } {
1671 if { [llength $in1] > 0 } { _log_and_puts log "Only in $path1: $in1" }
1672 if { [llength $in2] > 0 } { _log_and_puts log "Only in $path2: $in2" }
1674 foreach logfile $common {
1676 set log1 [_read_file [file join $dir1 $basename $logfile]]
1677 set log2 [_read_file [file join $dir2 $basename $logfile]]
1678 set casename [file rootname $logfile]
1680 # check execution statuses
1681 set status1 UNDEFINED
1682 set status2 UNDEFINED
# report when either status is missing or the two statuses disagree
1683 if { ! [regexp {CASE [^:]*:\s*([\w]+)} $log1 res1 status1] ||
1684 ! [regexp {CASE [^:]*:\s*([\w]+)} $log2 res2 status2] ||
1685 "$status1" != "$status2" } {
1686 _log_and_puts log "STATUS [split $basename /] $casename: $status1 / $status2"
1688 # if test statuses are different, further comparison makes
1689 # no sense unless explicitly requested
1690 if { "$status" != "all" } {
# with status filter "ok", compare only cases that passed in the first run
1694 if { "$status" == "ok" && "$status1" != "OK" } {
# compare CPU times reported in the two logs
1701 if { [regexp {TOTAL CPU TIME:\s*([\d.]+)} $log1 res1 cpu1] &&
1702 [regexp {TOTAL CPU TIME:\s*([\d.]+)} $log2 res1 cpu2] } {
1703 set stat(cpu1) [expr $stat(cpu1) + $cpu1]
1704 set stat(cpu2) [expr $stat(cpu2) + $cpu2]
1706 # compare CPU times with 10% precision (but not less 0.5 sec)
1707 if { [expr abs ($cpu1 - $cpu2) > 0.5 + 0.05 * abs ($cpu1 + $cpu2)] } {
1708 _log_and_puts log "CPU [split $basename /] $casename: $cpu1 / $cpu2"
1712 # check memory delta
1715 if { [regexp {MEMORY DELTA:\s*([\d.]+)} $log1 res1 mem1] &&
1716 [regexp {MEMORY DELTA:\s*([\d.]+)} $log2 res1 mem2] } {
1717 set stat(mem1) [expr $stat(mem1) + $mem1]
1718 set stat(mem2) [expr $stat(mem2) + $mem2]
1720 # compare memory usage with 10% precision (but not less 16 KiB)
1721 if { [expr abs ($mem1 - $mem2) > 16 + 0.05 * abs ($mem1 + $mem2)] } {
1722 _log_and_puts log "MEMORY [split $basename /] $casename: $mem1 / $mem2"
# compare images produced by the test case (PNG / GIF snapshots)
1727 set imglist1 [glob -directory $path1 -types f -tails -nocomplain $casename*.{png,gif}]
1728 set imglist2 [glob -directory $path2 -types f -tails -nocomplain $casename*.{png,gif}]
1729 _list_diff $imglist1 $imglist2 imgin1 imgin2 imgcommon
1730 if { "$verbose" > 1 } {
1731 if { [llength $imgin1] > 0 } { _log_and_puts log "Only in $path1: $imgin1" }
1732 if { [llength $imgin2] > 0 } { _log_and_puts log "Only in $path2: $imgin2" }
1734 foreach imgfile $imgcommon {
1735 # if { $verbose > 1 } { _log_and_puts log "Checking [split basename /] $casename: $imgfile" }
1736 set diffile [_diff_img_name $dir1 $dir2 $basename $imgfile]
1737 if { [catch {diffimage [file join $dir1 $basename $imgfile] \
1738 [file join $dir2 $basename $imgfile] \
1739 0 0 0 $diffile} diff] } {
1740 _log_and_puts log "IMAGE [split $basename /] $casename: $imgfile cannot be compared"
1741 file delete -force $diffile ;# clean possible previous result of diffimage
1742 } elseif { $diff != 0 } {
1743 _log_and_puts log "IMAGE [split $basename /] $casename: $imgfile differs"
1745 file delete -force $diffile ;# clean useless artifact of diffimage
# at the top-level (non-recursive) call, report accumulated totals
1751 if { "$_statvar" == "" } {
1752 _log_and_puts log "Total MEMORY difference: $stat(mem1) / $stat(mem2)"
1753 _log_and_puts log "Total CPU difference: $stat(cpu1) / $stat(cpu2)"
1757 # Auxiliary procedure to save log of results comparison to file
# file: destination HTML path; log: output of _test_diff;
# dir1/dir2: the two compared result directories
1758 proc _log_html_diff {file log dir1 dir2} {
1759 # create missing directories as needed
1760 catch {file mkdir [file dirname $file]}
1762 # try to open a file
1763 if [catch {set fd [open $file w]} res] {
1764 error "Error saving log file $file: $res"
1768 puts $fd "<html><head><title>Diff $dir1 vs. $dir2</title></head><body>"
1769 puts $fd "<h1>Comparison of test results: $dir1 vs. $dir2</h1>"
1771 # print log body, trying to add HTML links to script files on lines like
1772 # "Executing <filename>..."
1774 set logpath [file split [file normalize $file]]
# for IMAGE lines, embed the two images and their pixel diff side by side
1778 if { [regexp {IMAGE[ \t]+([^:]+):[ \t]+([A-Za-z0-9_.-]+)} $line res case img] } {
1779 if { [catch {eval file join "" [lrange $case 0 end-1]} gridpath] } {
1780 # note: special handler for the case if test grid directories are compared directly
1783 set img1 "<img src=\"[_make_url $file [file join $dir1 $gridpath $img]]\">"
1784 set img2 "<img src=\"[_make_url $file [file join $dir2 $gridpath $img]]\">"
1786 set difffile [_diff_img_name $dir1 $dir2 $gridpath $img]
# the diff image exists only when diffimage found differing pixels
1787 if { [file exists $difffile] } {
1788 set imgd "<img src=\"[_make_url $file $difffile]\">"
1793 puts $fd "<table><tr><th>[file tail $dir1]</th><th>[file tail $dir2]</th><th>Different pixels</th></tr>"
1794 puts $fd "<tr><td>$img1</td><td>$img2</td><td>$imgd</td></tr></table>"
1797 puts $fd "</pre></body></html>"
1803 # get number of CPUs on the system
# Returns the number of logical CPUs, or 0 if it cannot be determined
1804 proc _get_nb_cpus {} {
1805 global tcl_platform env
1807 if { "$tcl_platform(platform)" == "windows" } {
1808 # on Windows, take the value of the environment variable
1809 if { [info exists env(NUMBER_OF_PROCESSORS)] &&
1810 ! [catch {expr $env(NUMBER_OF_PROCESSORS) > 0} res] && $res >= 0 } {
1811 return $env(NUMBER_OF_PROCESSORS)
1813 } elseif { "$tcl_platform(os)" == "Linux" } {
1814 # on Linux, take number of logical processors listed in /proc/cpuinfo
1815 if { [catch {open "/proc/cpuinfo" r} fd] } {
1816 return 0 ;# should never happen, but...
# count "processor : N" records
1819 while { [gets $fd line] >= 0 } {
1820 if { [regexp {^processor[ \t]*:} $line] } {
1826 } elseif { "$tcl_platform(os)" == "Darwin" } {
1827 # on MacOS X, call sysctl command
1828 if { ! [catch {exec sysctl hw.ncpu} ret] &&
1829 [regexp {^hw[.]ncpu[ \t]*:[ \t]*([0-9]+)} $ret res nb] } {
1834 # if cannot get good value, return 0 as default
1838 # check two files for difference
# Compares the two files line by line; sets a flag when a line length or
# content differs (return statements are presumably below -- verify)
1839 proc _diff_files {file1 file2} {
1840 set fd1 [open $file1 "r"]
1841 set fd2 [open $file2 "r"]
1845 set nb1 [gets $fd1 line1]
1846 set nb2 [gets $fd2 line2]
# gets returns -1 at EOF; differing line lengths mean the files differ
1847 if { $nb1 != $nb2 } { set differ t; break }
1848 if { $nb1 < 0 } { break }
1849 if { [string compare $line1 $line2] } {
1860 # Check if file is in DOS encoding.
1861 # This check is done by presence of \r\n combination at the end of the first
1862 # line (i.e. prior to any other \n symbol).
1863 # Note that presence of non-ascii symbols typically used for recognition
1864 # of binary files is not suitable since some IGES and STEP files contain
1865 # non-ascii symbols.
1866 # Special check is added for PNG files which contain \r\n in the beginning.
1867 proc _check_dos_encoding {file} {
# open in binary mode so \r is not translated away by the channel
1868 set fd [open $file rb]
# first line ends with \r (and is not a PNG signature) => DOS encoding
1870 if { [gets $fd line] && [regexp {.*\r$} $line] &&
1871 ! [regexp {^.PNG} $line] } {
1878 # procedure to recognize format of a data file by its first symbols (for OCCT
1879 # BREP and geometry DRAW formats, IGES, and STEP) and extension (all others)
1880 proc _check_file_format {file} {
# read the first 1024 bytes in binary mode for signature checks
1881 set fd [open $file rb]
1882 set line [read $fd 1024]
1886 set ext [file extension $file]
# OCCT BREP signature; warn below when the extension disagrees
1888 if { [regexp {^DBRep_DrawableShape} $line] } {
1890 if { "$ext" != ".brep" && "$ext" != ".rle" &&
1891 "$ext" != ".draw" && "$ext" != "" } {
# DRAW geometry (curve/surface) signature
1894 } elseif { [regexp {^DrawTrSurf_} $line] } {
1896 if { "$ext" != ".rle" &&
1897 "$ext" != ".draw" && "$ext" != "" } {
# STEP physical file header (ISO 10303-21)
1900 } elseif { [regexp {^[ \t]*ISO-10303-21} $line] } {
1902 if { "$ext" != ".step" && "$ext" != ".stp" } {
# IGES: column 73 of the first 80-char record carries section letter 'S'
1905 } elseif { [regexp {^.\{72\}S[0 ]\{6\}1} $line] } {
1907 if { "$ext" != ".iges" && "$ext" != ".igs" } {
1910 } elseif { "$ext" == ".igs" } {
1912 } elseif { "$ext" == ".stp" } {
# fall back to the (uppercased) extension as the format name
1915 set format [string toupper [string range $ext 1 end]]
1919 puts "$file: Warning: extension ($ext) does not match format ($format)"
1925 # procedure to load file knowing its format
# Dispatches on $format; the load commands are run via uplevel so that the
# resulting DRAW variable $shape is created in the caller's scope
1926 proc load_data_file {file format shape} {
1928 BREP { uplevel restore $file $shape }
1929 DRAW { uplevel restore $file $shape }
1930 IGES { pload XSDRAW; uplevel igesbrep $file $shape * }
1931 STEP { pload XSDRAW; uplevel stepread $file __a *; uplevel renamevar __a_1 $shape }
1932 STL { pload XSDRAW; uplevel readstl $shape $file }
1933 default { error "Cannot read $format file $file" }
1937 # procedure to get name of temporary directory,
1938 # ensuring it is existing and writeable
1939 proc _get_temp_dir {} {
1940 global env tcl_platform
1942 # check typical environment variables
1943 foreach var {TempDir Temp Tmp} {
1944 # check different case
1945 foreach name [list [string toupper $var] $var [string tolower $var]] {
1946 if { [info exists env($name)] && [file isdirectory $env($name)] &&
1947 [file writable $env($name)] } {
# normalize backslashes to forward slashes before returning
1948 return [regsub -all {\\} $env($name) /]
1953 # check platform-specific locations
1955 if { "$tcl_platform(platform)" == "windows" } {
1956 set paths "c:/TEMP c:/TMP /TEMP /TMP"
1957 if { [info exists env(HOMEDRIVE)] && [info exists env(HOMEPATH)] } {
# fixed: was "$env(HOMEDRIVE)$(HOMEPATH)" -- "$(HOMEPATH)" dereferences an
# array with an EMPTY variable name and fails at runtime with
# "no such variable"; the guard above checks env(HOMEPATH), so the
# intended reference is $env(HOMEPATH)
1958 set fallback [regsub -all {\\} "$env(HOMEDRIVE)$env(HOMEPATH)/tmp" /]
1961 set paths "/tmp /var/tmp /usr/tmp"
1962 if { [info exists env(HOME)] } {
1963 set fallback "$env(HOME)/tmp"
# use the first writable candidate directory
1966 foreach dir $paths {
1967 if { [file isdirectory $dir] && [file writable $dir] } {
1972 # fallback case: use subdir /tmp of home or current dir
1973 file mkdir $fallback
1977 # extract of code from testgrid command used to process jobs running in
1978 # parallel until number of jobs in the queue becomes equal or less than
1980 proc _testgrid_process_jobs {worker {nb_ok 0}} {
1981 # bind local vars to variables of the caller procedure
1984 upvar job_def job_def
1985 upvar nbpooled nbpooled
1986 upvar userbreak userbreak
1987 upvar refresh refresh
1988 upvar refresh_timer refresh_timer
1990 catch {tpool::resume $worker}
1991 while { ! $userbreak && $nbpooled > $nb_ok } {
1992 foreach job [tpool::wait $worker [array names job_def]] {
1993 eval _log_test_case \[tpool::get $worker $job\] $job_def($job) log
1998 # check for user break
1999 if { "[info commands dbreak]" == "dbreak" && [catch dbreak] } {
2003 # update summary log with requested period
2004 if { $logdir != "" && $refresh > 0 && [clock seconds] > $refresh_timer + $refresh } {
2005 _log_summarize $logdir $log
2006 set refresh_timer [clock seconds]
2009 catch {tpool::suspend $worker}