src/DrawResources/TestCommands.tcl
40093367 1# Copyright (c) 2012 OPEN CASCADE SAS
2#
3# The content of this file is subject to the Open CASCADE Technology Public
4# License Version 6.5 (the "License"). You may not use the content of this file
5# except in compliance with the License. Please obtain a copy of the License
6# at http://www.opencascade.org and read it completely before using this file.
7#
8# The Initial Developer of the Original Code is Open CASCADE S.A.S., having its
9# main offices at: 1, place des Freres Montgolfier, 78280 Guyancourt, France.
10#
11# The Original Code and all software distributed under the License is
12# distributed on an "AS IS" basis, without warranty of any kind, and the
13# Initial Developer hereby disclaims all such warranties, including without
14# limitation, any warranties of merchantability, fitness for a particular
15# purpose or non-infringement. Please see the License for the specific terms
16# and conditions governing the rights and limitations under the License.
17
18############################################################################
19# This file defines scripts for execution of OCCT tests.
20# It should be loaded automatically when DRAW is started, and provides
351bbcba 21# top-level commands starting with 'test'. Type 'help test' to get their
 22# synopsis.
40093367 23# See OCCT Tests User Guide for description of the test system.
24#
25# Note: procedures with names starting with underscore are for internal use
26# inside the test system.
27############################################################################
28
29# Default verbose level for command _run_test
30set _tests_verbose 0
31
32# regexp for parsing test case results in summary log
33set _test_case_regexp {^CASE\s+([\w.-]+)\s+([\w.-]+)\s+([\w.-]+)\s*:\s*([\w]+)(.*)}
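# For example (illustrative names), a summary line "CASE bugs fclasses bug123: OK"
# matches this pattern, capturing the group, grid, case name, status word and
# any trailing message.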
34
35# Basic command to run indicated test case in DRAW
b725d7c5 36help test {
37 Run specified test case
38 Use: test group grid casename [echo=0]
39 - If echo is set to 0 (default), log is stored in memory and only summary
40 is output (the log can be obtained with command "dlog get")
41 - If echo is set to 1 or "-echo", all commands and results are echoed
42 immediately, but log is not saved and summary is not produced
43}
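# Typical invocations (group/grid/case names below are illustrative):
#   test bugs fclasses bug123          ;# run one case; get its log with "dlog get"
#   test bugs fclasses bug123 -echo    ;# echo commands and results immediately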
5df3a117 44proc test {group grid casename {echo 0}} {
40093367 45 # get test case paths (will raise error if input is invalid)
46 _get_test $group $grid $casename dir gridname casefile
47
b725d7c5 48 # if echo specified as "-echo", convert it to bool
49 if { "$echo" == "-echo" } { set echo t }
50
40093367 51 # run test
5df3a117 52 uplevel _run_test $dir $group $gridname $casefile $echo
40093367 53
54 # check log
5df3a117 55 if { ! $echo } {
56 _check_log $dir $group $gridname $casename [dlog get]
57 }
40093367 58
59 return
60}
61
62# Basic command to run indicated test case in DRAW
b725d7c5 63help testgrid {
64 Run all tests, or specified group, or one grid
65 Use: testgrid [group [grid]] [options...]
66 Allowed options are:
67 -parallel N: run N parallel processes (default is number of CPUs, 0 to disable)
68 -refresh N: save summary logs every N seconds (default 60, minimal 1, 0 to disable)
69 -outdir dirname: set log directory (should be empty or non-existing)
70 -overwrite: force writing logs in existing non-empty directory
71 -xml filename: write XML report for Jenkins (in JUnit-like format)
40093367 72}
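# Typical invocations (group/grid and directory names below are illustrative):
#   testgrid                                            ;# run all tests with default options
#   testgrid bugs fclasses -outdir res_fix -parallel 0  ;# one grid, sequentially, logs in res_fix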
b725d7c5 73proc testgrid {args} {
40093367 74 global env tcl_platform _tests_verbose
75
76 ######################################################
77 # check arguments
78 ######################################################
79
80 # check that environment variable defining paths to test scripts is defined
81 if { ! [info exists env(CSF_TestScriptsPath)] ||
82 [llength $env(CSF_TestScriptsPath)] <= 0 } {
83 error "Error: Environment variable CSF_TestScriptsPath is not defined"
84 }
85
86 # treat options
b725d7c5 87 set parallel [_get_nb_cpus]
40093367 88 set refresh 60
b725d7c5 89 set logdir ""
40093367 90 set overwrite 0
91 set xmlfile ""
92 for {set narg 0} {$narg < [llength $args]} {incr narg} {
93 set arg [lindex $args $narg]
94
95 # parallel execution
96 if { $arg == "-parallel" } {
97 incr narg
b725d7c5 98 if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
40093367 99 set parallel [expr [lindex $args $narg]]
100 } else {
b725d7c5 101 error "Option -parallel requires argument"
40093367 102 }
103 continue
104 }
105
106 # refresh logs time
107 if { $arg == "-refresh" } {
108 incr narg
b725d7c5 109 if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
40093367 110 set refresh [expr [lindex $args $narg]]
111 } else {
b725d7c5 112 error "Option -refresh requires argument"
113 }
114 continue
115 }
116
117 # output directory
118 if { $arg == "-outdir" } {
119 incr narg
120 if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
121 set logdir [lindex $args $narg]
122 } else {
123 error "Option -outdir requires argument"
40093367 124 }
125 continue
126 }
127
128 # allow overwrite logs
129 if { $arg == "-overwrite" } {
130 set overwrite 1
131 continue
132 }
133
 134 # XML report file name
135 if { $arg == "-xml" } {
136 incr narg
b725d7c5 137 if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
40093367 138 set xmlfile [lindex $args $narg]
139 }
140 if { $xmlfile == "" } {
141 set xmlfile TESTS-summary.xml
142 }
143 continue
144 }
145
146 # unsupported option
147 if { [regexp {^-} $arg] } {
148 error "Error: unsupported option \"$arg\""
149 }
150
151 # treat arguments not recognized as options as group and grid names
152 if { ! [info exists groupname] } {
153 set groupname $arg
154 } elseif { ! [info exists gridname] } {
155 set gridname $arg
156 } else {
157 error "Error: cannot interpret argument $narg ($arg): both group and grid names are already defined by previous args!"
158 }
159 }
160
161 # check that target log directory is empty or does not exist
162 set logdir [file normalize [string trim $logdir]]
163 if { $logdir == "" } {
b725d7c5 164 # if specified logdir is empty string, generate unique name like
165 # results_<branch>_<timestamp>
166 set prefix "results"
167 if { ! [catch {exec git branch} gitout] &&
168 [regexp {[*] ([\w]+)} $gitout res branch] } {
169 set prefix "${prefix}_$branch"
170 }
171 set logdir "${prefix}_[clock format [clock seconds] -format {%Y-%m-%dT%H%M}]"
40093367 172 set logdir [file normalize $logdir]
173 }
174 if { [file isdirectory $logdir] && ! $overwrite && ! [catch {glob -directory $logdir *}] } {
175 error "Error: Specified log directory \"$logdir\" is not empty; please clean it before running tests"
176 }
177 if { [catch {file mkdir $logdir}] || ! [file writable $logdir] } {
178 error "Error: Cannot create directory \"$logdir\", or it is not writable"
179 }
180
181 ######################################################
182 # prepare list of tests to be performed
183 ######################################################
184
185 # list of tests, each defined by a list of:
186 # test scripts directory
187 # group (subfolder) name
188 # grid (subfolder) name
189 # test case name
190 # path to test case file
191 set tests_list {}
192
193 # iterate by all script paths
194 foreach dir [_split_path $env(CSF_TestScriptsPath)] {
195 # protection against empty paths
196 set dir [string trim $dir]
197 if { $dir == "" } { continue }
198
199 if { $_tests_verbose > 0 } { _log_and_puts log "Examining tests directory $dir" }
200
201 # check that directory exists
202 if { ! [file isdirectory $dir] } {
203 _log_and_puts log "Warning: directory $dir listed in CSF_TestScriptsPath does not exist, skipped"
204 continue
205 }
206
207 # if test group is specified, check that directory with given name exists in this dir
208 # if not, continue to the next test dir
209 if { [info exists groupname] && $groupname != "" } {
210 if { [file isdirectory $dir/$groupname] } {
211 set groups $groupname
212 } else {
213 continue
214 }
215 } else {
216 # else search all directories in the current dir
217 if [catch {glob -directory $dir -tail -types d *} groups] { continue }
218 }
219
220 # iterate by groups
221 if { $_tests_verbose > 0 } { _log_and_puts log "Groups to be executed: $groups" }
222 foreach group [lsort -dictionary $groups] {
223 if { $_tests_verbose > 0 } { _log_and_puts log "Examining group directory $group" }
224
225 # file grids.list must exist: it defines sequence of grids in the group
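 # each non-comment line of grids.list is expected in the form "<id> <gridname>",
 # e.g. "001 basic" (illustrative), as parsed by the regexp below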
226 if { ! [file exists $dir/$group/grids.list] } {
227 _log_and_puts log "Warning: directory $dir/$group does not contain file grids.list, skipped"
228 continue
229 }
230
231 # read grids.list file and make a list of grids to be executed
232 set gridlist {}
233 set fd [open $dir/$group/grids.list]
234 set nline 0
235 while { [gets $fd line] >= 0 } {
236 incr nline
237
238 # skip comments and empty lines
239 if { [regexp "\[ \t\]*\#.*" $line] } { continue }
240 if { [string trim $line] == "" } { continue }
241
242 # get grid id and name
243 if { ! [regexp "^\(\[0-9\]+\)\[ \t\]*\(\[A-Za-z0-9_.-\]+\)\$" $line res gridid grid] } {
244 _log_and_puts log "Warning: cannot recognize line $nline in file $dir/$group/grids.list as \"gridid gridname\"; ignored"
245 continue
246 }
247
248 # if specific grid is requested, check that it is present; otherwise make complete list
249 if { ! [info exists gridname] || $gridname == "" || $gridname == $gridid || $gridname == $grid } {
250 lappend gridlist $grid
251 }
252 }
253 close $fd
254
255 # iterate by all grids
256 foreach grid $gridlist {
257
258 # check if this grid is aliased to another one
259 set griddir $dir/$group/$grid
260 if { [file exists $griddir/cases.list] } {
261 set fd [open $griddir/cases.list]
262 if { [gets $fd line] >= 0 } {
263 set griddir [file normalize $dir/$group/$grid/[string trim $line]]
264 }
265 close $fd
266 }
267
268 # check if grid directory actually exists
269 if { ! [file isdirectory $griddir] } {
270 _log_and_puts log "Error: tests directory for grid $grid ($griddir) is missing; skipped"
271 continue
272 }
273
274 # create directory for logging test results
275 if { $logdir != "" } { file mkdir $logdir/$group/$grid }
276
277 # iterate by all tests in the grid directory
278 if { [catch {glob -directory $griddir -type f *} testfiles] } { continue }
279 foreach casefile [lsort -dictionary $testfiles] {
280 # filter out begin and end files
281 set casename [file tail $casefile]
282 if { $casename == "begin" || $casename == "end" } { continue }
283
284 lappend tests_list [list $dir $group $grid $casename $casefile]
285 }
286 }
287 }
288 }
289 if { [llength $tests_list] < 1 } {
 290 error "Error: no tests are found, check your input arguments and variable CSF_TestScriptsPath!"
291 }
292
293 ######################################################
294 # run tests
295 ######################################################
296
297 # log command arguments and environment
298 set log "Command: testgrid $args\nHost: [info hostname]\nStarted on: [clock format [clock seconds] -format {%Y-%m-%d %H:%M:%S}]\n"
8a262fa1 299 catch {set log "$log\nDRAW build:\n[dversion]\n" }
40093367 300 set log "$log\nEnvironment:\n"
8a262fa1 301 foreach envar [lsort [array names env]] {
40093367 302 set log "$log$envar=\"$env($envar)\"\n"
303 }
304 set log "$log\n"
305
306 set refresh_timer [clock seconds]
307 uplevel dchrono _timer reset
308 uplevel dchrono _timer start
309
310 # if parallel execution is requested, allocate thread pool
311 if { $parallel > 0 } {
312 if { ! [info exists tcl_platform(threaded)] || [catch {package require Thread}] } {
313 _log_and_puts log "Warning: Tcl package Thread is not available, running in sequential mode"
314 set parallel 0
315 } else {
316 set worker [tpool::create -minworkers $parallel -maxworkers $parallel]
317 # suspend the pool until all jobs are posted, to prevent blocking of the process
318 # of starting / processing jobs by running threads
b725d7c5 319 catch {tpool::suspend $worker}
40093367 320 if { $_tests_verbose > 0 } { _log_and_puts log "Executing tests in (up to) $parallel threads" }
321 }
322 }
323
324 # start test cases
8a262fa1 325 set userbreak 0
40093367 326 foreach test_def $tests_list {
8a262fa1 327 # check for user break
328 if { "[info commands dbreak]" == "dbreak" && [catch dbreak] } {
329 set userbreak 1
330 break
331 }
332
40093367 333 set dir [lindex $test_def 0]
334 set group [lindex $test_def 1]
335 set grid [lindex $test_def 2]
336 set casename [lindex $test_def 3]
337 set casefile [lindex $test_def 4]
338
 339 # command to make the test generate its images in the results directory
340 set imgdir_cmd ""
341 if { $logdir != "" } { set imgdir_cmd "set imagedir $logdir/$group/$grid" }
342
343 # prepare command file for running test case in separate instance of DRAW
344 set fd_cmd [open $logdir/$group/$grid/${casename}.tcl w]
345 puts $fd_cmd "$imgdir_cmd"
346 puts $fd_cmd "set test_image $casename"
b725d7c5 347 puts $fd_cmd "_run_test $dir $group $grid $casefile t"
5df3a117 348
40093367 349 # use dlog command to obtain complete output of the test when it is available (i.e. since OCCT 6.6.0)
5df3a117 350 # note: this is not needed if echo is set to 1 in call to _run_test above
40093367 351 if { ! [catch {dlog get}] } {
352 puts $fd_cmd "puts \[dlog get\]"
353 } else {
354 # else try to use old-style QA_ variables to get more output...
355 set env(QA_DUMP) 1
356 set env(QA_DUP) 1
357 set env(QA_print_command) 1
358 }
5df3a117 359
40093367 360 # final 'exit' is needed when running on Linux under VirtualGL
361 puts $fd_cmd "exit"
362 close $fd_cmd
8418c617 363
 364 # command to run DRAW with a command file;
365 # note that empty string is passed as standard input to avoid possible
366 # hang-ups due to waiting for stdin of the launching process
367 set command "exec <<{} DRAWEXE -f $logdir/$group/$grid/${casename}.tcl"
368
40093367 369 # alternative method to run without temporary file; disabled as it needs too many backslashes
370# else {
8418c617 371# set command "exec <<\"\" DRAWEXE -c $imgdir_cmd\\\; set test_image $casename\\\; \
40093367 372# _run_test $dir $group $grid $casefile\\\; \
373# puts \\\[dlog get\\\]\\\; exit"
374# }
375
376 # run test case, either in parallel or sequentially
377 if { $parallel > 0 } {
378 # parallel execution
379 set job [tpool::post -nowait $worker "catch \"$command\" output; return \$output"]
380 set job_def($job) [list $logdir $dir $group $grid $casename]
381 } else {
382 # sequential execution
383 catch {eval $command} output
384 _log_test_case $output $logdir $dir $group $grid $casename log
385
386 # update summary log with requested period
387 if { $logdir != "" && $refresh > 0 && [expr [clock seconds] - $refresh_timer > $refresh] } {
388 # update and dump summary
389 _log_summarize $logdir $log
390 set refresh_timer [clock seconds]
391 }
392 }
393 }
394
395 # get results of started threads
396 if { $parallel > 0 } {
b725d7c5 397 catch {tpool::resume $worker}
8a262fa1 398 while { ! $userbreak && [llength [array names job_def]] > 0 } {
40093367 399 foreach job [tpool::wait $worker [array names job_def]] {
400 eval _log_test_case \[tpool::get $worker $job\] $job_def($job) log
401 unset job_def($job)
402 }
403
8a262fa1 404 # check for user break
405 if { "[info commands dbreak]" == "dbreak" && [catch dbreak] } {
406 set userbreak 1
407 }
408
40093367 409 # update summary log with requested period
410 if { $logdir != "" && $refresh > 0 && [clock seconds] > $refresh_timer + $refresh } {
411 _log_summarize $logdir $log
412 set refresh_timer [clock seconds]
413 }
414 }
415 # release thread pool
8a262fa1 416 tpool::cancel $worker [array names job_def]
40093367 417 tpool::release $worker
418 }
419
420 uplevel dchrono _timer stop
421 set time [lindex [split [uplevel dchrono _timer show] "\n"] 0]
422
8a262fa1 423 if { $userbreak } {
424 puts "*********** Stopped by user break ***********"
425 set time "${time} \nNote: the process is not finished, stopped by user break!"
426 }
427
40093367 428 ######################################################
429 # output summary logs and exit
430 ######################################################
431
432 _log_summarize $logdir $log $time
433 if { $logdir != "" } {
434 puts "Detailed logs are saved in $logdir"
435 }
436 if { $logdir != "" && $xmlfile != "" } {
437 # XML output file is assumed relative to log dir unless it is absolute
438 if { [ file pathtype $xmlfile] == "relative" } {
439 set xmlfile [file normalize $logdir/$xmlfile]
440 }
441 _log_xml_summary $logdir $xmlfile $log 0
442 puts "XML summary is saved to $xmlfile"
443 }
444
445 return
446}
447
22db40eb 448# Procedure to regenerate summary log from logs of test cases
449help testsummarize {
450 Regenerate summary log in the test directory from logs of test cases.
451 This can be necessary if test grids are executed separately (e.g. on
452 different stations) or some grids have been re-executed.
453 Use: testsummarize dir
454}
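# Example (directory name is illustrative): after grids executed on different
# stations have been collected into one directory, rebuild the common summary:
#   testsummarize results_2013-01-15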
455proc testsummarize {dir} {
456 global _test_case_regexp
457
458 if { ! [file isdirectory $dir] } {
459 error "Error: \"$dir\" is not a directory"
460 }
461
462 # get summary statements from all test cases in one log
463 set log ""
464
465 # to avoid huge listing of logs, first find all subdirectories and iterate
466 # by them, parsing log files in each subdirectory independently
467 foreach grid [glob -directory $dir -types d -tails */*] {
468 foreach caselog [glob -nocomplain -directory [file join $dir $grid] -types f -tails *.log] {
469 set file [file join $dir $grid $caselog]
470 set nbfound 0
471 set fd [open $file r]
472 while { [gets $fd line] >= 0 } {
473 if { [regexp $_test_case_regexp $line res grp grd cas status message] } {
474 if { "[file join $grid $caselog]" != "[file join $grp $grd ${cas}.log]" } {
475 puts "Error: $file contains status line for another test case ($line)"
476 }
477 set log "$log$line\n"
478 incr nbfound
479 }
480 }
481 close $fd
482
483 if { $nbfound != 1 } {
484 puts "Error: $file contains $nbfound status lines, expected 1"
485 }
486 }
487 }
488
489 _log_summarize $dir $log "Summary regenerated from logs at [clock format [clock seconds]]"
490 return
491}
492
cc6a292d 493# Procedure to compare results of two runs of test cases
b725d7c5 494help testdiff {
495 Compare results of two executions of tests (CPU times, ...)
22db40eb 496 Use: testdiff dir1 dir2 [groupname [gridname]] [options...]
b725d7c5 497 Where dir1 and dir2 are directories containing logs of two test runs.
498 Allowed options are:
22db40eb 499 -save filename: save resulting log in specified file (default name is
60874ff8 500 <dir1>/diff-<dir2>.log); HTML log is saved with same name
22db40eb 501 and extension .html
b725d7c5 502 -status {same|ok|all}: filter cases for comparing by their status:
503 same - only cases with same status are compared (default)
504 ok - only cases with OK status in both logs are compared
505 all - results are compared regardless of status
506 -verbose level:
507 1 - output only differences
22db40eb 508 2 - output also list of logs and directories present in one of dirs only
509 3 - (default) output also progress messages
cc6a292d 510}
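# Example (directory names are illustrative):
#   testdiff results_master_2013-01-15 results_fix_2013-01-16 -status ok
# compares only cases with OK status in both runs and saves the log as
# diff-results_fix_2013-01-16.log (plus .html) in the first directory.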
511proc testdiff {dir1 dir2 args} {
512 if { "$dir1" == "$dir2" } {
513 error "Input directories are the same"
514 }
515
516 ######################################################
517 # check arguments
518 ######################################################
519
520 # treat options
22db40eb 521 set logfile [file join $dir1 "diff-[file tail $dir2].log"]
cc6a292d 522 set basename ""
523 set status "same"
524 set verbose 3
525 for {set narg 0} {$narg < [llength $args]} {incr narg} {
526 set arg [lindex $args $narg]
527
528 # log file name
529 if { $arg == "-save" } {
530 incr narg
22db40eb 531 if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
cc6a292d 532 set logfile [lindex $args $narg]
533 } else {
534 error "Error: Option -save must be followed by log file name"
535 }
536 continue
537 }
538
cc6a292d 539 # status filter
540 if { $arg == "-status" } {
541 incr narg
22db40eb 542 if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
cc6a292d 543 set status [lindex $args $narg]
544 } else { set status "" }
545 if { "$status" != "same" && "$status" != "all" && "$status" != "ok" } {
546 error "Error: Option -status must be followed by one of \"same\", \"all\", or \"ok\""
547 }
548 continue
549 }
550
551 # verbose level
552 if { $arg == "-verbose" } {
553 incr narg
22db40eb 554 if { $narg < [llength $args] && ! [regexp {^-} [lindex $args $narg]] } {
cc6a292d 555 set verbose [expr [lindex $args $narg]]
22db40eb 556 } else {
557 error "Error: Option -verbose must be followed by integer verbose level"
cc6a292d 558 }
559 continue
560 }
561
22db40eb 562 if { [regexp {^-} $arg] } {
cc6a292d 563 error "Error: unsupported option \"$arg\""
22db40eb 564 }
565
566 # non-option arguments form a subdirectory path
567 set basename [file join $basename $arg]
cc6a292d 568 }
569
570 # run diff procedure (recursive)
571 _test_diff $dir1 $dir2 $basename $status $verbose log
572
573 # save result to log file
574 if { "$logfile" != "" } {
575 _log_save $logfile $log
22db40eb 576 _log_html_diff "[file rootname $logfile].html" $log $dir1 $dir2
577 puts "Log is saved to $logfile (and .html)"
cc6a292d 578 }
579
580 return
581}
582
351bbcba 583# Procedure to check data file before adding it to repository
584help testfile {
585 Check data file and prepare it for putting to test data files repository.
586 Use: testfile [filelist]
587
588 Will report if:
589 - data file (non-binary) is in DOS encoding (CR/LF)
590 - same data file (with same or another name) already exists in the repository
591 - another file with the same name already exists
592 Note that names are assumed to be case-insensitive (for Windows).
593
594 Unless the file is already in the repository, tries to load it, reports
595 the recognized file format, file size, number of faces and edges in the
60874ff8 596 loaded shape (if any), and makes snapshot (in the temporary directory).
351bbcba 597 Finally it advises whether the file should be put to public section of the
598 repository.
599}
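# Example (path is illustrative): check all candidate data files before
# committing them to the repository:
#   testfile [glob /tmp/new_data/*.brep]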
600proc testfile {filelist} {
601 global env
602
603 # check that CSF_TestDataPath is defined
604 if { ! [info exists env(CSF_TestDataPath)] } {
605 error "Environment variable CSF_TestDataPath must be defined!"
606 }
607
608 # build registry of existing data files (name -> path) and (size -> path)
609 puts "Checking available test data files..."
610 foreach dir [_split_path $env(CSF_TestDataPath)] {
611 while {[llength $dir] != 0} {
612 set curr [lindex $dir 0]
613 set dir [lrange $dir 1 end]
614 eval lappend dir [glob -nocomplain -directory $curr -type d *]
615 foreach file [glob -nocomplain -directory $curr -type f *] {
616 set name [file tail $file]
617 set name_lower [string tolower $name]
618
619 # check that the file is not in DOS encoding
620 if { [_check_dos_encoding $file] } {
621 puts "Warning: file $file is in DOS encoding; was this intended?"
622 }
623 _check_file_format $file
624
625 # check if file with the same name is present twice or more
626 if { [info exists names($name_lower)] } {
627 puts "Error: more than one file with name $name is present in the repository:"
628 if { [_diff_files $file $names($name_lower)] } {
629 puts "(files are different by content)"
630 } else {
631 puts "(files are same by content)"
632 }
633 puts "--> $file"
634 puts "--> $names($name_lower)"
635 continue
636 }
637
638 # check if file with the same content exists
639 set size [file size $file]
640 if { [info exists sizes($size)] } {
641 foreach other $sizes($size) {
642 if { ! [_diff_files $file $other] } {
643 puts "Warning: two files with the same content found:"
644 puts "--> $file"
645 puts "--> $other"
646 }
647 }
648 }
649
650 # add the file to the registry
651 set names($name_lower) $file
652 lappend sizes($size) $file
653 }
654 }
655 }
656 if { [llength $filelist] <= 0 } { return }
657
658 # check the new files
659 set has_images f
660 puts "Checking new file(s)..."
661 foreach file $filelist {
662 # check for DOS encoding
663 if { [_check_dos_encoding $file] } {
664 puts "$file: Warning: DOS encoding detected"
665 }
666
667 set name [file tail $file]
668 set name_lower [string tolower $name]
669
670 # check for presence of the file with same name
671 if { [info exists names($name_lower)] } {
672 if { [_diff_files $file $names($name_lower)] } {
673 puts "$file: Error: name is already used by existing file\n--> $names($name_lower)"
674 } else {
675 puts "$file: OK: already in the repository \n--> $names($name_lower)"
676 continue
677 }
678 }
679
680 # check if file with the same content exists
681 set size [file size $file]
682 if { [info exists sizes($size)] } {
683 set found f
684 foreach other $sizes($size) {
685 if { ! [_diff_files $file $other] } {
686 puts "$file: OK: the same file is already present under name [file tail $other]\n--> $other"
687 set found t
688 break
689 }
690 }
691 if { $found } { continue }
692 }
693
694 # try to read the file
695 set format [_check_file_format $file]
696 if { [catch {uplevel load_data_file $file $format a}] } {
697 puts "$file: Error: Cannot read as $format file"
698 continue
699 }
700
701 # get number of faces and edges
702 set edges 0
703 set faces 0
704 set nbs [uplevel nbshapes a]
705 regexp {EDGE[ \t:]*([0-9]+)} $nbs res edges
706 regexp {FACE[ \t:]*([0-9]+)} $nbs res faces
707
708 # classify; first check file size and number of faces and edges
709 if { $size < 95000 && $faces < 20 && $edges < 100 } {
710 set dir public
711 } else {
712 set dir private
 713 # check if the file name matches the typical naming of old MDTV bug models,
 714 # or has extension .rle; such files should go to the "old" section
715 if { [regexp -nocase {.*(cts|ats|pro|buc|ger|fra|usa|uki)[0-9]+.*} $name] ||
716 [regexp -nocase {[.]rle\y} $name] } {
717 set dir old
718 }
719 }
720
721 # add stats
722 puts "$file: $format size=[expr $size / 1024] KiB, nbfaces=$faces, nbedges=$edges -> $dir"
723
60874ff8 724 set tmpdir [_get_temp_dir]
725 file mkdir $tmpdir/$dir
351bbcba 726
727 # make snapshot
728 pload AISV
729 uplevel vdisplay a
730 uplevel vfit
731 uplevel vzfit
60874ff8 732 uplevel vdump $tmpdir/$dir/[file rootname [file tail $file]].png
351bbcba 733 set has_images t
734 }
735 if { $has_images } {
60874ff8 736 puts "Snapshots are saved in subdirectory [_get_temp_dir]"
351bbcba 737 }
738}
739
22db40eb 740# Procedure to locate data file for test given its name.
741# The search is performed assuming that the function is called
742# from the test case script; the search order is:
743# - subdirectory "data" of the test script (grid) folder
744# - subdirectories in environment variable CSF_TestDataPath
745# - subdirectory set by datadir command
746# If file is not found, raises Tcl error.
747proc locate_data_file {filename} {
748 global env groupname gridname casename
749
750 # check if the file is located in the subdirectory data of the script dir
751 set scriptfile [info script]
752 if { $scriptfile != "" } {
753 set path [file join [file dirname $scriptfile] data $filename]
754 if { [file exists $path] } {
755 return [file normalize $path]
756 }
757 }
758
759 # check sub-directories in paths indicated by CSF_TestDataPath
760 if { [info exists env(CSF_TestDataPath)] } {
761 foreach dir [_split_path $env(CSF_TestDataPath)] {
762 while {[llength $dir] != 0} {
763 set name [lindex $dir 0]
764 set dir [lrange $dir 1 end]
765 eval lappend dir [glob -nocomplain -directory $name -type d *]
766 if { [file exists $name/$filename] } {
767 return [file normalize $name/$filename]
768 }
769 }
770 }
771 }
772
773 # check current datadir
774 if { [file exists [uplevel datadir]/$filename] } {
775 return [file normalize [uplevel datadir]/$filename]
776 }
777
778 # raise error
779 error [join [list "Error: file $filename could not be found" \
780 "(should be in paths indicated by CSF_TestDataPath environment variable, " \
781 "or in subfolder data in the script directory)"] "\n"]
782}
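# Example of use in a test script (file name is illustrative); an error is
# raised if the file is not found in any of the searched locations:
#   restore [locate_data_file case_1.brep] sh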
783
40093367 784# Internal procedure to find test case indicated by group, grid, and test case names;
785# returns:
786# - dir: path to the base directory of the tests group
787# - gridname: actual name of the grid
788# - casefile: path to the test case script
789# if no such test is found, raises error with appropriate message
790proc _get_test {group grid casename _dir _gridname _casefile} {
791 upvar $_dir dir
792 upvar $_gridname gridname
793 upvar $_casefile casefile
794
795 global env
796
797 # check that environment variable defining paths to test scripts is defined
798 if { ! [info exists env(CSF_TestScriptsPath)] ||
799 [llength $env(CSF_TestScriptsPath)] <= 0 } {
800 error "Error: Environment variable CSF_TestScriptsPath is not defined"
801 }
802
803 # iterate by all script paths
804 foreach dir [_split_path $env(CSF_TestScriptsPath)] {
805 # protection against empty paths
806 set dir [string trim $dir]
807 if { $dir == "" } { continue }
808
809 # check that directory exists
810 if { ! [file isdirectory $dir] } {
811 puts "Warning: directory $dir listed in CSF_TestScriptsPath does not exist, skipped"
812 continue
813 }
814
815 # check if test group with given name exists in this dir
816 # if not, continue to the next test dir
817 if { ! [file isdirectory $dir/$group] } { continue }
818
819 # check that grid with given name (possibly alias) exists; stop otherwise
820 set gridname $grid
821 if { ! [file isdirectory $dir/$group/$gridname] } {
822 # check if grid is named by alias rather than by actual name
823 if { [file exists $dir/$group/grids.list] } {
824 set fd [open $dir/$group/grids.list]
825 while { [gets $fd line] >= 0 } {
826 if { [regexp "\[ \t\]*\#.*" $line] } { continue }
827 if { [regexp "^$grid\[ \t\]*\(\[A-Za-z0-9_.-\]+\)\$" $line res gridname] } {
828 break
829 }
830 }
831 close $fd
832 }
833 }
834 if { ! [file isdirectory $dir/$group/$gridname] } { continue }
835
836 # get actual file name of the script; stop if it cannot be found
837 set casefile $dir/$group/$gridname/$casename
838 if { ! [file exists $casefile] } {
839 # check if this grid is aliased to another one
840 if { [file exists $dir/$group/$gridname/cases.list] } {
841 set fd [open $dir/$group/$gridname/cases.list]
842 if { [gets $fd line] >= 0 } {
843 set casefile [file normalize $dir/$group/$gridname/[string trim $line]/$casename]
844 }
845 close $fd
846 }
847 }
848 if { [file exists $casefile] } {
849 # normal return
850 return
851 }
852 }
853
854 # coming here means specified test is not found; report error
855 error [join [list "Error: test case $group / $grid / $casename is not found in paths listed in variable" \
856 "CSF_TestScriptsPath (current value is \"$env(CSF_TestScriptsPath)\")"] "\n"]
857}
858
859# Internal procedure to run test case indicated by base directory,
 860 # group and grid names, and test case file path.
861# The log can be obtained by command "dlog get".
5df3a117 862proc _run_test {scriptsdir group gridname casefile echo} {
40093367 863 global env
864
865 # start timer
866 uplevel dchrono _timer reset
867 uplevel dchrono _timer start
22db40eb 868 catch {uplevel meminfo w} membase
40093367 869
870 # enable commands logging; switch to old-style mode if dlog command is not present
871 set dlog_exists 1
872 if { [catch {dlog reset}] } {
873 set dlog_exists 0
5df3a117 874 } elseif { $echo } {
875 decho on
40093367 876 } else {
877 dlog reset
878 dlog on
879 rename puts puts-saved
880 proc puts args {
881 global _tests_verbose
882
883 # log only output to stdout and stderr, not to file!
884 if {[llength $args] > 1} {
885 set optarg [lindex $args end-1]
886 if { $optarg == "stdout" || $optarg == "stderr" || $optarg == "-newline" } {
40093367 887 dlog add [lindex $args end]
888 }
889 } else {
890 dlog add [lindex $args end]
891 }
892
893 # reproduce original puts
894 if { $_tests_verbose } {
895 eval puts-saved $args
896 }
897 }
898 }
899
40093367 900 # evaluate test case
901 if [catch {
60874ff8 902 # set variables identifying test case
40093367 903 uplevel set casename [file tail $casefile]
8418c617 904 uplevel set groupname $group
905 uplevel set gridname $gridname
40093367 906
60874ff8 907 # set variables for saving of images if not yet set
908 if { ! [uplevel info exists imagedir] } {
909 uplevel set imagedir [_get_temp_dir]
910 uplevel set test_image \$casename
911 }
912
913 # execute test scripts
40093367 914 if { [file exists $scriptsdir/$group/begin] } {
915 puts "Executing $scriptsdir/$group/begin..."; flush stdout
916 uplevel source $scriptsdir/$group/begin
917 }
918 if { [file exists $scriptsdir/$group/$gridname/begin] } {
919 puts "Executing $scriptsdir/$group/$gridname/begin..."; flush stdout
920 uplevel source $scriptsdir/$group/$gridname/begin
921 }
922
923 puts "Executing $casefile..."; flush stdout
924 uplevel source $casefile
925
926 if { [file exists $scriptsdir/$group/$gridname/end] } {
927 puts "Executing $scriptsdir/$group/$gridname/end..."; flush stdout
928 uplevel source $scriptsdir/$group/$gridname/end
929 }
930 if { [file exists $scriptsdir/$group/end] } {
931 puts "Executing $scriptsdir/$group/end..."; flush stdout
932 uplevel source $scriptsdir/$group/end
933 }
934 } res] {
935 puts "Tcl Exception: $res"
936 }
937
40093367 938 # stop logging
939 if { $dlog_exists } {
5df3a117 940 if { $echo } {
941 decho off
942 } else {
943 rename puts {}
944 rename puts-saved puts
945 dlog off
946 }
40093367 947 }
948
8418c617 949 # stop cpulimit killer if armed by the test
950 cpulimit
951
22db40eb 952 # add memory and timing info
953 set stats ""
954 if { ! [catch {uplevel meminfo w} memuse] } {
955 set stats "MEMORY DELTA: [expr ($memuse - $membase) / 1024] KiB\n"
956 }
40093367 957 uplevel dchrono _timer stop
958 set time [uplevel dchrono _timer show]
959 if [regexp -nocase {CPU user time:[ \t]*([0-9.e-]+)} $time res cpu] {
22db40eb 960 set stats "${stats}TOTAL CPU TIME: $cpu sec\n"
961 }
962 if { $dlog_exists && ! $echo } {
963 dlog add $stats
964 } else {
965 puts $stats
40093367 966 }
967}
968
969# Internal procedure to check log of test execution and decide if it passed or failed
970proc _check_log {dir group gridname casename log {_summary {}} {_html_log {}}} {
971 global env
972 if { $_summary != "" } { upvar $_summary summary }
973 if { $_html_log != "" } { upvar $_html_log html_log }
974 set summary ""
975 set html_log ""
976
977if [catch {
978
979 # load definition of 'bad words' indicating test failure
980 # note that rules are loaded in the order of decreasing priority (grid - group - common),
981 # thus grid rules will override group ones
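    # each rule line has the form "<status>/<regexp>/<comment>", e.g. (illustrative):
    #   FAILED /\bFaulty\b/ bad shape
    # meaning that a log line matching "Faulty" marks the case as FAILED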
982 set badwords {}
983 foreach rulesfile [list $dir/$group/$gridname/parse.rules $dir/$group/parse.rules $dir/parse.rules] {
984 if [catch {set fd [open $rulesfile r]}] { continue }
985 while { [gets $fd line] >= 0 } {
986 # skip comments and empty lines
987 if { [regexp "\[ \t\]*\#.*" $line] } { continue }
988 if { [string trim $line] == "" } { continue }
989 # extract regexp
990 if { ! [regexp {^([^/]*)/([^/]*)/(.*)$} $line res status rexp comment] } {
991 puts "Warning: cannot recognize parsing rule \"$line\" in file $rulesfile"
992 continue
993 }
994 set status [string trim $status]
995 if { $comment != "" } { set status "$status ([string trim $comment])" }
996 set rexp [regsub -all {\\b} $rexp {\\y}] ;# convert regexp from Perl to Tcl style
997 lappend badwords [list $status $rexp]
998 }
999 close $fd
1000 }
1001 if { [llength $badwords] <= 0 } {
1002 puts "Warning: no definition of error indicators found (check files parse.rules)"
1003 }
1004
1005 # analyse log line-by-line
1006 set todos {}
1007 set status ""
1008 foreach line [split $log "\n"] {
1009 # check if line defines specific treatment of some messages
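        # a TODO line has the form "TODO <platforms>: <message pattern>", e.g. (bug id illustrative):
        #   TODO OCC12345 Linux: Error : The area of the resulting shape
        # a question mark in the platforms part marks the problem as unstable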
1010 if [regexp -nocase {^[ \t]*TODO ([^:]*):(.*)$} $line res platforms pattern] {
1011 if { ! [regexp -nocase {\mAll\M} $platforms] &&
1012 ! [regexp -nocase "\\m$env(os_type)\\M" $platforms] } {
1013 set html_log "$html_log\n$line"
1014 continue ;# TODO statement is for another platform
1015 }
1016
1017 # record TODOs that mark unstable cases
1018 if { [regexp {[\?]} $platforms] } {
1019 set todos_unstable([llength $todos]) 1
1020 }
1021
1022 lappend todos [regsub -all {\\b} [string trim $pattern] {\\y}] ;# convert regexp from Perl to Tcl style
1023 set html_log "$html_log\n[_html_highlight BAD $line]"
1024 continue
1025 }
1026
1027 # check for presence of messages indicating test result
1028 set ismarked 0
1029 foreach bw $badwords {
1030 if { [regexp [lindex $bw 1] $line] } {
1031 # check if this is known bad case
1032 set is_known 0
1033 for {set i 0} {$i < [llength $todos]} {incr i} {
1034 if { [regexp [lindex $todos $i] $line] } {
1035 set is_known 1
1036 incr todo_count($i)
1037 set html_log "$html_log\n[_html_highlight BAD $line]"
1038 break
1039 }
1040 }
1041
1042 # if it is not in todo, define status
1043 if { ! $is_known } {
1044 set stat [lindex $bw 0 0]
1045 set html_log "$html_log\n[_html_highlight $stat $line]"
1046 if { $status == "" && $stat != "OK" && ! [regexp -nocase {^IGNOR} $stat] } {
1047 set status [lindex $bw 0]
1048 }
1049 }
1050 set ismarked 1
1051 break
1052 }
1053 }
1054 if { ! $ismarked } {
1055 set html_log "$html_log\n$line"
1056 }
1057 }
1058
1059 # check for presence of TEST COMPLETED statement
1060 if { $status == "" && ! [regexp {TEST COMPLETED} $log] } {
1061 # check whether absence of TEST COMPLETED is known problem
1062 set i [lsearch $todos "TEST INCOMPLETE"]
1063 if { $i >= 0 } {
1064 incr todo_count($i)
1065 } else {
1066 set status "FAILED (no final message is found)"
1067 }
1068 }
1069
1070 # check declared bad cases and diagnose possible improvement
1071 # (bad case declared but not detected).
1072 # Note that absence of the problem marked by TODO with question mark
1073 # (unstable) is not reported as improvement.
1074 if { $status == "" } {
1075 for {set i 0} {$i < [llength $todos]} {incr i} {
1076 if { ! [info exists todos_unstable($i)] &&
1077 (! [info exists todo_count($i)] || $todo_count($i) <= 0) } {
1078 set status "IMPROVEMENT (expected problem TODO no. [expr $i + 1] is not detected)"
1079 break;
1080 }
1081 }
1082 }
1083
1084 # report test as known bad if at least one of expected problems is found
1085 if { $status == "" && [llength [array names todo_count]] > 0 } {
1086 set status "BAD (known problem)"
1087 }
1088
1089 # report normal OK
1090 if { $status == "" } {set status "OK" }
1091
1092} res] {
1093 set status "FAILED ($res)"
1094}
1095
1096 # put final message
1097 _log_and_puts summary "CASE $group $gridname $casename: $status"
1098 set html_log "[_html_highlight [lindex $status 0] $summary]\n$html_log"
1099}
1100
1101# Auxiliary procedure putting message to both cout and log variable (list)
1102proc _log_and_puts {logvar message} {
1103 if { $logvar != "" } {
1104 upvar $logvar log
1105 if [info exists log] {
1106 set log "$log$message\n"
1107 } else {
1108 set log "$message\n"
1109 }
1110 }
1111 puts $message
1112}
1113
1114# Auxiliary procedure to log result on single test case
1115proc _log_test_case {output logdir dir group grid casename logvar} {
1116 upvar $logvar log
1117
1118 # check result and make HTML log
1119 _check_log $dir $group $grid $casename $output summary html_log
1120 set log "$log$summary"
1121
1122 # save log to file
1123 if { $logdir != "" } {
1124 _log_html $logdir/$group/$grid/$casename.html $html_log "Test $group $grid $casename"
1125 _log_save $logdir/$group/$grid/$casename.log "$output\n$summary" "Test $group $grid $casename"
1126 }
1127}
1128
1129# Auxiliary procedure to save log to file
1130proc _log_save {file log {title {}}} {
1131 # create missing directories as needed
1132 catch {file mkdir [file dirname $file]}
1133
1134 # try to open a file
1135 if [catch {set fd [open $file w]} res] {
1136 error "Error saving log file $file: $res"
1137 }
1138
1139 # dump log and close
1140 puts $fd "$title\n"
1141 puts $fd $log
1142 close $fd
1143 return
1144}
1145
22db40eb 1146# Auxiliary procedure to make a (relative if possible) URL to a file for
 1147 # inclusion as a reference in HTML log
1148proc _make_url {htmldir file} {
1149 set htmlpath [file split [file normalize $htmldir]]
1150 set filepath [file split [file normalize $file]]
1151 for {set i 0} {$i < [llength $htmlpath]} {incr i} {
1152 if { "[lindex $htmlpath $i]" != "[lindex $filepath $i]" } {
1153 if { $i == 0 } { break }
1154 return "[string repeat "../" [expr [llength $htmlpath] - $i - 1]][eval file join [lrange $filepath $i end]]"
1155 }
1156 }
1157
1158 # if relative path could not be made, return full file URL
1159 return "file://[file normalize $file]"
1160}
1161
40093367 1162# Auxiliary procedure to save log to HTML file
1163proc _log_html {file log {title {}}} {
1164 # create missing directories as needed
1165 catch {file mkdir [file dirname $file]}
1166
1167 # try to open a file
1168 if [catch {set fd [open $file w]} res] {
1169 error "Error saving log file $file: $res"
1170 }
1171
1172 # print header
22db40eb 1173 puts $fd "<html><head><title>$title</title></head><body><h1>$title</h1>"
40093367 1174
1175 # add images if present
1176 set imgbasename [file rootname [file tail $file]]
1177 foreach img [lsort [glob -nocomplain -directory [file dirname $file] -tails ${imgbasename}*.gif ${imgbasename}*.png ${imgbasename}*.jpg]] {
3b21f8ae 1178 puts $fd "<p>[file tail $img]<br><img src=\"$img\"/><p>"
40093367 1179 }
1180
b725d7c5 1181 # print log body, trying to add HTML links to script files on lines like
1182 # "Executing <filename>..."
40093367 1183 puts $fd "<pre>"
b725d7c5 1184 foreach line [split $log "\n"] {
1185 if { [regexp {Executing[ \t]+([a-zA-Z0-9._/:-]+[^.])} $line res script] &&
1186 [file exists $script] } {
22db40eb 1187 set line [regsub $script $line "<a href=\"[_make_url $file $script]\">$script</a>"]
b725d7c5 1188 }
1189 puts $fd $line
1190 }
40093367 1191 puts $fd "</pre></body></html>"
1192
1193 close $fd
1194 return
1195}
1196
1197# Auxiliary method to make text with HTML highlighting according to status
1198proc _html_color {status} {
1199 # choose a color for the cell according to result
1200 if { $status == "OK" } {
1201 return lightgreen
1202 } elseif { [regexp -nocase {^FAIL} $status] } {
1203 return red
1204 } elseif { [regexp -nocase {^BAD} $status] } {
1205 return yellow
1206 } elseif { [regexp -nocase {^IMP} $status] } {
1207 return orange
1208 } elseif { [regexp -nocase {^SKIP} $status] } {
1209 return gray
1210 } elseif { [regexp -nocase {^IGNOR} $status] } {
1211 return gray
1212 } else {
1213 puts "Warning: no color defined for status $status, using red as if FAILED"
1214 return red
1215 }
1216}
1217
1218# Format text line in HTML to be colored according to the status
1219proc _html_highlight {status line} {
1220 return "<table><tr><td bgcolor=\"[_html_color $status]\">$line</td></tr></table>"
1221}
1222
1223# Internal procedure to generate HTML page presenting log of the tests
1224# execution in tabular form, with links to reports on individual cases
1225proc _log_html_summary {logdir log totals regressions improvements total_time} {
1226 global _test_case_regexp
1227
1228 # create missing directories as needed
1229 catch {file mkdir $logdir}
1230
1231 # try to open a file and start HTML
1232 if [catch {set fd [open $logdir/summary.html w]} res] {
1233 error "Error creating log file: $res"
1234 }
1235
 1236 # write HTML header, including command to refresh log if still in progress
1237 puts $fd "<html><head>"
1238 puts $fd "<title>Tests summary</title>"
1239 if { $total_time == "" } {
1240 puts $fd "<meta http-equiv=\"refresh\" content=\"10\">"
1241 }
1242 puts $fd "<meta http-equiv=\"pragma\" content=\"NO-CACHE\">"
1243 puts $fd "</head><body>"
1244
1245 # put summary
1246 set legend(OK) "Test passed OK"
1247 set legend(FAILED) "Test failed (regression)"
1248 set legend(BAD) "Known problem"
1249 set legend(IMPROVEMENT) "Possible improvement (expected problem not detected)"
1250 set legend(SKIPPED) "Test skipped due to lack of data file"
1251 puts $fd "<h1>Summary</h1><table>"
1252 foreach nbstat $totals {
1253 set status [lindex $nbstat 1]
1254 if { [info exists legend($status)] } {
1255 set comment $legend($status)
1256 } else {
1257 set comment "User-defined status"
1258 }
1259 puts $fd "<tr><td align=\"right\">[lindex $nbstat 0]</td><td bgcolor=\"[_html_color $status]\">$status</td><td>$comment</td></tr>"
1260 }
1261 puts $fd "</table>"
1262
1263 # time stamp and elapsed time info
1264 if { $total_time != "" } {
8a262fa1 1265 puts $fd "<p>Generated on [clock format [clock seconds] -format {%Y-%m-%d %H:%M:%S}] on [info hostname]\n<p>"
1266 puts $fd [join [split $total_time "\n"] "<p>"]
40093367 1267 } else {
1268 puts $fd "<p>NOTE: This is intermediate summary; the tests are still running! This page will refresh automatically until tests are finished."
1269 }
1270
1271 # print regressions and improvements
1272 foreach featured [list $regressions $improvements] {
1273 if { [llength $featured] <= 1 } { continue }
1274 set status [string trim [lindex $featured 0] { :}]
1275 puts $fd "<h2>$status</h2>"
1276 puts $fd "<table>"
1277 set groupgrid ""
1278 foreach test [lrange $featured 1 end] {
1279 if { ! [regexp {^(.*)\s+([\w.]+)$} $test res gg name] } {
1280 set gg UNKNOWN
1281 set name "Error building short list; check details"
1282 }
1283 if { $gg != $groupgrid } {
1284 if { $groupgrid != "" } { puts $fd "</tr>" }
1285 set groupgrid $gg
1286 puts $fd "<tr><td>$gg</td>"
1287 }
1288 puts $fd "<td bgcolor=\"[_html_color $status]\"><a href=\"[regsub -all { } $gg /]/${name}.html\">$name</a></td>"
1289 }
1290 if { $groupgrid != "" } { puts $fd "</tr>" }
1291 puts $fd "</table>"
1292 }
1293
1294 # put detailed log
1295 puts $fd "<h1>Details</h1>"
1296
1297 # process log line-by-line
1298 set group {}
1299 set letter {}
1300 foreach line [lsort -dictionary [split $log "\n"]] {
1301 # check that the line is case report in the form "CASE group grid name: result (explanation)"
1302 if { ! [regexp $_test_case_regexp $line res grp grd casename result message] } {
1303 continue
1304 }
1305
1306 # start new group
1307 if { $grp != $group } {
1308 if { $letter != "" } { puts $fd "</tr></table>" }
1309 set letter {}
1310 set group $grp
1311 set grid {}
1312 puts $fd "<h2>Group $group</h2>"
1313 }
1314
1315 # start new grid
1316 if { $grd != $grid } {
1317 if { $letter != "" } { puts $fd "</tr></table>" }
1318 set letter {}
1319 set grid $grd
1320 puts $fd "<h3>Grid $grid</h3>"
1321 }
1322
1323 # check if test case name is <letter><digit>;
1324 # if not, set alnum to period "." to recognize non-standard test name
1325 if { ! [regexp {([A-Za-z]+)([0-9]+)} $casename res alnum number] } {
1326 set alnum .
1327 }
1328
1329 # start new row when letter changes or for non-standard names
1330 if { $alnum != $letter || $alnum == "." } {
1331 if { $letter != "" } {
1332 puts $fd "</tr><tr>"
1333 } else {
1334 puts $fd "<table><tr>"
1335 }
1336 set letter $alnum
1337 }
1338
1339 puts $fd "<td bgcolor=\"[_html_color $result]\"><a href=\"$group/$grid/${casename}.html\">$casename</a></td>"
1340 }
1341 puts $fd "</tr></table>"
1342
1343 # add remaining lines of log as plain text
1344 puts $fd "<h2>Plain text messages</h2>\n<pre>"
1345 foreach line [split $log "\n"] {
1346 if { ! [regexp $_test_case_regexp $line] } {
1347 puts $fd "$line"
1348 }
1349 }
1350 puts $fd "</pre>"
1351
1352 # close file and exit
1353 puts $fd "</body>"
1354 close $fd
1355 return
1356}
1357
1358# Procedure to dump summary logs of tests
1359proc _log_summarize {logdir log {total_time {}}} {
1360
1361 # sort log records alphabetically to have the same behavior on Linux and Windows
1362 # (also needed if tests are run in parallel)
1363 set loglist [lsort -dictionary [split $log "\n"]]
1364
1365 # classify test cases by status
1366 foreach line $loglist {
1367 if { [regexp {^CASE ([^:]*): ([[:alnum:]]+).*$} $line res caseid status] } {
1368 lappend stat($status) $caseid
1369 }
1370 }
1371 set totals {}
1372 set improvements {Improvements:}
1373 set regressions {Failed:}
1374 if { [info exists stat] } {
1375 foreach status [lsort [array names stat]] {
1376 lappend totals [list [llength $stat($status)] $status]
1377
1378 # separately count improvements (status starting with IMP) and regressions (all except IMP, OK, BAD, and SKIP)
1379 if { [regexp -nocase {^IMP} $status] } {
1380 eval lappend improvements $stat($status)
1381 } elseif { $status != "OK" && ! [regexp -nocase {^BAD} $status] && ! [regexp -nocase {^SKIP} $status] } {
1382 eval lappend regressions $stat($status)
1383 }
1384 }
1385 }
1386
1387 # if time is specified, add totals
1388 if { $total_time != "" } {
1389 if { [llength $improvements] > 1 } {
1390 _log_and_puts log [join $improvements "\n "]
1391 }
1392 if { [llength $regressions] > 1 } {
1393 _log_and_puts log [join $regressions "\n "]
1394 }
1395 if { [llength $improvements] == 1 && [llength $regressions] == 1 } {
1396 _log_and_puts log "No regressions"
1397 }
1398 _log_and_puts log "Total cases: [join $totals {, }]"
1399 _log_and_puts log $total_time
1400 }
1401
1402 # save log to files
1403 if { $logdir != "" } {
1404 _log_html_summary $logdir $log $totals $regressions $improvements $total_time
1405 _log_save $logdir/tests.log $log "Tests summary"
1406 }
1407
1408 return
1409}
1410
1411# Internal procedure to generate XML log in JUnit style, for further
1412# consumption by Jenkins or similar systems.
1413#
1414# The output is intended to conform to XML schema supported by Jenkins found at
1415# https://svn.jenkins-ci.org/trunk/hudson/dtkit/dtkit-format/dtkit-junit-model/src/main/resources/com/thalesgroup/dtkit/junit/model/xsd/junit-4.xsd
1416#
1417# The mapping of the fields is inspired by annotated schema of Apache Ant JUnit XML format found at
1418# http://windyroad.org/dl/Open%20Source/JUnit.xsd
1419proc _log_xml_summary {logdir filename log include_cout} {
1420 global _test_case_regexp
1421
1422 catch {file mkdir [file dirname $filename]}
1423
1424 # try to open a file and start XML
1425 if [catch {set fd [open $filename w]} res] {
1426 error "Error creating XML summary file $filename: $res"
1427 }
1428 puts $fd "<?xml version='1.0' encoding='utf-8'?>"
1429 puts $fd "<testsuites>"
1430
1431 # prototype for command to generate test suite tag
1432 set time_and_host "timestamp=\"[clock format [clock seconds] -format {%Y-%m-%dT%H:%M:%S}]\" hostname=\"[info hostname]\""
1433 set cmd_testsuite {puts $fd "<testsuite name=\"$group $grid\" tests=\"$nbtests\" failures=\"$nbfail\" errors=\"$nberr\" time=\"$time\" skipped=\"$nbskip\" $time_and_host>\n$testcases\n</testsuite>\n"}
1434
1435 # sort log and process it line-by-line
1436 set group {}
1437 foreach line [lsort -dictionary [split $log "\n"]] {
1438 # check that the line is case report in the form "CASE group grid name: result (explanation)"
1439 if { ! [regexp $_test_case_regexp $line res grp grd casename result message] } {
1440 continue
1441 }
1442 set message [string trim $message " \t\r\n()"]
1443
1444 # start new testsuite for each grid
1445 if { $grp != $group || $grd != $grid } {
1446
1447 # write previous test suite
1448 if [info exists testcases] { eval $cmd_testsuite }
1449
1450 set testcases {}
1451 set nbtests 0
1452 set nberr 0
1453 set nbfail 0
1454 set nbskip 0
1455 set time 0.
1456
1457 set group $grp
1458 set grid $grd
1459 }
1460
1461 incr nbtests
1462
1463 # parse test log and get its CPU time
1464 set testout {}
1465 set add_cpu {}
1466 if { [catch {set fdlog [open $logdir/$group/$grid/${casename}.log r]} ret] } {
1467 puts "Error: cannot open $logdir/$group/$grid/${casename}.log: $ret"
1468 } else {
1469 while { [gets $fdlog logline] >= 0 } {
1470 if { $include_cout } {
1471 set testout "$testout$logline\n"
1472 }
1473 if [regexp -nocase {TOTAL CPU TIME:\s*([\d.]+)\s*sec} $logline res cpu] {
1474 set add_cpu " time=\"$cpu\""
1475 set time [expr $time + $cpu]
1476 }
1477 }
1478 close $fdlog
1479 }
1480 if { ! $include_cout } {
1481 set testout "$line\n"
1482 }
1483
1484 # record test case with its output and status
 1485 # Mapping: OK is reported as passed; SKIPPED maps to <error>, BAD to <skipped>, all others to <failure>
1486 set testcases "$testcases\n <testcase name=\"$casename\"$add_cpu status=\"$result\">\n"
1487 set testcases "$testcases\n <system-out>\n$testout </system-out>"
1488 if { $result != "OK" } {
1489 if { [regexp -nocase {^SKIP} $result] } {
1490 incr nberr
1491 set testcases "$testcases\n <error name=\"$result\" message=\"$message\"/>"
1492 } elseif { [regexp -nocase {^BAD} $result] } {
1493 incr nbskip
1494 set testcases "$testcases\n <skipped>$message</skipped>"
1495 } else {
1496 incr nbfail
1497 set testcases "$testcases\n <failure name=\"$result\" message=\"$message\"/>"
1498 }
1499 }
1500 set testcases "$testcases\n </testcase>"
1501 }
1502
1503 # write last test suite
1504 if [info exists testcases] { eval $cmd_testsuite }
1505
1506 # the end
1507 puts $fd "</testsuites>"
1508 close $fd
1509 return
1510}
1511
1512# define custom platform name
1513proc _tests_platform_def {} {
1514 global env tcl_platform
1515
1516 if [info exists env(os_type)] { return }
1517
1518 set env(os_type) $tcl_platform(platform)
1519
 1520 # use detailed mapping for various versions of Linux
1521 # (note that mapping is rather non-uniform, for historical reasons)
1522 if { $env(os_type) == "unix" && ! [catch {exec cat /etc/issue} issue] } {
1523 if { [regexp {Mandriva[ \tA-Za-z]+([0-9]+)} $issue res num] } {
1524 set env(os_type) Mandriva$num
1525 } elseif { [regexp {Red Hat[ \tA-Za-z]+([0-9]+)} $issue res num] } {
1526 set env(os_type) RedHat$num
1527 } elseif { [regexp {Debian[ \tA-Za-z/]+([0-9]+)[.]([0-9]+)} $issue res num subnum] } {
1528 set env(os_type) Debian$num$subnum
1529 } elseif { [regexp {CentOS[ \tA-Za-z]+([0-9]+)[.]([0-9]+)} $issue res num subnum] } {
1530 set env(os_type) CentOS$num$subnum
1531 } elseif { [regexp {Scientific[ \tA-Za-z]+([0-9]+)[.]([0-9]+)} $issue res num subnum] } {
1532 set env(os_type) SL$num$subnum
1533 } elseif { [regexp {Fedora Core[ \tA-Za-z]+([0-9]+)} $issue res num] } {
1534 set env(os_type) FedoraCore$num
1535 }
1536 if { [exec uname -m] == "x86_64" } {
1537 set env(os_type) "$env(os_type)-64"
1538 }
1539 }
1540}
1541_tests_platform_def
1542
1543# Auxiliary procedure to split path specification (usually defined by
1544# environment variable) into list of directories or files
1545proc _split_path {pathspec} {
1546 global tcl_platform
1547
1548 # first replace all \ (which might occur on Windows) by /
1549 regsub -all "\\\\" $pathspec "/" pathspec
1550
1551 # split path by platform-specific separator
1552 return [split $pathspec [_path_separator]]
1553}
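# Example (illustrative value): on Windows, where ";" is the separator,
#   _split_path {d:/tests;d:/data}   ;# -> {d:/tests d:/data}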
1554
1555# Auxiliary procedure to define platform-specific separator for directories in
1556# path specification
1557proc _path_separator {} {
1558 global tcl_platform
1559
1560 # split path by platform-specific separator
1561 if { $tcl_platform(platform) == "windows" } {
1562 return ";"
1563 } else {
1564 return ":"
1565 }
1566}
1567
cc6a292d 1568# Procedure to make a diff and common of two lists
1569proc _list_diff {list1 list2 _in1 _in2 _common} {
1570 upvar $_in1 in1
1571 upvar $_in2 in2
1572 upvar $_common common
1573
1574 set in1 {}
1575 set in2 {}
1576 set common {}
1577 foreach item $list1 {
1578 if { [lsearch -exact $list2 $item] >= 0 } {
1579 lappend common $item
1580 } else {
1581 lappend in1 $item
1582 }
1583 }
1584 foreach item $list2 {
1585 if { [lsearch -exact $common $item] < 0 } {
1586 lappend in2 $item
1587 }
1588 }
1589 return
1590}
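# Example: _list_diff {a b c} {b c d} in1 in2 common
# leaves in1 == {a}, in2 == {d}, common == {b c}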
1591
1592# procedure to load a file to Tcl string
1593proc _read_file {filename} {
1594 set fd [open $filename r]
1595 set result [read -nonewline $fd]
1596 close $fd
1597 return $result
1598}
1599
22db40eb 1600# procedure to construct name for the image diff file
1601proc _diff_img_name {dir1 dir2 casepath imgfile} {
1602 return [file join $dir1 $casepath "diff-[file tail $dir2]-$imgfile"]
1603}
1604
cc6a292d 1605# Procedure to compare results of two runs of test cases
1606proc _test_diff {dir1 dir2 basename status verbose _logvar {_statvar ""}} {
1607 upvar $_logvar log
1608
22db40eb 1609 # make sure to load diffimage command
1610 uplevel pload VISUALIZATION
1611
cc6a292d 1612 # prepare variable (array) for collecting statistics
1613 if { "$_statvar" != "" } {
1614 upvar $_statvar stat
1615 } else {
1616 set stat(cpu1) 0
1617 set stat(cpu2) 0
22db40eb 1618 set stat(mem1) 0
1619 set stat(mem2) 0
cc6a292d 1620 set log {}
1621 }
1622
1623 # first check subdirectories
1624 set path1 [file join $dir1 $basename]
1625 set path2 [file join $dir2 $basename]
1626 set list1 [glob -directory $path1 -types d -tails -nocomplain *]
1627 set list2 [glob -directory $path2 -types d -tails -nocomplain *]
1628 if { [llength $list1] >0 || [llength $list2] > 0 } {
1629 _list_diff $list1 $list2 in1 in2 common
1630 if { "$verbose" > 1 } {
1631 if { [llength $in1] > 0 } { _log_and_puts log "Only in $path1: $in1" }
1632 if { [llength $in2] > 0 } { _log_and_puts log "Only in $path2: $in2" }
1633 }
1634 foreach subdir $common {
1635 if { "$verbose" > 2 } {
1636 _log_and_puts log "Checking [file join $basename $subdir]"
1637 }
1638 _test_diff $dir1 $dir2 [file join $basename $subdir] $status $verbose log stat
1639 }
1640 } else {
1641 # check log files (only if directory has no subdirs)
1642 set list1 [glob -directory $path1 -types f -tails -nocomplain *.log]
1643 set list2 [glob -directory $path2 -types f -tails -nocomplain *.log]
1644 _list_diff $list1 $list2 in1 in2 common
1645 if { "$verbose" > 1 } {
1646 if { [llength $in1] > 0 } { _log_and_puts log "Only in $path1: $in1" }
1647 if { [llength $in2] > 0 } { _log_and_puts log "Only in $path2: $in2" }
1648 }
1649 foreach logfile $common {
1650 # load two logs
1651 set log1 [_read_file [file join $dir1 $basename $logfile]]
1652 set log2 [_read_file [file join $dir2 $basename $logfile]]
22db40eb 1653 set casename [file rootname $logfile]
cc6a292d 1654
1655 # check execution statuses
1656 set status1 UNDEFINED
1657 set status2 UNDEFINED
1658 if { ! [regexp {CASE [^:]*:\s*([\w]+)} $log1 res1 status1] ||
1659 ! [regexp {CASE [^:]*:\s*([\w]+)} $log2 res2 status2] ||
1660 "$status1" != "$status2" } {
22db40eb 1661 _log_and_puts log "STATUS [split $basename /] $casename: $status1 / $status2"
cc6a292d 1662
1663 # if test statuses are different, further comparison makes
1664 # no sense unless explicitly requested
1665 if { "$status" != "all" } {
1666 continue
1667 }
1668 }
1669 if { "$status" == "ok" && "$status1" != "OK" } {
1670 continue
1671 }
1672
1673 # check CPU times
1674 set cpu1 UNDEFINED
1675 set cpu2 UNDEFINED
1676 if { [regexp {TOTAL CPU TIME:\s*([\d.]+)} $log1 res1 cpu1] &&
1677 [regexp {TOTAL CPU TIME:\s*([\d.]+)} $log2 res1 cpu2] } {
1678 set stat(cpu1) [expr $stat(cpu1) + $cpu1]
1679 set stat(cpu2) [expr $stat(cpu2) + $cpu2]
1680
1681 # compare CPU times with 10% precision (but not less than 0.5 sec)
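# (for instance, cpu1=10.0 vs cpu2=12.0 gives a threshold of
#  0.5 + 0.05 * 22.0 = 1.6 s, so the 2.0 s difference is reported)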
1682 if { [expr abs ($cpu1 - $cpu2) > 0.5 + 0.05 * abs ($cpu1 + $cpu2)] } {
22db40eb 1683 _log_and_puts log "CPU [split $basename /] $casename: $cpu1 / $cpu2"
cc6a292d 1684 }
1685 }
22db40eb 1686
1687 # check memory delta
1688 set mem1 UNDEFINED
1689 set mem2 UNDEFINED
1690 if { [regexp {MEMORY DELTA:\s*([\d.]+)} $log1 res1 mem1] &&
1691 [regexp {MEMORY DELTA:\s*([\d.]+)} $log2 res1 mem2] } {
1692 set stat(mem1) [expr $stat(mem1) + $mem1]
1693 set stat(mem2) [expr $stat(mem2) + $mem2]
1694
1695 # compare memory usage with 10% precision (but not less than 16 KiB)
1696 if { [expr abs ($mem1 - $mem2) > 16 + 0.05 * abs ($mem1 + $mem2)] } {
1697 _log_and_puts log "MEMORY [split $basename /] $casename: $mem1 / $mem2"
1698 }
1699 }
1700
1701 # check images
1702 set imglist1 [glob -directory $path1 -types f -tails -nocomplain $casename*.{png,gif}]
1703 set imglist2 [glob -directory $path2 -types f -tails -nocomplain $casename*.{png,gif}]
1704 _list_diff $imglist1 $imglist2 imgin1 imgin2 imgcommon
1705 if { "$verbose" > 1 } {
1706 if { [llength $imgin1] > 0 } { _log_and_puts log "Only in $path1: $imgin1" }
1707 if { [llength $imgin2] > 0 } { _log_and_puts log "Only in $path2: $imgin2" }
1708 }
1709 foreach imgfile $imgcommon {
1710# if { $verbose > 1 } { _log_and_puts log "Checking [split basename /] $casename: $imgfile" }
1711 set diffile [_diff_img_name $dir1 $dir2 $basename $imgfile]
1712 if { [catch {diffimage [file join $dir1 $basename $imgfile] \
1713 [file join $dir2 $basename $imgfile] \
1714 0 0 0 $diffile} diff] } {
1715 _log_and_puts log "IMAGE [split $basename /] $casename: $imgfile cannot be compared"
1716 file delete -force $diffile ;# clean possible previous result of diffimage
1717 } elseif { $diff != 0 } {
1718 _log_and_puts log "IMAGE [split $basename /] $casename: $imgfile differs"
1719 } else {
1720 file delete -force $diffile ;# clean useless artifact of diffimage
1721 }
1722 }
cc6a292d 1723 }
1724 }
1725
1726 if { "$_statvar" == "" } {
22db40eb 1727 _log_and_puts log "Total MEMORY difference: $stat(mem1) / $stat(mem2)"
cc6a292d 1728 _log_and_puts log "Total CPU difference: $stat(cpu1) / $stat(cpu2)"
1729 }
1730}
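# Example (illustrative; directory names are hypothetical): compare two result
# directories produced by testgrid, reporting all statuses at verbosity 1 and
# collecting the textual report in the variable "log":
#   _test_diff results-2013-01-15 results-2013-01-16 "" all 1 log
#   puts $log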
b725d7c5 1731
22db40eb 1732# Auxiliary procedure to save log of results comparison to file
1733proc _log_html_diff {file log dir1 dir2} {
1734 # create missing directories as needed
1735 catch {file mkdir [file dirname $file]}
1736
1737 # try to open a file
1738 if [catch {set fd [open $file w]} res] {
1739 error "Error saving log file $file: $res"
1740 }
1741
1742 # print header
1743 puts $fd "<html><head><title>Diff $dir1 vs. $dir2</title></head><body>"
1744 puts $fd "<h1>Comparison of test results: $dir1 vs. $dir2</h1>"
1745
1746 # print log body; for lines like "IMAGE group grid case: imagename" add a
1747 # table with the two compared images and the image of their pixel difference
1748 puts $fd "<pre>"
1749 set logpath [file split [file normalize $file]]
1750 foreach line [split $log "\n"] {
1751 puts $fd $line
1752
1753 if { [regexp {IMAGE[ \t]+([^:]+):[ \t]+([A-Za-z0-9_.-]+)} $line res case img] } {
1754 if { [catch {eval file join "" [lrange $case 0 end-1]} gridpath] } {
1755 # note: special handling for the case when test grid directories are compared directly
1756 set gridpath ""
1757 }
1758 set img1 "<img src=\"[_make_url $file [file join $dir1 $gridpath $img]]\">"
1759 set img2 "<img src=\"[_make_url $file [file join $dir2 $gridpath $img]]\">"
1760
1761 set difffile [_diff_img_name $dir1 $dir2 $gridpath $img]
1762 if { [file exists $difffile] } {
1763 set imgd "<img src=\"[_make_url $file $difffile]\">"
1764 } else {
1765 set imgd "N/A"
1766 }
1767
1768 puts $fd "<table><tr><th>[file tail $dir1]</th><th>[file tail $dir2]</th><th>Different pixels</th></tr>"
1769 puts $fd "<tr><td>$img1</td><td>$img2</td><td>$imgd</td></tr></table>"
1770 }
1771 }
1772 puts $fd "</pre></body></html>"
1773
1774 close $fd
1775 return
1776}
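# Example (illustrative; file and directory names are hypothetical): save the
# comparison produced by _test_diff above as an HTML page with embedded images:
#   _log_html_diff results-2013-01-15/diff.html $log results-2013-01-15 results-2013-01-16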
1777
b725d7c5 1778# get number of CPUs on the system
1779proc _get_nb_cpus {} {
1780 global tcl_platform env
1781
1782 if { "$tcl_platform(platform)" == "windows" } {
1783 # on Windows, take the value of the environment variable
1784 if { [info exists env(NUMBER_OF_PROCESSORS)] &&
1785 ! [catch {expr $env(NUMBER_OF_PROCESSORS) > 0} res] && $res >= 0 } {
1786 return $env(NUMBER_OF_PROCESSORS)
1787 }
1788 } elseif { "$tcl_platform(os)" == "Linux" } {
1789 # on Linux, take number of logical processors listed in /proc/cpuinfo
1790 if { [catch {open "/proc/cpuinfo" r} fd] } {
1791 return 0 ;# should never happen, but...
1792 }
1793 set nb 0
1794 while { [gets $fd line] >= 0 } {
1795 if { [regexp {^processor[ \t]*:} $line] } {
1796 incr nb
1797 }
1798 }
1799 close $fd
1800 return $nb
1801 } elseif { "$tcl_platform(os)" == "Darwin" } {
1802 # on MacOS X, call sysctl command
1803 if { ! [catch {exec sysctl hw.ncpu} ret] &&
1804 [regexp {^hw[.]ncpu[ \t]*:[ \t]*([0-9]+)} $ret res nb] } {
1805 return $nb
1806 }
1807 }
1808
1809 # if we cannot get a good value, return 0 as default
1810 return 0
1811}
351bbcba 1812
1813# check two files for difference
1814proc _diff_files {file1 file2} {
1815 set fd1 [open $file1 "r"]
1816 set fd2 [open $file2 "r"]
1817
1818 set differ f
1819 while {! $differ} {
1820 set nb1 [gets $fd1 line1]
1821 set nb2 [gets $fd2 line2]
1822 if { $nb1 != $nb2 } { set differ t; break }
1823 if { $nb1 < 0 } { break }
1824 if { [string compare $line1 $line2] } {
1825 set differ t
1826 }
1827 }
1828
1829 close $fd1
1830 close $fd2
1831
1832 return $differ
1833}
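# Example (illustrative; file names are hypothetical): the returned boolean can
# be used directly in a condition:
#   if { [_diff_files expected.log actual.log] } { puts "logs differ" }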
1834
1835# Check if file is in DOS encoding.
1836# This check is done by presence of \r\n combination at the end of the first
1837# line (i.e. prior to any other \n symbol).
1838 # Note that checking for non-ASCII symbols (typically used to recognize
1839 # binary files) is not suitable here, since some IGES and STEP files
1840 # contain non-ASCII symbols.
1841 # A special check excludes PNG files, which contain \r\n at the beginning.
1842proc _check_dos_encoding {file} {
1843 set fd [open $file rb]
1844 set isdos f
1845 if { [gets $fd line] && [regexp {.*\r$} $line] &&
1846 ! [regexp {^.PNG} $line] } {
1847 set isdos t
1848 }
1849 close $fd
1850 return $isdos
1851}
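# Example (illustrative; the file name is hypothetical): a script saved with
# CRLF line ends is reported as DOS-encoded:
#   _check_dos_encoding data/case.tcl  ;# -> t (f for LF-only files)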
1852
1853# procedure to recognize format of a data file by its first symbols (for OCCT
1854# BREP and geometry DRAW formats, IGES, and STEP) and extension (all others)
1855proc _check_file_format {file} {
1856 set fd [open $file rb]
1857 set line [read $fd 1024]
1858 close $fd
1859
1860 set warn f
1861 set ext [file extension $file]
1862 set format unknown
1863 if { [regexp {^DBRep_DrawableShape} $line] } {
1864 set format BREP
1865 if { "$ext" != ".brep" && "$ext" != ".rle" &&
1866 "$ext" != ".draw" && "$ext" != "" } {
1867 set warn t
1868 }
1869 } elseif { [regexp {^DrawTrSurf_} $line] } {
1870 set format DRAW
1871 if { "$ext" != ".rle" &&
1872 "$ext" != ".draw" && "$ext" != "" } {
1873 set warn t
1874 }
1875 } elseif { [regexp {^[ \t]*ISO-10303-21} $line] } {
1876 set format STEP
1877 if { "$ext" != ".step" && "$ext" != ".stp" } {
1878 set warn t
1879 }
1880 } elseif { [regexp {^.\{72\}S[0 ]\{6\}1} $line] } {
1881 set format IGES
1882 if { "$ext" != ".iges" && "$ext" != ".igs" } {
1883 set warn t
1884 }
1885 } elseif { "$ext" == ".igs" } {
1886 set format IGES
1887 } elseif { "$ext" == ".stp" } {
1888 set format STEP
1889 } else {
1890 set format [string toupper [string range $ext 1 end]]
1891 }
1892
1893 if { $warn } {
1894 puts "$file: Warning: extension ($ext) does not match format ($format)"
1895 }
1896
1897 return $format
1898}
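# Examples (illustrative; file names are hypothetical):
#   _check_file_format box.brep    ;# -> BREP (content starts with DBRep_DrawableShape)
#   _check_file_format flange.stp  ;# -> STEP (content starts with ISO-10303-21)
#   _check_file_format mesh.stl    ;# -> STL  (recognized by extension only)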
1899
1900 # procedure to load a data file of known format
1901proc load_data_file {file format shape} {
1902 switch $format {
1903 BREP { uplevel restore $file $shape }
808b3f9f 1904 DRAW { uplevel restore $file $shape }
351bbcba 1905 IGES { pload XSDRAW; uplevel igesbrep $file $shape * }
1906 STEP { pload XSDRAW; uplevel stepread $file __a *; uplevel renamevar __a_1 $shape }
1907 STL { pload XSDRAW; uplevel readstl $shape $file }
1908 default { error "Cannot read $format file $file" }
1909 }
1910}
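# Example (illustrative; the file name is hypothetical): recognize the format
# first, then load the file into the DRAW variable "a":
#   set f data/flange.igs
#   load_data_file $f [_check_file_format $f] a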
60874ff8 1911
1912 # procedure to get the name of a temporary directory,
1913 # ensuring that it exists and is writable
1914proc _get_temp_dir {} {
723754e2 1915 global env tcl_platform
60874ff8 1916
1917 # check typical environment variables
1918 foreach var {TempDir Temp Tmp} {
1919 # check different letter cases of the variable name
1920 foreach name [list [string toupper $var] $var [string tolower $var]] {
1921 if { [info exists env($name)] && [file isdirectory $env($name)] &&
1922 [file writable $env($name)] } {
1923 return [regsub -all {\\} $env($name) /]
1924 }
1925 }
1926 }
1927
1928 # check platform-specific locations
1929 set fallback tmp
1930 if { "$tcl_platform(platform)" == "windows" } {
1931 set paths "c:/TEMP c:/TMP /TEMP /TMP"
1932 if { [info exists env(HOMEDRIVE)] && [info exists env(HOMEPATH)] } {
1933 set fallback [regsub -all {\\} "$env(HOMEDRIVE)$env(HOMEPATH)/tmp" /]
1934 }
1935 } else {
1936 set paths "/tmp /var/tmp /usr/tmp"
1937 if { [info exists env(HOME)] } {
1938 set fallback "$env(HOME)/tmp"
1939 }
1940 }
1941 foreach dir $paths {
723754e2 1942 if { [file isdirectory $dir] && [file writable $dir] } {
60874ff8 1943 return $dir
1944 }
1945 }
1946
1947 # fallback case: use subdir /tmp of home or current dir
1948 file mkdir $fallback
1949 return $fallback
1950}
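# Example (illustrative): place a scratch file in the detected location:
#   set tmpfile [file join [_get_temp_dir] occt_scratch.txt]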