# Copyright (C) 2001-2015 Free Software Foundation, Inc.

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GCC; see the file COPYING3.  If not see
# <http://www.gnu.org/licenses/>.
#
# This script was submitted by Janis Johnson <janis187@us.ibm.com>.

# Test the functionality and optionally, performance improvement, of
# programs compiled with profile-directed optimizations.  Compile and
# run a test with profile options, compile it with options using the
# profile feedback, and then run the test again.  Optionally compile
# and run a third time without the profile-directed optimization and
# compare timing results of the program with normal optimization and
# with the profile-directed optimization.  Each test is run using
# multiple sets of optimization and/or code generation options in
# addition to the profiling and feedback options.

# If perf_ext is defined and the performance value for the
# profile-directed test run is nonzero then the performance check will
# be done.

load_lib dg.exp
load_lib gcc-dg.exp

global PROFOPT_OPTIONS perf_delta

# The including .exp file must define these.
global tool profile_option feedback_option prof_ext
if ![info exists tool] {
    # Fixed message: the missing variable is the (singular) tool name.
    error "Tool is not specified."
}
if ![info exists prof_ext] {
    error "No profile data file extensions specified."
}

# The maximum performance degradation can be defined in the including file.
# Allowed perf degradation (percent) before a perf check fails; the
# including .exp file may override it before loading this one.
if ![info exists perf_delta] {
    set perf_delta 4
}

# The default option list can be overridden by
# PROFOPT_OPTIONS="{ { list1 } ... { list2 } }"

if ![info exists PROFOPT_OPTIONS] {
    set PROFOPT_OPTIONS [list \
	{ -g } \
	{ -O0 } \
	{ -O1 } \
	{ -O2 } \
	{ -O3 } \
	{ -O3 -g } \
	{ -Os } ]
}

#
# profopt-cleanup -- remove profiling or performance results files
# from the build machine.
#
# TESTCASE is the name of the test
# EXTLIST is the list of file extensions to remove
#
proc profopt-cleanup { testcase extlist } {
    set basename [file tail $testcase]
    set base [file rootname $basename]
    foreach ext $extlist {
	# glob may match several files (e.g. per-source .gcda files).
	set files [glob -nocomplain $base.$ext]
	if { $files != "" } {
	    eval "remote_file build delete $files"
	}
    }
}

#
# profopt-target-cleanup -- remove profiling result files from the target,
# both for the primary source file and for any dg-additional-sources.
#
# DIR is the name of the directory
# TESTCASE is the name of the test
# EXT is the extension of files to remove
#
proc profopt-target-cleanup { dir testcase ext } {
    global additional_sources_used
    set basename [file tail $testcase]
    set base [file rootname $basename]
    set file "$dir/$base.$ext"
    eval "remote_file target delete $file"

    # additional_sources_used is set as a side effect of processing
    # dg-additional-sources directives; it may legitimately not exist.
    if [info exists additional_sources_used] {
	foreach srcfile $additional_sources_used {
	    set basename [file tail $srcfile]
	    set base [file rootname $basename]
	    set file "$dir/$base.$ext"
	    eval "remote_file target delete $file"
	}
    }
}

#
# profopt-perf-value -- get performance value for a test
#
# TESTCASE is the name of the test
# PERF_EXT is the extension of the performance result file
# OPTSTR is the string of compiler options
#
# Returns the first integer found on a "TIME" line of the result file,
# -1 if the file reports "TIME -1" (no consistent time; FAILs here),
# or -2 if the file is missing or malformed (malformed FAILs here,
# missing is left to the caller to judge).
#
proc profopt-perf-value { testcase perf_ext optstr } {
    set basename [file tail $testcase]
    set base [file rootname $basename]
    set files [glob -nocomplain $base.$perf_ext]
    # The file doesn't exist; let the caller decide if that's a problem.
    if { $files == "" } {
	return -2
    }
    remote_upload host $base.$perf_ext $base.$perf_ext
    set fd [open $base.$perf_ext r]
    # Only the first line of the result file is examined.
    gets $fd line
    set val -2
    if [regexp "TIME" $line] {
	if [regexp "TIME -1" $line] {
	    fail "$testcase perf check: no consistent time available, $optstr"
	    set val -1
	} elseif ![regexp "(\[0-9\]+)" "$line" val] {
	    set val -2
	}
    }
    # Report problems with an existing file.
    if { $val == -2 } {
	fail "$testcase perf check: file $base.$perf_ext has wrong format, $optstr"
    }
    close $fd
    profopt-cleanup $testcase $perf_ext
    return $val
}

#
# dg-final-generate -- process code to run after the profile-generate step
#
# ARGS is the line number of the directive followed by the commands.
#
proc dg-final-generate { args } {
    global generate_final_code

    if { [llength $args] > 2 } {
	error "[lindex $args 0]: too many arguments"
	return
    }
    # Accumulate; a test may contain several dg-final-generate directives.
    append generate_final_code "[lindex $args 1]\n"
}

#
# dg-final-use -- process code to run after the profile-use step
#
# ARGS is the line number of the directive followed by the commands.
#
proc dg-final-use { args } {
    global use_final_code

    if { [llength $args] > 2 } {
	error "[lindex $args 0]: too many arguments"
	return
    }
    # Accumulate; a test may contain several dg-final-use directives.
    append use_final_code "[lindex $args 1]\n"
}

#
# profopt-final-code -- run accumulated dg-final-generate/use code
#
# WHICH is "generate" or "use".
# FINAL_CODE is the TCL code to run.
# NAME is the name of the test, for error messages.
#
proc profopt-final-code { which final_code name } {
    # This is copied from dg-test in dg.exp of DejaGnu: unquote escaped
    # braces, then wrap the directive body in a throwaway proc so a
    # failure in it can be caught rather than aborting the run.
    regsub -all "\\\\(\[{}\])" $final_code "\\1" final_code
    proc profopt-final-proc { args } $final_code
    if [catch "profopt-final-proc $name" errmsg] {
	perror "$name: error executing dg-final-${which}: $errmsg"
	unresolved "$name: Error executing dg-final-${which}: $errmsg"
    }
}

#
# profopt-get-options -- process test directives
#
# SRC is the full pathname of the testcase.
#
# Returns the extra tool flags collected from dg-options (via the
# dg-extra-tool-flags variable that dg-options sets in this frame).
#
proc profopt-get-options { src } {
    # dg-options sets a variable called dg-extra-tool-flags.
    set dg-extra-tool-flags ""

    # dg-require-* sets dg-do-what.
    upvar dg-do-what dg-do-what

    # current_compiler_flags reads tool_flags from the same stack frame
    # as dg-extra-tool-flags
    set tool_flags ""

    set tmp [dg-get-options $src]
    foreach op $tmp {
	set cmd [lindex $op 0]
	# Only a whitelisted subset of dg- directives is honored here.
	if { ![string compare "dg-options" $cmd] \
	     || ![string compare "dg-skip-if" $cmd] \
	     || ![string compare "dg-final-generate" $cmd] \
	     || ![string compare "dg-final-use" $cmd] \
	     || ![string compare "dg-additional-sources" $cmd] \
	     || [string match "dg-require-*" $cmd] } {
	    set status [catch "$op" errmsg]
	    if { $status != 0 } {
		perror "$src: $errmsg for \"$op\"\n"
		unresolved "$src: $errmsg for \"$op\""
		return
	    }
	} else {
	    # Ignore unrecognized dg- commands, but warn about them.
	    warning "profopt.exp does not support $cmd"
	}
    }

    # Return flags to use for compiling the primary source file and for
    # linking.
    return ${dg-extra-tool-flags}
}

#
# profopt-execute -- compile for profiling and then feedback, then normal
#
# SRC is the full pathname of the testcase.
#
# Drives the whole profile-feedback cycle for one testcase, once per
# entry in PROFOPT_OPTIONS: (1) compile and run with $profile_option to
# generate profile data, (2) recompile with $feedback_option and rerun,
# and (3) if perf_ext is defined and step 2 produced a positive perf
# value, compile and run a third time without feedback and compare times
# against the perf_delta limit.
proc profopt-execute { src } {
    global srcdir tmpdir
    global PROFOPT_OPTIONS
    global tool profile_option feedback_option prof_ext perf_ext perf_delta
    global generate_final_code use_final_code
    global verbose
    global testname_with_flags

    if ![info exists profile_option] {
	error "No profile option specified for first compile."
    }
    if ![info exists feedback_option] {
	error "No feedback option specified for second compile."
    }

    # Use the default option list or one defined for a set of tests.
    if ![info exists PROFOPT_OPTIONS] {
	error "PROFOPT_OPTIONS is not defined"
    }
    set prof_option_list $PROFOPT_OPTIONS

    regsub "(?q)$srcdir/" $src "" testcase
    # If we couldn't rip $srcdir out of `src' then just do the best we can.
    # The point is to reduce the unnecessary noise in the logs.  Don't strip
    # out too much because different testcases with the same name can confuse
    # `test-tool'.
    if [string match "/*" $testcase] {
	set testcase "[file tail [file dirname $src]]/[file tail $src]"
    }

    # Several procedures access the name of the test with torture flags,
    # normally defined in dg-test.  Profile optimization tests don't
    # use dg-test, so define it here to make it accessible via
    # testname-for-summary.
    set testname_with_flags $testcase

    set executable $tmpdir/[file tail [file rootname $src].x]
    set basename [file tail $testcase]
    set base [file rootname $basename]

    set count 0
    foreach option $prof_option_list {
	# Three executables per option set: profiled, feedback-optimized,
	# and (for perf checks) normally optimized.
	set execname1 "${executable}${count}1"
	set execname2 "${executable}${count}2"
	set execname3 "${executable}${count}3"
	incr count

	remote_file build delete $execname1
	remote_file build delete $execname2
	remote_file build delete $execname3
	verbose "Testing $testcase, $option" 1

	# Remove old performance data files.
	if [info exists perf_ext] {
	    profopt-cleanup $testcase $perf_ext
	}

	# Process test directives.
	set generate_final_code ""
	set use_final_code ""
	set dg-do-what [list "run" "" P]
	set extra_flags [profopt-get-options $src]
	# "N" in dg-do-what means a dg-require-* directive ruled the
	# target out; skip the whole testcase, not just this option set.
	if { [lindex ${dg-do-what} 1 ] == "N" } {
	    unsupported "$testcase"
	    unset testname_with_flags
	    verbose "$src not supported on this target, skipping it" 3
	    return
	}

	set extra_options [dg-additional-files-options "" "$src"]

	# Remove old profiling data files.  Make sure additional_sources_used is
	# valid, by running it after dg-additional-files-options.
	foreach ext $prof_ext {
	    profopt-target-cleanup $tmpdir $base $ext
	}

	# Tree profiling requires TLS runtime support, which may need
	# additional flags.
	if { [string first "-fprofile-generate" $profile_option] >= 0 } {
	    set extra_flags [add_options_for_tls $extra_flags]
	}

	# Compile for profiling.

	set options "$extra_options"
	lappend options "additional_flags=$option $extra_flags $profile_option"
	set optstr "$option $profile_option"
	set comp_output [${tool}_target_compile "$src" "$execname1" executable $options]
	if ![${tool}_check_compile "$testcase compilation" $optstr $execname1 $comp_output] {
	    # The later stages can't run either; record them as unresolved.
	    unresolved "$testcase execution, $optstr"
	    unresolved "$testcase compilation, $option $feedback_option"
	    unresolved "$testcase execution, $option $feedback_option"
	    continue
	}

	# Run the profiled test.

	set result [${tool}_load $execname1 "" ""]
	set status [lindex $result 0]
	set missing_file 0
	# Make sure the profile data was generated, and fail if not.
	if { $status == "pass" } {
	    foreach ext $prof_ext {
		remote_upload target $tmpdir/$base.$ext
		set files [glob -nocomplain $base.$ext]
		if { $files == "" } {
		    set status "fail"
		    set missing_file 1
		    fail "$testcase execution: file $base.$ext does not exist, $option $profile_option"
		}
	    }
	}
	if { $missing_file == 0 } {
	    # $status holds "pass" or "fail", which are also the names of
	    # the DejaGnu result procs, so invoke it directly.
	    $status "$testcase execution, $optstr"
	}

	# If there is dg-final code to execute for the generate step, do it
	# even if it failed; it might clean up temporary files.
	if ![string match $generate_final_code ""] {
	    profopt-final-code "generate" $generate_final_code $testcase
	}

	remote_file build delete $execname1

	# Quit for this round if it failed
	if { $status != "pass" } {
	    unresolved "$testcase compilation, $option $feedback_option"
	    unresolved "$testcase execution, $option $feedback_option"
	    continue
	}

	# Compile with feedback-directed optimizations.

	set options "$extra_options"
	lappend options "additional_flags=$option $extra_flags $feedback_option"
	set optstr "$option $feedback_option"
	set comp_output [${tool}_target_compile "$src" "$execname2" "executable" $options]

	# Prune warnings we know are unwanted.
	set comp_output [prune_warnings $comp_output]

	if ![${tool}_check_compile "$testcase compilation" $optstr $execname2 $comp_output] {
	    unresolved "$testcase execution, $optstr"
	    continue
	}

	# Run the profile-directed optimized test.

	set result [${tool}_load "$execname2" "" ""]
	set status [lindex $result 0]
	$status "$testcase execution, $optstr"

	# If there is dg-final code to execute for the use step, do it.
	if ![string match $use_final_code ""] {
	    profopt-final-code "use" $use_final_code $testcase
	}

	# Remove the profiling data files.
	foreach ext $prof_ext {
	    profopt-target-cleanup $tmpdir $base $ext
	}

	if { $status != "pass" } {
	    continue
	}

	# If the test is not expected to produce performance data then
	# we're done now.
	if ![info exists perf_ext] {
	    remote_file build delete $execname2
	    continue
	}

	# Get the performance data from the test built with
	# profile-directed optimization.  If the file doesn't exist or if
	# the value is zero, skip the performance comparison.
	set val2 [profopt-perf-value $testcase $perf_ext $optstr]
	if { $val2 <= 0 } {
	    remote_file build delete $execname2
	    continue
	}

	# Compile with normal optimizations.

	set options "$extra_options"
	lappend options "additional_flags=$option"
	set optstr "$option"
	set comp_output [${tool}_target_compile "$src" "$execname3" "executable" $options]
	if ![${tool}_check_compile "$testcase compilation" $optstr $execname3 $comp_output] {
	    unresolved "$testcase execution, $optstr"
	    unresolved "$testcase perf check, $optstr"
	    continue
	}

	# Run the test with normal optimizations.

	set result [${tool}_load "$execname3" "" ""]
	set status [lindex $result 0]
	$status "$testcase execution, $optstr"
	if { $status != "pass" } {
	    unresolved "$testcase perf check, $optstr"
	    continue
	}

	# Get the performance data from the test built with normal
	# optimization.
	set val1 [profopt-perf-value $testcase $perf_ext $optstr]
	if { $val1 < 0 } {
	    if { $val1 == -2 } {
		# The data file existed with the profile-directed
		# optimization so this one should, too.
		fail "$testcase perf check: file $base.$perf_ext does not exist, $optstr"
	    }
	    continue
	}

	# Compare results of the two runs and fail if the time with the
	# profile-directed optimization is significantly more than the time
	# without it.
	set status "pass"
	if { $val2 > $val1 } {
	    # Check for a performance degration outside of allowable limits:
	    # fail only if the slowdown exceeds perf_delta percent of val1.
	    if { [expr $val2 - $val1] > [expr [expr $val1 * $perf_delta] / 100] } {
		set status "fail"
	    }
	}
	if { $status == "fail" } {
	    fail "$testcase perf check: orig: $val1 new: $val2, $optstr"
	} else {
	    $status "$testcase perf check, $optstr"
	    verbose "$testcase orig: $val1 new: $val2, $optstr" 2
	    # Only delete the executables on success so failures can be
	    # reproduced by hand.
	    remote_file build delete $execname2
	    remote_file build delete $execname3
	}
    }
    unset testname_with_flags
}