Fix problem with testcase timeouts not working. The timeout value was always multiplied by 10,
since the $opt_valgrind variable was initialised to 0 and thus counted as "defined".
parent 5a912955d5
commit 339cd91a98

1 changed file with 2 additions and 2 deletions
@@ -848,13 +848,13 @@ sub command_line_setup () {
   if ( ! $opt_testcase_timeout )
   {
     $opt_testcase_timeout= $default_testcase_timeout;
-    $opt_testcase_timeout*= 10 if defined $opt_valgrind;
+    $opt_testcase_timeout*= 10 if $opt_valgrind;
   }

   if ( ! $opt_suite_timeout )
   {
     $opt_suite_timeout= $default_suite_timeout;
-    $opt_suite_timeout*= 6 if defined $opt_valgrind;
+    $opt_suite_timeout*= 6 if $opt_valgrind;
   }

   # Increase times to wait for executables to start if using valgrind
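The underlying pitfall is a common Perl one: defined() tests whether a variable holds any value at all, and 0 is a value, so a flag initialised to 0 is always "defined" even though it is false in boolean context. The following is a minimal standalone sketch of the two checks, not code from the MariaDB source; the variable names echo the patch, but the numeric timeout values are illustrative placeholders:

#!/usr/bin/perl
use strict;
use warnings;

# Flag initialised to 0, as a GetOptions-style option variable would be.
my $opt_valgrind = 0;

# Illustrative default; not the script's real default value.
my $default_testcase_timeout = 15;

my $opt_testcase_timeout;    # not set on the command line

if ( ! $opt_testcase_timeout )
{
  $opt_testcase_timeout = $default_testcase_timeout;

  # Buggy check: 0 is defined, so this multiplies unconditionally.
  my $buggy = $opt_testcase_timeout;
  $buggy *= 10 if defined $opt_valgrind;

  # Fixed check: 0 is false, so this multiplies only when the
  # valgrind option is actually enabled.
  my $fixed = $opt_testcase_timeout;
  $fixed *= 10 if $opt_valgrind;

  print "buggy: $buggy, fixed: $fixed\n";
}

Running the sketch prints "buggy: 150, fixed: 15", matching the symptom in the commit message: with the defined() test, the timeout was multiplied by 10 even when valgrind was not in use.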