5.6.31-77.0

commit 4f2d214359 (parent f853a99a4f)
35 changed files with 153 additions and 79 deletions
@@ -1,4 +1,4 @@
SET(TOKUDB_VERSION 5.6.30-76.3)
SET(TOKUDB_VERSION 5.6.31-77.0)
# PerconaFT only supports x86-64 and cmake-2.8.9+
IF(CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" AND
NOT CMAKE_VERSION VERSION_LESS "2.8.9")
@@ -52,10 +52,6 @@ IF(DEFINED TOKUDB_NOPATCH_CONFIG)
ADD_DEFINITIONS("-DTOKUDB_NOPATCH_CONFIG=${TOKUDB_NOPATCH_CONFIG}")
ENDIF()

IF(DEFINED TOKUDB_CHECK_JEMALLOC)
ADD_DEFINITIONS("-DTOKUDB_CHECK_JEMALLOC=${TOKUDB_CHECK_JEMALLOC}")
ENDIF()

macro(set_cflags_if_supported)
foreach(flag ${ARGN})
string(REGEX REPLACE "-" "_" temp_flag ${flag})
@@ -420,6 +420,8 @@ static void print_db_env_struct (void) {
"int (*set_client_pool_threads)(DB_ENV *, uint32_t)",
"int (*set_cachetable_pool_threads)(DB_ENV *, uint32_t)",
"int (*set_checkpoint_pool_threads)(DB_ENV *, uint32_t)",
"void (*set_check_thp)(DB_ENV *, bool new_val)",
"bool (*get_check_thp)(DB_ENV *)",
NULL};

sort_and_dump_fields("db_env", true, extra);
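The two new entries add a setter and a getter for the transparent-huge-page (THP) check to the generated DB_ENV method table. A minimal usage sketch, assuming an already created but not yet opened DB_ENV named env (the variable name and surrounding setup are illustrative, not part of this commit):

    // Illustrative caller of the two methods declared above.
    env->set_check_thp(env, true);               // ask env->open() to verify THP is disabled
    bool thp_check = env->get_check_thp(env);    // read the current setting back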
@@ -132,6 +132,7 @@ struct __toku_db_env_internal {
int datadir_lockfd;
int logdir_lockfd;
int tmpdir_lockfd;
bool check_thp; // if set check if transparent huge pages are disabled
uint64_t (*get_loader_memory_size_callback)(void);
uint64_t default_lock_timeout_msec;
uint64_t (*get_lock_timeout_callback)(uint64_t default_lock_timeout_msec);
@@ -623,32 +623,39 @@ ydb_recover_log_exists(DB_ENV *env) {
}

// Validate that all required files are present, no side effects.
// Return 0 if all is well, ENOENT if some files are present but at least one is missing,
// Return 0 if all is well, ENOENT if some files are present but at least one is
// missing,
// other non-zero value if some other error occurs.
// Set *valid_newenv if creating a new environment (all files missing).
// (Note, if special dictionaries exist, then they were created transactionally and log should exist.)
static int
validate_env(DB_ENV * env, bool * valid_newenv, bool need_rollback_cachefile) {
// (Note, if special dictionaries exist, then they were created transactionally
// and log should exist.)
static int validate_env(DB_ENV *env,
bool *valid_newenv,
bool need_rollback_cachefile) {
int r;
bool expect_newenv = false; // set true if we expect to create a new env
bool expect_newenv = false; // set true if we expect to create a new env
toku_struct_stat buf;
char* path = NULL;
char *path = NULL;

// Test for persistent environment
path = toku_construct_full_name(2, env->i->dir, toku_product_name_strings.environmentdictionary);
path = toku_construct_full_name(
2, env->i->dir, toku_product_name_strings.environmentdictionary);
assert(path);
r = toku_stat(path, &buf);
if (r == 0) {
expect_newenv = false; // persistent info exists
}
else {
} else {
int stat_errno = get_error_errno();
if (stat_errno == ENOENT) {
expect_newenv = true;
r = 0;
}
else {
r = toku_ydb_do_error(env, stat_errno, "Unable to access persistent environment\n");
} else {
r = toku_ydb_do_error(
env,
stat_errno,
"Unable to access persistent environment [%s] in [%s]\n",
toku_product_name_strings.environmentdictionary,
env->i->dir);
assert(r);
}
}
@@ -656,23 +663,40 @@ validate_env(DB_ENV * env, bool * valid_newenv, bool need_rollback_cachefile) {

// Test for existence of rollback cachefile if it is expected to exist
if (r == 0 && need_rollback_cachefile) {
path = toku_construct_full_name(2, env->i->dir, toku_product_name_strings.rollback_cachefile);
path = toku_construct_full_name(
2, env->i->dir, toku_product_name_strings.rollback_cachefile);
assert(path);
r = toku_stat(path, &buf);
if (r == 0) {
if (expect_newenv) // rollback cachefile exists, but persistent env is missing
r = toku_ydb_do_error(env, ENOENT, "Persistent environment is missing\n");
}
else {
if (r == 0) {
if (expect_newenv) // rollback cachefile exists, but persistent env
// is missing
r = toku_ydb_do_error(
env,
ENOENT,
"Persistent environment is missing while looking for "
"rollback cachefile [%s] in [%s]\n",
toku_product_name_strings.rollback_cachefile, env->i->dir);
} else {
int stat_errno = get_error_errno();
if (stat_errno == ENOENT) {
if (!expect_newenv) // rollback cachefile is missing but persistent env exists
r = toku_ydb_do_error(env, ENOENT, "rollback cachefile directory is missing\n");
else
r = 0; // both rollback cachefile and persistent env are missing
}
else {
r = toku_ydb_do_error(env, stat_errno, "Unable to access rollback cachefile\n");
if (!expect_newenv) // rollback cachefile is missing but
// persistent env exists
r = toku_ydb_do_error(
env,
ENOENT,
"rollback cachefile [%s] is missing from [%s]\n",
toku_product_name_strings.rollback_cachefile,
env->i->dir);
else
r = 0; // both rollback cachefile and persistent env are
// missing
} else {
r = toku_ydb_do_error(
env,
stat_errno,
"Unable to access rollback cachefile [%s] in [%s]\n",
toku_product_name_strings.rollback_cachefile,
env->i->dir);
assert(r);
}
}
@@ -681,23 +705,41 @@ validate_env(DB_ENV * env, bool * valid_newenv, bool need_rollback_cachefile) {

// Test for fileops directory
if (r == 0) {
path = toku_construct_full_name(2, env->i->dir, toku_product_name_strings.fileopsdirectory);
path = toku_construct_full_name(
2, env->i->dir, toku_product_name_strings.fileopsdirectory);
assert(path);
r = toku_stat(path, &buf);
if (r == 0) {
if (expect_newenv) // fileops directory exists, but persistent env is missing
r = toku_ydb_do_error(env, ENOENT, "Persistent environment is missing\n");
}
else {
if (r == 0) {
if (expect_newenv) // fileops directory exists, but persistent env
// is missing
r = toku_ydb_do_error(
env,
ENOENT,
"Persistent environment is missing while looking for "
"fileops directory [%s] in [%s]\n",
toku_product_name_strings.fileopsdirectory,
env->i->dir);
} else {
int stat_errno = get_error_errno();
if (stat_errno == ENOENT) {
if (!expect_newenv) // fileops directory is missing but persistent env exists
r = toku_ydb_do_error(env, ENOENT, "Fileops directory is missing\n");
else
r = 0; // both fileops directory and persistent env are missing
}
else {
r = toku_ydb_do_error(env, stat_errno, "Unable to access fileops directory\n");
if (!expect_newenv) // fileops directory is missing but
// persistent env exists
r = toku_ydb_do_error(
env,
ENOENT,
"Fileops directory [%s] is missing from [%s]\n",
toku_product_name_strings.fileopsdirectory,
env->i->dir);
else
r = 0; // both fileops directory and persistent env are
// missing
} else {
r = toku_ydb_do_error(
env,
stat_errno,
"Unable to access fileops directory [%s] in [%s]\n",
toku_product_name_strings.fileopsdirectory,
env->i->dir);
assert(r);
}
}
@@ -709,16 +751,26 @@ validate_env(DB_ENV * env, bool * valid_newenv, bool need_rollback_cachefile) {
// if using transactions, test for existence of log
r = ydb_recover_log_exists(env); // return 0 or ENOENT
if (expect_newenv && (r != ENOENT))
r = toku_ydb_do_error(env, ENOENT, "Persistent environment information is missing (but log exists)\n");
r = toku_ydb_do_error(env,
ENOENT,
"Persistent environment information is "
"missing (but log exists) while looking for "
"recovery log files in [%s]\n",
env->i->real_log_dir);
else if (!expect_newenv && r == ENOENT)
r = toku_ydb_do_error(env, ENOENT, "Recovery log is missing (persistent environment information is present)\n");
r = toku_ydb_do_error(env,
ENOENT,
"Recovery log is missing (persistent "
"environment information is present) while "
"looking for recovery log files in [%s]\n",
env->i->real_log_dir);
else
r = 0;
}

if (r == 0)
*valid_newenv = expect_newenv;
else
else
*valid_newenv = false;
return r;
}
@@ -768,7 +820,7 @@ env_open(DB_ENV * env, const char *home, uint32_t flags, int mode) {
goto cleanup;
}

if (toku_os_huge_pages_enabled()) {
if (env->get_check_thp(env) && toku_os_huge_pages_enabled()) {
r = toku_ydb_do_error(env, TOKUDB_HUGE_PAGES_ENABLED,
"Huge pages are enabled, disable them before continuing\n");
goto cleanup;
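With this hunk the huge-page test in env_open runs only when the environment's check_thp flag is set. The sketch below shows one common way such a probe is written on Linux; it illustrates the idea only and is not the body of toku_os_huge_pages_enabled from this tree:

    #include <fstream>
    #include <string>

    // Hypothetical probe: report whether transparent huge pages are forced on.
    // Real implementations may also consider the madvise setting and an
    // environment-variable override.
    static bool huge_pages_enabled_sketch() {
        std::ifstream f("/sys/kernel/mm/transparent_hugepage/enabled");
        std::string line;
        if (!std::getline(f, line))
            return false;  // knob unreadable: assume THP is not forced on
        return line.find("[always]") != std::string::npos;
    }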
@@ -1234,6 +1286,18 @@ env_set_checkpoint_pool_threads(DB_ENV * env, uint32_t threads) {
return 0;
}

static void
env_set_check_thp(DB_ENV * env, bool new_val) {
assert(env);
env->i->check_thp = new_val;
}

static bool
env_get_check_thp(DB_ENV * env) {
assert(env);
return env->i->check_thp;
}

static int env_dbremove(DB_ENV * env, DB_TXN *txn, const char *fname, const char *dbname, uint32_t flags);

static int
@@ -2634,6 +2698,8 @@ toku_env_create(DB_ENV ** envp, uint32_t flags) {
USENV(get_loader_memory_size);
USENV(set_killed_callback);
USENV(do_backtrace);
USENV(set_check_thp);
USENV(get_check_thp);
#undef USENV

// unlocked methods
@@ -2659,6 +2725,8 @@ toku_env_create(DB_ENV ** envp, uint32_t flags) {
env_fs_init(result);
env_fsync_log_init(result);

result->i->check_thp = true;

result->i->bt_compare = toku_builtin_compare_fun;

r = toku_logger_create(&result->i->logger);
@@ -69,10 +69,6 @@ Copyright (c) 2006, 2015, Percona and/or its affiliates. All rights reserved.
#pragma interface /* gcc class implementation */
#endif

#if !defined(TOKUDB_CHECK_JEMALLOC)
#define TOKUDB_CHECK_JEMALLOC 1
#endif

#if 100000 <= MYSQL_VERSION_ID && MYSQL_VERSION_ID <= 100099
// mariadb 10.0
#define TOKU_USE_DB_TYPE_TOKUDB 1
@@ -278,7 +278,6 @@ static int tokudb_init_func(void *p) {
db_env = NULL;
tokudb_hton = (handlerton *) p;

#if TOKUDB_CHECK_JEMALLOC
if (tokudb::sysvars::check_jemalloc) {
typedef int (*mallctl_type)(
const char*,
@@ -305,7 +304,6 @@ static int tokudb_init_func(void *p) {
goto error;
}
}
#endif

r = tokudb_set_product_name();
if (r) {
@@ -550,6 +548,8 @@ static int tokudb_init_func(void *p) {
db_env,
tokudb_get_loader_memory_size_callback);

db_env->set_check_thp(db_env, tokudb::sysvars::check_jemalloc);

r = db_env->open(
db_env,
tokudb_home,
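Together with the earlier ydb.cc hunks, plugin initialization now feeds the tokudb_check_jemalloc system variable into the environment before open, so the THP check can be switched off at runtime. A condensed sketch of that flow follows; open_mode stands in for the mode argument elided in the hunk above, tokudb_init_flags is assumed to be the flags value used at the original call site, and error handling plus the other db_env->set_* calls of tokudb_init_func are omitted:

    // Condensed sketch, not the verbatim tokudb_init_func body.
    db_env->set_check_thp(db_env, tokudb::sysvars::check_jemalloc);
    r = db_env->open(db_env, tokudb_home, tokudb_init_flags, open_mode);
    if (r != 0) {
        // with the check enabled, r can be TOKUDB_HUGE_PAGES_ENABLED here
        goto error;
    }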
@@ -71,6 +71,7 @@ tables = [
]

# Code generation stats here
print "source include/have_tokudb.inc;"
print "# Tokutek"
print "# Test that bulk fetch works with various table types"
print ""
@@ -56,6 +56,7 @@ write_range_queries = [
timeouts = [0, 500]

# Here's where all the magic happens
print "source include/have_tokudb.inc;"
print "# Tokutek"
print "# Blocking row lock tests;"
print "# Generated by %s on %s;" % (__file__, datetime.date.today())
@@ -23,6 +23,7 @@ def sqlgen_explain_and_do(query):
def sqlgen_drop_table():
print "drop table t;"

print "source include/have_tokudb.inc;"
print "# Tokutek"
print "# Test that replace into and insert ignore insertions "
print "# work under various index schemas. "
storage/tokudb/mysql-test/tokudb/t/change_column_bin.py (1 change, Normal file → Executable file)
@@ -22,6 +22,7 @@ def gen_test(n):
print "DROP TABLE t;"

def main():
print "source include/have_tokudb.inc;"
print "# this test is generated by change_bin.py"
print "# test binary expansion is hot"
print "--disable_warnings"
storage/tokudb/mysql-test/tokudb/t/change_column_bin_rename.py (1 change, Normal file → Executable file)
@@ -26,6 +26,7 @@ def gen_test(n):
print "DROP TABLE t;"

def main():
print "source include/have_tokudb.inc;"
print "# this test is generated by change_char_rename.py"
print "# test char expansion + rename is hot"
print "--disable_warnings"
@@ -1,5 +1,6 @@
import sys
def main():
print "source include/have_tokudb.inc;"
print "# this test is generated by change_blob.py"
print "# generate hot blob expansion test cases"
print "--disable_warnings"
storage/tokudb/mysql-test/tokudb/t/change_column_char.py (1 change, Normal file → Executable file)
@@ -22,6 +22,7 @@ def gen_test(n):
print "DROP TABLE t;"

def main():
print "source include/have_tokudb.inc;"
print "# this test is generated by change_char.py"
print "# test char expansion"
print "--disable_warnings"
storage/tokudb/mysql-test/tokudb/t/change_column_char_binary.py (1 change, Normal file → Executable file)
@@ -23,6 +23,7 @@ def gen_test(n):
print "DROP TABLE t;"

def main():
print "source include/have_tokudb.inc;"
print "# this test is generated by change_char_binary.py"
print "# test that char(X) <-> binary(X) is not hot"
print "--disable_warnings"
storage/tokudb/mysql-test/tokudb/t/change_column_char_charbinary.py (1 change, Normal file → Executable file)
@@ -23,6 +23,7 @@ def gen_test(n):
print "DROP TABLE t;"

def main():
print "source include/have_tokudb.inc;"
print "# this test is generated by change_char_charbinary.py"
print "# test that char(X) <-> char(X) binary is not hot"
print "--disable_warnings"
storage/tokudb/mysql-test/tokudb/t/change_column_char_rename.py (1 change, Normal file → Executable file)
@@ -26,6 +26,7 @@ def gen_test(n):
print "DROP TABLE t;"

def main():
print "source include/have_tokudb.inc;"
print "# this test is generated by change_char_rename.py"
print "# test char expansion + rename is hot"
print "--disable_warnings"
storage/tokudb/mysql-test/tokudb/t/change_column_int.py (1 change, Normal file → Executable file)
@@ -25,6 +25,7 @@ def gen_test(types, values):
print "DROP TABLE ti;"
print "DROP TABLE t;"
def main():
print "source include/have_tokudb.inc;"
print "# this test is generated by change_int.py"
print "# test int expansion is hot"
print "--disable_warnings"
storage/tokudb/mysql-test/tokudb/t/change_column_int_key.py (1 change, Normal file → Executable file)
@@ -25,6 +25,7 @@ def gen_test(types):
print "DROP TABLE t;"

def main():
print "source include/have_tokudb.inc;"
print "# this test is generated by change_int_key.py"
print "# ensure that changing an int column that is part of a key is not hot"
print "--disable_warnings"
storage/tokudb/mysql-test/tokudb/t/change_column_int_not_supported.py (1 change, Normal file → Executable file)
@@ -21,6 +21,7 @@ def gen_tests(int_types, modifiers):
for from_modifier in range(len(modifiers)):
gen_tests_for_int(from_int, from_modifier, int_types, modifiers)
def main():
print "source include/have_tokudb.inc;"
print "# this test is generated by change_int_not_supported.py"
print "# ensure that int types are only expanded and are not converted to some other type"
print "--disable_warnings"
storage/tokudb/mysql-test/tokudb/t/change_column_int_rename.py (1 change, Normal file → Executable file)
@@ -27,6 +27,7 @@ def gen_test(types, values):
print "DROP TABLE ti;"
print "DROP TABLE t;"
def main():
print "source include/have_tokudb.inc;"
print "# this test is generated by change_int_rename.py"
print "--disable_warnings"
print "DROP TABLE IF EXISTS t, ti;"
@@ -6,6 +6,7 @@ old_types = [ 'VARCHAR(1)', 'VARBINARY(1)', 'INT', 'CHAR(1)', 'BINARY(1)' ]
new_types = [ 'VARCHAR(2)', 'VARBINARY(2)', 'BIGINT', 'CHAR(2)', 'BINARY(2)' ]

def main():
print "source include/have_tokudb.inc;"
print "# this test generated by change_multiple_columns.py"
print "# this test generated multiple column changes which should all fail since we support only one at a time"
print "--disable_warnings"
@@ -1,5 +1,6 @@
import sys
def main():
print "source include/have_tokudb.inc;"
print "# this test is generated by change_text.py"
print "# generate hot text expansion test cases"
print "--disable_warnings"
@@ -48,6 +48,9 @@ select trx_id,trx_mysql_thread_id from information_schema.tokudb_trx;
connection conn_a;
commit;
# verify that the lock on the 1st transaction is released and replaced by the lock for the 2nd transaction
let $wait_condition= select count(*)=1 from information_schema.tokudb_locks where locks_dname='./test/t-main';
source include/wait_condition.inc;

replace_column 1 TRX_ID 2 MYSQL_ID;
select * from information_schema.tokudb_locks;
select * from information_schema.tokudb_lock_waits;
@@ -39,6 +39,9 @@ eval select * from information_schema.tokudb_locks;
connection conn_a;
commit;
# verify that the lock on the 1st transaction is released and replaced by the lock for the 2nd transaction
let $wait_condition= select count(*)=1 from information_schema.tokudb_locks where locks_dname='./test/t-main';
source include/wait_condition.inc;

replace_column 1 TRX_ID 2 MYSQL_ID;
select * from information_schema.tokudb_locks;
@@ -1 +1 @@
$TOKUDB_OPT $TOKUDB_LOAD_ADD --loose-tokudb-check-jemalloc=0
$TOKUDB_OPT $TOKUDB_LOAD_ADD --loose-tokudb-check-jemalloc=0 --loose-tokudb-cache-size=512M --loose-tokudb-block-size=1M
@@ -1 +1 @@
$TOKUDB_OPT $TOKUDB_LOAD_ADD --loose-tokudb-check-jemalloc=0
$TOKUDB_OPT $TOKUDB_LOAD_ADD --loose-tokudb-check-jemalloc=0 --loose-tokudb-cache-size=512M --loose-tokudb-block-size=1M
@@ -1 +1 @@
$TOKUDB_OPT $TOKUDB_LOAD_ADD --loose-tokudb-check-jemalloc=0
$TOKUDB_OPT $TOKUDB_LOAD_ADD --loose-tokudb-check-jemalloc=0 --loose-tokudb-cache-size=512M --loose-tokudb-block-size=1M
@@ -1 +1 @@
$TOKUDB_OPT $TOKUDB_LOAD_ADD $TOKUDB_BACKUP_OPT $TOKUDB_BACKUP_LOAD_ADD --loose-tokudb-check-jemalloc=0
$TOKUDB_OPT $TOKUDB_LOAD_ADD $TOKUDB_BACKUP_OPT $TOKUDB_BACKUP_LOAD_ADD --loose-tokudb-check-jemalloc=0 --loose-tokudb-cache-size=512M --loose-tokudb-block-size=1M
@@ -1 +1 @@
$TOKUDB_OPT $TOKUDB_LOAD_ADD --loose-tokudb-check-jemalloc=0
$TOKUDB_OPT $TOKUDB_LOAD_ADD --loose-tokudb-check-jemalloc=0 --loose-tokudb-cache-size=512M --loose-tokudb-block-size=1M
@@ -1 +1 @@
$TOKUDB_OPT $TOKUDB_LOAD_ADD --loose-tokudb-check-jemalloc=0
$TOKUDB_OPT $TOKUDB_LOAD_ADD --loose-tokudb-check-jemalloc=0 --loose-tokudb-cache-size=512M --loose-tokudb-block-size=1M
@@ -1 +1 @@
--read-only=true --tokudb-rpl-unique-checks=false --tokudb-rpl-lookup-rows=false
--read-only=true --loose-tokudb-rpl-unique-checks=false --loose-tokudb-rpl-lookup-rows=false
@@ -1 +1 @@
$TOKUDB_OPT $TOKUDB_LOAD_ADD --loose-tokudb-check-jemalloc=0
$TOKUDB_OPT $TOKUDB_LOAD_ADD --loose-tokudb-check-jemalloc=0 --loose-tokudb-cache-size=512M --loose-tokudb-block-size=1M
@@ -1 +1 @@
$TOKUDB_OPT $TOKUDB_LOAD_ADD --loose-tokudb-check-jemalloc=0
$TOKUDB_OPT $TOKUDB_LOAD_ADD --loose-tokudb-check-jemalloc=0 --loose-tokudb-cache-size=512M --loose-tokudb-block-size=1M
@@ -74,9 +74,7 @@ char* gdb_path = NULL;
my_bool gdb_on_fatal = FALSE;
#endif

#if TOKUDB_CHECK_JEMALLOC
uint check_jemalloc = 0;
#endif
my_bool check_jemalloc = TRUE;

static MYSQL_SYSVAR_ULONGLONG(
cache_size,
@@ -416,19 +414,14 @@ static MYSQL_SYSVAR_BOOL(
true);
#endif

#if TOKUDB_CHECK_JEMALLOC
static MYSQL_SYSVAR_UINT(
static MYSQL_SYSVAR_BOOL(
check_jemalloc,
check_jemalloc,
0,
"check if jemalloc is linked",
PLUGIN_VAR_READONLY|PLUGIN_VAR_RQCMDARG,
"check if jemalloc is linked and transparent huge pages are disabled",
NULL,
NULL,
1,
0,
1,
0);
#endif
TRUE);


//******************************************************************************
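For reference, the BOOL form of the sysvar macro takes the SQL name, the backing my_bool, plugin flags, a help string, optional check and update callbacks, and a default, which is how the replacement above maps onto the old UINT declaration (the UINT form additionally carried default, minimum, maximum and block-size values). A minimal sketch with a hypothetical variable, not one from this commit:

    static my_bool example_flag = TRUE;  // hypothetical backing storage
    static MYSQL_SYSVAR_BOOL(
        example_flag,                    // exposed as tokudb_example_flag
        example_flag,                    // backing C variable
        PLUGIN_VAR_READONLY,             // flags
        "hypothetical read-only boolean toggle",
        NULL,                            // check callback
        NULL,                            // update callback
        TRUE);                           // default value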
@@ -948,9 +941,7 @@ st_mysql_sys_var* system_variables[] = {
MYSQL_SYSVAR(gdb_on_fatal),
#endif

#if TOKUDB_CHECK_JEMALLOC
MYSQL_SYSVAR(check_jemalloc),
#endif

// session vars
MYSQL_SYSVAR(alter_print_error),
@@ -88,9 +88,7 @@ extern char* gdb_path;
extern my_bool gdb_on_fatal;
#endif

#if TOKUDB_CHECK_JEMALLOC
extern uint check_jemalloc;
#endif
extern my_bool check_jemalloc;

#if TOKUDB_DEBUG
// used to control background job manager