Initial revision of NDB Cluster files
BitKeeper/etc/logging_ok: Logging to logging@openlogging.org accepted

parent 0ba6cb48d8
commit 6386c55cee
1835 changed files with 500032 additions and 0 deletions
BitKeeper/etc/logging_ok
@@ -76,6 +76,7 @@ konstantin@mysql.com
kostja@oak.local
lenz@kallisto.mysql.com
lenz@mysql.com
magnus@neptunus.(none)
marko@hundin.mysql.fi
miguel@hegel.(none)
miguel@hegel.br
ndb/BinDist.sh (new file, 121 lines)
@@ -0,0 +1,121 @@
#
# Invoked from scripts/make_binary_distribution as "sh BinDist.sh".
# Prints list of dirs and files to include under mysql/ndb.
#

# release notes

grep -v '^#' <<__END__
#ReleaseNotes.html
mysqlclusterenv.sh
__END__

# subset of bins, libs, includes

grep -v '^#' <<__END__
bin/
bin/ndb
bin/mgmtsrvr
bin/mgmtclient
bin/mysqlcluster
bin/mysqlcluster_install_db
bin/mysqlclusterd
bin/restore
bin/ndb_rep
bin/desc
bin/flexBench
bin/select_all
bin/select_count
bin/delete_all
bin/ndbsql
bin/drop_tab
bin/drop_index
bin/list_tables
bin/waiter
lib/
lib/libNEWTON_API.a
lib/libNEWTON_API.so
lib/libNDB_API.a
lib/libNDB_API.so
lib/libMGM_API.a
lib/libMGM_API.so
lib/libNDB_ODBC.so
lib/libMGM_API_pic.a
lib/libNDB_API_pic.a
include/
include/ndb_types.h
include/ndb_version.h
include/mgmapi/
include/mgmapi/mgmapi.h
include/mgmapi/mgmapi_debug.h
include/ndbapi/
include/ndbapi/ndbapi_limits.h
include/ndbapi/AttrType.hpp
include/ndbapi/Ndb.hpp
include/ndbapi/NdbApi.hpp
include/ndbapi/NdbConnection.hpp
include/ndbapi/NdbCursorOperation.hpp
include/ndbapi/NdbDictionary.hpp
include/ndbapi/NdbError.hpp
include/ndbapi/NdbEventOperation.hpp
include/ndbapi/NdbIndexOperation.hpp
include/ndbapi/NdbOperation.hpp
include/ndbapi/NdbPool.hpp
include/ndbapi/NdbRecAttr.hpp
include/ndbapi/NdbReceiver.hpp
include/ndbapi/NdbResultSet.hpp
include/ndbapi/NdbScanFilter.hpp
include/ndbapi/NdbScanOperation.hpp
include/ndbapi/NdbSchemaCon.hpp
include/ndbapi/NdbSchemaOp.hpp
include/newtonapi/dba.h
include/newtonapi/defs/pcn_types.h
__END__

#if [ -f /usr/local/lib/libstdc++.a ]; then
# cp /usr/local/lib/libstdc++.a lib/.
# echo lib/libstdc++.a
#fi
#if [ -f /usr/local/lib/libstdc++.so.5 ]; then
# cp /usr/local/lib/libstdc++.so.5 lib/.
# echo lib/libstdc++.so.5
#fi
#if [ -f /usr/local/lib/libgcc_s.so.1 ]; then
# cp /usr/local/lib/libgcc_s.so.1 lib/.
# echo lib/libgcc_s.so.1
#fi

# docs

#find docs/*.html docs/*.pdf -print | sort -t/

# demos

find demos -print | grep -v /SCCS | sort -t/

# examples

grep -v '^#' <<__END__
examples/
examples/Makefile
examples/ndbapi_example1/
examples/ndbapi_example1/Makefile
examples/ndbapi_example1/ndbapi_example1.cpp
examples/ndbapi_example2/
examples/ndbapi_example2/Makefile
examples/ndbapi_example2/ndbapi_example2.cpp
examples/ndbapi_example3/
examples/ndbapi_example3/Makefile
examples/ndbapi_example3/ndbapi_example3.cpp
examples/ndbapi_example4/
examples/ndbapi_example4/Makefile
examples/ndbapi_example4/ndbapi_example4.cpp
examples/ndbapi_example5/
examples/ndbapi_example5/Makefile
examples/ndbapi_example5/ndbapi_example5.cpp
examples/select_all/
examples/select_all/Makefile
examples/select_all/select_all.cpp
__END__

exit 0
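The script above only prints the file list; a hedged sketch of how a packaging step such as scripts/make_binary_distribution could consume that output (the tar invocation, the temp file names, and running from $NDB_TOP are assumptions, not part of this commit):

  cd $NDB_TOP                                 # assuming NDB_TOP points at the ndb/ tree
  sh BinDist.sh > /tmp/ndb-files.txt          # list of dirs and files to ship under mysql/ndb
  tar cf /tmp/mysql-ndb-bin.tar -T /tmp/ndb-files.txt   # GNU tar reads the name list from a file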
ndb/Defs.mk (new file, 84 lines)
@@ -0,0 +1,84 @@
include $(NDB_TOP)/config/config.mk
include $(NDB_TOP)/config/Defs.$(NDB_VERSION).mk
include $(NDB_TOP)/config/Defs.$(NDB_OS).$(NDB_ARCH).$(NDB_COMPILER).mk

ifeq ($(NDB_OS), WIN32)
# Windows specific definitions
OBJEXT := obj
LIBEXT := lib
LIBPREFIX :=
fixpath = `cygpath -w $1`
ar_rcs = lib -out:`cygpath -w $1` $2
link_so = link -DLL -OUT:`cygpath -w $1` $(WIN_LIBS) $2
#check-odbc = Y
USE_EDITLINE := N
#STRCASECMP is defined in include/portlib/PortDefs.h to _strcmpi
else
#Common definitions for almost all non-Windows environments
OBJEXT := o
LIBEXT := a
LIBPREFIX := lib
fixpath = $1
ar_rcs = $(AR_RCS) $1 $2
#check-odbc = $(findstring sqlext.h, $(wildcard /usr/include/sqlext.h) $(wildcard /usr/local/include/sqlext.h))
CCFLAGS_TOP += -DHAVE_STRCASECMP

endif

ifeq ($(NDB_OS), WIN32)
CCFLAGS_TOP += -DHAVE_STRDUP
NDB_STRLCPY := Y
NDB_STRLCAT := Y
SHLIBEXT := dll
endif

ifeq ($(NDB_OS), LINUX)
CCFLAGS_TOP += -DHAVE_STRDUP
NDB_STRLCAT := Y
NDB_STRLCPY := Y
SHLIBEXT := so
endif

ifeq ($(NDB_OS), SOLARIS)
CCFLAGS_TOP += -DHAVE_STRDUP
NDB_STRLCAT := Y
NDB_STRLCPY := Y
SHLIBEXT := so
endif

ifeq ($(NDB_OS), HPUX)
CCFLAGS_TOP += -DHAVE_STRDUP
NDB_STRLCAT := Y
NDB_STRLCPY := Y
SHLIBEXT := sl
endif

ifeq ($(NDB_OS), MACOSX)
CCFLAGS_TOP += -DHAVE_STRLCAT
CCFLAGS_TOP += -DHAVE_STRLCAT
CCFLAGS_TOP += -DHAVE_STRLCPY
CCFLAGS_TOP += -DNDBOUT_UINTPTR
SHLIBEXT := dylib
endif

ifeq ($(NDB_OS), OSE)
NDB_STRDUP := Y
NDB_STRLCAT := Y
NDB_STRLCPY := Y
SHLIBEXT := so
endif

ifeq ($(NDB_OS), SOFTOSE)
NDB_STRDUP := Y
NDB_STRLCAT := Y
NDB_STRLCPY := Y
SHLIBEXT := so
endif

ifeq ($(NDB_SCI), Y)
CCFLAGS_TOP += -DHAVE_SCI
endif

ifneq ($(findstring OSE, $(NDB_OS)),)
USE_EDITLINE := N
endif
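For reference, the fixpath/ar_rcs macros defined above expand to ordinary shell commands through $(call ...). A sketch with hypothetical arguments; AR_RCS is defined in the per-platform Defs.*.mk files not shown in this commit, so "ar rcs" here is an assumption:

  # non-WIN32 branch: $(call ar_rcs,libfoo.a,foo.o bar.o) runs roughly
  ar rcs libfoo.a foo.o bar.o
  # WIN32 branch: the same call drives the MSVC librarian, converting the path with cygpath
  lib -out:`cygpath -w libfoo.a` foo.o bar.o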
ndb/Epilogue.mk (new file, 853 lines)
@@ -0,0 +1,853 @@
# .KEEP_STATE:
|
||||
# bk test !!!
|
||||
|
||||
###
|
||||
# For building some intermediary targets in /tmp (only useful on solaris)
|
||||
ifneq ($(NDB_BUILDROOT),)
|
||||
NDB_TOPABS := $(shell cd $(NDB_TOP) && /bin/pwd)
|
||||
NDB_BUILDDIR := $(subst $(NDB_TOPABS),$(NDB_BUILDROOT),$(CURDIR))/
|
||||
ifeq ($(wildcard $(NDB_BUILDDIR)),)
|
||||
dummy := $(shell mkdir -p $(NDB_BUILDDIR))
|
||||
endif
|
||||
endif
|
||||
|
||||
###
|
||||
CCFLAGS_TOP += -DNDB_$(NDB_OS) -DNDB_$(NDB_ARCH) -DNDB_$(NDB_COMPILER)
|
||||
|
||||
ifdef BIN_TARGET
|
||||
BIN_EXE = Y
|
||||
endif
|
||||
|
||||
###
|
||||
#
|
||||
# OS specifics
|
||||
#
|
||||
|
||||
# Disable shared libraries on HP-UX for the time being.
|
||||
ifeq ($(NDB_OS), HPUX)
|
||||
SO_LIB := N
|
||||
PIC_LIB := N
|
||||
PIC_ARCHIVE := N
|
||||
NONPIC_ARCHIVE := Y
|
||||
endif
|
||||
|
||||
ifeq ($(NDB_OS), OSE)
|
||||
SO_LIB := N
|
||||
PIC_LIB := N
|
||||
PIC_ARCHIVE := N
|
||||
NONPIC_ARCHIVE := Y
|
||||
|
||||
ifdef BIN_TARGET
|
||||
BIN_LIB_TARGET := lib$(BIN_TARGET).a
|
||||
BIN_TARGET := lib$(BIN_TARGET).a
|
||||
endif
|
||||
endif
|
||||
|
||||
ifeq ($(NDB_OS), SOFTOSE)
|
||||
SO_LIB := N
|
||||
PIC_LIB := N
|
||||
PIC_ARCHIVE := N
|
||||
|
||||
ifdef BIN_TARGET
|
||||
BIN_EXE_TARGET := $(BIN_TARGET)
|
||||
BIN_LIB_TARGET := lib$(BIN_TARGET).a
|
||||
EXTRA_MAIN := osemain.o
|
||||
endif
|
||||
endif
|
||||
|
||||
ifeq ($(filter OSE, $(NDB_OS)),)
|
||||
BIN_EXE_TARGET := $(BIN_TARGET)
|
||||
endif
|
||||
|
||||
|
||||
ifeq ($(NDB_OS), MACOSX)
|
||||
.LIBPATTERNS= lib%.dylib lib%.a
|
||||
endif
|
||||
|
||||
###
|
||||
#
|
||||
#
|
||||
|
||||
###
|
||||
# External dependencies definition : the place we store libraries
|
||||
# we get from outside the NDB development group.
|
||||
EXTERNAL_DEPENDS_TOP=$(NDB_TOP)/src/external/$(NDB_OS).$(NDB_ARCH)
|
||||
|
||||
|
||||
###
|
||||
#
|
||||
# TYPE Handling
|
||||
|
||||
#
|
||||
# TYPE := kernel
|
||||
#
|
||||
ifneq ($(filter kernel, $(TYPE)),)
|
||||
CCFLAGS_LOC += \
|
||||
-I$(call fixpath,$(NDB_TOP)/src/kernel/vm) \
|
||||
-I$(call fixpath,$(NDB_TOP)/src/kernel/error) \
|
||||
-I$(call fixpath,$(NDB_TOP)/src/kernel) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/kernel) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/transporter) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/debugger) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/mgmcommon) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/ndbapi) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/util) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/portlib) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/logger)
|
||||
endif
|
||||
|
||||
#
|
||||
# TYPE := ndbapi
|
||||
#
|
||||
ifneq ($(filter ndbapi, $(TYPE)),)
|
||||
CCFLAGS_LOC += \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/kernel) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/transporter) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/debugger) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/mgmcommon) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/ndbapi) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/util) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/portlib) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/logger)
|
||||
endif
|
||||
|
||||
#
|
||||
# TYPE := ndbapiclient
|
||||
#
|
||||
ifneq ($(filter ndbapiclient, $(TYPE)),)
|
||||
CCFLAGS_LOC += \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/ndbapi)
|
||||
|
||||
BIN_TARGET_LIBS += NDB_API
|
||||
endif
|
||||
|
||||
#
|
||||
# TYPE := mgmapiclient
|
||||
#
|
||||
ifneq ($(filter mgmapiclient, $(TYPE)),)
|
||||
CCFLAGS_LOC += \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/mgmapi)
|
||||
|
||||
BIN_TARGET_LIBS += MGM_API
|
||||
endif
|
||||
|
||||
#
|
||||
# TYPE := ndbapitest
|
||||
#
|
||||
ifneq ($(filter ndbapitest, $(TYPE)),)
|
||||
CCFLAGS_LOC += \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/ndbapi) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/util) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/portlib) \
|
||||
-I$(call fixpath,$(NDB_TOP)/test/include) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/mgmapi)
|
||||
|
||||
BIN_TARGET_LIBS += NDBT
|
||||
LDFLAGS_LOC += -lNDB_API -lMGM_API -lm
|
||||
|
||||
endif
|
||||
|
||||
#
|
||||
# TYPE := signalsender
|
||||
#
|
||||
ifneq ($(filter signalsender, $(TYPE)),)
|
||||
CCFLAGS_LOC += \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/ndbapi) \
|
||||
-I$(call fixpath,$(NDB_TOP)/src/ndbapi) \
|
||||
-I$(call fixpath,$(NDB_TOP)/src/ndbapi/signal-sender) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/util) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/portlib) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/transporter) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/mgmcommon) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/kernel)
|
||||
|
||||
BIN_TARGET_LIBS += NDB_API
|
||||
BIN_TARGET_ARCHIVES += editline signal-sender
|
||||
|
||||
endif
|
||||
|
||||
|
||||
#
|
||||
# TYPE := repserver
|
||||
#
|
||||
ifneq ($(filter repserver, $(TYPE)),)
|
||||
CCFLAGS_LOC += \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/ndbapi) \
|
||||
-I$(call fixpath,$(NDB_TOP)/src) \
|
||||
-I$(call fixpath,$(NDB_TOP)/src/ndbapi) \
|
||||
-I$(call fixpath,$(NDB_TOP)/src/ndbapi/signal-sender) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/util) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/portlib) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/transporter) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/mgmcommon) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/kernel)
|
||||
endif
|
||||
|
||||
#
|
||||
# TYPE := odbcclient
|
||||
#
|
||||
|
||||
ifneq ($(filter odbcclient, $(TYPE)),)
|
||||
TYPE += util
|
||||
LDFLAGS_LOC += -lm
|
||||
#ifneq ($(call check-odbc),)
|
||||
ifneq ($(NDB_ODBC),N)
|
||||
ifeq ($(NDB_OS), SOLARIS)
|
||||
CCFLAGS_LOC += -I/usr/local/include
|
||||
BIN_TARGET_LIBS_DIRS += /usr/local/lib
|
||||
BIN_TARGET_LIBS += odbc odbcinst NDBT
|
||||
endif
|
||||
ifeq ($(NDB_OS), LINUX)
|
||||
BIN_TARGET_LIBS += odbc odbcinst NDBT
|
||||
endif
|
||||
ifeq ($(NDB_OS), MACOSX)
|
||||
BIN_TARGET_LIBS += odbc odbcinst NDBT
|
||||
endif
|
||||
ifeq ($(NDB_OS), IBMAIX)
|
||||
BIN_TARGET_LIBS += odbc odbcinst NDBT
|
||||
endif
|
||||
ifeq ($(NDB_OS), TRU64X)
|
||||
BIN_TARGET_LIBS += odbc odbcinst NDBT
|
||||
endif
|
||||
else
|
||||
BIN_EXE = N
|
||||
endif
|
||||
endif
|
||||
|
||||
#
|
||||
# TYPE := *
|
||||
#
|
||||
#
|
||||
# TYPE := util
|
||||
#
|
||||
ifneq ($(filter util, $(TYPE)),)
|
||||
CCFLAGS_LOC += -I$(call fixpath,$(NDB_TOP)/include/util) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/portlib) \
|
||||
-I$(call fixpath,$(NDB_TOP)/include/logger)
|
||||
BIN_TARGET_LIBS += logger general portlib
|
||||
endif
|
||||
|
||||
CCFLAGS_LOC += -I$(call fixpath,$(NDB_TOP)/include)
|
||||
|
||||
ifeq ($(NDB_SCI), Y)
|
||||
BIN_TARGET_LIBS += sisci
|
||||
BIN_TARGET_LIBS_DIRS += $(EXTERNAL_DEPENDS_TOP)/sci/lib
|
||||
|
||||
CCFLAGS_LOC += -I$(call fixpath,$(EXTERNAL_DEPENDS_TOP)/sci/include)
|
||||
endif
|
||||
|
||||
#
|
||||
# TYPE Handling
|
||||
###
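A minimal sketch (file and target names are hypothetical, and the exact include used by sub-directory makefiles is an assumption) of the contract the TYPE machinery above expects from a component Makefile: set TYPE, the sources and the target, then include Epilogue.mk, which turns those settings into libs/bins rules:

  cat > Makefile <<'EOF'
  include .defs.mk
  TYPE := ndbapiclient
  BIN_TARGET := my_ndb_tool
  SOURCES := my_ndb_tool.cpp
  include $(NDB_TOP)/Epilogue.mk
  EOF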
|
||||
|
||||
###
|
||||
#
|
||||
# First rule
|
||||
#
|
||||
first:
|
||||
$(MAKE) libs
|
||||
$(MAKE) bins
|
||||
|
||||
ifeq ($(findstring all,$(replace-targets)),)
|
||||
all: first
|
||||
endif
|
||||
|
||||
###
|
||||
#
|
||||
# Nice to have rules
|
||||
api: libs
|
||||
$(MAKE) -C $(NDB_TOP)/src/ndbapi bins
|
||||
|
||||
mgm: libs
|
||||
$(MAKE) -C $(NDB_TOP)/src/mgmsrv bins
|
||||
|
||||
ndb: libs
|
||||
$(MAKE) -C $(NDB_TOP)/src/kernel/ndb-main bins
|
||||
|
||||
apitest: first
|
||||
$(MAKE) -C $(NDB_TOP)/test/ndbapi all
|
||||
|
||||
#-lNDBT:
|
||||
# $(MAKE) -C $(NDB_TOP)/test/src all
|
||||
#
|
||||
#-lNDB_API: libs
|
||||
# $(MAKE) -C $(NDB_TOP)/src/ndbapi bins
|
||||
|
||||
#
|
||||
# Libs/Bins
|
||||
#
|
||||
ifdef PREREQ_LOC
|
||||
_libs:: $(PREREQ_LOC)
|
||||
_bins:: $(PREREQ_LOC)
|
||||
endif
|
||||
|
||||
L_DIRS := $(LIB_DIRS) $(DIRS)
|
||||
B_DIRS := $(BIN_DIRS) $(DIRS)
|
||||
A_DIRS := $(LIB_DIRS) $(BIN_DIRS) $(DIRS)
|
||||
|
||||
_libs::
|
||||
|
||||
_bins::
|
||||
|
||||
libs: _libs $(patsubst %, _libs_%, $(L_DIRS))
|
||||
$(patsubst %, _libs_%, $(L_DIRS)) : DUMMY
|
||||
$(MAKE) -C $(patsubst _libs_%,%,$@) libs
|
||||
|
||||
bins: _bins $(patsubst %, _bins_%, $(B_DIRS))
|
||||
$(patsubst %, _bins_%, $(B_DIRS)) : DUMMY
|
||||
$(MAKE) -C $(patsubst _bins_%,%,$@) bins
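The pattern rules above implement the directory recursion: each _libs_<dir> and _bins_<dir> pseudo-target simply re-invokes make in that subdirectory. A usage sketch from the shell (assuming NDB_TOP is set, as the makefiles require):

  cd $NDB_TOP
  make libs    # runs one "make -C <dir> libs" per directory in LIB_DIRS/DIRS
  make bins    # then one "make -C <dir> bins" per directory in BIN_DIRS/DIRS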
|
||||
|
||||
###
|
||||
#
|
||||
# Links
|
||||
_links:
|
||||
-$(NDB_TOP)/tools/make-links.sh $(NDB_TOP)/include `pwd`
|
||||
|
||||
links: _links $(patsubst %, _links_%, $(A_DIRS))
|
||||
$(patsubst %, _links_%, $(A_DIRS)) : DUMMY
|
||||
$(MAKE) -C $(patsubst _links_%,%,$@) links
|
||||
|
||||
|
||||
####
|
||||
#
|
||||
# OSE build_spec (
|
||||
ifdef SOURCES
|
||||
BS := Y
|
||||
endif
|
||||
|
||||
ifdef SOURCES_c
|
||||
BS := Y
|
||||
endif
|
||||
|
||||
_build_spec: Makefile
|
||||
ifdef BS
|
||||
@echo "TYPE = SWU" > build.spec
|
||||
@echo "include $(NDB_TOP)/Ndb.mk" >> build.spec
|
||||
# @for i in $(CCFLAGS_LOC); do echo "INC += $$i" >> build.spec ; done
|
||||
@for i in $(patsubst -I%, %, $(CCFLAGS_LOC)); do echo "INC += $$i" >> build.spec ; done
|
||||
@echo "INC += /vobs/cello/cls/rtosi_if/include" >> build.spec
|
||||
@echo "INC += /vobs/cello/cls/rtosi_if/include.@@@" >> build.spec
|
||||
@echo "INC += /vobs/cello/cls/rtosi_if/include.<<<" >> build.spec
|
||||
endif
|
||||
|
||||
build_spec: _build_spec $(patsubst %, _build_spec_%, $(A_DIRS))
|
||||
$(patsubst %, _build_spec_%, $(A_DIRS)) : DUMMY
|
||||
$(MAKE) -C $(patsubst _build_spec_%,%,$@) build_spec
|
||||
|
||||
###
|
||||
#
|
||||
# Phony targets
|
||||
|
||||
.PHONY: $(A_DIRS)
|
||||
|
||||
###
|
||||
#
|
||||
# Dummy rule
|
||||
|
||||
DUMMY:
|
||||
|
||||
###
|
||||
#
|
||||
# Definitions of...
|
||||
|
||||
PIC_DIR := $(NDB_BUILDDIR).pic
|
||||
A_TMP_DIR := $(NDB_BUILDDIR).a_tmp
|
||||
SO_TMP_DIR := $(NDB_BUILDDIR).so_tmp
|
||||
PIC_TMP_DIR := $(NDB_BUILDDIR).pic_tmp
|
||||
|
||||
$(PIC_DIR):
|
||||
mkdir -p $(PIC_DIR)
|
||||
|
||||
SRC_C := $(filter %.C, $(SOURCES))
|
||||
SRC_CPP := $(filter %.cpp, $(SOURCES))
|
||||
SRC_CC := $(filter %.cc, $(SOURCES))
|
||||
SRC_c := $(filter %.c, $(SOURCES)) $(filter %.c, $(SOURCES.c))
|
||||
SRC_YPP := $(filter %.ypp, $(SOURCES))
|
||||
SRC_LPP := $(filter %.lpp, $(SOURCES))
|
||||
|
||||
OBJECTS := $(SRC_C:%.C=%.$(OBJEXT)) \
|
||||
$(SRC_CPP:%.cpp=%.$(OBJEXT)) \
|
||||
$(SRC_CC:%.cc=%.$(OBJEXT)) \
|
||||
$(SRC_c:%.c=%.$(OBJEXT)) \
|
||||
$(SRC_YPP:%.ypp=%.tab.$(OBJEXT)) \
|
||||
$(SRC_LPP:%.lpp=%.yy.$(OBJEXT)) \
|
||||
$(OBJECTS_LOC)
|
||||
|
||||
PIC_OBJS := $(OBJECTS:%=$(PIC_DIR)/%)
|
||||
|
||||
LIB_DIR := $(NDB_TOP)/lib
|
||||
BIN_DIR := $(NDB_TOP)/bin
|
||||
|
||||
###
|
||||
#
|
||||
# ARCHIVE_TARGET
|
||||
#
|
||||
ifdef ARCHIVE_TARGET
|
||||
|
||||
ifndef NONPIC_ARCHIVE
|
||||
NONPIC_ARCHIVE := Y
|
||||
endif
|
||||
|
||||
ifeq ($(NONPIC_ARCHIVE), Y)
|
||||
_libs:: $(LIB_DIR)/$(LIBPREFIX)$(ARCHIVE_TARGET).$(LIBEXT)
|
||||
$(LIB_DIR)/$(LIBPREFIX)$(ARCHIVE_TARGET).$(LIBEXT) : $(OBJECTS)
|
||||
$(call ar_rcs,$@,$(OBJECTS))
|
||||
|
||||
endif # NONPIC_ARCHIVE := Y
|
||||
|
||||
ifeq ($(PIC_ARCHIVE), Y)
|
||||
_libs:: $(PIC_DIR) $(LIB_DIR)/$(LIBPREFIX)$(ARCHIVE_TARGET)_pic.$(LIBEXT)
|
||||
$(LIB_DIR)/$(LIBPREFIX)$(ARCHIVE_TARGET)_pic.$(LIBEXT) : $(PIC_OBJS)
|
||||
cd $(PIC_DIR) && $(call ar_rcs,../$@,$(OBJECTS))
|
||||
|
||||
PIC_DEP := Y
|
||||
|
||||
endif # PIC_ARCHIVE := Y
|
||||
|
||||
endif # ARCHIVE_TARGET
|
||||
|
||||
###
|
||||
#
|
||||
# LIB_TARGET
|
||||
#
|
||||
ifdef LIB_TARGET
|
||||
|
||||
ifeq ($(A_LIB), Y)
|
||||
|
||||
A_LIB_ARCHIVES := $(LIB_TARGET_ARCHIVES:%=$(LIB_DIR)/$(LIBPREFIX)%.$(LIBEXT))
|
||||
|
||||
_bins:: $(LIB_DIR)/$(LIBPREFIX)$(LIB_TARGET).$(LIBEXT)
|
||||
$(LIB_DIR)/$(LIBPREFIX)$(LIB_TARGET).$(LIBEXT) : $(A_LIB_ARCHIVES)
|
||||
@rm -rf $(A_TMP_DIR) && mkdir $(A_TMP_DIR)
|
||||
cd $(A_TMP_DIR) && for i in $^; do ar -x ../$$i; done && $(call ar_rcs,../$@,*.$(OBJEXT))
|
||||
$(NDB_TOP)/home/bin/ndb_deploy $@
|
||||
endif # A_LIB := Y
|
||||
|
||||
ifeq ($(SO_LIB), Y)
|
||||
ifneq ($(NDB_OS), WIN32)
|
||||
SO_LIB_ARCHIVES := $(LIB_TARGET_ARCHIVES:%=$(LIB_DIR)/$(LIBPREFIX)%_pic.$(LIBEXT))
|
||||
|
||||
_bins:: $(LIB_DIR)/$(LIBPREFIX)$(LIB_TARGET).$(SHLIBEXT)
|
||||
$(LIB_DIR)/$(LIBPREFIX)$(LIB_TARGET).$(SHLIBEXT) : $(SO_LIB_ARCHIVES)
|
||||
@rm -rf $(SO_TMP_DIR) && mkdir $(SO_TMP_DIR)
|
||||
cd $(SO_TMP_DIR) && for i in $^; do ar -x ../$$i; done
|
||||
ifneq ($(NDB_OS), MACOSX)
|
||||
$(SO) $@.new $(SO_TMP_DIR)/*.$(OBJEXT) -L$(LIB_DIR) $(LIB_TARGET_LIBS) $(LDFLAGS_LAST)
|
||||
rm -f $@; mv $@.new $@
|
||||
else
|
||||
$(SO) $@ $(SO_TMP_DIR)/*.$(OBJEXT) -L$(LIB_DIR) $(LIB_TARGET_LIBS) $(LDFLAGS_LAST)
|
||||
endif
|
||||
ifeq ($(NDB_VERSION), RELEASE)
|
||||
ifneq ($(NDB_OS), MACOSX)
|
||||
strip $@
|
||||
endif
|
||||
endif
|
||||
$(NDB_TOP)/home/bin/ndb_deploy $@
|
||||
else # WIN32
|
||||
SO_LIB_ARCHIVES := $(LIB_TARGET_ARCHIVES:%=$(LIB_DIR)/$(LIBPREFIX)%_pic.$(LIBEXT))
|
||||
|
||||
_bins:: $(LIB_DIR)/$(LIBPREFIX)$(LIB_TARGET).$(SHLIBEXT)
|
||||
$(LIB_DIR)/$(LIBPREFIX)$(LIB_TARGET).$(SHLIBEXT) : $(SO_LIB_ARCHIVES)
|
||||
@rm -rf $(SO_TMP_DIR) && mkdir $(SO_TMP_DIR)
|
||||
cd $(SO_TMP_DIR) && for i in $^; do ar -x ../$$i; done
|
||||
$(call link_so,$@.new,$(SO_TMP_DIR)/*.$(OBJEXT))
|
||||
rm -f $@; mv $@.new $@
|
||||
#ifeq ($(NDB_VERSION), RELEASE)
|
||||
# strip $@
|
||||
#endif
|
||||
|
||||
endif
|
||||
endif # SO_LIB := Y
|
||||
|
||||
ifeq ($(PIC_LIB), Y)
|
||||
|
||||
PIC_LIB_ARCHIVES := $(LIB_TARGET_ARCHIVES:%=$(LIB_DIR)/$(LIBPREFIX)%_pic.$(LIBEXT))
|
||||
|
||||
_bins:: $(LIB_DIR)/$(LIBPREFIX)$(LIB_TARGET)_pic.$(LIBEXT)
|
||||
$(LIB_DIR)/$(LIBPREFIX)$(LIB_TARGET)_pic.$(LIBEXT) : $(PIC_LIB_ARCHIVES)
|
||||
@rm -rf $(PIC_TMP_DIR) && mkdir $(PIC_TMP_DIR)
|
||||
cd $(PIC_TMP_DIR) && for i in $^; do ar -x ../$$i; done && $(call ar_rcs,../$@,*.$(OBJEXT))
|
||||
|
||||
endif # PIC_LIB := Y
|
||||
|
||||
endif # LIB_TARGET
|
||||
|
||||
###
|
||||
#
|
||||
# BIN_TARGET
|
||||
#
|
||||
ifeq ($(BIN_EXE), Y)
|
||||
ifneq ($(NDB_OS), WIN32)
|
||||
BIN_LIBS := $(BIN_TARGET_ARCHIVES:%=$(LIB_DIR)/$(LIBPREFIX)%.$(LIBEXT))
|
||||
BIN_LIBS += $(BIN_TARGET_LIBS:%=-l%)
|
||||
|
||||
BIN_DEPS := $(OBJECTS) $(EXTRA_MAIN) $(BIN_LIBS)
|
||||
BIN_LIB_DIRS := $(BIN_TARGET_LIBS_DIRS:%=-L%)
|
||||
|
||||
BIN_FLAGS := $(BIN_LIB_DIRS) $(BIN_DEPS)
|
||||
|
||||
VPATH := $(LIB_DIR) $(BIN_TARGET_LIBS_DIRS)
|
||||
_bins:: $(BIN_DIR)/$(BIN_TARGET)
|
||||
$(BIN_DIR)/$(BIN_TARGET) : $(BIN_DEPS)
|
||||
$(LINK.cc) $(LDFLAGS) $(LDLIBS) -L$(LIB_DIR) $(BIN_FLAGS) -o $@.new $(LDFLAGS_LAST)
|
||||
rm -f $@; mv $@.new $@
|
||||
ifeq ($(NDB_VERSION), RELEASE)
|
||||
ifneq ($(NDB_OS), MACOSX)
|
||||
strip $@
|
||||
endif
|
||||
endif
|
||||
$(NDB_TOP)/home/bin/ndb_deploy $@
|
||||
else # WIN32
|
||||
BIN_LIBS := $(foreach lib,$(BIN_TARGET_ARCHIVES),$(call fixpath,$(LIB_DIR)/$(LIBPREFIX)$(lib).$(LIBEXT)))
|
||||
BIN_LIBS += $(BIN_TARGET_LIBS:%=$(LIBPREFIX)%.$(LIBEXT))
|
||||
|
||||
BIN_DEPS := $(OBJECTS) $(BIN_TARGET_ARCHIVES:%=$(LIB_DIR)/$(LIBPREFIX)%.$(LIBEXT))
|
||||
BIN_LIB_DIRS := -libpath:$(call fixpath,$(LIB_DIR)) $(BIN_TARGET_LIBS_DIRS:%=-libpath:%)
|
||||
|
||||
BIN_FLAGS := $(BIN_LIB_DIRS)
|
||||
|
||||
VPATH := $(LIB_DIR) $(BIN_TARGET_LIBS_DIRS)
|
||||
_bins:: $(BIN_DIR)/$(BIN_TARGET).exe
|
||||
$(BIN_DIR)/$(BIN_TARGET).exe : $(BIN_DEPS)
|
||||
$(LINK.cc) -out:$(call fixpath,$@.new) $(OBJECTS) $(BIN_FLAGS) $(BIN_LIBS)
|
||||
rm -f $@; mv $@.new $@
|
||||
ifeq ($(NDB_VERSION), RELEASE)
|
||||
strip $@
|
||||
endif
|
||||
|
||||
endif
|
||||
endif
|
||||
|
||||
###
|
||||
#
|
||||
# SOURCES.sh
|
||||
#
|
||||
ifdef SOURCES.sh
|
||||
|
||||
BIN_SRC := $(SOURCES.sh:%=$(BIN_DIR)/%)
|
||||
|
||||
_bins:: $(BIN_SRC)
|
||||
|
||||
$(BIN_SRC) : $(SOURCES.sh)
|
||||
rm -f $(^:%=$(BIN_DIR)/%)
|
||||
cp $^ $(BIN_DIR)
|
||||
endif
|
||||
|
||||
#
|
||||
# Compile rules PIC objects
|
||||
#
|
||||
ifeq ($(NDB_OS), WIN32)
|
||||
OUT := -Fo
|
||||
else
|
||||
OUT := -o
|
||||
endif
|
||||
|
||||
$(PIC_DIR)/%.$(OBJEXT): %.C
|
||||
$(C++) $(OUT)$@ -c $(CCFLAGS) $(CFLAGS_$<) $(PIC) $<
|
||||
|
||||
$(PIC_DIR)/%.$(OBJEXT): %.cpp
|
||||
$(C++) $(OUT)$@ -c $(CCFLAGS) $(CFLAGS_$<) $(PIC) $<
|
||||
|
||||
$(PIC_DIR)/%.$(OBJEXT): %.cc
|
||||
$(C++) $(OUT)$@ -c $(CCFLAGS) $(CFLAGS_$<) $(PIC) $<
|
||||
|
||||
$(PIC_DIR)/%.$(OBJEXT): %.c
|
||||
$(CC) $(OUT)$@ -c $(CFLAGS) $(CFLAGS_$<) $(PIC) $<
|
||||
|
||||
#
|
||||
# Compile rules
|
||||
#
|
||||
%.$(OBJEXT) : %.cpp
|
||||
$(C++) $(OUT)$@ -c $(CCFLAGS) $(CFLAGS_$<) $(NON_PIC) $<
|
||||
|
||||
%.$(OBJEXT) : %.C
|
||||
$(C++) $(OUT)$@ -c $(CCFLAGS) $(CFLAGS_$<) $(NON_PIC) $<
|
||||
|
||||
%.$(OBJEXT) : %.cc
|
||||
$(C++) $(OUT)$@ -c $(CCFLAGS) $(CFLAGS_$<) $(NON_PIC) $<
|
||||
|
||||
%.$(OBJEXT) : %.c
|
||||
$(CC) $(OUT)$@ -c $(CFLAGS) $(CFLAGS_$<) $(NON_PIC) $<
|
||||
|
||||
%.s : %.C
|
||||
$(C++) -S $(CCFLAGS) $(CFLAGS_$<) $(NON_PIC) $<
|
||||
|
||||
%.s : %.cpp
|
||||
$(C++) -S $(CCFLAGS) $(CFLAGS_$<) $(NON_PIC) $<
|
||||
|
||||
%.s : %.cc
|
||||
$(C++) -S $(CCFLAGS) $(CFLAGS_$<) $(NON_PIC) $<
|
||||
|
||||
%.s : %.c
|
||||
$(CC) -S $(CCFLAGS) $(CFLAGS_$<) $(NON_PIC) $<
|
||||
|
||||
BISON = bison
|
||||
BISONHACK = :
|
||||
%.tab.cpp %.tab.hpp : %.ypp
|
||||
$(BISON) $<
|
||||
$(BISONHACK) $*.tab.cpp $*.tab.hpp
|
||||
|
||||
FLEX = flex
|
||||
FLEXHACK = :
|
||||
%.yy.cpp : %.lpp
|
||||
$(FLEX) -o$@ $<
|
||||
$(FLEXHACK) $@
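For orientation (hypothetical file names): with the pattern rules above, a grammar Foo.ypp is run through bison to produce Foo.tab.cpp and Foo.tab.hpp, and a scanner Foo.lpp through flex into Foo.yy.cpp; the OBJECTS list earlier then compiles those into Foo.tab.$(OBJEXT) and Foo.yy.$(OBJEXT). The equivalent shell commands:

  bison Foo.ypp             # writes Foo.tab.cpp / Foo.tab.hpp
  flex -oFoo.yy.cpp Foo.lpp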
|
||||
|
||||
###
|
||||
#
|
||||
# Defines regarding dependencies
|
||||
|
||||
DEPMK := $(NDB_BUILDDIR).depend.mk
|
||||
|
||||
DEPDIR := $(NDB_BUILDDIR).depend
|
||||
|
||||
DEPENDENCIES := $(SRC_C:%.C=$(DEPDIR)/%.d) \
|
||||
$(SRC_CC:%.cc=$(DEPDIR)/%.d) \
|
||||
$(SRC_CPP:%.cpp=$(DEPDIR)/%.d) \
|
||||
$(SRC_c:%.c=$(DEPDIR)/%.d) \
|
||||
$(SRC_YPP:%.ypp=$(DEPDIR)/%.tab.d) \
|
||||
$(SRC_LPP:%.lpp=$(DEPDIR)/%.yy.d)
|
||||
|
||||
###
|
||||
#
|
||||
# Dependency rule
|
||||
|
||||
_depend: $(DEPMK)
|
||||
|
||||
depend: _depend $(patsubst %, _depend_%, $(A_DIRS))
|
||||
|
||||
$(patsubst %, _depend_%, $(A_DIRS)) : DUMMY
|
||||
$(MAKE) -C $(patsubst _depend_%,%,$@) depend
|
||||
|
||||
###
|
||||
#
|
||||
# Clean dependencies
|
||||
|
||||
_clean_dep:
|
||||
-rm -rf $(DEPMK) $(DEPDIR)/*
|
||||
|
||||
clean_dep: _clean_dep $(patsubst %, _clean_dep_%, $(A_DIRS))
|
||||
|
||||
$(patsubst %, _clean_dep_%, $(A_DIRS)) : DUMMY
|
||||
$(MAKE) -C $(patsubst _clean_dep_%,%,$@) clean_dep
|
||||
|
||||
###
|
||||
#
|
||||
# Generate dependencies
|
||||
|
||||
$(DEPDIR):
|
||||
-@mkdir -p $(DEPDIR)
|
||||
|
||||
$(DEPDIR)/%.d: %.C
|
||||
@echo Generating depend for $<
|
||||
@$(MAKEDEPEND) $(CCFLAGS) $(CFLAGS_$<) $< >$@
|
||||
|
||||
$(DEPDIR)/%.d: %.c
|
||||
@echo Generating depend for $<
|
||||
@$(MAKEDEPEND) $(CCFLAGS) $(CFLAGS_$<) $< >$@
|
||||
|
||||
$(DEPDIR)/%.d: %.cpp
|
||||
@echo Generating depend for $<
|
||||
@$(MAKEDEPEND) $(CCFLAGS) $(CFLAGS_$<) $< >$@
|
||||
|
||||
$(DEPDIR)/%.d: %.cc
|
||||
@echo Generating depend for $<
|
||||
@$(MAKEDEPEND) $(CCFLAGS) $(CFLAGS_$<) $< >$@
|
||||
|
||||
ifeq ($(NDB_OS), WIN32)
|
||||
ifndef PIC_DEP
|
||||
DEP_PTN := -e 's/\(.*\)\.o[ :]*/\1.$(OBJEXT) $(DEPDIR)\/\1.d : /g'
|
||||
else
|
||||
DEP_PTN := -e 's/\(.*\)\.o[ :]*/\1.$(OBJEXT) $(PIC_DIR)\/\1.$(OBJEXT) $(DEPDIR)\/\1.d : /g'
|
||||
endif
|
||||
else
|
||||
ifndef PIC_DEP
|
||||
DEP_PTN := -e 's!\(.*\)\.$(OBJEXT)[ :]*!\1.$(OBJEXT) $(DEPDIR)\/\1.d : !g'
|
||||
else
|
||||
DEP_PTN := -e 's!\(.*\)\.$(OBJEXT)[ :]*!\1.$(OBJEXT) $(PIC_DIR)\/\1.$(OBJEXT) $(DEPDIR)\/\1.d : !g'
|
||||
endif
|
||||
endif
|
||||
#DEP_PTN += -e 's!/usr/include/[-+a-zA-Z0-9_/.]*!!g'
|
||||
#DEP_PTN += -e 's!/usr/local/lib/gcc-lib/[-+a-zA-Z0-9_/.]*!!g'
|
||||
|
||||
$(DEPMK): $(DEPDIR) $(SRC_YPP:%.ypp=%.tab.hpp) $(SRC_LPP:%.lpp=%.yy.cpp) $(DEPENDENCIES) $(wildcard $(NDB_TOP)/.update.d)
|
||||
@echo "updating .depend.mk"
|
||||
@sed $(DEP_PTN) /dev/null $(DEPENDENCIES) >$(DEPMK)
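What the sed pass above does, shown on a hypothetical dependency line: a line emitted by $(MAKEDEPEND) such as "foo.o: foo.cpp foo.hpp" is rewritten so that both the object file and its .d file are remade when a header changes. A stand-alone sketch of the non-PIC, non-WIN32 pattern (DEPDIR is ".depend" when NDB_BUILDROOT is unset):

  echo 'foo.o: foo.cpp foo.hpp' | sed -e 's!\(.*\)\.o[ :]*!\1.o .depend/\1.d : !g'
  # prints: foo.o .depend/foo.d : foo.cpp foo.hpp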
|
||||
|
||||
###
|
||||
#
|
||||
# clean
|
||||
#
|
||||
_clean:
|
||||
-rm -rf SunWS_cache $(PIC_DIR)/SunWS_cache
|
||||
ifeq ($(NONPIC_ARCHIVE), Y)
|
||||
-rm -f $(OBJECTS) $(LIB_DIR)/$(LIBPREFIX)$(ARCHIVE_TARGET).$(LIBEXT)
|
||||
endif
|
||||
ifeq ($(PIC_ARCHIVE), Y)
|
||||
-rm -f $(PIC_OBJS) $(LIB_DIR)/$(LIBPREFIX)$(ARCHIVE_TARGET)_pic.$(LIBEXT)
|
||||
endif
|
||||
ifdef BIN_TARGET
|
||||
-rm -f $(OBJECTS)
|
||||
endif
|
||||
ifdef LIB_TARGET
|
||||
ifeq ($(A_LIB), Y)
|
||||
-rm -f $(A_TMP_DIR)/*
|
||||
endif
|
||||
ifeq ($(SO_LIB), Y)
|
||||
-rm -f $(SO_TMP_DIR)/*
|
||||
endif
|
||||
ifeq ($(PIC_LIB), Y)
|
||||
-rm -f $(PIC_TMP_DIR)/*
|
||||
endif
|
||||
endif
|
||||
ifneq ($(SRC_YPP),)
|
||||
-rm -f $(SRC_YPP:%.ypp=%.tab.[hc]pp) $(SRC_YPP:%.ypp=%.output)
|
||||
endif
|
||||
ifneq ($(SRC_LPP),)
|
||||
-rm -f $(SRC_LPP:%.lpp=%.yy.*)
|
||||
endif
|
||||
ifdef CLEAN_LOC
|
||||
-rm -f $(CLEAN_LOC)
|
||||
endif
|
||||
|
||||
###
|
||||
#
|
||||
# clean all
|
||||
#
|
||||
clobber: cleanall
|
||||
_cleanall: _clean clean_links
|
||||
-rm -f osemain.con osemain.c
|
||||
ifdef LIB_TARGET
|
||||
ifeq ($(A_LIB), Y)
|
||||
-rm -f $(LIB_DIR)/$(LIBPREFIX)$(LIB_TARGET).$(LIBEXT)
|
||||
endif
|
||||
ifeq ($(SO_LIB), Y)
|
||||
-rm -f $(LIB_DIR)/$(LIBPREFIX)$(LIB_TARGET).$(SHLIBEXT)
|
||||
endif
|
||||
ifeq ($(PIC_LIB), Y)
|
||||
-rm -f $(LIB_DIR)/$(LIBPREFIX)$(LIB_TARGET)_pic.$(LIBEXT)
|
||||
endif
|
||||
endif
|
||||
ifdef BIN_TARGET
|
||||
-rm -f $(BIN_DIR)/$(BIN_TARGET)
|
||||
endif
|
||||
|
||||
clean_links:
|
||||
|
||||
###
|
||||
#
|
||||
# Dist clean
|
||||
#
|
||||
_distclean: _tidy
|
||||
rm -rf $(DEPDIR) $(PIC_DIR) $(PIC_TMP_DIR) $(SO_TMP_DIR) $(A_TMP_DIR) Sources build.spec
|
||||
|
||||
###
|
||||
#
|
||||
# tidy
|
||||
#
|
||||
_tidy: _cleanall _clean_dep
|
||||
-rm -f *~ *.$(OBJEXT) *.$(LIBEXT) *.${SHLIBEXT}
|
||||
|
||||
#
|
||||
# clean cleanall tidy - recursion
|
||||
#
|
||||
ifeq ($(findstring clean,$(replace-targets)),)
|
||||
clean: _clean $(patsubst %, _clean_%, $(A_DIRS))
|
||||
endif
|
||||
|
||||
$(patsubst %, _clean_%, $(A_DIRS)) : DUMMY
|
||||
$(MAKE) -C $(patsubst _clean_%,%,$@) clean
|
||||
|
||||
cleanall: _cleanall $(patsubst %, _cleanall_%, $(A_DIRS))
|
||||
|
||||
$(patsubst %, _cleanall_%, $(A_DIRS)) : DUMMY
|
||||
$(MAKE) -C $(patsubst _cleanall_%,%,$@) cleanall
|
||||
|
||||
tidy: _tidy $(patsubst %, _tidy_%, $(A_DIRS))
|
||||
|
||||
$(patsubst %, _tidy_%, $(A_DIRS)) : DUMMY
|
||||
$(MAKE) -C $(patsubst _tidy_%,%,$@) tidy
|
||||
|
||||
distclean: _distclean $(patsubst %, _distclean_%, $(A_DIRS))
|
||||
|
||||
$(patsubst %, _distclean_%, $(A_DIRS)) : DUMMY
|
||||
$(MAKE) -C $(patsubst _distclean_%,%,$@) distclean
|
||||
|
||||
###
|
||||
#
|
||||
# Guess configuration
|
||||
|
||||
$(NDB_TOP)/config/config.mk: $(NDB_TOP)/config/GuessConfig.sh
|
||||
$(NDB_TOP)/config/GuessConfig.sh -D
|
||||
|
||||
$(NDB_TOP)/config/Defs.$(NDB_OS).$(NDB_ARCH).$(NDB_COMPILER).mk: $(NDB_TOP)/config/config.mk
|
||||
$(NDB_TOP)/config/Defs.$(NDB_VERSION).mk: $(NDB_TOP)/config/config.mk
|
||||
|
||||
###
|
||||
# Soft OSE environment stuff
|
||||
#
|
||||
osemain.con: $(NDB_TOP)/src/env/softose/osemain_con.org
|
||||
cp $< $@
|
||||
echo "PRI_PROC(init_$(BIN_TARGET), init_$(BIN_TARGET), 65535, 3, ndb, 0, NULL)" >> $@
|
||||
|
||||
osemain.c: $(OSE_LOC)/sfk-solaris2/krn-solaris2/src/osemain.c
|
||||
ln -s $< $@
|
||||
|
||||
osemain.o : osemain.con
|
||||
|
||||
$(DEPDIR)/osemain.d : osemain.con
|
||||
|
||||
###
|
||||
#
|
||||
# These targets don't need dependencies
|
||||
|
||||
NO_DEP=clean clobber cleanall tidy clean_dep $(DEPDIR) build_spec \
|
||||
$(NDB_TOP)/config/config.mk distclean osemain.con osemain.c
|
||||
|
||||
ifeq ($(filter $(NO_DEP), $(MAKECMDGOALS)),)
|
||||
ifneq ($(strip $(DEPENDENCIES)),)
|
||||
include $(DEPMK)
|
||||
endif
|
||||
endif
|
||||
|
||||
###
|
||||
#
|
||||
# Auxiliary targets
|
||||
|
||||
sources: Sources
|
||||
|
||||
Sources: Makefile
|
||||
@rm -f $@
|
||||
@for f in Makefile $(A_DIRS) $(SOURCES) $(SOURCES.c); do echo $$f; done >$@
|
||||
|
||||
###
|
||||
#
|
||||
# TAG generation for emacs and vi folks
|
||||
#
|
||||
# In emacs "Esc- ." or "M- ." to find a symbol location
|
||||
# In vi use the :tag command
|
||||
# by convention:
|
||||
# TAGS is used with emacs
|
||||
# tags is used with vi
|
||||
#
|
||||
# Hopefully the make is being done from $(NDB_TOP)/src
|
||||
# and your TAGS/tags file then is in the same directory.
|
||||
|
||||
TAGS: DUMMY
|
||||
rm -f TAGS
|
||||
find $(NDB_TOP) -name "*.[ch]" | xargs $(ETAGS) --append
|
||||
find $(NDB_TOP) -name "*.[ch]pp" | xargs $(ETAGS) --append
|
||||
|
||||
tags: DUMMY
|
||||
rm -f tags
|
||||
find $(NDB_TOP) -name "*.[ch]" | xargs $(CTAGS) --append
|
||||
find $(NDB_TOP) -name "*.[ch]pp" | xargs $(CTAGS) --append
|
||||
|
||||
install:
|
||||
|
||||
|
||||
ebrowse: DUMMY
|
||||
cd $(NDB_TOP); rm -f EBROWSE
|
||||
cd $(NDB_TOP); find . -name "*.hpp" -or -name "*.cpp" -or -name "*.h" -or -name "*.c" > tmpfile~
|
||||
cd $(NDB_TOP); ebrowse --file tmpfile~
|
||||
cd $(NDB_TOP); rm -f tmpfile~
|
ndb/Makefile (new file, 62 lines)
@@ -0,0 +1,62 @@
include .defs.mk

DIRS := src test tools examples

# hack before full autoconf
replace-targets := all clean
NDB_RELEASE := $(shell ../scripts/mysql_config --version)

include $(NDB_TOP)/Epilogue.mk

_libs_test : _bins_src
_libs_tools : _libs_test
_libs_examples : _bins_src
_bins_src : _libs_src
_bins_tools : _bins_src

# always release compile except for ndbapi static lib
all:
	$(MAKE) -C src/ndbapi libs
	$(MAKE) libs NDB_VERSION=RELEASE
	$(MAKE) bins NDB_VERSION=RELEASE
ifeq ($(NDB_OS),LINUX)
	NDB_RELEASE=$(NDB_RELEASE) $(MAKE) -j1 -C docs all </dev/null || :
endif

# old distclean matches clean better
clean: distclean
	$(MAKE) -C docs clean

nuke-deps:
	find . -name '.depend*' | xargs rm -rf

vim-tags:
	bk sfiles -g | ctags --c-types=+p --extra=+fq -L -

cvs-update:
ifeq ($(NDB_VERSION),main)
	-cvs update -d
else
ifeq ($(NDB_TAG),HEAD)
	-cvs -q update
	-cd include && cvs -q update -d
	-cd src && cvs -q update -d
	-cd test && cvs -q update -d
	-cd tools && cvs -q update -d
else
	-cvs -q update -r $(NDB_TAG)
	-cd include && cvs -q update -d -r $(NDB_TAG)
	-cd src && cvs -q update -d -r $(NDB_TAG)
	-cd test && cvs -q update -d -r $(NDB_TAG)
	-cd tools && cvs -q update -d -r $(NDB_TAG)
endif
endif
	make nuke-deps
	make vim-tags
	make TAGS

bk-update:
	bk pull
	make nuke-deps
	make vim-tags
	make TAGS
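A usage sketch for this top-level makefile (the working-directory path is an assumption; the makefile itself only requires that .defs.mk and NDB_TOP are set up by the surrounding build scripts):

  cd ndb
  make           # "all": release-compiles libs, then bins, then the docs on Linux
  make clean     # mapped onto the old distclean, plus "make -C docs clean"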
ndb/README (new file, 7 lines)
@@ -0,0 +1,7 @@
INSTALLATION
To compile a pentium version of MySQL Cluster from this BK clone do:

shell> cd /home/bk/mysql-4.1-ndb
shell> BUILD/compile-pentium-debug -c --prefix=/usr/local/mysql-4.1-ndb
shell> make
ndb/bin/.empty (new file, 0 lines)
ndb/bin/check-regression.sh (new executable file, 180 lines)
@@ -0,0 +1,180 @@
#!/bin/sh
#!/bin/sh
|
||||
# NAME
|
||||
# check-regression.sh
|
||||
#
|
||||
# SYNOPSIS
|
||||
# check-regression.sh
|
||||
#
|
||||
# DESCRIPTION
|
||||
#
|
||||
# This script must be run before any major CVS check-ins are done.
|
||||
# It will perform a number of regression tests to check that
|
||||
# nothing is broken.
|
||||
#
|
||||
# OPTIONS
|
||||
#
|
||||
# EXAMPLES
|
||||
#
|
||||
#
|
||||
# ENVIRONMENT
|
||||
# NDB_PROJ_HOME Home dir for ndb
|
||||
# verbose verbose printouts
|
||||
#
|
||||
# FILES
|
||||
# $NDB_PROJ_HOME/lib/funcs.sh general shell script functions
|
||||
#
|
||||
#
|
||||
# SEE ALSO
|
||||
#
|
||||
# DIAGNOSTICS
|
||||
#
|
||||
#
|
||||
# VERSION
|
||||
# 1.0
|
||||
#
|
||||
# AUTHOR
|
||||
#
|
||||
#
|
||||
|
||||
. $NDB_PROJ_HOME/lib/funcs.sh # Load some good stuff
|
||||
|
||||
synopsis="check-regression.sh"
|
||||
progname=`basename $0`
|
||||
|
||||
numOfTestsOK=0
|
||||
numOfTestsFailed=0
|
||||
|
||||
LOG=check-regression.`date '+%Y-%m-%d'`
|
||||
|
||||
executeTest()
|
||||
{
|
||||
eval "$@" | tee -a $LOG
|
||||
|
||||
if [ $? -eq 0 ]
|
||||
then
|
||||
echo "SUCCESS: $@"
|
||||
numOfTestsOK=`expr $numOfTestsOK + 1`
|
||||
else
|
||||
echo "FAILED: $@"
|
||||
numOfTestsFailed=`expr $numOfTestsFailed + 1`
|
||||
fi
|
||||
}
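One caveat worth noting (an observation about the function above, not part of the original script): because the test command is piped into tee, the following check of $? sees tee's exit status rather than the test program's. A bash-only sketch of a variant that returns the test's own status (helper name is hypothetical):

  run_logged()
  {
      eval "$@" 2>&1 | tee -a $LOG
      return ${PIPESTATUS[0]}    # status of the test command, not of tee (bash only)
  }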
|
||||
|
||||
#
|
||||
# INFO
|
||||
#
|
||||
trace "Starting: `date`"
|
||||
trace "NDB_PROJ_HOME = $NDB_PROJ_HOME"
|
||||
trace "NDB_TOP = $NDB_TOP"
|
||||
|
||||
#
|
||||
# THE TESTS TO EXECUTE
|
||||
#
|
||||
|
||||
# Testsuite: testDataBuffers
|
||||
# Number of tests: 1
|
||||
executeTest 'drop_tab ' TB00 TB01 TB02 TB03 TB04 TB05 TB06 TB07 TB08 TB09 TB10 TB11 TB12 TB13 TB14 TB15
|
||||
executeTest 'testDataBuffers'
|
||||
executeTest 'drop_tab ' TB00 TB01 TB02 TB03 TB04 TB05 TB06 TB07 TB08 TB09 TB10 TB11 TB12 TB13 TB14 TB15
|
||||
|
||||
TABLES="T9 T13"
|
||||
|
||||
# Testsuite: testBasic
|
||||
# Number of tests: 16
|
||||
executeTest 'testBasic -n PkInsert' $TABLES
|
||||
executeTest 'testBasic -n PkRead' $TABLES
|
||||
executeTest 'testBasic -n PkUpdate' $TABLES
|
||||
executeTest 'testBasic -n PkDelete' $TABLES
|
||||
#executeTest 'testBasic -n UpdateAndRead'
|
||||
#executeTest 'testBasic -n PkReadAndLocker'
|
||||
#executeTest 'testBasic -n PkReadAndLocker2'
|
||||
#executeTest 'testBasic -n PkReadUpdateAndLocker'
|
||||
#executeTest 'testBasic -n ReadWithLocksAndInserts'
|
||||
#executeTest 'testBasic -n ReadConsistency'
|
||||
#executeTest 'testBasic -n PkInsertTwice'
|
||||
#executeTest 'testBasic -n Fill'
|
||||
#executeTest 'testBasic -n FillTwice'
|
||||
#executeTest 'testBasic -n NoCommitSleep'
|
||||
#executeTest 'testBasic -n NoCommit626'
|
||||
#executeTest 'testBasic -n NoCommitAndClose'
|
||||
|
||||
# Testsuite: testBasicAsynch
|
||||
# Number of tests: 4
|
||||
executeTest 'testBasicAsynch -n PkInsertAsynch' $TABLES
|
||||
executeTest 'testBasicAsynch -n PkReadAsynch' $TABLES
|
||||
executeTest 'testBasicAsynch -n PkUpdateAsynch' $TABLES
|
||||
executeTest 'testBasicAsynch -n PkDeleteAsynch' $TABLES
|
||||
|
||||
# Testsuite: testDict
|
||||
# Number of tests: 6
|
||||
#executeTest 'testDict -n CreateAndDrop'
|
||||
#executeTest 'testDict -n CreateAndDropWithData'
|
||||
#executeTest 'testDict -n CreateAndDropDuring'
|
||||
#executeTest 'testDict -n CreateInvalidTables'
|
||||
#executeTest 'testDict -n CreateTableWhenDbIsFull'
|
||||
#executeTest 'testDict -n CreateMaxTables'
|
||||
|
||||
# Testsuite: testScan
|
||||
# Number of tests: 34
|
||||
#executeTest 'testScan -n ScanRead'
|
||||
#executeTest 'testScan -n ScanRead16'
|
||||
executeTest 'testScan -n ScanRead240' $TABLES
|
||||
executeTest 'testScan -n ScanUpdate' $TABLES
|
||||
executeTest 'testScan -n ScanUpdate2' $TABLES
|
||||
executeTest 'testScan -n ScanDelete' $TABLES
|
||||
executeTest 'testScan -n ScanDelete2' $TABLES
|
||||
#executeTest 'testScan -n ScanUpdateAndScanRead'
|
||||
#executeTest 'testScan -n ScanReadAndLocker'
|
||||
#executeTest 'testScan -n ScanReadAndPkRead'
|
||||
#executeTest 'testScan -n ScanRead488'
|
||||
#executeTest 'testScan -n ScanWithLocksAndInserts'
|
||||
#executeTest 'testScan -n ScanReadAbort'
|
||||
#executeTest 'testScan -n ScanReadAbort15'
|
||||
#executeTest 'testScan -n ScanReadAbort16'
|
||||
#executeTest 'testScan -n ScanUpdateAbort16'
|
||||
#executeTest 'testScan -n ScanReadAbort240'
|
||||
#executeTest 'testScan -n ScanReadRestart'
|
||||
#executeTest 'testScan -n ScanReadRestart16'
|
||||
#executeTest 'testScan -n ScanReadRestart32'
|
||||
#executeTest 'testScan -n ScanUpdateRestart'
|
||||
#executeTest 'testScan -n ScanUpdateRestart16'
|
||||
#executeTest 'testScan -n CheckGetValue'
|
||||
#executeTest 'testScan -n CloseWithoutStop'
|
||||
#executeTest 'testScan -n NextScanWhenNoMore'
|
||||
#executeTest 'testScan -n ExecuteScanWithoutOpenScan'
|
||||
#executeTest 'testScan -n OnlyOpenScanOnce'
|
||||
#executeTest 'testScan -n OnlyOneOpInScanTrans'
|
||||
#executeTest 'testScan -n OnlyOneOpBeforeOpenScan'
|
||||
#executeTest 'testScan -n OnlyOneScanPerTrans'
|
||||
#executeTest 'testScan -n NoCloseTransaction'
|
||||
#executeTest 'testScan -n CheckInactivityTimeOut'
|
||||
#executeTest 'testScan -n CheckInactivityBeforeClose'
|
||||
#executeTest 'testScan -n CheckAfterTerror'
|
||||
|
||||
# Testsuite: testScanInterpreter
|
||||
# Number of tests: 1
|
||||
#executeTest 'testScanInterpreter -n ScanLessThan'
|
||||
|
||||
TABLES="T6 T13"
|
||||
|
||||
# Testsuite: testSystemRestart
|
||||
# Number of tests: 4
|
||||
executeTest 'testSystemRestart -l 1 -n SR1' $TABLES
|
||||
executeTest 'testSystemRestart -l 1 -n SR2' $TABLES
|
||||
#executeTest 'testSystemRestart -n SR_UNDO'
|
||||
#executeTest 'testSystemRestart -n SR_FULLDB'
|
||||
|
||||
# TESTS FINISHED
|
||||
trace "Finished: `date`"
|
||||
|
||||
#
|
||||
# TEST SUMMARY
|
||||
#
|
||||
if [ $numOfTestsFailed -eq 0 ]
|
||||
then
|
||||
echo "-- REGRESSION TEST SUCCESSFUL --"
|
||||
else
|
||||
echo "-- REGRESSION TEST FAILED!! --"
|
||||
fi
|
||||
echo "Number of successful tests: $numOfTestsOK"
|
||||
echo "Number of failed tests : $numOfTestsFailed"
|
ndb/bin/makeTestPrograms_html.sh (new executable file, 22 lines)
@@ -0,0 +1,22 @@
#!/bin/sh
rm $1
touch $1
echo "<table border="1" width=640>" >> $1
echo "<tr>" >> $1
echo "<td><b>Name</b></td><td> </td><td width="70%"><b>Description</b></td>" >> $1
echo "</tr>" >> $1
testBasic --print_html >> $1
testBackup --print_html >> $1
testBasicAsynch --print_html >> $1
testDict --print_html >> $1
testBank --print_html >> $1
testIndex --print_html >> $1
testNdbApi --print_html >> $1
testNodeRestart --print_html >> $1
testOperations --print_html >> $1
testRestartGci --print_html >> $1
testScan --print_html >> $1
testScanInterpreter --print_html >> $1
testSystemRestart --print_html >> $1
echo "</table>" >> $1
ndb/bin/mysqlcluster (new executable file, 11 lines)
@@ -0,0 +1,11 @@
#!/bin/sh
if [ -z "$MYSQLCLUSTER_TOP" -o ! -d "$MYSQLCLUSTER_TOP" ]; then
  echo "MYSQLCLUSTER_TOP not set or directory does not exist"
  exit 1
fi
if [ -z "$MYSQLCLUSTER_TOP" -o ! -d "$MYSQLCLUSTER_TOP/ndb" ]; then
  echo "$MYSQLCLUSTER_TOP/ndb directory does not exist"
  exit 1
fi

mysql --socket=$MYSQLCLUSTER_TOP/data/mysqlcluster.sock $*
ndb/bin/mysqlcluster_install_db (new executable file, 119 lines)
@@ -0,0 +1,119 @@
#!/bin/sh
#!/bin/sh
|
||||
|
||||
NDB_HOME=
|
||||
export NDB_CONNECTSTRING
|
||||
if [ -z "$MYSQLCLUSTER_TOP" ]; then
|
||||
echo "MYSQLCLUSTER_TOP not set"
|
||||
exit 1
|
||||
fi
|
||||
if [ -d "$MYSQLCLUSTER_TOP" ]; then :; else
|
||||
echo "$MYSQLCLUSTER_TOP directory does not exist"
|
||||
exit 1
|
||||
fi
|
||||
if [ -d "$MYSQLCLUSTER_TOP/ndb" ]; then :; else
|
||||
echo "$MYSQLCLUSTER_TOP/ndb directory does not exist"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
start_default_ndbcluster() {
|
||||
|
||||
# configurable parameters, make sure to change in mysqlclusterd as well
|
||||
MYSQLCLUSTER_FILESYSTEM=$MYSQLCLUSTER_TOP/data/mysqlclusterfs
|
||||
MYSQLCLUSTER_PORT_BASE="22" # using ports MYSQLCLUSTER_PORT_BASE{"00","01", etc}
|
||||
# end configurable parameters
|
||||
|
||||
# do some checks
|
||||
|
||||
NDB_CONNECTSTRING=
|
||||
|
||||
[ -d "$MYSQLCLUSTER_FILESYSTEM" ] || mkdir "$MYSQLCLUSTER_FILESYSTEM"
|
||||
if [ -d "$MYSQLCLUSTER_FILESYSTEM" ]; then :; else
|
||||
echo "$MYSQLCLUSTER_FILESYSTEM filesystem directory does not exist"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
# set some helper variables
|
||||
|
||||
NDB_HOST="localhost"
|
||||
NDB_PORT=$MYSQLCLUSTER_PORT_BASE"00"
|
||||
NDB_CONNECTSTRING_BASE="host=$NDB_HOST:$NDB_PORT;nodeid="
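# Illustration only (follows the defaults above, not an extra configuration step):
# with NDB_HOST=localhost and MYSQLCLUSTER_PORT_BASE=22, NDB_PORT becomes 2200, so an
# API node with node id 11 ends up with
#   NDB_CONNECTSTRING="host=localhost:2200;nodeid=11"
# which matches what mysqlclusterd exports for the mysqld API node.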
|
||||
|
||||
|
||||
# Edit file system path and ports in config file
|
||||
|
||||
cd $MYSQLCLUSTER_FILESYSTEM
|
||||
sed \
|
||||
-e s,"WRITE_PATH_TO_FILESYSTEM_2_HERE",$MYSQLCLUSTER_FILESYSTEM,g \
|
||||
-e s,"CHOOSE_PORT_BASE",$MYSQLCLUSTER_PORT_BASE,g \
|
||||
< $MYSQLCLUSTER_TOP/ndb/demos/config-templates/config_template-install.ini \
|
||||
> config.ini
|
||||
|
||||
|
||||
# Start management server as daemon
|
||||
|
||||
NDB_ID="1"
|
||||
NDB_CONNECTSTRING=$NDB_CONNECTSTRING_BASE$NDB_ID
|
||||
#xterm -e mgmtsrvr -c $MYSQLCLUSTER_FILESYSTEM/config.ini &
|
||||
if mgmtsrvr -d -c $MYSQLCLUSTER_FILESYSTEM/config.ini ; then :; else
|
||||
echo "Unable to start mgmtsrvr"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
# Start database node
|
||||
|
||||
cd $MYSQLCLUSTER_FILESYSTEM # the database node writes its output to the directory it is started from
|
||||
NDB_ID="2"
|
||||
NDB_CONNECTSTRING=$NDB_CONNECTSTRING_BASE$NDB_ID
|
||||
#xterm -T "NDB Cluster DB Node" -geometry 80x10 -xrm *.hold:true -e ndb -i &
|
||||
ndb -d -i &
|
||||
|
||||
# Start xterm for application programs
|
||||
|
||||
NDB_ID="3"
|
||||
NDB_CONNECTSTRING=$NDB_CONNECTSTRING_BASE$NDB_ID
|
||||
#xterm -T "NDB Cluster API Node" -geometry 80x10 &
|
||||
echo set before running ndbApi programs > export NDB_CONNECTSTRING=$NDB_CONNECTSTRING
|
||||
|
||||
# Start management client
|
||||
|
||||
#xterm -T "NDB Management Client" -geometry 80x10 -xrm *.hold:true -e mgmtclient $NDB_HOST $NDB_PORT &
|
||||
echo "NDB Management Client starts with: mgmtclient $NDB_HOST $NDB_PORT"
|
||||
|
||||
# test if Ndb Cluster starts properly
|
||||
|
||||
NDB_ID="11"
|
||||
NDB_CONNECTSTRING=$NDB_CONNECTSTRING_BASE$NDB_ID
|
||||
if list_tables | grep "NDBT_ProgramExit: 0 - OK"; then :; else
|
||||
echo "Ndbcluster startup failed"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
start_mysql_install_db() {
|
||||
# run install of regular MySQL Server
|
||||
|
||||
cd $MYSQLCLUSTER_TOP
|
||||
scripts/mysql_install_db --basedir=$MYSQLCLUSTER_TOP --datadir=$MYSQLCLUSTER_TOP/data --socket=$MYSQLCLUSTER_TOP/data/mysqlcluster.sock $*
|
||||
}
|
||||
|
||||
if test "$1" = "ndb_started"
|
||||
then
|
||||
shift
|
||||
mgmt_host=$1
|
||||
shift
|
||||
mgmt_port=$1
|
||||
shift
|
||||
if [ -z "$mgmt_host" -o -z "$mgmt_port" ]; then
|
||||
echo "syntax: ndb_started hostname port"
|
||||
exit 1
|
||||
fi
|
||||
NDB_CONNECTSTRING="host=$mgmt_host:$mgmt_port;nodeid=11"
|
||||
echo using NDB_CONNECTSTRING=$NDB_CONNECTSTRING
|
||||
start_mysql_install_db $*
|
||||
else
|
||||
start_default_ndbcluster
|
||||
start_mysql_install_db
|
||||
fi
|
||||
|
ndb/bin/mysqlclusterd (new executable file, 34 lines)
@@ -0,0 +1,34 @@
#!/bin/sh

# configurable parameters
MYSQLCLUSTER_PORT_BASE="22"
# end configurable parameters

if [ -z "$MYSQLCLUSTER_TOP" -o ! -d "$MYSQLCLUSTER_TOP" ]; then
  echo "MYSQLCLUSTER_TOP not set or directory does not exist"
  exit 1
fi
if [ -z "$MYSQLCLUSTER_TOP" -o ! -d "$MYSQLCLUSTER_TOP/ndb" ]; then
  echo "$MYSQLCLUSTER_TOP/ndb directory does not exist"
  exit 1
fi

if test "$1" = "ndb_started"
then
  shift
  mgmt_host=$1
  shift
  mgmt_port=$1
  shift
  if [ -z "$mgmt_host" -o -z "$mgmt_port" ]; then
    echo "syntax: ndb_started hostname port"
    exit 1
  fi
  NDB_CONNECTSTRING="host=$mgmt_host:$mgmt_port;nodeid=11"
  echo using NDB_CONNECTSTRING=$NDB_CONNECTSTRING
else
  NDB_CONNECTSTRING="host=localhost:"$MYSQLCLUSTER_PORT_BASE"00;nodeid=11"
fi
export NDB_CONNECTSTRING

mysqld --default-table-type=ndbcluster --basedir=$MYSQLCLUSTER_TOP --datadir=$MYSQLCLUSTER_TOP/data --socket=$MYSQLCLUSTER_TOP/data/mysqlcluster.sock $*
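A typical sequence tying the three wrapper scripts above together (a sketch; it assumes MYSQLCLUSTER_TOP is exported and the ndb binaries are on PATH, as the scripts themselves require; the install prefix is the one used in ndb/README):

  export MYSQLCLUSTER_TOP=/usr/local/mysql-4.1-ndb
  mysqlcluster_install_db              # start the NDB nodes and bootstrap the MySQL system tables
  mysqlclusterd &                      # mysqld with --default-table-type=ndbcluster
  mysqlcluster -e 'select version()'   # client wrapper above, talks to the cluster socket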
ndb/bin/regression.sh (new file, 644 lines)
@@ -0,0 +1,644 @@
#!/bin/sh
#!/bin/sh
|
||||
# NAME
|
||||
# regression.sh
|
||||
#
|
||||
# SYNOPSIS
|
||||
# regression.sh
|
||||
#
|
||||
# DESCRIPTION
|
||||
#
|
||||
# This script runs a number of regression tests to verify that nothing
|
||||
# is broken. Currently it executes the same tests as in the autotest
|
||||
# regression suite.
|
||||
#
|
||||
# OPTIONS
|
||||
#
|
||||
# EXAMPLES
|
||||
#
|
||||
#
|
||||
# ENVIRONMENT
|
||||
# verbose verbose printouts
|
||||
#
|
||||
# FILES
|
||||
#
|
||||
#
|
||||
# SEE ALSO
|
||||
#
|
||||
# DIAGNOSTICS
|
||||
#
|
||||
#
|
||||
# VERSION
|
||||
# 1.0
|
||||
#
|
||||
# AUTHOR
|
||||
#
|
||||
#
|
||||
|
||||
|
||||
# die prints the supplied message to stderr,
|
||||
# prefixed with the program name, and exits
|
||||
# with the exit code given by "-e num" or
|
||||
# 1, if no -e option is present.
|
||||
#
|
||||
die ()
|
||||
{
|
||||
die_code__=1
|
||||
[ "X$1" = X-e ] && { die_code__=$2; shift 2; }
|
||||
[ "X$1" = X-- ] && shift
|
||||
errmsg "$@"
|
||||
exit $die_code__
|
||||
}
|
||||
|
||||
|
||||
# msg prints the supplied message to stderr,
|
||||
# prefixed with the program name.
|
||||
#
|
||||
errmsg ()
|
||||
{
|
||||
echo "${progname:-<no program name set>}:" "$@" >&2
|
||||
}
|
||||
|
||||
# rawdie prints the supplied message to stderr.
|
||||
# It then exits with the exit code given with "-e num"
|
||||
# or 1, if no -e option is present.
|
||||
#
|
||||
rawdie ()
|
||||
{
|
||||
rawdie_code__=1
|
||||
[ "X$1" = X-e ] && { rawdie_code__=$2; shift 2; }
|
||||
[ "X$1" = X-- ] && shift
|
||||
rawerrmsg "$@"
|
||||
exit $rawdie_code__
|
||||
}
|
||||
|
||||
# Syndie prints the supplied message (if present) to stderr,
|
||||
# prefixed with the program name, on the first line.
|
||||
# On the second line, it prints $synopsis.
|
||||
# It then exits with the exit code given with "-e num"
|
||||
# or 1, if no -e option is present.
|
||||
#
|
||||
syndie ()
|
||||
{
|
||||
syndie_code__=1
|
||||
[ "X$1" = X-e ] && { syndie_code__=$2; shift 2; }
|
||||
[ "X$1" = X-- ] && shift
|
||||
[ -n "$*" ] && msg "$*"
|
||||
rawdie -e $syndie_code__ "Synopsis: $synopsis"
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
# msg prints the supplied message to stdout,
|
||||
# prefixed with the program name.
|
||||
#
|
||||
msg ()
|
||||
{
|
||||
echo "${progname:-<no program name set>}:" "$@"
|
||||
}
|
||||
|
||||
rawmsg () { echo "$*"; } # print the supplied message to stdout
|
||||
rawerrmsg () { echo "$*" >&2; } # print the supplied message to stderr
|
||||
|
||||
# trace prints the supplied message to stdout if verbose is non-null
|
||||
#
|
||||
trace ()
|
||||
{
|
||||
[ -n "$verbose" ] && msg "$@"
|
||||
}
|
||||
|
||||
|
||||
# errtrace prints the supplied message to stderr if verbose is non-null
|
||||
#
|
||||
errtrace ()
|
||||
{
|
||||
[ -n "$verbose" ] && msg "$@" >&2
|
||||
}
|
||||
|
||||
|
||||
synopsis="regression.sh"
|
||||
progname=`basename $0`
|
||||
|
||||
numOfTestsOK=0
|
||||
numOfTestsFailed=0
|
||||
|
||||
LOG=regression-$1.`date '+%Y-%m-%d'`
|
||||
|
||||
executeTest()
|
||||
{
|
||||
eval "$@" | tee -a $LOG
|
||||
|
||||
if [ $? -eq 0 ]
|
||||
then
|
||||
echo "SUCCESS: $@"
|
||||
numOfTestsOK=`expr $numOfTestsOK + 1`
|
||||
else
|
||||
echo "FAILED: $@"
|
||||
numOfTestsFailed=`expr $numOfTestsFailed + 1`
|
||||
fi
|
||||
}
|
||||
|
||||
#
|
||||
# INFO
|
||||
#
|
||||
trace "Starting: `date`"
|
||||
trace "NDB_TOP = $NDB_TOP"
|
||||
|
||||
#
|
||||
# THE TESTS TO EXECUTE
|
||||
#
|
||||
|
||||
# BASIC FUNCTIONALITY
|
||||
if [ $1 = "basic" ]
|
||||
then
|
||||
executeTest 'testBasic -n PkRead'
|
||||
executeTest 'drop_all_tabs'
|
||||
|
||||
executeTest 'testBasic -n PkUpdate'
|
||||
executeTest 'drop_all_tabs'
|
||||
|
||||
executeTest 'testBasic -n PkDelete'
|
||||
executeTest 'drop_all_tabs'
|
||||
|
||||
executeTest 'testBasic -n PkInsert'
|
||||
executeTest 'drop_all_tabs'
|
||||
|
||||
executeTest 'testBasic -n UpdateAndRead'
|
||||
executeTest 'drop_all_tabs'
|
||||
|
||||
executeTest 'testBasic -n PkReadAndLocker' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testBasic -n PkReadAndLocker2' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testBasic -n PkReadUpdateAndLocker' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testBasic -n ReadWithLocksAndInserts' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testBasic -n PkInsertTwice' T1 T6 T10
|
||||
executeTest 'drop_tab' T1 T6 T10
|
||||
|
||||
executeTest 'testBasic -n PkDirtyRead'
|
||||
executeTest 'drop_all_tabs'
|
||||
|
||||
executeTest 'testBasic -n Fill' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testBasic -n Fill' T1
|
||||
executeTest 'drop_tab' T1
|
||||
|
||||
executeTest 'testBasic -n NoCommitSleep' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testBasic -n NoCommit626' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testBasic -n NoCommitAndClose' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testBasic -n Commit626' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testBasic -n CommitTry626' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testBasic -n CommitAsMuch626' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testBasic -n NoCommit626' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testBasic -n NoCommitRollback626' T1 T6
|
||||
executeTest 'drop_tab' T1 T6
|
||||
|
||||
executeTest 'testBasic -n Commit630' T1 T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testBasic -n CommitTry630' T1 T6
|
||||
executeTest 'drop_tab' T1 T6
|
||||
|
||||
executeTest 'testBasic -n CommitAsMuch630' T1 T6
|
||||
executeTest 'drop_tab' T1 T6
|
||||
|
||||
executeTest 'testBasic -n NoCommit630' T1 T6
|
||||
executeTest 'drop_tab' T1 T6
|
||||
|
||||
executeTest 'testBasic -n NoCommitRollback630' T1 T6
|
||||
executeTest 'drop_tab' T1 T6
|
||||
|
||||
executeTest 'testBasic -n NoCommitAndClose' T1 T6
|
||||
executeTest 'drop_tab' T1 T6
|
||||
|
||||
executeTest 'testBasic -n RollbackUpdate' T1 T6
|
||||
executeTest 'drop_tab' T1 T6
|
||||
|
||||
executeTest 'testBasic -n RollbackDeleteMultiple' T1 T6
|
||||
executeTest 'drop_tab' T1 T6
|
||||
|
||||
executeTest 'testBasic -n ImplicitRollbackDelete' T1 T6
|
||||
executeTest 'drop_tab' T1 T6
|
||||
|
||||
executeTest 'testBasic -n CommitDelete' T1 T6
|
||||
executeTest 'drop_tab' T1 T6
|
||||
|
||||
executeTest 'testBasic -n RollbackNothing' T1 T6
|
||||
executeTest 'drop_tab' T1 T6
|
||||
|
||||
executeTest 'testBasic -n ReadConsistency' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testBasic -n PkRead' TPK_33 TPK_34 TPK_1003 TPK_2003 TPK_4092
|
||||
executeTest 'drop_tab' TPK_33 TPK_34 TPK_1003 TPK_2003 TPK_4092
|
||||
|
||||
executeTest 'testBasic -n PkUpdate' TPK_33 TPK_34 TPK_1003 TPK_2003 TPK_4092
|
||||
executeTest 'drop_tab' TPK_33 TPK_34 TPK_1003 TPK_2003 TPK_4092
|
||||
|
||||
executeTest 'testBasic -n PkDelete' TPK_33 TPK_34 TPK_1003 TPK_2003 TPK_4092
|
||||
executeTest 'drop_tab' TPK_33 TPK_34 TPK_1003 TPK_2003 TPK_4092
|
||||
|
||||
executeTest 'testBasic -n PkInsert' TPK_33 TPK_34 TPK_1003 TPK_2003 TPK_4092
|
||||
executeTest 'drop_tab' TPK_33 TPK_34 TPK_1003 TPK_2003 TPK_4092
|
||||
|
||||
executeTest 'testBasic -n UpdateAndRead' TPK_33 TPK_34 TPK_1003 TPK_2003 TPK_4092
|
||||
#executeTest 'drop_tab' TPK_33 TPK_34 TPK_1003 TPK_2003 TPK_4092
|
||||
|
||||
executeTest 'testBasicAsynch -n PkInsertAsynch'
|
||||
executeTest 'drop_all_tabs'
|
||||
|
||||
executeTest 'testBasicAsynch -n PkReadAsynch'
|
||||
executeTest 'drop_all_tabs'
|
||||
|
||||
executeTest 'testBasicAsynch -n PkUpdateAsynch'
|
||||
executeTest 'drop_all_tabs'
|
||||
|
||||
executeTest 'testBasicAsynch -n PkDeleteAsynch'
|
||||
executeTest 'drop_all_tabs'
|
||||
fi
|
||||
|
||||
# SCAN TESTS
|
||||
if [ $1 = "scan" ]
|
||||
then
|
||||
executeTest 'testScan -n ScanRead16'
|
||||
executeTest 'drop_all_tabs'
|
||||
|
||||
executeTest 'testScan -n ScanRead240'
|
||||
executeTest 'drop_all_tabs'
|
||||
|
||||
executeTest 'testScan -n ScanUpdate'
|
||||
executeTest 'drop_all_tabs'
|
||||
|
||||
executeTest 'testScan -n ScanUpdate2' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n ScanDelete'
|
||||
executeTest 'drop_all_tabs'
|
||||
|
||||
executeTest 'testScan -n ScanDelete2' T10
|
||||
executeTest 'drop_tab' T10
|
||||
|
||||
executeTest 'testScan -n ScanUpdateAndScanRead' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n ScanReadAndLocker' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n ScanReadAndPkRead' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n ScanRead488' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n ScanWithLocksAndInserts' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n ScanReadAbort' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n ScanReadAbort15' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n ScanReadAbort240' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n ScanUpdateAbort16' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n ScanReadRestart' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n ScanUpdateRestart' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n CheckGetValue' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n CloseWithoutStop' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n NextScanWhenNoMore' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n ExecuteScanWithoutOpenScan' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n OnlyOpenScanOnce' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n OnlyOneOpInScanTrans' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n OnlyOneOpBeforeOpenScan' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n OnlyOneScanPerTrans' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n NoCloseTransaction' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n CheckInactivityTimeOut' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n CheckInactivityBeforeClose' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testScan -n CheckAfterTerror' T6
|
||||
executeTest 'drop_tab' T6
|
||||
fi
|
||||
|
||||
|
||||
# DICT TESTS
|
||||
if [ $1 = "dict" ]
|
||||
then
|
||||
executeTest 'testDict -n CreateAndDrop'
|
||||
executeTest 'drop_all_tabs'
|
||||
|
||||
executeTest 'testDict -n CreateAndDropWithData'
|
||||
executeTest 'drop_all_tabs'
|
||||
|
||||
executeTest 'testDict -n CreateAndDropDuring' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testDict -n CreateInvalidTables'
|
||||
executeTest 'drop_all_tabs'
|
||||
|
||||
executeTest 'testDict -n CreateTableWhenDbIsFull' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testDict -n CreateMaxTables' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testDict -n FragmentTypeAll' T1 T6 T7 T8
|
||||
executeTest 'drop_tab' T1 T6 T7 T8
|
||||
|
||||
executeTest 'testDict -n FragmentTypeAllLarge' T1 T6 T7 T8
|
||||
executeTest 'drop_tab' T1 T6 T7 T8
|
||||
|
||||
executeTest 'testDict -n TemporaryTables' T1 T6 T7 T8
|
||||
executeTest 'drop_tab' T1 T6 T7 T8
|
||||
fi
|
||||
|
||||
# TEST NDBAPI
|
||||
if [ $1 = "api" ]
|
||||
then
|
||||
executeTest 'testNdbApi -n MaxNdb' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testNdbApi -n MaxTransactions' T1 T6 T7 T8 T13
|
||||
executeTest 'drop_tab' T1 T6 T7 T8 T13
|
||||
|
||||
executeTest 'testNdbApi -n MaxOperations' T1 T6 T7 T8 T13
|
||||
executeTest 'drop_tab' T1 T6 T7 T8 T13
|
||||
|
||||
executeTest 'testNdbApi -n MaxGetValue' T1 T6 T7 T8 T13
|
||||
executeTest 'drop_tab' T1 T6 T7 T8 T13
|
||||
|
||||
executeTest 'testNdbApi -n MaxEqual'
|
||||
executeTest 'drop_all_tabs'
|
||||
|
||||
executeTest 'testNdbApi -n DeleteNdb' T1 T6
|
||||
executeTest 'drop_tab' T1 T6
|
||||
|
||||
executeTest 'testNdbApi -n WaitUntilReady' T1 T6 T7 T8 T13
|
||||
executeTest 'drop_tab' T1 T6 T7 T8 T13
|
||||
|
||||
executeTest 'testNdbApi -n GetOperationNoTab' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testNdbApi -n NdbErrorOperation' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testNdbApi -n MissingOperation' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testNdbApi -n GetValueInUpdate' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testNdbApi -n UpdateWithoutKeys' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testNdbApi -n UpdateWithoutValues' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n ReadRead' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n ReadReadEx' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n ReadInsert' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n ReadUpdate' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n ReadDelete' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n ReadExRead' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n ReadExReadEx' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n ReadExInsert' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n ReadExUpdate' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n ReadExDelete' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n InsertRead' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n InsertReadEx' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n InsertInsert' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n InsertUpdate' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n InsertDelete' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n UpdateRead' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n UpdateReadEx' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n UpdateInsert' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n UpdateUpdate' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n UpdateDelete' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n DeleteRead' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n DeleteReadEx' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n DeleteInsert' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n DeleteUpdate' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testOperations -n DeleteDelete' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testRestartGci' T6
|
||||
executeTest 'drop_tab' T6
|
||||
|
||||
executeTest 'testIndex -n CreateAll'
|
||||
executeTest 'drop_all_tabs'
|
||||
|
||||
executeTest 'testIndex -n InsertDeleteGentle' T1 T6 T8 T10
|
||||
executeTest 'drop_tab' T1 T6 T8 T10
|
||||
|
||||
executeTest 'testIndex -n InsertDelete' T1 T6 T8 T10
|
||||
executeTest 'drop_tab' T1 T6 T8 T10
|
||||
|
||||
executeTest 'testIndex -n CreateLoadDropGentle' T1 T6 T8 T10
|
||||
executeTest 'drop_tab' T1 T6 T8 T10
|
||||
|
||||
executeTest 'testIndex -n CreateLoadDrop' T1 T6 T8 T10
|
||||
executeTest 'drop_tab' T1 T6 T8 T10
|
||||
|
||||
executeTest 'testBackup -n BackupOne'
|
||||
|
||||
executeTest 'testBackup -n BackupBank' T6
|
||||
executeTest 'drop_tab' T6
|
||||
fi
|
||||
|
||||
# TEST SYSTEM RESTARTS
|
||||
if [ $1 = "sr" ]
|
||||
then
|
||||
executeTest 'testSystemRestart -n SR1' T1
|
||||
executeTest 'testSystemRestart -n SR1' T6
|
||||
executeTest 'testSystemRestart -n SR1' T7
|
||||
executeTest 'testSystemRestart -n SR1' T8
|
||||
executeTest 'testSystemRestart -n SR1' T10
|
||||
executeTest 'testSystemRestart -n SR2' T1
|
||||
executeTest 'testSystemRestart -n SR2' T6
|
||||
executeTest 'testSystemRestart -n SR2' T7
|
||||
executeTest 'testSystemRestart -n SR2' T10
|
||||
executeTest 'testSystemRestart -n SR2' T13
|
||||
executeTest 'testSystemRestart -n SR3' T6
|
||||
executeTest 'testSystemRestart -n SR3' T10
|
||||
executeTest 'testSystemRestart -n SR4' T6
|
||||
executeTest 'testSystemRestart -n SR_UNDO' T1
|
||||
executeTest 'testSystemRestart -n SR_UNDO' T6
|
||||
executeTest 'testSystemRestart -n SR_UNDO' T7
|
||||
executeTest 'testSystemRestart -n SR_UNDO' T8
|
||||
executeTest 'testSystemRestart -n SR_UNDO' T10
|
||||
executeTest 'drop_tab' T1 T6 T7 T8 T10
|
||||
fi
|
||||
|
||||
# TEST NODE RESTARTS
|
||||
if [ $1 = "nr" ]
|
||||
then
|
||||
executeTest 'testNodeRestart -n NoLoad' T6 T8 T13
|
||||
executeTest 'drop_tab' T6 T8 T13
|
||||
|
||||
executeTest 'testNodeRestart -n PkRead' T6 T8 T13
|
||||
executeTest 'drop_tab' T6 T8 T13
|
||||
|
||||
executeTest 'testNodeRestart -n PkReadPkUpdate' T6 T8 T13
|
||||
executeTest 'drop_tab' T6 T8 T13
|
||||
|
||||
executeTest 'testNodeRestart -n ReadUpdateScan' T6 T8 T13
|
||||
executeTest 'drop_tab' T6 T8 T13
|
||||
|
||||
executeTest 'testNodeRestart -n Terror' T6 T13
|
||||
executeTest 'drop_tab' T6 T8 T13
|
||||
|
||||
executeTest 'testNodeRestart -n FullDb' T6 T13
|
||||
executeTest 'drop_tab' T6 T8 T13
|
||||
|
||||
executeTest 'testNodeRestart -n RestartRandomNode' T6 T13
|
||||
executeTest 'drop_tab' T6 T8 T13
|
||||
|
||||
executeTest 'testNodeRestart -n RestartRandomNodeError' T6 T13
|
||||
executeTest 'drop_tab' T6 T8 T13
|
||||
|
||||
executeTest 'testNodeRestart -n RestartRandomNodeInitial' T6 T13
|
||||
executeTest 'drop_tab' T6 T8 T13
|
||||
|
||||
executeTest 'testNodeRestart -n RestartNFDuringNR' T6 T13
|
||||
executeTest 'drop_tab' T6 T8 T13
|
||||
|
||||
executeTest 'testNodeRestart -n RestartNodeDuringLCP' T6 T13
|
||||
executeTest 'drop_tab' T6 T8 T13
|
||||
|
||||
executeTest 'testNodeRestart -n RestartMasterNodeError' T6 T8 T13
|
||||
executeTest 'drop_tab' T6 T8 T13
|
||||
|
||||
executeTest 'testNodeRestart -n TwoNodeFailure' T6 T8 T13
|
||||
executeTest 'drop_tab' T6 T8 T13
|
||||
|
||||
executeTest 'testNodeRestart -n TwoMasterNodeFailure' T6 T8 T13
|
||||
executeTest 'drop_tab' T6 T8 T13
|
||||
|
||||
executeTest 'testNodeRestart -n FiftyPercentFail' T6 T8 T13
|
||||
executeTest 'drop_tab' T6 T8 T13
|
||||
|
||||
executeTest 'testNodeRestart -n RestartAllNodes' T6 T8 T13
|
||||
executeTest 'drop_tab' T6 T8 T13
|
||||
|
||||
executeTest 'testNodeRestart -n RestartAllNodesAbort' T6 T8 T13
|
||||
executeTest 'drop_tab' T6 T8 T13
|
||||
|
||||
executeTest 'testNodeRestart -n RestartAllNodesError9999' T6 T8 T13
|
||||
executeTest 'drop_tab' T6 T8 T13
|
||||
|
||||
executeTest 'testNodeRestart -n FiftyPercentStopAndWait' T6 T8 T13
|
||||
executeTest 'drop_tab' T6 T8 T13
|
||||
|
||||
fi
|
||||
|
||||
# TESTS FINISHED
|
||||
trace "Finished: `date`"
|
||||
|
||||
#
|
||||
# TEST SUMMARY
|
||||
#
|
||||
if [ $numOfTestsFailed -eq 0 ]
|
||||
then
|
||||
echo "-- REGRESSION TEST SUCCESSFUL --"
|
||||
else
|
||||
echo "-- REGRESSION TEST FAILED!! --"
|
||||
fi
|
||||
echo "Number of successful tests: $numOfTestsOK"
|
||||
echo "Number of failed tests : $numOfTestsFailed"
|
4
ndb/config/Defs.DEBUG.mk
Normal file
|
@ -0,0 +1,4 @@
|
|||
|
||||
VERSION_FLAGS := -DNDB_DEBUG -DUSE_EMULATED_JAM -DVM_TRACE -DERROR_INSERT -DARRAY_GUARD
|
||||
#-DDEBUG_TRANSPORTER
|
||||
|
50
ndb/config/Defs.HPUX.HPPA.GCC.mk
Normal file
|
@ -0,0 +1,50 @@
|
|||
###
|
||||
#
|
||||
# Defines
|
||||
SHELL := /bin/sh
|
||||
|
||||
C++ := g++
|
||||
CC := gcc
|
||||
AR_RCS := ar rcs
|
||||
SO := ld -b -o
|
||||
|
||||
SHLIBEXT := sl
|
||||
|
||||
MAKEDEPEND := g++ -M
|
||||
PIC := -fPIC
|
||||
|
||||
RPCGENFLAGS := -MA -C -N
|
||||
ETAGS := etags
|
||||
CTAGS := ctags
|
||||
|
||||
###
|
||||
#
|
||||
# Flags
|
||||
#
|
||||
CCFLAGS_WARNINGS = -Wno-long-long -W -Wall -pedantic
|
||||
# -Wno-sign-compare Use this flag if you are annoyed with all the warnings
|
||||
CCFLAGS_TOP = -DHPUX -D_REENTRANT -D_POSIX_PTHREAD_SEMANTICS -DNO_COMMAND_HANDLER
|
||||
|
||||
ifeq (RELEASE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O3
|
||||
else
|
||||
ifeq (RELEASE_TRACE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O3 -g
|
||||
else
|
||||
VERSION_FLAGS += -g
|
||||
endif
|
||||
endif
|
||||
|
||||
CCFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
CFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
|
||||
LDFLAGS_TOP = -lpthread -lnsl -lrt
|
||||
|
||||
LDFLAGS = $(LDFLAGS_LOC) $(LDFLAGS_TOP)
|
||||
|
||||
LDLIBS = $(LDLIBS_LOC) $(LDLIBS_TOP)
|
||||
|
||||
LINK.cc = $(PURE) $(C++) $(CCFLAGS) $(LDFLAGS)
|
||||
|
||||
LINK.c = $(PURE) $(CC) $(CFLAGS) $(LDFLAGS)
|
||||
|
49
ndb/config/Defs.IBMAIX.POWERPC.GCC.mk
Normal file
|
@ -0,0 +1,49 @@
|
|||
###
|
||||
#
|
||||
# Defines
|
||||
SHELL := /bin/sh
|
||||
|
||||
C++ := g++
|
||||
CC := gcc
|
||||
AR_RCS := $(PURE) ar rcs
|
||||
SO := g++ -shared -o
|
||||
|
||||
MAKEDEPEND := g++ -M
|
||||
PIC := -fPIC
|
||||
|
||||
RPCGENFLAGS := -M -C -N
|
||||
|
||||
ETAGS := etags
|
||||
CTAGS := ctags
|
||||
|
||||
###
|
||||
#
|
||||
# Flags
|
||||
#
|
||||
CCFLAGS_WARNINGS = -Wno-long-long -Wall #-pedantic
|
||||
# Add these for more warnings -Weffc++ -W
|
||||
CCFLAGS_TOP = -D_REENTRANT -D_POSIX_PTHREAD_SEMANTICS
|
||||
CCFLAGS_TOP += -fno-rtti
|
||||
|
||||
ifeq (RELEASE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O3
|
||||
else
|
||||
ifeq (RELEASE_TRACE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O3 -g
|
||||
else
|
||||
VERSION_FLAGS += -g
|
||||
endif
|
||||
endif
|
||||
|
||||
CCFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
CFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
|
||||
LDFLAGS_TOP = -lpthread -lrt
|
||||
|
||||
LDFLAGS = $(LDFLAGS_LOC) $(LDFLAGS_TOP)
|
||||
|
||||
LDLIBS = $(LDLIBS_LOC) $(LDLIBS_TOP)
|
||||
|
||||
LINK.cc = $(PURE) $(C++) $(CCFLAGS) $(LDFLAGS)
|
||||
|
||||
LINK.c = $(PURE) $(CC) $(CFLAGS) $(LDFLAGS)
|
56
ndb/config/Defs.LINUX.x86.GCC.mk
Normal file
|
@ -0,0 +1,56 @@
|
|||
###
|
||||
#
|
||||
# Defines
|
||||
SHELL := /bin/sh
|
||||
|
||||
C++ := g++$(GCC_VERSION)
|
||||
CC := gcc$(GCC_VERSION)
|
||||
AR_RCS := $(PURE) ar rcs
|
||||
SO := gcc$(GCC_VERSION) -shared -lpthread -o
|
||||
|
||||
MAKEDEPEND := g++$(GCC_VERSION) -M
|
||||
PIC := -fPIC
|
||||
|
||||
RPCGENFLAGS := -M -C -N
|
||||
|
||||
ETAGS := etags
|
||||
CTAGS := ctags
|
||||
|
||||
###
|
||||
#
|
||||
# Flags
|
||||
#
|
||||
# gcc3.3 __THROW problem if -pedantic and -O2
|
||||
ifeq ($(NDB_VERSION),DEBUG)
|
||||
CCFLAGS_WARNINGS = -Wno-long-long -Wall -pedantic
|
||||
else
|
||||
CCFLAGS_WARNINGS = -Wno-long-long -Wall
|
||||
endif
|
||||
# Add these for more warnings -Weffc++ -W
|
||||
CCFLAGS_TOP = -D_REENTRANT -D_POSIX_PTHREAD_SEMANTICS
|
||||
CCFLAGS_TOP += -fno-rtti -fno-exceptions
|
||||
|
||||
ifeq (RELEASE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O2
|
||||
else
|
||||
ifeq (RELEASE_TRACE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O2 -g
|
||||
else
|
||||
VERSION_FLAGS += -g
|
||||
endif
|
||||
endif
|
||||
|
||||
CCFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
CFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
|
||||
LDFLAGS_TOP =
|
||||
|
||||
LDFLAGS = $(LDFLAGS_LOC) $(LDFLAGS_TOP)
|
||||
|
||||
LDLIBS = $(LDLIBS_LOC) $(LDLIBS_TOP)
|
||||
|
||||
LINK.cc = $(PURE) $(CC) $(CCFLAGS) $(LDFLAGS)
|
||||
|
||||
LINK.c = $(PURE) $(CC) $(CFLAGS) $(LDFLAGS)
|
||||
|
||||
LDFLAGS_LAST = -lpthread -lrt -Wl,-Bstatic -lstdc++ -Wl,-Bdynamic
|
54
ndb/config/Defs.LINUX.x86.ICC.mk
Normal file
|
@ -0,0 +1,54 @@
|
|||
###
|
||||
#
|
||||
# Defines
|
||||
SHELL := /bin/sh
|
||||
|
||||
C++ := icc
|
||||
CC := icc
|
||||
AR_RCS := $(PURE) ar rcs
|
||||
SO := g++$(GCC_VERSION) -shared -lpthread -o
|
||||
|
||||
MAKEDEPEND := g++$(GCC_VERSION) -M
|
||||
PIC := -fPIC
|
||||
|
||||
RPCGENFLAGS := -M -C -N
|
||||
|
||||
ETAGS := etags
|
||||
CTAGS := ctags
|
||||
|
||||
###
|
||||
#
|
||||
# Flags
|
||||
#
|
||||
# gcc3.3 __THROW problem if -pedantic and -O2
|
||||
ifeq ($(NDB_VERSION),DEBUG)
|
||||
CCFLAGS_WARNINGS =
|
||||
else
|
||||
CCFLAGS_WARNINGS =
|
||||
endif
|
||||
# Add these for more warnings -Weffc++ -W
|
||||
CCFLAGS_TOP = -D_REENTRANT -D_POSIX_PTHREAD_SEMANTICS
|
||||
CCFLAGS_TOP +=
|
||||
|
||||
ifeq (RELEASE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O2
|
||||
else
|
||||
ifeq (RELEASE_TRACE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O2 -g
|
||||
else
|
||||
VERSION_FLAGS += -g
|
||||
endif
|
||||
endif
|
||||
|
||||
CCFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
CFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
|
||||
LDFLAGS_TOP = -lpthread -lrt
|
||||
|
||||
LDFLAGS = $(LDFLAGS_LOC) $(LDFLAGS_TOP)
|
||||
|
||||
LDLIBS = $(LDLIBS_LOC) $(LDLIBS_TOP)
|
||||
|
||||
LINK.cc = $(PURE) $(C++) $(CCFLAGS) $(LDFLAGS)
|
||||
|
||||
LINK.c = $(PURE) $(CC) $(CFLAGS) $(LDFLAGS)
|
54
ndb/config/Defs.LINUX.x86_64.GCC.mk
Normal file
|
@ -0,0 +1,54 @@
|
|||
###
|
||||
#
|
||||
# Defines
|
||||
SHELL := /bin/sh
|
||||
|
||||
C++ := g++
|
||||
CC := gcc
|
||||
AR_RCS := $(PURE) ar rcs
|
||||
SO := g++ -shared -lpthread -o
|
||||
|
||||
MAKEDEPEND := g++ -M
|
||||
PIC := -fPIC
|
||||
|
||||
RPCGENFLAGS := -M -C -N
|
||||
|
||||
ETAGS := etags
|
||||
CTAGS := ctags
|
||||
|
||||
###
|
||||
#
|
||||
# Flags
|
||||
#
|
||||
# gcc3.3 __THROW problem if -pedantic and -O2
|
||||
ifeq ($(NDB_VERSION),DEBUG)
|
||||
CCFLAGS_WARNINGS = -Wno-long-long -Wall -pedantic
|
||||
else
|
||||
CCFLAGS_WARNINGS = -Wno-long-long -Wall
|
||||
endif
|
||||
# Add these for more warnings -Weffc++ -W
|
||||
CCFLAGS_TOP = -D_REENTRANT -D_POSIX_PTHREAD_SEMANTICS
|
||||
CCFLAGS_TOP += -fno-rtti -fno-exceptions -m64
|
||||
|
||||
ifeq (RELEASE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O2
|
||||
else
|
||||
ifeq (RELEASE_TRACE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O2 -g
|
||||
else
|
||||
VERSION_FLAGS += -g
|
||||
endif
|
||||
endif
|
||||
|
||||
CCFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
CFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
|
||||
LDFLAGS_TOP = -lpthread -lrt
|
||||
|
||||
LDFLAGS = $(LDFLAGS_LOC) $(LDFLAGS_TOP)
|
||||
|
||||
LDLIBS = $(LDLIBS_LOC) $(LDLIBS_TOP)
|
||||
|
||||
LINK.cc = $(PURE) $(C++) $(CCFLAGS) $(LDFLAGS)
|
||||
|
||||
LINK.c = $(PURE) $(CC) $(CFLAGS) $(LDFLAGS)
|
58
ndb/config/Defs.MACOSX.POWERPC.GCC.mk
Normal file
|
@ -0,0 +1,58 @@
|
|||
###
|
||||
#
|
||||
# Defines
|
||||
SHELL := /bin/sh
|
||||
|
||||
C++ := gcc
|
||||
CC := gcc
|
||||
CXX := gcc
|
||||
AR_RCS := $(PURE) ar rcs
|
||||
#SO := g++ -dynamiclib -Wl,-segprot,__TEXT,rwx,rwx -o
|
||||
SO := gcc -dynamiclib -o
|
||||
|
||||
SHLIBEXT := dylib
|
||||
|
||||
MAKEDEPEND := gcc -M
|
||||
PIC := -fPIC
|
||||
|
||||
RPCGENFLAGS := -M -C -N
|
||||
|
||||
ETAGS := etags
|
||||
CTAGS := ctags
|
||||
|
||||
###
|
||||
#
|
||||
# Flags
|
||||
#
|
||||
CCFLAGS_WARNINGS = -Wno-long-long -Wall -Winline #-Werror#-pedantic
|
||||
# Add these for more warnings -Weffc++ -W
|
||||
CCFLAGS_TOP = -D_REENTRANT -D_POSIX_PTHREAD_SEMANTICS -D_BIG_ENDIAN
|
||||
CXX_FLAGS_TOP = -fno-rtti -felide-constructors -fno-exceptions -fno-omit-frame-pointer
|
||||
C_FLAGS_TOP += -fno-omit-frame-pointer
|
||||
|
||||
ifeq (RELEASE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O3
|
||||
else
|
||||
ifeq (RELEASE_TRACE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O3 -g
|
||||
else
|
||||
VERSION_FLAGS += -g
|
||||
endif
|
||||
endif
|
||||
|
||||
CCFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(CXX_FLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
CFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(C_FLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
|
||||
LDFLAGS_TOP =
|
||||
|
||||
LDFLAGS = $(LDFLAGS_LOC) $(LDFLAGS_TOP)
|
||||
|
||||
LDLIBS = $(LDLIBS_LOC) $(LDLIBS_TOP)
|
||||
|
||||
LINK.cc = $(PURE) $(C++) $(CCFLAGS) $(LDFLAGS)
|
||||
|
||||
LINK.c = $(PURE) $(CC) $(CFLAGS) $(LDFLAGS)
|
||||
|
||||
#LDFLAGS_LAST = -Wl,-Bstatic -lstdc++ -Wl,-Bdynamic
|
||||
LDFLAGS_LAST = -lstdc++
|
||||
|
47
ndb/config/Defs.OSE.PPC750.DIAB.mk
Normal file
|
@ -0,0 +1,47 @@
|
|||
###
|
||||
#
|
||||
# Defines
|
||||
SHELL := /bin/sh
|
||||
|
||||
C++ := dplus
|
||||
CC := dcc
|
||||
AR_RCS := $(PURE) ar rcs
|
||||
SO := dar -r
|
||||
|
||||
MAKEDEPEND := g++ -M -nostdinc
|
||||
PIC :=
|
||||
|
||||
RPCGENFLAGS := -MA -C -N
|
||||
|
||||
###
|
||||
#
|
||||
# Flags
|
||||
#
|
||||
CCFLAGS_INCLUDE = -I/vobs/cello/cls/rtosi_if/include -I/vobs/cello/cls/rtosi_if/include.mp750 -I/vobs/cello/cls/rtosi_if/include.ppc
|
||||
CCFLAGS_TOP = -tPPC750EH -DBIG_ENDIAN -D_BIG_ENDIAN -DPPC -DPPC750 -DOSE_DELTA -DMP -Xlint -Xforce-prototypes -DINLINE=__inline__ -Xansi -Xsmall-data=0 -Xsmall-const=0 -Xstrings-in-text
|
||||
|
||||
ifeq (RELEASE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -XO
|
||||
else
|
||||
ifeq (RELEASE_TRACE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -XO -g
|
||||
else
|
||||
VERSION_FLAGS += -g
|
||||
endif
|
||||
endif
|
||||
|
||||
CCFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_INCLUDE)
|
||||
CFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_INCLUDE)
|
||||
|
||||
LDFLAGS_TOP =
|
||||
|
||||
LDFLAGS = $(LDFLAGS_LOC) $(LDFLAGS_TOP)
|
||||
|
||||
LDLIBS = $(LDLIBS_LOC) $(LDLIBS_TOP)
|
||||
|
||||
LINK.cc = $(PURE) $(C++) $(CCFLAGS) $(LDFLAGS)
|
||||
|
||||
LINK.c = $(PURE) $(CC) $(CFLAGS) $(LDFLAGS)
|
||||
|
||||
|
||||
|
3
ndb/config/Defs.RELEASE.mk
Normal file
|
@ -0,0 +1,3 @@
|
|||
|
||||
VERSION_FLAGS := -DNDB_RELEASE -DUSE_EMULATED_JAM -DNDEBUG
|
||||
|
3
ndb/config/Defs.RELEASE_TRACE.mk
Normal file
|
@ -0,0 +1,3 @@
|
|||
|
||||
VERSION_FLAGS := -DNDB_RELEASE -DUSE_EMULATED_JAM -DNDEBUG -DVM_TRACE -DERROR_INSERT -DARRAY_GUARD
|
||||
|
53
ndb/config/Defs.SIMCELLO.SOFTOSE.GCC.mk
Normal file
|
@ -0,0 +1,53 @@
|
|||
###
|
||||
#
|
||||
# Defines
|
||||
SHELL := /bin/sh
|
||||
|
||||
C++ := g++
|
||||
CC := gcc
|
||||
AR_RCS := $(PURE) ar rcs
|
||||
SO := g++ -shared -o
|
||||
|
||||
MAKEDEPEND := g++ -M
|
||||
PIC := -fPIC
|
||||
|
||||
###
|
||||
#
|
||||
# Flags
|
||||
#
|
||||
NDB_STRDUP := Y
|
||||
CCFLAGS_WARNINGS = -Wall -pedantic -Wno-sign-compare
|
||||
CC_FLAGS_OSE = -DSPARC -DSIM -DOSE_DELTA -DMP
|
||||
CCFLAGS_TOP = $(CC_FLAGS_OSE) $(CC_FLAGS_WARNINGS) -DNDB_STRDUP
|
||||
|
||||
ifeq (RELEASE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O3
|
||||
else
|
||||
ifeq (RELEASE_TRACE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O3 -g
|
||||
else
|
||||
VERSION_FLAGS += -g
|
||||
endif
|
||||
endif
|
||||
|
||||
|
||||
CCFLAGS_LOC_OSE= -I/vobs/cello/cls/rtosi_if/include.sparc
|
||||
|
||||
|
||||
CCFLAGS = $(CCFLAGS_LOC_OSE) $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
CFLAGS = $(CCFLAGS_LOC_OSE) $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
|
||||
LDLIBS_LOC = -L$(NDB_TOP)/lib -L$(OSE_LOC)/sfk-solaris2/lib -L$(OSE_LOC)/sfk-solaris2/krn-solaris2/lib
|
||||
|
||||
LDLIBS_TOP =
|
||||
|
||||
LDFLAGS = $(LDFLAGS_LOC) $(LDFLAGS_TOP)
|
||||
|
||||
LDLIBS = $(LDLIBS_LOC) $(LDLIBS_TOP)
|
||||
|
||||
LINK.cc = $(PURE) $(C++) $(CCFLAGS) $(LDFLAGS)
|
||||
|
||||
LINK.c = $(PURE) $(CC) $(LDFLAGS)
|
||||
|
||||
|
||||
|
57
ndb/config/Defs.SOFTOSE.SPARC.GCC.mk
Normal file
|
@ -0,0 +1,57 @@
|
|||
###
|
||||
#
|
||||
# Defines
|
||||
SHELL := /bin/sh
|
||||
|
||||
C++ := g++
|
||||
CC := gcc
|
||||
AR_RCS := $(PURE) ar rcs
|
||||
SO := g++ -shared -o
|
||||
|
||||
MAKEDEPEND := g++ -M
|
||||
PIC := -fPIC
|
||||
|
||||
###
|
||||
#
|
||||
# Flags
|
||||
#
|
||||
NDB_STRDUP := Y
|
||||
CCFLAGS_WARNINGS = -Wno-long-long -Wall -pedantic -Wno-sign-compare -ansi
|
||||
CC_FLAGS_OSE = -DUSE_OSEDEF_H -DOSE_DELTA -DOS_DEBUG -DBIG_ENDIAN
|
||||
CCFLAGS_TOP = $(CC_FLAGS_OSE) $(CC_FLAGS_WARNINGS) -DNDB_STRDUP
|
||||
|
||||
ifeq (RELEASE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O3
|
||||
else
|
||||
ifeq (RELEASE_TRACE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O3 -g
|
||||
else
|
||||
VERSION_FLAGS += -g -DOS_DEBUG
|
||||
endif
|
||||
endif
|
||||
|
||||
OSE_LOC = /opt/as/OSE/OSE4.3.1
|
||||
|
||||
CCFLAGS_LOC_OSESTD = -I$(OSE_LOC)/sfk-solaris2/std-include
|
||||
CCFLAGS_LOC_OSE = -I$(OSE_LOC)/sfk-solaris2/include -I$(OSE_LOC)/sfk-solaris2/krn-solaris2/include -I$(NDB_TOP)/src/env/softose
|
||||
|
||||
|
||||
CCFLAGS = $(CCFLAGS_LOC_OSE) $(CCFLAGS_LOC_OSESTD) $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
CFLAGS = $(CCFLAGS_LOC_OSE) $(CCFLAGS_LOC_OSESTD) $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
|
||||
LDLIBS_LOC = -L$(NDB_TOP)/lib -L$(OSE_LOC)/sfk-solaris2/lib -L$(OSE_LOC)/sfk-solaris2/krn-solaris2/lib
|
||||
|
||||
LDLIBS_TOP =
|
||||
|
||||
LDLIBS_LAST = -lsoftose_env -lsoftose_krn -llnh -lefs -lshell -lfss -ltosv -lrtc -lheap -linetutil -linetapi -lsoftose -lsoftose_env -lsoftose_krn -losepthread -lrtc -lnsl -lsocket -lpthread -lcrt -lm
|
||||
|
||||
LDFLAGS = $(LDFLAGS_LOC) $(LDFLAGS_TOP)
|
||||
|
||||
LDLIBS = $(LDLIBS_LOC) $(LDLIBS_TOP)
|
||||
|
||||
LINK.cc = $(PURE) $(C++) $(CCFLAGS) $(LDFLAGS)
|
||||
|
||||
LINK.c = $(PURE) $(CC) $(LDFLAGS)
|
||||
|
||||
|
||||
|
54
ndb/config/Defs.SOLARIS.SPARC.FORTE6.mk
Normal file
|
@ -0,0 +1,54 @@
|
|||
###
|
||||
#
|
||||
# Defines
|
||||
SHELL := /bin/sh
|
||||
|
||||
C++ := CC
|
||||
CC := /opt/as/forte6/SUNWspro/bin/cc
|
||||
AR_RCS := $(PURE) CC -xar -o
|
||||
SO := CC -G -z text -o
|
||||
|
||||
MAKEDEPEND := CC -xM1
|
||||
PIC := -KPIC
|
||||
ETAGS := etags
|
||||
CTAGS := ctags
|
||||
|
||||
RPCGENFLAGS := -MA -C -N
|
||||
|
||||
###
|
||||
#
|
||||
# Flags
|
||||
|
||||
CCFLAGS_TOP = -mt -DSOLARIS -D_REENTRANT -D_POSIX_PTHREAD_SEMANTICS
|
||||
|
||||
ifneq ($(PURE),)
|
||||
CCFLAGS_TOP += -xs
|
||||
CCFLAGS_TOP += -DNDB_PURIFY
|
||||
endif
|
||||
|
||||
ifeq (RELEASE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -xO3
|
||||
else
|
||||
ifeq (RELEASE_TRACE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -xO3 -g
|
||||
else
|
||||
VERSION_FLAGS += -g
|
||||
endif
|
||||
endif
|
||||
|
||||
CCFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS)
|
||||
CFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS)
|
||||
|
||||
LDFLAGS_TOP = -L/opt/as/forte6/SUNWspro/WS6/lib -lpthread -lsocket -lnsl -lrt
|
||||
|
||||
LDFLAGS = $(LDFLAGS_LOC) $(LDFLAGS_TOP)
|
||||
|
||||
LDLIBS = $(LDLIBS_LOC) $(LDLIBS_TOP)
|
||||
|
||||
LINK.cc = $(PURE) $(C++) -xildoff $(CCFLAGS) $(LDFLAGS)
|
||||
|
||||
LINK.c = $(PURE) $(CC) $(CFLAGS) $(LDFLAGS)
|
||||
|
||||
|
||||
|
||||
|
54
ndb/config/Defs.SOLARIS.SPARC.GCC.mk
Normal file
|
@ -0,0 +1,54 @@
|
|||
###
|
||||
#
|
||||
# Defines
|
||||
SHELL := /bin/sh
|
||||
|
||||
CXX := gcc
|
||||
C++ := g++
|
||||
CC := gcc
|
||||
AR_RCS := ar rcs
|
||||
SO := gcc -G -o
|
||||
|
||||
#GXX_VERSION := $(shell gcc --version | sed -e 's,.*\([0-9][0-9]*\.[0-9][0-9]*\.[0-9][0-9]*\).*,\1,1' -e q)
|
||||
|
||||
MAKEDEPEND := g++ -M
|
||||
PIC := -fPIC
|
||||
|
||||
RPCGENFLAGS := -MA -C -N
|
||||
ETAGS := etags
|
||||
CTAGS := ctags
|
||||
|
||||
###
|
||||
#
|
||||
# Flags
|
||||
#
|
||||
CCFLAGS_WARNINGS = -Wno-long-long -W -Wall -pedantic
|
||||
# -Wno-sign-compare Use this flag if you are annoyed with all the warnings
|
||||
CCFLAGS_TOP = -DSOLARIS -D_REENTRANT -D_POSIX_PTHREAD_SEMANTICS -DNO_COMMAND_HANDLER
|
||||
CCFLAGS_TOP += -fno-rtti
|
||||
|
||||
ifeq (RELEASE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O2
|
||||
else
|
||||
ifeq (RELEASE_TRACE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O2 -g
|
||||
else
|
||||
VERSION_FLAGS += -g
|
||||
endif
|
||||
endif
|
||||
|
||||
CCFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
CFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
|
||||
LDFLAGS_TOP =
|
||||
|
||||
LDFLAGS = $(LDFLAGS_LOC) $(LDFLAGS_TOP)
|
||||
|
||||
LDLIBS = $(LDLIBS_LOC) $(LDLIBS_TOP)
|
||||
|
||||
LINK.cc = $(PURE) $(CXX) $(CCFLAGS) $(LDFLAGS)
|
||||
|
||||
LINK.c = $(PURE) $(CC) $(CFLAGS) $(LDFLAGS)
|
||||
|
||||
LDFLAGS_LAST = -lpthread -lsocket -lnsl -lrt -Wl,-Bstatic -lstdc++ -Wl,-Bdynamic
|
||||
|
53
ndb/config/Defs.SOLARIS.SPARC_64.GCC.mk
Normal file
|
@ -0,0 +1,53 @@
|
|||
###
|
||||
#
|
||||
# Note: LD_LIBRARY_PATH must be set for /usr/local/lib/sparcv9 to dynamically link
|
||||
# to 64-bit libraries
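# A minimal sh sketch of what the note above asks for (the library path is the
# one named in the note; the shell syntax is an assumption, adjust to taste):
#   LD_LIBRARY_PATH=/usr/local/lib/sparcv9${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}
#   export LD_LIBRARY_PATH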
|
||||
#
|
||||
# Defines
|
||||
SHELL := /bin/sh
|
||||
|
||||
C++ := g++ -m64
|
||||
CC := gcc -m64
|
||||
AR_RCS := ar rcs
|
||||
SO := g++ -m64 -shared -o
|
||||
|
||||
MAKEDEPEND := g++ -M
|
||||
PIC := -fPIC
|
||||
|
||||
RPCGENFLAGS := -MA -C -N
|
||||
ETAGS := etags
|
||||
CTAGS := ctags
|
||||
|
||||
###
|
||||
#
|
||||
# Flags
|
||||
#
|
||||
CCFLAGS_WARNINGS = -Wno-long-long -W -Wall -pedantic
|
||||
# -Wno-sign-compare Use this flag if you are annoyed with all the warnings
|
||||
CCFLAGS_TOP = -DSOLARIS -D_REENTRANT -D_POSIX_PTHREAD_SEMANTICS -DNO_COMMAND_HANDLER
|
||||
CCFLAGS_TOP += -fno-rtti
|
||||
|
||||
ifeq (RELEASE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O2
|
||||
else
|
||||
ifeq (RELEASE_TRACE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O2 -g
|
||||
else
|
||||
VERSION_FLAGS += -g
|
||||
endif
|
||||
endif
|
||||
|
||||
CCFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
CFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
|
||||
LDFLAGS_TOP = -lpthread -lsocket -lnsl -lrt
|
||||
|
||||
LDFLAGS = $(LDFLAGS_LOC) $(LDFLAGS_TOP)
|
||||
|
||||
LDLIBS = $(LDLIBS_LOC) $(LDLIBS_TOP)
|
||||
|
||||
LINK.cc = $(PURE) $(C++) $(CCFLAGS) $(LDFLAGS)
|
||||
|
||||
LINK.c = $(PURE) $(CC) $(CFLAGS) $(LDFLAGS)
|
||||
|
||||
|
53
ndb/config/Defs.SOLARIS6.SPARC.GCC.mk
Normal file
|
@ -0,0 +1,53 @@
|
|||
###
|
||||
#
|
||||
# Defines
|
||||
SHELL := /bin/sh
|
||||
|
||||
C++ := g++
|
||||
CC := gcc
|
||||
AR_RCS := $(PURE) ar rcs
|
||||
SO := g++ -shared -o
|
||||
|
||||
MAKEDEPEND := g++ -M
|
||||
PIC := -fPIC
|
||||
|
||||
RPCGENFLAGS := -MA -C -N
|
||||
|
||||
###
|
||||
#
|
||||
# Flags
|
||||
#
|
||||
CCFLAGS_WARNINGS = -Wno-long-long -Wall -pedantic
|
||||
# -Wno-sign-compare Use this flag if you are annoyed with all the warnings
|
||||
CCFLAGS_TOP = -DSOLARIS -D_REENTRANT -D_POSIX_PTHREAD_SEMANTICS -DNO_COMMAND_HANDLER
|
||||
|
||||
# SOLARIS 6 should use the same settings as SOLARIS7
|
||||
# if something in the SOLARIS 7 port does not work for SOLARIS 6
|
||||
# it can be ifdefed using
|
||||
# if ! defined NDB_SOLARIS6
|
||||
CCFLAGS_TOP += -DNDB_SOLARIS
|
||||
|
||||
ifeq (RELEASE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O3
|
||||
else
|
||||
ifeq (RELEASE_TRACE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O3
|
||||
else
|
||||
VERSION_FLAGS += -g
|
||||
endif
|
||||
endif
|
||||
|
||||
CCFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
CFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
|
||||
LDFLAGS_TOP = -lpthread -lsocket -lnsl -lposix4
|
||||
|
||||
LDFLAGS = $(LDFLAGS_LOC) $(LDFLAGS_TOP)
|
||||
|
||||
LDLIBS = $(LDLIBS_LOC) $(LDLIBS_TOP)
|
||||
|
||||
LINK.cc = $(PURE) $(C++) $(CCFLAGS) $(LDFLAGS)
|
||||
|
||||
LINK.c = $(PURE) $(CC) $(CFLAGS) $(LDFLAGS)
|
||||
|
||||
|
49
ndb/config/Defs.TRU64X.ALPHA.GCC.mk
Normal file
|
@ -0,0 +1,49 @@
|
|||
###
|
||||
#
|
||||
# Defines
|
||||
SHELL := /bin/sh
|
||||
|
||||
C++ := g++
|
||||
CC := gcc
|
||||
AR_RCS := $(PURE) ar rcs
|
||||
SO := g++ -shared -o
|
||||
|
||||
MAKEDEPEND := g++ -M
|
||||
PIC := -fPIC
|
||||
|
||||
RPCGENFLAGS := -M -C -N
|
||||
|
||||
ETAGS := etags
|
||||
CTAGS := ctags
|
||||
|
||||
###
|
||||
#
|
||||
# Flags
|
||||
#
|
||||
CCFLAGS_WARNINGS = -Wno-long-long -Wall #-pedantic
|
||||
# Add these for more warnings -Weffc++ -W
|
||||
CCFLAGS_TOP = -D_REENTRANT -D_POSIX_PTHREAD_SEMANTICS
|
||||
CCFLAGS_TOP += -fno-rtti
|
||||
|
||||
ifeq (RELEASE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O3
|
||||
else
|
||||
ifeq (RELEASE_TRACE, $(NDB_VERSION))
|
||||
VERSION_FLAGS += -O3 -g
|
||||
else
|
||||
VERSION_FLAGS += -g
|
||||
endif
|
||||
endif
|
||||
|
||||
CCFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
CFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
|
||||
LDFLAGS_TOP = -lpthread -lrt
|
||||
|
||||
LDFLAGS = $(LDFLAGS_LOC) $(LDFLAGS_TOP)
|
||||
|
||||
LDLIBS = $(LDLIBS_LOC) $(LDLIBS_TOP)
|
||||
|
||||
LINK.cc = $(PURE) $(C++) $(CCFLAGS) $(LDFLAGS)
|
||||
|
||||
LINK.c = $(PURE) $(CC) $(CFLAGS) $(LDFLAGS)
|
61
ndb/config/Defs.WIN32.x86.VC7.mk
Normal file
|
@ -0,0 +1,61 @@
|
|||
###
|
||||
#
|
||||
# Defines
|
||||
SHELL := /bin/sh
|
||||
|
||||
|
||||
DEFINES = -D_WIN32 -D_M_IX86=600 -D_MSC_EXTENSIONS=0 -U_cdecl -D_MT
|
||||
#
|
||||
MAKEDEPEND = g++ -M --nostdinc --nostdinc++ -I"`cygpath -u "$(MSVCDIR)\include"`" -I"`cygpath -u "$(MSVCDIR)\PlatformSDK\include"`" $(DEFINES)
|
||||
PIC = -D_LIB
|
||||
NON_PIC = -D_LIB
|
||||
|
||||
RPCGENFLAGS := -M -C -N
|
||||
|
||||
ETAGS := etags
|
||||
CTAGS := ctags
|
||||
|
||||
###
|
||||
#
|
||||
# Flags
|
||||
#
|
||||
CCFLAGS_WARNINGS =
|
||||
CCFLAGS_TOP =
|
||||
CCFLAGS_LOC =
|
||||
CCFLAGS_WIN = -DWIN32 -D_WIN32_WINNT=0x0500 -DWINVER=0x0500 -D_MBCS -DNO_COMMAND_HANDLER
|
||||
CCFLAGS_WIN += -W3 -EHsc
|
||||
#CCFLAGS_WIN += -clr
|
||||
|
||||
ifeq (RELEASE, $(NDB_VERSION))
|
||||
CCFLAGS_WIN += -MT -O2 -Ob1 -DNO_DEBUG_MESSAGES
|
||||
else
|
||||
ifeq (RELEASE_TRACE, $(NDB_VERSION))
|
||||
CCFLAGS_WIN += -MT -O2 -Ob1 -DNO_DEBUG_MESSAGES
|
||||
else
|
||||
CCFLAGS_WIN += -MTd -Zi -Od -GS -D_DEBUG
|
||||
endif
|
||||
endif
|
||||
|
||||
C++ = cl -nologo $(CCFLAGS_WIN)
|
||||
CC = cl -nologo $(CCFLAGS_WIN)
|
||||
|
||||
CCFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
CFLAGS = $(CCFLAGS_LOC) $(CCFLAGS_TOP) $(USER_FLAGS) $(VERSION_FLAGS) $(CCFLAGS_WARNINGS)
|
||||
|
||||
LDFLAGS_TOP =
|
||||
|
||||
LDFLAGS = $(LDFLAGS_LOC) $(LDFLAGS_TOP)
|
||||
|
||||
LDLIBS = $(LDLIBS_LOC) $(LDLIBS_TOP)
|
||||
|
||||
WIN_LIBS := Ws2_32.lib Advapi32.lib
|
||||
|
||||
ifeq (RELEASE, $(NDB_VERSION))
|
||||
LINK.cc = link -INCREMENTAL:NO -NOLOGO -LARGEADDRESSAWARE $(WIN_LIBS)
|
||||
else
|
||||
ifeq (RELEASE_TRACE, $(NDB_VERSION))
|
||||
LINK.cc = link -INCREMENTAL:NO -NOLOGO -LARGEADDRESSAWARE $(WIN_LIBS)
|
||||
else
|
||||
LINK.cc = link -INCREMENTAL -NOLOGO -DEBUG -LARGEADDRESSAWARE $(WIN_LIBS)
|
||||
endif
|
||||
endif
|
113
ndb/config/GuessConfig.sh
Executable file
|
@ -0,0 +1,113 @@
|
|||
#! /bin/sh
|
||||
|
||||
if [ -z "$NDB_TOP" ]
|
||||
then
|
||||
echo "You have not set NDB_TOP. Exiting" 1>&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$NDB_SCI" ]
|
||||
then
|
||||
NDB_SCI=N
|
||||
fi
|
||||
|
||||
os=`uname -s`
|
||||
case $os in
|
||||
Linux)
|
||||
NDB_OS=LINUX
|
||||
NDB_ARCH=x86
|
||||
NDB_COMPILER=GCC
|
||||
;;
|
||||
Darwin)
|
||||
NDB_OS=MACOSX
|
||||
NDB_ARCH=POWERPC
|
||||
NDB_COMPILER=GCC
|
||||
;;
|
||||
HP-UX)
|
||||
NDB_OS=HPUX
|
||||
NDB_ARCH=HPPA
|
||||
NDB_COMPILER=GCC
|
||||
;;
|
||||
CYGWIN_NT-5.0)
|
||||
NDB_OS=WIN32
|
||||
NDB_ARCH=x86
|
||||
NDB_COMPILER=VC7
|
||||
;;
|
||||
*)
|
||||
if [ "$os" = "SunOS" ] && [ `uname -r` = "5.6" ]
|
||||
then
|
||||
NDB_OS=OSE
|
||||
NDB_ARCH=PPC750
|
||||
NDB_COMPILER=DIAB
|
||||
else
|
||||
NDB_OS=SOLARIS
|
||||
NDB_ARCH=SPARC
|
||||
NDB_COMPILER=GCC
|
||||
fi;;
|
||||
esac
|
||||
|
||||
if [ -z "$NDB_ODBC" ]
|
||||
then
|
||||
val=N
|
||||
if [ -f /usr/include/sqlext.h -o -f /usr/local/include/sqlext.h ]
|
||||
then
|
||||
val=Y
|
||||
fi
|
||||
case $NDB_OS in
|
||||
LINUX)
|
||||
NDB_ODBC=$val
|
||||
;;
|
||||
MACOSX)
|
||||
NDB_ODBC=$val
|
||||
;;
|
||||
*)
|
||||
NDB_ODBC=N
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
|
||||
mch=`uname -m`
|
||||
case $mch in
|
||||
x86_64)
|
||||
NDB_ARCH=x86_64
|
||||
;;
|
||||
*)
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -f $NDB_TOP/config/Makefile ]
|
||||
then
|
||||
TERMCAP_LIB=`grep TERMCAP_LIB $NDB_TOP/config/Makefile | sed -e s,"TERMCAP_LIB.*=.*-l","",g`
|
||||
fi
|
||||
if [ "$TERMCAP_LIB" = "" ]
|
||||
then
|
||||
TERMCAP_LIB=termcap
|
||||
fi
|
||||
|
||||
# Allow for selecting GCC, but must be 2nd parameter
|
||||
if [ $# -gt 1 -a "$2" = "-GCC" ]
|
||||
then
|
||||
NDB_COMPILER=GCC
|
||||
fi
|
||||
|
||||
(
|
||||
echo "# This file was automatically generated `date`"
|
||||
echo "NDB_OS := $NDB_OS"
|
||||
echo "NDB_ARCH := $NDB_ARCH"
|
||||
echo "NDB_COMPILER := $NDB_COMPILER"
|
||||
|
||||
if [ $# -gt 0 -a "$1" = "-R" ]
|
||||
then
|
||||
echo "NDB_VERSION := RELEASE"
|
||||
else
|
||||
echo "NDB_VERSION := DEBUG"
|
||||
fi
|
||||
|
||||
echo "NDB_SCI := $NDB_SCI"
|
||||
echo "NDB_ODBC := $NDB_ODBC"
|
||||
echo "TERMCAP_LIB := $TERMCAP_LIB"
|
||||
) > $NDB_TOP/config/config.mk
|
||||
|
||||
exit 0
|
||||
|
31
ndb/config/Makefile.am
Normal file
|
@ -0,0 +1,31 @@
|
|||
# Copyright (C) 2003 MySQL AB
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
|
||||
# Process this file with automake to create Makefile.in
|
||||
|
||||
AUTOMAKE_OPTIONS = foreign
|
||||
|
||||
# These are built from source in the Docs directory
|
||||
EXTRA_DIST =
|
||||
SUBDIRS =
|
||||
|
||||
# Relink after clean
|
||||
linked_sources =
|
||||
|
||||
CLEANFILES = $(linked_sources)
|
||||
|
||||
# This is just so that the linking is done early.
|
||||
config.h:
|
1513
ndb/config/acinclude.m4
Normal file
File diff suppressed because it is too large
2085
ndb/config/configure.in
Normal file
File diff suppressed because it is too large
2
ndb/demos/1-node/1-api-3/Ndb.cfg
Normal file
|
@ -0,0 +1,2 @@
|
|||
OwnProcessId 3
|
||||
127.0.0.1 10000
|
2
ndb/demos/1-node/1-db-2/Ndb.cfg
Normal file
|
@ -0,0 +1,2 @@
|
|||
OwnProcessId 2
|
||||
127.0.0.1 10000
|
2
ndb/demos/1-node/1-mgm-1/Ndb.cfg
Normal file
|
@ -0,0 +1,2 @@
|
|||
OwnProcessId 1
|
||||
127.0.0.1 10000
|
70
ndb/demos/1-node/1-mgm-1/template_config.ini
Normal file
|
@ -0,0 +1,70 @@
|
|||
###############################################################################
|
||||
#
|
||||
# Initial system configuration file for MySQL Cluster v3.1.0 (Demo 1)
|
||||
#
|
||||
###############################################################################
|
||||
|
||||
[DB DEFAULT]
|
||||
NoOfReplicas: 1
|
||||
#LockPagesInMainMemory: Y
|
||||
StopOnError: Y
|
||||
#MaxNoOfConcurrentOperations: 1024
|
||||
#MaxNoOfConcurrentTransactions: 1024
|
||||
NoOfIndexPages: 1500
|
||||
NoOfDataPages: 5000
|
||||
#TimeBetweenLocalCheckpoints: 20
|
||||
#TimeBetweenGlobalCheckpoints: 1500
|
||||
#NoOfFragmentLogFiles: 8
|
||||
BackupMemory: 4M
|
||||
BackupDataBufferSize: 2M
|
||||
BackupLogBufferSize: 2M
|
||||
BackupWriteSize: 32k
|
||||
|
||||
[COMPUTER]
|
||||
Id: 1
|
||||
ByteOrder: Little
|
||||
HostName: localhost
|
||||
|
||||
[MGM]
|
||||
Id: 1
|
||||
ExecuteOnComputer: 1
|
||||
PortNumber: 10000
|
||||
PortNumberStats: 10001
|
||||
|
||||
|
||||
[DB]
|
||||
Id: 2
|
||||
ExecuteOnComputer: 1
|
||||
FileSystemPath: WRITE_PATH_TO_FILESYSTEM_2_HERE
|
||||
|
||||
[API]
|
||||
Id: 3
|
||||
ExecuteOnComputer: 1
|
||||
|
||||
# Designated MySQL Server API node id
|
||||
[API]
|
||||
Id: 11
|
||||
ExecuteOnComputer: 1
|
||||
|
||||
[TCP DEFAULT]
|
||||
SendSignalId: N
|
||||
Compression: N
|
||||
Checksum: N
|
||||
SendBufferSize: 2000
|
||||
MaxReceiveSize: 2000
|
||||
|
||||
[TCP]
|
||||
NodeId1: 1
|
||||
NodeId2: 2
|
||||
PortNumber: 10002
|
||||
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: 3
|
||||
PortNumber: 10003
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: 11
|
||||
PortNumber: 10011
|
2
ndb/demos/2-node/2-api-4/Ndb.cfg
Normal file
|
@ -0,0 +1,2 @@
|
|||
OwnProcessId 4
|
||||
127.0.0.1 10000
|
2
ndb/demos/2-node/2-api-5/Ndb.cfg
Normal file
|
@ -0,0 +1,2 @@
|
|||
OwnProcessId 5
|
||||
127.0.0.1 10000
|
2
ndb/demos/2-node/2-api-6/Ndb.cfg
Normal file
|
@ -0,0 +1,2 @@
|
|||
OwnProcessId 6
|
||||
127.0.0.1 10000
|
2
ndb/demos/2-node/2-api-7/Ndb.cfg
Normal file
|
@ -0,0 +1,2 @@
|
|||
OwnProcessId 7
|
||||
127.0.0.1 10000
|
2
ndb/demos/2-node/2-db-2/Ndb.cfg
Normal file
|
@ -0,0 +1,2 @@
|
|||
OwnProcessId 2
|
||||
127.0.0.1 10000
|
2
ndb/demos/2-node/2-db-3/Ndb.cfg
Normal file
|
@ -0,0 +1,2 @@
|
|||
OwnProcessId 3
|
||||
127.0.0.1 10000
|
2
ndb/demos/2-node/2-mgm-1/Ndb.cfg
Normal file
|
@ -0,0 +1,2 @@
|
|||
OwnProcessId 1
|
||||
127.0.0.1 10000
|
157
ndb/demos/2-node/2-mgm-1/template_config.ini
Normal file
|
@ -0,0 +1,157 @@
|
|||
###############################################################################
|
||||
#
|
||||
# Initial system configuration file for MySQL Cluster v3.1.0 (Demo 2)
|
||||
#
|
||||
###############################################################################
|
||||
|
||||
[COMPUTER]
|
||||
Id: 1
|
||||
ByteOrder: Little
|
||||
HostName: localhost
|
||||
|
||||
[COMPUTER]
|
||||
Id: 2
|
||||
ByteOrder: Little
|
||||
HostName: localhost
|
||||
|
||||
[MGM]
|
||||
Id: 1
|
||||
ExecuteOnComputer: 1
|
||||
PortNumber: 10000
|
||||
PortNumberStats: 10001
|
||||
ArbitrationRank: 1
|
||||
|
||||
[DB DEFAULT]
|
||||
NoOfReplicas: 2
|
||||
#LockPagesInMainMemory: N
|
||||
StopOnError: N
|
||||
#MaxNoOfConcurrentOperations: 1024
|
||||
#MaxNoOfConcurrentTransactions: 1024
|
||||
NoOfIndexPages: 200
|
||||
NoOfDataPages: 600
|
||||
#TimeBetweenLocalCheckpoints: 20
|
||||
#TimeBetweenGlobalCheckpoints: 1500
|
||||
#NoOfFragmentLogFiles: 8
|
||||
BackupMemory: 4M
|
||||
BackupDataBufferSize: 2M
|
||||
BackupLogBufferSize: 2M
|
||||
BackupWriteSize: 32k
|
||||
|
||||
[DB]
|
||||
Id: 2
|
||||
ExecuteOnComputer: 1
|
||||
FileSystemPath: WRITE_PATH_TO_FILESYSTEM_2_HERE
|
||||
|
||||
[DB]
|
||||
Id: 3
|
||||
ExecuteOnComputer: 2
|
||||
FileSystemPath: WRITE_PATH_TO_FILESYSTEM_3_HERE
|
||||
|
||||
[API DEFAULT]
|
||||
ArbitrationRank: 1
|
||||
|
||||
[API]
|
||||
Id: 4
|
||||
ExecuteOnComputer: 1
|
||||
|
||||
[API]
|
||||
Id: 5
|
||||
ExecuteOnComputer: 1
|
||||
|
||||
[API]
|
||||
Id: 6
|
||||
ExecuteOnComputer: 2
|
||||
|
||||
[API]
|
||||
Id: 7
|
||||
ExecuteOnComputer: 2
|
||||
|
||||
# Designated MySQL Server API node id
|
||||
[API]
|
||||
Id: 11
|
||||
ExecuteOnComputer: 1
|
||||
|
||||
# Designated MySQL Server API node id
|
||||
[API]
|
||||
Id: 12
|
||||
ExecuteOnComputer: 2
|
||||
|
||||
|
||||
[TCP]
|
||||
NodeId1: 1
|
||||
NodeId2: 2
|
||||
PortNumber: 10002
|
||||
|
||||
[TCP]
|
||||
NodeId1: 1
|
||||
NodeId2: 3
|
||||
PortNumber: 10003
|
||||
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: 3
|
||||
PortNumber: 10004
|
||||
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: 4
|
||||
PortNumber: 10005
|
||||
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: 5
|
||||
PortNumber: 10006
|
||||
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: 6
|
||||
PortNumber: 10007
|
||||
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: 7
|
||||
PortNumber: 10008
|
||||
|
||||
[TCP]
|
||||
NodeId1: 3
|
||||
NodeId2: 4
|
||||
PortNumber: 10009
|
||||
|
||||
[TCP]
|
||||
NodeId1: 3
|
||||
NodeId2: 5
|
||||
PortNumber: 10010
|
||||
|
||||
[TCP]
|
||||
NodeId1: 3
|
||||
NodeId2: 6
|
||||
PortNumber: 10011
|
||||
|
||||
[TCP]
|
||||
NodeId1: 3
|
||||
NodeId2: 7
|
||||
PortNumber: 10012
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: 11
|
||||
PortNumber: 10013
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 3
|
||||
NodeId2: 11
|
||||
PortNumber: 10014
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: 12
|
||||
PortNumber: 10015
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 3
|
||||
NodeId2: 12
|
||||
PortNumber: 10016
|
87
ndb/demos/config-templates/config_template-1-REP.ini
Normal file
|
@ -0,0 +1,87 @@
|
|||
###############################################################################
|
||||
#
|
||||
# Initial system configuration file for MySQL Cluster v3.1.0 (Demo 1)
|
||||
#
|
||||
###############################################################################
|
||||
|
||||
[DB DEFAULT]
|
||||
NoOfReplicas: 1
|
||||
StopOnError: Y
|
||||
NoOfIndexPages: 1500
|
||||
NoOfDataPages: 5000
|
||||
BackupMemory: 4M
|
||||
BackupDataBufferSize: 2M
|
||||
BackupLogBufferSize: 2M
|
||||
BackupWriteSize: 32k
|
||||
|
||||
[COMPUTER]
|
||||
Id: 1
|
||||
ByteOrder: Little
|
||||
HostName: CHOOSE_HOSTNAME
|
||||
|
||||
[EXTERNAL SYSTEM]
|
||||
Name: External
|
||||
|
||||
[MGM]
|
||||
Id: 1
|
||||
ExecuteOnComputer: 1
|
||||
PortNumber: CHOOSE_PORT_BASE00
|
||||
PortNumberStats: CHOOSE_PORT_BASE01
|
||||
|
||||
|
||||
[DB]
|
||||
Id: 2
|
||||
ExecuteOnComputer: 1
|
||||
FileSystemPath: WRITE_PATH_TO_FILESYSTEM_2_HERE
|
||||
|
||||
[API]
|
||||
Id: 3
|
||||
ExecuteOnComputer: 1
|
||||
|
||||
[REP]
|
||||
Id: CHOOSE_REP_ID
|
||||
ExecuteOnComputer: 1
|
||||
|
||||
[EXTERNAL REP]
|
||||
Id: CHOOSE_EXTREP_ID
|
||||
System: External
|
||||
|
||||
# Designated MySQL Server API node id
|
||||
[API]
|
||||
Id: 11
|
||||
ExecuteOnComputer: 1
|
||||
|
||||
[TCP DEFAULT]
|
||||
SendSignalId: N
|
||||
Compression: N
|
||||
Checksum: N
|
||||
SendBufferSize: 2000
|
||||
MaxReceiveSize: 2000
|
||||
|
||||
[TCP]
|
||||
NodeId1: 1
|
||||
NodeId2: 2
|
||||
PortNumber: CHOOSE_PORT_BASE02
|
||||
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: 3
|
||||
PortNumber: CHOOSE_PORT_BASE03
|
||||
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: CHOOSE_REP_ID
|
||||
PortNumber: CHOOSE_PORT_BASE04
|
||||
|
||||
[TCP]
|
||||
Hostname1: CHOOSE_HOSTNAME
|
||||
Hostname2: CHOOSE_EXTHOSTNAME
|
||||
NodeId1: CHOOSE_REP_ID
|
||||
NodeId2: External.CHOOSE_EXTREP_ID
|
||||
PortNumber: 10099
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: 11
|
||||
PortNumber: CHOOSE_PORT_BASE11
|
336
ndb/demos/config-templates/config_template-4.ini
Normal file
|
@ -0,0 +1,336 @@
|
|||
###############################################################################
|
||||
#
|
||||
# 4-node system configuration file for MySQL Cluster
|
||||
#
|
||||
###############################################################################
|
||||
|
||||
[DB DEFAULT]
|
||||
NoOfReplicas: 1
|
||||
StopOnError: N
|
||||
NoOfIndexPages: 1500
|
||||
NoOfDataPages: 5000
|
||||
BackupMemory: 4M
|
||||
BackupDataBufferSize: 2M
|
||||
BackupLogBufferSize: 2M
|
||||
BackupWriteSize: 32k
|
||||
|
||||
[COMPUTER]
|
||||
Id: 1
|
||||
ByteOrder: Little
|
||||
HostName: CHOOSE_HOSTNAME_1
|
||||
|
||||
[COMPUTER]
|
||||
Id: 2
|
||||
ByteOrder: Little
|
||||
HostName: CHOOSE_HOSTNAME_2
|
||||
|
||||
[COMPUTER]
|
||||
Id: 3
|
||||
ByteOrder: Little
|
||||
HostName: CHOOSE_HOSTNAME_3
|
||||
|
||||
[COMPUTER]
|
||||
Id: 4
|
||||
ByteOrder: Little
|
||||
HostName: CHOOSE_HOSTNAME_4
|
||||
|
||||
[MGM]
|
||||
Id: 1
|
||||
ExecuteOnComputer: 1
|
||||
PortNumber: CHOOSE_PORT_BASE00
|
||||
PortNumberStats: CHOOSE_PORT_BASE01
|
||||
|
||||
[DB]
|
||||
Id: 2
|
||||
ExecuteOnComputer: 1
|
||||
FileSystemPath: WRITE_PATH_TO_FILESYSTEM_1_HERE
|
||||
|
||||
[DB]
|
||||
Id: 3
|
||||
ExecuteOnComputer: 2
|
||||
FileSystemPath: WRITE_PATH_TO_FILESYSTEM_2_HERE
|
||||
|
||||
[DB]
|
||||
Id: 4
|
||||
ExecuteOnComputer: 3
|
||||
FileSystemPath: WRITE_PATH_TO_FILESYSTEM_3_HERE
|
||||
|
||||
[DB]
|
||||
Id: 5
|
||||
ExecuteOnComputer: 4
|
||||
FileSystemPath: WRITE_PATH_TO_FILESYSTEM_4_HERE
|
||||
|
||||
[API]
|
||||
Id: 6
|
||||
ExecuteOnComputer: 1
|
||||
|
||||
[API]
|
||||
Id: 7
|
||||
ExecuteOnComputer: 2
|
||||
|
||||
[API]
|
||||
Id: 8
|
||||
ExecuteOnComputer: 3
|
||||
|
||||
[API]
|
||||
Id: 9
|
||||
ExecuteOnComputer: 4
|
||||
|
||||
# Designated MySQL Server API node id
|
||||
[API]
|
||||
Id: 11
|
||||
ExecuteOnComputer: 1
|
||||
|
||||
# Designated MySQL Server API node id
|
||||
[API]
|
||||
Id: 12
|
||||
ExecuteOnComputer: 2
|
||||
|
||||
# Designated MySQL Server API node id
|
||||
[API]
|
||||
Id: 13
|
||||
ExecuteOnComputer: 3
|
||||
|
||||
# Designated MySQL Server API node id
|
||||
[API]
|
||||
Id: 14
|
||||
ExecuteOnComputer: 4
|
||||
|
||||
[TCP DEFAULT]
|
||||
SendSignalId: N
|
||||
Compression: N
|
||||
Checksum: N
|
||||
SendBufferSize: 2000
|
||||
MaxReceiveSize: 2000
|
||||
|
||||
# Management server
|
||||
[TCP]
|
||||
NodeId1: 1
|
||||
NodeId2: 2
|
||||
PortNumber: CHOOSE_PORT_BASE02
|
||||
|
||||
[TCP]
|
||||
NodeId1: 1
|
||||
NodeId2: 3
|
||||
PortNumber: CHOOSE_PORT_BASE03
|
||||
|
||||
[TCP]
|
||||
NodeId1: 1
|
||||
NodeId2: 4
|
||||
PortNumber: CHOOSE_PORT_BASE04
|
||||
|
||||
[TCP]
|
||||
NodeId1: 1
|
||||
NodeId2: 5
|
||||
PortNumber: CHOOSE_PORT_BASE05
|
||||
|
||||
# Database cluster
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: 3
|
||||
PortNumber: CHOOSE_PORT_BASE06
|
||||
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: 4
|
||||
PortNumber: CHOOSE_PORT_BASE07
|
||||
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: 5
|
||||
PortNumber: CHOOSE_PORT_BASE08
|
||||
|
||||
[TCP]
|
||||
NodeId1: 3
|
||||
NodeId2: 4
|
||||
PortNumber: CHOOSE_PORT_BASE09
|
||||
|
||||
[TCP]
|
||||
NodeId1: 3
|
||||
NodeId2: 5
|
||||
PortNumber: CHOOSE_PORT_BASE10
|
||||
|
||||
[TCP]
|
||||
NodeId1: 4
|
||||
NodeId2: 5
|
||||
PortNumber: CHOOSE_PORT_BASE11
|
||||
|
||||
# API node 6
|
||||
[TCP]
|
||||
NodeId1: 6
|
||||
NodeId2: 2
|
||||
PortNumber: CHOOSE_PORT_BASE12
|
||||
|
||||
[TCP]
|
||||
NodeId1: 6
|
||||
NodeId2: 3
|
||||
PortNumber: CHOOSE_PORT_BASE13
|
||||
|
||||
[TCP]
|
||||
NodeId1: 6
|
||||
NodeId2: 4
|
||||
PortNumber: CHOOSE_PORT_BASE14
|
||||
|
||||
[TCP]
|
||||
NodeId1: 6
|
||||
NodeId2: 5
|
||||
PortNumber: CHOOSE_PORT_BASE15
|
||||
|
||||
# API node 7
|
||||
[TCP]
|
||||
NodeId1: 7
|
||||
NodeId2: 2
|
||||
PortNumber: CHOOSE_PORT_BASE16
|
||||
|
||||
[TCP]
|
||||
NodeId1: 7
|
||||
NodeId2: 3
|
||||
PortNumber: CHOOSE_PORT_BASE17
|
||||
|
||||
[TCP]
|
||||
NodeId1: 7
|
||||
NodeId2: 4
|
||||
PortNumber: CHOOSE_PORT_BASE18
|
||||
|
||||
[TCP]
|
||||
NodeId1: 7
|
||||
NodeId2: 5
|
||||
PortNumber: CHOOSE_PORT_BASE19
|
||||
|
||||
# API node 8
|
||||
[TCP]
|
||||
NodeId1: 8
|
||||
NodeId2: 2
|
||||
PortNumber: CHOOSE_PORT_BASE20
|
||||
|
||||
[TCP]
|
||||
NodeId1: 8
|
||||
NodeId2: 3
|
||||
PortNumber: CHOOSE_PORT_BASE21
|
||||
|
||||
[TCP]
|
||||
NodeId1: 8
|
||||
NodeId2: 4
|
||||
PortNumber: CHOOSE_PORT_BASE22
|
||||
|
||||
[TCP]
|
||||
NodeId1: 8
|
||||
NodeId2: 5
|
||||
PortNumber: CHOOSE_PORT_BASE23
|
||||
|
||||
# API node 9
|
||||
[TCP]
|
||||
NodeId1: 9
|
||||
NodeId2: 2
|
||||
PortNumber: CHOOSE_PORT_BASE24
|
||||
|
||||
[TCP]
|
||||
NodeId1: 9
|
||||
NodeId2: 3
|
||||
PortNumber: CHOOSE_PORT_BASE25
|
||||
|
||||
[TCP]
|
||||
NodeId1: 9
|
||||
NodeId2: 4
|
||||
PortNumber: CHOOSE_PORT_BASE26
|
||||
|
||||
[TCP]
|
||||
NodeId1: 9
|
||||
NodeId2: 5
|
||||
PortNumber: CHOOSE_PORT_BASE27
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: 11
|
||||
PortNumber: CHOOSE_PORT_BASE28
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 3
|
||||
NodeId2: 11
|
||||
PortNumber: CHOOSE_PORT_BASE29
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 4
|
||||
NodeId2: 11
|
||||
PortNumber: CHOOSE_PORT_BASE30
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 5
|
||||
NodeId2: 11
|
||||
PortNumber: CHOOSE_PORT_BASE31
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: 12
|
||||
PortNumber: CHOOSE_PORT_BASE32
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 3
|
||||
NodeId2: 12
|
||||
PortNumber: CHOOSE_PORT_BASE33
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 4
|
||||
NodeId2: 12
|
||||
PortNumber: CHOOSE_PORT_BASE34
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 5
|
||||
NodeId2: 12
|
||||
PortNumber: CHOOSE_PORT_BASE35
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: 13
|
||||
PortNumber: CHOOSE_PORT_BASE36
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 3
|
||||
NodeId2: 13
|
||||
PortNumber: CHOOSE_PORT_BASE37
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 4
|
||||
NodeId2: 13
|
||||
PortNumber: CHOOSE_PORT_BASE38
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 5
|
||||
NodeId2: 13
|
||||
PortNumber: CHOOSE_PORT_BASE39
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: 14
|
||||
PortNumber: CHOOSE_PORT_BASE40
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 3
|
||||
NodeId2: 14
|
||||
PortNumber: CHOOSE_PORT_BASE41
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 4
|
||||
NodeId2: 14
|
||||
PortNumber: CHOOSE_PORT_BASE42
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 5
|
||||
NodeId2: 14
|
||||
PortNumber: CHOOSE_PORT_BASE43
|
64
ndb/demos/config-templates/config_template-install.ini
Normal file
|
@ -0,0 +1,64 @@
|
|||
###############################################################################
|
||||
#
|
||||
# Initial system configuration file for MySQL Cluster v3.1.0 (Demo 1)
|
||||
#
|
||||
###############################################################################
|
||||
|
||||
[DB DEFAULT]
|
||||
NoOfReplicas: 1
|
||||
StopOnError: N
|
||||
NoOfIndexPages: 1500
|
||||
NoOfDataPages: 5000
|
||||
BackupMemory: 4M
|
||||
BackupDataBufferSize: 2M
|
||||
BackupLogBufferSize: 2M
|
||||
BackupWriteSize: 32k
|
||||
|
||||
[COMPUTER]
|
||||
Id: 1
|
||||
ByteOrder: Little
|
||||
HostName: localhost
|
||||
|
||||
[MGM]
|
||||
Id: 1
|
||||
ExecuteOnComputer: 1
|
||||
PortNumber: CHOOSE_PORT_BASE00
|
||||
PortNumberStats: CHOOSE_PORT_BASE01
|
||||
|
||||
|
||||
[DB]
|
||||
Id: 2
|
||||
ExecuteOnComputer: 1
|
||||
FileSystemPath: WRITE_PATH_TO_FILESYSTEM_2_HERE
|
||||
|
||||
[API]
|
||||
Id: 3
|
||||
ExecuteOnComputer: 1
|
||||
|
||||
# Designated MySQL Server API node id
|
||||
[API]
|
||||
Id: 11
|
||||
ExecuteOnComputer: 1
|
||||
|
||||
[TCP DEFAULT]
|
||||
SendSignalId: N
|
||||
Compression: N
|
||||
Checksum: N
|
||||
SendBufferSize: 2000
|
||||
MaxReceiveSize: 2000
|
||||
|
||||
[TCP]
|
||||
NodeId1: 1
|
||||
NodeId2: 2
|
||||
PortNumber: CHOOSE_PORT_BASE02
|
||||
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: 3
|
||||
PortNumber: CHOOSE_PORT_BASE03
|
||||
|
||||
# Designated MySQL Server API node connection
|
||||
[TCP]
|
||||
NodeId1: 2
|
||||
NodeId2: 11
|
||||
PortNumber: CHOOSE_PORT_BASE11
|
50
ndb/demos/run_demo1-PS-SS_common.sh
Normal file
|
@ -0,0 +1,50 @@
|
|||
echo $NDB_HOST $NDB_EXTHOST
|
||||
|
||||
NDB_PORT=$NDB_PORT_BASE"00"
|
||||
NDB_CONNECTSTRING_BASE="host=$NDB_HOST:$NDB_PORT;nodeid="
|
||||
|
||||
# Edit file system path
|
||||
|
||||
cd $NDB_DEMO
|
||||
sed -e s,"WRITE_PATH_TO_FILESYSTEM_2_HERE",$NDB_DEMO/filesystem,g \
|
||||
-e s,"CHOOSE_HOSTNAME",$NDB_HOST,g\
|
||||
-e s,"CHOOSE_EXTHOSTNAME",$NDB_EXTHOST,g\
|
||||
-e s,"CHOOSE_PORT_BASE",$NDB_PORT_BASE,g\
|
||||
-e s,"CHOOSE_REP_ID",$NDB_REP_ID,g\
|
||||
-e s,"CHOOSE_EXTREP_ID",$NDB_EXTREP_ID,g\
|
||||
< ../config-templates/config_template-1-REP.ini > config.ini
|
||||
|
||||
# Start management server as daemon
|
||||
|
||||
NDB_ID="1"
|
||||
NDB_CONNECTSTRING=$NDB_CONNECTSTRING_BASE$NDB_ID
|
||||
export NDB_CONNECTSTRING
|
||||
if mgmtsrvr -d -c config.ini ; then :; else
|
||||
echo "Unable to start mgmtsrvr"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Start database node
|
||||
|
||||
NDB_ID="2"
|
||||
NDB_CONNECTSTRING=$NDB_CONNECTSTRING_BASE$NDB_ID
|
||||
export NDB_CONNECTSTRING
|
||||
xterm -T "$NDB_DEMO_NAME DB Node $NDB_ID" -geometry 80x10 -xrm *.hold:true -e ndb -i &
|
||||
|
||||
# Start xterm for application programs
|
||||
|
||||
NDB_ID="3"
|
||||
NDB_CONNECTSTRING=$NDB_CONNECTSTRING_BASE$NDB_ID
|
||||
export NDB_CONNECTSTRING
|
||||
xterm -T "$NDB_DEMO_NAME API Node $NDB_ID" -geometry 80x10 &
|
||||
|
||||
# Start xterm for rep node
|
||||
|
||||
NDB_ID=$NDB_REP_ID
|
||||
NDB_CONNECTSTRING=$NDB_CONNECTSTRING_BASE$NDB_ID
|
||||
export NDB_CONNECTSTRING
|
||||
xterm -T "$NDB_DEMO_NAME REP Node $NDB_ID" -geometry 80x10 -xrm *.hold:true -e ndb_rep &
|
||||
|
||||
# Start management client
|
||||
|
||||
xterm -T "$NDB_DEMO_NAME Mgmt Client" -geometry 80x10 -xrm *.hold:true -e mgmtclient $NDB_HOST $NDB_PORT &
|
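For illustration, with the values run_demo1-PS.sh (below) passes in, NDB_HOST=localhost and NDB_PORT_BASE=102, NDB_PORT expands to 10200, so the DB node started above (NDB_ID=2) is handed the following connectstring:

  NDB_CONNECTSTRING="host=localhost:10200;nodeid=2"
  export NDB_CONNECTSTRING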
ndb/demos/run_demo1-PS.sh (new executable file, 30 lines)
@@ -0,0 +1,30 @@
#!/bin/sh
if [ -z "$MYSQLCLUSTER_TOP" ]; then
  echo "MYSQLCLUSTER_TOP not set"
  exit 1
fi
if [ -d "$MYSQLCLUSTER_TOP/ndb" ]; then :; else
  echo "$MYSQLCLUSTER_TOP/ndb directory does not exist"
  exit 1
fi
NDB_CONNECTSTRING=
NDB_HOME=
NDB_DEMO=$MYSQLCLUSTER_TOP/ndb/demos/1-node-PS

NDB_PORT_BASE="102"
NDB_REP_ID="5"
NDB_EXTREP_ID="4"

NDB_DEMO_NAME="Demo 1-PS MySQL Cluster"
NDB_HOST1=$1
NDB_HOST2=$2
if [ -z "$NDB_HOST1" ]; then
  NDB_HOST1=localhost
fi
if [ -z "$NDB_HOST2" ]; then
  NDB_HOST2=localhost
fi
NDB_HOST=$NDB_HOST1
NDB_EXTHOST=$NDB_HOST2

source $MYSQLCLUSTER_TOP/ndb/demos/run_demo1-PS-SS_common.sh
ndb/demos/run_demo1-SS.sh (new executable file, 30 lines)
@@ -0,0 +1,30 @@
#!/bin/sh
if [ -z "$MYSQLCLUSTER_TOP" ]; then
  echo "MYSQLCLUSTER_TOP not set"
  exit 1
fi
if [ -d "$MYSQLCLUSTER_TOP/ndb" ]; then :; else
  echo "$MYSQLCLUSTER_TOP/ndb directory does not exist"
  exit 1
fi
NDB_CONNECTSTRING=
NDB_HOME=
NDB_DEMO=$MYSQLCLUSTER_TOP/ndb/demos/1-node-SS

NDB_PORT_BASE="101"
NDB_REP_ID="4"
NDB_EXTREP_ID="5"

NDB_DEMO_NAME="Demo 1-SS MySQL Cluster"
NDB_HOST1=$1
NDB_HOST2=$2
if [ -z "$NDB_HOST1" ]; then
  NDB_HOST1=localhost
fi
if [ -z "$NDB_HOST2" ]; then
  NDB_HOST2=localhost
fi
NDB_HOST=$NDB_HOST2
NDB_EXTHOST=$NDB_HOST1

source $MYSQLCLUSTER_TOP/ndb/demos/run_demo1-PS-SS_common.sh
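run_demo1-PS.sh and run_demo1-SS.sh differ only in port base, replication node ids, and which of the two host arguments each treats as local, so they are evidently meant to be started as a pair with the same argument order, one per machine. An example invocation with hypothetical hostnames:

  sh run_demo1-PS.sh host-a host-b    # on host-a
  sh run_demo1-SS.sh host-a host-b    # on host-b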
ndb/demos/run_demo1.sh (new executable file, 41 lines)
@@ -0,0 +1,41 @@
#!/bin/sh
if [ -z "$MYSQLCLUSTER_TOP" ]; then
  echo "MYSQLCLUSTER_TOP not set"
  exit 1
fi
if [ -d "$MYSQLCLUSTER_TOP/ndb" ]; then :; else
  echo "$MYSQLCLUSTER_TOP/ndb directory does not exist"
  exit 1
fi
NDB_CONNECTSTRING=
NDB_HOME=
ndb_demo=$MYSQLCLUSTER_TOP/ndb/demos

# Edit file system path

cd $ndb_demo/1-node/1-mgm-1
sed -e s,"WRITE_PATH_TO_FILESYSTEM_2_HERE",$ndb_demo/1-node/1-db-2/filesystem,g \
    < template_config.ini > config.ini

# Start management server as daemon

cd $ndb_demo/1-node/1-mgm-1
if mgmtsrvr -d -c config.ini ; then :; else
  echo "Unable to start mgmtsrvr"
  exit 1
fi

# Start database node

cd $ndb_demo/1-node/1-db-2
xterm -T "Demo 1 NDB Cluster DB Node 2" -geometry 80x10 -xrm '*.hold:true' -e ndb -i &

# Start xterm for application programs

cd $ndb_demo/1-node/1-api-3
xterm -T "Demo 1 NDB Cluster API Node 3" -geometry 80x10 &

# Start management client

cd $ndb_demo
xterm -T "Demo 1 NDB Management Client" -geometry 80x10 -xrm '*.hold:true' -e mgmtclient localhost 10000 &
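The demo scripts only check that MYSQLCLUSTER_TOP points at the unpacked tree, so a typical invocation looks like this (the install prefix is hypothetical):

  MYSQLCLUSTER_TOP=$HOME/mysqlcluster
  export MYSQLCLUSTER_TOP
  sh $MYSQLCLUSTER_TOP/ndb/demos/run_demo1.sh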
ndb/demos/run_demo2.sh (new executable file, 54 lines)
@@ -0,0 +1,54 @@
#!/bin/sh
if [ -z "$MYSQLCLUSTER_TOP" ]; then
  echo "MYSQLCLUSTER_TOP not set"
  exit 1
fi
if [ -d "$MYSQLCLUSTER_TOP/ndb" ]; then :; else
  echo "$MYSQLCLUSTER_TOP/ndb directory does not exist"
  exit 1
fi
NDB_CONNECTSTRING=
NDB_HOME=
ndb_demo=$MYSQLCLUSTER_TOP/ndb/demos

# Edit file system path

cd $ndb_demo/2-node/2-mgm-1
sed -e s,"WRITE_PATH_TO_FILESYSTEM_2_HERE",$ndb_demo/2-node/2-db-2/filesystem,g \
    -e s,"WRITE_PATH_TO_FILESYSTEM_3_HERE",$ndb_demo/2-node/2-db-3/filesystem,g \
    < template_config.ini > config.ini

# Start management server as daemon

cd $ndb_demo/2-node/2-mgm-1
if mgmtsrvr -d -c config.ini ; then :; else
  echo "Unable to start mgmtsrvr"
  exit 1
fi

#xterm -T "Demo 2 NDB Management Server" -geometry 80x10 -xrm *.hold:true -e mgmtsrvr -c config.ini &

# Start database node

cd $ndb_demo/2-node/2-db-2
xterm -T "Demo 2 NDB Cluster DB Node 2" -geometry 80x10 -xrm '*.hold:true' -e ndb -i &

# Start database node

cd $ndb_demo/2-node/2-db-3
xterm -T "Demo 2 NDB Cluster DB Node 3" -geometry 80x10 -xrm '*.hold:true' -e ndb -i &

# Start xterm for application programs

cd $ndb_demo/2-node/2-api-4
xterm -T "Demo 2 NDB Cluster API Node 4" -geometry 80x10 &

# Start xterm for application programs

cd $ndb_demo/2-node/2-api-5
xterm -T "Demo 2 NDB Cluster API Node 5" -geometry 80x10 &

# Start management client

cd $ndb_demo
xterm -T "Demo 2 NDB Management Client" -geometry 80x10 -xrm '*.hold:true' -e mgmtclient localhost 10000 &
ndb/docs/Makefile (new file, 97 lines)
@@ -0,0 +1,97 @@
include .defs.mk
#
# hack before full autoconf
replace-targets := all clean
first-docs: all

include $(NDB_TOP)/Epilogue.mk

all: ndbapidoc mgmapidoc

DOXYGEN = doxygen
DOXYTOP = $(shell cd $(NDB_TOP); pwd)/docs
DOXYDIR = $(DOXYTOP)/doxygen
DOXYTMP = $(DOXYTOP)/.doxytmp
DOXYOUT = $(DOXYTOP)/.doxyout

clean:
	rm -rf ndbapi.pdf ndbapi.html mgmapi.pdf mgmapi.html
	rm -rf $(DOXYTMP) $(DOXYOUT)

###
#
# NDB API Programmer's Guide
#
ndbapidoc: ndbapi.pdf

ndbapi.pdf: $(NDB_TOP)/include/ndb_version.h
	@set -x; \
	rm -rf ndbapi.pdf ndbapi.html; \
	rm -rf $(DOXYTMP) $(DOXYOUT); \
	mkdir -p $(DOXYTMP) $(DOXYOUT); \
	(cd $(NDB_TOP)/include/ndbapi && \
	  find . -type f -print | \
	  grep -v /SCCS | \
	  cpio -pdm $(DOXYTMP)); \
	(cd $(NDB_TOP)/examples && \
	  cp -p */*.[ch]pp $(DOXYTMP)); \
	$(DOXYDIR)/predoxy.pl; \
	mv footer.html $(DOXYTMP); \
	(cd $(DOXYTMP) && \
	  $(DOXYGEN) $(DOXYDIR)/Doxyfile.ndbapi); \
	$(DOXYDIR)/postdoxy.pl $(DOXYOUT)/ndbapi.latex "NDB API Programmer Guide"; \
	(cd $(DOXYOUT) && \
	  find ndbapi.html -print | cpio -pdm $(DOXYTOP)); \
	(cd $(DOXYOUT)/ndbapi.latex && \
	  pdflatex refman.tex && makeindex refman && pdflatex refman.tex && \
	  cp -p refman.pdf $(DOXYTOP)/ndbapi.pdf);

###
#
# MGM API Guide
#
mgmapidoc: mgmapi.pdf

mgmapi.pdf: $(NDB_TOP)/include/ndb_version.h
	@set -x; \
	rm -rf mgmapi.pdf mgmapi.html; \
	rm -rf $(DOXYTMP) $(DOXYOUT); \
	mkdir -p $(DOXYTMP) $(DOXYOUT); \
	(cd $(NDB_TOP)/include/mgmapi && \
	  find . -type f -print | \
	  grep -v /SCCS | \
	  cpio -pdm $(DOXYTMP)); \
	$(DOXYDIR)/predoxy.pl; \
	mv footer.html $(DOXYTMP); \
	(cd $(DOXYTMP) && \
	  $(DOXYGEN) $(DOXYDIR)/Doxyfile.mgmapi); \
	$(DOXYDIR)/postdoxy.pl $(DOXYOUT)/mgmapi.latex "NDB Cluster MGM API Guide"; \
	(cd $(DOXYOUT) && \
	  find mgmapi.html -print | cpio -pdm $(DOXYTOP)); \
	(cd $(DOXYOUT)/mgmapi.latex && \
	  pdflatex refman.tex && makeindex refman && pdflatex refman.tex && \
	  cp -p refman.pdf $(DOXYTOP)/mgmapi.pdf);

###
#
# Complete Source Browser except for
# ndbapi odbc test tools win32 lib examples docs CVS config bin
# include/ndbapi
# include/newtonapi src/newtonapi
# include/mgmapi src/mgmapi
# src/client
ndbdoc: DUMMY
	mkdir -p $(OUTDIR)
	cd $(NDB_TOP) ; $(DOXYGEN) $(DOXYDIR)/Doxyfile.ndb

###
#
# odbcdoc - Complete Source Browser for NDB ODBC (src/client/odbc)

odbcdoc: DUMMY
	mkdir -p $(OUTDIR)
	cd $(NDB_TOP) ; $(DOXYGEN) $(DOXYDIR)/Doxyfile.odbc

testdoc: DUMMY
	mkdir -p $(OUTDIR)
	cd $(NDB_TOP) ; $(DOXYGEN) $(DOXYDIR)/Doxyfile.test
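For reference, this is roughly what the ndbapi.pdf rule above automates, done by hand from ndb/docs; it is a sketch under the assumption that the API headers and example sources have already been copied into .doxytmp the way the rule does with find/cpio:

  (cd .doxytmp && doxygen ../doxygen/Doxyfile.ndbapi)
  cd .doxyout/ndbapi.latex
  pdflatex refman.tex && makeindex refman && pdflatex refman.tex
  cp -p refman.pdf ../../ndbapi.pdf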
ndb/docs/README (new file, 30 lines)
@@ -0,0 +1,30 @@
Create MySQL Cluster user documentation from source code
--------------------------------------------------------
(All these require Doxygen.)

* make clean
  Remove all generated documentation and tmp files

* make ndbapidoc
  Makes the NDB API Programmer's Guide (in HTML)

* make ndbapipdf
  Makes the NDB API Programmer Guide (in PDF)

* make mgmapidoc
  Makes the MGM API Reference Manual (in HTML)

* make mgmapipdf
  Makes the MGM API Reference Manual (in PDF)

* make ndbdoc
  Makes source code browser for NDB Cluster (in HTML)
  (Requires Graphviz.)

Doxygen and Graphviz can be found at:
  http://www.doxygen.org
or at (for Red Hat 9.0 RPMs):
  http://dentrassi.de/download/doxygen/

--
lars@mysql.com
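A quick sanity check before running the targets above is to confirm the tools are on the path; pdflatex and makeindex are only needed for the PDF targets, and dot (Graphviz) only for the source browser graphs:

  which doxygen dot pdflatex makeindex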
ndb/docs/doxygen/Doxyfile.mgmapi (new file, 877 lines)
@@ -0,0 +1,877 @@
# Doxyfile 1.2.12
|
||||
|
||||
# This file describes the settings to be used by the documentation system
|
||||
# doxygen (www.doxygen.org) for a project
|
||||
#
|
||||
# All text after a hash (#) is considered a comment and will be ignored
|
||||
# The format is:
|
||||
# TAG = value [value, ...]
|
||||
# For lists items can also be appended using:
|
||||
# TAG += value [value, ...]
|
||||
# Values that contain spaces should be placed between quotes (" ")
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# General configuration options
|
||||
#---------------------------------------------------------------------------
|
||||
DETAILS_AT_TOP = yes
|
||||
HIDE_FRIEND_COMPOUNDS = yes
|
||||
|
||||
# The PROJECT_NAME tag is a single word (or a sequence of words surrounded
|
||||
# by quotes) that should identify the project.
|
||||
|
||||
PROJECT_NAME =
|
||||
|
||||
# The PROJECT_NUMBER tag can be used to enter a project or revision number.
|
||||
# This could be handy for archiving the generated documentation or
|
||||
# if some version control system is used.
|
||||
|
||||
PROJECT_NUMBER =
|
||||
|
||||
# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute)
|
||||
# base path where the generated documentation will be put.
|
||||
# If a relative path is entered, it will be relative to the location
|
||||
# where doxygen was started. If left blank the current directory will be used.
|
||||
|
||||
OUTPUT_DIRECTORY =
|
||||
|
||||
# The OUTPUT_LANGUAGE tag is used to specify the language in which all
|
||||
# documentation generated by doxygen is written. Doxygen will use this
|
||||
# information to generate all constant output in the proper language.
|
||||
# The default language is English, other supported languages are:
|
||||
# Brazilian, Chinese, Croatian, Czech, Danish, Dutch, Finnish, French,
|
||||
# German, Hungarian, Italian, Japanese, Korean, Norwegian, Polish,
|
||||
# Portuguese, Romanian, Russian, Slovak, Slovene, Spanish and Swedish.
|
||||
|
||||
OUTPUT_LANGUAGE = English
|
||||
|
||||
# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in
|
||||
# documentation are documented, even if no documentation was available.
|
||||
# Private class members and static file members will be hidden unless
|
||||
# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES
|
||||
|
||||
EXTRACT_ALL = NO
|
||||
|
||||
# If the EXTRACT_PRIVATE tag is set to YES all private members of a class
|
||||
# will be included in the documentation.
|
||||
|
||||
EXTRACT_PRIVATE = NO
|
||||
|
||||
# If the EXTRACT_STATIC tag is set to YES all static members of a file
|
||||
# will be included in the documentation.
|
||||
|
||||
EXTRACT_STATIC = NO
|
||||
|
||||
# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all
|
||||
# undocumented members of documented classes, files or namespaces.
|
||||
# If set to NO (the default) these members will be included in the
|
||||
# various overviews, but no documentation section is generated.
|
||||
# This option has no effect if EXTRACT_ALL is enabled.
|
||||
|
||||
HIDE_UNDOC_MEMBERS = NO
|
||||
|
||||
# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all
|
||||
# undocumented classes that are normally visible in the class hierarchy.
|
||||
# If set to NO (the default) these class will be included in the various
|
||||
# overviews. This option has no effect if EXTRACT_ALL is enabled.
|
||||
|
||||
HIDE_UNDOC_CLASSES = NO
|
||||
|
||||
# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will
|
||||
# include brief member descriptions after the members that are listed in
|
||||
# the file and class documentation (similar to JavaDoc).
|
||||
# Set to NO to disable this.
|
||||
|
||||
BRIEF_MEMBER_DESC = YES
|
||||
|
||||
# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend
|
||||
# the brief description of a member or function before the detailed description.
|
||||
# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
|
||||
# brief descriptions will be completely suppressed.
|
||||
|
||||
REPEAT_BRIEF = YES
|
||||
|
||||
# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
|
||||
# Doxygen will generate a detailed section even if there is only a brief
|
||||
# description.
|
||||
|
||||
ALWAYS_DETAILED_SEC = NO
|
||||
|
||||
# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full
|
||||
# path before files name in the file list and in the header files. If set
|
||||
# to NO the shortest path that makes the file name unique will be used.
|
||||
|
||||
FULL_PATH_NAMES = NO
|
||||
|
||||
# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag
|
||||
# can be used to strip a user defined part of the path. Stripping is
|
||||
# only done if one of the specified strings matches the left-hand part of
|
||||
# the path. It is allowed to use relative paths in the argument list.
|
||||
|
||||
STRIP_FROM_PATH =
|
||||
|
||||
# The INTERNAL_DOCS tag determines if documentation
|
||||
# that is typed after a \internal command is included. If the tag is set
|
||||
# to NO (the default) then the documentation will be excluded.
|
||||
# Set it to YES to include the internal documentation.
|
||||
|
||||
INTERNAL_DOCS = NO
|
||||
|
||||
# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct
|
||||
# doxygen to hide any special comment blocks from generated source code
|
||||
# fragments. Normal C and C++ comments will always remain visible.
|
||||
|
||||
STRIP_CODE_COMMENTS = YES
|
||||
|
||||
# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate
|
||||
# file names in lower case letters. If set to YES upper case letters are also
|
||||
# allowed. This is useful if you have classes or files whose names only differ
|
||||
# in case and if your file system supports case sensitive file names. Windows
|
||||
# users are adviced to set this option to NO.
|
||||
|
||||
CASE_SENSE_NAMES = YES
|
||||
|
||||
# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter
|
||||
# (but less readable) file names. This can be useful is your file systems
|
||||
# doesn't support long names like on DOS, Mac, or CD-ROM.
|
||||
|
||||
SHORT_NAMES = NO
|
||||
|
||||
# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen
|
||||
# will show members with their full class and namespace scopes in the
|
||||
# documentation. If set to YES the scope will be hidden.
|
||||
|
||||
HIDE_SCOPE_NAMES = NO
|
||||
|
||||
# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen
|
||||
# will generate a verbatim copy of the header file for each class for
|
||||
# which an include is specified. Set to NO to disable this.
|
||||
|
||||
VERBATIM_HEADERS = YES
|
||||
|
||||
# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen
|
||||
# will put list of the files that are included by a file in the documentation
|
||||
# of that file.
|
||||
|
||||
SHOW_INCLUDE_FILES = YES
|
||||
|
||||
# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen
|
||||
# will interpret the first line (until the first dot) of a JavaDoc-style
|
||||
# comment as the brief description. If set to NO, the JavaDoc
|
||||
# comments will behave just like the Qt-style comments (thus requiring an
|
||||
# explict @brief command for a brief description.
|
||||
|
||||
JAVADOC_AUTOBRIEF = NO
|
||||
|
||||
# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented
|
||||
# member inherits the documentation from any documented member that it
|
||||
# reimplements.
|
||||
|
||||
INHERIT_DOCS = YES
|
||||
|
||||
# If the INLINE_INFO tag is set to YES (the default) then a tag [inline]
|
||||
# is inserted in the documentation for inline members.
|
||||
|
||||
INLINE_INFO = YES
|
||||
|
||||
# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen
|
||||
# will sort the (detailed) documentation of file and class members
|
||||
# alphabetically by member name. If set to NO the members will appear in
|
||||
# declaration order.
|
||||
|
||||
SORT_MEMBER_DOCS = YES
|
||||
|
||||
# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
|
||||
# tag is set to YES, then doxygen will reuse the documentation of the first
|
||||
# member in the group (if any) for the other members of the group. By default
|
||||
# all members of a group must be documented explicitly.
|
||||
|
||||
DISTRIBUTE_GROUP_DOC = NO
|
||||
|
||||
# The TAB_SIZE tag can be used to set the number of spaces in a tab.
|
||||
# Doxygen uses this value to replace tabs by spaces in code fragments.
|
||||
|
||||
TAB_SIZE = 8
|
||||
|
||||
# The GENERATE_TODOLIST tag can be used to enable (YES) or
|
||||
# disable (NO) the todo list. This list is created by putting \todo
|
||||
# commands in the documentation.
|
||||
|
||||
GENERATE_TODOLIST = YES
|
||||
|
||||
# The GENERATE_TESTLIST tag can be used to enable (YES) or
|
||||
# disable (NO) the test list. This list is created by putting \test
|
||||
# commands in the documentation.
|
||||
|
||||
GENERATE_TESTLIST = YES
|
||||
|
||||
# The GENERATE_BUGLIST tag can be used to enable (YES) or
|
||||
# disable (NO) the bug list. This list is created by putting \bug
|
||||
# commands in the documentation.
|
||||
|
||||
GENERATE_BUGLIST = YES
|
||||
|
||||
# This tag can be used to specify a number of aliases that acts
|
||||
# as commands in the documentation. An alias has the form "name=value".
|
||||
# For example adding "sideeffect=\par Side Effects:\n" will allow you to
|
||||
# put the command \sideeffect (or @sideeffect) in the documentation, which
|
||||
# will result in a user defined paragraph with heading "Side Effects:".
|
||||
# You can put \n's in the value part of an alias to insert newlines.
|
||||
|
||||
ALIASES =
|
||||
|
||||
# The ENABLED_SECTIONS tag can be used to enable conditional
|
||||
# documentation sections, marked by \if sectionname ... \endif.
|
||||
|
||||
ENABLED_SECTIONS =
|
||||
|
||||
# The MAX_INITIALIZER_LINES tag determines the maximum number of lines
|
||||
# the initial value of a variable or define consist of for it to appear in
|
||||
# the documentation. If the initializer consists of more lines than specified
|
||||
# here it will be hidden. Use a value of 0 to hide initializers completely.
|
||||
# The appearance of the initializer of individual variables and defines in the
|
||||
# documentation can be controlled using \showinitializer or \hideinitializer
|
||||
# command in the documentation regardless of this setting.
|
||||
|
||||
MAX_INITIALIZER_LINES = 30
|
||||
|
||||
# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
|
||||
# only. Doxygen will then generate output that is more tailored for C.
|
||||
# For instance some of the names that are used will be different. The list
|
||||
# of all members will be omitted, etc.
|
||||
|
||||
OPTIMIZE_OUTPUT_FOR_C = NO
|
||||
|
||||
# Set the SHOW_USED_FILES tag to NO to disable the list of files generated
|
||||
# at the bottom of the documentation of classes and structs. If set to YES the
|
||||
# list will mention the files that were used to generate the documentation.
|
||||
|
||||
SHOW_USED_FILES = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to warning and progress messages
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The QUIET tag can be used to turn on/off the messages that are generated
|
||||
# by doxygen. Possible values are YES and NO. If left blank NO is used.
|
||||
|
||||
QUIET = NO
|
||||
|
||||
# The WARNINGS tag can be used to turn on/off the warning messages that are
|
||||
# generated by doxygen. Possible values are YES and NO. If left blank
|
||||
# NO is used.
|
||||
|
||||
WARNINGS = YES
|
||||
|
||||
# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings
|
||||
# for undocumented members. If EXTRACT_ALL is set to YES then this flag will
|
||||
# automatically be disabled.
|
||||
|
||||
WARN_IF_UNDOCUMENTED = YES
|
||||
|
||||
# The WARN_FORMAT tag determines the format of the warning messages that
|
||||
# doxygen can produce. The string should contain the $file, $line, and $text
|
||||
# tags, which will be replaced by the file and line number from which the
|
||||
# warning originated and the warning text.
|
||||
|
||||
WARN_FORMAT =
|
||||
|
||||
# The WARN_LOGFILE tag can be used to specify a file to which warning
|
||||
# and error messages should be written. If left blank the output is written
|
||||
# to stderr.
|
||||
|
||||
WARN_LOGFILE =
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the input files
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The INPUT tag can be used to specify the files and/or directories that contain
|
||||
# documented source files. You may enter file names like "myfile.cpp" or
|
||||
# directories like "/usr/src/myproject". Separate the files or directories
|
||||
# with spaces.
|
||||
|
||||
INPUT = .
|
||||
|
||||
# If the value of the INPUT tag contains directories, you can use the
|
||||
# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
|
||||
# and *.h) to filter out the source-files in the directories. If left
|
||||
# blank file matching one of the following patterns are included:
|
||||
# *.c *.cc *.cxx *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx *.hpp
|
||||
# *.h++ *.idl
|
||||
|
||||
FILE_PATTERNS =
|
||||
|
||||
# The RECURSIVE tag can be used to turn specify whether or not subdirectories
|
||||
# should be searched for input files as well. Possible values are YES and NO.
|
||||
# If left blank NO is used.
|
||||
|
||||
RECURSIVE = NO
|
||||
|
||||
# The EXCLUDE tag can be used to specify files and/or directories that should
|
||||
# excluded from the INPUT source files. This way you can easily exclude a
|
||||
# subdirectory from a directory tree whose root is specified with the INPUT tag.
|
||||
|
||||
EXCLUDE =
|
||||
|
||||
# If the value of the INPUT tag contains directories, you can use the
|
||||
# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
|
||||
# certain files from those directories.
|
||||
|
||||
EXCLUDE_PATTERNS =
|
||||
|
||||
# The EXAMPLE_PATH tag can be used to specify one or more files or
|
||||
# directories that contain example code fragments that are included (see
|
||||
# the \include command).
|
||||
|
||||
EXAMPLE_PATH = .
|
||||
|
||||
# If the value of the EXAMPLE_PATH tag contains directories, you can use the
|
||||
# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
|
||||
# and *.h) to filter out the source-files in the directories. If left
|
||||
# blank all files are included.
|
||||
|
||||
EXAMPLE_PATTERNS =
|
||||
|
||||
# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
|
||||
# searched for input files to be used with the \include or \dontinclude
|
||||
# commands irrespective of the value of the RECURSIVE tag.
|
||||
# Possible values are YES and NO. If left blank NO is used.
|
||||
|
||||
EXAMPLE_RECURSIVE = NO
|
||||
|
||||
# The IMAGE_PATH tag can be used to specify one or more files or
|
||||
# directories that contain image that are included in the documentation (see
|
||||
# the \image command).
|
||||
|
||||
IMAGE_PATH =
|
||||
|
||||
# The INPUT_FILTER tag can be used to specify a program that doxygen should
|
||||
# invoke to filter for each input file. Doxygen will invoke the filter program
|
||||
# by executing (via popen()) the command <filter> <input-file>, where <filter>
|
||||
# is the value of the INPUT_FILTER tag, and <input-file> is the name of an
|
||||
# input file. Doxygen will then use the output that the filter program writes
|
||||
# to standard output.
|
||||
|
||||
INPUT_FILTER =
|
||||
|
||||
# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
|
||||
# INPUT_FILTER) will be used to filter the input files when producing source
|
||||
# files to browse.
|
||||
|
||||
FILTER_SOURCE_FILES = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to source browsing
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the SOURCE_BROWSER tag is set to YES then a list of source files will
|
||||
# be generated. Documented entities will be cross-referenced with these sources.
|
||||
|
||||
SOURCE_BROWSER = NO
|
||||
|
||||
# Setting the INLINE_SOURCES tag to YES will include the body
|
||||
# of functions and classes directly in the documentation.
|
||||
|
||||
INLINE_SOURCES = NO
|
||||
|
||||
# If the REFERENCED_BY_RELATION tag is set to YES (the default)
|
||||
# then for each documented function all documented
|
||||
# functions referencing it will be listed.
|
||||
|
||||
REFERENCED_BY_RELATION = YES
|
||||
|
||||
# If the REFERENCES_RELATION tag is set to YES (the default)
|
||||
# then for each documented function all documented entities
|
||||
# called/used by that function will be listed.
|
||||
|
||||
REFERENCES_RELATION = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the alphabetical class index
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index
|
||||
# of all compounds will be generated. Enable this if the project
|
||||
# contains a lot of classes, structs, unions or interfaces.
|
||||
|
||||
ALPHABETICAL_INDEX = NO
|
||||
|
||||
# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then
|
||||
# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns
|
||||
# in which this list will be split (can be a number in the range [1..20])
|
||||
|
||||
COLS_IN_ALPHA_INDEX = 5
|
||||
|
||||
# In case all classes in a project start with a common prefix, all
|
||||
# classes will be put under the same header in the alphabetical index.
|
||||
# The IGNORE_PREFIX tag can be used to specify one or more prefixes that
|
||||
# should be ignored while generating the index headers.
|
||||
|
||||
IGNORE_PREFIX =
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the HTML output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_HTML tag is set to YES (the default) Doxygen will
|
||||
# generate HTML output.
|
||||
|
||||
GENERATE_HTML = YES
|
||||
|
||||
# The HTML_OUTPUT tag is used to specify where the HTML docs will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `html' will be used as the default path.
|
||||
|
||||
HTML_OUTPUT = ../.doxyout/mgmapi.html
|
||||
|
||||
# The HTML_HEADER tag can be used to specify a personal HTML header for
|
||||
# each generated HTML page. If it is left blank doxygen will generate a
|
||||
# standard header.
|
||||
|
||||
HTML_HEADER =
|
||||
|
||||
# The HTML_FOOTER tag can be used to specify a personal HTML footer for
|
||||
# each generated HTML page. If it is left blank doxygen will generate a
|
||||
# standard footer.
|
||||
|
||||
HTML_FOOTER = footer.html
|
||||
|
||||
# The HTML_STYLESHEET tag can be used to specify a user defined cascading
|
||||
# style sheet that is used by each HTML page. It can be used to
|
||||
# fine-tune the look of the HTML output. If the tag is left blank doxygen
|
||||
# will generate a default style sheet
|
||||
|
||||
HTML_STYLESHEET =
|
||||
|
||||
# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes,
|
||||
# files or namespaces will be aligned in HTML using tables. If set to
|
||||
# NO a bullet list will be used.
|
||||
|
||||
HTML_ALIGN_MEMBERS = YES
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, additional index files
|
||||
# will be generated that can be used as input for tools like the
|
||||
# Microsoft HTML help workshop to generate a compressed HTML help file (.chm)
|
||||
# of the generated HTML documentation.
|
||||
|
||||
GENERATE_HTMLHELP = NO
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag
|
||||
# controls if a separate .chi index file is generated (YES) or that
|
||||
# it should be included in the master .chm file (NO).
|
||||
|
||||
GENERATE_CHI = NO
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag
|
||||
# controls whether a binary table of contents is generated (YES) or a
|
||||
# normal table of contents (NO) in the .chm file.
|
||||
|
||||
BINARY_TOC = NO
|
||||
|
||||
# The TOC_EXPAND flag can be set to YES to add extra items for group members
|
||||
# to the contents of the Html help documentation and to the tree view.
|
||||
|
||||
TOC_EXPAND = NO
|
||||
|
||||
# The DISABLE_INDEX tag can be used to turn on/off the condensed index at
|
||||
# top of each HTML page. The value NO (the default) enables the index and
|
||||
# the value YES disables it.
|
||||
|
||||
DISABLE_INDEX = NO
|
||||
|
||||
# This tag can be used to set the number of enum values (range [1..20])
|
||||
# that doxygen will group on one line in the generated HTML documentation.
|
||||
|
||||
ENUM_VALUES_PER_LINE = 4
|
||||
|
||||
# If the GENERATE_TREEVIEW tag is set to YES, a side panel will be
|
||||
# generated containing a tree-like index structure (just like the one that
|
||||
# is generated for HTML Help). For this to work a browser that supports
|
||||
# JavaScript and frames is required (for instance Mozilla, Netscape 4.0+,
|
||||
# or Internet explorer 4.0+). Note that for large projects the tree generation
|
||||
# can take a very long time. In such cases it is better to disable this feature.
|
||||
# Windows users are probably better off using the HTML help feature.
|
||||
|
||||
GENERATE_TREEVIEW = NO
|
||||
|
||||
# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be
|
||||
# used to set the initial width (in pixels) of the frame in which the tree
|
||||
# is shown.
|
||||
|
||||
TREEVIEW_WIDTH = 250
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the LaTeX output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will
|
||||
# generate Latex output.
|
||||
|
||||
GENERATE_LATEX = YES
|
||||
|
||||
# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `latex' will be used as the default path.
|
||||
|
||||
LATEX_OUTPUT = ../.doxyout/mgmapi.latex
|
||||
|
||||
# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact
|
||||
# LaTeX documents. This may be useful for small projects and may help to
|
||||
# save some trees in general.
|
||||
|
||||
COMPACT_LATEX = NO
|
||||
|
||||
# The PAPER_TYPE tag can be used to set the paper type that is used
|
||||
# by the printer. Possible values are: a4, a4wide, letter, legal and
|
||||
# executive. If left blank a4wide will be used.
|
||||
|
||||
PAPER_TYPE =
|
||||
|
||||
# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX
|
||||
# packages that should be included in the LaTeX output.
|
||||
|
||||
EXTRA_PACKAGES =
|
||||
|
||||
# The LATEX_HEADER tag can be used to specify a personal LaTeX header for
|
||||
# the generated latex document. The header should contain everything until
|
||||
# the first chapter. If it is left blank doxygen will generate a
|
||||
# standard header. Notice: only use this tag if you know what you are doing!
|
||||
|
||||
LATEX_HEADER = ../doxygen/header.mgmapi.tex
|
||||
|
||||
# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated
|
||||
# is prepared for conversion to pdf (using ps2pdf). The pdf file will
|
||||
# contain links (just like the HTML output) instead of page references
|
||||
# This makes the output suitable for online browsing using a pdf viewer.
|
||||
|
||||
PDF_HYPERLINKS = YES
|
||||
|
||||
# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of
|
||||
# plain latex in the generated Makefile. Set this option to YES to get a
|
||||
# higher quality PDF documentation.
|
||||
|
||||
USE_PDFLATEX = YES
|
||||
|
||||
# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode.
|
||||
# command to the generated LaTeX files. This will instruct LaTeX to keep
|
||||
# running if errors occur, instead of asking the user for help.
|
||||
# This option is also used when generating formulas in HTML.
|
||||
|
||||
LATEX_BATCHMODE = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the RTF output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output
|
||||
# The RTF output is optimised for Word 97 and may not look very pretty with
|
||||
# other RTF readers or editors.
|
||||
|
||||
GENERATE_RTF = NO
|
||||
|
||||
# The RTF_OUTPUT tag is used to specify where the RTF docs will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `rtf' will be used as the default path.
|
||||
|
||||
RTF_OUTPUT = ../mgmapi.rtf
|
||||
|
||||
# If the COMPACT_RTF tag is set to YES Doxygen generates more compact
|
||||
# RTF documents. This may be useful for small projects and may help to
|
||||
# save some trees in general.
|
||||
|
||||
COMPACT_RTF = NO
|
||||
|
||||
# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated
|
||||
# will contain hyperlink fields. The RTF file will
|
||||
# contain links (just like the HTML output) instead of page references.
|
||||
# This makes the output suitable for online browsing using WORD or other
|
||||
# programs which support those fields.
|
||||
# Note: wordpad (write) and others do not support links.
|
||||
|
||||
RTF_HYPERLINKS = NO
|
||||
|
||||
# Load stylesheet definitions from file. Syntax is similar to doxygen's
|
||||
# config file, i.e. a series of assigments. You only have to provide
|
||||
# replacements, missing definitions are set to their default value.
|
||||
|
||||
RTF_STYLESHEET_FILE =
|
||||
|
||||
# Set optional variables used in the generation of an rtf document.
|
||||
# Syntax is similar to doxygen's config file.
|
||||
|
||||
RTF_EXTENSIONS_FILE =
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the man page output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_MAN tag is set to YES (the default) Doxygen will
|
||||
# generate man pages
|
||||
|
||||
GENERATE_MAN = NO
|
||||
|
||||
# The MAN_OUTPUT tag is used to specify where the man pages will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `man' will be used as the default path.
|
||||
|
||||
MAN_OUTPUT =
|
||||
|
||||
# The MAN_EXTENSION tag determines the extension that is added to
|
||||
# the generated man pages (default is the subroutine's section .3)
|
||||
|
||||
MAN_EXTENSION =
|
||||
|
||||
# If the MAN_LINKS tag is set to YES and Doxygen generates man output,
|
||||
# then it will generate one additional man file for each entity
|
||||
# documented in the real man page(s). These additional files
|
||||
# only source the real man page, but without them the man command
|
||||
# would be unable to find the correct page. The default is NO.
|
||||
|
||||
MAN_LINKS = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the XML output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_XML tag is set to YES Doxygen will
|
||||
# generate an XML file that captures the structure of
|
||||
# the code including all documentation. Note that this
|
||||
# feature is still experimental and incomplete at the
|
||||
# moment.
|
||||
|
||||
GENERATE_XML = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the preprocessor
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will
|
||||
# evaluate all C-preprocessor directives found in the sources and include
|
||||
# files.
|
||||
|
||||
ENABLE_PREPROCESSING = YES
|
||||
|
||||
# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro
|
||||
# names in the source code. If set to NO (the default) only conditional
|
||||
# compilation will be performed. Macro expansion can be done in a controlled
|
||||
# way by setting EXPAND_ONLY_PREDEF to YES.
|
||||
|
||||
MACRO_EXPANSION = YES
|
||||
|
||||
# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES
|
||||
# then the macro expansion is limited to the macros specified with the
|
||||
# PREDEFINED and EXPAND_AS_PREDEFINED tags.
|
||||
|
||||
EXPAND_ONLY_PREDEF = YES
|
||||
|
||||
# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files
|
||||
# in the INCLUDE_PATH (see below) will be search if a #include is found.
|
||||
|
||||
SEARCH_INCLUDES = YES
|
||||
|
||||
# The INCLUDE_PATH tag can be used to specify one or more directories that
|
||||
# contain include files that are not input files but should be processed by
|
||||
# the preprocessor.
|
||||
|
||||
INCLUDE_PATH =
|
||||
|
||||
# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
|
||||
# patterns (like *.h and *.hpp) to filter out the header-files in the
|
||||
# directories. If left blank, the patterns specified with FILE_PATTERNS will
|
||||
# be used.
|
||||
|
||||
INCLUDE_FILE_PATTERNS =
|
||||
|
||||
# The PREDEFINED tag can be used to specify one or more macro names that
|
||||
# are defined before the preprocessor is started (similar to the -D option of
|
||||
# gcc). The argument of the tag is a list of macros of the form: name
|
||||
# or name=definition (no spaces). If the definition and the = are
|
||||
# omitted =1 is assumed.
|
||||
|
||||
PREDEFINED = DOXYGEN_SHOULD_SKIP_DEPRECATED \
|
||||
DOXYGEN_SHOULD_SKIP_INTERNAL \
|
||||
protected=private
|
||||
|
||||
# If the MACRO_EXPANSION and EXPAND_PREDEF_ONLY tags are set to YES then
|
||||
# this tag can be used to specify a list of macro names that should be expanded.
|
||||
# The macro definition that is found in the sources will be used.
|
||||
# Use the PREDEFINED tag if you want to use a different macro definition.
|
||||
|
||||
EXPAND_AS_DEFINED =
|
||||
|
||||
# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then
|
||||
# doxygen's preprocessor will remove all function-like macros that are alone
|
||||
# on a line and do not end with a semicolon. Such function macros are typically
|
||||
# used for boiler-plate code, and will confuse the parser if not removed.
|
||||
|
||||
SKIP_FUNCTION_MACROS = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration: additions related to external references
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The TAGFILES tag can be used to specify one or more tagfiles.
|
||||
|
||||
TAGFILES =
|
||||
|
||||
# When a file name is specified after GENERATE_TAGFILE, doxygen will create
|
||||
# a tag file that is based on the input files it reads.
|
||||
|
||||
GENERATE_TAGFILE =
|
||||
|
||||
# If the ALLEXTERNALS tag is set to YES all external classes will be listed
|
||||
# in the class index. If set to NO only the inherited external classes
|
||||
# will be listed.
|
||||
|
||||
ALLEXTERNALS = NO
|
||||
|
||||
# The PERL_PATH should be the absolute path and name of the perl script
|
||||
# interpreter (i.e. the result of `which perl').
|
||||
|
||||
PERL_PATH =
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the dot tool
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will
|
||||
# generate a inheritance diagram (in Html, RTF and LaTeX) for classes with base or
|
||||
# super classes. Setting the tag to NO turns the diagrams off. Note that this
|
||||
# option is superceded by the HAVE_DOT option below. This is only a fallback. It is
|
||||
# recommended to install and use dot, since it yield more powerful graphs.
|
||||
|
||||
CLASS_DIAGRAMS = YES
|
||||
|
||||
# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
|
||||
# available from the path. This tool is part of Graphviz, a graph visualization
|
||||
# toolkit from AT&T and Lucent Bell Labs. The other options in this section
|
||||
# have no effect if this option is set to NO (the default)
|
||||
|
||||
HAVE_DOT = NO
|
||||
|
||||
# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen
|
||||
# will generate a graph for each documented class showing the direct and
|
||||
# indirect inheritance relations. Setting this tag to YES will force the
|
||||
# the CLASS_DIAGRAMS tag to NO.
|
||||
|
||||
CLASS_GRAPH = YES
|
||||
|
||||
# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen
|
||||
# will generate a graph for each documented class showing the direct and
|
||||
# indirect implementation dependencies (inheritance, containment, and
|
||||
# class references variables) of the class with other documented classes.
|
||||
|
||||
COLLABORATION_GRAPH = YES
|
||||
|
||||
# If set to YES, the inheritance and collaboration graphs will show the
|
||||
# relations between templates and their instances.
|
||||
|
||||
TEMPLATE_RELATIONS = YES
|
||||
|
||||
# If set to YES, the inheritance and collaboration graphs will hide
|
||||
# inheritance and usage relations if the target is undocumented
|
||||
# or is not a class.
|
||||
|
||||
HIDE_UNDOC_RELATIONS = YES
|
||||
|
||||
# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT
|
||||
# tags are set to YES then doxygen will generate a graph for each documented
|
||||
# file showing the direct and indirect include dependencies of the file with
|
||||
# other documented files.
|
||||
|
||||
INCLUDE_GRAPH = YES
|
||||
|
||||
# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and
|
||||
# HAVE_DOT tags are set to YES then doxygen will generate a graph for each
|
||||
# documented header file showing the documented files that directly or
|
||||
# indirectly include this file.
|
||||
|
||||
INCLUDED_BY_GRAPH = YES
|
||||
|
||||
# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen
|
||||
# will graphical hierarchy of all classes instead of a textual one.
|
||||
|
||||
GRAPHICAL_HIERARCHY = YES
|
||||
|
||||
# The tag DOT_PATH can be used to specify the path where the dot tool can be
|
||||
# found. If left blank, it is assumed the dot tool can be found on the path.
|
||||
|
||||
DOT_PATH =
|
||||
|
||||
# The DOTFILE_DIRS tag can be used to specify one or more directories that
|
||||
# contain dot files that are included in the documentation (see the
|
||||
# \dotfile command).
|
||||
|
||||
DOTFILE_DIRS =
|
||||
|
||||
# The MAX_DOT_GRAPH_WIDTH tag can be used to set the maximum allowed width
|
||||
# (in pixels) of the graphs generated by dot. If a graph becomes larger than
|
||||
# this value, doxygen will try to truncate the graph, so that it fits within
|
||||
# the specified constraint. Beware that most browsers cannot cope with very
|
||||
# large images.
|
||||
|
||||
MAX_DOT_GRAPH_WIDTH = 1024
|
||||
|
||||
# The MAX_DOT_GRAPH_HEIGHT tag can be used to set the maximum allows height
|
||||
# (in pixels) of the graphs generated by dot. If a graph becomes larger than
|
||||
# this value, doxygen will try to truncate the graph, so that it fits within
|
||||
# the specified constraint. Beware that most browsers cannot cope with very
|
||||
# large images.
|
||||
|
||||
MAX_DOT_GRAPH_HEIGHT = 1024
|
||||
|
||||
# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will
|
||||
# generate a legend page explaining the meaning of the various boxes and
|
||||
# arrows in the dot generated graphs.
|
||||
|
||||
GENERATE_LEGEND = YES
|
||||
|
||||
# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will
|
||||
# remove the intermedate dot files that are used to generate
|
||||
# the various graphs.
|
||||
|
||||
DOT_CLEANUP = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration: additions related to the search engine
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The SEARCHENGINE tag specifies whether or not a search engine should be
|
||||
# used. If set to NO the values of all tags below this one will be ignored.
|
||||
|
||||
SEARCHENGINE = NO
|
||||
|
||||
# The CGI_NAME tag should be the name of the CGI script that
|
||||
# starts the search engine (doxysearch) with the correct parameters.
|
||||
# A script with this name will be generated by doxygen.
|
||||
|
||||
CGI_NAME =
|
||||
|
||||
# The CGI_URL tag should be the absolute URL to the directory where the
|
||||
# cgi binaries are located. See the documentation of your http daemon for
|
||||
# details.
|
||||
|
||||
CGI_URL =
|
||||
|
||||
# The DOC_URL tag should be the absolute URL to the directory where the
|
||||
# documentation is located. If left blank the absolute path to the
|
||||
# documentation, with file:// prepended to it, will be used.
|
||||
|
||||
DOC_URL =
|
||||
|
||||
# The DOC_ABSPATH tag should be the absolute path to the directory where the
|
||||
# documentation is located. If left blank the directory on the local machine
|
||||
# will be used.
|
||||
|
||||
DOC_ABSPATH =
|
||||
|
||||
# The BIN_ABSPATH tag must point to the directory where the doxysearch binary
|
||||
# is installed.
|
||||
|
||||
BIN_ABSPATH =
|
||||
|
||||
# The EXT_DOC_PATHS tag can be used to specify one or more paths to
|
||||
# documentation generated for other projects. This allows doxysearch to search
|
||||
# the documentation for these projects as well.
|
||||
|
||||
EXT_DOC_PATHS =
|
ndb/docs/doxygen/Doxyfile.ndb (new file, 937 lines)
@@ -0,0 +1,937 @@
# Doxyfile 1.2.14
|
||||
|
||||
# This file describes the settings to be used by the documentation system
|
||||
# doxygen (www.doxygen.org) for a project
|
||||
#
|
||||
# All text after a hash (#) is considered a comment and will be ignored
|
||||
# The format is:
|
||||
# TAG = value [value, ...]
|
||||
# For lists items can also be appended using:
|
||||
# TAG += value [value, ...]
|
||||
# Values that contain spaces should be placed between quotes (" ")
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# General configuration options
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The PROJECT_NAME tag is a single word (or a sequence of words surrounded
|
||||
# by quotes) that should identify the project.
|
||||
|
||||
PROJECT_NAME = "NDB Cluster"
|
||||
|
||||
# The PROJECT_NUMBER tag can be used to enter a project or revision number.
|
||||
# This could be handy for archiving the generated documentation or
|
||||
# if some version control system is used.
|
||||
|
||||
PROJECT_NUMBER =
|
||||
|
||||
# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute)
|
||||
# base path where the generated documentation will be put.
|
||||
# If a relative path is entered, it will be relative to the location
|
||||
# where doxygen was started. If left blank the current directory will be used.
|
||||
|
||||
OUTPUT_DIRECTORY =
|
||||
|
||||
# The OUTPUT_LANGUAGE tag is used to specify the language in which all
|
||||
# documentation generated by doxygen is written. Doxygen will use this
|
||||
# information to generate all constant output in the proper language.
|
||||
# The default language is English, other supported languages are:
|
||||
# Brazilian, Chinese, Croatian, Czech, Danish, Dutch, Finnish, French,
|
||||
# German, Greek, Hungarian, Italian, Japanese, Korean, Norwegian, Polish,
|
||||
# Portuguese, Romanian, Russian, Slovak, Slovene, Spanish and Swedish.
|
||||
|
||||
OUTPUT_LANGUAGE = English
|
||||
|
||||
# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in
|
||||
# documentation are documented, even if no documentation was available.
|
||||
# Private class members and static file members will be hidden unless
|
||||
# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES
|
||||
|
||||
EXTRACT_ALL = YES
|
||||
|
||||
# If the EXTRACT_PRIVATE tag is set to YES all private members of a class
|
||||
# will be included in the documentation.
|
||||
|
||||
EXTRACT_PRIVATE = YES
|
||||
|
||||
# If the EXTRACT_STATIC tag is set to YES all static members of a file
|
||||
# will be included in the documentation.
|
||||
|
||||
EXTRACT_STATIC = YES
|
||||
|
||||
# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs)
|
||||
# defined locally in source files will be included in the documentation.
|
||||
# If set to NO only classes defined in header files are included.
|
||||
|
||||
EXTRACT_LOCAL_CLASSES = YES
|
||||
|
||||
# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all
|
||||
# undocumented members of documented classes, files or namespaces.
|
||||
# If set to NO (the default) these members will be included in the
|
||||
# various overviews, but no documentation section is generated.
|
||||
# This option has no effect if EXTRACT_ALL is enabled.
|
||||
|
||||
HIDE_UNDOC_MEMBERS = NO
|
||||
|
||||
# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all
|
||||
# undocumented classes that are normally visible in the class hierarchy.
|
||||
# If set to NO (the default) these class will be included in the various
|
||||
# overviews. This option has no effect if EXTRACT_ALL is enabled.
|
||||
|
||||
HIDE_UNDOC_CLASSES = NO
|
||||
|
||||
# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will
|
||||
# include brief member descriptions after the members that are listed in
|
||||
# the file and class documentation (similar to JavaDoc).
|
||||
# Set to NO to disable this.
|
||||
|
||||
BRIEF_MEMBER_DESC = YES
|
||||
|
||||
# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend
|
||||
# the brief description of a member or function before the detailed description.
|
||||
# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
|
||||
# brief descriptions will be completely suppressed.
|
||||
|
||||
REPEAT_BRIEF = YES
|
||||
|
||||
# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
|
||||
# Doxygen will generate a detailed section even if there is only a brief
|
||||
# description.
|
||||
|
||||
ALWAYS_DETAILED_SEC = NO
|
||||
|
||||
# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all inherited
|
||||
# members of a class in the documentation of that class as if those members were
|
||||
# ordinary class members. Constructors, destructors and assignment operators of
|
||||
# the base classes will not be shown.
|
||||
|
||||
INLINE_INHERITED_MEMB = NO
|
||||
|
||||
# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full
|
||||
# path before files name in the file list and in the header files. If set
|
||||
# to NO the shortest path that makes the file name unique will be used.
|
||||
|
||||
FULL_PATH_NAMES = YES
|
||||
|
||||
# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag
|
||||
# can be used to strip a user defined part of the path. Stripping is
|
||||
# only done if one of the specified strings matches the left-hand part of
|
||||
# the path. It is allowed to use relative paths in the argument list.
|
||||
|
||||
STRIP_FROM_PATH = .
|
||||
|
||||
# The INTERNAL_DOCS tag determines if documentation
|
||||
# that is typed after a \internal command is included. If the tag is set
|
||||
# to NO (the default) then the documentation will be excluded.
|
||||
# Set it to YES to include the internal documentation.
|
||||
|
||||
INTERNAL_DOCS = YES
|
||||
|
||||
# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct
|
||||
# doxygen to hide any special comment blocks from generated source code
|
||||
# fragments. Normal C and C++ comments will always remain visible.
|
||||
|
||||
STRIP_CODE_COMMENTS = YES
|
||||
|
||||
# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate
|
||||
# file names in lower case letters. If set to YES upper case letters are also
|
||||
# allowed. This is useful if you have classes or files whose names only differ
|
||||
# in case and if your file system supports case sensitive file names. Windows
|
||||
# users are adviced to set this option to NO.
|
||||
|
||||
CASE_SENSE_NAMES = YES
|
||||
|
||||
# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter
|
||||
# (but less readable) file names. This can be useful is your file systems
|
||||
# doesn't support long names like on DOS, Mac, or CD-ROM.
|
||||
|
||||
SHORT_NAMES = NO
|
||||
|
||||
# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen
|
||||
# will show members with their full class and namespace scopes in the
|
||||
# documentation. If set to YES the scope will be hidden.
|
||||
|
||||
HIDE_SCOPE_NAMES = NO
|
||||
|
||||
# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen
|
||||
# will generate a verbatim copy of the header file for each class for
|
||||
# which an include is specified. Set to NO to disable this.
|
||||
|
||||
VERBATIM_HEADERS = YES
|
||||
|
||||
# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen
|
||||
# will put list of the files that are included by a file in the documentation
|
||||
# of that file.
|
||||
|
||||
SHOW_INCLUDE_FILES = YES
|
||||
|
||||
# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen
|
||||
# will interpret the first line (until the first dot) of a JavaDoc-style
|
||||
# comment as the brief description. If set to NO, the JavaDoc
|
||||
# comments will behave just like the Qt-style comments (thus requiring an
|
||||
# explicit @brief command for a brief description).
|
||||
|
||||
JAVADOC_AUTOBRIEF = NO
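# With JAVADOC_AUTOBRIEF left at NO, a brief description has to be given
# explicitly. A hedged illustration (hypothetical member, not from the NDB
# headers):
#
#   /**
#    * @brief Closes the connection.
#    *
#    * The detailed description starts here and may span several sentences.
#    */
#   void close();
#
# If the tag were set to YES, the first sentence (up to the first dot) of a
# JavaDoc-style comment would be taken as the brief description instead.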
|
||||
|
||||
# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented
|
||||
# member inherits the documentation from any documented member that it
|
||||
# reimplements.
|
||||
|
||||
INHERIT_DOCS = YES
|
||||
|
||||
# If the INLINE_INFO tag is set to YES (the default) then a tag [inline]
|
||||
# is inserted in the documentation for inline members.
|
||||
|
||||
INLINE_INFO = YES
|
||||
|
||||
# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen
|
||||
# will sort the (detailed) documentation of file and class members
|
||||
# alphabetically by member name. If set to NO the members will appear in
|
||||
# declaration order.
|
||||
|
||||
SORT_MEMBER_DOCS = YES
|
||||
|
||||
# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
|
||||
# tag is set to YES, then doxygen will reuse the documentation of the first
|
||||
# member in the group (if any) for the other members of the group. By default
|
||||
# all members of a group must be documented explicitly.
|
||||
|
||||
DISTRIBUTE_GROUP_DOC = NO
|
||||
|
||||
# The TAB_SIZE tag can be used to set the number of spaces in a tab.
|
||||
# Doxygen uses this value to replace tabs by spaces in code fragments.
|
||||
|
||||
TAB_SIZE = 8
|
||||
|
||||
# The GENERATE_TODOLIST tag can be used to enable (YES) or
|
||||
# disable (NO) the todo list. This list is created by putting \todo
|
||||
# commands in the documentation.
|
||||
|
||||
GENERATE_TODOLIST = YES
|
||||
|
||||
# The GENERATE_TESTLIST tag can be used to enable (YES) or
|
||||
# disable (NO) the test list. This list is created by putting \test
|
||||
# commands in the documentation.
|
||||
|
||||
GENERATE_TESTLIST = YES
|
||||
|
||||
# The GENERATE_BUGLIST tag can be used to enable (YES) or
|
||||
# disable (NO) the bug list. This list is created by putting \bug
|
||||
# commands in the documentation.
|
||||
|
||||
GENERATE_BUGLIST = YES
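# The three lists above are filled from special commands in the source
# comments. A small sketch (hypothetical function, for illustration only):
#
#   /**
#    * Starts the management server.
#    *
#    * \todo Add support for reloading the configuration at runtime.
#    * \bug  Fails silently if the port is already in use.
#    */
#   int start();
#
# Each \todo and \bug entry is also collected on the generated to-do and
# bug list pages because the corresponding tags are set to YES.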
|
||||
|
||||
# This tag can be used to specify a number of aliases that act
|
||||
# as commands in the documentation. An alias has the form "name=value".
|
||||
# For example adding "sideeffect=\par Side Effects:\n" will allow you to
|
||||
# put the command \sideeffect (or @sideeffect) in the documentation, which
|
||||
# will result in a user defined paragraph with heading "Side Effects:".
|
||||
# You can put \n's in the value part of an alias to insert newlines.
|
||||
|
||||
ALIASES =
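# As a sketch, the "sideeffect" alias mentioned above would be declared
# here as (hypothetical, not enabled in this file):
#
#   ALIASES = "sideeffect=\par Side Effects:\n"
#
# and could then be used in a comment block as:
#
#   /**
#    * Commits the transaction.
#    * @sideeffect All open scans on this transaction are closed.
#    */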
|
||||
|
||||
# The ENABLED_SECTIONS tag can be used to enable conditional
|
||||
# documentation sections, marked by \if sectionname ... \endif.
|
||||
|
||||
ENABLED_SECTIONS =
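# A conditional section is written in the source comments as, for example
# (the section name "ndb_internal" is hypothetical):
#
#   /**
#    * Public description of the method.
#    * \if ndb_internal
#    * Extra text that is only generated when this tag contains the section
#    * name, e.g. ENABLED_SECTIONS = ndb_internal.
#    * \endif
#    */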
|
||||
|
||||
# The MAX_INITIALIZER_LINES tag determines the maximum number of lines
|
||||
# the initial value of a variable or define consists of for it to appear in
|
||||
# the documentation. If the initializer consists of more lines than specified
|
||||
# here it will be hidden. Use a value of 0 to hide initializers completely.
|
||||
# The appearance of the initializer of individual variables and defines in the
|
||||
# documentation can be controlled using \showinitializer or \hideinitializer
|
||||
# command in the documentation regardless of this setting.
|
||||
|
||||
MAX_INITIALIZER_LINES = 30
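# For an individual symbol the limit above can be overridden, e.g.
# (hypothetical variable, shown only as a sketch):
#
#   /**
#    * Default error texts, one per error code.
#    * \hideinitializer
#    */
#   static const char* errorTexts[] = { /* long initializer ... */ };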
|
||||
|
||||
# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
|
||||
# only. Doxygen will then generate output that is more tailored for C.
|
||||
# For instance some of the names that are used will be different. The list
|
||||
# of all members will be omitted, etc.
|
||||
|
||||
OPTIMIZE_OUTPUT_FOR_C = NO
|
||||
|
||||
# Set the SHOW_USED_FILES tag to NO to disable the list of files generated
|
||||
# at the bottom of the documentation of classes and structs. If set to YES the
|
||||
# list will mention the files that were used to generate the documentation.
|
||||
|
||||
SHOW_USED_FILES = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to warning and progress messages
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The QUIET tag can be used to turn on/off the messages that are generated
|
||||
# by doxygen. Possible values are YES and NO. If left blank NO is used.
|
||||
|
||||
QUIET = NO
|
||||
|
||||
# The WARNINGS tag can be used to turn on/off the warning messages that are
|
||||
# generated by doxygen. Possible values are YES and NO. If left blank
|
||||
# NO is used.
|
||||
|
||||
WARNINGS = YES
|
||||
|
||||
# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings
|
||||
# for undocumented members. If EXTRACT_ALL is set to YES then this flag will
|
||||
# automatically be disabled.
|
||||
|
||||
WARN_IF_UNDOCUMENTED = YES
|
||||
|
||||
# The WARN_FORMAT tag determines the format of the warning messages that
|
||||
# doxygen can produce. The string should contain the $file, $line, and $text
|
||||
# tags, which will be replaced by the file and line number from which the
|
||||
# warning originated and the warning text.
|
||||
|
||||
WARN_FORMAT = "$file:$line: $text"
|
||||
|
||||
# The WARN_LOGFILE tag can be used to specify a file to which warning
|
||||
# and error messages should be written. If left blank the output is written
|
||||
# to stderr.
|
||||
|
||||
WARN_LOGFILE =
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the input files
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The INPUT tag can be used to specify the files and/or directories that contain
|
||||
# documented source files. You may enter file names like "myfile.cpp" or
|
||||
# directories like "/usr/src/myproject". Separate the files or directories
|
||||
# with spaces.
|
||||
|
||||
INPUT =
|
||||
|
||||
# If the value of the INPUT tag contains directories, you can use the
|
||||
# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
|
||||
# and *.h) to filter out the source-files in the directories. If left
|
||||
# blank the following patterns are tested:
|
||||
# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx *.hpp
|
||||
# *.h++ *.idl *.odl
|
||||
|
||||
FILE_PATTERNS =
|
||||
|
||||
# The RECURSIVE tag can be used to specify whether or not subdirectories
|
||||
# should be searched for input files as well. Possible values are YES and NO.
|
||||
# If left blank NO is used.
|
||||
|
||||
RECURSIVE = YES
|
||||
|
||||
# The EXCLUDE tag can be used to specify files and/or directories that should
|
||||
# be excluded from the INPUT source files. This way you can easily exclude a
|
||||
# subdirectory from a directory tree whose root is specified with the INPUT tag.
|
||||
|
||||
EXCLUDE = test \
|
||||
tools \
|
||||
win32 \
|
||||
lib \
|
||||
examples \
|
||||
docs \
|
||||
CVS \
|
||||
SCCS \
|
||||
config \
|
||||
bin \
|
||||
include/ndbapi \
|
||||
include/newtonapi \
|
||||
src/newtonapi \
|
||||
include/mgmapi \
|
||||
src/mgmapi \
|
||||
src/client
|
||||
|
||||
# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or directories
|
||||
# that are symbolic links (a Unix filesystem feature) are excluded from the input.
|
||||
|
||||
EXCLUDE_SYMLINKS = NO
|
||||
|
||||
# If the value of the INPUT tag contains directories, you can use the
|
||||
# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
|
||||
# certain files from those directories.
|
||||
|
||||
EXCLUDE_PATTERNS = *CVS* \
|
||||
*SCCS*
|
||||
|
||||
# The EXAMPLE_PATH tag can be used to specify one or more files or
|
||||
# directories that contain example code fragments that are included (see
|
||||
# the \include command).
|
||||
|
||||
EXAMPLE_PATH =
|
||||
|
||||
# If the value of the EXAMPLE_PATH tag contains directories, you can use the
|
||||
# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
|
||||
# and *.h) to filter out the source-files in the directories. If left
|
||||
# blank all files are included.
|
||||
|
||||
EXAMPLE_PATTERNS =
|
||||
|
||||
# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
|
||||
# searched for input files to be used with the \include or \dontinclude
|
||||
# commands irrespective of the value of the RECURSIVE tag.
|
||||
# Possible values are YES and NO. If left blank NO is used.
|
||||
|
||||
EXAMPLE_RECURSIVE = NO
|
||||
|
||||
# The IMAGE_PATH tag can be used to specify one or more files or
|
||||
# directories that contain images that are included in the documentation (see
|
||||
# the \image command).
|
||||
|
||||
IMAGE_PATH =
|
||||
|
||||
# The INPUT_FILTER tag can be used to specify a program that doxygen should
|
||||
# invoke to filter for each input file. Doxygen will invoke the filter program
|
||||
# by executing (via popen()) the command <filter> <input-file>, where <filter>
|
||||
# is the value of the INPUT_FILTER tag, and <input-file> is the name of an
|
||||
# input file. Doxygen will then use the output that the filter program writes
|
||||
# to standard output.
|
||||
|
||||
INPUT_FILTER =
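# If a filter were needed, it could be any program that writes the
# transformed file to standard output, for example (the macro name
# NDB_EXPORT is hypothetical):
#
#   INPUT_FILTER = "sed -e s/NDB_EXPORT//g"
#
# Doxygen would then run the command on each input file and parse the
# filtered text instead of the file itself.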
|
||||
|
||||
# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
|
||||
# INPUT_FILTER) will be used to filter the input files when producing source
|
||||
# files to browse.
|
||||
|
||||
FILTER_SOURCE_FILES = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to source browsing
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the SOURCE_BROWSER tag is set to YES then a list of source files will
|
||||
# be generated. Documented entities will be cross-referenced with these sources.
|
||||
|
||||
SOURCE_BROWSER = YES
|
||||
|
||||
# Setting the INLINE_SOURCES tag to YES will include the body
|
||||
# of functions and classes directly in the documentation.
|
||||
|
||||
INLINE_SOURCES = YES
|
||||
|
||||
# If the REFERENCED_BY_RELATION tag is set to YES (the default)
|
||||
# then for each documented function all documented
|
||||
# functions referencing it will be listed.
|
||||
|
||||
REFERENCED_BY_RELATION = YES
|
||||
|
||||
# If the REFERENCES_RELATION tag is set to YES (the default)
|
||||
# then for each documented function all documented entities
|
||||
# called/used by that function will be listed.
|
||||
|
||||
REFERENCES_RELATION = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the alphabetical class index
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index
|
||||
# of all compounds will be generated. Enable this if the project
|
||||
# contains a lot of classes, structs, unions or interfaces.
|
||||
|
||||
ALPHABETICAL_INDEX = YES
|
||||
|
||||
# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then
|
||||
# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns
|
||||
# in which this list will be split (can be a number in the range [1..20])
|
||||
|
||||
COLS_IN_ALPHA_INDEX = 5
|
||||
|
||||
# In case all classes in a project start with a common prefix, all
|
||||
# classes will be put under the same header in the alphabetical index.
|
||||
# The IGNORE_PREFIX tag can be used to specify one or more prefixes that
|
||||
# should be ignored while generating the index headers.
|
||||
|
||||
IGNORE_PREFIX =
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the HTML output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_HTML tag is set to YES (the default) Doxygen will
|
||||
# generate HTML output.
|
||||
|
||||
GENERATE_HTML = YES
|
||||
|
||||
# The HTML_OUTPUT tag is used to specify where the HTML docs will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `html' will be used as the default path.
|
||||
|
||||
HTML_OUTPUT = ndb.html
|
||||
|
||||
# The HTML_FILE_EXTENSION tag can be used to specify the file extension for
|
||||
# each generated HTML page (for example: .htm,.php,.asp). If it is left blank
|
||||
# doxygen will generate files with .html extension.
|
||||
|
||||
HTML_FILE_EXTENSION = .html
|
||||
|
||||
# The HTML_HEADER tag can be used to specify a personal HTML header for
|
||||
# each generated HTML page. If it is left blank doxygen will generate a
|
||||
# standard header.
|
||||
|
||||
HTML_HEADER =
|
||||
|
||||
# The HTML_FOOTER tag can be used to specify a personal HTML footer for
|
||||
# each generated HTML page. If it is left blank doxygen will generate a
|
||||
# standard footer.
|
||||
|
||||
HTML_FOOTER =
|
||||
|
||||
# The HTML_STYLESHEET tag can be used to specify a user defined cascading
|
||||
# style sheet that is used by each HTML page. It can be used to
|
||||
# fine-tune the look of the HTML output. If the tag is left blank doxygen
|
||||
# will generate a default style sheet
|
||||
|
||||
HTML_STYLESHEET =
|
||||
|
||||
# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes,
|
||||
# files or namespaces will be aligned in HTML using tables. If set to
|
||||
# NO a bullet list will be used.
|
||||
|
||||
HTML_ALIGN_MEMBERS = YES
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, additional index files
|
||||
# will be generated that can be used as input for tools like the
|
||||
# Microsoft HTML help workshop to generate a compressed HTML help file (.chm)
|
||||
# of the generated HTML documentation.
|
||||
|
||||
GENERATE_HTMLHELP = NO
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag
|
||||
# controls if a separate .chi index file is generated (YES) or that
|
||||
# it should be included in the master .chm file (NO).
|
||||
|
||||
GENERATE_CHI = NO
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag
|
||||
# controls whether a binary table of contents is generated (YES) or a
|
||||
# normal table of contents (NO) in the .chm file.
|
||||
|
||||
BINARY_TOC = NO
|
||||
|
||||
# The TOC_EXPAND flag can be set to YES to add extra items for group members
|
||||
# to the contents of the Html help documentation and to the tree view.
|
||||
|
||||
TOC_EXPAND = NO
|
||||
|
||||
# The DISABLE_INDEX tag can be used to turn on/off the condensed index at
|
||||
# top of each HTML page. The value NO (the default) enables the index and
|
||||
# the value YES disables it.
|
||||
|
||||
DISABLE_INDEX = NO
|
||||
|
||||
# This tag can be used to set the number of enum values (range [1..20])
|
||||
# that doxygen will group on one line in the generated HTML documentation.
|
||||
|
||||
ENUM_VALUES_PER_LINE = 4
|
||||
|
||||
# If the GENERATE_TREEVIEW tag is set to YES, a side panel will be
|
||||
# generated containing a tree-like index structure (just like the one that
|
||||
# is generated for HTML Help). For this to work a browser that supports
|
||||
# JavaScript and frames is required (for instance Mozilla, Netscape 4.0+,
|
||||
# or Internet explorer 4.0+). Note that for large projects the tree generation
|
||||
# can take a very long time. In such cases it is better to disable this feature.
|
||||
# Windows users are probably better off using the HTML help feature.
|
||||
|
||||
GENERATE_TREEVIEW = NO
|
||||
|
||||
# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be
|
||||
# used to set the initial width (in pixels) of the frame in which the tree
|
||||
# is shown.
|
||||
|
||||
TREEVIEW_WIDTH = 250
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the LaTeX output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will
|
||||
# generate Latex output.
|
||||
|
||||
GENERATE_LATEX = NO
|
||||
|
||||
# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `latex' will be used as the default path.
|
||||
|
||||
LATEX_OUTPUT = ndb.latex
|
||||
|
||||
# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact
|
||||
# LaTeX documents. This may be useful for small projects and may help to
|
||||
# save some trees in general.
|
||||
|
||||
COMPACT_LATEX = NO
|
||||
|
||||
# The PAPER_TYPE tag can be used to set the paper type that is used
|
||||
# by the printer. Possible values are: a4, a4wide, letter, legal and
|
||||
# executive. If left blank a4wide will be used.
|
||||
|
||||
PAPER_TYPE = a4wide
|
||||
|
||||
# The EXTRA_PACKAGES tag can be used to specify one or more names of LaTeX
|
||||
# packages that should be included in the LaTeX output.
|
||||
|
||||
EXTRA_PACKAGES =
|
||||
|
||||
# The LATEX_HEADER tag can be used to specify a personal LaTeX header for
|
||||
# the generated latex document. The header should contain everything until
|
||||
# the first chapter. If it is left blank doxygen will generate a
|
||||
# standard header. Notice: only use this tag if you know what you are doing!
|
||||
|
||||
LATEX_HEADER =
|
||||
|
||||
# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated
|
||||
# is prepared for conversion to pdf (using ps2pdf). The pdf file will
|
||||
# contain links (just like the HTML output) instead of page references
|
||||
# This makes the output suitable for online browsing using a pdf viewer.
|
||||
|
||||
PDF_HYPERLINKS = NO
|
||||
|
||||
# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of
|
||||
# plain latex in the generated Makefile. Set this option to YES to get a
|
||||
# higher quality PDF documentation.
|
||||
|
||||
USE_PDFLATEX = NO
|
||||
|
||||
# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode.
|
||||
# command to the generated LaTeX files. This will instruct LaTeX to keep
|
||||
# running if errors occur, instead of asking the user for help.
|
||||
# This option is also used when generating formulas in HTML.
|
||||
|
||||
LATEX_BATCHMODE = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the RTF output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output
|
||||
# The RTF output is optimised for Word 97 and may not look very pretty with
|
||||
# other RTF readers or editors.
|
||||
|
||||
GENERATE_RTF = NO
|
||||
|
||||
# The RTF_OUTPUT tag is used to specify where the RTF docs will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `rtf' will be used as the default path.
|
||||
|
||||
RTF_OUTPUT = rtf
|
||||
|
||||
# If the COMPACT_RTF tag is set to YES Doxygen generates more compact
|
||||
# RTF documents. This may be useful for small projects and may help to
|
||||
# save some trees in general.
|
||||
|
||||
COMPACT_RTF = NO
|
||||
|
||||
# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated
|
||||
# will contain hyperlink fields. The RTF file will
|
||||
# contain links (just like the HTML output) instead of page references.
|
||||
# This makes the output suitable for online browsing using WORD or other
|
||||
# programs which support those fields.
|
||||
# Note: wordpad (write) and others do not support links.
|
||||
|
||||
RTF_HYPERLINKS = NO
|
||||
|
||||
# Load stylesheet definitions from file. Syntax is similar to doxygen's
|
||||
# config file, i.e. a series of assignments. You only have to provide
|
||||
# replacements, missing definitions are set to their default value.
|
||||
|
||||
RTF_STYLESHEET_FILE =
|
||||
|
||||
# Set optional variables used in the generation of an rtf document.
|
||||
# Syntax is similar to doxygen's config file.
|
||||
|
||||
RTF_EXTENSIONS_FILE =
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the man page output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_MAN tag is set to YES (the default) Doxygen will
|
||||
# generate man pages
|
||||
|
||||
GENERATE_MAN = NO
|
||||
|
||||
# The MAN_OUTPUT tag is used to specify where the man pages will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `man' will be used as the default path.
|
||||
|
||||
MAN_OUTPUT = man
|
||||
|
||||
# The MAN_EXTENSION tag determines the extension that is added to
|
||||
# the generated man pages (default is the subroutine's section .3)
|
||||
|
||||
MAN_EXTENSION = .3
|
||||
|
||||
# If the MAN_LINKS tag is set to YES and Doxygen generates man output,
|
||||
# then it will generate one additional man file for each entity
|
||||
# documented in the real man page(s). These additional files
|
||||
# only source the real man page, but without them the man command
|
||||
# would be unable to find the correct page. The default is NO.
|
||||
|
||||
MAN_LINKS = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the XML output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_XML tag is set to YES Doxygen will
|
||||
# generate an XML file that captures the structure of
|
||||
# the code including all documentation. Note that this
|
||||
# feature is still experimental and incomplete at the
|
||||
# moment.
|
||||
|
||||
GENERATE_XML = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options for the AutoGen Definitions output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will
|
||||
# generate an AutoGen Definitions (see autogen.sf.net) file
|
||||
# that captures the structure of the code including all
|
||||
# documentation. Note that this feature is still experimental
|
||||
# and incomplete at the moment.
|
||||
|
||||
GENERATE_AUTOGEN_DEF = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the preprocessor
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will
|
||||
# evaluate all C-preprocessor directives found in the sources and include
|
||||
# files.
|
||||
|
||||
ENABLE_PREPROCESSING = YES
|
||||
|
||||
# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro
|
||||
# names in the source code. If set to NO (the default) only conditional
|
||||
# compilation will be performed. Macro expansion can be done in a controlled
|
||||
# way by setting EXPAND_ONLY_PREDEF to YES.
|
||||
|
||||
MACRO_EXPANSION = NO
|
||||
|
||||
# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES
|
||||
# then the macro expansion is limited to the macros specified with the
|
||||
# PREDEFINED and EXPAND_AS_PREDEFINED tags.
|
||||
|
||||
EXPAND_ONLY_PREDEF = NO
|
||||
|
||||
# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files
|
||||
# in the INCLUDE_PATH (see below) will be searched if a #include is found.
|
||||
|
||||
SEARCH_INCLUDES = YES
|
||||
|
||||
# The INCLUDE_PATH tag can be used to specify one or more directories that
|
||||
# contain include files that are not input files but should be processed by
|
||||
# the preprocessor.
|
||||
|
||||
INCLUDE_PATH =
|
||||
|
||||
# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
|
||||
# patterns (like *.h and *.hpp) to filter out the header-files in the
|
||||
# directories. If left blank, the patterns specified with FILE_PATTERNS will
|
||||
# be used.
|
||||
|
||||
INCLUDE_FILE_PATTERNS =
|
||||
|
||||
# The PREDEFINED tag can be used to specify one or more macro names that
|
||||
# are defined before the preprocessor is started (similar to the -D option of
|
||||
# gcc). The argument of the tag is a list of macros of the form: name
|
||||
# or name=definition (no spaces). If the definition and the = are
|
||||
# omitted =1 is assumed.
|
||||
|
||||
PREDEFINED =
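# A common use of this tag (shown here only as a hedged example, it is not
# enabled in this file) is to hide blocks of code from the documentation:
#
#   PREDEFINED = DOXYGEN_SHOULD_SKIP_THIS
#
# together with source code of the form:
#
#   #ifndef DOXYGEN_SHOULD_SKIP_THIS
#   /* declarations that should not be documented */
#   #endif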
|
||||
|
||||
# If the MACRO_EXPANSION and EXPAND_PREDEF_ONLY tags are set to YES then
|
||||
# this tag can be used to specify a list of macro names that should be expanded.
|
||||
# The macro definition that is found in the sources will be used.
|
||||
# Use the PREDEFINED tag if you want to use a different macro definition.
|
||||
|
||||
EXPAND_AS_DEFINED =
|
||||
|
||||
# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then
|
||||
# doxygen's preprocessor will remove all function-like macros that are alone
|
||||
# on a line and do not end with a semicolon. Such function macros are typically
|
||||
# used for boiler-plate code, and will confuse the parser if not removed.
|
||||
|
||||
SKIP_FUNCTION_MACROS = YES
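# The kind of macro this setting removes is a bare call that forms a whole
# line without a trailing semicolon, e.g. (hypothetical macro and argument):
#
#   DECLARE_BLOCK_FUNCTIONS(MyBlock)
#
# Without this setting such boiler-plate lines would confuse the parser.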
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration::additions related to external references
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The TAGFILES tag can be used to specify one or more tagfiles.
|
||||
|
||||
TAGFILES =
|
||||
|
||||
# When a file name is specified after GENERATE_TAGFILE, doxygen will create
|
||||
# a tag file that is based on the input files it reads.
|
||||
|
||||
GENERATE_TAGFILE =
|
||||
|
||||
# If the ALLEXTERNALS tag is set to YES all external classes will be listed
|
||||
# in the class index. If set to NO only the inherited external classes
|
||||
# will be listed.
|
||||
|
||||
ALLEXTERNALS = NO
|
||||
|
||||
# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed
|
||||
# in the modules index. If set to NO, only the current project's groups will
|
||||
# be listed.
|
||||
|
||||
EXTERNAL_GROUPS = YES
|
||||
|
||||
# The PERL_PATH should be the absolute path and name of the perl script
|
||||
# interpreter (i.e. the result of `which perl').
|
||||
|
||||
PERL_PATH = /usr/bin/perl
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the dot tool
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will
|
||||
# generate an inheritance diagram (in HTML, RTF and LaTeX) for classes with base or
|
||||
# super classes. Setting the tag to NO turns the diagrams off. Note that this
|
||||
# option is superseded by the HAVE_DOT option below. This is only a fallback. It is
|
||||
# recommended to install and use dot, since it yields more powerful graphs.
|
||||
|
||||
CLASS_DIAGRAMS = YES
|
||||
|
||||
# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
|
||||
# available from the path. This tool is part of Graphviz, a graph visualization
|
||||
# toolkit from AT&T and Lucent Bell Labs. The other options in this section
|
||||
# have no effect if this option is set to NO (the default)
|
||||
|
||||
HAVE_DOT = YES
|
||||
|
||||
# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen
|
||||
# will generate a graph for each documented class showing the direct and
|
||||
# indirect inheritance relations. Setting this tag to YES will force the
|
||||
# CLASS_DIAGRAMS tag to NO.
|
||||
|
||||
CLASS_GRAPH = YES
|
||||
|
||||
# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen
|
||||
# will generate a graph for each documented class showing the direct and
|
||||
# indirect implementation dependencies (inheritance, containment, and
|
||||
# class references variables) of the class with other documented classes.
|
||||
|
||||
COLLABORATION_GRAPH = YES
|
||||
|
||||
# If set to YES, the inheritance and collaboration graphs will show the
|
||||
# relations between templates and their instances.
|
||||
|
||||
TEMPLATE_RELATIONS = YES
|
||||
|
||||
# If set to YES, the inheritance and collaboration graphs will hide
|
||||
# inheritance and usage relations if the target is undocumented
|
||||
# or is not a class.
|
||||
|
||||
HIDE_UNDOC_RELATIONS = NO
|
||||
|
||||
# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT
|
||||
# tags are set to YES then doxygen will generate a graph for each documented
|
||||
# file showing the direct and indirect include dependencies of the file with
|
||||
# other documented files.
|
||||
|
||||
INCLUDE_GRAPH = YES
|
||||
|
||||
# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and
|
||||
# HAVE_DOT tags are set to YES then doxygen will generate a graph for each
|
||||
# documented header file showing the documented files that directly or
|
||||
# indirectly include this file.
|
||||
|
||||
INCLUDED_BY_GRAPH = YES
|
||||
|
||||
# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen
|
||||
# will show a graphical hierarchy of all classes instead of a textual one.
|
||||
|
||||
GRAPHICAL_HIERARCHY = YES
|
||||
|
||||
# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
|
||||
# generated by dot. Possible values are gif, jpg, and png
|
||||
# If left blank gif will be used.
|
||||
|
||||
DOT_IMAGE_FORMAT = gif
|
||||
|
||||
# The tag DOT_PATH can be used to specify the path where the dot tool can be
|
||||
# found. If left blank, it is assumed the dot tool can be found on the path.
|
||||
|
||||
DOT_PATH =
|
||||
|
||||
# The DOTFILE_DIRS tag can be used to specify one or more directories that
|
||||
# contain dot files that are included in the documentation (see the
|
||||
# \dotfile command).
|
||||
|
||||
DOTFILE_DIRS =
|
||||
|
||||
# The MAX_DOT_GRAPH_WIDTH tag can be used to set the maximum allowed width
|
||||
# (in pixels) of the graphs generated by dot. If a graph becomes larger than
|
||||
# this value, doxygen will try to truncate the graph, so that it fits within
|
||||
# the specified constraint. Beware that most browsers cannot cope with very
|
||||
# large images.
|
||||
|
||||
MAX_DOT_GRAPH_WIDTH = 1024
|
||||
|
||||
# The MAX_DOT_GRAPH_HEIGHT tag can be used to set the maximum allowed height
|
||||
# (in pixels) of the graphs generated by dot. If a graph becomes larger than
|
||||
# this value, doxygen will try to truncate the graph, so that it fits within
|
||||
# the specified constraint. Beware that most browsers cannot cope with very
|
||||
# large images.
|
||||
|
||||
MAX_DOT_GRAPH_HEIGHT = 1024
|
||||
|
||||
# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will
|
||||
# generate a legend page explaining the meaning of the various boxes and
|
||||
# arrows in the dot generated graphs.
|
||||
|
||||
GENERATE_LEGEND = YES
|
||||
|
||||
# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will
|
||||
# remove the intermediate dot files that are used to generate
|
||||
# the various graphs.
|
||||
|
||||
DOT_CLEANUP = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration::additions related to the search engine
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The SEARCHENGINE tag specifies whether or not a search engine should be
|
||||
# used. If set to NO the values of all tags below this one will be ignored.
|
||||
|
||||
SEARCHENGINE = NO
|
||||
|
||||
# The CGI_NAME tag should be the name of the CGI script that
|
||||
# starts the search engine (doxysearch) with the correct parameters.
|
||||
# A script with this name will be generated by doxygen.
|
||||
|
||||
CGI_NAME = search.cgi
|
||||
|
||||
# The CGI_URL tag should be the absolute URL to the directory where the
|
||||
# cgi binaries are located. See the documentation of your http daemon for
|
||||
# details.
|
||||
|
||||
CGI_URL =
|
||||
|
||||
# The DOC_URL tag should be the absolute URL to the directory where the
|
||||
# documentation is located. If left blank the absolute path to the
|
||||
# documentation, with file:// prepended to it, will be used.
|
||||
|
||||
DOC_URL =
|
||||
|
||||
# The DOC_ABSPATH tag should be the absolute path to the directory where the
|
||||
# documentation is located. If left blank the directory on the local machine
|
||||
# will be used.
|
||||
|
||||
DOC_ABSPATH =
|
||||
|
||||
# The BIN_ABSPATH tag must point to the directory where the doxysearch binary
|
||||
# is installed.
|
||||
|
||||
BIN_ABSPATH = /usr/local/bin
|
||||
|
||||
# The EXT_DOC_PATHS tag can be used to specify one or more paths to
|
||||
# documentation generated for other projects. This allows doxysearch to search
|
||||
# the documentation for these projects as well.
|
||||
|
||||
EXT_DOC_PATHS =
|
877
ndb/docs/doxygen/Doxyfile.ndbapi
Normal file
@ -0,0 +1,877 @@
# Doxyfile 1.2.12
|
||||
|
||||
# This file describes the settings to be used by the documentation system
|
||||
# doxygen (www.doxygen.org) for a project
|
||||
#
|
||||
# All text after a hash (#) is considered a comment and will be ignored
|
||||
# The format is:
|
||||
# TAG = value [value, ...]
|
||||
# For lists items can also be appended using:
|
||||
# TAG += value [value, ...]
|
||||
# Values that contain spaces should be placed between quotes (" ")
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# General configuration options
|
||||
#---------------------------------------------------------------------------
|
||||
DETAILS_AT_TOP = yes
|
||||
HIDE_FRIEND_COMPOUNDS = yes
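# These two tags are set here in addition to the options shared with the
# previous Doxyfile: DETAILS_AT_TOP places the detailed description near the
# top of each page, and HIDE_FRIEND_COMPOUNDS hides friend declarations from
# the class documentation. A hedged illustration (hypothetical class, not
# taken from the NDB headers):
#
#   class MyApiObject {
#     friend class MyApiHelper;   // not listed while the tag is set to yes
#   };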
|
||||
|
||||
# The PROJECT_NAME tag is a single word (or a sequence of words surrounded
|
||||
# by quotes) that should identify the project.
|
||||
|
||||
PROJECT_NAME =
|
||||
|
||||
# The PROJECT_NUMBER tag can be used to enter a project or revision number.
|
||||
# This could be handy for archiving the generated documentation or
|
||||
# if some version control system is used.
|
||||
|
||||
PROJECT_NUMBER =
|
||||
|
||||
# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute)
|
||||
# base path where the generated documentation will be put.
|
||||
# If a relative path is entered, it will be relative to the location
|
||||
# where doxygen was started. If left blank the current directory will be used.
|
||||
|
||||
OUTPUT_DIRECTORY =
|
||||
|
||||
# The OUTPUT_LANGUAGE tag is used to specify the language in which all
|
||||
# documentation generated by doxygen is written. Doxygen will use this
|
||||
# information to generate all constant output in the proper language.
|
||||
# The default language is English, other supported languages are:
|
||||
# Brazilian, Chinese, Croatian, Czech, Danish, Dutch, Finnish, French,
|
||||
# German, Hungarian, Italian, Japanese, Korean, Norwegian, Polish,
|
||||
# Portuguese, Romanian, Russian, Slovak, Slovene, Spanish and Swedish.
|
||||
|
||||
OUTPUT_LANGUAGE = English
|
||||
|
||||
# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in
|
||||
# documentation are documented, even if no documentation was available.
|
||||
# Private class members and static file members will be hidden unless
|
||||
# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES
|
||||
|
||||
EXTRACT_ALL = NO
|
||||
|
||||
# If the EXTRACT_PRIVATE tag is set to YES all private members of a class
|
||||
# will be included in the documentation.
|
||||
|
||||
EXTRACT_PRIVATE = NO
|
||||
|
||||
# If the EXTRACT_STATIC tag is set to YES all static members of a file
|
||||
# will be included in the documentation.
|
||||
|
||||
EXTRACT_STATIC = NO
|
||||
|
||||
# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all
|
||||
# undocumented members of documented classes, files or namespaces.
|
||||
# If set to NO (the default) these members will be included in the
|
||||
# various overviews, but no documentation section is generated.
|
||||
# This option has no effect if EXTRACT_ALL is enabled.
|
||||
|
||||
HIDE_UNDOC_MEMBERS = NO
|
||||
|
||||
# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all
|
||||
# undocumented classes that are normally visible in the class hierarchy.
|
||||
# If set to NO (the default) these classes will be included in the various
|
||||
# overviews. This option has no effect if EXTRACT_ALL is enabled.
|
||||
|
||||
HIDE_UNDOC_CLASSES = NO
|
||||
|
||||
# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will
|
||||
# include brief member descriptions after the members that are listed in
|
||||
# the file and class documentation (similar to JavaDoc).
|
||||
# Set to NO to disable this.
|
||||
|
||||
BRIEF_MEMBER_DESC = YES
|
||||
|
||||
# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend
|
||||
# the brief description of a member or function before the detailed description.
|
||||
# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
|
||||
# brief descriptions will be completely suppressed.
|
||||
|
||||
REPEAT_BRIEF = YES
|
||||
|
||||
# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
|
||||
# Doxygen will generate a detailed section even if there is only a brief
|
||||
# description.
|
||||
|
||||
ALWAYS_DETAILED_SEC = NO
|
||||
|
||||
# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full
|
||||
# path before files name in the file list and in the header files. If set
|
||||
# to NO the shortest path that makes the file name unique will be used.
|
||||
|
||||
FULL_PATH_NAMES = NO
|
||||
|
||||
# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag
|
||||
# can be used to strip a user defined part of the path. Stripping is
|
||||
# only done if one of the specified strings matches the left-hand part of
|
||||
# the path. It is allowed to use relative paths in the argument list.
|
||||
|
||||
STRIP_FROM_PATH =
|
||||
|
||||
# The INTERNAL_DOCS tag determines if documentation
|
||||
# that is typed after a \internal command is included. If the tag is set
|
||||
# to NO (the default) then the documentation will be excluded.
|
||||
# Set it to YES to include the internal documentation.
|
||||
|
||||
INTERNAL_DOCS = NO
|
||||
|
||||
# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct
|
||||
# doxygen to hide any special comment blocks from generated source code
|
||||
# fragments. Normal C and C++ comments will always remain visible.
|
||||
|
||||
STRIP_CODE_COMMENTS = YES
|
||||
|
||||
# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate
|
||||
# file names in lower case letters. If set to YES upper case letters are also
|
||||
# allowed. This is useful if you have classes or files whose names only differ
|
||||
# in case and if your file system supports case sensitive file names. Windows
|
||||
# users are advised to set this option to NO.
|
||||
|
||||
CASE_SENSE_NAMES = YES
|
||||
|
||||
# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter
|
||||
# (but less readable) file names. This can be useful if your file system
|
||||
# doesn't support long names like on DOS, Mac, or CD-ROM.
|
||||
|
||||
SHORT_NAMES = NO
|
||||
|
||||
# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen
|
||||
# will show members with their full class and namespace scopes in the
|
||||
# documentation. If set to YES the scope will be hidden.
|
||||
|
||||
HIDE_SCOPE_NAMES = NO
|
||||
|
||||
# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen
|
||||
# will generate a verbatim copy of the header file for each class for
|
||||
# which an include is specified. Set to NO to disable this.
|
||||
|
||||
VERBATIM_HEADERS = YES
|
||||
|
||||
# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen
|
||||
# will put list of the files that are included by a file in the documentation
|
||||
# of that file.
|
||||
|
||||
SHOW_INCLUDE_FILES = YES
|
||||
|
||||
# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen
|
||||
# will interpret the first line (until the first dot) of a JavaDoc-style
|
||||
# comment as the brief description. If set to NO, the JavaDoc
|
||||
# comments will behave just like the Qt-style comments (thus requiring an
|
||||
# explicit @brief command for a brief description).
|
||||
|
||||
JAVADOC_AUTOBRIEF = NO
|
||||
|
||||
# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented
|
||||
# member inherits the documentation from any documented member that it
|
||||
# reimplements.
|
||||
|
||||
INHERIT_DOCS = YES
|
||||
|
||||
# If the INLINE_INFO tag is set to YES (the default) then a tag [inline]
|
||||
# is inserted in the documentation for inline members.
|
||||
|
||||
INLINE_INFO = YES
|
||||
|
||||
# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen
|
||||
# will sort the (detailed) documentation of file and class members
|
||||
# alphabetically by member name. If set to NO the members will appear in
|
||||
# declaration order.
|
||||
|
||||
SORT_MEMBER_DOCS = YES
|
||||
|
||||
# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
|
||||
# tag is set to YES, then doxygen will reuse the documentation of the first
|
||||
# member in the group (if any) for the other members of the group. By default
|
||||
# all members of a group must be documented explicitly.
|
||||
|
||||
DISTRIBUTE_GROUP_DOC = NO
|
||||
|
||||
# The TAB_SIZE tag can be used to set the number of spaces in a tab.
|
||||
# Doxygen uses this value to replace tabs by spaces in code fragments.
|
||||
|
||||
TAB_SIZE = 8
|
||||
|
||||
# The GENERATE_TODOLIST tag can be used to enable (YES) or
|
||||
# disable (NO) the todo list. This list is created by putting \todo
|
||||
# commands in the documentation.
|
||||
|
||||
GENERATE_TODOLIST = YES
|
||||
|
||||
# The GENERATE_TESTLIST tag can be used to enable (YES) or
|
||||
# disable (NO) the test list. This list is created by putting \test
|
||||
# commands in the documentation.
|
||||
|
||||
GENERATE_TESTLIST = YES
|
||||
|
||||
# The GENERATE_BUGLIST tag can be used to enable (YES) or
|
||||
# disable (NO) the bug list. This list is created by putting \bug
|
||||
# commands in the documentation.
|
||||
|
||||
GENERATE_BUGLIST = YES
|
||||
|
||||
# This tag can be used to specify a number of aliases that act
|
||||
# as commands in the documentation. An alias has the form "name=value".
|
||||
# For example adding "sideeffect=\par Side Effects:\n" will allow you to
|
||||
# put the command \sideeffect (or @sideeffect) in the documentation, which
|
||||
# will result in a user defined paragraph with heading "Side Effects:".
|
||||
# You can put \n's in the value part of an alias to insert newlines.
|
||||
|
||||
ALIASES =
|
||||
|
||||
# The ENABLED_SECTIONS tag can be used to enable conditional
|
||||
# documentation sections, marked by \if sectionname ... \endif.
|
||||
|
||||
ENABLED_SECTIONS =
|
||||
|
||||
# The MAX_INITIALIZER_LINES tag determines the maximum number of lines
|
||||
# the initial value of a variable or define consists of for it to appear in
|
||||
# the documentation. If the initializer consists of more lines than specified
|
||||
# here it will be hidden. Use a value of 0 to hide initializers completely.
|
||||
# The appearance of the initializer of individual variables and defines in the
|
||||
# documentation can be controlled using \showinitializer or \hideinitializer
|
||||
# command in the documentation regardless of this setting.
|
||||
|
||||
MAX_INITIALIZER_LINES = 30
|
||||
|
||||
# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
|
||||
# only. Doxygen will then generate output that is more tailored for C.
|
||||
# For instance some of the names that are used will be different. The list
|
||||
# of all members will be omitted, etc.
|
||||
|
||||
OPTIMIZE_OUTPUT_FOR_C = NO
|
||||
|
||||
# Set the SHOW_USED_FILES tag to NO to disable the list of files generated
|
||||
# at the bottom of the documentation of classes and structs. If set to YES the
|
||||
# list will mention the files that were used to generate the documentation.
|
||||
|
||||
SHOW_USED_FILES = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to warning and progress messages
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The QUIET tag can be used to turn on/off the messages that are generated
|
||||
# by doxygen. Possible values are YES and NO. If left blank NO is used.
|
||||
|
||||
QUIET = NO
|
||||
|
||||
# The WARNINGS tag can be used to turn on/off the warning messages that are
|
||||
# generated by doxygen. Possible values are YES and NO. If left blank
|
||||
# NO is used.
|
||||
|
||||
WARNINGS = YES
|
||||
|
||||
# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings
|
||||
# for undocumented members. If EXTRACT_ALL is set to YES then this flag will
|
||||
# automatically be disabled.
|
||||
|
||||
WARN_IF_UNDOCUMENTED = YES
|
||||
|
||||
# The WARN_FORMAT tag determines the format of the warning messages that
|
||||
# doxygen can produce. The string should contain the $file, $line, and $text
|
||||
# tags, which will be replaced by the file and line number from which the
|
||||
# warning originated and the warning text.
|
||||
|
||||
WARN_FORMAT =
|
||||
|
||||
# The WARN_LOGFILE tag can be used to specify a file to which warning
|
||||
# and error messages should be written. If left blank the output is written
|
||||
# to stderr.
|
||||
|
||||
WARN_LOGFILE =
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the input files
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The INPUT tag can be used to specify the files and/or directories that contain
|
||||
# documented source files. You may enter file names like "myfile.cpp" or
|
||||
# directories like "/usr/src/myproject". Separate the files or directories
|
||||
# with spaces.
|
||||
|
||||
INPUT = .
|
||||
|
||||
# If the value of the INPUT tag contains directories, you can use the
|
||||
# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
|
||||
# and *.h) to filter out the source-files in the directories. If left
|
||||
# blank, files matching one of the following patterns are included:
|
||||
# *.c *.cc *.cxx *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx *.hpp
|
||||
# *.h++ *.idl
|
||||
|
||||
FILE_PATTERNS =
|
||||
|
||||
# The RECURSIVE tag can be used to specify whether or not subdirectories
|
||||
# should be searched for input files as well. Possible values are YES and NO.
|
||||
# If left blank NO is used.
|
||||
|
||||
RECURSIVE = NO
|
||||
|
||||
# The EXCLUDE tag can be used to specify files and/or directories that should
|
||||
# be excluded from the INPUT source files. This way you can easily exclude a
|
||||
# subdirectory from a directory tree whose root is specified with the INPUT tag.
|
||||
|
||||
EXCLUDE =
|
||||
|
||||
# If the value of the INPUT tag contains directories, you can use the
|
||||
# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
|
||||
# certain files from those directories.
|
||||
|
||||
EXCLUDE_PATTERNS =
|
||||
|
||||
# The EXAMPLE_PATH tag can be used to specify one or more files or
|
||||
# directories that contain example code fragments that are included (see
|
||||
# the \include command).
|
||||
|
||||
EXAMPLE_PATH = .
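# Because EXAMPLE_PATH points at the current directory, a documentation
# comment can pull in a complete example file with the \include command.
# A minimal sketch (the file name my_example.cpp is hypothetical):
#
#   /**
#    * \include my_example.cpp
#    */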
|
||||
|
||||
# If the value of the EXAMPLE_PATH tag contains directories, you can use the
|
||||
# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
|
||||
# and *.h) to filter out the source-files in the directories. If left
|
||||
# blank all files are included.
|
||||
|
||||
EXAMPLE_PATTERNS =
|
||||
|
||||
# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
|
||||
# searched for input files to be used with the \include or \dontinclude
|
||||
# commands irrespective of the value of the RECURSIVE tag.
|
||||
# Possible values are YES and NO. If left blank NO is used.
|
||||
|
||||
EXAMPLE_RECURSIVE = NO
|
||||
|
||||
# The IMAGE_PATH tag can be used to specify one or more files or
|
||||
# directories that contain images that are included in the documentation (see
|
||||
# the \image command).
|
||||
|
||||
IMAGE_PATH =
|
||||
|
||||
# The INPUT_FILTER tag can be used to specify a program that doxygen should
|
||||
# invoke to filter for each input file. Doxygen will invoke the filter program
|
||||
# by executing (via popen()) the command <filter> <input-file>, where <filter>
|
||||
# is the value of the INPUT_FILTER tag, and <input-file> is the name of an
|
||||
# input file. Doxygen will then use the output that the filter program writes
|
||||
# to standard output.
|
||||
|
||||
INPUT_FILTER =
|
||||
|
||||
# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
|
||||
# INPUT_FILTER) will be used to filter the input files when producing source
|
||||
# files to browse.
|
||||
|
||||
FILTER_SOURCE_FILES = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to source browsing
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the SOURCE_BROWSER tag is set to YES then a list of source files will
|
||||
# be generated. Documented entities will be cross-referenced with these sources.
|
||||
|
||||
SOURCE_BROWSER = NO
|
||||
|
||||
# Setting the INLINE_SOURCES tag to YES will include the body
|
||||
# of functions and classes directly in the documentation.
|
||||
|
||||
INLINE_SOURCES = NO
|
||||
|
||||
# If the REFERENCED_BY_RELATION tag is set to YES (the default)
|
||||
# then for each documented function all documented
|
||||
# functions referencing it will be listed.
|
||||
|
||||
REFERENCED_BY_RELATION = YES
|
||||
|
||||
# If the REFERENCES_RELATION tag is set to YES (the default)
|
||||
# then for each documented function all documented entities
|
||||
# called/used by that function will be listed.
|
||||
|
||||
REFERENCES_RELATION = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the alphabetical class index
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index
|
||||
# of all compounds will be generated. Enable this if the project
|
||||
# contains a lot of classes, structs, unions or interfaces.
|
||||
|
||||
ALPHABETICAL_INDEX = NO
|
||||
|
||||
# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then
|
||||
# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns
|
||||
# in which this list will be split (can be a number in the range [1..20])
|
||||
|
||||
COLS_IN_ALPHA_INDEX = 5
|
||||
|
||||
# In case all classes in a project start with a common prefix, all
|
||||
# classes will be put under the same header in the alphabetical index.
|
||||
# The IGNORE_PREFIX tag can be used to specify one or more prefixes that
|
||||
# should be ignored while generating the index headers.
|
||||
|
||||
IGNORE_PREFIX =
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the HTML output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_HTML tag is set to YES (the default) Doxygen will
|
||||
# generate HTML output.
|
||||
|
||||
GENERATE_HTML = YES
|
||||
|
||||
# The HTML_OUTPUT tag is used to specify where the HTML docs will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `html' will be used as the default path.
|
||||
|
||||
HTML_OUTPUT = ../.doxyout/ndbapi.html
|
||||
|
||||
# The HTML_HEADER tag can be used to specify a personal HTML header for
|
||||
# each generated HTML page. If it is left blank doxygen will generate a
|
||||
# standard header.
|
||||
|
||||
HTML_HEADER =
|
||||
|
||||
# The HTML_FOOTER tag can be used to specify a personal HTML footer for
|
||||
# each generated HTML page. If it is left blank doxygen will generate a
|
||||
# standard footer.
|
||||
|
||||
HTML_FOOTER = footer.html
|
||||
|
||||
# The HTML_STYLESHEET tag can be used to specify a user defined cascading
|
||||
# style sheet that is used by each HTML page. It can be used to
|
||||
# fine-tune the look of the HTML output. If the tag is left blank doxygen
|
||||
# will generate a default style sheet
|
||||
|
||||
HTML_STYLESHEET =
|
||||
|
||||
# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes,
|
||||
# files or namespaces will be aligned in HTML using tables. If set to
|
||||
# NO a bullet list will be used.
|
||||
|
||||
HTML_ALIGN_MEMBERS = YES
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, additional index files
|
||||
# will be generated that can be used as input for tools like the
|
||||
# Microsoft HTML help workshop to generate a compressed HTML help file (.chm)
|
||||
# of the generated HTML documentation.
|
||||
|
||||
GENERATE_HTMLHELP = NO
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag
|
||||
# controls if a separate .chi index file is generated (YES) or that
|
||||
# it should be included in the master .chm file (NO).
|
||||
|
||||
GENERATE_CHI = NO
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag
|
||||
# controls whether a binary table of contents is generated (YES) or a
|
||||
# normal table of contents (NO) in the .chm file.
|
||||
|
||||
BINARY_TOC = NO
|
||||
|
||||
# The TOC_EXPAND flag can be set to YES to add extra items for group members
|
||||
# to the contents of the Html help documentation and to the tree view.
|
||||
|
||||
TOC_EXPAND = NO
|
||||
|
||||
# The DISABLE_INDEX tag can be used to turn on/off the condensed index at
|
||||
# top of each HTML page. The value NO (the default) enables the index and
|
||||
# the value YES disables it.
|
||||
|
||||
DISABLE_INDEX = NO
|
||||
|
||||
# This tag can be used to set the number of enum values (range [1..20])
|
||||
# that doxygen will group on one line in the generated HTML documentation.
|
||||
|
||||
ENUM_VALUES_PER_LINE = 4
|
||||
|
||||
# If the GENERATE_TREEVIEW tag is set to YES, a side panel will be
|
||||
# generated containing a tree-like index structure (just like the one that
|
||||
# is generated for HTML Help). For this to work a browser that supports
|
||||
# JavaScript and frames is required (for instance Mozilla, Netscape 4.0+,
|
||||
# or Internet explorer 4.0+). Note that for large projects the tree generation
|
||||
# can take a very long time. In such cases it is better to disable this feature.
|
||||
# Windows users are probably better off using the HTML help feature.
|
||||
|
||||
GENERATE_TREEVIEW = NO
|
||||
|
||||
# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be
|
||||
# used to set the initial width (in pixels) of the frame in which the tree
|
||||
# is shown.
|
||||
|
||||
TREEVIEW_WIDTH = 250
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the LaTeX output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will
|
||||
# generate Latex output.
|
||||
|
||||
GENERATE_LATEX = YES
|
||||
|
||||
# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `latex' will be used as the default path.
|
||||
|
||||
LATEX_OUTPUT = ../.doxyout/ndbapi.latex
|
||||
|
||||
# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact
|
||||
# LaTeX documents. This may be useful for small projects and may help to
|
||||
# save some trees in general.
|
||||
|
||||
COMPACT_LATEX = NO
|
||||
|
||||
# The PAPER_TYPE tag can be used to set the paper type that is used
|
||||
# by the printer. Possible values are: a4, a4wide, letter, legal and
|
||||
# executive. If left blank a4wide will be used.
|
||||
|
||||
PAPER_TYPE =
|
||||
|
||||
# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX
|
||||
# packages that should be included in the LaTeX output.
|
||||
|
||||
EXTRA_PACKAGES =
|
||||
|
||||
# The LATEX_HEADER tag can be used to specify a personal LaTeX header for
|
||||
# the generated latex document. The header should contain everything until
|
||||
# the first chapter. If it is left blank doxygen will generate a
|
||||
# standard header. Notice: only use this tag if you know what you are doing!
|
||||
|
||||
LATEX_HEADER = ../doxygen/header.ndbapi.tex
|
||||
|
||||
# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated
|
||||
# is prepared for conversion to pdf (using ps2pdf). The pdf file will
|
||||
# contain links (just like the HTML output) instead of page references
|
||||
# This makes the output suitable for online browsing using a pdf viewer.
|
||||
|
||||
PDF_HYPERLINKS = YES
|
||||
|
||||
# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of
|
||||
# plain latex in the generated Makefile. Set this option to YES to get a
|
||||
# higher quality PDF documentation.
|
||||
|
||||
USE_PDFLATEX = YES
|
||||
|
||||
# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode.
|
||||
# command to the generated LaTeX files. This will instruct LaTeX to keep
|
||||
# running if errors occur, instead of asking the user for help.
|
||||
# This option is also used when generating formulas in HTML.
|
||||
|
||||
LATEX_BATCHMODE = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the RTF output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output
|
||||
# The RTF output is optimised for Word 97 and may not look very pretty with
|
||||
# other RTF readers or editors.
|
||||
|
||||
GENERATE_RTF = NO
|
||||
|
||||
# The RTF_OUTPUT tag is used to specify where the RTF docs will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `rtf' will be used as the default path.
|
||||
|
||||
RTF_OUTPUT = ../ndbapi.rtf
|
||||
|
||||
# If the COMPACT_RTF tag is set to YES Doxygen generates more compact
|
||||
# RTF documents. This may be useful for small projects and may help to
|
||||
# save some trees in general.
|
||||
|
||||
COMPACT_RTF = NO
|
||||
|
||||
# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated
|
||||
# will contain hyperlink fields. The RTF file will
|
||||
# contain links (just like the HTML output) instead of page references.
|
||||
# This makes the output suitable for online browsing using WORD or other
|
||||
# programs which support those fields.
|
||||
# Note: wordpad (write) and others do not support links.
|
||||
|
||||
RTF_HYPERLINKS = NO
|
||||
|
||||
# Load stylesheet definitions from file. Syntax is similar to doxygen's
|
||||
# config file, i.e. a series of assigments. You only have to provide
|
||||
# replacements, missing definitions are set to their default value.
|
||||
|
||||
RTF_STYLESHEET_FILE =
|
||||
|
||||
# Set optional variables used in the generation of an rtf document.
|
||||
# Syntax is similar to doxygen's config file.
|
||||
|
||||
RTF_EXTENSIONS_FILE =
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the man page output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_MAN tag is set to YES (the default) Doxygen will
|
||||
# generate man pages
|
||||
|
||||
GENERATE_MAN = NO
|
||||
|
||||
# The MAN_OUTPUT tag is used to specify where the man pages will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `man' will be used as the default path.
|
||||
|
||||
MAN_OUTPUT =
|
||||
|
||||
# The MAN_EXTENSION tag determines the extension that is added to
|
||||
# the generated man pages (default is the subroutine's section .3)
|
||||
|
||||
MAN_EXTENSION =
|
||||
|
||||
# If the MAN_LINKS tag is set to YES and Doxygen generates man output,
|
||||
# then it will generate one additional man file for each entity
|
||||
# documented in the real man page(s). These additional files
|
||||
# only source the real man page, but without them the man command
|
||||
# would be unable to find the correct page. The default is NO.
|
||||
|
||||
MAN_LINKS = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the XML output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_XML tag is set to YES Doxygen will
|
||||
# generate an XML file that captures the structure of
|
||||
# the code including all documentation. Note that this
|
||||
# feature is still experimental and incomplete at the
|
||||
# moment.
|
||||
|
||||
GENERATE_XML = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the preprocessor
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will
|
||||
# evaluate all C-preprocessor directives found in the sources and include
|
||||
# files.
|
||||
|
||||
ENABLE_PREPROCESSING = YES
|
||||
|
||||
# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro
|
||||
# names in the source code. If set to NO (the default) only conditional
|
||||
# compilation will be performed. Macro expansion can be done in a controlled
|
||||
# way by setting EXPAND_ONLY_PREDEF to YES.
|
||||
|
||||
MACRO_EXPANSION = YES
|
||||
|
||||
# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES
|
||||
# then the macro expansion is limited to the macros specified with the
|
||||
# PREDEFINED and EXPAND_AS_PREDEFINED tags.
|
||||
|
||||
EXPAND_ONLY_PREDEF = YES
|
||||
|
||||
# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files
|
||||
# in the INCLUDE_PATH (see below) will be search if a #include is found.
|
||||
|
||||
SEARCH_INCLUDES = YES
|
||||
|
||||
# The INCLUDE_PATH tag can be used to specify one or more directories that
|
||||
# contain include files that are not input files but should be processed by
|
||||
# the preprocessor.
|
||||
|
||||
INCLUDE_PATH =
|
||||
|
||||
# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
|
||||
# patterns (like *.h and *.hpp) to filter out the header-files in the
|
||||
# directories. If left blank, the patterns specified with FILE_PATTERNS will
|
||||
# be used.
|
||||
|
||||
INCLUDE_FILE_PATTERNS =
|
||||
|
||||
# The PREDEFINED tag can be used to specify one or more macro names that
|
||||
# are defined before the preprocessor is started (similar to the -D option of
|
||||
# gcc). The argument of the tag is a list of macros of the form: name
|
||||
# or name=definition (no spaces). If the definition and the = are
|
||||
# omitted =1 is assumed.
|
||||
|
||||
PREDEFINED = DOXYGEN_SHOULD_SKIP_DEPRECATED \
|
||||
DOXYGEN_SHOULD_SKIP_INTERNAL \
|
||||
protected=private
|
||||
|
||||
# If the MACRO_EXPANSION and EXPAND_PREDEF_ONLY tags are set to YES then
|
||||
# this tag can be used to specify a list of macro names that should be expanded.
|
||||
# The macro definition that is found in the sources will be used.
|
||||
# Use the PREDEFINED tag if you want to use a different macro definition.
|
||||
|
||||
EXPAND_AS_DEFINED =
|
||||
|
||||
# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then
|
||||
# doxygen's preprocessor will remove all function-like macros that are alone
|
||||
# on a line and do not end with a semicolon. Such function macros are typically
|
||||
# used for boiler-plate code, and will confuse the parser if not removed.
|
||||
|
||||
SKIP_FUNCTION_MACROS = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration::addtions related to external references
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The TAGFILES tag can be used to specify one or more tagfiles.
|
||||
|
||||
TAGFILES =
|
||||
|
||||
# When a file name is specified after GENERATE_TAGFILE, doxygen will create
|
||||
# a tag file that is based on the input files it reads.
|
||||
|
||||
GENERATE_TAGFILE =
|
||||
|
||||
# If the ALLEXTERNALS tag is set to YES all external classes will be listed
|
||||
# in the class index. If set to NO only the inherited external classes
|
||||
# will be listed.
|
||||
|
||||
ALLEXTERNALS = NO
|
||||
|
||||
# The PERL_PATH should be the absolute path and name of the perl script
|
||||
# interpreter (i.e. the result of `which perl').
|
||||
|
||||
PERL_PATH =
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the dot tool
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will
|
||||
# generate a inheritance diagram (in Html, RTF and LaTeX) for classes with base or
|
||||
# super classes. Setting the tag to NO turns the diagrams off. Note that this
|
||||
# option is superceded by the HAVE_DOT option below. This is only a fallback. It is
|
||||
# recommended to install and use dot, since it yield more powerful graphs.
|
||||
|
||||
CLASS_DIAGRAMS = YES
|
||||
|
||||
# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
|
||||
# available from the path. This tool is part of Graphviz, a graph visualization
|
||||
# toolkit from AT&T and Lucent Bell Labs. The other options in this section
|
||||
# have no effect if this option is set to NO (the default)
|
||||
|
||||
HAVE_DOT = NO
|
||||
|
||||
# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen
|
||||
# will generate a graph for each documented class showing the direct and
|
||||
# indirect inheritance relations. Setting this tag to YES will force the
|
||||
# the CLASS_DIAGRAMS tag to NO.
|
||||
|
||||
CLASS_GRAPH = YES
|
||||
|
||||
# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen
|
||||
# will generate a graph for each documented class showing the direct and
|
||||
# indirect implementation dependencies (inheritance, containment, and
|
||||
# class references variables) of the class with other documented classes.
|
||||
|
||||
COLLABORATION_GRAPH = YES
|
||||
|
||||
# If set to YES, the inheritance and collaboration graphs will show the
|
||||
# relations between templates and their instances.
|
||||
|
||||
TEMPLATE_RELATIONS = YES
|
||||
|
||||
# If set to YES, the inheritance and collaboration graphs will hide
|
||||
# inheritance and usage relations if the target is undocumented
|
||||
# or is not a class.
|
||||
|
||||
HIDE_UNDOC_RELATIONS = YES
|
||||
|
||||
# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT
|
||||
# tags are set to YES then doxygen will generate a graph for each documented
|
||||
# file showing the direct and indirect include dependencies of the file with
|
||||
# other documented files.
|
||||
|
||||
INCLUDE_GRAPH = YES
|
||||
|
||||
# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and
|
||||
# HAVE_DOT tags are set to YES then doxygen will generate a graph for each
|
||||
# documented header file showing the documented files that directly or
|
||||
# indirectly include this file.
|
||||
|
||||
INCLUDED_BY_GRAPH = YES
|
||||
|
||||
# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen
|
||||
# will graphical hierarchy of all classes instead of a textual one.
|
||||
|
||||
GRAPHICAL_HIERARCHY = YES
|
||||
|
||||
# The tag DOT_PATH can be used to specify the path where the dot tool can be
|
||||
# found. If left blank, it is assumed the dot tool can be found on the path.
|
||||
|
||||
DOT_PATH =
|
||||
|
||||
# The DOTFILE_DIRS tag can be used to specify one or more directories that
|
||||
# contain dot files that are included in the documentation (see the
|
||||
# \dotfile command).
|
||||
|
||||
DOTFILE_DIRS =
|
||||
|
||||
# The MAX_DOT_GRAPH_WIDTH tag can be used to set the maximum allowed width
|
||||
# (in pixels) of the graphs generated by dot. If a graph becomes larger than
|
||||
# this value, doxygen will try to truncate the graph, so that it fits within
|
||||
# the specified constraint. Beware that most browsers cannot cope with very
|
||||
# large images.
|
||||
|
||||
MAX_DOT_GRAPH_WIDTH = 1024
|
||||
|
||||
# The MAX_DOT_GRAPH_HEIGHT tag can be used to set the maximum allows height
|
||||
# (in pixels) of the graphs generated by dot. If a graph becomes larger than
|
||||
# this value, doxygen will try to truncate the graph, so that it fits within
|
||||
# the specified constraint. Beware that most browsers cannot cope with very
|
||||
# large images.
|
||||
|
||||
MAX_DOT_GRAPH_HEIGHT = 1024
|
||||
|
||||
# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will
|
||||
# generate a legend page explaining the meaning of the various boxes and
|
||||
# arrows in the dot generated graphs.
|
||||
|
||||
GENERATE_LEGEND = YES
|
||||
|
||||
# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will
|
||||
# remove the intermedate dot files that are used to generate
|
||||
# the various graphs.
|
||||
|
||||
DOT_CLEANUP = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration::addtions related to the search engine
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The SEARCHENGINE tag specifies whether or not a search engine should be
|
||||
# used. If set to NO the values of all tags below this one will be ignored.
|
||||
|
||||
SEARCHENGINE = NO
|
||||
|
||||
# The CGI_NAME tag should be the name of the CGI script that
|
||||
# starts the search engine (doxysearch) with the correct parameters.
|
||||
# A script with this name will be generated by doxygen.
|
||||
|
||||
CGI_NAME =
|
||||
|
||||
# The CGI_URL tag should be the absolute URL to the directory where the
|
||||
# cgi binaries are located. See the documentation of your http daemon for
|
||||
# details.
|
||||
|
||||
CGI_URL =
|
||||
|
||||
# The DOC_URL tag should be the absolute URL to the directory where the
|
||||
# documentation is located. If left blank the absolute path to the
|
||||
# documentation, with file:// prepended to it, will be used.
|
||||
|
||||
DOC_URL =
|
||||
|
||||
# The DOC_ABSPATH tag should be the absolute path to the directory where the
|
||||
# documentation is located. If left blank the directory on the local machine
|
||||
# will be used.
|
||||
|
||||
DOC_ABSPATH =
|
||||
|
||||
# The BIN_ABSPATH tag must point to the directory where the doxysearch binary
|
||||
# is installed.
|
||||
|
||||
BIN_ABSPATH =
|
||||
|
||||
# The EXT_DOC_PATHS tag can be used to specify one or more paths to
|
||||
# documentation generated for other projects. This allows doxysearch to search
|
||||
# the documentation for these projects as well.
|
||||
|
||||
EXT_DOC_PATHS =
|
921
ndb/docs/doxygen/Doxyfile.odbc
Normal file
921
ndb/docs/doxygen/Doxyfile.odbc
Normal file
|
@ -0,0 +1,921 @@
|
|||
# Doxyfile 1.2.14
|
||||
|
||||
# This file describes the settings to be used by the documentation system
|
||||
# doxygen (www.doxygen.org) for a project
|
||||
#
|
||||
# All text after a hash (#) is considered a comment and will be ignored
|
||||
# The format is:
|
||||
# TAG = value [value, ...]
|
||||
# For lists items can also be appended using:
|
||||
# TAG += value [value, ...]
|
||||
# Values that contain spaces should be placed between quotes (" ")
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# General configuration options
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The PROJECT_NAME tag is a single word (or a sequence of words surrounded
|
||||
# by quotes) that should identify the project.
|
||||
|
||||
PROJECT_NAME = "NDB ODBC"
|
||||
|
||||
# The PROJECT_NUMBER tag can be used to enter a project or revision number.
|
||||
# This could be handy for archiving the generated documentation or
|
||||
# if some version control system is used.
|
||||
|
||||
PROJECT_NUMBER =
|
||||
|
||||
# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute)
|
||||
# base path where the generated documentation will be put.
|
||||
# If a relative path is entered, it will be relative to the location
|
||||
# where doxygen was started. If left blank the current directory will be used.
|
||||
|
||||
OUTPUT_DIRECTORY =
|
||||
|
||||
# The OUTPUT_LANGUAGE tag is used to specify the language in which all
|
||||
# documentation generated by doxygen is written. Doxygen will use this
|
||||
# information to generate all constant output in the proper language.
|
||||
# The default language is English, other supported languages are:
|
||||
# Brazilian, Chinese, Croatian, Czech, Danish, Dutch, Finnish, French,
|
||||
# German, Greek, Hungarian, Italian, Japanese, Korean, Norwegian, Polish,
|
||||
# Portuguese, Romanian, Russian, Slovak, Slovene, Spanish and Swedish.
|
||||
|
||||
OUTPUT_LANGUAGE = English
|
||||
|
||||
# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in
|
||||
# documentation are documented, even if no documentation was available.
|
||||
# Private class members and static file members will be hidden unless
|
||||
# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES
|
||||
|
||||
EXTRACT_ALL = YES
|
||||
|
||||
# If the EXTRACT_PRIVATE tag is set to YES all private members of a class
|
||||
# will be included in the documentation.
|
||||
|
||||
EXTRACT_PRIVATE = YES
|
||||
|
||||
# If the EXTRACT_STATIC tag is set to YES all static members of a file
|
||||
# will be included in the documentation.
|
||||
|
||||
EXTRACT_STATIC = YES
|
||||
|
||||
# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs)
|
||||
# defined locally in source files will be included in the documentation.
|
||||
# If set to NO only classes defined in header files are included.
|
||||
|
||||
EXTRACT_LOCAL_CLASSES = YES
|
||||
|
||||
# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all
|
||||
# undocumented members of documented classes, files or namespaces.
|
||||
# If set to NO (the default) these members will be included in the
|
||||
# various overviews, but no documentation section is generated.
|
||||
# This option has no effect if EXTRACT_ALL is enabled.
|
||||
|
||||
HIDE_UNDOC_MEMBERS = NO
|
||||
|
||||
# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all
|
||||
# undocumented classes that are normally visible in the class hierarchy.
|
||||
# If set to NO (the default) these class will be included in the various
|
||||
# overviews. This option has no effect if EXTRACT_ALL is enabled.
|
||||
|
||||
HIDE_UNDOC_CLASSES = NO
|
||||
|
||||
# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will
|
||||
# include brief member descriptions after the members that are listed in
|
||||
# the file and class documentation (similar to JavaDoc).
|
||||
# Set to NO to disable this.
|
||||
|
||||
BRIEF_MEMBER_DESC = YES
|
||||
|
||||
# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend
|
||||
# the brief description of a member or function before the detailed description.
|
||||
# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
|
||||
# brief descriptions will be completely suppressed.
|
||||
|
||||
REPEAT_BRIEF = YES
|
||||
|
||||
# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
|
||||
# Doxygen will generate a detailed section even if there is only a brief
|
||||
# description.
|
||||
|
||||
ALWAYS_DETAILED_SEC = NO
|
||||
|
||||
# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all inherited
|
||||
# members of a class in the documentation of that class as if those members were
|
||||
# ordinary class members. Constructors, destructors and assignment operators of
|
||||
# the base classes will not be shown.
|
||||
|
||||
INLINE_INHERITED_MEMB = NO
|
||||
|
||||
# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full
|
||||
# path before files name in the file list and in the header files. If set
|
||||
# to NO the shortest path that makes the file name unique will be used.
|
||||
|
||||
FULL_PATH_NAMES = YES
|
||||
|
||||
# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag
|
||||
# can be used to strip a user defined part of the path. Stripping is
|
||||
# only done if one of the specified strings matches the left-hand part of
|
||||
# the path. It is allowed to use relative paths in the argument list.
|
||||
|
||||
STRIP_FROM_PATH = .
|
||||
|
||||
# The INTERNAL_DOCS tag determines if documentation
|
||||
# that is typed after a \internal command is included. If the tag is set
|
||||
# to NO (the default) then the documentation will be excluded.
|
||||
# Set it to YES to include the internal documentation.
|
||||
|
||||
INTERNAL_DOCS = YES
|
||||
|
||||
# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct
|
||||
# doxygen to hide any special comment blocks from generated source code
|
||||
# fragments. Normal C and C++ comments will always remain visible.
|
||||
|
||||
STRIP_CODE_COMMENTS = YES
|
||||
|
||||
# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate
|
||||
# file names in lower case letters. If set to YES upper case letters are also
|
||||
# allowed. This is useful if you have classes or files whose names only differ
|
||||
# in case and if your file system supports case sensitive file names. Windows
|
||||
# users are adviced to set this option to NO.
|
||||
|
||||
CASE_SENSE_NAMES = YES
|
||||
|
||||
# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter
|
||||
# (but less readable) file names. This can be useful is your file systems
|
||||
# doesn't support long names like on DOS, Mac, or CD-ROM.
|
||||
|
||||
SHORT_NAMES = NO
|
||||
|
||||
# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen
|
||||
# will show members with their full class and namespace scopes in the
|
||||
# documentation. If set to YES the scope will be hidden.
|
||||
|
||||
HIDE_SCOPE_NAMES = NO
|
||||
|
||||
# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen
|
||||
# will generate a verbatim copy of the header file for each class for
|
||||
# which an include is specified. Set to NO to disable this.
|
||||
|
||||
VERBATIM_HEADERS = YES
|
||||
|
||||
# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen
|
||||
# will put list of the files that are included by a file in the documentation
|
||||
# of that file.
|
||||
|
||||
SHOW_INCLUDE_FILES = YES
|
||||
|
||||
# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen
|
||||
# will interpret the first line (until the first dot) of a JavaDoc-style
|
||||
# comment as the brief description. If set to NO, the JavaDoc
|
||||
# comments will behave just like the Qt-style comments (thus requiring an
|
||||
# explict @brief command for a brief description.
|
||||
|
||||
JAVADOC_AUTOBRIEF = NO
|
||||
|
||||
# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented
|
||||
# member inherits the documentation from any documented member that it
|
||||
# reimplements.
|
||||
|
||||
INHERIT_DOCS = YES
|
||||
|
||||
# If the INLINE_INFO tag is set to YES (the default) then a tag [inline]
|
||||
# is inserted in the documentation for inline members.
|
||||
|
||||
INLINE_INFO = YES
|
||||
|
||||
# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen
|
||||
# will sort the (detailed) documentation of file and class members
|
||||
# alphabetically by member name. If set to NO the members will appear in
|
||||
# declaration order.
|
||||
|
||||
SORT_MEMBER_DOCS = YES
|
||||
|
||||
# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
|
||||
# tag is set to YES, then doxygen will reuse the documentation of the first
|
||||
# member in the group (if any) for the other members of the group. By default
|
||||
# all members of a group must be documented explicitly.
|
||||
|
||||
DISTRIBUTE_GROUP_DOC = NO
|
||||
|
||||
# The TAB_SIZE tag can be used to set the number of spaces in a tab.
|
||||
# Doxygen uses this value to replace tabs by spaces in code fragments.
|
||||
|
||||
TAB_SIZE = 8
|
||||
|
||||
# The GENERATE_TODOLIST tag can be used to enable (YES) or
|
||||
# disable (NO) the todo list. This list is created by putting \todo
|
||||
# commands in the documentation.
|
||||
|
||||
GENERATE_TODOLIST = YES
|
||||
|
||||
# The GENERATE_TESTLIST tag can be used to enable (YES) or
|
||||
# disable (NO) the test list. This list is created by putting \test
|
||||
# commands in the documentation.
|
||||
|
||||
GENERATE_TESTLIST = YES
|
||||
|
||||
# The GENERATE_BUGLIST tag can be used to enable (YES) or
|
||||
# disable (NO) the bug list. This list is created by putting \bug
|
||||
# commands in the documentation.
|
||||
|
||||
GENERATE_BUGLIST = YES
|
||||
|
||||
# This tag can be used to specify a number of aliases that acts
|
||||
# as commands in the documentation. An alias has the form "name=value".
|
||||
# For example adding "sideeffect=\par Side Effects:\n" will allow you to
|
||||
# put the command \sideeffect (or @sideeffect) in the documentation, which
|
||||
# will result in a user defined paragraph with heading "Side Effects:".
|
||||
# You can put \n's in the value part of an alias to insert newlines.
|
||||
|
||||
ALIASES =
|
||||
|
||||
# The ENABLED_SECTIONS tag can be used to enable conditional
|
||||
# documentation sections, marked by \if sectionname ... \endif.
|
||||
|
||||
ENABLED_SECTIONS =
|
||||
|
||||
# The MAX_INITIALIZER_LINES tag determines the maximum number of lines
|
||||
# the initial value of a variable or define consist of for it to appear in
|
||||
# the documentation. If the initializer consists of more lines than specified
|
||||
# here it will be hidden. Use a value of 0 to hide initializers completely.
|
||||
# The appearance of the initializer of individual variables and defines in the
|
||||
# documentation can be controlled using \showinitializer or \hideinitializer
|
||||
# command in the documentation regardless of this setting.
|
||||
|
||||
MAX_INITIALIZER_LINES = 30
|
||||
|
||||
# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
|
||||
# only. Doxygen will then generate output that is more tailored for C.
|
||||
# For instance some of the names that are used will be different. The list
|
||||
# of all members will be omitted, etc.
|
||||
|
||||
OPTIMIZE_OUTPUT_FOR_C = NO
|
||||
|
||||
# Set the SHOW_USED_FILES tag to NO to disable the list of files generated
|
||||
# at the bottom of the documentation of classes and structs. If set to YES the
|
||||
# list will mention the files that were used to generate the documentation.
|
||||
|
||||
SHOW_USED_FILES = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to warning and progress messages
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The QUIET tag can be used to turn on/off the messages that are generated
|
||||
# by doxygen. Possible values are YES and NO. If left blank NO is used.
|
||||
|
||||
QUIET = NO
|
||||
|
||||
# The WARNINGS tag can be used to turn on/off the warning messages that are
|
||||
# generated by doxygen. Possible values are YES and NO. If left blank
|
||||
# NO is used.
|
||||
|
||||
WARNINGS = YES
|
||||
|
||||
# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings
|
||||
# for undocumented members. If EXTRACT_ALL is set to YES then this flag will
|
||||
# automatically be disabled.
|
||||
|
||||
WARN_IF_UNDOCUMENTED = YES
|
||||
|
||||
# The WARN_FORMAT tag determines the format of the warning messages that
|
||||
# doxygen can produce. The string should contain the $file, $line, and $text
|
||||
# tags, which will be replaced by the file and line number from which the
|
||||
# warning originated and the warning text.
|
||||
|
||||
WARN_FORMAT = "$file:$line: $text"
|
||||
|
||||
# The WARN_LOGFILE tag can be used to specify a file to which warning
|
||||
# and error messages should be written. If left blank the output is written
|
||||
# to stderr.
|
||||
|
||||
WARN_LOGFILE =
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the input files
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The INPUT tag can be used to specify the files and/or directories that contain
|
||||
# documented source files. You may enter file names like "myfile.cpp" or
|
||||
# directories like "/usr/src/myproject". Separate the files or directories
|
||||
# with spaces.
|
||||
|
||||
INPUT = src/client/odbc
|
||||
|
||||
# If the value of the INPUT tag contains directories, you can use the
|
||||
# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
|
||||
# and *.h) to filter out the source-files in the directories. If left
|
||||
# blank the following patterns are tested:
|
||||
# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx *.hpp
|
||||
# *.h++ *.idl *.odl
|
||||
|
||||
FILE_PATTERNS =
|
||||
|
||||
# The RECURSIVE tag can be used to turn specify whether or not subdirectories
|
||||
# should be searched for input files as well. Possible values are YES and NO.
|
||||
# If left blank NO is used.
|
||||
|
||||
RECURSIVE = YES
|
||||
|
||||
# The EXCLUDE tag can be used to specify files and/or directories that should
|
||||
# excluded from the INPUT source files. This way you can easily exclude a
|
||||
# subdirectory from a directory tree whose root is specified with the INPUT tag.
|
||||
|
||||
EXCLUDE =
|
||||
|
||||
# The EXCLUDE_SYMLINKS tag can be used select whether or not files or directories
|
||||
# that are symbolic links (a Unix filesystem feature) are excluded from the input.
|
||||
|
||||
EXCLUDE_SYMLINKS = NO
|
||||
|
||||
# If the value of the INPUT tag contains directories, you can use the
|
||||
# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
|
||||
# certain files from those directories.
|
||||
|
||||
EXCLUDE_PATTERNS =
|
||||
|
||||
# The EXAMPLE_PATH tag can be used to specify one or more files or
|
||||
# directories that contain example code fragments that are included (see
|
||||
# the \include command).
|
||||
|
||||
EXAMPLE_PATH =
|
||||
|
||||
# If the value of the EXAMPLE_PATH tag contains directories, you can use the
|
||||
# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
|
||||
# and *.h) to filter out the source-files in the directories. If left
|
||||
# blank all files are included.
|
||||
|
||||
EXAMPLE_PATTERNS =
|
||||
|
||||
# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
|
||||
# searched for input files to be used with the \include or \dontinclude
|
||||
# commands irrespective of the value of the RECURSIVE tag.
|
||||
# Possible values are YES and NO. If left blank NO is used.
|
||||
|
||||
EXAMPLE_RECURSIVE = NO
|
||||
|
||||
# The IMAGE_PATH tag can be used to specify one or more files or
|
||||
# directories that contain image that are included in the documentation (see
|
||||
# the \image command).
|
||||
|
||||
IMAGE_PATH =
|
||||
|
||||
# The INPUT_FILTER tag can be used to specify a program that doxygen should
|
||||
# invoke to filter for each input file. Doxygen will invoke the filter program
|
||||
# by executing (via popen()) the command <filter> <input-file>, where <filter>
|
||||
# is the value of the INPUT_FILTER tag, and <input-file> is the name of an
|
||||
# input file. Doxygen will then use the output that the filter program writes
|
||||
# to standard output.
|
||||
|
||||
INPUT_FILTER =
|
||||
|
||||
# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
|
||||
# INPUT_FILTER) will be used to filter the input files when producing source
|
||||
# files to browse.
|
||||
|
||||
FILTER_SOURCE_FILES = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to source browsing
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the SOURCE_BROWSER tag is set to YES then a list of source files will
|
||||
# be generated. Documented entities will be cross-referenced with these sources.
|
||||
|
||||
SOURCE_BROWSER = YES
|
||||
|
||||
# Setting the INLINE_SOURCES tag to YES will include the body
|
||||
# of functions and classes directly in the documentation.
|
||||
|
||||
INLINE_SOURCES = YES
|
||||
|
||||
# If the REFERENCED_BY_RELATION tag is set to YES (the default)
|
||||
# then for each documented function all documented
|
||||
# functions referencing it will be listed.
|
||||
|
||||
REFERENCED_BY_RELATION = YES
|
||||
|
||||
# If the REFERENCES_RELATION tag is set to YES (the default)
|
||||
# then for each documented function all documented entities
|
||||
# called/used by that function will be listed.
|
||||
|
||||
REFERENCES_RELATION = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the alphabetical class index
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index
|
||||
# of all compounds will be generated. Enable this if the project
|
||||
# contains a lot of classes, structs, unions or interfaces.
|
||||
|
||||
ALPHABETICAL_INDEX = YES
|
||||
|
||||
# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then
|
||||
# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns
|
||||
# in which this list will be split (can be a number in the range [1..20])
|
||||
|
||||
COLS_IN_ALPHA_INDEX = 5
|
||||
|
||||
# In case all classes in a project start with a common prefix, all
|
||||
# classes will be put under the same header in the alphabetical index.
|
||||
# The IGNORE_PREFIX tag can be used to specify one or more prefixes that
|
||||
# should be ignored while generating the index headers.
|
||||
|
||||
IGNORE_PREFIX =
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the HTML output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_HTML tag is set to YES (the default) Doxygen will
|
||||
# generate HTML output.
|
||||
|
||||
GENERATE_HTML = YES
|
||||
|
||||
# The HTML_OUTPUT tag is used to specify where the HTML docs will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `html' will be used as the default path.
|
||||
|
||||
HTML_OUTPUT = ../.doxyout/odbc.html
|
||||
|
||||
# The HTML_FILE_EXTENSION tag can be used to specify the file extension for
|
||||
# each generated HTML page (for example: .htm,.php,.asp). If it is left blank
|
||||
# doxygen will generate files with .html extension.
|
||||
|
||||
HTML_FILE_EXTENSION = .html
|
||||
|
||||
# The HTML_HEADER tag can be used to specify a personal HTML header for
|
||||
# each generated HTML page. If it is left blank doxygen will generate a
|
||||
# standard header.
|
||||
|
||||
HTML_HEADER =
|
||||
|
||||
# The HTML_FOOTER tag can be used to specify a personal HTML footer for
|
||||
# each generated HTML page. If it is left blank doxygen will generate a
|
||||
# standard footer.
|
||||
|
||||
HTML_FOOTER =
|
||||
|
||||
# The HTML_STYLESHEET tag can be used to specify a user defined cascading
|
||||
# style sheet that is used by each HTML page. It can be used to
|
||||
# fine-tune the look of the HTML output. If the tag is left blank doxygen
|
||||
# will generate a default style sheet
|
||||
|
||||
HTML_STYLESHEET =
|
||||
|
||||
# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes,
|
||||
# files or namespaces will be aligned in HTML using tables. If set to
|
||||
# NO a bullet list will be used.
|
||||
|
||||
HTML_ALIGN_MEMBERS = YES
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, additional index files
|
||||
# will be generated that can be used as input for tools like the
|
||||
# Microsoft HTML help workshop to generate a compressed HTML help file (.chm)
|
||||
# of the generated HTML documentation.
|
||||
|
||||
GENERATE_HTMLHELP = NO
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag
|
||||
# controls if a separate .chi index file is generated (YES) or that
|
||||
# it should be included in the master .chm file (NO).
|
||||
|
||||
GENERATE_CHI = NO
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag
|
||||
# controls whether a binary table of contents is generated (YES) or a
|
||||
# normal table of contents (NO) in the .chm file.
|
||||
|
||||
BINARY_TOC = NO
|
||||
|
||||
# The TOC_EXPAND flag can be set to YES to add extra items for group members
|
||||
# to the contents of the Html help documentation and to the tree view.
|
||||
|
||||
TOC_EXPAND = NO
|
||||
|
||||
# The DISABLE_INDEX tag can be used to turn on/off the condensed index at
|
||||
# top of each HTML page. The value NO (the default) enables the index and
|
||||
# the value YES disables it.
|
||||
|
||||
DISABLE_INDEX = NO
|
||||
|
||||
# This tag can be used to set the number of enum values (range [1..20])
|
||||
# that doxygen will group on one line in the generated HTML documentation.
|
||||
|
||||
ENUM_VALUES_PER_LINE = 4
|
||||
|
||||
# If the GENERATE_TREEVIEW tag is set to YES, a side panel will be
|
||||
# generated containing a tree-like index structure (just like the one that
|
||||
# is generated for HTML Help). For this to work a browser that supports
|
||||
# JavaScript and frames is required (for instance Mozilla, Netscape 4.0+,
|
||||
# or Internet explorer 4.0+). Note that for large projects the tree generation
|
||||
# can take a very long time. In such cases it is better to disable this feature.
|
||||
# Windows users are probably better off using the HTML help feature.
|
||||
|
||||
GENERATE_TREEVIEW = NO
|
||||
|
||||
# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be
|
||||
# used to set the initial width (in pixels) of the frame in which the tree
|
||||
# is shown.
|
||||
|
||||
TREEVIEW_WIDTH = 250
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the LaTeX output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will
|
||||
# generate Latex output.
|
||||
|
||||
GENERATE_LATEX = NO
|
||||
|
||||
# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `latex' will be used as the default path.
|
||||
|
||||
LATEX_OUTPUT = ../.doxyout/odbc.latex
|
||||
|
||||
# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact
|
||||
# LaTeX documents. This may be useful for small projects and may help to
|
||||
# save some trees in general.
|
||||
|
||||
COMPACT_LATEX = NO
|
||||
|
||||
# The PAPER_TYPE tag can be used to set the paper type that is used
|
||||
# by the printer. Possible values are: a4, a4wide, letter, legal and
|
||||
# executive. If left blank a4wide will be used.
|
||||
|
||||
PAPER_TYPE = a4wide
|
||||
|
||||
# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX
|
||||
# packages that should be included in the LaTeX output.
|
||||
|
||||
EXTRA_PACKAGES =
|
||||
|
||||
# The LATEX_HEADER tag can be used to specify a personal LaTeX header for
|
||||
# the generated latex document. The header should contain everything until
|
||||
# the first chapter. If it is left blank doxygen will generate a
|
||||
# standard header. Notice: only use this tag if you know what you are doing!
|
||||
|
||||
LATEX_HEADER =
|
||||
|
||||
# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated
|
||||
# is prepared for conversion to pdf (using ps2pdf). The pdf file will
|
||||
# contain links (just like the HTML output) instead of page references
|
||||
# This makes the output suitable for online browsing using a pdf viewer.
|
||||
|
||||
PDF_HYPERLINKS = NO
|
||||
|
||||
# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of
|
||||
# plain latex in the generated Makefile. Set this option to YES to get a
|
||||
# higher quality PDF documentation.
|
||||
|
||||
USE_PDFLATEX = NO
|
||||
|
||||
# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode.
|
||||
# command to the generated LaTeX files. This will instruct LaTeX to keep
|
||||
# running if errors occur, instead of asking the user for help.
|
||||
# This option is also used when generating formulas in HTML.
|
||||
|
||||
LATEX_BATCHMODE = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the RTF output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output
|
||||
# The RTF output is optimised for Word 97 and may not look very pretty with
|
||||
# other RTF readers or editors.
|
||||
|
||||
GENERATE_RTF = NO
|
||||
|
||||
# The RTF_OUTPUT tag is used to specify where the RTF docs will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `rtf' will be used as the default path.
|
||||
|
||||
RTF_OUTPUT = rtf
|
||||
|
||||
# If the COMPACT_RTF tag is set to YES Doxygen generates more compact
|
||||
# RTF documents. This may be useful for small projects and may help to
|
||||
# save some trees in general.
|
||||
|
||||
COMPACT_RTF = NO
|
||||
|
||||
# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated
|
||||
# will contain hyperlink fields. The RTF file will
|
||||
# contain links (just like the HTML output) instead of page references.
|
||||
# This makes the output suitable for online browsing using WORD or other
|
||||
# programs which support those fields.
|
||||
# Note: wordpad (write) and others do not support links.
|
||||
|
||||
RTF_HYPERLINKS = NO
|
||||
|
||||
# Load stylesheet definitions from file. Syntax is similar to doxygen's
|
||||
# config file, i.e. a series of assigments. You only have to provide
|
||||
# replacements, missing definitions are set to their default value.
|
||||
|
||||
RTF_STYLESHEET_FILE =
|
||||
|
||||
# Set optional variables used in the generation of an rtf document.
|
||||
# Syntax is similar to doxygen's config file.
|
||||
|
||||
RTF_EXTENSIONS_FILE =
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the man page output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_MAN tag is set to YES (the default) Doxygen will
|
||||
# generate man pages
|
||||
|
||||
GENERATE_MAN = NO
|
||||
|
||||
# The MAN_OUTPUT tag is used to specify where the man pages will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `man' will be used as the default path.
|
||||
|
||||
MAN_OUTPUT = man
|
||||
|
||||
# The MAN_EXTENSION tag determines the extension that is added to
|
||||
# the generated man pages (default is the subroutine's section .3)
|
||||
|
||||
MAN_EXTENSION = .3
|
||||
|
||||
# If the MAN_LINKS tag is set to YES and Doxygen generates man output,
|
||||
# then it will generate one additional man file for each entity
|
||||
# documented in the real man page(s). These additional files
|
||||
# only source the real man page, but without them the man command
|
||||
# would be unable to find the correct page. The default is NO.
|
||||
|
||||
MAN_LINKS = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the XML output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_XML tag is set to YES Doxygen will
|
||||
# generate an XML file that captures the structure of
|
||||
# the code including all documentation. Note that this
|
||||
# feature is still experimental and incomplete at the
|
||||
# moment.
|
||||
|
||||
GENERATE_XML = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options for the AutoGen Definitions output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will
|
||||
# generate an AutoGen Definitions (see autogen.sf.net) file
|
||||
# that captures the structure of the code including all
|
||||
# documentation. Note that this feature is still experimental
|
||||
# and incomplete at the moment.
|
||||
|
||||
GENERATE_AUTOGEN_DEF = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the preprocessor
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will
|
||||
# evaluate all C-preprocessor directives found in the sources and include
|
||||
# files.
|
||||
|
||||
ENABLE_PREPROCESSING = YES
|
||||
|
||||
# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro
|
||||
# names in the source code. If set to NO (the default) only conditional
|
||||
# compilation will be performed. Macro expansion can be done in a controlled
|
||||
# way by setting EXPAND_ONLY_PREDEF to YES.
|
||||
|
||||
MACRO_EXPANSION = NO
|
||||
|
||||
# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES
|
||||
# then the macro expansion is limited to the macros specified with the
|
||||
# PREDEFINED and EXPAND_AS_PREDEFINED tags.
|
||||
|
||||
EXPAND_ONLY_PREDEF = NO
|
||||
|
||||
# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files
|
||||
# in the INCLUDE_PATH (see below) will be search if a #include is found.
|
||||
|
||||
SEARCH_INCLUDES = YES
|
||||
|
||||
# The INCLUDE_PATH tag can be used to specify one or more directories that
|
||||
# contain include files that are not input files but should be processed by
|
||||
# the preprocessor.
|
||||
|
||||
INCLUDE_PATH =
|
||||
|
||||
# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
|
||||
# patterns (like *.h and *.hpp) to filter out the header-files in the
|
||||
# directories. If left blank, the patterns specified with FILE_PATTERNS will
|
||||
# be used.
|
||||
|
||||
INCLUDE_FILE_PATTERNS =
|
||||
|
||||
# The PREDEFINED tag can be used to specify one or more macro names that
|
||||
# are defined before the preprocessor is started (similar to the -D option of
|
||||
# gcc). The argument of the tag is a list of macros of the form: name
|
||||
# or name=definition (no spaces). If the definition and the = are
|
||||
# omitted =1 is assumed.
|
||||
|
||||
PREDEFINED =
|
||||
|
||||
# If the MACRO_EXPANSION and EXPAND_PREDEF_ONLY tags are set to YES then
|
||||
# this tag can be used to specify a list of macro names that should be expanded.
|
||||
# The macro definition that is found in the sources will be used.
|
||||
# Use the PREDEFINED tag if you want to use a different macro definition.
|
||||
|
||||
EXPAND_AS_DEFINED =
|
||||
|
||||
# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then
|
||||
# doxygen's preprocessor will remove all function-like macros that are alone
|
||||
# on a line and do not end with a semicolon. Such function macros are typically
|
||||
# used for boiler-plate code, and will confuse the parser if not removed.
|
||||
|
||||
SKIP_FUNCTION_MACROS = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration::addtions related to external references
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The TAGFILES tag can be used to specify one or more tagfiles.
|
||||
|
||||
TAGFILES =
|
||||
|
||||
# When a file name is specified after GENERATE_TAGFILE, doxygen will create
|
||||
# a tag file that is based on the input files it reads.
|
||||
|
||||
GENERATE_TAGFILE =
|
||||
|
||||
# If the ALLEXTERNALS tag is set to YES all external classes will be listed
|
||||
# in the class index. If set to NO only the inherited external classes
|
||||
# will be listed.
|
||||
|
||||
ALLEXTERNALS = NO
|
||||
|
||||
# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed
|
||||
# in the modules index. If set to NO, only the current project's groups will
|
||||
# be listed.
|
||||
|
||||
EXTERNAL_GROUPS = YES
|
||||
|
||||
# The PERL_PATH should be the absolute path and name of the perl script
|
||||
# interpreter (i.e. the result of `which perl').
|
||||
|
||||
PERL_PATH = /usr/bin/perl
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the dot tool
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will
|
||||
# generate a inheritance diagram (in Html, RTF and LaTeX) for classes with base or
|
||||
# super classes. Setting the tag to NO turns the diagrams off. Note that this
|
||||
# option is superceded by the HAVE_DOT option below. This is only a fallback. It is
|
||||
# recommended to install and use dot, since it yield more powerful graphs.
|
||||
|
||||
CLASS_DIAGRAMS = YES
|
||||
|
||||
# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
|
||||
# available from the path. This tool is part of Graphviz, a graph visualization
|
||||
# toolkit from AT&T and Lucent Bell Labs. The other options in this section
|
||||
# have no effect if this option is set to NO (the default)
|
||||
|
||||
HAVE_DOT = YES
|
||||
|
||||
# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen
|
||||
# will generate a graph for each documented class showing the direct and
|
||||
# indirect inheritance relations. Setting this tag to YES will force the
|
||||
# the CLASS_DIAGRAMS tag to NO.
|
||||
|
||||
CLASS_GRAPH = YES
|
||||
|
||||
# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen
|
||||
# will generate a graph for each documented class showing the direct and
|
||||
# indirect implementation dependencies (inheritance, containment, and
|
||||
# class references variables) of the class with other documented classes.

COLLABORATION_GRAPH = YES

# If set to YES, the inheritance and collaboration graphs will show the
# relations between templates and their instances.

TEMPLATE_RELATIONS = YES

# If set to YES, the inheritance and collaboration graphs will hide
# inheritance and usage relations if the target is undocumented
# or is not a class.

HIDE_UNDOC_RELATIONS = NO

# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT
# tags are set to YES then doxygen will generate a graph for each documented
# file showing the direct and indirect include dependencies of the file with
# other documented files.

INCLUDE_GRAPH = YES

# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and
# HAVE_DOT tags are set to YES then doxygen will generate a graph for each
# documented header file showing the documented files that directly or
# indirectly include this file.

INCLUDED_BY_GRAPH = YES

# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen
# will show a graphical hierarchy of all classes instead of a textual one.

GRAPHICAL_HIERARCHY = YES

# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
# generated by dot. Possible values are gif, jpg, and png.
# If left blank gif will be used.

DOT_IMAGE_FORMAT = gif

# The tag DOT_PATH can be used to specify the path where the dot tool can be
# found. If left blank, it is assumed the dot tool can be found on the path.

DOT_PATH =

# The DOTFILE_DIRS tag can be used to specify one or more directories that
# contain dot files that are included in the documentation (see the
# \dotfile command).

DOTFILE_DIRS =

# The MAX_DOT_GRAPH_WIDTH tag can be used to set the maximum allowed width
# (in pixels) of the graphs generated by dot. If a graph becomes larger than
# this value, doxygen will try to truncate the graph, so that it fits within
# the specified constraint. Beware that most browsers cannot cope with very
# large images.

MAX_DOT_GRAPH_WIDTH = 1024

# The MAX_DOT_GRAPH_HEIGHT tag can be used to set the maximum allowed height
# (in pixels) of the graphs generated by dot. If a graph becomes larger than
# this value, doxygen will try to truncate the graph, so that it fits within
# the specified constraint. Beware that most browsers cannot cope with very
# large images.

MAX_DOT_GRAPH_HEIGHT = 1024

# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will
# generate a legend page explaining the meaning of the various boxes and
# arrows in the dot generated graphs.

GENERATE_LEGEND = YES

# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will
# remove the intermediate dot files that are used to generate
# the various graphs.

DOT_CLEANUP = YES

#---------------------------------------------------------------------------
# Configuration::additions related to the search engine
#---------------------------------------------------------------------------

# The SEARCHENGINE tag specifies whether or not a search engine should be
# used. If set to NO the values of all tags below this one will be ignored.

SEARCHENGINE = YES

# The CGI_NAME tag should be the name of the CGI script that
# starts the search engine (doxysearch) with the correct parameters.
# A script with this name will be generated by doxygen.

CGI_NAME = search.cgi

# The CGI_URL tag should be the absolute URL to the directory where the
# cgi binaries are located. See the documentation of your http daemon for
# details.

CGI_URL =

# The DOC_URL tag should be the absolute URL to the directory where the
# documentation is located. If left blank the absolute path to the
# documentation, with file:// prepended to it, will be used.

DOC_URL =

# The DOC_ABSPATH tag should be the absolute path to the directory where the
# documentation is located. If left blank the directory on the local machine
# will be used.

DOC_ABSPATH =

# The BIN_ABSPATH tag must point to the directory where the doxysearch binary
# is installed.

BIN_ABSPATH = /usr/local/bin/

# The EXT_DOC_PATHS tag can be used to specify one or more paths to
# documentation generated for other projects. This allows doxysearch to search
# the documentation for these projects as well.

EXT_DOC_PATHS =
921
ndb/docs/doxygen/Doxyfile.test
Normal file
@@ -0,0 +1,921 @@
# Doxyfile 1.2.14
|
||||
|
||||
# This file describes the settings to be used by the documentation system
|
||||
# doxygen (www.doxygen.org) for a project
|
||||
#
|
||||
# All text after a hash (#) is considered a comment and will be ignored
|
||||
# The format is:
|
||||
# TAG = value [value, ...]
|
||||
# For lists items can also be appended using:
|
||||
# TAG += value [value, ...]
|
||||
# Values that contain spaces should be placed between quotes (" ")
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# General configuration options
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The PROJECT_NAME tag is a single word (or a sequence of words surrounded
|
||||
# by quotes) that should identify the project.
|
||||
|
||||
PROJECT_NAME = "NDB Cluster Test Programs"
|
||||
|
||||
# The PROJECT_NUMBER tag can be used to enter a project or revision number.
|
||||
# This could be handy for archiving the generated documentation or
|
||||
# if some version control system is used.
|
||||
|
||||
PROJECT_NUMBER =
|
||||
|
||||
# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute)
|
||||
# base path where the generated documentation will be put.
|
||||
# If a relative path is entered, it will be relative to the location
|
||||
# where doxygen was started. If left blank the current directory will be used.
|
||||
|
||||
OUTPUT_DIRECTORY =
|
||||
|
||||
# The OUTPUT_LANGUAGE tag is used to specify the language in which all
|
||||
# documentation generated by doxygen is written. Doxygen will use this
|
||||
# information to generate all constant output in the proper language.
|
||||
# The default language is English, other supported languages are:
|
||||
# Brazilian, Chinese, Croatian, Czech, Danish, Dutch, Finnish, French,
|
||||
# German, Greek, Hungarian, Italian, Japanese, Korean, Norwegian, Polish,
|
||||
# Portuguese, Romanian, Russian, Slovak, Slovene, Spanish and Swedish.
|
||||
|
||||
OUTPUT_LANGUAGE = English
|
||||
|
||||
# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in
|
||||
# documentation are documented, even if no documentation was available.
|
||||
# Private class members and static file members will be hidden unless
|
||||
# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES
|
||||
|
||||
EXTRACT_ALL = YES
|
||||
|
||||
# If the EXTRACT_PRIVATE tag is set to YES all private members of a class
|
||||
# will be included in the documentation.
|
||||
|
||||
EXTRACT_PRIVATE = YES
|
||||
|
||||
# If the EXTRACT_STATIC tag is set to YES all static members of a file
|
||||
# will be included in the documentation.
|
||||
|
||||
EXTRACT_STATIC = YES
|
||||
|
||||
# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs)
|
||||
# defined locally in source files will be included in the documentation.
|
||||
# If set to NO only classes defined in header files are included.
|
||||
|
||||
EXTRACT_LOCAL_CLASSES = YES
|
||||
|
||||
# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all
|
||||
# undocumented members of documented classes, files or namespaces.
|
||||
# If set to NO (the default) these members will be included in the
|
||||
# various overviews, but no documentation section is generated.
|
||||
# This option has no effect if EXTRACT_ALL is enabled.
|
||||
|
||||
HIDE_UNDOC_MEMBERS = NO
|
||||
|
||||
# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all
|
||||
# undocumented classes that are normally visible in the class hierarchy.
|
||||
# If set to NO (the default) these class will be included in the various
|
||||
# overviews. This option has no effect if EXTRACT_ALL is enabled.
|
||||
|
||||
HIDE_UNDOC_CLASSES = NO
|
||||
|
||||
# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will
|
||||
# include brief member descriptions after the members that are listed in
|
||||
# the file and class documentation (similar to JavaDoc).
|
||||
# Set to NO to disable this.
|
||||
|
||||
BRIEF_MEMBER_DESC = YES
|
||||
|
||||
# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend
|
||||
# the brief description of a member or function before the detailed description.
|
||||
# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
|
||||
# brief descriptions will be completely suppressed.
|
||||
|
||||
REPEAT_BRIEF = YES
|
||||
|
||||
# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
|
||||
# Doxygen will generate a detailed section even if there is only a brief
|
||||
# description.
|
||||
|
||||
ALWAYS_DETAILED_SEC = NO
|
||||
|
||||
# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all inherited
|
||||
# members of a class in the documentation of that class as if those members were
|
||||
# ordinary class members. Constructors, destructors and assignment operators of
|
||||
# the base classes will not be shown.
|
||||
|
||||
INLINE_INHERITED_MEMB = NO
|
||||
|
||||
# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full
|
||||
# path before files name in the file list and in the header files. If set
|
||||
# to NO the shortest path that makes the file name unique will be used.
|
||||
|
||||
FULL_PATH_NAMES = YES
|
||||
|
||||
# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag
|
||||
# can be used to strip a user defined part of the path. Stripping is
|
||||
# only done if one of the specified strings matches the left-hand part of
|
||||
# the path. It is allowed to use relative paths in the argument list.
|
||||
|
||||
STRIP_FROM_PATH = .
|
||||
|
||||
# The INTERNAL_DOCS tag determines if documentation
|
||||
# that is typed after a \internal command is included. If the tag is set
|
||||
# to NO (the default) then the documentation will be excluded.
|
||||
# Set it to YES to include the internal documentation.
|
||||
|
||||
INTERNAL_DOCS = YES
|
||||
|
||||
# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct
|
||||
# doxygen to hide any special comment blocks from generated source code
|
||||
# fragments. Normal C and C++ comments will always remain visible.
|
||||
|
||||
STRIP_CODE_COMMENTS = YES
|
||||
|
||||
# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate
|
||||
# file names in lower case letters. If set to YES upper case letters are also
|
||||
# allowed. This is useful if you have classes or files whose names only differ
|
||||
# in case and if your file system supports case sensitive file names. Windows
|
||||
# users are adviced to set this option to NO.
|
||||
|
||||
CASE_SENSE_NAMES = YES
|
||||
|
||||
# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter
|
||||
# (but less readable) file names. This can be useful is your file systems
|
||||
# doesn't support long names like on DOS, Mac, or CD-ROM.
|
||||
|
||||
SHORT_NAMES = NO
|
||||
|
||||
# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen
|
||||
# will show members with their full class and namespace scopes in the
|
||||
# documentation. If set to YES the scope will be hidden.
|
||||
|
||||
HIDE_SCOPE_NAMES = NO
|
||||
|
||||
# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen
|
||||
# will generate a verbatim copy of the header file for each class for
|
||||
# which an include is specified. Set to NO to disable this.
|
||||
|
||||
VERBATIM_HEADERS = YES
|
||||
|
||||
# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen
|
||||
# will put list of the files that are included by a file in the documentation
|
||||
# of that file.
|
||||
|
||||
SHOW_INCLUDE_FILES = YES
|
||||
|
||||
# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen
|
||||
# will interpret the first line (until the first dot) of a JavaDoc-style
|
||||
# comment as the brief description. If set to NO, the JavaDoc
|
||||
# comments will behave just like the Qt-style comments (thus requiring an
|
||||
# explict @brief command for a brief description.
|
||||
|
||||
JAVADOC_AUTOBRIEF = NO
|
||||
|
||||
# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented
|
||||
# member inherits the documentation from any documented member that it
|
||||
# reimplements.
|
||||
|
||||
INHERIT_DOCS = YES
|
||||
|
||||
# If the INLINE_INFO tag is set to YES (the default) then a tag [inline]
|
||||
# is inserted in the documentation for inline members.
|
||||
|
||||
INLINE_INFO = YES
|
||||
|
||||
# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen
|
||||
# will sort the (detailed) documentation of file and class members
|
||||
# alphabetically by member name. If set to NO the members will appear in
|
||||
# declaration order.
|
||||
|
||||
SORT_MEMBER_DOCS = YES
|
||||
|
||||
# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
|
||||
# tag is set to YES, then doxygen will reuse the documentation of the first
|
||||
# member in the group (if any) for the other members of the group. By default
|
||||
# all members of a group must be documented explicitly.
|
||||
|
||||
DISTRIBUTE_GROUP_DOC = NO
|
||||
|
||||
# The TAB_SIZE tag can be used to set the number of spaces in a tab.
|
||||
# Doxygen uses this value to replace tabs by spaces in code fragments.
|
||||
|
||||
TAB_SIZE = 8
|
||||
|
||||
# The GENERATE_TODOLIST tag can be used to enable (YES) or
|
||||
# disable (NO) the todo list. This list is created by putting \todo
|
||||
# commands in the documentation.
|
||||
|
||||
GENERATE_TODOLIST = YES
|
||||
|
||||
# The GENERATE_TESTLIST tag can be used to enable (YES) or
|
||||
# disable (NO) the test list. This list is created by putting \test
|
||||
# commands in the documentation.
|
||||
|
||||
GENERATE_TESTLIST = YES
|
||||
|
||||
# The GENERATE_BUGLIST tag can be used to enable (YES) or
|
||||
# disable (NO) the bug list. This list is created by putting \bug
|
||||
# commands in the documentation.
|
||||
|
||||
GENERATE_BUGLIST = YES
|
||||
|
||||
# This tag can be used to specify a number of aliases that acts
|
||||
# as commands in the documentation. An alias has the form "name=value".
|
||||
# For example adding "sideeffect=\par Side Effects:\n" will allow you to
|
||||
# put the command \sideeffect (or @sideeffect) in the documentation, which
|
||||
# will result in a user defined paragraph with heading "Side Effects:".
|
||||
# You can put \n's in the value part of an alias to insert newlines.
|
||||
|
||||
ALIASES =
|
||||
|
||||
# The ENABLED_SECTIONS tag can be used to enable conditional
|
||||
# documentation sections, marked by \if sectionname ... \endif.
|
||||
|
||||
ENABLED_SECTIONS =
|
||||
|
||||
# The MAX_INITIALIZER_LINES tag determines the maximum number of lines
|
||||
# the initial value of a variable or define consist of for it to appear in
|
||||
# the documentation. If the initializer consists of more lines than specified
|
||||
# here it will be hidden. Use a value of 0 to hide initializers completely.
|
||||
# The appearance of the initializer of individual variables and defines in the
|
||||
# documentation can be controlled using \showinitializer or \hideinitializer
|
||||
# command in the documentation regardless of this setting.
|
||||
|
||||
MAX_INITIALIZER_LINES = 30
|
||||
|
||||
# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
|
||||
# only. Doxygen will then generate output that is more tailored for C.
|
||||
# For instance some of the names that are used will be different. The list
|
||||
# of all members will be omitted, etc.
|
||||
|
||||
OPTIMIZE_OUTPUT_FOR_C = NO
|
||||
|
||||
# Set the SHOW_USED_FILES tag to NO to disable the list of files generated
|
||||
# at the bottom of the documentation of classes and structs. If set to YES the
|
||||
# list will mention the files that were used to generate the documentation.
|
||||
|
||||
SHOW_USED_FILES = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to warning and progress messages
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The QUIET tag can be used to turn on/off the messages that are generated
|
||||
# by doxygen. Possible values are YES and NO. If left blank NO is used.
|
||||
|
||||
QUIET = NO
|
||||
|
||||
# The WARNINGS tag can be used to turn on/off the warning messages that are
|
||||
# generated by doxygen. Possible values are YES and NO. If left blank
|
||||
# NO is used.
|
||||
|
||||
WARNINGS = YES
|
||||
|
||||
# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings
|
||||
# for undocumented members. If EXTRACT_ALL is set to YES then this flag will
|
||||
# automatically be disabled.
|
||||
|
||||
WARN_IF_UNDOCUMENTED = YES
|
||||
|
||||
# The WARN_FORMAT tag determines the format of the warning messages that
|
||||
# doxygen can produce. The string should contain the $file, $line, and $text
|
||||
# tags, which will be replaced by the file and line number from which the
|
||||
# warning originated and the warning text.
|
||||
|
||||
WARN_FORMAT = "$file:$line: $text"
|
||||
|
||||
# The WARN_LOGFILE tag can be used to specify a file to which warning
|
||||
# and error messages should be written. If left blank the output is written
|
||||
# to stderr.
|
||||
|
||||
WARN_LOGFILE =
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the input files
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The INPUT tag can be used to specify the files and/or directories that contain
|
||||
# documented source files. You may enter file names like "myfile.cpp" or
|
||||
# directories like "/usr/src/myproject". Separate the files or directories
|
||||
# with spaces.
|
||||
|
||||
INPUT = test
|
||||
|
||||
# If the value of the INPUT tag contains directories, you can use the
|
||||
# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
|
||||
# and *.h) to filter out the source-files in the directories. If left
|
||||
# blank the following patterns are tested:
|
||||
# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx *.hpp
|
||||
# *.h++ *.idl *.odl
|
||||
|
||||
FILE_PATTERNS =
|
||||
|
||||
# The RECURSIVE tag can be used to turn specify whether or not subdirectories
|
||||
# should be searched for input files as well. Possible values are YES and NO.
|
||||
# If left blank NO is used.
|
||||
|
||||
RECURSIVE = YES
|
||||
|
||||
# The EXCLUDE tag can be used to specify files and/or directories that should
|
||||
# excluded from the INPUT source files. This way you can easily exclude a
|
||||
# subdirectory from a directory tree whose root is specified with the INPUT tag.
|
||||
|
||||
EXCLUDE =
|
||||
|
||||
# The EXCLUDE_SYMLINKS tag can be used select whether or not files or directories
|
||||
# that are symbolic links (a Unix filesystem feature) are excluded from the input.
|
||||
|
||||
EXCLUDE_SYMLINKS = NO
|
||||
|
||||
# If the value of the INPUT tag contains directories, you can use the
|
||||
# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
|
||||
# certain files from those directories.
|
||||
|
||||
EXCLUDE_PATTERNS =
|
||||
|
||||
# The EXAMPLE_PATH tag can be used to specify one or more files or
|
||||
# directories that contain example code fragments that are included (see
|
||||
# the \include command).
|
||||
|
||||
EXAMPLE_PATH =
|
||||
|
||||
# If the value of the EXAMPLE_PATH tag contains directories, you can use the
|
||||
# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
|
||||
# and *.h) to filter out the source-files in the directories. If left
|
||||
# blank all files are included.
|
||||
|
||||
EXAMPLE_PATTERNS =
|
||||
|
||||
# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
|
||||
# searched for input files to be used with the \include or \dontinclude
|
||||
# commands irrespective of the value of the RECURSIVE tag.
|
||||
# Possible values are YES and NO. If left blank NO is used.
|
||||
|
||||
EXAMPLE_RECURSIVE = NO
|
||||
|
||||
# The IMAGE_PATH tag can be used to specify one or more files or
|
||||
# directories that contain image that are included in the documentation (see
|
||||
# the \image command).
|
||||
|
||||
IMAGE_PATH =
|
||||
|
||||
# The INPUT_FILTER tag can be used to specify a program that doxygen should
|
||||
# invoke to filter for each input file. Doxygen will invoke the filter program
|
||||
# by executing (via popen()) the command <filter> <input-file>, where <filter>
|
||||
# is the value of the INPUT_FILTER tag, and <input-file> is the name of an
|
||||
# input file. Doxygen will then use the output that the filter program writes
|
||||
# to standard output.
|
||||
|
||||
INPUT_FILTER =
|
||||
|
||||
# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
|
||||
# INPUT_FILTER) will be used to filter the input files when producing source
|
||||
# files to browse.
|
||||
|
||||
FILTER_SOURCE_FILES = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to source browsing
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the SOURCE_BROWSER tag is set to YES then a list of source files will
|
||||
# be generated. Documented entities will be cross-referenced with these sources.
|
||||
|
||||
SOURCE_BROWSER = YES
|
||||
|
||||
# Setting the INLINE_SOURCES tag to YES will include the body
|
||||
# of functions and classes directly in the documentation.
|
||||
|
||||
INLINE_SOURCES = YES
|
||||
|
||||
# If the REFERENCED_BY_RELATION tag is set to YES (the default)
|
||||
# then for each documented function all documented
|
||||
# functions referencing it will be listed.
|
||||
|
||||
REFERENCED_BY_RELATION = YES
|
||||
|
||||
# If the REFERENCES_RELATION tag is set to YES (the default)
|
||||
# then for each documented function all documented entities
|
||||
# called/used by that function will be listed.
|
||||
|
||||
REFERENCES_RELATION = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the alphabetical class index
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index
|
||||
# of all compounds will be generated. Enable this if the project
|
||||
# contains a lot of classes, structs, unions or interfaces.
|
||||
|
||||
ALPHABETICAL_INDEX = YES
|
||||
|
||||
# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then
|
||||
# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns
|
||||
# in which this list will be split (can be a number in the range [1..20])
|
||||
|
||||
COLS_IN_ALPHA_INDEX = 5
|
||||
|
||||
# In case all classes in a project start with a common prefix, all
|
||||
# classes will be put under the same header in the alphabetical index.
|
||||
# The IGNORE_PREFIX tag can be used to specify one or more prefixes that
|
||||
# should be ignored while generating the index headers.
|
||||
|
||||
IGNORE_PREFIX =
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the HTML output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_HTML tag is set to YES (the default) Doxygen will
|
||||
# generate HTML output.
|
||||
|
||||
GENERATE_HTML = YES
|
||||
|
||||
# The HTML_OUTPUT tag is used to specify where the HTML docs will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `html' will be used as the default path.
|
||||
|
||||
HTML_OUTPUT = html
|
||||
|
||||
# The HTML_FILE_EXTENSION tag can be used to specify the file extension for
|
||||
# each generated HTML page (for example: .htm,.php,.asp). If it is left blank
|
||||
# doxygen will generate files with .html extension.
|
||||
|
||||
HTML_FILE_EXTENSION = .html
|
||||
|
||||
# The HTML_HEADER tag can be used to specify a personal HTML header for
|
||||
# each generated HTML page. If it is left blank doxygen will generate a
|
||||
# standard header.
|
||||
|
||||
HTML_HEADER =
|
||||
|
||||
# The HTML_FOOTER tag can be used to specify a personal HTML footer for
|
||||
# each generated HTML page. If it is left blank doxygen will generate a
|
||||
# standard footer.
|
||||
|
||||
HTML_FOOTER =
|
||||
|
||||
# The HTML_STYLESHEET tag can be used to specify a user defined cascading
|
||||
# style sheet that is used by each HTML page. It can be used to
|
||||
# fine-tune the look of the HTML output. If the tag is left blank doxygen
|
||||
# will generate a default style sheet
|
||||
|
||||
HTML_STYLESHEET =
|
||||
|
||||
# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes,
|
||||
# files or namespaces will be aligned in HTML using tables. If set to
|
||||
# NO a bullet list will be used.
|
||||
|
||||
HTML_ALIGN_MEMBERS = YES
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, additional index files
|
||||
# will be generated that can be used as input for tools like the
|
||||
# Microsoft HTML help workshop to generate a compressed HTML help file (.chm)
|
||||
# of the generated HTML documentation.
|
||||
|
||||
GENERATE_HTMLHELP = NO
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag
|
||||
# controls if a separate .chi index file is generated (YES) or that
|
||||
# it should be included in the master .chm file (NO).
|
||||
|
||||
GENERATE_CHI = NO
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag
|
||||
# controls whether a binary table of contents is generated (YES) or a
|
||||
# normal table of contents (NO) in the .chm file.
|
||||
|
||||
BINARY_TOC = NO
|
||||
|
||||
# The TOC_EXPAND flag can be set to YES to add extra items for group members
|
||||
# to the contents of the Html help documentation and to the tree view.
|
||||
|
||||
TOC_EXPAND = NO
|
||||
|
||||
# The DISABLE_INDEX tag can be used to turn on/off the condensed index at
|
||||
# top of each HTML page. The value NO (the default) enables the index and
|
||||
# the value YES disables it.
|
||||
|
||||
DISABLE_INDEX = NO
|
||||
|
||||
# This tag can be used to set the number of enum values (range [1..20])
|
||||
# that doxygen will group on one line in the generated HTML documentation.
|
||||
|
||||
ENUM_VALUES_PER_LINE = 4
|
||||
|
||||
# If the GENERATE_TREEVIEW tag is set to YES, a side panel will be
|
||||
# generated containing a tree-like index structure (just like the one that
|
||||
# is generated for HTML Help). For this to work a browser that supports
|
||||
# JavaScript and frames is required (for instance Mozilla, Netscape 4.0+,
|
||||
# or Internet explorer 4.0+). Note that for large projects the tree generation
|
||||
# can take a very long time. In such cases it is better to disable this feature.
|
||||
# Windows users are probably better off using the HTML help feature.
|
||||
|
||||
GENERATE_TREEVIEW = NO
|
||||
|
||||
# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be
|
||||
# used to set the initial width (in pixels) of the frame in which the tree
|
||||
# is shown.
|
||||
|
||||
TREEVIEW_WIDTH = 250
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the LaTeX output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will
|
||||
# generate Latex output.
|
||||
|
||||
GENERATE_LATEX = NO
|
||||
|
||||
# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `latex' will be used as the default path.
|
||||
|
||||
LATEX_OUTPUT = latex
|
||||
|
||||
# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact
|
||||
# LaTeX documents. This may be useful for small projects and may help to
|
||||
# save some trees in general.
|
||||
|
||||
COMPACT_LATEX = NO
|
||||
|
||||
# The PAPER_TYPE tag can be used to set the paper type that is used
|
||||
# by the printer. Possible values are: a4, a4wide, letter, legal and
|
||||
# executive. If left blank a4wide will be used.
|
||||
|
||||
PAPER_TYPE = a4wide
|
||||
|
||||
# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX
|
||||
# packages that should be included in the LaTeX output.
|
||||
|
||||
EXTRA_PACKAGES =
|
||||
|
||||
# The LATEX_HEADER tag can be used to specify a personal LaTeX header for
|
||||
# the generated latex document. The header should contain everything until
|
||||
# the first chapter. If it is left blank doxygen will generate a
|
||||
# standard header. Notice: only use this tag if you know what you are doing!
|
||||
|
||||
LATEX_HEADER =
|
||||
|
||||
# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated
|
||||
# is prepared for conversion to pdf (using ps2pdf). The pdf file will
|
||||
# contain links (just like the HTML output) instead of page references
|
||||
# This makes the output suitable for online browsing using a pdf viewer.
|
||||
|
||||
PDF_HYPERLINKS = NO
|
||||
|
||||
# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of
|
||||
# plain latex in the generated Makefile. Set this option to YES to get a
|
||||
# higher quality PDF documentation.
|
||||
|
||||
USE_PDFLATEX = NO
|
||||
|
||||
# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode.
|
||||
# command to the generated LaTeX files. This will instruct LaTeX to keep
|
||||
# running if errors occur, instead of asking the user for help.
|
||||
# This option is also used when generating formulas in HTML.
|
||||
|
||||
LATEX_BATCHMODE = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the RTF output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output
|
||||
# The RTF output is optimised for Word 97 and may not look very pretty with
|
||||
# other RTF readers or editors.
|
||||
|
||||
GENERATE_RTF = NO
|
||||
|
||||
# The RTF_OUTPUT tag is used to specify where the RTF docs will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `rtf' will be used as the default path.
|
||||
|
||||
RTF_OUTPUT = rtf
|
||||
|
||||
# If the COMPACT_RTF tag is set to YES Doxygen generates more compact
|
||||
# RTF documents. This may be useful for small projects and may help to
|
||||
# save some trees in general.
|
||||
|
||||
COMPACT_RTF = NO
|
||||
|
||||
# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated
|
||||
# will contain hyperlink fields. The RTF file will
|
||||
# contain links (just like the HTML output) instead of page references.
|
||||
# This makes the output suitable for online browsing using WORD or other
|
||||
# programs which support those fields.
|
||||
# Note: wordpad (write) and others do not support links.
|
||||
|
||||
RTF_HYPERLINKS = NO
|
||||
|
||||
# Load stylesheet definitions from file. Syntax is similar to doxygen's
|
||||
# config file, i.e. a series of assigments. You only have to provide
|
||||
# replacements, missing definitions are set to their default value.
|
||||
|
||||
RTF_STYLESHEET_FILE =
|
||||
|
||||
# Set optional variables used in the generation of an rtf document.
|
||||
# Syntax is similar to doxygen's config file.
|
||||
|
||||
RTF_EXTENSIONS_FILE =
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the man page output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_MAN tag is set to YES (the default) Doxygen will
|
||||
# generate man pages
|
||||
|
||||
GENERATE_MAN = NO
|
||||
|
||||
# The MAN_OUTPUT tag is used to specify where the man pages will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `man' will be used as the default path.
|
||||
|
||||
MAN_OUTPUT = man
|
||||
|
||||
# The MAN_EXTENSION tag determines the extension that is added to
|
||||
# the generated man pages (default is the subroutine's section .3)
|
||||
|
||||
MAN_EXTENSION = .3
|
||||
|
||||
# If the MAN_LINKS tag is set to YES and Doxygen generates man output,
|
||||
# then it will generate one additional man file for each entity
|
||||
# documented in the real man page(s). These additional files
|
||||
# only source the real man page, but without them the man command
|
||||
# would be unable to find the correct page. The default is NO.
|
||||
|
||||
MAN_LINKS = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the XML output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_XML tag is set to YES Doxygen will
|
||||
# generate an XML file that captures the structure of
|
||||
# the code including all documentation. Note that this
|
||||
# feature is still experimental and incomplete at the
|
||||
# moment.
|
||||
|
||||
GENERATE_XML = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options for the AutoGen Definitions output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will
|
||||
# generate an AutoGen Definitions (see autogen.sf.net) file
|
||||
# that captures the structure of the code including all
|
||||
# documentation. Note that this feature is still experimental
|
||||
# and incomplete at the moment.
|
||||
|
||||
GENERATE_AUTOGEN_DEF = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the preprocessor
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will
|
||||
# evaluate all C-preprocessor directives found in the sources and include
|
||||
# files.
|
||||
|
||||
ENABLE_PREPROCESSING = YES
|
||||
|
||||
# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro
|
||||
# names in the source code. If set to NO (the default) only conditional
|
||||
# compilation will be performed. Macro expansion can be done in a controlled
|
||||
# way by setting EXPAND_ONLY_PREDEF to YES.
|
||||
|
||||
MACRO_EXPANSION = NO
|
||||
|
||||
# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES
|
||||
# then the macro expansion is limited to the macros specified with the
|
||||
# PREDEFINED and EXPAND_AS_PREDEFINED tags.
|
||||
|
||||
EXPAND_ONLY_PREDEF = NO
|
||||
|
||||
# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files
|
||||
# in the INCLUDE_PATH (see below) will be search if a #include is found.
|
||||
|
||||
SEARCH_INCLUDES = YES
|
||||
|
||||
# The INCLUDE_PATH tag can be used to specify one or more directories that
|
||||
# contain include files that are not input files but should be processed by
|
||||
# the preprocessor.
|
||||
|
||||
INCLUDE_PATH =
|
||||
|
||||
# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
|
||||
# patterns (like *.h and *.hpp) to filter out the header-files in the
|
||||
# directories. If left blank, the patterns specified with FILE_PATTERNS will
|
||||
# be used.
|
||||
|
||||
INCLUDE_FILE_PATTERNS =
|
||||
|
||||
# The PREDEFINED tag can be used to specify one or more macro names that
|
||||
# are defined before the preprocessor is started (similar to the -D option of
|
||||
# gcc). The argument of the tag is a list of macros of the form: name
|
||||
# or name=definition (no spaces). If the definition and the = are
|
||||
# omitted =1 is assumed.
|
||||
|
||||
PREDEFINED =
|
||||
|
||||
# If the MACRO_EXPANSION and EXPAND_PREDEF_ONLY tags are set to YES then
|
||||
# this tag can be used to specify a list of macro names that should be expanded.
|
||||
# The macro definition that is found in the sources will be used.
|
||||
# Use the PREDEFINED tag if you want to use a different macro definition.
|
||||
|
||||
EXPAND_AS_DEFINED =
|
||||
|
||||
# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then
|
||||
# doxygen's preprocessor will remove all function-like macros that are alone
|
||||
# on a line and do not end with a semicolon. Such function macros are typically
|
||||
# used for boiler-plate code, and will confuse the parser if not removed.
|
||||
|
||||
SKIP_FUNCTION_MACROS = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration::addtions related to external references
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The TAGFILES tag can be used to specify one or more tagfiles.
|
||||
|
||||
TAGFILES =
|
||||
|
||||
# When a file name is specified after GENERATE_TAGFILE, doxygen will create
|
||||
# a tag file that is based on the input files it reads.
|
||||
|
||||
GENERATE_TAGFILE =
|
||||
|
||||
# If the ALLEXTERNALS tag is set to YES all external classes will be listed
|
||||
# in the class index. If set to NO only the inherited external classes
|
||||
# will be listed.
|
||||
|
||||
ALLEXTERNALS = NO
|
||||
|
||||
# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed
|
||||
# in the modules index. If set to NO, only the current project's groups will
|
||||
# be listed.
|
||||
|
||||
EXTERNAL_GROUPS = YES
|
||||
|
||||
# The PERL_PATH should be the absolute path and name of the perl script
|
||||
# interpreter (i.e. the result of `which perl').
|
||||
|
||||
PERL_PATH = /usr/bin/perl
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the dot tool
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will
|
||||
# generate a inheritance diagram (in Html, RTF and LaTeX) for classes with base or
|
||||
# super classes. Setting the tag to NO turns the diagrams off. Note that this
|
||||
# option is superceded by the HAVE_DOT option below. This is only a fallback. It is
|
||||
# recommended to install and use dot, since it yield more powerful graphs.
|
||||
|
||||
CLASS_DIAGRAMS = YES
|
||||
|
||||
# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
|
||||
# available from the path. This tool is part of Graphviz, a graph visualization
|
||||
# toolkit from AT&T and Lucent Bell Labs. The other options in this section
|
||||
# have no effect if this option is set to NO (the default)
|
||||
|
||||
HAVE_DOT = YES
|
||||
|
||||
# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen
|
||||
# will generate a graph for each documented class showing the direct and
|
||||
# indirect inheritance relations. Setting this tag to YES will force the
|
||||
# the CLASS_DIAGRAMS tag to NO.
|
||||
|
||||
CLASS_GRAPH = YES
|
||||
|
||||
# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen
|
||||
# will generate a graph for each documented class showing the direct and
|
||||
# indirect implementation dependencies (inheritance, containment, and
|
||||
# class references variables) of the class with other documented classes.
|
||||
|
||||
COLLABORATION_GRAPH = YES
|
||||
|
||||
# If set to YES, the inheritance and collaboration graphs will show the
|
||||
# relations between templates and their instances.
|
||||
|
||||
TEMPLATE_RELATIONS = YES
|
||||
|
||||
# If set to YES, the inheritance and collaboration graphs will hide
|
||||
# inheritance and usage relations if the target is undocumented
|
||||
# or is not a class.
|
||||
|
||||
HIDE_UNDOC_RELATIONS = NO
|
||||
|
||||
# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT
|
||||
# tags are set to YES then doxygen will generate a graph for each documented
|
||||
# file showing the direct and indirect include dependencies of the file with
|
||||
# other documented files.
|
||||
|
||||
INCLUDE_GRAPH = YES
|
||||
|
||||
# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and
|
||||
# HAVE_DOT tags are set to YES then doxygen will generate a graph for each
|
||||
# documented header file showing the documented files that directly or
|
||||
# indirectly include this file.
|
||||
|
||||
INCLUDED_BY_GRAPH = YES
|
||||
|
||||
# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen
|
||||
# will graphical hierarchy of all classes instead of a textual one.
|
||||
|
||||
GRAPHICAL_HIERARCHY = YES
|
||||
|
||||
# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
|
||||
# generated by dot. Possible values are gif, jpg, and png
|
||||
# If left blank gif will be used.
|
||||
|
||||
DOT_IMAGE_FORMAT = gif
|
||||
|
||||
# The tag DOT_PATH can be used to specify the path where the dot tool can be
|
||||
# found. If left blank, it is assumed the dot tool can be found on the path.
|
||||
|
||||
DOT_PATH =
|
||||
|
||||
# The DOTFILE_DIRS tag can be used to specify one or more directories that
|
||||
# contain dot files that are included in the documentation (see the
|
||||
# \dotfile command).
|
||||
|
||||
DOTFILE_DIRS =
|
||||
|
||||
# The MAX_DOT_GRAPH_WIDTH tag can be used to set the maximum allowed width
|
||||
# (in pixels) of the graphs generated by dot. If a graph becomes larger than
|
||||
# this value, doxygen will try to truncate the graph, so that it fits within
|
||||
# the specified constraint. Beware that most browsers cannot cope with very
|
||||
# large images.
|
||||
|
||||
MAX_DOT_GRAPH_WIDTH = 1024
|
||||
|
||||
# The MAX_DOT_GRAPH_HEIGHT tag can be used to set the maximum allows height
|
||||
# (in pixels) of the graphs generated by dot. If a graph becomes larger than
|
||||
# this value, doxygen will try to truncate the graph, so that it fits within
|
||||
# the specified constraint. Beware that most browsers cannot cope with very
|
||||
# large images.
|
||||
|
||||
MAX_DOT_GRAPH_HEIGHT = 1024
|
||||
|
||||
# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will
|
||||
# generate a legend page explaining the meaning of the various boxes and
|
||||
# arrows in the dot generated graphs.
|
||||
|
||||
GENERATE_LEGEND = YES
|
||||
|
||||
# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will
|
||||
# remove the intermedate dot files that are used to generate
|
||||
# the various graphs.
|
||||
|
||||
DOT_CLEANUP = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration::addtions related to the search engine
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The SEARCHENGINE tag specifies whether or not a search engine should be
|
||||
# used. If set to NO the values of all tags below this one will be ignored.
|
||||
|
||||
SEARCHENGINE = NO
|
||||
|
||||
# The CGI_NAME tag should be the name of the CGI script that
|
||||
# starts the search engine (doxysearch) with the correct parameters.
|
||||
# A script with this name will be generated by doxygen.
|
||||
|
||||
CGI_NAME = search.cgi
|
||||
|
||||
# The CGI_URL tag should be the absolute URL to the directory where the
|
||||
# cgi binaries are located. See the documentation of your http daemon for
|
||||
# details.
|
||||
|
||||
CGI_URL =
|
||||
|
||||
# The DOC_URL tag should be the absolute URL to the directory where the
|
||||
# documentation is located. If left blank the absolute path to the
|
||||
# documentation, with file:// prepended to it, will be used.
|
||||
|
||||
DOC_URL =
|
||||
|
||||
# The DOC_ABSPATH tag should be the absolute path to the directory where the
|
||||
# documentation is located. If left blank the directory on the local machine
|
||||
# will be used.
|
||||
|
||||
DOC_ABSPATH =
|
||||
|
||||
# The BIN_ABSPATH tag must point to the directory where the doxysearch binary
|
||||
# is installed.
|
||||
|
||||
BIN_ABSPATH = /usr/local/bin
|
||||
|
||||
# The EXT_DOC_PATHS tag can be used to specify one or more paths to
|
||||
# documentation generated for other projects. This allows doxysearch to search
|
||||
# the documentation for these projects as well.
|
||||
|
||||
EXT_DOC_PATHS =
|
44
ndb/docs/doxygen/header.mgmapi.tex
Normal file
@@ -0,0 +1,44 @@
\documentclass[a4paper]{book}
|
||||
\usepackage{a4wide}
|
||||
\usepackage{makeidx}
|
||||
\usepackage{fancyhdr}
|
||||
\usepackage{graphicx}
|
||||
\usepackage{multicol}
|
||||
\usepackage{float}
|
||||
\usepackage{textcomp}
|
||||
\usepackage{alltt}
|
||||
\usepackage{times}
|
||||
\ifx\pdfoutput\undefined
|
||||
\usepackage[ps2pdf,
|
||||
pagebackref=true,
|
||||
colorlinks=true,
|
||||
linkcolor=blue
|
||||
]{hyperref}
|
||||
\usepackage{pspicture}
|
||||
\else
|
||||
\usepackage[pdftex,
|
||||
pagebackref=true,
|
||||
colorlinks=true,
|
||||
linkcolor=blue
|
||||
]{hyperref}
|
||||
\fi
|
||||
\usepackage{doxygen}
|
||||
\makeindex
|
||||
\setcounter{tocdepth}{1}
|
||||
\renewcommand{\footrulewidth}{0.4pt}
|
||||
\begin{document}
|
||||
\begin{titlepage}
|
||||
\vspace*{7cm}
|
||||
\begin{center}
|
||||
{\Huge NDB Cluster MGM API Guide \mbox{}\vspace{-3cm}\mbox{}\hrule\bigskip\bigskip\bigskip\bigskip\mbox{}\Huge{}}\\\vspace*{1cm}
|
||||
\begin{center}\LARGE{MySQL AB}\end{center}\hfill\bigskip\bigskip\bigskip\hrule\bigskip\bigskip\bigskip\bigskip\bigskip\bigskip\bigskip\bigskip\bigskip\bigskip\bigskip\bigskip NDB Cluster Release RELEASE
|
||||
\bigskip\bigskip\bigskip\bigskip\bigskip\hfill\vspace*{0.5cm}
|
||||
{\small DATE}\\
|
||||
\end{center}
|
||||
\end{titlepage}
|
||||
\clearemptydoublepage
|
||||
\pagenumbering{roman}
|
||||
\tableofcontents
|
||||
\clearemptydoublepage
|
||||
\pagenumbering{arabic}
|
||||
|
44
ndb/docs/doxygen/header.ndbapi.tex
Normal file
@@ -0,0 +1,44 @@
\documentclass[a4paper]{book}
|
||||
\usepackage{a4wide}
|
||||
\usepackage{makeidx}
|
||||
\usepackage{fancyhdr}
|
||||
\usepackage{graphicx}
|
||||
\usepackage{multicol}
|
||||
\usepackage{float}
|
||||
\usepackage{textcomp}
|
||||
\usepackage{alltt}
|
||||
\usepackage{times}
|
||||
\ifx\pdfoutput\undefined
|
||||
\usepackage[ps2pdf,
|
||||
pagebackref=true,
|
||||
colorlinks=true,
|
||||
linkcolor=blue
|
||||
]{hyperref}
|
||||
\usepackage{pspicture}
|
||||
\else
|
||||
\usepackage[pdftex,
|
||||
pagebackref=true,
|
||||
colorlinks=true,
|
||||
linkcolor=blue
|
||||
]{hyperref}
|
||||
\fi
|
||||
\usepackage{doxygen}
|
||||
\makeindex
|
||||
\setcounter{tocdepth}{1}
|
||||
\renewcommand{\footrulewidth}{0.4pt}
|
||||
\begin{document}
|
||||
\begin{titlepage}
|
||||
\vspace*{7cm}
|
||||
\begin{center}
|
||||
{\Huge NDB API Programmer's Guide \mbox{}\vspace{-3cm}\mbox{}\hrule\bigskip\bigskip\bigskip\bigskip\mbox{}\Huge{}}\\\vspace*{1cm}
|
||||
\begin{center}\LARGE{MySQL AB}\end{center}\hfill\bigskip\bigskip\bigskip\hrule\bigskip\bigskip\bigskip\bigskip\bigskip\bigskip\bigskip\bigskip\bigskip\bigskip\bigskip\bigskip NDB Cluster Release RELEASE
|
||||
\bigskip\bigskip\bigskip\bigskip\bigskip\hfill\vspace*{0.5cm}
|
||||
{\small DATE}\\
|
||||
\end{center}
|
||||
\end{titlepage}
|
||||
\clearemptydoublepage
|
||||
\pagenumbering{roman}
|
||||
\tableofcontents
|
||||
\clearemptydoublepage
|
||||
\pagenumbering{arabic}
|
||||
|
97
ndb/docs/doxygen/postdoxy.pl
Executable file
@@ -0,0 +1,97 @@
#!/usr/local/bin/perl
#
# Written by Lars Thalmann, lars@mysql.com, 2003.
#

use strict;
umask 000;

# -----------------------------------------------------------------------------
# Settings
# -----------------------------------------------------------------------------

$ENV{LD_LIBRARY_PATH} = "/usr/local/lib:/opt/as/local/lib";
$ENV{LD_LIBRARY_PATH} = $ENV{LD_LIBRARY_PATH} . ":/opt/as/forte6/SUNWspro/lib";
$ENV{PATH} = $ENV{PATH} . ":/usr/local/bin:/opt/as/local/bin";
$ENV{PATH} = $ENV{PATH} . ":/opt/as/local/teTeX/bin/sparc-sun-solaris2.8";

my $destdir = $ARGV[0];
my $title = ""; # $ARGV[1];

my $release;
if (defined $ENV{'NDB_RELEASE'}) {
    $release = $ENV{'NDB_RELEASE'};
    print "----------------------------------------------------------------\n";
    print "Release = " . $release . "\n";
    print "----------------------------------------------------------------\n";
} else {
    print "----------------------------------------------------------------\n";
    print "NDB Documentation is being modified to standard format\n";
    print "(If you want this automatic, use env variable NDB_RELEASE.)\n";
    print "Enter release (Examples: \"1.43.0 (alpha)\" or \"2.1.0 (gamma)\"): ";
    $release = <STDIN>;
    print "----------------------------------------------------------------\n";
}

# -----------------------------------------------------------------------------
# Change a little in refman.tex
# -----------------------------------------------------------------------------

open (INFILE, "< ${destdir}/refman.tex")
    or die "Error opening ${destdir}/refman.tex.\n";
open (OUTFILE, "> ${destdir}/refman.tex.new")
    or die "Error opening ${destdir}/refman.tex.new.\n";

while (<INFILE>)
{
    if (/(.*)(RELEASE)(.*)$/) {
	print OUTFILE $1 . $release . $3;
    } elsif (/(.*)(DATE)(.*)$/) {
	print OUTFILE $1 . localtime() . $3;
    } elsif (/\\chapter\{File Index\}/) {
	# Erase
    } elsif (/\\input\{files\}/) {
	# Erase
    } elsif (/\\chapter\{Hierarchical Index\}/) {
	# Erase
    } elsif (/\\input\{hierarchy\}/) {
	# Erase
    } elsif (/\\chapter\{Page Index\}/) {
	# Erase
    } elsif (/\\input\{pages\}/) {
	# Erase
    } else {
	print OUTFILE;
    }
}

close INFILE;
close OUTFILE;

system("mv ${destdir}/refman.tex.new ${destdir}/refman.tex");

# -----------------------------------------------------------------------------
# Change a little in doxygen.sty
# -----------------------------------------------------------------------------

open (INFILE, "< ${destdir}/doxygen.sty")
    or die "Error opening INFILE.\n";
open (OUTFILE, "> ${destdir}/doxygen.sty.new")
    or die "Error opening OUTFILE.\n";

while (<INFILE>)
{
    if (/\\rfoot/) {
	print OUTFILE "\\rfoot[\\fancyplain{}{\\bfseries\\small \\copyright~Copyright 2003-2004 MySQL AB\\hfill support-cluster\@mysql.com}]{}\n";
    } elsif (/\\lfoot/) {
	print OUTFILE "\\lfoot[]{\\fancyplain{}{\\bfseries\\small support-cluster\@mysql.com\\hfill \\copyright~Copyright 2003-2004 MySQL AB}}\n";
    } else {
	print OUTFILE;
    }
}

close INFILE;
close OUTFILE;

system("mv ${destdir}/doxygen.sty.new ${destdir}/doxygen.sty");
34
ndb/docs/doxygen/predoxy.pl
Executable file
@@ -0,0 +1,34 @@
#!/usr/local/bin/perl
#
# Written by Lars Thalmann, lars@mysql.com, 2003.
#

use strict;
umask 000;

# -----------------------------------------------------------------------------
# Fix HTML Footer
# -----------------------------------------------------------------------------

open (OUTFILE, "> footer.html");

print OUTFILE<<EOT;
<hr>
<address>
<small>
<center>
EOT
print OUTFILE "Documentation generated " . localtime() .
    " from NDB Cluster source files.";
print OUTFILE<<EOT;
<br>
© 2003-2004
<a href="http://www.mysql.com">MySQL AB</a>
<br>
</center>
</small></address>
</body>
</html>
EOT

print "Preformat finished\n\n";
8
ndb/env.sh
Normal file
@@ -0,0 +1,8 @@
#

NDB_TOP=`pwd`
export NDB_TOP

NDB_PROJ_HOME=$NDB_TOP/home
export NDB_PROJ_HOME
26
ndb/examples/Makefile
Normal file
@@ -0,0 +1,26 @@
-include .defs.mk

#ifneq ($(C++),)
#OPTS = CC=$(CC) CXX=$(C++)
#endif

# XXX ndbapi_example4 commented out until fixed
BIN_DIRS := ndbapi_example1 ndbapi_example2 ndbapi_example3 $(ndbapi_example4) \
	    ndbapi_example5 select_all

bins: $(patsubst %, _bins_%, $(BIN_DIRS))

$(patsubst %, _bins_%, $(BIN_DIRS)) :
	$(MAKE) -C $(patsubst _bins_%, %, $@) $(OPTS)

libs:

clean:
	for f in ${BIN_DIRS}; do \
	  $(MAKE) -C $$f $@;\
	done

cleanall: clean
tidy: clean
distclean: clean
BIN
ndb/examples/configurations/demos.tar
Normal file
Binary file not shown.
34
ndb/examples/ndbapi_async_example/Makefile
Normal file
@@ -0,0 +1,34 @@
-include ../../Defs.mk
#NDB_OS = OS_YOU_ARE_RUNNING_ON
#NDB_OS = LINUX
#You need to set the NDB_OS variable here (LINUX, SOLARIS, MACOSX)
TARGET = ndbapi_async
SRCS = ndbapi_async.cpp
OBJS = ndbapi_async.o
CC = g++
CFLAGS = -c -Wall -fno-rtti -D$(NDB_OS)
DEBUG =
LFLAGS = -Wall
INCLUDE_DIR = ../../include
LIB_DIR = ../../lib
ifeq ($(NDB_OS), SOLARIS)
# Here is the definition of system libraries necessary for Solaris 7
SYS_LIB = -lpthread -lsocket -lnsl -lrt
endif
ifeq ($(NDB_OS), LINUX)
# Here is the definition of system libraries necessary for Linux 2.4
SYS_LIB = -lpthread
endif
ifeq ($(NDB_OS), MACOSX)
# Here is the definition of system libraries necessary for Mac OS X
SYS_LIB =
endif

$(TARGET): $(OBJS)
	$(CC) $(LFLAGS) -L$(LIB_DIR) -lNDB_API $(OBJS) $(SYS_LIB) -o $(TARGET)

$(TARGET).o: $(SRCS)
	$(CC) $(CFLAGS) -I$(INCLUDE_DIR) -I$(INCLUDE_DIR)/ndbapi $(SRCS)

clean:
	rm -f *.o $(TARGET)
505
ndb/examples/ndbapi_async_example/ndbapi_async.cpp
Normal file
@@ -0,0 +1,505 @@


/* Copyright (C) 2003 MySQL AB

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */


/**
 * ndbapi_async.cpp:
 * Illustrates how to use callbacks and error handling using the asynchronous
 * part of the NDBAPI.
 *
 * Classes and methods in NDBAPI used in this example:
 *
 *  Ndb
 *       init()
 *       waitUntilReady()
 *       getDictionary()
 *       startTransaction()
 *       closeTransaction()
 *       sendPollNdb()
 *       getNdbError()
 *
 *  NdbConnection
 *       getNdbOperation()
 *       executeAsynchPrepare()
 *       getNdbError()
 *
 *  NdbDictionary::Dictionary
 *       getTable()
 *       dropTable()
 *       createTable()
 *       getNdbError()
 *
 *  NdbDictionary::Column
 *       setName()
 *       setPrimaryKey()
 *       setType()
 *       setLength()
 *       setNullable()
 *
 *  NdbDictionary::Table
 *       setName()
 *       addColumn()
 *
 *  NdbOperation
 *       insertTuple()
 *       equal()
 *       setValue()
 *
 */
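/*
 * A rough sketch of how the classes listed above fit together for a single
 * asynchronous insert. This is illustrative only and is not taken verbatim
 * from the example below: the database name "TEST_DB", the key value, the
 * init() argument and the cbData pointer are placeholder assumptions, while
 * the table, columns, callback() and APIERROR come from this file.
 *
 *   Ndb myNdb("TEST_DB");
 *   if (myNdb.init(1024) != 0)              // room for many parallel transactions
 *     APIERROR(myNdb.getNdbError());
 *   myNdb.waitUntilReady();
 *
 *   NdbConnection *myTrans = myNdb.startTransaction();
 *   NdbOperation  *myOp    = myTrans->getNdbOperation("GARAGE");
 *   myOp->insertTuple();
 *   myOp->equal("REG_NO", 1);               // primary key
 *   myOp->setValue("BRAND", "Mercedes");
 *   myOp->setValue("COLOR", "Blue");
 *
 *   // Prepare only; nothing is sent yet. sendPollNdb() sends the prepared
 *   // transactions and invokes callback() as NDB answers.
 *   myTrans->executeAsynchPrepare(Commit, &callback, cbData);
 *   myNdb.sendPollNdb(3000);
 */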
|
||||
|
||||
|
||||
#include <NdbApi.hpp>
|
||||
#include <NdbScanFilter.hpp>
|
||||
#include <iostream> // Used for cout
|
||||
|
||||
#ifdef SOLARIS
|
||||
#include <sys/types.h>
|
||||
#include <unistd.h>
|
||||
#endif
|
||||
|
||||
#if defined LINUX || defined MACOSX
|
||||
#include <time.h>
|
||||
#include <unistd.h>
|
||||
#endif
|
||||
|
||||
/**
|
||||
* Helper sleep function
|
||||
*/
|
||||
int
|
||||
milliSleep(int milliseconds){
|
||||
int result = 0;
|
||||
struct timespec sleeptime;
|
||||
sleeptime.tv_sec = milliseconds / 1000;
|
||||
sleeptime.tv_nsec = (milliseconds - (sleeptime.tv_sec * 1000)) * 1000000;
|
||||
result = nanosleep(&sleeptime, NULL);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* error printout macro
|
||||
*/
|
||||
#define APIERROR(error) \
|
||||
{ std::cout << "Error in " << __FILE__ << ", line:" << __LINE__ << ", code:" \
|
||||
<< error.code << ", msg: " << error.message << "." << std::endl; \
|
||||
exit(-1); }
|
||||
|
||||
|
||||
#ifndef DOXYGEN_SHOULD_SKIP_INTERNAL
|
||||
/**
|
||||
* callback struct.
|
||||
* transaction : index of the transaction in transaction[] array below
|
||||
* data : the data that the transaction was modifying.
|
||||
* retries : counter for how many times the trans. has been retried
|
||||
*/
|
||||
typedef struct {
|
||||
Ndb * ndb;
|
||||
int transaction;
|
||||
int data;
|
||||
int retries;
|
||||
} async_callback_t;
|
||||
|
||||
/**
|
||||
* Structure used in "free list" to a NdbConnection
|
||||
*/
|
||||
typedef struct {
|
||||
NdbConnection* conn;
|
||||
int used;
|
||||
} transaction_t;
|
||||
|
||||
/**
|
||||
* Free list holding transactions
|
||||
*/
|
||||
transaction_t transaction[1024]; //1024 - max number of outstanding
|
||||
                                 //transactions in one Ndb object
|
||||
|
||||
#endif
|
||||
/**
|
||||
* prototypes
|
||||
*/
|
||||
|
||||
/**
|
||||
* Prepare and send transaction
|
||||
*/
|
||||
int populate(Ndb * myNdb, int data, async_callback_t * cbData);
|
||||
|
||||
/**
|
||||
* Error handler.
|
||||
*/
|
||||
bool asynchErrorHandler(NdbConnection * trans, Ndb* ndb);
|
||||
|
||||
/**
|
||||
* Exit function
|
||||
*/
|
||||
void asynchExitHandler(Ndb * m_ndb) ;
|
||||
|
||||
/**
|
||||
* Helper function used in callback(...)
|
||||
*/
|
||||
void closeTransaction(Ndb * ndb , async_callback_t * cb);
|
||||
|
||||
/**
|
||||
* Function to create table
|
||||
*/
|
||||
int create_table(Ndb * myNdb);
|
||||
|
||||
/**
|
||||
* stat. variables
|
||||
*/
|
||||
int tempErrors = 0;
|
||||
int permErrors = 0;
|
||||
|
||||
/**
|
||||
* Helper function for callback(...)
|
||||
*/
|
||||
void
|
||||
closeTransaction(Ndb * ndb , async_callback_t * cb)
|
||||
{
|
||||
ndb->closeTransaction(transaction[cb->transaction].conn);
|
||||
transaction[cb->transaction].conn = 0;
|
||||
transaction[cb->transaction].used = 0;
|
||||
cb->retries++;
|
||||
}
|
||||
|
||||
/**
|
||||
 * Callback executed when the transaction has returned from NDB
|
||||
*/
|
||||
static void
|
||||
callback(int result, NdbConnection* trans, void* aObject)
|
||||
{
|
||||
async_callback_t * cbData = (async_callback_t *)aObject;
|
||||
if (result<0)
|
||||
{
|
||||
/**
|
||||
* Error: Temporary or permanent?
|
||||
*/
|
||||
if (asynchErrorHandler(trans, (Ndb*)cbData->ndb))
|
||||
{
|
||||
closeTransaction((Ndb*)cbData->ndb, cbData);
|
||||
while(populate((Ndb*)cbData->ndb, cbData->data, cbData) < 0)
|
||||
milliSleep(10);
|
||||
}
|
||||
else
|
||||
{
|
||||
std::cout << "Restore: Failed to restore data "
|
||||
<< "due to a unrecoverable error. Exiting..." << std::endl;
|
||||
      Ndb * ndb = cbData->ndb; // save the handle before freeing the callback data
      delete cbData;
      asynchExitHandler(ndb);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
/**
|
||||
* OK! close transaction
|
||||
*/
|
||||
closeTransaction((Ndb*)cbData->ndb, cbData);
|
||||
delete cbData;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Create table "GARAGE"
|
||||
*/
|
||||
int create_table(Ndb * myNdb)
|
||||
{
|
||||
NdbDictionary::Table myTable;
|
||||
NdbDictionary::Column myColumn;
|
||||
|
||||
NdbDictionary::Dictionary* myDict = myNdb->getDictionary();
|
||||
|
||||
/*********************************************************
|
||||
* Create a table named GARAGE if it does not exist *
|
||||
*********************************************************/
|
||||
if (myDict->getTable("GARAGE") != NULL)
|
||||
{
|
||||
std::cout << "NDB already has example table: GARAGE. "
|
||||
<< "Dropping it..." << std::endl;
|
||||
if(myDict->dropTable("GARAGE") == -1)
|
||||
{
|
||||
std::cout << "Failed to drop: GARAGE." << std::endl;
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
myTable.setName("GARAGE");
|
||||
|
||||
/**
|
||||
* Column REG_NO
|
||||
*/
|
||||
myColumn.setName("REG_NO");
|
||||
myColumn.setPrimaryKey(true);
|
||||
myColumn.setType(NdbDictionary::Column::Unsigned);
|
||||
myColumn.setLength(1);
|
||||
myColumn.setNullable(false);
|
||||
myTable.addColumn(myColumn);
|
||||
|
||||
/**
|
||||
* Column BRAND
|
||||
*/
|
||||
myColumn.setName("BRAND");
|
||||
myColumn.setPrimaryKey(false);
|
||||
myColumn.setType(NdbDictionary::Column::Char);
|
||||
myColumn.setLength(20);
|
||||
myColumn.setNullable(false);
|
||||
myTable.addColumn(myColumn);
|
||||
|
||||
/**
|
||||
* Column COLOR
|
||||
*/
|
||||
myColumn.setName("COLOR");
|
||||
myColumn.setPrimaryKey(false);
|
||||
myColumn.setType(NdbDictionary::Column::Char);
|
||||
myColumn.setLength(20);
|
||||
myColumn.setNullable(false);
|
||||
myTable.addColumn(myColumn);
|
||||
|
||||
if (myDict->createTable(myTable) == -1) {
|
||||
APIERROR(myDict->getNdbError());
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||
void asynchExitHandler(Ndb * m_ndb)
|
||||
{
|
||||
if (m_ndb != NULL)
|
||||
delete m_ndb;
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
/* returns true if the error is recoverable (temporary),
|
||||
* false if it is an error that is permanent.
|
||||
*/
|
||||
bool asynchErrorHandler(NdbConnection * trans, Ndb* ndb)
|
||||
{
|
||||
  // trans may be NULL (e.g. when startTransaction() failed); fall back to the Ndb error
  NdbError error = trans ? trans->getNdbError() : ndb->getNdbError();
|
||||
switch(error.status)
|
||||
{
|
||||
case NdbError::Success:
|
||||
return false;
|
||||
break;
|
||||
|
||||
case NdbError::TemporaryError:
|
||||
/**
|
||||
* The error code indicates a temporary error.
|
||||
* The application should typically retry.
|
||||
* (Includes classifications: NdbError::InsufficientSpace,
|
||||
* NdbError::TemporaryResourceError, NdbError::NodeRecoveryError,
|
||||
* NdbError::OverloadError, NdbError::NodeShutdown
|
||||
* and NdbError::TimeoutExpired.)
|
||||
*
|
||||
* We should sleep for a while and retry, except for insufficient space
|
||||
*/
|
||||
if(error.classification == NdbError::InsufficientSpace)
|
||||
return false;
|
||||
milliSleep(10);
|
||||
tempErrors++;
|
||||
return true;
|
||||
break;
|
||||
case NdbError::UnknownResult:
|
||||
std::cout << error.message << std::endl;
|
||||
return false;
|
||||
break;
|
||||
default:
|
||||
case NdbError::PermanentError:
|
||||
switch (error.code)
|
||||
{
|
||||
case 499:
|
||||
case 250:
|
||||
milliSleep(10);
|
||||
return true; // SCAN errors that can be retried. Requires restart of scan.
|
||||
default:
|
||||
break;
|
||||
}
|
||||
//ERROR
|
||||
std::cout << error.message << std::endl;
|
||||
return false;
|
||||
break;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
static int nPreparedTransactions = 0;
|
||||
static int MAX_RETRIES = 10;
|
||||
static int parallelism = 100;
|
||||
|
||||
|
||||
/************************************************************************
|
||||
* populate()
|
||||
* 1. Prepare 'parallelism' number of insert transactions.
|
||||
* 2. Send transactions to NDB and wait for callbacks to execute
|
||||
*/
|
||||
int populate(Ndb * myNdb, int data, async_callback_t * cbData)
|
||||
{
|
||||
|
||||
NdbOperation* myNdbOperation; // For operations
|
||||
|
||||
async_callback_t * cb;
|
||||
  int retries = 0; // initialised here: also used when there is no previous callback
|
||||
  int current = -1; // -1 = no free slot found yet
|
||||
for(int i=0; i<1024; i++)
|
||||
{
|
||||
if(transaction[i].used == 0)
|
||||
{
|
||||
current = i;
|
||||
if (cbData == 0)
|
||||
{
|
||||
	/**
	 * We do not have a callback yet:
	 * this is a brand-new transaction
	 */
|
||||
cb = new async_callback_t;
|
||||
cb->retries = 0;
|
||||
}
|
||||
else
|
||||
{
|
||||
/**
|
||||
* We already have a callback
|
||||
*/
|
||||
cb =cbData;
|
||||
retries = cbData->retries;
|
||||
}
|
||||
/**
|
||||
* Set data used by the callback
|
||||
*/
|
||||
cb->ndb = myNdb; //handle to Ndb object so that we can close transaction
|
||||
// in the callback (alt. make myNdb global).
|
||||
|
||||
cb->data = data; //this is the data we want to insert
|
||||
cb->transaction = current; //This is the number (id) of this transaction
|
||||
transaction[current].used = 1 ; //Mark the transaction as used
|
||||
break;
|
||||
}
|
||||
}
|
||||
  if(current == -1) // no free slot in the transaction array
    return -1;
|
||||
|
||||
while(retries < MAX_RETRIES)
|
||||
{
|
||||
transaction[current].conn = myNdb->startTransaction();
|
||||
if (transaction[current].conn == NULL) {
|
||||
if (asynchErrorHandler(transaction[current].conn, myNdb))
|
||||
{
|
||||
/**
|
||||
* no transaction to close since conn == null
|
||||
*/
|
||||
milliSleep(10);
|
||||
retries++;
|
||||
continue;
|
||||
}
|
||||
asynchExitHandler(myNdb);
|
||||
}
|
||||
// Error check. If error, then maybe table GARAGE is not in database
|
||||
myNdbOperation = transaction[current].conn->getNdbOperation("GARAGE");
|
||||
if (myNdbOperation == NULL)
|
||||
{
|
||||
if (asynchErrorHandler(transaction[current].conn, myNdb))
|
||||
{
|
||||
myNdb->closeTransaction(transaction[current].conn);
|
||||
transaction[current].conn = 0;
|
||||
milliSleep(10);
|
||||
retries++;
|
||||
continue;
|
||||
}
|
||||
asynchExitHandler(myNdb);
|
||||
} // if
|
||||
if(myNdbOperation->insertTuple() < 0 ||
|
||||
myNdbOperation->equal("REG_NO", data) < 0 ||
|
||||
myNdbOperation->setValue("BRAND", "Mercedes") <0 ||
|
||||
myNdbOperation->setValue("COLOR", "Blue") < 0)
|
||||
{
|
||||
if (asynchErrorHandler(transaction[current].conn, myNdb))
|
||||
{
|
||||
myNdb->closeTransaction(transaction[current].conn);
|
||||
transaction[current].conn = 0;
|
||||
retries++;
|
||||
milliSleep(10);
|
||||
continue;
|
||||
}
|
||||
asynchExitHandler(myNdb);
|
||||
}
|
||||
|
||||
/*Prepare transaction (the transaction is NOT yet sent to NDB)*/
|
||||
transaction[current].conn->executeAsynchPrepare(Commit,
|
||||
&callback,
|
||||
cb);
|
||||
/**
|
||||
* When we have prepared parallelism number of transactions ->
|
||||
* send the transaction to ndb.
|
||||
     * The next time we deal with the transactions is in the
     * callback. There we see which ones were successful
     * and which ones to retry.
|
||||
*/
|
||||
if (nPreparedTransactions == parallelism-1)
|
||||
{
|
||||
// send-poll all transactions
|
||||
// close transaction is done in callback
|
||||
myNdb->sendPollNdb(3000, parallelism );
|
||||
nPreparedTransactions=0;
|
||||
}
|
||||
else
|
||||
nPreparedTransactions++;
|
||||
return 1;
|
||||
}
|
||||
std::cout << "Unable to recover from errors. Exiting..." << std::endl;
|
||||
asynchExitHandler(myNdb);
|
||||
return -1;
|
||||
}
|
||||
|
||||
int main()
|
||||
{
|
||||
Ndb* myNdb = new Ndb( "TEST_DB" ); // Object representing the database
|
||||
|
||||
  /********************************************
   * Initialize NDB and wait until it's ready *
   ********************************************/
|
||||
if (myNdb->init(1024) == -1) { // Set max 1024 parallel transactions
|
||||
APIERROR(myNdb->getNdbError());
|
||||
}
|
||||
|
||||
if (myNdb->waitUntilReady(30) != 0) {
|
||||
std::cout << "NDB was not ready within 30 secs." << std::endl;
|
||||
exit(-1);
|
||||
}
|
||||
create_table(myNdb);
|
||||
|
||||
|
||||
/**
|
||||
* Initialise transaction array
|
||||
*/
|
||||
for(int i = 0 ; i < 1024 ; i++)
|
||||
{
|
||||
transaction[i].used = 0;
|
||||
transaction[i].conn = 0;
|
||||
|
||||
}
|
||||
int i=0;
|
||||
/**
|
||||
* Do 20000 insert transactions.
|
||||
*/
|
||||
while(i < 20000)
|
||||
{
|
||||
while(populate(myNdb,i,0)<0) // <0, no space on free list. Sleep and try again.
|
||||
milliSleep(10);
|
||||
|
||||
i++;
|
||||
}
|
||||
std::cout << "Number of temporary errors: " << tempErrors << std::endl;
|
||||
delete myNdb;
|
||||
}
|
||||
|
||||
|
3
ndb/examples/ndbapi_async_example/readme.txt
Normal file
3
ndb/examples/ndbapi_async_example/readme.txt
Normal file
|
@ -0,0 +1,3 @@
1. Set NDB_OS in Makefile
2. Add path to libNDB_API.so in LD_LIBRARY_PATH
3. Set NDB_CONNECTSTRING
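
A minimal shell sketch of the three steps above; the library path, host and port
are placeholders (not values taken from this readme), and the exact
NDB_CONNECTSTRING syntax should be checked against the NDB documentation:

    # 1. set in the Makefile, not in the environment:  NDB_OS = LINUX  (or SOLARIS, MACOSX)
    # 2. make libNDB_API.so visible to the dynamic loader
    export LD_LIBRARY_PATH=/path/to/ndb/lib:$LD_LIBRARY_PATH
    # 3. tell the example which management server to contact (placeholder value)
    export NDB_CONNECTSTRING="host=<mgm-host>:<port>"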
|
33
ndb/examples/ndbapi_example1/Makefile
Normal file
33
ndb/examples/ndbapi_example1/Makefile
Normal file
|
@ -0,0 +1,33 @@
|
|||
-include .defs.mk
|
||||
#NDB_OS = OS_YOU_ARE_RUNNING_ON
|
||||
#You need to set the NDB_OS variable here
|
||||
TARGET = ndbapi_example1
|
||||
SRCS = ndbapi_example1.cpp
|
||||
OBJS = ndbapi_example1.o
|
||||
CXX = g++
|
||||
CFLAGS = -c -Wall -fno-rtti -fno-exceptions
|
||||
DEBUG =
|
||||
LFLAGS = -Wall
|
||||
INCLUDE_DIR = ../../include
|
||||
LIB_DIR = ../../lib
|
||||
ifeq ($(NDB_OS), SOLARIS)
|
||||
# Here is the definition of system libraries necessary for Solaris 7
|
||||
SYS_LIB =
|
||||
endif
|
||||
ifeq ($(NDB_OS), LINUX)
|
||||
# Here is the definition of system libraries necessary for Linux 2.4
|
||||
SYS_LIB =
|
||||
endif
|
||||
ifeq ($(NDB_OS), MACOSX)
|
||||
# Here is the definition of system libraries necessary for Mac OS X
|
||||
SYS_LIB =
|
||||
endif
|
||||
|
||||
$(TARGET): $(OBJS)
|
||||
$(CXX) $(LFLAGS) -L$(LIB_DIR) $(OBJS) -lNDB_API $(SYS_LIB) -o $(TARGET)
|
||||
|
||||
$(TARGET).o: $(SRCS)
|
||||
$(CXX) $(CFLAGS) -I$(INCLUDE_DIR) -I$(INCLUDE_DIR)/ndbapi $(SRCS)
|
||||
|
||||
clean:
|
||||
rm -f *.o $(TARGET)
|
193
ndb/examples/ndbapi_example1/ndbapi_example1.cpp
Normal file
193
ndb/examples/ndbapi_example1/ndbapi_example1.cpp
Normal file
|
@ -0,0 +1,193 @@
|
|||
/* Copyright (C) 2003 MySQL AB
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation; either version 2 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program; if not, write to the Free Software
|
||||
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */
|
||||
|
||||
//
|
||||
// ndbapi_example1.cpp: Using synchronous transactions in NDB API
|
||||
//
|
||||
// Correct output from this program is:
|
||||
//
|
||||
// ATTR1 ATTR2
|
||||
// 0 10
|
||||
// 1 1
|
||||
// 2 12
|
||||
// Detected that deleted tuple doesn't exist!
|
||||
// 4 14
|
||||
// 5 5
|
||||
// 6 16
|
||||
// 7 7
|
||||
// 8 18
|
||||
// 9 9
|
||||
|
||||
#include <NdbApi.hpp>
|
||||
|
||||
// Used for cout
|
||||
#include <stdio.h>
|
||||
#include <iostream>
|
||||
|
||||
#define APIERROR(error) \
|
||||
{ std::cout << "Error in " << __FILE__ << ", line:" << __LINE__ << ", code:" \
|
||||
<< error.code << ", msg: " << error.message << "." << std::endl; \
|
||||
exit(-1); }
|
||||
|
||||
int main()
|
||||
{
|
||||
Ndb* myNdb = new Ndb( "TEST_DB_1" ); // Object representing the database
|
||||
NdbDictionary::Table myTable;
|
||||
NdbDictionary::Column myColumn;
|
||||
|
||||
NdbConnection *myConnection; // For other transactions
|
||||
NdbOperation *myOperation; // For other operations
|
||||
NdbRecAttr *myRecAttr; // Result of reading attribute value
|
||||
|
||||
/********************************************
|
||||
* Initialize NDB and wait until it's ready *
|
||||
********************************************/
|
||||
if (myNdb->init() == -1) {
|
||||
APIERROR(myNdb->getNdbError());
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
if (myNdb->waitUntilReady(30) != 0) {
|
||||
std::cout << "NDB was not ready within 30 secs." << std::endl;
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
NdbDictionary::Dictionary* myDict = myNdb->getDictionary();
|
||||
|
||||
/*********************************************************
|
||||
* Create a table named MYTABLENAME if it does not exist *
|
||||
*********************************************************/
|
||||
if (myDict->getTable("MYTABLENAME") != NULL) {
|
||||
std::cout << "NDB already has example table: MYTABLENAME." << std::endl;
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
myTable.setName("MYTABLENAME");
|
||||
|
||||
myColumn.setName("ATTR1");
|
||||
myColumn.setPrimaryKey(true);
|
||||
myColumn.setType(NdbDictionary::Column::Unsigned);
|
||||
myColumn.setLength(1);
|
||||
myColumn.setNullable(false);
|
||||
myTable.addColumn(myColumn);
|
||||
|
||||
myColumn.setName("ATTR2");
|
||||
myColumn.setPrimaryKey(false);
|
||||
myColumn.setType(NdbDictionary::Column::Unsigned);
|
||||
myColumn.setLength(1);
|
||||
myColumn.setNullable(false);
|
||||
myTable.addColumn(myColumn);
|
||||
|
||||
if (myDict->createTable(myTable) == -1)
|
||||
APIERROR(myDict->getNdbError());
|
||||
|
||||
/**************************************************************************
|
||||
* Using 5 transactions, insert 10 tuples in table: (0,0),(1,1),...,(9,9) *
|
||||
**************************************************************************/
|
||||
for (int i = 0; i < 5; i++) {
|
||||
myConnection = myNdb->startTransaction();
|
||||
if (myConnection == NULL) APIERROR(myNdb->getNdbError());
|
||||
|
||||
myOperation = myConnection->getNdbOperation("MYTABLENAME");
|
||||
if (myOperation == NULL) APIERROR(myConnection->getNdbError());
|
||||
|
||||
myOperation->insertTuple();
|
||||
myOperation->equal("ATTR1", i);
|
||||
myOperation->setValue("ATTR2", i);
|
||||
|
||||
myOperation = myConnection->getNdbOperation("MYTABLENAME");
|
||||
if (myOperation == NULL) APIERROR(myConnection->getNdbError());
|
||||
|
||||
myOperation->insertTuple();
|
||||
myOperation->equal("ATTR1", i+5);
|
||||
myOperation->setValue("ATTR2", i+5);
|
||||
|
||||
if (myConnection->execute( Commit ) == -1)
|
||||
APIERROR(myConnection->getNdbError());
|
||||
|
||||
myNdb->closeTransaction(myConnection);
|
||||
}
|
||||
|
||||
/*****************************************************************
|
||||
* Update the second attribute in half of the tuples (adding 10) *
|
||||
*****************************************************************/
|
||||
for (int i = 0; i < 10; i+=2) {
|
||||
myConnection = myNdb->startTransaction();
|
||||
if (myConnection == NULL) APIERROR(myNdb->getNdbError());
|
||||
|
||||
myOperation = myConnection->getNdbOperation("MYTABLENAME");
|
||||
if (myOperation == NULL) APIERROR(myConnection->getNdbError());
|
||||
|
||||
myOperation->updateTuple();
|
||||
myOperation->equal( "ATTR1", i );
|
||||
myOperation->setValue( "ATTR2", i+10);
|
||||
|
||||
if( myConnection->execute( Commit ) == -1 )
|
||||
APIERROR(myConnection->getNdbError());
|
||||
|
||||
myNdb->closeTransaction(myConnection);
|
||||
}
|
||||
|
||||
/*************************************************
|
||||
* Delete one tuple (the one with primary key 3) *
|
||||
*************************************************/
|
||||
myConnection = myNdb->startTransaction();
|
||||
if (myConnection == NULL) APIERROR(myNdb->getNdbError());
|
||||
|
||||
myOperation = myConnection->getNdbOperation("MYTABLENAME");
|
||||
if (myOperation == NULL)
|
||||
APIERROR(myConnection->getNdbError());
|
||||
|
||||
myOperation->deleteTuple();
|
||||
myOperation->equal( "ATTR1", 3 );
|
||||
|
||||
if (myConnection->execute(Commit) == -1)
|
||||
APIERROR(myConnection->getNdbError());
|
||||
|
||||
myNdb->closeTransaction(myConnection);
|
||||
|
||||
/*****************************
|
||||
* Read and print all tuples *
|
||||
*****************************/
|
||||
std::cout << "ATTR1 ATTR2" << std::endl;
|
||||
|
||||
for (int i = 0; i < 10; i++) {
|
||||
myConnection = myNdb->startTransaction();
|
||||
if (myConnection == NULL) APIERROR(myNdb->getNdbError());
|
||||
|
||||
myOperation = myConnection->getNdbOperation("MYTABLENAME");
|
||||
if (myOperation == NULL) APIERROR(myConnection->getNdbError());
|
||||
|
||||
myOperation->readTuple();
|
||||
myOperation->equal("ATTR1", i);
|
||||
|
||||
myRecAttr = myOperation->getValue("ATTR2", NULL);
|
||||
if (myRecAttr == NULL) APIERROR(myConnection->getNdbError());
|
||||
|
||||
if(myConnection->execute( Commit ) == -1)
|
||||
if (i == 3) {
|
||||
std::cout << "Detected that deleted tuple doesn't exist!" << std::endl;
|
||||
} else {
|
||||
APIERROR(myConnection->getNdbError());
|
||||
}
|
||||
|
||||
if (i != 3) {
|
||||
printf(" %2d %2d\n", i, myRecAttr->u_32_value());
|
||||
}
|
||||
myNdb->closeTransaction(myConnection);
|
||||
}
|
||||
delete myNdb;
|
||||
}
|
33
ndb/examples/ndbapi_example2/Makefile
Normal file
33
ndb/examples/ndbapi_example2/Makefile
Normal file
|
@ -0,0 +1,33 @@
|
|||
-include .defs.mk
|
||||
#NDB_OS = OS_YOU_ARE_RUNNING_ON
|
||||
#You need to set the NDB_OS variable here
|
||||
TARGET = ndbapi_example2
|
||||
SRCS = ndbapi_example2.cpp
|
||||
OBJS = ndbapi_example2.o
|
||||
CXX = g++
|
||||
CFLAGS = -c -Wall -fno-rtti -fno-exceptions
|
||||
DEBUG =
|
||||
LFLAGS = -Wall
|
||||
INCLUDE_DIR = ../../include
|
||||
LIB_DIR = ../../lib
|
||||
ifeq ($(NDB_OS), SOLARIS)
|
||||
# Here is the definition of system libraries necessary for Solaris 7
|
||||
SYS_LIB =
|
||||
endif
|
||||
ifeq ($(NDB_OS), LINUX)
|
||||
# Here is the definition of system libraries necessary for Linux 2.4
|
||||
SYS_LIB =
|
||||
endif
|
||||
ifeq ($(NDB_OS), MACOSX)
|
||||
# Here is the definition of system libraries necessary for Mac OS X
|
||||
SYS_LIB =
|
||||
endif
|
||||
|
||||
$(TARGET): $(OBJS)
|
||||
$(CXX) $(LFLAGS) -L$(LIB_DIR) $(OBJS) -lNDB_API $(SYS_LIB) -o $(TARGET)
|
||||
|
||||
$(TARGET).o: $(SRCS)
|
||||
$(CXX) $(CFLAGS) -I$(INCLUDE_DIR) -I$(INCLUDE_DIR)/ndbapi $(SRCS)
|
||||
|
||||
clean:
|
||||
rm -f *.o $(TARGET)
|
110
ndb/examples/ndbapi_example2/ndbapi_example2.cpp
Normal file
110
ndb/examples/ndbapi_example2/ndbapi_example2.cpp
Normal file
|
@ -0,0 +1,110 @@
|
|||
/* Copyright (C) 2003 MySQL AB
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation; either version 2 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program; if not, write to the Free Software
|
||||
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */
|
||||
|
||||
//
|
||||
// ndbapi_example2.cpp: Using asynchronous transactions in NDB API
|
||||
//
|
||||
// Execute ndbapi_example1 to create the table "MYTABLENAME"
|
||||
// before executing this program.
|
||||
//
|
||||
// Correct output from this program is:
|
||||
//
|
||||
// Successful insert.
|
||||
// Successful insert.
|
||||
|
||||
#include <NdbApi.hpp>
|
||||
|
||||
// Used for cout
|
||||
#include <iostream>
|
||||
|
||||
#define APIERROR(error) \
|
||||
{ std::cout << "Error in " << __FILE__ << ", line:" << __LINE__ << ", code:" \
|
||||
<< error.code << ", msg: " << error.message << "." << std::endl; \
|
||||
exit(-1); }
|
||||
|
||||
static void callback(int result, NdbConnection* NdbObject, void* aObject);
|
||||
|
||||
int main()
|
||||
{
|
||||
Ndb* myNdb = new Ndb( "TEST_DB_2" ); // Object representing the database
|
||||
|
||||
NdbConnection* myNdbConnection[2]; // For transactions
|
||||
NdbOperation* myNdbOperation; // For operations
|
||||
|
||||
  /********************************************
   * Initialize NDB and wait until it's ready *
   ********************************************/
|
||||
if (myNdb->init(2) == -1) { // Want two parallel insert transactions
|
||||
APIERROR(myNdb->getNdbError());
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
if (myNdb->waitUntilReady(30) != 0) {
|
||||
std::cout << "NDB was not ready within 30 secs." << std::endl;
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
/******************************************************
|
||||
* Insert (we do two insert transactions in parallel) *
|
||||
******************************************************/
|
||||
for (int i = 0; i < 2; i++) {
|
||||
myNdbConnection[i] = myNdb->startTransaction();
|
||||
if (myNdbConnection[i] == NULL) APIERROR(myNdb->getNdbError());
|
||||
|
||||
myNdbOperation = myNdbConnection[i]->getNdbOperation("MYTABLENAME");
|
||||
// Error check. If error, then maybe table MYTABLENAME is not in database
|
||||
if (myNdbOperation == NULL) APIERROR(myNdbConnection[i]->getNdbError());
|
||||
|
||||
myNdbOperation->insertTuple();
|
||||
myNdbOperation->equal("ATTR1", 20 + i);
|
||||
myNdbOperation->setValue("ATTR2", 20 + i);
|
||||
|
||||
// Prepare transaction (the transaction is NOT yet sent to NDB)
|
||||
myNdbConnection[i]->executeAsynchPrepare(Commit, &callback, NULL);
|
||||
}
|
||||
|
||||
// Send all transactions to NDB
|
||||
myNdb->sendPreparedTransactions(0);
|
||||
|
||||
// Poll all transactions
|
||||
myNdb->pollNdb(3000, 2);
|
||||
|
||||
// Close all transactions
|
||||
for (int i = 0; i < 2; i++)
|
||||
myNdb->closeTransaction(myNdbConnection[i]);
|
||||
|
||||
delete myNdb;
|
||||
}
|
||||
|
||||
/*
|
||||
* callback : This is called when the transaction is polled
|
||||
*
|
||||
* (This function must have three arguments:
|
||||
* - The result of the transaction,
|
||||
* - The NdbConnection object, and
|
||||
* - A pointer to an arbitrary object.)
|
||||
*/
|
||||
|
||||
static void
|
||||
callback(int result, NdbConnection* myTrans, void* aObject)
|
||||
{
|
||||
if (result == -1) {
|
||||
std::cout << "Poll error: " << std::endl;
|
||||
APIERROR(myTrans->getNdbError());
|
||||
} else {
|
||||
std::cout << "Successful insert." << std::endl;
|
||||
}
|
||||
}
|
33
ndb/examples/ndbapi_example3/Makefile
Normal file
33
ndb/examples/ndbapi_example3/Makefile
Normal file
|
@ -0,0 +1,33 @@
|
|||
-include .defs.mk
|
||||
#NDB_OS = OS_YOU_ARE_RUNNING_ON
|
||||
#You need to set the NDB_OS variable here
|
||||
TARGET = ndbapi_example3
|
||||
SRCS = ndbapi_example3.cpp
|
||||
OBJS = ndbapi_example3.o
|
||||
CXX = g++
|
||||
CFLAGS = -c -Wall -fno-rtti -fno-exceptions
|
||||
DEBUG =
|
||||
LFLAGS = -Wall
|
||||
INCLUDE_DIR = ../../include
|
||||
LIB_DIR = ../../lib
|
||||
ifeq ($(NDB_OS), SOLARIS)
|
||||
# Here is the definition of system libraries necessary for Solaris 7
|
||||
SYS_LIB =
|
||||
endif
|
||||
ifeq ($(NDB_OS), LINUX)
|
||||
# Here is the definition of system libraries necessary for Linux 2.4
|
||||
SYS_LIB =
|
||||
endif
|
||||
ifeq ($(NDB_OS), MACOSX)
|
||||
# Here is the definition of system libraries necessary for Mac OS X
|
||||
SYS_LIB =
|
||||
endif
|
||||
|
||||
$(TARGET): $(OBJS)
|
||||
$(CXX) $(LFLAGS) -L$(LIB_DIR) $(OBJS) -lNDB_API $(SYS_LIB) -o $(TARGET)
|
||||
|
||||
$(TARGET).o: $(SRCS)
|
||||
$(CXX) $(CFLAGS) -I$(INCLUDE_DIR) -I$(INCLUDE_DIR)/ndbapi $(SRCS)
|
||||
|
||||
clean:
|
||||
rm -f *.o $(TARGET)
|
202
ndb/examples/ndbapi_example3/ndbapi_example3.cpp
Normal file
202
ndb/examples/ndbapi_example3/ndbapi_example3.cpp
Normal file
|
@ -0,0 +1,202 @@
|
|||
/* Copyright (C) 2003 MySQL AB
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation; either version 2 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program; if not, write to the Free Software
|
||||
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */
|
||||
|
||||
//
|
||||
// ndbapi_example3.cpp: Error handling and transaction retries
|
||||
//
|
||||
// Execute ndbapi_example1 to create the table "MYTABLENAME"
|
||||
// before executing this program.
|
||||
//
|
||||
// There are many ways to program using the NDB API. In this example
|
||||
// we execute two inserts in the same transaction using
|
||||
// NdbConnection::execute(NoCommit).
|
||||
//
|
||||
// Transaction failure is handled by re-executing the transaction
|
||||
// in case of non-permanent transaction errors.
|
||||
// Application errors (i.e. errors at points marked with APIERROR)
|
||||
// should be handled by the application programmer.
|
||||
|
||||
#include <NdbApi.hpp>
|
||||
|
||||
// Used for cout
|
||||
#include <iostream>
|
||||
|
||||
// Used for sleep (use your own version of sleep)
|
||||
#include <unistd.h>
|
||||
#define TIME_TO_SLEEP_BETWEEN_TRANSACTION_RETRIES 1
|
||||
|
||||
//
|
||||
// APIERROR prints an NdbError object
|
||||
//
|
||||
#define APIERROR(error) \
|
||||
{ std::cout << "API ERROR: " << error.code << " " << error.message \
|
||||
<< std::endl \
|
||||
<< " " << "Status: " << error.status \
|
||||
<< ", Classification: " << error.classification << std::endl\
|
||||
<< " " << "File: " << __FILE__ \
|
||||
<< " (Line: " << __LINE__ << ")" << std::endl \
|
||||
; \
|
||||
}
|
||||
|
||||
//
|
||||
// CONERROR prints all error info regarding an NdbConnection
|
||||
//
|
||||
#define CONERROR(ndbConnection) \
|
||||
{ NdbError error = ndbConnection->getNdbError(); \
|
||||
std::cout << "CON ERROR: " << error.code << " " << error.message \
|
||||
<< std::endl \
|
||||
<< " " << "Status: " << error.status \
|
||||
<< ", Classification: " << error.classification << std::endl \
|
||||
<< " " << "File: " << __FILE__ \
|
||||
<< " (Line: " << __LINE__ << ")" << std::endl \
|
||||
; \
|
||||
printTransactionError(ndbConnection); \
|
||||
}
|
||||
|
||||
void printTransactionError(NdbConnection *ndbConnection) {
|
||||
const NdbOperation *ndbOp = NULL;
|
||||
int i=0;
|
||||
|
||||
/****************************************************************
|
||||
* Print NdbError object of every operations in the transaction *
|
||||
****************************************************************/
|
||||
while ((ndbOp = ndbConnection->getNextCompletedOperation(ndbOp)) != NULL) {
|
||||
NdbError error = ndbOp->getNdbError();
|
||||
std::cout << " OPERATION " << i+1 << ": "
|
||||
<< error.code << " " << error.message << std::endl
|
||||
<< " Status: " << error.status
|
||||
<< ", Classification: " << error.classification << std::endl;
|
||||
i++;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
//
|
||||
// Example insert
|
||||
// @param transactionId   id, inserted as the ATTR1 and ATTR2 values
// @param myConnection    NdbConnection used for the transaction
|
||||
// @return -1 in case of failures, 0 otherwise
|
||||
//
|
||||
int insert(int transactionId, NdbConnection* myConnection) {
|
||||
NdbOperation *myOperation; // For other operations
|
||||
|
||||
myOperation = myConnection->getNdbOperation("MYTABLENAME");
|
||||
if (myOperation == NULL) return -1;
|
||||
|
||||
if (myOperation->insertTuple() ||
|
||||
myOperation->equal("ATTR1", transactionId) ||
|
||||
myOperation->setValue("ATTR2", transactionId)) {
|
||||
APIERROR(myOperation->getNdbError());
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
return myConnection->execute(NoCommit);
|
||||
}
|
||||
|
||||
|
||||
//
|
||||
// Execute function which re-executes (tries 10 times) the transaction
|
||||
// if there are temporary errors (e.g. the NDB Cluster is overloaded).
|
||||
// @return -1 failure, 1 success
|
||||
//
|
||||
int executeInsertTransaction(int transactionId, Ndb* myNdb) {
|
||||
int result = 0; // No result yet
|
||||
int noOfRetriesLeft = 10;
|
||||
NdbConnection *myConnection; // For other transactions
|
||||
NdbError ndberror;
|
||||
|
||||
while (noOfRetriesLeft > 0 && !result) {
|
||||
|
||||
/*********************************
|
||||
* Start and execute transaction *
|
||||
*********************************/
|
||||
myConnection = myNdb->startTransaction();
|
||||
if (myConnection == NULL) {
|
||||
APIERROR(myNdb->getNdbError());
|
||||
ndberror = myNdb->getNdbError();
|
||||
result = -1; // Failure
|
||||
} else if (insert(transactionId, myConnection) ||
|
||||
insert(10000+transactionId, myConnection) ||
|
||||
myConnection->execute(Commit)) {
|
||||
CONERROR(myConnection);
|
||||
ndberror = myConnection->getNdbError();
|
||||
result = -1; // Failure
|
||||
} else {
|
||||
result = 1; // Success
|
||||
}
|
||||
|
||||
/**********************************
|
||||
* If failure, then analyze error *
|
||||
**********************************/
|
||||
if (result == -1) {
|
||||
switch (ndberror.status) {
|
||||
case NdbError::Success:
|
||||
break;
|
||||
case NdbError::TemporaryError:
|
||||
std::cout << "Retrying transaction..." << std::endl;
|
||||
sleep(TIME_TO_SLEEP_BETWEEN_TRANSACTION_RETRIES);
|
||||
--noOfRetriesLeft;
|
||||
result = 0; // No completed transaction yet
|
||||
break;
|
||||
|
||||
case NdbError::UnknownResult:
|
||||
case NdbError::PermanentError:
|
||||
std::cout << "No retry of transaction..." << std::endl;
|
||||
result = -1; // Permanent failure
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
/*********************
|
||||
* Close transaction *
|
||||
*********************/
|
||||
if (myConnection != NULL) {
|
||||
myNdb->closeTransaction(myConnection);
|
||||
}
|
||||
}
|
||||
|
||||
if (result != 1) exit(-1);
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
int main()
|
||||
{
|
||||
Ndb* myNdb = new Ndb( "TEST_DB_1" ); // Object representing the database
|
||||
|
||||
  /********************************************
   * Initialize NDB and wait until it's ready *
   ********************************************/
|
||||
if (myNdb->init() == -1) {
|
||||
APIERROR(myNdb->getNdbError());
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
if (myNdb->waitUntilReady(30) != 0) {
|
||||
std::cout << "NDB was not ready within 30 secs." << std::endl;
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
/************************************
|
||||
* Execute some insert transactions *
|
||||
************************************/
|
||||
for (int i = 10000; i < 20000; i++) {
|
||||
executeInsertTransaction(i, myNdb);
|
||||
}
|
||||
|
||||
delete myNdb;
|
||||
}
|
33
ndb/examples/ndbapi_example4/Makefile
Normal file
33
ndb/examples/ndbapi_example4/Makefile
Normal file
|
@ -0,0 +1,33 @@
|
|||
-include .defs.mk
|
||||
#NDB_OS = OS_YOU_ARE_RUNNING_ON
|
||||
#You need to set the NDB_OS variable here
|
||||
TARGET = ndbapi_example4
|
||||
SRCS = ndbapi_example4.cpp
|
||||
OBJS = ndbapi_example4.o
|
||||
CXX = g++
|
||||
CFLAGS = -c -Wall -fno-rtti -fno-exceptions
|
||||
DEBUG =
|
||||
LFLAGS = -Wall
|
||||
INCLUDE_DIR = ../../include
|
||||
LIB_DIR = ../../lib
|
||||
ifeq ($(NDB_OS), SOLARIS)
|
||||
# Here is the definition of system libraries necessary for Solaris 7
|
||||
SYS_LIB =
|
||||
endif
|
||||
ifeq ($(NDB_OS), LINUX)
|
||||
# Here is the definition of system libraries necessary for Linux 2.4
|
||||
SYS_LIB =
|
||||
endif
|
||||
ifeq ($(NDB_OS), MACOSX)
|
||||
# Here is the definition of system libraries necessary for Mac OS X
|
||||
SYS_LIB =
|
||||
endif
|
||||
|
||||
$(TARGET): $(OBJS)
|
||||
$(CXX) $(LFLAGS) -L$(LIB_DIR) $(OBJS) -lNDB_API $(SYS_LIB) -o $(TARGET)
|
||||
|
||||
$(TARGET).o: $(SRCS)
|
||||
$(CXX) $(CFLAGS) -I$(INCLUDE_DIR) -I$(INCLUDE_DIR)/ndbapi $(SRCS)
|
||||
|
||||
clean:
|
||||
rm -f *.o $(TARGET)
|
252
ndb/examples/ndbapi_example4/ndbapi_example4.cpp
Normal file
252
ndb/examples/ndbapi_example4/ndbapi_example4.cpp
Normal file
|
@ -0,0 +1,252 @@
|
|||
/* Copyright (C) 2003 MySQL AB
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation; either version 2 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program; if not, write to the Free Software
|
||||
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */
|
||||
|
||||
//
|
||||
// ndbapi_example4.cpp: Using secondary indexes in NDB API
|
||||
//
|
||||
// Correct output from this program is:
|
||||
//
|
||||
// ATTR1 ATTR2
|
||||
// 0 10
|
||||
// 1 1
|
||||
// 2 12
|
||||
// Detected that deleted tuple doesn't exist!
|
||||
// 4 14
|
||||
// 5 5
|
||||
// 6 16
|
||||
// 7 7
|
||||
// 8 18
|
||||
// 9 9
|
||||
|
||||
#include <NdbApi.hpp>
|
||||
|
||||
// Used for cout
|
||||
#include <stdio.h>
|
||||
#include <iostream>
|
||||
|
||||
#define APIERROR(error) \
|
||||
{ std::cout << "Error in " << __FILE__ << ", line:" << __LINE__ << ", code:" \
|
||||
<< error.code << ", msg: " << error.message << "." << std::endl; \
|
||||
exit(-1); }
|
||||
|
||||
int main()
|
||||
{
|
||||
Ndb* myNdb = new Ndb( "TEST_DB_1" ); // Object representing the database
|
||||
NdbDictionary::Table myTable;
|
||||
NdbDictionary::Column myColumn;
|
||||
NdbDictionary::Index myIndex;
|
||||
|
||||
NdbConnection *myConnection; // For transactions
|
||||
NdbOperation *myOperation; // For primary key operations
|
||||
NdbIndexOperation *myIndexOperation; // For index operations
|
||||
NdbRecAttr *myRecAttr; // Result of reading attribute value
|
||||
|
||||
/********************************************
|
||||
* Initialize NDB and wait until it's ready *
|
||||
********************************************/
|
||||
if (myNdb->init() == -1) {
|
||||
APIERROR(myNdb->getNdbError());
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
if (myNdb->waitUntilReady(30) != 0) {
|
||||
std::cout << "NDB was not ready within 30 secs." << std::endl;
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
/*********************************************************
|
||||
* Create a table named MYTABLENAME if it does not exist *
|
||||
*********************************************************/
|
||||
NdbDictionary::Dictionary* myDict = myNdb->getDictionary();
|
||||
if (myDict->getTable("MYTABLENAME") != NULL) {
|
||||
std::cout << "NDB already has example table: MYTABLENAME." << std::endl;
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
myTable.setName("MYTABLENAME");
|
||||
|
||||
myColumn.setName("ATTR1");
|
||||
myColumn.setPrimaryKey(true);
|
||||
myColumn.setType(NdbDictionary::Column::Unsigned);
|
||||
myColumn.setLength(1);
|
||||
myColumn.setNullable(false);
|
||||
myTable.addColumn(myColumn);
|
||||
|
||||
myColumn.setName("ATTR2");
|
||||
myColumn.setPrimaryKey(false);
|
||||
myColumn.setType(NdbDictionary::Column::Unsigned);
|
||||
myColumn.setLength(1);
|
||||
myColumn.setNullable(false);
|
||||
myTable.addColumn(myColumn);
|
||||
|
||||
if (myDict->createTable(myTable) == -1)
|
||||
APIERROR(myDict->getNdbError());
|
||||
|
||||
|
||||
/**********************************************************
|
||||
* Create an index named MYINDEXNAME if it does not exist *
|
||||
**********************************************************/
|
||||
if (myDict->getIndex("MYINDEXNAME", "MYTABLENAME") != NULL) {
|
||||
std::cout << "NDB already has example index: MYINDEXNAME." << std::endl;
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
myIndex.setName("MYINDEXNAME");
|
||||
myIndex.setTable("MYTABLENAME");
|
||||
myIndex.setType(NdbDictionary::Index::UniqueHashIndex);
|
||||
const char* attr_arr[] = {"ATTR2"};
|
||||
myIndex.addIndexColumns(1, attr_arr);
|
||||
|
||||
if (myDict->createIndex(myIndex) == -1)
|
||||
APIERROR(myDict->getNdbError());
|
||||
|
||||
|
||||
/**************************************************************************
|
||||
* Using 5 transactions, insert 10 tuples in table: (0,0),(1,1),...,(9,9) *
|
||||
**************************************************************************/
|
||||
for (int i = 0; i < 5; i++) {
|
||||
myConnection = myNdb->startTransaction();
|
||||
if (myConnection == NULL) APIERROR(myNdb->getNdbError());
|
||||
|
||||
myOperation = myConnection->getNdbOperation("MYTABLENAME");
|
||||
if (myOperation == NULL) APIERROR(myConnection->getNdbError());
|
||||
|
||||
myOperation->insertTuple();
|
||||
myOperation->equal("ATTR1", i);
|
||||
myOperation->setValue("ATTR2", i);
|
||||
|
||||
myOperation = myConnection->getNdbOperation("MYTABLENAME");
|
||||
if (myOperation == NULL) APIERROR(myConnection->getNdbError());
|
||||
|
||||
myOperation->insertTuple();
|
||||
myOperation->equal("ATTR1", i+5);
|
||||
myOperation->setValue("ATTR2", i+5);
|
||||
|
||||
if (myConnection->execute( Commit ) == -1)
|
||||
APIERROR(myConnection->getNdbError());
|
||||
|
||||
myNdb->closeTransaction(myConnection);
|
||||
}
|
||||
|
||||
/*****************************************
|
||||
* Read and print all tuples using index *
|
||||
*****************************************/
|
||||
std::cout << "ATTR1 ATTR2" << std::endl;
|
||||
|
||||
for (int i = 0; i < 10; i++) {
|
||||
myConnection = myNdb->startTransaction();
|
||||
if (myConnection == NULL) APIERROR(myNdb->getNdbError());
|
||||
|
||||
myIndexOperation = myConnection->getNdbIndexOperation("MYINDEXNAME",
|
||||
"MYTABLENAME");
|
||||
if (myIndexOperation == NULL) APIERROR(myConnection->getNdbError());
|
||||
|
||||
myIndexOperation->readTuple();
|
||||
myIndexOperation->equal("ATTR2", i);
|
||||
|
||||
myRecAttr = myIndexOperation->getValue("ATTR1", NULL);
|
||||
if (myRecAttr == NULL) APIERROR(myConnection->getNdbError());
|
||||
|
||||
    if(myConnection->execute( Commit ) != -1)
      printf(" %2d    %2d\n", myRecAttr->u_32_value(), i);

    // close the transaction inside the loop so none of the ten are leaked
    myNdb->closeTransaction(myConnection);
  }
|
||||
|
||||
/*****************************************************************
|
||||
* Update the second attribute in half of the tuples (adding 10) *
|
||||
*****************************************************************/
|
||||
for (int i = 0; i < 10; i+=2) {
|
||||
myConnection = myNdb->startTransaction();
|
||||
if (myConnection == NULL) APIERROR(myNdb->getNdbError());
|
||||
|
||||
myIndexOperation = myConnection->getNdbIndexOperation("MYINDEXNAME",
|
||||
"MYTABLENAME");
|
||||
if (myIndexOperation == NULL) APIERROR(myConnection->getNdbError());
|
||||
|
||||
myIndexOperation->updateTuple();
|
||||
myIndexOperation->equal( "ATTR2", i );
|
||||
myIndexOperation->setValue( "ATTR2", i+10);
|
||||
|
||||
if( myConnection->execute( Commit ) == -1 )
|
||||
APIERROR(myConnection->getNdbError());
|
||||
|
||||
myNdb->closeTransaction(myConnection);
|
||||
}
|
||||
|
||||
/*************************************************
|
||||
* Delete one tuple (the one with primary key 3) *
|
||||
*************************************************/
|
||||
myConnection = myNdb->startTransaction();
|
||||
if (myConnection == NULL) APIERROR(myNdb->getNdbError());
|
||||
|
||||
myIndexOperation = myConnection->getNdbIndexOperation("MYINDEXNAME",
|
||||
"MYTABLENAME");
|
||||
if (myIndexOperation == NULL)
|
||||
APIERROR(myConnection->getNdbError());
|
||||
|
||||
myIndexOperation->deleteTuple();
|
||||
myIndexOperation->equal( "ATTR2", 3 );
|
||||
|
||||
if (myConnection->execute(Commit) == -1)
|
||||
APIERROR(myConnection->getNdbError());
|
||||
|
||||
myNdb->closeTransaction(myConnection);
|
||||
|
||||
/*****************************
|
||||
* Read and print all tuples *
|
||||
*****************************/
|
||||
std::cout << "ATTR1 ATTR2" << std::endl;
|
||||
|
||||
for (int i = 0; i < 10; i++) {
|
||||
myConnection = myNdb->startTransaction();
|
||||
if (myConnection == NULL) APIERROR(myNdb->getNdbError());
|
||||
|
||||
myOperation = myConnection->getNdbOperation("MYTABLENAME");
|
||||
if (myOperation == NULL) APIERROR(myConnection->getNdbError());
|
||||
|
||||
myOperation->readTuple();
|
||||
myOperation->equal("ATTR1", i);
|
||||
|
||||
myRecAttr = myOperation->getValue("ATTR2", NULL);
|
||||
if (myRecAttr == NULL) APIERROR(myConnection->getNdbError());
|
||||
|
||||
if(myConnection->execute( Commit ) == -1)
|
||||
if (i == 3) {
|
||||
std::cout << "Detected that deleted tuple doesn't exist!" << std::endl;
|
||||
} else {
|
||||
APIERROR(myConnection->getNdbError());
|
||||
}
|
||||
|
||||
if (i != 3) {
|
||||
printf(" %2d %2d\n", i, myRecAttr->u_32_value());
|
||||
}
|
||||
myNdb->closeTransaction(myConnection);
|
||||
}
|
||||
|
||||
/**************
|
||||
* Drop index *
|
||||
**************/
|
||||
if (myDict->dropIndex("MYINDEXNAME", "MYTABLENAME") == -1)
|
||||
APIERROR(myDict->getNdbError());
|
||||
|
||||
/**************
|
||||
* Drop table *
|
||||
**************/
|
||||
if (myDict->dropTable("MYTABLENAME") == -1)
|
||||
APIERROR(myDict->getNdbError());
|
||||
|
||||
delete myNdb;
|
||||
}
|
33
ndb/examples/ndbapi_example5/Makefile
Normal file
33
ndb/examples/ndbapi_example5/Makefile
Normal file
|
@ -0,0 +1,33 @@
|
|||
-include .defs.mk
|
||||
#NDB_OS = OS_YOU_ARE_RUNNING_ON
|
||||
#You need to set the NDB_OS variable here
|
||||
TARGET = ndbapi_example5
|
||||
SRCS = ndbapi_example5.cpp
|
||||
OBJS = ndbapi_example5.o
|
||||
CXX = g++
|
||||
CFLAGS = -c -Wall -fno-rtti -fno-exceptions
|
||||
DEBUG =
|
||||
LFLAGS = -Wall
|
||||
INCLUDE_DIR = ../../include
|
||||
LIB_DIR = ../../lib
|
||||
ifeq ($(NDB_OS), SOLARIS)
|
||||
# Here is the definition of system libraries necessary for Solaris 7
|
||||
SYS_LIB =
|
||||
endif
|
||||
ifeq ($(NDB_OS), LINUX)
|
||||
# Here is the definition of system libraries necessary for Linux 2.4
|
||||
SYS_LIB =
|
||||
endif
|
||||
ifeq ($(NDB_OS), MACOSX)
|
||||
# Here is the definition of system libraries necessary for Mac OS X
|
||||
SYS_LIB =
|
||||
endif
|
||||
|
||||
$(TARGET): $(OBJS)
|
||||
$(CXX) $(LFLAGS) -L$(LIB_DIR) $(OBJS) -lNDB_API $(SYS_LIB) -o $(TARGET)
|
||||
|
||||
$(TARGET).o: $(SRCS)
|
||||
$(CXX) $(CFLAGS) -I$(INCLUDE_DIR) -I$(INCLUDE_DIR)/ndbapi $(SRCS)
|
||||
|
||||
clean:
|
||||
rm -f *.o $(TARGET)
|
230
ndb/examples/ndbapi_example5/ndbapi_example5.cpp
Normal file
230
ndb/examples/ndbapi_example5/ndbapi_example5.cpp
Normal file
|
@ -0,0 +1,230 @@
|
|||
/* Copyright (C) 2003 MySQL AB
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation; either version 2 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program; if not, write to the Free Software
|
||||
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */
|
||||
|
||||
/**
|
||||
* ndbapi_example5.cpp: Using API level events in NDB API
|
||||
*/
|
||||
|
||||
#include <NdbApi.hpp>
|
||||
#include <NdbEventOperation.hpp>
|
||||
|
||||
// Used for cout
|
||||
#include <stdio.h>
|
||||
#include <iostream>
|
||||
#include <unistd.h>
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
* Assume that there is a table TAB0 which is being updated by
|
||||
* another process (e.g. flexBench -l 0 -stdtables).
|
||||
 * We want to monitor what happens with columns COL0, COL1, COL11
|
||||
*
|
||||
* or together with the mysqlcluster client;
|
||||
*
|
||||
* shell> mysqlcluster -u root
|
||||
* mysql> create database TEST_DB;
|
||||
* mysql> use TEST_DB;
|
||||
* mysql> create table TAB0 (COL0 int primary key, COL1 int, COL11 int);
|
||||
*
|
||||
* In another window start ndbapi_example5, wait until properly started
|
||||
*
|
||||
* mysql> insert into TAB0 values (1,2,3);
|
||||
* mysql> insert into TAB0 values (2,2,3);
|
||||
* mysql> insert into TAB0 values (3,2,9);
|
||||
* mysql>
|
||||
*
|
||||
* you should see the data popping up in the example window
|
||||
*
|
||||
*/
|
||||
|
||||
#define APIERROR(error) \
|
||||
{ std::cout << "Error in " << __FILE__ << ", line:" << __LINE__ << ", code:" \
|
||||
<< error.code << ", msg: " << error.message << "." << std::endl; \
|
||||
exit(-1); }
|
||||
|
||||
Ndb* myCreateNdb();
|
||||
int myCreateEvent(Ndb* myNdb,
|
||||
const char *eventName,
|
||||
const char *eventTableName,
|
||||
const char **eventComlumnName,
|
||||
const int noEventComlumnName);
|
||||
|
||||
int main()
|
||||
{
|
||||
Ndb* myNdb = myCreateNdb();
|
||||
NdbDictionary::Dictionary *myDict;
|
||||
|
||||
const char *eventName = "CHNG_IN_TAB0";
|
||||
const char *eventTableName = "TAB0";
|
||||
const int noEventColumnName = 3;
|
||||
const char *eventColumnName[noEventColumnName] =
|
||||
{"COL0",
|
||||
"COL1",
|
||||
"COL11"};
|
||||
|
||||
myDict = myNdb->getDictionary();
|
||||
|
||||
// Create events
|
||||
myCreateEvent(myNdb,
|
||||
eventName,
|
||||
eventTableName,
|
||||
eventColumnName,
|
||||
noEventColumnName);
|
||||
int j = 0;
|
||||
while (j < 5) {
|
||||
|
||||
// Start "transaction" for handling events
|
||||
NdbEventOperation* op;
|
||||
printf("create EventOperation\n");
|
||||
if ((op = myNdb->createEventOperation(eventName,100)) == NULL) {
|
||||
printf("Event operation creation failed\n");
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
printf("get values\n");
|
||||
NdbRecAttr* recAttr[noEventColumnName];
|
||||
NdbRecAttr* recAttrPre[noEventColumnName];
|
||||
// primary keys should always be a part of the result
|
||||
for (int i = 0; i < noEventColumnName; i++) {
|
||||
recAttr[i] = op->getValue(eventColumnName[i]);
|
||||
recAttrPre[i] = op->getPreValue(eventColumnName[i]);
|
||||
}
|
||||
|
||||
// set up the callbacks
|
||||
printf("execute\n");
|
||||
    if (op->execute()) { // This makes the changes "start flowing"
|
||||
printf("operationd execution failed\n");
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
int i = 0;
|
||||
|
||||
while(i < 40) {
|
||||
//printf("now waiting for event...\n");
|
||||
int r = myNdb->pollEvents(1000); // wait for event or 1000 ms
|
||||
if (r>0) {
|
||||
//printf("got data! %d\n", r);
|
||||
int overrun;
|
||||
while (op->next(&overrun) > 0) {
|
||||
i++;
|
||||
if (!op->isConsistent())
|
||||
printf("A node failiure has occured and events might be missing\n");
|
||||
switch (op->getEventType()) {
|
||||
case NdbDictionary::Event::TE_INSERT:
|
||||
printf("%u INSERT: ", i);
|
||||
break;
|
||||
case NdbDictionary::Event::TE_DELETE:
|
||||
printf("%u DELETE: ", i);
|
||||
break;
|
||||
case NdbDictionary::Event::TE_UPDATE:
|
||||
printf("%u UPDATE: ", i);
|
||||
break;
|
||||
}
|
||||
printf("overrun %u pk %u: ", overrun, recAttr[0]->u_32_value());
|
||||
for (int i = 1; i < noEventColumnName; i++) {
|
||||
if (recAttr[i]->isNULL() >= 0) { // we have a value
|
||||
printf(" post[%u]=", i);
|
||||
if (recAttr[i]->isNULL() == 0) // we have a non-null value
|
||||
printf("%u", recAttr[i]->u_32_value());
|
||||
else // we have a null value
|
||||
printf("NULL");
|
||||
}
|
||||
if (recAttrPre[i]->isNULL() >= 0) { // we have a value
|
||||
printf(" post[%u]=", i);
|
||||
if (recAttrPre[i]->isNULL() == 0) // we have a non-null value
|
||||
printf("%u", recAttrPre[i]->u_32_value());
|
||||
else // we have a null value
|
||||
printf("NULL");
|
||||
}
|
||||
}
|
||||
printf("\n");
|
||||
}
|
||||
} else
|
||||
;//printf("timed out\n");
|
||||
}
|
||||
    // don't want to listen to events anymore
|
||||
myNdb->dropEventOperation(op);
|
||||
|
||||
j++;
|
||||
}
|
||||
|
||||
myDict->dropEvent(eventName); // remove event from database
|
||||
|
||||
delete myNdb;
|
||||
}
|
||||
|
||||
Ndb* myCreateNdb()
|
||||
{
|
||||
Ndb* myNdb = new Ndb("TEST_DB");
|
||||
|
||||
/********************************************
|
||||
* Initialize NDB and wait until it's ready *
|
||||
********************************************/
|
||||
if (myNdb->init() == -1) {
|
||||
APIERROR(myNdb->getNdbError());
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
if (myNdb->waitUntilReady(30) != 0) {
|
||||
std::cout << "NDB was not ready within 30 secs." << std::endl;
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
return myNdb;
|
||||
}
|
||||
|
||||
int myCreateEvent(Ndb* myNdb,
|
||||
const char *eventName,
|
||||
const char *eventTableName,
|
||||
const char **eventColumnName,
|
||||
const int noEventColumnName)
|
||||
{
|
||||
NdbDictionary::Dictionary *myDict = myNdb->getDictionary();
|
||||
|
||||
if (!myDict) {
|
||||
printf("Event Creation failedDictionary not found");
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
NdbDictionary::Event myEvent(eventName);
|
||||
myEvent.setTable(eventTableName);
|
||||
myEvent.addTableEvent(NdbDictionary::Event::TE_ALL);
|
||||
// myEvent.addTableEvent(NdbDictionary::Event::TE_INSERT);
|
||||
// myEvent.addTableEvent(NdbDictionary::Event::TE_UPDATE);
|
||||
// myEvent.addTableEvent(NdbDictionary::Event::TE_DELETE);
|
||||
|
||||
for (int i = 0; i < noEventColumnName; i++)
|
||||
myEvent.addEventColumn(eventColumnName[i]);
|
||||
|
||||
int res = myDict->createEvent(myEvent); // Add event to database
|
||||
|
||||
if (res == 0)
|
||||
myEvent.print();
|
||||
else {
|
||||
printf("Event creation failed\n");
|
||||
printf("trying drop Event, maybe event exists\n");
|
||||
res = myDict->dropEvent(eventName);
|
||||
if (res)
|
||||
exit(-1);
|
||||
// try again
|
||||
res = myDict->createEvent(myEvent); // Add event to database
|
||||
if (res)
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
35
ndb/examples/ndbapi_scan_example/Makefile
Normal file
35
ndb/examples/ndbapi_scan_example/Makefile
Normal file
|
@ -0,0 +1,35 @@
|
|||
-include ../../Defs.mk
|
||||
#NDB_OS = OS_YOU_ARE_RUNNING_ON
|
||||
#You need to set the NDB_OS variable here (LINUX, SOLARIS, MACOSX)
|
||||
#NDB_OS = LINUX
|
||||
|
||||
TARGET = ndbapi_scan
|
||||
SRCS = ndbapi_scan.cpp
|
||||
OBJS = ndbapi_scan.o
|
||||
CC = g++
|
||||
CFLAGS = -c -Wall -fno-rtti
|
||||
DEBUG =
|
||||
LFLAGS = -Wall
|
||||
INCLUDE_DIR = ../../include
|
||||
LIB_DIR = ../../lib
|
||||
ifeq ($(NDB_OS), SOLARIS)
|
||||
# Here is the definition of system libraries necessary for Solaris 7
|
||||
SYS_LIB = -lpthread -lsocket -lnsl -lrt
|
||||
endif
|
||||
ifeq ($(NDB_OS), LINUX)
|
||||
# Here is the definition of system libraries necessary for Linux 2.4
|
||||
SYS_LIB = -lpthread
|
||||
endif
|
||||
ifeq ($(NDB_OS), MACOSX)
|
||||
# Here is the definition of system libraries necessary for Mac OS X
|
||||
SYS_LIB =
|
||||
endif
|
||||
|
||||
$(TARGET): $(OBJS)
|
||||
$(CC) $(LFLAGS) -L$(LIB_DIR) -lNDB_API $(OBJS) $(SYS_LIB) -o $(TARGET)
|
||||
|
||||
$(TARGET).o: $(SRCS)
|
||||
$(CC) $(CFLAGS) -I$(INCLUDE_DIR) -I$(INCLUDE_DIR)/ndbapi $(SRCS)
|
||||
|
||||
clean:
|
||||
rm -f *.o $(TARGET)
|
824
ndb/examples/ndbapi_scan_example/ndbapi_scan.cpp
Normal file
824
ndb/examples/ndbapi_scan_example/ndbapi_scan.cpp
Normal file
|
@ -0,0 +1,824 @@
|
|||
|
||||
/* Copyright (C) 2003 MySQL AB
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation; either version 2 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program; if not, write to the Free Software
|
||||
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */
|
||||
|
||||
|
||||
/*
|
||||
* ndbapi_scan.cpp:
|
||||
* Illustrates how to use the scan api in the NDBAPI.
|
||||
* The example shows how to do scan, scan for update and scan for delete
|
||||
* using NdbScanFilter and NdbScanOperation
|
||||
*
|
||||
* Classes and methods used in this example:
|
||||
*
|
||||
* Ndb
|
||||
* init()
|
||||
 *       waitUntilReady()
|
||||
* getDictionary()
|
||||
* startTransaction()
|
||||
* closeTransaction()
|
||||
* sendPreparedTransactions()
|
||||
* pollNdb()
|
||||
*
|
||||
* NdbConnection
|
||||
* getNdbOperation()
|
||||
* executeAsynchPrepare()
|
||||
* getNdbError()
|
||||
* executeScan()
|
||||
* nextScanResult()
|
||||
*
|
||||
* NdbDictionary::Dictionary
|
||||
* getTable()
|
||||
* dropTable()
|
||||
* createTable()
|
||||
*
|
||||
* NdbDictionary::Column
|
||||
* setName()
|
||||
* setPrimaryKey()
|
||||
* setType()
|
||||
* setLength()
|
||||
* setNullable()
|
||||
*
|
||||
* NdbDictionary::Table
|
||||
* setName()
|
||||
* addColumn()
|
||||
*
|
||||
* NdbOperation
|
||||
* insertTuple()
|
||||
* equal()
|
||||
* setValue()
|
||||
* openScanRead()
|
||||
* openScanExclusive()
|
||||
*
|
||||
* NdbRecAttr
|
||||
* aRef()
|
||||
* u_32_value()
|
||||
*
|
||||
* NdbResultSet
|
||||
* nextResult()
|
||||
* deleteTuple()
|
||||
* updateTuple()
|
||||
*
|
||||
* NdbScanOperation
|
||||
* getValue()
|
||||
* readTuplesExclusive()
|
||||
*
|
||||
* NdbScanFilter
|
||||
* begin()
|
||||
* eq()
|
||||
* end()
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
#include <NdbApi.hpp>
|
||||
#include <NdbScanFilter.hpp>
|
||||
// Used for cout
|
||||
#include <iostream>
|
||||
|
||||
#ifdef SOLARIS
|
||||
#include <sys/types.h>
|
||||
#include <unistd.h>
|
||||
#endif
|
||||
|
||||
#if defined LINUX || defined MACOSX
|
||||
#include <time.h>
|
||||
#include <unistd.h>
|
||||
#endif
|
||||
|
||||
/**
|
||||
* Helper sleep function
|
||||
*/
|
||||
int
|
||||
milliSleep(int milliseconds){
|
||||
int result = 0;
|
||||
struct timespec sleeptime;
|
||||
sleeptime.tv_sec = milliseconds / 1000;
|
||||
sleeptime.tv_nsec = (milliseconds - (sleeptime.tv_sec * 1000)) * 1000000;
|
||||
result = nanosleep(&sleeptime, NULL);
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Error handling macro: print error information and exit
|
||||
*/
|
||||
#define APIERROR(error) \
|
||||
{ std::cout << "Error in " << __FILE__ << ", line:" << __LINE__ << ", code:" \
|
||||
<< error.code << ", msg: " << error.message << "." << std::endl; \
|
||||
exit(-1); }
|
||||
|
||||
/*
|
||||
* callback : This is called when the transaction is polled
|
||||
*
|
||||
* (This function must have three arguments:
|
||||
* - The result of the transaction,
|
||||
* - The NdbConnection object, and
|
||||
* - A pointer to an arbitrary object.)
|
||||
*/
|
||||
static void
|
||||
callback(int result, NdbConnection* myTrans, void* aObject)
|
||||
{
|
||||
if (result == -1) {
|
||||
std::cout << "In callback: " << std::endl;
|
||||
/**
|
||||
* Put error checking code here (see ndb_async_example)
|
||||
*/
|
||||
APIERROR(myTrans->getNdbError());
|
||||
} else {
|
||||
/**
|
||||
* Ok!
|
||||
*/
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Function to create table
|
||||
*/
|
||||
int create_table(Ndb * myNdb)
|
||||
{
|
||||
NdbDictionary::Table myTable;
|
||||
NdbDictionary::Column myColumn;
|
||||
|
||||
NdbDictionary::Dictionary* myDict = myNdb->getDictionary();
|
||||
|
||||
/*********************************************************
|
||||
* Create a table named GARAGE if it does not exist *
|
||||
*********************************************************/
|
||||
if (myDict->getTable("GARAGE") != NULL) {
|
||||
std::cout << "NDB already has example table: GARAGE. "
|
||||
<< "Dropping it..." << std::endl;
|
||||
if(myDict->dropTable("GARAGE") == -1)
|
||||
{
|
||||
std::cout << "Failed to drop: GARAGE." << std::endl;
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
myTable.setName("GARAGE");
|
||||
|
||||
myColumn.setName("REG_NO");
|
||||
myColumn.setPrimaryKey(true);
|
||||
myColumn.setType(NdbDictionary::Column::Unsigned);
|
||||
myColumn.setLength(1);
|
||||
myColumn.setNullable(false);
|
||||
myTable.addColumn(myColumn);
|
||||
|
||||
myColumn.setName("BRAND");
|
||||
myColumn.setPrimaryKey(false);
|
||||
myColumn.setType(NdbDictionary::Column::Char);
|
||||
myColumn.setLength(20);
|
||||
myColumn.setNullable(false);
|
||||
myTable.addColumn(myColumn);
|
||||
|
||||
|
||||
myColumn.setName("COLOR");
|
||||
myColumn.setPrimaryKey(false);
|
||||
myColumn.setType(NdbDictionary::Column::Char);
|
||||
myColumn.setLength(20);
|
||||
myColumn.setNullable(false);
|
||||
myTable.addColumn(myColumn);
|
||||
|
||||
if (myDict->createTable(myTable) == -1) {
|
||||
APIERROR(myDict->getNdbError());
|
||||
return -1;
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||
|
||||
int populate(Ndb * myNdb)
|
||||
{
|
||||
NdbConnection* myNdbConnection[15]; // For transactions
|
||||
NdbOperation* myNdbOperation; // For operations
|
||||
/******************************************************
|
||||
* Insert (we do 15 insert transactions in parallel) *
|
||||
******************************************************/
|
||||
/**
|
||||
* Five blue mercedes
|
||||
*/
|
||||
for (int i = 0; i < 5; i++)
|
||||
{
|
||||
myNdbConnection[i] = myNdb->startTransaction();
|
||||
if (myNdbConnection[i] == NULL)
|
||||
APIERROR(myNdb->getNdbError());
|
||||
myNdbOperation = myNdbConnection[i]->getNdbOperation("GARAGE");
|
||||
// Error check. If error, then maybe table GARAGE is not in database
|
||||
if (myNdbOperation == NULL)
|
||||
APIERROR(myNdbConnection[i]->getNdbError());
|
||||
myNdbOperation->insertTuple();
|
||||
myNdbOperation->equal("REG_NO", i);
|
||||
myNdbOperation->setValue("BRAND", "Mercedes");
|
||||
myNdbOperation->setValue("COLOR", "Blue");
|
||||
// Prepare transaction (the transaction is NOT yet sent to NDB)
|
||||
myNdbConnection[i]->executeAsynchPrepare(Commit, &callback, NULL);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Five black bmw
|
||||
*/
|
||||
for (int i = 5; i < 10; i++)
|
||||
{
|
||||
myNdbConnection[i] = myNdb->startTransaction();
|
||||
if (myNdbConnection[i] == NULL)
|
||||
APIERROR(myNdb->getNdbError());
|
||||
myNdbOperation = myNdbConnection[i]->getNdbOperation("GARAGE");
|
||||
// Error check. If error, then maybe table GARAGE is not in database
|
||||
if (myNdbOperation == NULL)
|
||||
APIERROR(myNdbConnection[i]->getNdbError());
|
||||
myNdbOperation->insertTuple();
|
||||
myNdbOperation->equal("REG_NO", i);
|
||||
myNdbOperation->setValue("BRAND", "BMW");
|
||||
myNdbOperation->setValue("COLOR", "Black");
|
||||
// Prepare transaction (the transaction is NOT yet sent to NDB)
|
||||
myNdbConnection[i]->executeAsynchPrepare(Commit, &callback, NULL);
|
||||
}
|
||||
|
||||
/**
|
||||
* Five pink toyotas
|
||||
*/
|
||||
for (int i = 10; i < 15; i++) {
|
||||
myNdbConnection[i] = myNdb->startTransaction();
|
||||
if (myNdbConnection[i] == NULL) APIERROR(myNdb->getNdbError());
|
||||
myNdbOperation = myNdbConnection[i]->getNdbOperation("GARAGE");
|
||||
// Error check. If error, then maybe table GARAGE is not in database
|
||||
if (myNdbOperation == NULL) APIERROR(myNdbConnection[i]->getNdbError());
|
||||
myNdbOperation->insertTuple();
|
||||
myNdbOperation->equal("REG_NO", i);
|
||||
myNdbOperation->setValue("BRAND", "Toyota");
|
||||
myNdbOperation->setValue("COLOR", "Pink");
|
||||
// Prepare transaction (the transaction is NOT yet sent to NDB)
|
||||
myNdbConnection[i]->executeAsynchPrepare(Commit, &callback, NULL);
|
||||
}
|
||||
|
||||
// Send all transactions to NDB
|
||||
myNdb->sendPreparedTransactions(0);
|
||||
// Poll all transactions
|
||||
myNdb->pollNdb(3000, 0);
|
||||
|
||||
// it is also possible to use sendPollNdb instead of
|
||||
// myNdb->sendPreparedTransactions(0); and myNdb->pollNdb(3000, 0); above.
|
||||
// myNdb->sendPollNdb(3000,0);
|
||||
// Note! Neither sendPollNdb nor pollNdb returns until all 15 callbacks have
|
||||
// executed.
|
||||
|
||||
// Close all transactions. It is also possible to close transactions
|
||||
// in the callback.
|
||||
for (int i = 0; i < 15; i++)
|
||||
myNdb->closeTransaction(myNdbConnection[i]);
|
||||
return 1;
|
||||
}
|
||||
|
||||
int scan_delete(Ndb* myNdb,
|
||||
int parallelism,
|
||||
int column,
|
||||
int column_len,
|
||||
const char * color)
|
||||
|
||||
{
|
||||
|
||||
// Scan all records exclusive and delete
|
||||
// them one by one
|
||||
int retryAttempt = 0;
|
||||
const int retryMax = 10;
|
||||
int deletedRows = 0;
|
||||
int check;
|
||||
NdbError err;
|
||||
NdbConnection *myTrans;
|
||||
NdbScanOperation *myScanOp;
|
||||
|
||||
/**
|
||||
* Loop as long as :
|
||||
* retryMax not reached
|
||||
*   failed operations due to TEMPORARY errors
|
||||
*
|
||||
* Exit loop;
|
||||
*   retryMax reached
|
||||
* Permanent error (return -1)
|
||||
*/
|
||||
while (true)
|
||||
{
|
||||
if (retryAttempt >= retryMax)
|
||||
{
|
||||
std::cout << "ERROR: has retried this operation " << retryAttempt
|
||||
<< " times, failing!" << std::endl;
|
||||
return -1;
|
||||
}
|
||||
|
||||
myTrans = myNdb->startTransaction();
|
||||
if (myTrans == NULL)
|
||||
{
|
||||
const NdbError err = myNdb->getNdbError();
|
||||
|
||||
if (err.status == NdbError::TemporaryError)
|
||||
{
|
||||
milliSleep(50);
|
||||
retryAttempt++;
|
||||
continue;
|
||||
}
|
||||
std::cout << err.message << std::endl;
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a scan operation.
|
||||
*/
|
||||
myScanOp = myTrans->getNdbScanOperation("GARAGE");
|
||||
if (myScanOp == NULL)
|
||||
{
|
||||
std::cout << myTrans->getNdbError().message << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Define a result set for the scan.
|
||||
*/
|
||||
NdbResultSet * rs = myScanOp->readTuplesExclusive(parallelism);
|
||||
if( rs == 0 ) {
|
||||
std::cout << myTrans->getNdbError().message << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Use NdbScanFilter to define search criteria
|
||||
*/
|
||||
NdbScanFilter filter(myScanOp) ;
|
||||
if(filter.begin(NdbScanFilter::AND) < 0 ||
|
||||
filter.eq(column, color, column_len, false) <0||
|
||||
filter.end() <0)
|
||||
{
|
||||
std::cout << myTrans->getNdbError().message << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Start scan (NoCommit since we are only reading at this stage);
|
||||
*/
|
||||
if(myTrans->execute(NoCommit) != 0){
|
||||
err = myTrans->getNdbError();
|
||||
if(err.status == NdbError::TemporaryError){
|
||||
std::cout << myTrans->getNdbError().message << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
milliSleep(50);
|
||||
continue;
|
||||
}
|
||||
std::cout << err.code << std::endl;
|
||||
std::cout << myTrans->getNdbError().code << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* start of loop: nextResult(true) means that "parallelism" number of
|
||||
* rows are fetched from NDB and cached in NDBAPI
|
||||
*/
|
||||
while((check = rs->nextResult(true)) == 0){
|
||||
do {
|
||||
if (rs->deleteTuple() != 0){
|
||||
std::cout << myTrans->getNdbError().message << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
return -1;
|
||||
}
|
||||
deletedRows++;
|
||||
|
||||
/**
|
||||
* nextResult(false) means that the records
|
||||
* cached in the NDBAPI are modified before
|
||||
* fetching more rows from NDB.
|
||||
*/
|
||||
} while((check = rs->nextResult(false)) == 0);
|
||||
|
||||
/**
|
||||
* Commit when all cached tuple have been marked for deletion
|
||||
*/
|
||||
if(check != -1){
|
||||
check = myTrans->execute(Commit);
|
||||
myTrans->releaseCompletedOperations();
|
||||
}
|
||||
/**
|
||||
* Check for errors
|
||||
*/
|
||||
err = myTrans->getNdbError();
|
||||
if(check == -1){
|
||||
if(err.status == NdbError::TemporaryError){
|
||||
std::cout << myTrans->getNdbError().message << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
milliSleep(50);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* End of loop
|
||||
*/
|
||||
}
|
||||
std::cout << myTrans->getNdbError().message << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
return 0;
|
||||
|
||||
|
||||
}
|
||||
if(myTrans!=0) {
|
||||
std::cout << myTrans->getNdbError().message << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
||||
int scan_update(Ndb* myNdb,
|
||||
int parallelism,
|
||||
int column_len,
|
||||
int update_column,
|
||||
const char * column_name,
|
||||
const char * before_color,
|
||||
const char * after_color)
|
||||
|
||||
{
|
||||
|
||||
// Scan all records exclusive and update
|
||||
// them one by one
|
||||
int retryAttempt = 0;
|
||||
const int retryMax = 10;
|
||||
int updatedRows = 0;
|
||||
int check;
|
||||
NdbError err;
|
||||
NdbConnection *myTrans;
|
||||
NdbScanOperation *myScanOp;
|
||||
|
||||
/**
|
||||
* Loop as long as :
|
||||
* retryMax not reached
|
||||
*   failed operations due to TEMPORARY errors
|
||||
*
|
||||
* Exit loop;
|
||||
*   retryMax reached
|
||||
* Permanent error (return -1)
|
||||
*/
|
||||
while (true)
|
||||
{
|
||||
|
||||
if (retryAttempt >= retryMax)
|
||||
{
|
||||
std::cout << "ERROR: has retried this operation " << retryAttempt
|
||||
<< " times, failing!" << std::endl;
|
||||
return -1;
|
||||
}
|
||||
|
||||
myTrans = myNdb->startTransaction();
|
||||
if (myTrans == NULL)
|
||||
{
|
||||
const NdbError err = myNdb->getNdbError();
|
||||
|
||||
if (err.status == NdbError::TemporaryError)
|
||||
{
|
||||
milliSleep(50);
|
||||
retryAttempt++;
|
||||
continue;
|
||||
}
|
||||
std::cout << err.message << std::endl;
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a scan operation.
|
||||
*/
|
||||
myScanOp = myTrans->getNdbScanOperation("GARAGE");
|
||||
if (myScanOp == NULL)
|
||||
{
|
||||
std::cout << myTrans->getNdbError().message << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Define a result set for the scan.
|
||||
*/
|
||||
NdbResultSet * rs = myScanOp->readTuplesExclusive(parallelism);
|
||||
if( rs == 0 ) {
|
||||
std::cout << myTrans->getNdbError().message << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Use NdbScanFilter to define search criteria
|
||||
*/
|
||||
NdbScanFilter filter(myScanOp) ;
|
||||
if(filter.begin(NdbScanFilter::AND) < 0 ||
|
||||
filter.eq(update_column, before_color, column_len, false) <0||
|
||||
filter.end() <0)
|
||||
{
|
||||
std::cout << myTrans->getNdbError().message << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Start scan (NoCommit since we are only reading at this stage);
|
||||
*/
|
||||
if(myTrans->execute(NoCommit) != 0){
|
||||
err = myTrans->getNdbError();
|
||||
if(err.status == NdbError::TemporaryError){
|
||||
std::cout << myTrans->getNdbError().message << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
milliSleep(50);
|
||||
continue;
|
||||
}
|
||||
std::cout << myTrans->getNdbError().code << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Define an update operation
|
||||
*/
|
||||
NdbOperation * myUpdateOp;
|
||||
/**
|
||||
* start of loop: nextResult(true) means that "parallelism" number of
|
||||
* rows are fetched from NDB and cached in NDBAPI
|
||||
*/
|
||||
while((check = rs->nextResult(true)) == 0){
|
||||
do {
|
||||
/**
|
||||
* Get update operation
|
||||
*/
|
||||
myUpdateOp = rs->updateTuple();
|
||||
if (myUpdateOp == 0){
|
||||
std::cout << myTrans->getNdbError().message << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
return -1;
|
||||
}
|
||||
updatedRows++;
|
||||
/**
|
||||
* do the update
|
||||
*/
|
||||
myUpdateOp->setValue(update_column,after_color);
|
||||
/**
|
||||
* nextResult(false) means that the records
|
||||
* cached in the NDBAPI are modified before
|
||||
* fetching more rows from NDB.
|
||||
*/
|
||||
} while((check = rs->nextResult(false)) == 0);
|
||||
|
||||
/**
|
||||
* Commit when all cached tuples have been updated
|
||||
*/
|
||||
if(check != -1){
|
||||
check = myTrans->execute(Commit);
|
||||
myTrans->releaseCompletedOperations();
|
||||
}
|
||||
/**
|
||||
* Check for errors
|
||||
*/
|
||||
err = myTrans->getNdbError();
|
||||
if(check == -1){
|
||||
if(err.status == NdbError::TemporaryError){
|
||||
std::cout << myTrans->getNdbError().message << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
milliSleep(50);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* End of loop
|
||||
*/
|
||||
}
|
||||
std::cout << myTrans->getNdbError().message << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
return 0;
|
||||
|
||||
|
||||
}
|
||||
if(myTrans!=0) {
|
||||
std::cout << myTrans->getNdbError().message << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
||||
|
||||
int scan_print(Ndb * myNdb, int parallelism,
|
||||
int column_len_brand,
|
||||
int column_len_color)
|
||||
{
|
||||
// Scan all records exclusively and print
|
||||
// them one by one
|
||||
int retryAttempt = 0;
|
||||
const int retryMax = 10;
|
||||
int fetchedRows = 0;
|
||||
int check;
|
||||
NdbError err;
|
||||
NdbConnection *myTrans;
|
||||
NdbScanOperation *myScanOp;
|
||||
/* Result of reading attribute value, three columns:
|
||||
REG_NO, BRAND, and COLOR
|
||||
*/
|
||||
NdbRecAttr * myRecAttr[3];
|
||||
|
||||
/**
|
||||
* Loop as long as :
|
||||
* retryMax not reached
|
||||
*   failed operations due to TEMPORARY errors
|
||||
*
|
||||
* Exit loop;
|
||||
*   retryMax reached
|
||||
* Permanent error (return -1)
|
||||
*/
|
||||
while (true)
|
||||
{
|
||||
|
||||
if (retryAttempt >= retryMax)
|
||||
{
|
||||
std::cout << "ERROR: has retried this operation " << retryAttempt
|
||||
<< " times, failing!" << std::endl;
|
||||
return -1;
|
||||
}
|
||||
|
||||
myTrans = myNdb->startTransaction();
|
||||
if (myTrans == NULL)
|
||||
{
|
||||
const NdbError err = myNdb->getNdbError();
|
||||
|
||||
if (err.status == NdbError::TemporaryError)
|
||||
{
|
||||
milliSleep(50);
|
||||
retryAttempt++;
|
||||
continue;
|
||||
}
|
||||
std::cout << err.message << std::endl;
|
||||
return -1;
|
||||
}
|
||||
/*
|
||||
* Define a scan operation (NDBAPI).
|
||||
*/
|
||||
myScanOp = myTrans->getNdbScanOperation("GARAGE");
|
||||
if (myScanOp == NULL)
|
||||
{
|
||||
std::cout << myTrans->getNdbError().message << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Define a result set for the scan.
|
||||
*/
|
||||
NdbResultSet * rs = myScanOp->readTuplesExclusive(parallelism);
|
||||
if( rs == 0 ) {
|
||||
std::cout << myTrans->getNdbError().message << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Define storage for fetched attributes.
|
||||
* E.g., the resulting attributes of executing
|
||||
* myOp->getValue("REG_NO") is placed in myRecAttr[0].
|
||||
* No data exists in myRecAttr until the scan has been executed!
|
||||
*/
|
||||
myRecAttr[0] = myScanOp->getValue("REG_NO");
|
||||
myRecAttr[1] = myScanOp->getValue("BRAND");
|
||||
myRecAttr[2] = myScanOp->getValue("COLOR");
|
||||
if(myRecAttr[0] ==NULL || myRecAttr[1] == NULL || myRecAttr[2]==NULL)
|
||||
{
|
||||
std::cout << myTrans->getNdbError().message << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
return -1;
|
||||
}
|
||||
/**
|
||||
* Start scan (NoCommit since we are only reading at this stage);
|
||||
*/
|
||||
if(myTrans->execute(NoCommit) != 0){
|
||||
err = myTrans->getNdbError();
|
||||
if(err.status == NdbError::TemporaryError){
|
||||
std::cout << myTrans->getNdbError().message << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
milliSleep(50);
|
||||
continue;
|
||||
}
|
||||
std::cout << err.code << std::endl;
|
||||
std::cout << myTrans->getNdbError().code << std::endl;
|
||||
myNdb->closeTransaction(myTrans);
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* start of loop: nextResult(true) means that "parallelism" number of
|
||||
* rows are fetched from NDB and cached in NDBAPI
|
||||
*/
|
||||
while((check = rs->nextResult(true)) == 0){
|
||||
do {
|
||||
|
||||
fetchedRows++;
|
||||
/**
|
||||
* print REG_NO unsigned int
|
||||
*/
|
||||
std::cout << myRecAttr[0]->u_32_value() << "\t";
|
||||
char * buf_brand = new char[column_len_brand+1];
|
||||
char * buf_color = new char[column_len_color+1];
|
||||
/**
|
||||
* print BRAND character string
|
||||
*/
|
||||
memcpy(buf_brand, myRecAttr[1]->aRef(), column_len_brand);
|
||||
buf_brand[column_len_brand] = 0;
|
||||
std::cout << buf_brand << "\t";
|
||||
delete [] buf_brand;
|
||||
/**
|
||||
* print COLOR character string
|
||||
*/
|
||||
memcpy(buf_color, myRecAttr[2]->aRef(), column_len_color);
|
||||
buf_color[column_len_color] = 0;
|
||||
std::cout << buf_color << std::endl;
|
||||
delete [] buf_color;
|
||||
/**
|
||||
* nextResult(false) means that the records
|
||||
* cached in the NDBAPI are modified before
|
||||
* fetching more rows from NDB.
|
||||
*/
|
||||
} while((check = rs->nextResult(false)) == 0);
|
||||
|
||||
}
|
||||
myNdb->closeTransaction(myTrans);
|
||||
return 1;
|
||||
}
|
||||
return -1;
|
||||
|
||||
}
|
||||
|
||||
|
||||
int main()
|
||||
{
|
||||
Ndb* myNdb = new Ndb( "TEST_DB" ); // Object representing the database
|
||||
|
||||
|
||||
|
||||
/*******************************************
|
||||
* Initialize NDB and wait until its ready *
|
||||
*******************************************/
|
||||
if (myNdb->init(1024) == -1) { // Set max 1024 parallel transactions
|
||||
APIERROR(myNdb->getNdbError());
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
if (myNdb->waitUntilReady(30) != 0) {
|
||||
std::cout << "NDB was not ready within 30 secs." << std::endl;
|
||||
exit(-1);
|
||||
}
|
||||
create_table(myNdb);
|
||||
|
||||
NdbDictionary::Dictionary* myDict = myNdb->getDictionary();
|
||||
int column_color = myDict->getTable("GARAGE")->getColumn("COLOR")->getColumnNo();
|
||||
int column_len_color =
|
||||
myDict->getTable("GARAGE")->getColumn("COLOR")->getLength();
|
||||
int column_len_brand =
|
||||
myDict->getTable("GARAGE")->getColumn("BRAND")->getLength();
|
||||
int parallelism = 16;
|
||||
|
||||
|
||||
if(populate(myNdb) > 0)
|
||||
std::cout << "populate: Success!" << std::endl;
|
||||
|
||||
if(scan_print(myNdb, parallelism, column_len_brand, column_len_color) > 0)
|
||||
std::cout << "scan_print: Success!" << std::endl << std::endl;
|
||||
|
||||
std::cout << "Going to delete all pink cars!" << std::endl;
|
||||
if(scan_delete(myNdb, parallelism, column_color,
|
||||
column_len_color, "Pink") > 0)
|
||||
std::cout << "scan_delete: Success!" << std::endl << std::endl;
|
||||
|
||||
if(scan_print(myNdb, parallelism, column_len_brand, column_len_color) > 0)
|
||||
std::cout << "scan_print: Success!" << std::endl << std::endl;
|
||||
|
||||
std::cout << "Going to update all blue cars to black cars!" << std::endl;
|
||||
if(scan_update(myNdb, parallelism, column_len_color, column_color,
|
||||
"COLOR", "Blue", "Black") > 0)
|
||||
{
|
||||
std::cout << "scan_update: Success!" << std::endl << std::endl;
|
||||
}
|
||||
if(scan_print(myNdb, parallelism, column_len_brand, column_len_color) > 0)
|
||||
std::cout << "scan_print: Success!" << std::endl << std::endl;
|
||||
|
||||
delete myNdb;
|
||||
}
|
||||
|
3
ndb/examples/ndbapi_scan_example/readme.txt
Normal file
|
@ -0,0 +1,3 @@
|
|||
1. Set NDB_OS in Makefile
|
||||
2. Add path to libNDB_API.so in LD_LIBRARY_PATH
|
||||
3. Set NDB_CONNECTSTRING
|
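The three steps above are the only setup the example needs: step 1 is an edit to the Makefile, steps 2 and 3 are environment variables. A minimal Bourne-shell sketch, assuming a Linux host, an installation under $NDB_TOP, and a placeholder connect string (use the address of your own management server):

  # step 1: edit the Makefile and set, for example, NDB_OS = LINUX
  # step 2: make libNDB_API.so visible to the runtime linker
  LD_LIBRARY_PATH=$NDB_TOP/lib:$LD_LIBRARY_PATH; export LD_LIBRARY_PATH
  # step 3: point the NDB API at the management server (placeholder value)
  NDB_CONNECTSTRING="host=localhost:2200"; export NDB_CONNECTSTRING
  make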
33
ndb/examples/select_all/Makefile
Normal file
|
@ -0,0 +1,33 @@
|
|||
-include .defs.mk
|
||||
#NDB_OS = OS_YOU_ARE_RUNNING_ON
|
||||
#You need to set the NDB_OS variable here
|
||||
TARGET = select_all
|
||||
SRCS = select_all.cpp
|
||||
OBJS = select_all.o
|
||||
CXX = g++
|
||||
CFLAGS = -c -Wall -fno-rtti -fno-exceptions
|
||||
DEBUG =
|
||||
LFLAGS = -Wall
|
||||
INCLUDE_DIR = ../../include
|
||||
LIB_DIR = ../../lib
|
||||
ifeq ($(NDB_OS), SOLARIS)
|
||||
# Here is the definition of system libraries necessary for Solaris 7
|
||||
SYS_LIB =
|
||||
endif
|
||||
ifeq ($(NDB_OS), LINUX)
|
||||
# Here is the definition of system libraries necessary for Linux 2.4
|
||||
SYS_LIB =
|
||||
endif
|
||||
ifeq ($(NDB_OS), MACOSX)
|
||||
# Here is the definition of system libraries necessary for Mac OS X
|
||||
SYS_LIB =
|
||||
endif
|
||||
|
||||
$(TARGET): $(OBJS)
|
||||
$(CXX) $(LFLAGS) -L$(LIB_DIR) $(OBJS) -lNDB_API $(SYS_LIB) -o $(TARGET)
|
||||
|
||||
$(TARGET).o: $(SRCS)
|
||||
$(CXX) $(CFLAGS) -I$(INCLUDE_DIR) -I$(INCLUDE_DIR)/ndbapi $(SRCS)
|
||||
|
||||
clean:
|
||||
rm -f *.o $(TARGET)
|
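If no .defs.mk is present, NDB_OS can also be passed on the make command line. A hedged example of building and running select_all, assuming a Linux host and a hypothetical table name MYTABLE (the libraries live in ../../lib as set by LIB_DIR above):

  make NDB_OS=LINUX
  LD_LIBRARY_PATH=../../lib:$LD_LIBRARY_PATH ./select_all MYTABLE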
258
ndb/examples/select_all/select_all.cpp
Normal file
|
@ -0,0 +1,258 @@
|
|||
/* Copyright (C) 2003 MySQL AB
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation; either version 2 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program; if not, write to the Free Software
|
||||
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */
|
||||
|
||||
//
|
||||
// select_all.cpp: Prints all rows of a table
|
||||
//
|
||||
// Usage: select_all <table_name>
|
||||
|
||||
#include <NdbApi.hpp>
|
||||
|
||||
// Used for cout
|
||||
#include <iostream>
|
||||
using namespace std;
|
||||
#include <stdio.h>
|
||||
#include <string.h>
|
||||
|
||||
#define APIERROR(error) \
|
||||
{ cout << "Error in " << __FILE__ << ", line:" << __LINE__ << ", code:" \
|
||||
<< error.code << ", msg: " << error.message << "." << endl; \
|
||||
exit(-1); }
|
||||
|
||||
void usage(const char* prg) {
|
||||
cout << "Usage: " << prg << " <table name>" << endl;
|
||||
cout << "Prints all rows of table named <table name>" << endl;
|
||||
exit(0);
|
||||
}
|
||||
|
||||
/*****************************************************************************
|
||||
*************************** Result Set Container ****************************
|
||||
*****************************************************************************/
|
||||
|
||||
/*
|
||||
* Container of NdbRecAttr objects.
|
||||
* (NdbRecAttr objects hold the attribute values read by a scan operation.)
|
||||
*/
|
||||
class ResultSetContainer {
|
||||
public:
|
||||
/**
|
||||
* Initialize ResultSetContainer object for table named <tableName>
|
||||
* - Allocates memory
|
||||
* - Fetches attribute names from NDB Cluster
|
||||
*/
|
||||
void init(NdbDictionary::Dictionary* dict, const char* tableName);
|
||||
|
||||
/**
|
||||
* Get no of attributes for stored NdbRecAttr objects
|
||||
*/
|
||||
int getNoOfAttributes() const;
|
||||
|
||||
/**
|
||||
* Get NdbRecAttr object no i
|
||||
*/
|
||||
NdbRecAttr* & getAttrStore(int i);
|
||||
|
||||
/**
|
||||
* Get attribute name of attribute no i
|
||||
*/
|
||||
const char* getAttrName(int i) const;
|
||||
|
||||
/**
|
||||
* Print header of rows
|
||||
*/
|
||||
void header() const;
|
||||
|
||||
private:
|
||||
int m_cols; // No of attributes for stored NdbRecAttr objects
|
||||
char **m_names; // Names of attributes
|
||||
NdbRecAttr **m_data; // The actual stored NdbRecAttr objects
|
||||
};
|
||||
|
||||
void ResultSetContainer::init(NdbDictionary::Dictionary * dict,
|
||||
const char* tableName)
|
||||
{
|
||||
// Get Table object from NDB (this contains metadata about all tables)
|
||||
const NdbDictionary::Table * tab = dict->getTable(tableName);
|
||||
|
||||
// Check that the table we are interested in exists
|
||||
if (tab == 0) APIERROR(dict->getNdbError()); // E.g. table didn't exist
|
||||
|
||||
// Get no of attributes and allocate memory
|
||||
m_cols = tab->getNoOfColumns();
|
||||
m_names = new char* [m_cols];
|
||||
m_data = new NdbRecAttr* [m_cols];
|
||||
|
||||
// Store all attribute names for the table
|
||||
for (int i = 0; i < m_cols; i++) {
|
||||
m_names[i] = new char[255];
|
||||
snprintf(m_names[i], 255, "%s", tab->getColumn(i)->getName());
|
||||
}
|
||||
}
|
||||
|
||||
int ResultSetContainer::getNoOfAttributes() const {return m_cols;}
|
||||
NdbRecAttr*& ResultSetContainer::getAttrStore(int i) {return m_data[i];}
|
||||
const char* ResultSetContainer::getAttrName(int i) const {return m_names[i];}
|
||||
|
||||
/*****************************************************************************
|
||||
********************************** MAIN ***********************************
|
||||
*****************************************************************************/
|
||||
|
||||
int main(int argc, const char** argv)
|
||||
{
|
||||
Ndb* myNdb = new Ndb("ndbapi_example4"); // Object representing the database
|
||||
NdbConnection* myNdbConnection; // For transactions
|
||||
NdbOperation* myNdbOperation; // For operations
|
||||
int check;
|
||||
|
||||
if (argc != 2) {
|
||||
usage(argv[0]);
|
||||
exit(0);
|
||||
}
|
||||
const char* tableName = argv[1];
|
||||
|
||||
/*******************************************
|
||||
* Initialize NDB and wait until its ready *
|
||||
*******************************************/
|
||||
if (myNdb->init() == -1) {
|
||||
APIERROR(myNdb->getNdbError());
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
if (myNdb->waitUntilReady(30) != 0) {
|
||||
cout << "NDB was not ready within 30 secs." << endl;
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
/***************************
|
||||
* Define and execute scan *
|
||||
***************************/
|
||||
cout << "Select * from " << tableName << endl;
|
||||
|
||||
ResultSetContainer * container = new ResultSetContainer;
|
||||
container->init(myNdb->getDictionary(), tableName);
|
||||
|
||||
myNdbConnection = myNdb->startTransaction();
|
||||
if (myNdbConnection == NULL) APIERROR(myNdb->getNdbError());
|
||||
|
||||
myNdbOperation = myNdbConnection->getNdbOperation(tableName);
|
||||
if (myNdbOperation == NULL) APIERROR(myNdbConnection->getNdbError());
|
||||
|
||||
// Define the operation to be an 'openScanRead' operation.
|
||||
check = myNdbOperation->openScanRead(1);
|
||||
if (check == -1) APIERROR(myNdbConnection->getNdbError());
|
||||
|
||||
// Set interpreted program to just be the single instruction
|
||||
// 'interpret_exit_ok'. (This approves all rows of the table.)
|
||||
if (myNdbOperation->interpret_exit_ok() == -1)
|
||||
APIERROR(myNdbConnection->getNdbError());
|
||||
|
||||
// Get all attribute values of the row
|
||||
for(int i = 0; i < container->getNoOfAttributes(); i++){
|
||||
if((container->getAttrStore(i) =
|
||||
myNdbOperation->getValue(container->getAttrName(i))) == 0)
|
||||
APIERROR(myNdbConnection->getNdbError());
|
||||
}
|
||||
|
||||
// Execute scan operation
|
||||
check = myNdbConnection->executeScan();
|
||||
if (check == -1) APIERROR(myNdbConnection->getNdbError());
|
||||
|
||||
/****************
|
||||
* Print header *
|
||||
****************/
|
||||
for (int i = 0; i < container->getNoOfAttributes(); i++)
|
||||
cout << container->getAttrName(i) << "\t";
|
||||
|
||||
cout << endl;
|
||||
for (int i = 0; i < container->getNoOfAttributes(); i++) {
|
||||
for (int j = strlen(container->getAttrName(i)); j > 0; j--)
|
||||
cout << "-";
|
||||
cout << "\t";
|
||||
}
|
||||
cout << "\n";
|
||||
|
||||
/**************
|
||||
* Scan table *
|
||||
**************/
|
||||
int eof;
|
||||
int rows = 0;
|
||||
|
||||
// Print all rows of table
|
||||
while ((eof = myNdbConnection->nextScanResult()) == 0) {
|
||||
rows++;
|
||||
|
||||
for (int i = 0; i < container->getNoOfAttributes(); i++) {
|
||||
if (container->getAttrStore(i)->isNULL()) {
|
||||
cout << "NULL";
|
||||
} else {
|
||||
|
||||
// Element size of value (no of bytes per element in attribute value)
|
||||
const int size = container->getAttrStore(i)->attrSize();
|
||||
|
||||
// No of elements in an array attribute (Is 1 if non-array attribute)
|
||||
const int aSize = container->getAttrStore(i)->arraySize();
|
||||
|
||||
switch(container->getAttrStore(i)->attrType()){
|
||||
case UnSigned:
|
||||
switch(size) {
|
||||
case 8: cout << container->getAttrStore(i)->u_64_value(); break;
|
||||
case 4: cout << container->getAttrStore(i)->u_32_value(); break;
|
||||
case 2: cout << container->getAttrStore(i)->u_short_value(); break;
|
||||
case 1: cout << (unsigned) container->getAttrStore(i)->u_char_value();
|
||||
break;
|
||||
default: cout << "Unknown size" << endl;
|
||||
}
|
||||
break;
|
||||
|
||||
case Signed:
|
||||
switch(size) {
|
||||
case 8: cout << container->getAttrStore(i)->int64_value(); break;
|
||||
case 4: cout << container->getAttrStore(i)->int32_value(); break;
|
||||
case 2: cout << container->getAttrStore(i)->short_value(); break;
|
||||
case 1: cout << (int) container->getAttrStore(i)->char_value(); break;
|
||||
default: cout << "Unknown size" << endl;
|
||||
}
|
||||
break;
|
||||
|
||||
case String:
|
||||
{
|
||||
char* buf = new char[aSize+1];
|
||||
memcpy(buf, container->getAttrStore(i)->aRef(), aSize);
|
||||
buf[aSize] = 0;
|
||||
cout << buf;
|
||||
delete [] buf;
|
||||
}
|
||||
break;
|
||||
|
||||
case Float:
|
||||
cout << container->getAttrStore(i)->float_value();
|
||||
break;
|
||||
|
||||
default:
|
||||
cout << "Unknown";
|
||||
break;
|
||||
}
|
||||
}
|
||||
cout << "\t";
|
||||
}
|
||||
cout << endl;
|
||||
}
|
||||
if (eof == -1) APIERROR(myNdbConnection->getNdbError());
|
||||
|
||||
myNdb->closeTransaction(myNdbConnection);
|
||||
|
||||
cout << "Selected " << rows << " rows." << endl;
|
||||
}
|
BIN
ndb/home/bin/Linuxmkisofs
Executable file
Binary file not shown.
BIN
ndb/home/bin/Solarismkisofs
Executable file
Binary file not shown.
1865
ndb/home/bin/cvs2cl.pl
Executable file
File diff suppressed because it is too large
569
ndb/home/bin/cvschk
Executable file
|
@ -0,0 +1,569 @@
|
|||
#!/usr/bin/perl -w
|
||||
#
|
||||
# cvschk -- fast offline check for new files and modifications of files
|
||||
|
||||
# cvschk : A perl program which checks the status of the CVS controlled
|
||||
# files and gives an ASCII table sorted by the status of files.
|
||||
#
|
||||
# If you have used CVS, then you know that it is hard to
|
||||
# get a good overview of the CVS-status of the files in your
|
||||
# directories. Any new files? Any files changed?
|
||||
# cvschk will help the programmer get the overview in the
|
||||
# situation, where we do not have access to the CVS repository.
|
||||
#
|
||||
# Note that the program does only local checks of the files
|
||||
# If you have fast access to the CVS repository, then consider
|
||||
# the cvsstat-program - which additionally can tell if other
|
||||
# people have made newer versions of the files.
|
||||
#
|
||||
# The program requires Perl 5.004 (maybe previous versions also work).
|
||||
#
|
||||
# It is tuned to parse the output of cvs(1) version 1.9.
|
||||
# Earlier and later versions may require modifications to the script.
|
||||
#
|
||||
# ** Note that the first line might be wrong depending **
|
||||
# ** on the location of your perl program. **
|
||||
#
|
||||
# Sample output:
|
||||
# The directory ./mytempdir is not under CVS control
|
||||
#
|
||||
# Changed files
|
||||
# ---------------
|
||||
# ./cvs2html
|
||||
# ./cvschk
|
||||
# ./cvsstat
|
||||
#
|
||||
# New files
|
||||
# ---------------
|
||||
# ./.#cvschk
|
||||
# ./XX
|
||||
# ./cvs2html.ok
|
||||
#
|
||||
# Deleted files
|
||||
# ---------------
|
||||
# (none)
|
||||
|
||||
# Changelog:
|
||||
#
|
||||
# Ver Date Author Changelog
|
||||
# --- ---------- -------------------- -------------------------------------
|
||||
# 1.12 2002-01-04 Michael Kohne Fixed a $foo=<> warning for
|
||||
# 5.004_01 with defined($foo=<>)
|
||||
# Added a --tabular|-t switch
|
||||
#
|
||||
# 1.11 2001-12-27 Michael Kohne Added cvsignore functionality
|
||||
# Handling of 'dummy timestamp'
|
||||
# Handling of 'Result of Merge'
|
||||
#
|
||||
# 1.10 2001-11-06 Michael Kohne Added -r and -l options
|
||||
#
|
||||
# 1.9 2001-08-03 Lars G. T. Jørgensen Hack to allow special entry-line
|
||||
#
|
||||
# 1.8 2001-06-07 Peter Toft Back to the same as 1.6
|
||||
# CVS is my friend
|
||||
#
|
||||
# 1.7 2001-06-04 Peter Toft Peter was very tired and
|
||||
# applied a wrong patch -
|
||||
# version 1.7 is crap
|
||||
#
|
||||
# 1.6 2000-12-17 Peter Toft Better description added
|
||||
#
|
||||
# 1.5 2000-11-04 Peter Toft URL of cvsstat changed
|
||||
#
|
||||
# 1.4 2000-09-20 Peter Toft Must show deleted files also
|
||||
# as the default
|
||||
#
|
||||
# 1.3 2000-08-08 Ole Tange and Initial version
|
||||
# Peter Toft
|
||||
# ---- ---------- -------------------- -------------------------------------
|
||||
#
|
||||
# -----------------------------------------------------------------------------
|
||||
#
|
||||
# This program is protected by the GPL, and all modifications of
|
||||
# general interest should be emailed to the maintainer (pto@sslug.dk).
|
||||
#
|
||||
# This program also uses code parts from cvsstat
|
||||
# (same homepage as cvschk)
|
||||
#
|
||||
# Copyright 2000,2001 by Peter Toft <pto@sslug.dk> and Ole Tange <ole@tange.dk>
|
||||
# as well as
|
||||
# Lars G. T. Jørgensen <larsj@diku.dk>
|
||||
#
|
||||
# The URL of the home page of cvschk is shown below.
|
||||
|
||||
|
||||
use Time::Local;
|
||||
use strict;
|
||||
use Getopt::Long;
|
||||
|
||||
my $startdir = ".";
|
||||
|
||||
my $debug = 0;
|
||||
my (%files,%filesok,%seen,%skip);
|
||||
|
||||
|
||||
# Michael Kohne 12/16/01
|
||||
#
|
||||
# Simulation of .cvsignore as CVS does it...
|
||||
#
|
||||
# using .cvsignore handling makes cvschk take from 2 to 3 times
|
||||
# longer to run over the same set of files.
|
||||
# in my tests, disabling cvsignore altogether, cvschk takes .2
|
||||
# seconds on my working directory. Adding cvsignore,takes
|
||||
# .4 seconds.
|
||||
# Note that I do not use individual .cvsignore files - if there
|
||||
# are a lot of them in your directory tree, it will add run time
|
||||
#
|
||||
# variables used for .cvsignore handling
|
||||
my $initcvsignoreregex;# regex holding all startup cvsignore patterns (no ())
|
||||
my $cvsignoreregex;# one regex holding all current cvsignore patterns
|
||||
my $disable_cvsignore=0;# set to 1 to disable cvsignore emulation
|
||||
# (available in case it's REALLY screwed up)
|
||||
my $disable_ind_cvsignore=0;# set to 1 to disable finding .cvsignore files
|
||||
# in each directory.
|
||||
my $debug_cvsignore = 0; # For debugging .cvsignore problems
|
||||
|
||||
my %mon;
|
||||
@mon{qw(Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec)}=
|
||||
0..11; # Perl months are 0 .. 11
|
||||
|
||||
my ($version) = ('$Revision: 1.12 $ ' =~ /^\$\w+: (.*) \$ $/);
|
||||
my $URL = "http://cvs.sslug.dk/cvs2html";
|
||||
my $version_line = "cvschk version $version (see $URL)\n";
|
||||
|
||||
my $opt_all;
|
||||
my $restrict;
|
||||
my $local;
|
||||
my $tabular;
|
||||
|
||||
my $opt_restrict;
|
||||
|
||||
sub show_version {print $version_line}
|
||||
|
||||
sub die_version {die $version_line}
|
||||
|
||||
sub die_usage {
|
||||
my $bundled = ($] > 5.00399
|
||||
? "can be bundled"
|
||||
: "can't be bundled, because your Perl is too old");
|
||||
die <<END_OF_USAGE; # Help in the style of GNU `ls --help' or `make --help'
|
||||
Usage: $0 [OPTION]...
|
||||
Show the CVS status of FILEs (the current directory by default),
|
||||
traversing directories recursively and telling if new files exist
|
||||
in the repository.
|
||||
Options:
|
||||
-a, --all Show all statistics, including the names of files that
|
||||
are up to date, used tags, ignored patterns and more
|
||||
-r, --restrict Don't show the names of the unknown files
|
||||
(useful if you have many temporary files)
|
||||
-t, --tabular Show one file per line, each preceded with a status word,
|
||||
sorted by filename.
|
||||
-l, --local Don't descend into sub-directories
|
||||
-d, --debug Debug info
|
||||
-h, --help Show this help and exit immediately
|
||||
-V, --version Show the version line and exit immediately
|
||||
The one-letter options $bundled.
|
||||
END_OF_USAGE
|
||||
}
|
||||
|
||||
sub die_help {show_version; die_usage}
|
||||
|
||||
# Let `-ar' mean `-a -r' and require `--all' (or -a) instead of `-all'.
|
||||
if ($] > 5.00399) { # This requires 5.004, so silently skip it for older Perls.
|
||||
eval {Getopt::Long::config("bundling")}; # avoid 5.003 compilation error
|
||||
warn $@ if $@; # For Perl 5.004+ we do want to see any compilation error
|
||||
}
|
||||
|
||||
|
||||
GetOptions( "all|a" => \$opt_all,
|
||||
"tabular|t" => \$tabular,
|
||||
"restrict|r" => \$restrict,
|
||||
"local|l" => \$local,
|
||||
"help|h" => \&die_help,
|
||||
"debug|d" => \$debug,
|
||||
"version|V" => \&die_version,
|
||||
) or die_usage;
|
||||
|
||||
sub cvs_changed_in_dir($); #define prototype (for recursion)
|
||||
|
||||
# functions for .cvsignore handling
|
||||
|
||||
# converts a given filename pattern
|
||||
# (of the sort that sh(1) takes) to
|
||||
# a perl regex of similar meaning.
|
||||
#
|
||||
# It works by doing the following:
|
||||
#
|
||||
# change:
|
||||
# . to \.
|
||||
# $ to \$
|
||||
# * to .*
|
||||
# ? to .
|
||||
#
|
||||
sub fpat_to_regex($)
|
||||
{
|
||||
my $fexp;
|
||||
$fexp = shift;
|
||||
$fexp =~ s/\./\\\./g;#change . to \.
|
||||
$fexp =~ s/\$/\\\$/g;#change dollar sign to \dollar sign
|
||||
$fexp =~ s/\*/.*/g;# change * to .*
|
||||
$fexp =~ s/\?/./g; # change ? to .
|
||||
return $fexp;
|
||||
}
|
||||
|
||||
# copy the input list to one single regex,
|
||||
# items separated by | symbols.
|
||||
# return the regex string
|
||||
sub do_regex_convert
|
||||
{
|
||||
my $rx = "";
|
||||
my $first = 1;#true for first element only
|
||||
|
||||
|
||||
# convert each element of cvsignore into a regex
|
||||
# this makes the patterns usable in perl
|
||||
my $cp;
|
||||
foreach $cp (@_) {
|
||||
if (not $first) { $rx = $rx . "|"; }
|
||||
if ($first) { $first = 0; }
|
||||
$rx = $rx . fpat_to_regex($cp);
|
||||
}
|
||||
|
||||
return $rx;
|
||||
}
|
||||
|
||||
# first parameter is a reference to the array
|
||||
# to be loaded
|
||||
# the rest of the parameters are just items
|
||||
# that need to be loaded into the array.
|
||||
# Note that if a ! is found, the list is
|
||||
# emptied, then further items are added.
|
||||
# returns true if a ! was found
|
||||
sub load_list_from_list
|
||||
{
|
||||
my $arref = shift;# get reference to array from front
|
||||
my $item;
|
||||
my $ret=0;#false means no ! found
|
||||
|
||||
chomp @_;#kill newlines
|
||||
foreach $item (@_) {
|
||||
$item =~ s/^\s*(.*?)\s*$/$1/;#kill leading/trailing whitespace
|
||||
if ($item) { # empty string is false
|
||||
push @$arref,$item;
|
||||
}
|
||||
if ($item eq "!") {
|
||||
@$arref = ();# '!' causes list to clear
|
||||
$ret = 1;# ! found
|
||||
}
|
||||
}
|
||||
|
||||
return $ret;
|
||||
}
|
||||
|
||||
# loads the given list with lines from the
|
||||
# specified file. Note that if a '!' is found
|
||||
# all prior patterns are removed from the list
|
||||
# before the following patterns are loaded
|
||||
# first param is the filename,
|
||||
# second param is a reference to an array
|
||||
# that the data is to go into
|
||||
# returns true if a ! was found
|
||||
sub load_list_from_file
|
||||
{
|
||||
my @inlist;
|
||||
my $fname = shift;#filename to read from
|
||||
#if (not -e $fname) { return; }
|
||||
my $arref = shift;#array to store into
|
||||
open CVSIGNORE,"$fname" or return;#file might not exist, that's OK
|
||||
@inlist = <CVSIGNORE>;
|
||||
close CVSIGNORE;
|
||||
return load_list_from_list($arref,@inlist);
|
||||
}
|
||||
|
||||
# loads $cvsignoreregex from
|
||||
# $initcvsignoreregex and the .cvsignore file
|
||||
# in the local directory
|
||||
sub load_cvsignore
|
||||
{
|
||||
if ($disable_ind_cvsignore) {return;}#don't look for local .cvsignore files
|
||||
if ($disable_cvsignore) {return;}#don't do anything
|
||||
|
||||
my $dir = shift;
|
||||
my @cvsignore;
|
||||
|
||||
# bang will be true if a ! was found. In such cases, I need
|
||||
# to not use the pre-existing regex list.
|
||||
my $bang = load_list_from_file("$dir/.cvsignore",\@cvsignore);
|
||||
|
||||
# if we get a local cvsignore list, then...
|
||||
my $rx = do_regex_convert(@cvsignore);
|
||||
if ($rx) {
|
||||
$cvsignoreregex = "(";
|
||||
if (not $bang) {$cvsignoreregex = $cvsignoreregex . $initcvsignoreregex . "|";}
|
||||
$cvsignoreregex = $cvsignoreregex . $rx . ")";
|
||||
} else {
|
||||
if ($bang) {$cvsignoreregex = "";}
|
||||
else {$cvsignoreregex = "(" . $initcvsignoreregex . ")";}
|
||||
}
|
||||
|
||||
if ($debug_cvsignore) {print $dir,":",$cvsignoreregex, "\n";}
|
||||
}
|
||||
|
||||
|
||||
# loads all of the cvsignore patterns that
|
||||
# can be loaded at script startup
|
||||
sub load_initial_cvsignore()
|
||||
{
|
||||
#load the default patterns
|
||||
# (taken from http://www.gnu.org/manual/cvs-1.9/html_node/cvs_141.html#IDX399)
|
||||
#
|
||||
# this gives you the patterns that cvs normally starts with
|
||||
my @initcvsignore;
|
||||
push @initcvsignore,("RCS");
|
||||
push @initcvsignore,("SCCS");
|
||||
push @initcvsignore,("CVS");
|
||||
push @initcvsignore,("CVS.adm");
|
||||
push @initcvsignore,("RCSLOG");
|
||||
push @initcvsignore,("cvslog.*");
|
||||
push @initcvsignore,("tags");
|
||||
push @initcvsignore,("TAGS");
|
||||
push @initcvsignore,(".make.state");
|
||||
push @initcvsignore,(".nse_depinfo");
|
||||
push @initcvsignore,("*~");
|
||||
push @initcvsignore,("\#*");
|
||||
push @initcvsignore,(".\#*");
|
||||
push @initcvsignore,("\,*");
|
||||
push @initcvsignore,("_\$\*");
|
||||
push @initcvsignore,("*\$");
|
||||
push @initcvsignore,("*.old");
|
||||
push @initcvsignore,("*.bak");
|
||||
push @initcvsignore,("*.BAK");
|
||||
push @initcvsignore,("*.orig");
|
||||
push @initcvsignore,("*.rej");
|
||||
push @initcvsignore,(".del-*");
|
||||
push @initcvsignore,("*.a");
|
||||
push @initcvsignore,("*.olb");
|
||||
push @initcvsignore,("*.o");
|
||||
push @initcvsignore,("*.obj");
|
||||
push @initcvsignore,("*.so");
|
||||
push @initcvsignore,("*.exe");
|
||||
push @initcvsignore,("*.Z");
|
||||
push @initcvsignore,("*.elc");
|
||||
push @initcvsignore,("*.ln");
|
||||
push @initcvsignore,("core");
|
||||
|
||||
|
||||
# now, load (in proper order!)
|
||||
# each of the possible cvsignore files
|
||||
|
||||
# there are 4 possible .cvsignore files:
|
||||
|
||||
# $CVSROOT/CVSROOT/cvsignore
|
||||
# ~/.cvsignore
|
||||
# $CVSIGNORE environment variable
|
||||
# .cvsignore in current directory
|
||||
|
||||
# The first (CVSROOT/cvsignore) would require calling cvs, so
|
||||
# we won't do that one.
|
||||
# The last (.cvsignore in current directory) is done
|
||||
# for each directory. It's handled in the load_cvsignore routine.
|
||||
|
||||
# ~/.cvsignore
|
||||
my @inlist;
|
||||
my $item;
|
||||
my $HOME=$ENV{"HOME"};
|
||||
if (not $HOME) {$HOME = ".";}
|
||||
load_list_from_file("$HOME/.cvsignore",\@initcvsignore);
|
||||
|
||||
# $CVSIGNORE environment variable
|
||||
my $igstr = $ENV{"CVSIGNORE"}; # get env var
|
||||
if ($igstr) {
|
||||
my @iglist = split(/\s+/, $igstr); #if it exists, convert to list
|
||||
load_list_from_list(\@initcvsignore,@iglist);
|
||||
}
|
||||
|
||||
# now that @initcvsignore is setup,
|
||||
# turn it into a regex string
|
||||
$initcvsignoreregex = do_regex_convert(@initcvsignore);
|
||||
|
||||
# now preset the cvsignore regex string to match
|
||||
# @initcvsignore. That way, if we aren't using local
|
||||
# cvsignore files, we do nothing.
|
||||
$cvsignoreregex = "(" . $initcvsignoreregex . ")";
|
||||
}
|
||||
# routine to see if the given name is in the cvsignore regex
|
||||
# returns true if it is, false if it's not
|
||||
sub ignore_file($)
|
||||
{
|
||||
#allow user to disable the cvsignore stuff
|
||||
if ($disable_cvsignore) {return 0;}
|
||||
if (not $cvsignoreregex) {return 0;}# if regex is empty, nothing matches the regex
|
||||
my $filename = shift;
|
||||
|
||||
if ($debug_cvsignore) {print "ignore_file:",$filename,"\n";}
|
||||
|
||||
if ($filename =~ $cvsignoreregex) {
|
||||
if ($debug_cvsignore) {print $filename," matches\n";}
|
||||
return 1;
|
||||
}
|
||||
|
||||
if ($debug_cvsignore) {print $filename," doesn't match\n";}
|
||||
return 0;
|
||||
}
|
||||
|
||||
sub cvs_changed_in_dir($) {
|
||||
my $dir = shift;
|
||||
|
||||
my ($line,$filename,$version,$mtime,$date,
|
||||
$dir_filename,$cvstime,@subdirs,
|
||||
@new_in_dir,$i);
|
||||
|
||||
# Examine status of files in CVS/Entries
|
||||
if(not open(ENTRIES,"$dir/CVS/Entries")) {
|
||||
if ($tabular) {
|
||||
push @{$files{Unknown}}, $dir;
|
||||
}
|
||||
else {
|
||||
warn "The directory $dir is not under CVS control\n";
|
||||
}
|
||||
} else {
|
||||
load_cvsignore($dir);#load up proper cvsignore for given directory
|
||||
|
||||
while(defined ($line=<ENTRIES>)) {
|
||||
# Parse CVS/Entries-line
|
||||
$line=~m!^/(.*)/(.*)/(.*)/.*/! or do {
|
||||
$debug and warn("Skipping entry-line $line");
|
||||
next;
|
||||
};
|
||||
($filename,$version,$date) = ($1,$2,$3);
|
||||
$dir_filename=$dir."/".$filename;
|
||||
|
||||
# Mark this file as seen
|
||||
$seen{$dir_filename}=1;
|
||||
|
||||
# if not exists: Deleted
|
||||
if(not -e $dir_filename) {
|
||||
push @{$files{Deleted}}, $dir_filename; next;
|
||||
}
|
||||
# if dir: save name for recursion
|
||||
-d $dir_filename and do {
|
||||
push @subdirs, $dir_filename; next;
|
||||
};
|
||||
|
||||
# modification time of $dir_filename
|
||||
$mtime= (stat $dir_filename)[9];
|
||||
|
||||
|
||||
if($date eq "dummy timestamp") {
|
||||
# dummy timestamp means it's new to the repository.
|
||||
push @{$files{Changed}}, $dir_filename;
|
||||
if ($debug) {
|
||||
print "$dir_filename is changed\n";
|
||||
}
|
||||
}
|
||||
elsif($date eq "Result of merge") {
|
||||
# result of merge means it's changed, then updated.
|
||||
push @{$files{Changed}}, $dir_filename;
|
||||
if ($debug) {
|
||||
print "$dir_filename is changed\n";
|
||||
}
|
||||
}
|
||||
elsif(not
|
||||
$date=~/... (...)\s+(\d+)\s+(\d+):(\d+):(\d+) (\d{4})/)
|
||||
{
|
||||
# bogus entry in Entries
|
||||
warn "Warning: $dir_filename -> '$date' ".
|
||||
"not in ctime(3) format\n";
|
||||
} else {
|
||||
$cvstime=timegm($5,$4,$3,$2,$mon{$1},$6);
|
||||
if($cvstime != $mtime) {
|
||||
push @{$files{Changed}}, $dir_filename;
|
||||
if ($debug) {
|
||||
print "$dir_filename is changed\n";
|
||||
}
|
||||
} else {
|
||||
push @{$files{Unchanged}}, $dir_filename;
|
||||
if ($debug) {
|
||||
print "$dir_filename is Unchanged\n";
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
close ENTRIES;
|
||||
|
||||
# Locate any new files/dirs
|
||||
if(not opendir(D,$dir)) {
|
||||
warn("Cannot open $dir");
|
||||
@new_in_dir= ();
|
||||
} else {
|
||||
@skip{qw(. .. CVS)}=1..3; # Filenames that we want to ignore
|
||||
#(note: these are exact filenames)
|
||||
@new_in_dir=
|
||||
(grep { not $seen{$_} } # files we have not already processed
|
||||
map { $dir."/".$_ } # map from file to dir/file
|
||||
grep { not ignore_file($_) } # ignore files in the cvsignore list
|
||||
grep { not $skip{$_} } # skip files to be ignored
|
||||
readdir(D));
|
||||
closedir(D);
|
||||
}
|
||||
|
||||
# Remember new files (actually non-directories)
|
||||
push @{$files{New}}, grep { not -d $_ } @new_in_dir;
|
||||
if ($debug) { print "@{$files{New}} are new in $dir\n"; }
|
||||
|
||||
# Remember new subdirs
|
||||
push @subdirs, grep { -d $_ } @new_in_dir;
|
||||
|
||||
# Recurse all subdirs
|
||||
if (not $local) {
|
||||
for $i (@subdirs) { cvs_changed_in_dir($i); }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sub print_status()
|
||||
{
|
||||
my $k;
|
||||
my %show_these_states = ("Changed" => 1);
|
||||
if(not $restrict) {
|
||||
$show_these_states{"New"} = 1;
|
||||
$show_these_states{"Deleted"} = 1;
|
||||
}
|
||||
|
||||
if($opt_all) { $show_these_states{"Unchanged"} = 1; }
|
||||
|
||||
if ($tabular) {
|
||||
my %allfiles; # key: filesname, value: state
|
||||
my ($file, $state, $statefiles);
|
||||
|
||||
$show_these_states{"Unknown"} = 1;
|
||||
while (($state, $statefiles) = each %files) {
|
||||
for my $f (@{$statefiles}) {
|
||||
$allfiles{$f} = $state;
|
||||
}
|
||||
}
|
||||
for $file (sort keys %allfiles) {
|
||||
$state = $allfiles{$file};
|
||||
printf("%-10s %s\n", $state, $file) if $show_these_states{$state};
|
||||
}
|
||||
}
|
||||
else {
|
||||
print "\n";
|
||||
for $k (keys %show_these_states) {
|
||||
if(not $files{$k} or not @{$files{$k}}) {
|
||||
# no files
|
||||
$files{$k}=["(none)"];
|
||||
}
|
||||
print("$k files\n",
|
||||
"---------------\n",
|
||||
map { "$_\n" } sort @{$files{$k}});
|
||||
print "\n";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
load_initial_cvsignore();
|
||||
if ($debug_cvsignore) {print "initial regex:",$cvsignoreregex,"\n";}
|
||||
cvs_changed_in_dir($startdir);
|
||||
print_status();
|
||||
|
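As a quick reference for the usage text above, two illustrative invocations of the script, run from the top of a checked-out working copy (the paths are assumptions, not part of the original sources):

  ./cvschk -t      # one line per file, prefixed with its status, sorted by filename
  ./cvschk -r -l   # changed files only, without descending into sub-directories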
17
ndb/home/bin/fix-cvs-root
Executable file
|
@ -0,0 +1,17 @@
|
|||
#! /bin/sh
|
||||
|
||||
# change all CVS/Root to current CVSROOT
|
||||
|
||||
[ "$CVSROOT" ] || { echo "no CVSROOT in environment" >&2; exit 1; }
|
||||
|
||||
echo "changing all CVS/Root files under `pwd`"
|
||||
sleep 1
|
||||
|
||||
find . -path '*/CVS/Root' -print |
|
||||
while read file; do
|
||||
echo "$file"
|
||||
chmod +w $file || exit 1
|
||||
echo $CVSROOT >$file || exit 1
|
||||
done
|
||||
|
||||
echo "done"
|
158
ndb/home/bin/import-from-bk.sh
Executable file
|
@ -0,0 +1,158 @@
|
|||
#! /bin/sh
|
||||
|
||||
# XXX does not delete files
|
||||
# XXX does not handle nested new dirs
|
||||
# this script screams for perl, no time now
|
||||
# look for bk2cvs on the net
|
||||
|
||||
PATH=/usr/local/bin:$PATH; export PATH
|
||||
LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH; export LD_LIBRARY_PATH
|
||||
|
||||
batch=n
|
||||
if [ "$1" = "-batch" ]; then
|
||||
batch=y
|
||||
shift
|
||||
fi
|
||||
|
||||
say() {
|
||||
echo "$*"
|
||||
}
|
||||
|
||||
die() {
|
||||
case $# in
|
||||
0) set -- "command failed" ;;
|
||||
esac
|
||||
say "$* -- aborted" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
usage() {
|
||||
die "usage: $0 [-batch] top -- copy from mysql/ndb to another NDB_TOP"
|
||||
}
|
||||
|
||||
doit() {
|
||||
cmd="$*"
|
||||
if [ $batch = n ]; then
|
||||
echo -n "$cmd [y]"
|
||||
read junk
|
||||
sh -c "$cmd"
|
||||
return 0
|
||||
else
|
||||
echo "$cmd"
|
||||
sh -c "$cmd"
|
||||
return $?
|
||||
fi
|
||||
}
|
||||
|
||||
say "======================"
|
||||
say "`date`"
|
||||
|
||||
case $# in
|
||||
1) [ -d $1/src/CVS ] || die "$1 is not an NDB_TOP"
|
||||
top=$1 ;;
|
||||
*) usage ;;
|
||||
esac
|
||||
|
||||
if ! fgrep ndb_kernel_version.h $top/include/kernel/CVS/Entries >/dev/null 2>&1; then
|
||||
die "$top is not an NDB_TOP"
|
||||
fi
|
||||
|
||||
if find $top -path '*/CVS/Tag' -print | grep . >/dev/null; then
|
||||
die "$top: contains CVS/Tag files, not accepted"
|
||||
fi
|
||||
|
||||
if [ ! -f include/SCCS/s.ndb_version.h ]; then
|
||||
die "current dir ($PWD) is not an NDB_TOP"
|
||||
fi
|
||||
|
||||
doit "bk pull" || exit 1
|
||||
doit "bk -r clean"
|
||||
doit "bk -r get -q"
|
||||
|
||||
files=`bk -r. sfiles -g |
|
||||
fgrep -v ' ' |
|
||||
fgrep -v /.cvsignore`
|
||||
|
||||
n=0
|
||||
files2=
|
||||
for f in $files; do
|
||||
if [ ! -f $f ]; then
|
||||
die "$f: no such file"
|
||||
fi
|
||||
if [ -w $f ]; then
|
||||
say "$f: is writable, accept anyway"
|
||||
fi
|
||||
files2="$files2 $f"
|
||||
n=$((n+1))
|
||||
done
|
||||
files=$files2
|
||||
say "$n files..."
|
||||
|
||||
adddirs= addfiles= updfiles=
|
||||
for f in $files; do
|
||||
d=`dirname $f`
|
||||
b=`basename $f`
|
||||
if [ ! -f $top/$d/CVS/Entries ]; then
|
||||
found=n
|
||||
for x in $adddirs; do
|
||||
if [ $x = $d ]; then found=y; break; fi
|
||||
done
|
||||
if [ $found = n ]; then
|
||||
say "$d: to create dir"
|
||||
adddirs="$adddirs $d"
|
||||
fi
|
||||
addfiles="$addfiles $f"
|
||||
say "$f: to create"
|
||||
elif ! fgrep "/$b/" $top/$d/CVS/Entries >/dev/null; then
|
||||
addfiles="$addfiles $f"
|
||||
say "$f: to create"
|
||||
else
|
||||
cmp $f $top/$f >/dev/null
|
||||
case $? in
|
||||
0) continue ;;
|
||||
1) ;;
|
||||
*) die "$f: unknown error" ;;
|
||||
esac
|
||||
updfiles="$updfiles $f"
|
||||
say "$f: to update"
|
||||
fi
|
||||
done
|
||||
|
||||
for d in $adddirs; do
|
||||
doit "cd $top && mkdir -p $d" || die
|
||||
done
|
||||
|
||||
for f in $addfiles $updfiles; do
|
||||
doit "cp -fp $f $top/$f" || die
|
||||
done
|
||||
|
||||
for d in $adddirs; do
|
||||
# fix 1 level up
|
||||
d2=`dirname $d`
|
||||
if [ ! -d $top/$d2/CVS ]; then
|
||||
doit "cd $top && cvs add $d2" || die
|
||||
fi
|
||||
doit "cd $top && cvs add $d" || die
|
||||
done
|
||||
|
||||
for f in $addfiles; do
|
||||
kb=
|
||||
if echo $f | perl -nle "print(-B $_)" | grep 1 >/dev/null; then
|
||||
kb="-kb"
|
||||
fi
|
||||
doit "cd $top && cvs add $kb $f" || die
|
||||
done
|
||||
|
||||
tag=import_bk_`date +%Y_%m_%d`
|
||||
|
||||
doit "cd $top && cvs commit -m $tag" || die
|
||||
doit "cd $top && cvs tag -F $tag" || die
|
||||
|
||||
env="NDB_TOP=$top; export NDB_TOP"
|
||||
env="$env; USER_FLAGS='-DAPI_TRACE -fmessage-length=0'; export USER_FLAGS"
|
||||
doit "$env; cd $top && ./configure"
|
||||
doit "$env; cd $top && sh config/GuessConfig.sh"
|
||||
doit "$env; cd $top && make clean nuke-deps vim-tags"
|
||||
doit "$env; cd $top && make" || die
|
||||
|
||||
say "imported ok"
|
27
ndb/home/bin/ndb_deploy
Executable file
|
@ -0,0 +1,27 @@
|
|||
#!/bin/sh
|
||||
|
||||
if [ $# -eq 0 ]
|
||||
then
|
||||
for i in $DEPLOY_DST
|
||||
do
|
||||
rsync -r -v --exclude '*.a' $NDB_TOP/bin $NDB_TOP/lib $i/
|
||||
done
|
||||
else
|
||||
while [ $# -gt 0 ]
|
||||
do
|
||||
arg=$1
|
||||
shift;
|
||||
if [ `echo $arg | grep -c lib` -eq 0 ]
|
||||
then
|
||||
dst=bin/
|
||||
else
|
||||
dst=lib/
|
||||
fi
|
||||
|
||||
for i in $DEPLOY_DST
|
||||
do
|
||||
rsync -v $arg $i/$dst
|
||||
done
|
||||
done
|
||||
fi
|
||||
|
184
ndb/home/bin/ndbdoxy.pl
Executable file
|
@ -0,0 +1,184 @@
|
|||
#!/usr/local/bin/perl
|
||||
#
|
||||
# ndbdoxy.pl Executes doxygen on a checked out version of NDB Cluster
|
||||
#
|
||||
# Written by Lars Thalmann, 2003.
|
||||
|
||||
use strict;
|
||||
umask 000;
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Settings
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
my $root = "/home/elathal/public_html/cvsdoxy";
|
||||
|
||||
$ENV{LD_LIBRARY_PATH} = "/usr/local/lib:/opt/as/local/lib";
|
||||
$ENV{LD_LIBRARY_PATH} = $ENV{LD_LIBRARY_PATH} . ":/opt/as/forte6/SUNWspro/lib";
|
||||
$ENV{PATH} = $ENV{PATH} . ":/usr/local/bin:/opt/as/local/bin";
|
||||
$ENV{PATH} = $ENV{PATH} . ":/opt/as/local/teTeX/bin/sparc-sun-solaris2.8";
|
||||
|
||||
my $DOXYGEN = "doxygen";
|
||||
my $PDFLATEX = "pdflatex";
|
||||
my $MAKEINDEX = "makeindex";
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Argument handling
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
if (@ARGV != 3) {
|
||||
print<<END;
|
||||
Usage:
|
||||
ndbdoxy.pl <module> <title> <version>
|
||||
|
||||
where
|
||||
<module> is cvsdoxy module to doxygenify
|
||||
<title> is title of report
|
||||
<version> is version of NDB Cluster
|
||||
END
|
||||
exit;
|
||||
}
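# Example invocation (module, title and version here are illustrative):
#   ndbdoxy.pl ndbapi "NDB API Reference" 3.4.0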
|
||||
my $module = $ARGV[0];
|
||||
my $title = $ARGV[1];
|
||||
my $version = $ARGV[2];
|
||||
my $destdir = ".";
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Execute Doxygen -g
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
if (-r "${root}/doxyfiles/${module}.doxyfile") {
|
||||
system("cd ${destdir}; \
|
||||
cp ${root}/doxyfiles/${module}.doxyfile Doxyfile");
|
||||
} elsif (-r "${root}/doxyfiles/default.doxyfile") {
|
||||
system("cd ${destdir}; \
|
||||
cp ${root}/doxyfiles/default.doxyfile Doxyfile");
|
||||
} else {
|
||||
system("cd ${destdir}; $DOXYGEN -g");
|
||||
}
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# HTML Footer
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
if (-r "${root}/doxyfiles/htmlfooter") {
|
||||
system("cd ${destdir}; \
|
||||
cp ${root}/doxyfiles/htmlfooter footer.html");
|
||||
|
||||
open (INFILE, "< ${destdir}/footer.html")
|
||||
or die "Error opening ${destdir}/footer.html.\n";
|
||||
open (OUTFILE, "> ${destdir}/footer.html.new")
|
||||
or die "Error opening ${destdir}/footer.html.new.\n";
|
||||
while (<INFILE>) {
|
||||
if (/(.*)DATE(.*)$/) {
|
||||
print OUTFILE $1 . localtime() . $2;
|
||||
} else {
|
||||
print OUTFILE;
|
||||
}
|
||||
}
|
||||
close INFILE;
|
||||
close OUTFILE;
|
||||
|
||||
system("mv ${destdir}/footer.html.new ${destdir}/footer.html");
|
||||
} else {
|
||||
print("Warning: No ${root}/doxyfiles/${module}.htmlfooter");
|
||||
}
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Execute Doxygen
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
system("cd ${destdir}; $DOXYGEN");
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Change a little in refman.tex
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
open (INFILE, "< ${destdir}/latex/refman.tex")
|
||||
or die "Error opening ${destdir}/latex/refman.tex.\n";
|
||||
open (OUTFILE, "> ${destdir}/latex/refman.tex.new")
|
||||
or die "Error opening ${destdir}/latex/refman.tex.new.\n";
|
||||
|
||||
while (<INFILE>)
|
||||
{
|
||||
if (/(.*)Reference Manual(.*)$/) {
|
||||
print OUTFILE $1 .
|
||||
"\\mbox{}\\vspace{-3cm}\\mbox{}" .
|
||||
"\\hrule\\bigskip\\bigskip\\bigskip\\bigskip" .
|
||||
"\\Huge{" . $title . "}" . $2;
|
||||
} elsif (/(.*)Generated by Doxygen 1.2.1[0-9](.*)$/) {
|
||||
print OUTFILE $1 .
|
||||
"\\begin{center}" .
|
||||
"\\LARGE{MySQL AB}" .
|
||||
"\\end{center}".
|
||||
"\\hfill\\bigskip\\bigskip\\bigskip\\hrule" .
|
||||
"\\bigskip\\bigskip\\bigskip\\bigskip\\bigskip" .
|
||||
"\\bigskip\\bigskip\\bigskip\\bigskip\\bigskip" .
|
||||
"\\bigskip\\bigskip NDB Cluster Release " . $version .
|
||||
"\\bigskip\\bigskip\\bigskip\\bigskip\\bigskip\\hfill" .
|
||||
$2;
|
||||
} elsif (/\\chapter\{File Index\}/) {
|
||||
print OUTFILE "\%\\chapter{File Index}\n";
|
||||
} elsif (/\\input{files}/) {
|
||||
print OUTFILE "\%\\input{files}\n";
|
||||
} elsif (/\\chapter\{Page Index\}/) {
|
||||
print OUTFILE "\%\\chapter{Page Index}\n";
|
||||
} elsif (/\\input{pages}/) {
|
||||
print OUTFILE "\%\\input{pages}\n";
|
||||
} else {
|
||||
print OUTFILE;
|
||||
}
|
||||
}
|
||||
|
||||
close INFILE;
|
||||
close OUTFILE;
|
||||
|
||||
system("mv ${destdir}/latex/refman.tex.new ${destdir}/latex/refman.tex");
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Change a little in doxygen.sty
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
open (INFILE, "< ${destdir}/latex/doxygen.sty")
|
||||
or die "Error opening INFILE.\n";
|
||||
open (OUTFILE, "> ${destdir}/latex/doxygen.sty.new")
|
||||
or die "Error opening OUTFILE.\n";
|
||||
|
||||
while (<INFILE>)
|
||||
{
|
||||
if (/\\rfoot/) {
|
||||
print OUTFILE "\\rfoot[\\fancyplain{}{\\bfseries\\small \\copyright~Copyright 2003 MySQL AB\\hfill support-cluster\@mysql.com}]{}\n";
|
||||
} elsif (/\\lfoot/) {
|
||||
print OUTFILE "\\lfoot[]{\\fancyplain{}{\\bfseries\\small support-cluster\@mysql.com\\hfill \\copyright~Copyright 2003 MySQL AB}}\n";
|
||||
} else {
|
||||
print OUTFILE;
|
||||
}
|
||||
}
|
||||
|
||||
close INFILE;
|
||||
close OUTFILE;
|
||||
|
||||
system("mv ${destdir}/latex/doxygen.sty.new ${destdir}/latex/doxygen.sty");
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Other
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
#system("cd ${root}/tmp/${module}; \
|
||||
# mkdir html.tar; \
|
||||
# cd html.tar; \
|
||||
# cp -r ../html ${module}; \
|
||||
# tar cf ${module}.html.tar ${module}; \
|
||||
# /usr/local/bin/gzip ${module}.html.tar; \
|
||||
# /bin/rm -rf ${root}/tmp/${module}/html.tar/${module}");
|
||||
|
||||
#system("cd ${destdir}/latex/; \
|
||||
# $PDFLATEX refman.tex \
|
||||
# $MAKEINDEX refman.idx \
|
||||
# $PDFLATEX refman.tex \
|
||||
# mv -f refman.pdf ${module}.pdf");
|
||||
|
||||
print<<END;
|
||||
Execute:
|
||||
latex refman; makeindex refman; latex refman
|
||||
END
|
78
ndb/home/bin/ngcalc
Executable file
|
@ -0,0 +1,78 @@
|
|||
#! /usr/local/bin/perl
|
||||
|
||||
use strict;
|
||||
use Getopt::Long;
|
||||
|
||||
sub usage {
|
||||
print <<END;
|
||||
ngcalc -- calculate node groups and table fragments
|
||||
usage: ngcalc [ options ] f1 f2 ...
|
||||
-g num number of node groups (default 2)
|
||||
-r num number of replicas (default 2)
|
||||
-n list comma-separated list of db nodes (default 1,2,...)
|
||||
fX number of fragments per node group in table X (e.g. 1,2,8)
|
||||
(all replicas count as the same fragment)
|
||||
END
|
||||
exit(1);
|
||||
};
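# Example (illustrative): two node groups, two replicas, nodes 1-4,
# one table with 2 fragments per node group:
#   ngcalc -g 2 -r 2 -n 1,2,3,4 2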
|
||||
|
||||
use vars qw($cnoOfNodeGroups $cnoReplicas $nodeArray);
|
||||
|
||||
$cnoOfNodeGroups = 2;
|
||||
$cnoReplicas = 2;
|
||||
GetOptions(
|
||||
"g=i" => \$cnoOfNodeGroups,
|
||||
"r=i" => \$cnoReplicas,
|
||||
"n=s" => \$nodeArray,
|
||||
) or &usage;
|
||||
|
||||
my @tableList = @ARGV;
|
||||
|
||||
$cnoOfNodeGroups > 0 or &usage;
|
||||
$cnoReplicas > 0 or &usage;
|
||||
if (! defined($nodeArray)) {
|
||||
$nodeArray = join(',', 1..($cnoOfNodeGroups*$cnoReplicas));
|
||||
}
|
||||
$nodeArray =~ /^\d+(,\d+)*$/ or &usage;
|
||||
my @nodeArray = split(/,/, $nodeArray);
|
||||
@nodeArray == $cnoOfNodeGroups*$cnoReplicas or &usage;
|
||||
|
||||
my @nodeGroupRecord;
|
||||
for (my $i = 0; $i < $cnoOfNodeGroups; $i++) {
|
||||
my $rec = {};
|
||||
my $nodes = [];
|
||||
for (my $j = 0; $j < $cnoReplicas; $j++) {
|
||||
push(@$nodes, $nodeArray[$i * $cnoReplicas + $j]);
|
||||
}
|
||||
$rec->{nodesInGroup} = $nodes;
|
||||
$rec->{nodeCount} = $cnoReplicas;
|
||||
$rec->{nextReplicaNode} = 0;
|
||||
$nodeGroupRecord[$i] = $rec;
|
||||
print "NG $i: ", join(" ", @{$rec->{nodesInGroup}}), "\n";
|
||||
}
|
||||
|
||||
# see Dbdih::execCREATE_FRAGMENTATION_REQ
|
||||
|
||||
my $c_nextNodeGroup = 0;
|
||||
for (my $t = 0; $t < @tableList; $t++) {
|
||||
use integer;
|
||||
my $f = $tableList[$t];
|
||||
my $ng = $c_nextNodeGroup++;
|
||||
$c_nextNodeGroup = 0 if $c_nextNodeGroup == $cnoOfNodeGroups;
|
||||
my $noOfFragments = $f * $cnoOfNodeGroups;
|
||||
my @fragments;
|
||||
for (my $fragNo = 0; $fragNo < $noOfFragments; $fragNo++) {
|
||||
my $rec = $nodeGroupRecord[$ng];
|
||||
my $max = $rec->{nodeCount};
|
||||
my $ind = $rec->{nextReplicaNode};
|
||||
$rec->{nextReplicaNode} = ($ind + 1 >= $max ? 0 : $ind + 1);
|
||||
for (my $replicaNo = 0; $replicaNo < $cnoReplicas; $replicaNo++) {
|
||||
my $nodeId = $rec->{nodesInGroup}[$ind++];
|
||||
push(@fragments, $nodeId);
|
||||
$ind = ($ind == $max ? 0 : $ind);
|
||||
}
|
||||
$ng++;
|
||||
$ng = ($ng == $cnoOfNodeGroups ? 0 : $ng);
|
||||
}
|
||||
printf "%02d %s\n", $t, join(" ", @fragments);
|
||||
}
|
102
ndb/home/bin/signallog2html.lib/signallog2list.awk
Normal file
|
@ -0,0 +1,102 @@
|
|||
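# signallog2list.awk -- turns an NDB signal log into a list with one line per
# signal: direction, sending process/block, receiving process/block, signal
# name, signal ids and (for delayed sends) the delay.  A line is printed when
# it matches BLOCK_ID, or when its signal id was recorded from an earlier match
# starting at SIGNAL_ID.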
BEGIN{
|
||||
PRINT=0;
|
||||
SIGNAL_ARRAY[0]="";
|
||||
BLOCK_ID=0;
|
||||
SIGNAL_ID=-22;
|
||||
}
|
||||
{
|
||||
SIGNAL_ARRAY[SIGNAL_ID]=SIGNAL_ID;
|
||||
}
|
||||
|
||||
/^---- Send ----- Signal ----------------/ {
|
||||
DIRECTION="S";
|
||||
SENDER="";
|
||||
SENDPROCESS="";
|
||||
RECEIVER="";
|
||||
RECPROCESS="";
|
||||
SIGNAL="";
|
||||
RECSIGID="?";
|
||||
SIGID="?";
|
||||
DELAY="N/A";
|
||||
}
|
||||
|
||||
/^---- Send delay Signal/ {
|
||||
DIRECTION="SD";
|
||||
SENDER="";
|
||||
SENDPROCESS="";
|
||||
RECEIVER="";
|
||||
RECPROCESS="";
|
||||
SIGNAL="";
|
||||
RECSIGID="?";
|
||||
SIGID="?";
|
||||
DELAY=$5;
|
||||
|
||||
LEN=length(DELAY);
|
||||
DELAY=substr(DELAY,2,LEN);
|
||||
}
|
||||
|
||||
/^---- Received - Signal ----------------/ {
|
||||
DIRECTION="R";
|
||||
SENDER="";
|
||||
SENDPROCESS="";
|
||||
RECEIVER="";
|
||||
RECPROCESS="";
|
||||
SIGNAL="";
|
||||
RECSIGID="?";
|
||||
SIGID="?";
|
||||
DELAY="N/A";
|
||||
}
|
||||
|
||||
/r.bn:/{
|
||||
|
||||
RECEIVER=$3;
|
||||
RECPROCESS=$5;
|
||||
|
||||
if(DIRECTION == "R"){
|
||||
SIGNAL=$10;
|
||||
RECSIGID=$7;
|
||||
}
|
||||
else
|
||||
SIGNAL=$8;
|
||||
}
|
||||
|
||||
/s.bn:/{
|
||||
|
||||
SENDER=$3;
|
||||
SIGID=$7;
|
||||
|
||||
if(SIGID == SIGNAL_ARRAY[SIGID]){
|
||||
PRINT=1;
|
||||
if(DIRECTION == "R"){
|
||||
SIGNAL_ARRAY[RECSIGID]=RECSIGID;
|
||||
};
|
||||
}
|
||||
|
||||
SENDPROCESS=$5;
|
||||
|
||||
LEN=length(RECEIVER);
|
||||
RECEIVER=substr(RECEIVER,2,LEN-3);
|
||||
|
||||
if(BLOCK_ID == "ALL" || RECEIVER==BLOCK_ID){PRINT=1; }
|
||||
|
||||
LEN=length(SENDER);
|
||||
SENDER=substr(SENDER,2,LEN-3);
|
||||
if(BLOCK_ID == "ALL" || SENDER == BLOCK_ID){ PRINT=1;}
|
||||
|
||||
LEN=length(SIGNAL);
|
||||
SIGNAL=substr(SIGNAL,2,LEN-2);
|
||||
|
||||
LEN=length(SENDPROCESS);
|
||||
SENDPROCESS=substr(SENDPROCESS,1,LEN-1);
|
||||
|
||||
LEN=length(RECPROCESS);
|
||||
RECPROCESS=substr(RECPROCESS,1,LEN-1);
|
||||
|
||||
if( PRINT == 1){
|
||||
print DIRECTION" "SENDPROCESS" "SENDER" "RECPROCESS" "RECEIVER" "SIGNAL" "SIGID" "RECSIGID" "DELAY;
|
||||
}
|
||||
|
||||
PRINT=0;
|
||||
}
|
||||
|
||||
|
29
ndb/home/bin/signallog2html.lib/uniq_blocks.awk
Normal file
|
@ -0,0 +1,29 @@
|
|||
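# uniq_blocks.awk -- collect the unique <process><block> names seen in fields
# 2-3 and 4-5 of the signal list file and print them on a single line.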
BEGIN{
|
||||
NAMES[""]="";
|
||||
ORDER[0]="";
|
||||
NUM=0;
|
||||
}
|
||||
|
||||
{
|
||||
if(NAMES[$2$3]!=$2$3){
|
||||
NAMES[$2$3]=$2$3;
|
||||
ORDER[NUM]=$2$3;
|
||||
NUM++;
|
||||
}
|
||||
|
||||
if(NAMES[$4$5]!=$4$5){
|
||||
NAMES[$4$5]=$4$5;
|
||||
ORDER[NUM]=$4$5;
|
||||
NUM++;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
END{
|
||||
for(i=0; i<NUM; i++){
|
||||
LIST=ORDER[i]" "LIST;
|
||||
|
||||
}
|
||||
print LIST;
|
||||
}
|
||||
|
349
ndb/home/bin/signallog2html.sh
Executable file
|
@ -0,0 +1,349 @@
|
|||
#!/bin/sh
|
||||
# NAME
|
||||
# signallog2html.sh
|
||||
#
|
||||
# SYNOPSIS
|
||||
# signallog2html.sh [ -b <block_name | ALL> ] [ -s <signal_id> ] -f signal_log_file
|
||||
#
|
||||
# DESCRIPTION
|
||||
# Creates a signal sequence diagram in HTML format that can be
|
||||
# viewed from a web browser. The HTML file is created from a signal
|
||||
# log file and it contains a big table with jpeg files in every
|
||||
# table cell. Every row in the table is a signal. The block_name
|
||||
# could be one of the following: CMVMI MISSRA NDBFS NDBCNTR DBACC
|
||||
# DBDICT DBLQH DBDIH DBTC DBTUP QMGR ALL. The signal_id is a
|
||||
# number. If no block_name or signal_id is given the default
|
||||
# block_name "ALL" is used.
|
||||
#
|
||||
#
|
||||
#
|
||||
# OPTIONS
|
||||
#
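#   -b block_name   trace only signals sent to/from this block (or ALL)
#   -s signal_id    follow the signal with this id (as filtered by signallog2list.awk)
#   -f file         signal log file to read (required)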
|
||||
# EXAMPLES
|
||||
#
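#   signallog2html.sh -f NodeSignalLog.txt
#   signallog2html.sh -b DBTC -f NodeSignalLog.txt
#   (log file name is illustrative)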
|
||||
#
|
||||
# ENVIRONMENT
|
||||
# NDB_PROJ_HOME Home dir for ndb
|
||||
#
|
||||
# FILES
|
||||
# $NDB_PROJ_HOME/lib/funcs.sh General shell script functions.
|
||||
# uniq_blocks.awk Creates a list of unique blocks
|
||||
# in the signal_log_file.
|
||||
# signallog2list.awk Creates a list file from the signal_log_file.
|
||||
# empty.JPG Jpeg file, must exist in the HTML file
|
||||
# directory for viewing.
|
||||
# left_line.JPG
|
||||
# line.JPG
|
||||
# right_line.JPG
|
||||
# self_line.JPG
|
||||
#
|
||||
#
|
||||
# SEE ALSO
|
||||
#
|
||||
# DIAGNOSTICS
|
||||
#
|
||||
# VERSION
|
||||
# 1.0
|
||||
#
|
||||
# DATE
|
||||
# 011029
|
||||
#
|
||||
# AUTHOR
|
||||
# Jan Markborg
|
||||
#
|
||||
|
||||
progname=`basename $0`
|
||||
synopsis="signallog2html.sh [ -b <block_name | ALL> ] [ -s <signal_id> ] -f signal_log_file"
|
||||
block_name=""
|
||||
signal_id=""
|
||||
verbose=yes
|
||||
signal_log_file=""
|
||||
|
||||
: ${NDB_PROJ_HOME:?} # If undefined, exit with error message
|
||||
|
||||
: ${NDB_LOCAL_BUILD_OPTIONS:=--} # If undef, set to --. Keeps getopts happy.
|
||||
# You may have to experiment a bit
|
||||
# to get quoting right (if you need it).
|
||||
|
||||
|
||||
. $NDB_PROJ_HOME/lib/funcs.sh # Load some good stuff
|
||||
|
||||
# defaults for options related variables
|
||||
#
|
||||
report_date=`date '+%Y-%m-%d'`
|
||||
|
||||
# Option parsing for the the command line.
|
||||
#
|
||||
|
||||
while getopts f:b:s: i
|
||||
do
|
||||
case $i in
|
||||
f) signal_log_file=$OPTARG;;
|
||||
b) block_name=$OPTARG;;
|
||||
s) signal_id=$OPTARG;;
|
||||
\?) syndie ;; # print synopsis and exit
|
||||
esac
|
||||
done
|
||||
|
||||
# -- Verify
|
||||
trace "Verifying signal_log_file $signal_log_file"
|
||||
|
||||
if [ x$signal_log_file = "x" ]
|
||||
then
|
||||
syndie "Invalid signal_log_file name: $signal_log_file not found"
|
||||
fi
|
||||
|
||||
|
||||
if [ ! -r $signal_log_file ]
|
||||
then
|
||||
syndie "Invalid signal_log_file name: $signal_log_file not found"
|
||||
fi
|
||||
|
||||
|
||||
|
||||
if [ -n "$block_name" ]
|
||||
then
|
||||
|
||||
trace "Verifying block_name"
|
||||
case $block_name in
|
||||
CMVMI| MISSRA| NDBFS| NDBCNTR| DBACC| DBDICT| DBLQH| DBDIH| DBTC| DBTUP| QMGR);;
|
||||
ALL) trace "Signals to/from every block will be traced!";;
|
||||
*) syndie "Unknown block name: $block_name";;
|
||||
esac
|
||||
fi
|
||||
|
||||
if [ block_name="" -a signal_id="" ]
|
||||
then
|
||||
block_name=ALL
|
||||
trace "block_name = $block_name"
|
||||
fi
|
||||
|
||||
trace "Arguments OK"
|
||||
|
||||
###
|
||||
#
|
||||
# General html functions
|
||||
header(){
|
||||
cat <<EOF
|
||||
<html><head><title>$*</title></head>
|
||||
<body>
|
||||
EOF
|
||||
}
|
||||
|
||||
footer(){
|
||||
cat <<EOF
|
||||
</body></html>
|
||||
EOF
|
||||
}
|
||||
|
||||
heading(){
|
||||
h=$1; shift
|
||||
cat <<EOF
|
||||
<h$h>$*</h$h>
|
||||
EOF
|
||||
}
|
||||
|
||||
table(){
|
||||
echo "<table $*>"
|
||||
}
|
||||
|
||||
table_header(){
|
||||
echo "<th>$*</th>"
|
||||
}
|
||||
|
||||
end_table(){
|
||||
echo "</table>"
|
||||
}
|
||||
|
||||
row(){
|
||||
echo "<tr>"
|
||||
}
|
||||
|
||||
end_row(){
|
||||
echo "</tr>"
|
||||
}
|
||||
|
||||
c_column(){
|
||||
cat <<EOF
|
||||
<td valign=center align=center>$*</td>
|
||||
EOF
|
||||
}
|
||||
|
||||
bold(){
|
||||
cat <<EOF
|
||||
<b>$*</b>
|
||||
EOF
|
||||
}
|
||||
|
||||
column(){
|
||||
cat <<EOF
|
||||
<td align=left>$*</td>
|
||||
EOF
|
||||
}
|
||||
|
||||
para(){
|
||||
cat <<EOF
|
||||
<p></p>
|
||||
EOF
|
||||
}
|
||||
|
||||
hr(){
|
||||
cat <<EOF
|
||||
<hr>
|
||||
EOF
|
||||
}
|
||||
|
||||
img_column(){
|
||||
cat <<EOF
|
||||
<td><center><$* height=100% width=100%></center></td>
|
||||
EOF
|
||||
}
|
||||
|
||||
# Check the direction of arrow.
|
||||
# arrowDirection(){ $columnarray $sendnode$sendblock $recnode$recblock
|
||||
arrowDirection(){
|
||||
if [ $2 = $3 ]
|
||||
then
|
||||
arrow=SELF
|
||||
return;
|
||||
else
|
||||
for x in $1
|
||||
do
|
||||
if [ $x = $2 ]
|
||||
then
|
||||
arrow=RIGHT
|
||||
break
|
||||
elif [ $x = $3 ]
|
||||
then
|
||||
arrow=LEFT
|
||||
break
|
||||
fi
|
||||
done
|
||||
fi
|
||||
}
|
||||
|
||||
drawImages(){
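# Emit one image cell per block column: self_line.JPG when sender and receiver
# are the same column, line/left_line/right_line images to draw the arrow
# between them, empty.JPG elsewhere; the signal name goes in the last column.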
|
||||
for x in $columnarray
|
||||
do
|
||||
case $arrow in
|
||||
SELF)
|
||||
if [ $x = $sendnode$sendblock ]
|
||||
then
|
||||
img_column img SRC=\"self_line.JPG\"
|
||||
else
|
||||
img_column img SRC=\"empty.JPG\"
|
||||
fi;;
|
||||
|
||||
RIGHT)
|
||||
if [ $x = $recnode$recblock ]
|
||||
then
|
||||
img_column img SRC=\"right_line.JPG\"
|
||||
weHavePassedRec=1
|
||||
elif [ $x = $sendnode$sendblock ]
|
||||
then
|
||||
img_column img SRC=\"empty.JPG\"
|
||||
weHavePassedSen=1
|
||||
elif [ $weHavePassedRec = 1 -o $weHavePassedSen = 0 ]
|
||||
then
|
||||
img_column img SRC=\"empty.JPG\"
|
||||
elif [ $weHavePassedRec = 0 -a $weHavePassedSen = 1 ]
|
||||
then
|
||||
img_column img SRC=\"line.JPG\"
|
||||
fi;;
|
||||
|
||||
LEFT)
|
||||
if [ $x = $recnode$recblock ]
|
||||
then
|
||||
img_column img SRC=\"empty.JPG\"
|
||||
weHaveJustPassedRec=1
|
||||
weHavePassedRec=1
|
||||
continue
|
||||
fi
|
||||
if [ $x = $sendnode$sendblock -a $weHaveJustPassedRec = 1 ]
|
||||
then
|
||||
img_column img SRC=\"left_line.JPG\"
|
||||
weHaveJustPassedRec=0
|
||||
weHavePassedSen=1
|
||||
continue
|
||||
fi
|
||||
if [ $x = $sendnode$sendblock ]
|
||||
then
|
||||
img_column img SRC=\"line.JPG\"
|
||||
weHavePassedSen=1
|
||||
continue
|
||||
fi
|
||||
if [ $weHaveJustPassedRec = 1 ]
|
||||
then
|
||||
img_column img SRC=\"left_line.JPG\"
|
||||
weHaveJustPassedRec=0
|
||||
continue
|
||||
fi
|
||||
if [ $weHavePassedSen = 1 -o $weHavePassedRec = 0 ]
|
||||
then
|
||||
img_column img SRC=\"empty.JPG\"
|
||||
continue
|
||||
fi
|
||||
|
||||
if [ $weHavePassedRec = 1 -a $weHavePassedSen = 0 ]
|
||||
then
|
||||
img_column img SRC=\"line.JPG\"
|
||||
continue
|
||||
|
||||
fi
|
||||
column ERROR;;
|
||||
|
||||
*)
|
||||
echo ERROR;;
|
||||
esac
|
||||
done
|
||||
column $signal
|
||||
}
|
||||
|
||||
### Main
|
||||
trace "Making HTML file"
|
||||
(
|
||||
header "Signal sequence diagram $report_date"
|
||||
heading 1 "Signal sequence diagram $report_date"
|
||||
|
||||
trace "Making list file"
|
||||
#make a signal list file from the signal log file.
|
||||
`awk -f /home/ndb/bin/signallog2html.lib/signallog2list.awk SIGNAL_ID=$signal_id BLOCK_ID=$block_name $signal_log_file > $signal_log_file.list`
|
||||
|
||||
COLUMNS=`awk -f /home/ndb/bin/signallog2html.lib/uniq_blocks.awk $signal_log_file.list | wc -w`
|
||||
|
||||
table "border=0 cellspacing=0 cellpadding=0 cols=`expr $COLUMNS + 1`"
|
||||
|
||||
columnarray=`awk -f /home/ndb/bin/signallog2html.lib/uniq_blocks.awk $signal_log_file.list`
|
||||
|
||||
row
|
||||
column #make an empty first column!
|
||||
for col in $columnarray
|
||||
do
|
||||
table_header $col
|
||||
done
|
||||
|
||||
grep "" $signal_log_file.list | \
|
||||
while read direction sendnode sendblock recnode recblock signal sigid recsigid delay
|
||||
do
|
||||
if [ $direction = "R" ]
|
||||
then
|
||||
row
|
||||
weHavePassedRec=0
|
||||
weHavePassedSen=0
|
||||
weHaveJustPassedRec=0
|
||||
arrow=""
|
||||
|
||||
# calculate the direction of the arrow.
|
||||
arrowDirection "$columnarray" "$sendnode$sendblock" "$recnode$recblock"
|
||||
|
||||
# Draw the arrow images.
|
||||
drawImages
|
||||
end_row
|
||||
fi
|
||||
done
|
||||
end_table
|
||||
|
||||
footer
|
||||
) > $signal_log_file.html
|
||||
|
||||
exit 0
|
90
ndb/home/bin/stripcr
Executable file
|
@ -0,0 +1,90 @@
|
|||
#!/bin/sh
|
||||
|
||||
|
||||
# NAME
|
||||
# stripcr - a program for removing carriage return chars from dos-files.
|
||||
#
|
||||
# SYNOPSIS
|
||||
# stripcr [file...]
|
||||
#
|
||||
# DESCRIPTION
|
||||
# stripcr deletes all CR characters from the given files.
|
||||
# The files are edited in place.
|
||||
# If no files are given, stdin and stdout are used instead.
|
||||
#
|
||||
# OPTIONS
|
||||
# -s extension Make a copy of the original of each file, and
|
||||
# give it the given extension (.bak, .orig, -bak, ...).
|
||||
#
|
||||
# EXAMPLES
|
||||
# stripcr file.txt innerloop.cc
|
||||
# stripcr -s .bak *.cc
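# STRIPCR_OPTIONS="-s .orig" stripcr *.h   (default options taken from the
#                                           environment; file names illustrative)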
|
||||
#
|
||||
# ENVIRONMENT
|
||||
# NDB_PROJ_HOME Home dir for ndb
|
||||
#
|
||||
# FILES
|
||||
# $NDB_PROJ_HOME/lib/funcs.sh Some useful functions for safe execution
|
||||
# of commands, printing, and tracing.
|
||||
#
|
||||
# VERSION
|
||||
# 1.0
|
||||
#
|
||||
# AUTHOR
|
||||
# Jonas Mölsä
|
||||
#
|
||||
|
||||
|
||||
progname=`basename $0`
|
||||
synopsis="stripcr [-s extension] [file...]"
|
||||
|
||||
|
||||
: ${NDB_PROJ_HOME:?} # If undefined, exit with error message
|
||||
|
||||
: ${STRIPCR_OPTIONS:=--} # If undefined, set to --, to keep getopts happy.
|
||||
# You may have to experiment, to get quoting right.
|
||||
|
||||
. $NDB_PROJ_HOME/lib/funcs.sh
|
||||
|
||||
|
||||
# defaults for options related variables
|
||||
#
|
||||
extension=
|
||||
options="$STRIPCR_OPTIONS"
|
||||
|
||||
# used if error when parsing the options environment variable
|
||||
#
|
||||
env_opterr="options environment variable: <<$options>>"
|
||||
|
||||
|
||||
|
||||
# We want to be able to set options in an environment variable,
|
||||
# as well as on the command line. In order not to have to repeat
|
||||
# the same getopts information twice, we loop two times over the
|
||||
# getopts while loop. The first time, we process options from
|
||||
# the options environment variable, the second time we process
|
||||
# options from the command line.
|
||||
#
|
||||
# The things to change are the actual options and what they do.
|
||||
#
|
||||
#
|
||||
for optstring in "$options" "" # 1. options variable 2. cmd line
|
||||
do
|
||||
while getopts s: i $optstring # optstring empty => no arg => cmd line
|
||||
do
|
||||
case $i in
|
||||
|
||||
s) extension="$OPTARG";;
|
||||
\?) syndie $env_opterr;; # print synopsis and exit
|
||||
|
||||
esac
|
||||
done
|
||||
|
||||
[ -n "$optstring" ] && OPTIND=1 # Reset for round 2, cmd line options
|
||||
|
||||
env_opterr= # Round 2 should not use the value
|
||||
done
|
||||
shift `expr $OPTIND - 1`
|
||||
|
||||
|
||||
safe perl -i$extension -lpe 'tr/\r//d' $*
|
Some files were not shown because too many files have changed in this diff.