
kamctl: db berkeley clean up

Daniel-Constantin Mierla, 4 days ago
commit a09a788c78

+ 0 - 25
utils/kamctl/Makefile

@@ -190,31 +190,6 @@ install-modules: $(bin_prefix)/$(bin_dir)
 			$(MAKE) -C ../db_oracle/ ; \
 			$(INSTALL_BIN) ../db_oracle/kamailio_orasel $(bin_prefix)/$(bin_dir) ; \
 		fi
-		# install Berkeley database stuff
-		if [ "$(BERKELEYDBON)" = "yes" ]; then \
-			mkdir -p $(modules_prefix)/$(lib_dir)/kamctl ; \
-			sed -e "s#/usr/local/share/kamailio/#$(data_target)#g" \
-				< kamctl.db_berkeley > $(kamctltmpdir)/kamctl.db_berkeley ; \
-			$(INSTALL_CFG) $(kamctltmpdir)/kamctl.db_berkeley \
-				$(modules_prefix)/$(lib_dir)/kamctl/kamctl.db_berkeley ; \
-			rm -fr $(kamctltmpdir)/kamctl.db_berkeley ; \
-			sed -e "s#/usr/local/share/kamailio#$(data_target)#g" \
-				< kamdbctl.db_berkeley > $(kamctltmpdir)/kamdbctl.db_berkeley ; \
-			$(INSTALL_TOUCH) $(modules_prefix)/$(lib_dir)/kamctl/kamdbctl.db_berkeley ; \
-			$(INSTALL_CFG) $(kamctltmpdir)/kamdbctl.db_berkeley $(modules_prefix)/$(lib_dir)/kamctl/ ; \
-			rm -fr $(kamctltmpdir)/kamdbctl.db_berkeley ; \
-			mkdir -p $(data_prefix)/$(data_dir)/db_berkeley/kamailio ; \
-			for FILE in $(wildcard db_berkeley/kamailio/*) ; do \
-				if [ -f $$FILE ] ; then \
-				$(INSTALL_TOUCH) $$FILE \
-					$(data_prefix)/$(data_dir)/db_berkeley/kamailio/`basename "$$FILE"` ; \
-				$(INSTALL_CFG) $$FILE \
-					$(data_prefix)/$(data_dir)/db_berkeley/kamailio/`basename "$$FILE"` ; \
-				fi ;\
-			done ; \
-			$(MAKE) -C ../db_berkeley/ ; \
-			$(INSTALL_BIN) ../db_berkeley/kambdb_recover $(bin_prefix)/$(bin_dir) ; \
-		fi
 		# install dbtext stuff
 		if [ "$(DBTEXTON)" = "yes" ]; then \
 			mkdir -p $(modules_prefix)/$(lib_dir)/kamctl ; \

+ 0 - 6
utils/kamctl/kamctl

@@ -147,12 +147,6 @@ case $DBENGINE in
 			DBENGINELOADED=1
 		fi
 		;;
-	DB_BERKELEY|db_berkeley|BERKELEY|berkeley)
-		if [ -f "$MYLIBDIR/kamctl.db_berkeley" ]; then
-			. "$MYLIBDIR/kamctl.db_berkeley"
-			DBENGINELOADED=1
-		fi
-		;;
 	SQLITE|sqlite)
 		if [ -f "$MYLIBDIR/kamctl.sqlite" ]; then
 			. "$MYLIBDIR/kamctl.sqlite"

+ 0 - 57
utils/kamctl/kamctl.db_berkeley

@@ -1,57 +0,0 @@
-#
-# control tool for maintaining Kamailio
-#
-
-#===================================================================
-
-# path to the db_berkeley directory
-if [ -z "$DB_PATH" ] ; then
-	DB_PATH="/usr/local/share/kamailio/db_berkeley/kamailio"
-fi
-
-#===================================================================
-
-kamailio_bdb() {
-case $1 in
-	reload)
-		shift
-		if [ "$#" -lt 1 ] ; then
-			merr "reload - too few parameters"
-			exit 1
-		fi
-		
-		$CTLCMD bdb_reload $1
-		exit $?
-		;;
-	*)
-		usage
-		exit 1
-		;;
-esac
-}
-
-# domain don't support reload at the moment
-usage_domain() {
-        echo
-        mecho " -- command 'domain' - manage domains"
-	echo
-cat <<EOF
- domain show ........................ show list of served domains
- domain add <domainname> ............ add a new served domain
- domain rm <domainname> ............. remove a served domain
-EOF
-}
-USAGE_FUNCTIONS="$USAGE_FUNCTIONS usage_domain"
-
-# showdb is not implemented for SQL databases
-usage_showdb() {
-        echo
-        mecho " -- command 'showdb|userdb' - dump offline users"
-        echo
-cat <<EOF
- showdb ............................. display offline users
- userdb ............................. display offline users
-EOF
-}
-USAGE_FUNCTIONS="$USAGE_FUNCTIONS usage_showdb"
-

+ 2 - 2
utils/kamctl/kamctlrc

@@ -10,7 +10,7 @@
 ## chrooted directory
 # CHROOT_DIR="/path/to/chrooted/directory"
 
-## database type: MYSQL, PGSQL, ORACLE, DB_BERKELEY, DBTEXT, or SQLITE
+## database type: MYSQL, PGSQL, ORACLE, DBTEXT, or SQLITE
 ## by default none is loaded
 ##
 ## If you want to set up a database with kamdbctl, you must at least specify
@@ -26,7 +26,7 @@
 ## database name (for ORACLE this is TNS name)
 # DBNAME=kamailio
 
-## database path used by dbtext, db_berkeley or sqlite
+## database path used by dbtext or sqlite
 # DB_PATH="/usr/local/etc/kamailio/dbtext"
 
 ## database read/write user

+ 3 - 17
utils/kamctl/kamdbctl

@@ -118,15 +118,6 @@ case $DBENGINE in
 			merr "could not load the script in $MYLIBDIR/kamdbctl.dbtext for database engine $DBENGINE"
 		fi
 		;;
-	DB_BERKELEY|db_berkeley|BERKELEY|berkeley)
-		if [ -f "$MYLIBDIR/kamdbctl.db_berkeley" ]; then
-			. "$MYLIBDIR/kamdbctl.db_berkeley"
-			USED_DBENGINE="berkeley"
-			DBNAME=$DB_PATH
-		else
-			merr "could not load the script in $MYLIBDIR/kamdbctl.db_berkeley for database engine $DBENGINE"
-		fi
-		;;
 	SQLITE|sqlite)
 		if [ -f "$MYLIBDIR/kamdbctl.sqlite" ]; then
 			. "$MYLIBDIR/kamdbctl.sqlite"
@@ -290,7 +281,7 @@ kamailio_pframework() #pars: <action>
 case $1 in
 	copy)
 		# copy database to some other name
-		if [ "$USED_DBENGINE" = "berkeley" -o "$USED_DBENGINE" = "dbtext" ] ; then
+		if [ "$USED_DBENGINE" = "dbtext" ] ; then
 			merr "$USED_DBENGINE don't support this operation"
 			exit 1
 		fi
@@ -323,7 +314,7 @@ case $1 in
 		exit $ret
 		;;
 	backup)
-		if [ "$USED_DBENGINE" = "berkeley" -o "$USED_DBENGINE" = "dbtext" ] ; then
+		if [ "$USED_DBENGINE" = "dbtext" ] ; then
 			merr "$USED_DBENGINE don't support this operation"
 			exit 1
 		fi
@@ -337,7 +328,7 @@ case $1 in
 		exit $?
 		;;
 	restore)
-		if [ "$USED_DBENGINE" = "berkeley" -o "$USED_DBENGINE" = "dbtext" ] ; then
+		if [ "$USED_DBENGINE" = "dbtext" ] ; then
 			merr "$USED_DBENGINE don't support this operation"
 			exit 1
 		fi
@@ -473,11 +464,6 @@ case $1 in
 		kamailio_add_tables $DBNAME $2
 		exit $?
 		;;
-	bdb|db_berkeley)
-		shift
-		kamailio_berkeley "$@"
-		exit $?
-		;;
 	pframework)
 		shift
 		kamailio_pframework "$@"

+ 1 - 2
utils/kamctl/kamdbctl.8

@@ -20,7 +20,7 @@ Just change the DBENGINE parameter in the respective section in
 .B kamctlrc
 file.
 .TP
-Valid values are: MYSQL, PGSQL, ORACLE, DB_BERKELEY, DBTEXT.
+Valid values are: MYSQL, PGSQL, ORACLE, DBTEXT.
 .TP
 The default is 'none'.
 .TP
@@ -62,4 +62,3 @@ Mailing lists:
 [email protected] - Kamailio user community
 .nf
 [email protected] - Kamailio development community
-
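For context, a hedged usage sketch of kamdbctl with one of the remaining engines; the subcommands appear in the kamdbctl hunks above, while the file arguments are illustrative only:

    # in kamctlrc: DBENGINE=MYSQL
    kamdbctl copy kamailio_copy          # copy the database under another name (not supported for dbtext)
    kamdbctl backup kamailio_dump.sql    # dump the database (not supported for dbtext)
    kamdbctl restore kamailio_dump.sql   # restore from a dump (not supported for dbtext)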

+ 1 - 1
utils/kamctl/kamdbctl.base

@@ -34,7 +34,7 @@ INSTALL_EXTRA_TABLES=${INSTALL_EXTRA_TABLES:-ask}
 INSTALL_PRESENCE_TABLES=${INSTALL_PRESENCE_TABLES:-ask}
 INSTALL_DBUID_TABLES=${INSTALL_DBUID_TABLES:-ask}
 
-# Used by dbtext and db_berkeley to define tables to be created, used by
+# Used by dbtext to define tables to be created, used by
 # postgres to do the grants
 STANDARD_TABLES=${STANDARD_TABLES:-'version acc dbaliases domain domain_attrs
 		grp uri speed_dial lcr_gw lcr_rule lcr_rule_target pdt subscriber
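Because kamdbctl.base assigns these lists with the ${VAR:-default} shell pattern, they can be pre-set from the environment; a minimal sketch, assuming nothing resets the variable later and that the standard create subcommand is used (table subset purely illustrative):

    STANDARD_TABLES="version subscriber acc" kamdbctl create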

+ 0 - 619
utils/kamctl/kamdbctl.db_berkeley

@@ -1,619 +0,0 @@
-#
-# Script for maintaining Kamailio Berkeley DB tables
-# Copyright (C) 2007 Cisco Systems
-#
-# This file is part of Kamailio, a free SIP server.
-#
-# Kamailio is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version
-#
-# Kamailio is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License 
-# along with this program; if not, write to the Free Software 
-# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
-# 
-#
-
-#constants
-PATH=$PATH:/usr/local/BerkeleyDB.4.6/bin
-DELIM="|"
-BACKUP_CMD="tar czvf "
-RESTORE_CMD="tar xzvf "
-
-#berkeley db utility program that writes out db to plain text
-#small hack to autodetect the db dump command, debian prefix the version..
-
-which db_dump > /dev/null
-ret=$?
-if [ $ret -eq 0 ] ; then
-	DUMP_CMD="db_dump"
-fi ;
-
-which db4.4_dump > /dev/null
-ret=$?
-if [ $ret -eq 0 ] ; then
-	DUMP_CMD="db4.4_dump"
-fi ;
-
-which db4.5_dump > /dev/null
-ret=$?
-if [ $ret -eq 0 ] ; then
-	DUMP_CMD="db4.5_dump"
-fi ;
-
-which db4.6_dump > /dev/null
-ret=$?
-if [ $ret -eq 0 ] ; then
-	DUMP_CMD="db4.6_dump"
-fi ;
-
-
-which db5.0_dump > /dev/null
-ret=$?
-if [ $ret -eq 0 ] ; then
-	DUMP_CMD="db5.0_dump"
-fi ;
-
-which db5.1_dump > /dev/null
-ret=$?
-if [ $ret -eq 0 ] ; then
-	DUMP_CMD="db5.1_dump"
-fi ;
-
-
-#berkeley db utility program that imports data from plain text file
-#small hack to autodetect the db load command, debian prefix the version..
-
-which db_load > /dev/null
-ret=$?
-if [ $ret -eq 0 ] ; then
-	LOAD_CMD="db_load"
-fi ;
-
-which db4.4_load > /dev/null
-ret=$?
-if [ $ret -eq 0 ] ; then
-	LOAD_CMD="db4.4_load"
-fi ;
-
-which db4.5_load > /dev/null
-ret=$?
-if [ $ret -eq 0 ] ; then
-	LOAD_CMD="db4.5_load"
-fi ;
-
-which db4.6_load > /dev/null
-ret=$?
-if [ $ret -eq 0 ] ; then
-	LOAD_CMD="db4.6_load"
-fi ;
-
-
-which db5.0_load > /dev/null
-ret=$?
-if [ $ret -eq 0 ] ; then
-	LOAD_CMD="db5.0_load"
-fi ;
-
-which db5.1_load > /dev/null
-ret=$?
-if [ $ret -eq 0 ] ; then
-	LOAD_CMD="db5.1_load"
-fi ;
-
-# path to the database schemas
-DATA_DIR="/usr/local/share/kamailio"
-if [ -d "$DATA_DIR/db_berkeley/kamailio" ] ; then
-	DB_SCHEMA="$DATA_DIR/db_berkeley/kamailio"
-else
-	DB_SCHEMA="./db_berkeley/kamailio"
-fi
-
-# path to the db_berkeley database
-if [ -z "$DB_PATH" ]; then
-	DB_PATH="/usr/local/etc/kamailio/db_berkeley"
-fi
-
-berkeley_usage() 
-{
-COMMAND=`basename $0`
-cat <<EOF
-Script for maintaining Kamailio Berkeley DB tables
-       $COMMAND list      (lists the underlying db files in DB_PATH)
-       $COMMAND cat       <db>  (db_dump the underlying db file to STDOUT)
-       $COMMAND swap      <db>  (installs db.new by db -> db.old; db.new -> db)
-       $COMMAND append    <db> <datafile> (appends data to an existing db;output DB_PATH/db.new)
-       $COMMAND newappend <db> <datafile> (appends data to a new instance of db; output DB_PATH/db.new)
-       $COMMAND export  <dump_dir> (exports table data to plain-txt files in dump_dir)
-       $COMMAND import  <dump_dir> (imports plain-txt table data and creates new db tables in db_path)
-EOF
-} #usage
-
-
-#
-# 
-#
-kamailio_berkeley()  # parms: <op> <arg1> <arg2>
-{
-	case $1 in
-		list|ls)
-			ls -l $DB_PATH
-			exit $?
-			;;
-		cat)
-			shift
-			kamailio_cat $1 $DB_PATH
-			exit $?
-			;;
-
-		swap)
-			shift
-			kamailio_swap $1 $DB_PATH
-			exit $?
-			;;
-
-		append)
-			shift
-			kamailio_append  $1 $2 $DB_PATH
-			exit $?
-			;;
-
-		newappend)
-			shift
-			kamailio_newappend  $1 $2 $DB_PATH
-			exit $?
-			;;
-
-		export)
-			shift
-			kamailio_export  $1 $DB_PATH
-			exit $?
-			;;
-
-		import)
-			shift
-			kamailio_import  $1 $DB_PATH
-			exit $?
-			;;
-
-		*)
-			berkeley_usage
-			exit 1;
-			;;
-esac
-}
-
-
-
-##
-# EXPORT existing data to plain-txt files in DUMP_DIR
-# eg.  DB_PATH/version ---> DUMP_DIR/version.txt
-#
-# Export is used as part of a DB migration process to another 
-# major version of berkeley db.
-kamailio_export()  # parms: <DUMP_DIR> [DB_PATH]
-{
-	if [ $# -lt 2  ]; then
-		echo  "kamailio_dump parms: <DUMP_DIR> [DB_PATH]"
-		exit 1
-	fi
-	
-	# Assert: the DB_PATH directory should already exist
-	if [ ! -d $2 ] ; then
-		merr "BerkeleyDB directory does not exist at: [$2]"
-		exit 1
-	fi
-	
-	# Assert: DB_PATH directory should already contain table 'version'
-	if [ ! -f $2/version ] ; then
-		merr "BerkeleyDB directory does not have VERSION table at: [$2]"
-		exit 1
-	fi
-	
-	# Create dir at <DUMP_DIR> to store the exported data
-	if [ ! -d $1 ] ; then
-		minfo "creating DUMP_DIR at: [$1]"
-		mkdir -p $1
-	else
-		mdbg "Cleaning out DUMP_DIR to get ready for new data"
-		rm -rf $1/*
-	fi
-	
-	# DUMP_CMD will result in something like this:
-	#
-	#	VERSION=3
-	#	format=print
-	#	type=hash
-	#	h_nelem=2
-	#	db_pagesize=4096
-	#	HEADER=END
-	#	 METADATA_COLUMNS
-	#	 callid(str) method(str) from_tag(str) to_tag(str) sip_code(str) sip_reason(str) time(datetime)
-	#	 METADATA_KEY
-	#	 0
-	#	DATA=END
-	#
-	# However, we are only interested in the indented stuff between 
-	#  'HEADER=END' and 'DATA=END',
-	#  as everything else is DB instance specific. That is, we are interested in this part:
-	#
-	# METADATA_COLUMNS
-	# callid(str) method(str) from_tag(str) to_tag(str) sip_code(str) sip_reason(str) time(datetime)
-	# METADATA_KEY
-	# 0
-	#
-	# The following PERL filter will do this processing.
-	#
-	# perl -pe 's/^\w.*// ; s/^\s(.*)/$1/'
-
-	# Dump the STANDARD tables to plain-text files in DUMP_DIR
-	for TABLE in $STANDARD_TABLES; do
-	    if [ -f $2/$TABLE ] ; then
-		    mdbg "Exporting standard table: $TABLE"
-		    $DUMP_CMD -p -h $2 $TABLE  | perl -pe 's/^\w.*// ; s/^\s(.*)/$1/' > $1/$TABLE.txt
-		    
-		    # Check return code to make sure the export worked ok
-		    if [ $? -ne 0 ] ; then
-			merr "Export of standard table failed [$TABLE]"
-			# there was a problem, but it is not something
-			# we can handle here; We can deal with this at import
-			# time.
-		    fi
-	    else
-	    	    mwarn "Table not found: [$TABLE]"
-	    fi
-	done
-	
-	# Dump the PRESENCE tables to plain-text files in DUMP_DIR
-	for TABLE in $PRESENCE_TABLES; do
-	    if [ -f $2/$TABLE ] ; then
-		    mdbg "Exporting presence table: $TABLE"
-		    $DUMP_CMD -p -h $2 $TABLE  | perl -pe 's/^\w.*// ; s/^\s(.*)/$1/' > $1/$TABLE.txt
-		    if [ $? -ne 0 ] ; then
-			merr "Export of presence table failed [$TABLE]"
-		    fi
-	    else
-	    	    mwarn "Table not found: [$TABLE]"
-	    fi
-	done
-	
-	# Dump the EXTRA tables to plain-text files in DUMP_DIR
-	for TABLE in $EXTRA_TABLES; do
-	    if [ -f $2/$TABLE ] ; then
-		    mdbg "Exporting extra table: $TABLE"
-		    $DUMP_CMD -p -h $2 $TABLE  | perl -pe 's/^\w.*// ; s/^\s(.*)/$1/' > $1/$TABLE.txt
-		    if [ $? -ne 0 ] ; then
-			merr "Export of extra table failed [$TABLE]"
-		    fi
-	    else
-	    	    mwarn "Table not found: [$TABLE]"
-	    fi
-	done
-	
-	mdbg "All tables are now exported to DUMP_DIR: [$1]"
-	return 0
-
-}
-
-
-##
-# IMPORT existing plain-txt files from DUMP_DIR to DB_PATH
-# eg.  DUMP_DIR/version.txt  --> DB_PATH/version
-#
-# import is used as part of DB migrate to another major version of berkeley db.
-# this will over-write anything in DB_PATH
-kamailio_import()  # parms: <DUMP_DIR> [DB_PATH]
-{
-	if [ $# -lt 2  ]; then
-		echo  "kamailio_dump parms: <DUMP_DIR> [DB_PATH]"
-		exit 1
-	fi
-	
-	# Assert: DUMP_DIR (source dir) already exists
-	if [ ! -d $1 ] ; then
-		merr "Berkeley DUMP_DIR directory does not exist: [$1]"
-		exit 1;
-	fi
-	
-	# Assert: DUMP_DIR directory should already contain table 'version.txt'
-	if [ ! -f $1/version.txt ] ; then
-		merr "DUMP_DIR directory does not have VERSION.txt data at: [$1]"
-		exit 1
-	fi
-	
-	# Assert: destination dir exists [DB_PATH]
-	if [ ! -d $2 ] ; then
-		mdbg "Berkeley DB_PATH directory is being created: [$2]"
-		mkdir -p $2
-	else
-		# Wipe out the destination dir to make room for new data
-		mwarn "Berkeley DB_PATH directory is being purged at: [$2]"
-		rm -rf $2/*
-	fi
-	
-	# Creates STANDARD tables from plain-text files in DUMP_DIR
-	for TABLE in $STANDARD_TABLES; do
-	    if [ -s $1/$TABLE.txt ] ; then
-		    mdbg "Importing standard table: $TABLE"
-		    $LOAD_CMD -T -t hash -f $1/$TABLE.txt -h $2 $TABLE
-		    
-		    # Check return code to make sure the export worked ok
-		    if [ $? -ne 0 ] ; then
-			merr "Import of standard table failed [$TABLE.txt]"
-			merr "Create this missing table with kambdb_recover."
-		    fi
-	    else
-	    	    merr "Import data not found for table: [$TABLE.txt]" 
-		    merr "Create this missing table with kambdb_recover."
-	    fi
-	done
-	
-
-	# Creates PRESENCE tables from plain-text files in DUMP_DIR
-	for TABLE in $PRESENCE_TABLES; do
-	    if [ -s $1/$TABLE.txt ] ; then
-		    mdbg "Importing presence table: $TABLE"
-		    $LOAD_CMD -T -t hash -f $1/$TABLE.txt -h $2 $TABLE
-		    
-		    # Check return code to make sure the export worked ok
-		    if [ $? -ne 0 ] ; then
-			merr "Import of presence table failed [$TABLE.txt]"
-			merr "Create this missing table with kambdb_recover."
-		    fi
-	    else
-		    mwarn "Import data not found for table: [$TABLE.txt]"
-	    fi
-	done
-
-	# Creates EXTRA tables from plain-text files in DUMP_DIR
-	for TABLE in $EXTRA_TABLES; do
-	    if [ -s $1/$TABLE.txt ] ; then
-		    mdbg "Importing extra table: $TABLE"
-		    $LOAD_CMD -T -t hash -f $1/$TABLE.txt -h $2 $TABLE
-		    
-		    # Check return code to make sure the export worked ok
-		    if [ $? -ne 0 ] ; then
-			merr "Import of extra table failed [$TABLE.txt]"
-			merr "Create this missing table with kambdb_recover."
-		    fi
-	    else
-		    mwarn "Import data not found for table: [$TABLE.txt]"
-	    fi
-	done
-	
-	mdbg "All tables are now imported to DB_PATH: [$2]"
-	return 0
-
-}
-
-
-kamailio_swap()  # parms: <db> [DB_PATH]
-{
-	if [ $# -lt 2  ]; then
-		echo  "kamailio_swap parms: <db> [DB_PATH]"
-		exit 1
-	fi
-	
-	DB=$2/$1
-	DBNEW=$DB.new
-	DBOLD=$DB.old
-	cp $DB $DBOLD
-	mv $DBNEW $DB
-}
-
-#####
-# append process is:
-# 1. copy DB_PATH/db to DB_PATH/db.new
-# 2. appends contents of newdata to DB_PATH/db.new
-#
-kamailio_append()  # parms: <db> <newdata> [DB_PATH]
-{
-	if [ $# -lt 3  ]; then
-		echo  "kamailio_append parms: <db> <newdata> [DB_PATH]"
-		exit 1
-	fi
-	
-	DB=$3/$1
-	DBNEW=$DB.new
-	if [ -f $DB.new ] ; then
-		rm $DB.new
-	fi
-	
-	cp $DB $DBNEW
-# echo "$LOAD_CMD -T -t hash -f $2 -h $3 $1.new"
-	$LOAD_CMD -T -t hash -f $2 -h $3 $1.new
-	
-# echo "$LOAD_CMD -r fileid -h $3 $1.new"
-  	$LOAD_CMD -r fileid -h $3 $1.new
-}
-
-
-#####
-# newappend process is:
-# 1. create a new temp DBENV in /tmp/sc-<processID>
-# 2. appends contents of newdata to /tmp/sc-<processID>/db
-# 3. move /tmp/sc-<processID>/db over to DB_PATH/db.new
-# 4. delete temp DBENV dir /tmp/sc-<processID>
-#
-kamailio_newappend()  # parms: <db> <newdata> [DB_PATH]
-{
-	if [ $# -lt 3  ]; then
-		echo  "kamailio_append parms: <db> <newdata> [DB_PATH]"
-		exit 1
-	fi
-	
-	DB=$3/$1
-	DBNEW=$DB.new
-	if [ -f $DBNEW ] ; then
-		rm $DBNEW
-	fi
-	TMPENV=/tmp/sc-$$
-	kamailio_create $TMPENV
-	cd $OLDPWD
-	$LOAD_CMD -T -t hash -f $2 -h $TMPENV $1
-	mv $TMPENV/$1 $DBNEW
-	rm -rf $TMPENV
-}
-
-
-# cat all rows to STDOUT
-kamailio_cat()  # pars: <database name> <DB_PATH>
-{
-	if [ $# -ne 2 ] ; then
-		echo  "kamailio_cat params <db> [DB_PATH]"
-		exit 1
-	fi
-	
-	$DUMP_CMD -p -h $2 $1
-}
-
-kamailio_drop()  # pars:  <DB_PATH>
-{
-	if [ $# -ne 1 ] ; then
-		echo "kamailio_drop function takes one param"
-		exit 1
-	fi
-	
-	if [ ! -d $1 ] ; then
-		echo "Directory does not exist:  $1"
-	fi
-	
-	minfo "Dropping Berkeley DB database at: $1 ..."
-	
-	# core
-	if [ -f $1/version ] ; then
-		for TABLE in $STANDARD_TABLES; do
-		    mdbg "Dropping core table: $TABLE"
-		    rm -f $1/$TABLE
-		done
-	fi
-	
-	# presence
-	if [ -f $1/presentity ] ; then
-		for TABLE in $PRESENCE_TABLES; do
-		    mdbg "Dropping presence table: $TABLE"
-		    rm -f $1/$TABLE
-		done
-	fi
-	
-	# extra tables
-	if [ -f $1/cpl ] ; then
-		for TABLE in $EXTRA_TABLES; do
-		    mdbg "Dropping extra table: $TABLE"
-		    rm -f $1/$TABLE
-		done
-	fi
-
-	# delete db files and directory
-	rm -rf $1/__db.001
-	rm -rf $1/__db.002
-	rm -rf $1/__db.003
-	rm -rf $1/__db.004
-	rmdir $1
-}
-
-
-kamailio_create() # pars: <DB_PATH>
-{
-	if [ $# -ne 1 ] ; then
-		echo "kamailio_create param [DB_PATH]"
-		exit 1
-	fi
-	
-	DB_PATH=$1
-	if [ ! -d $1 ] ; then
-		minfo "creating Berkeley DB database at: [$1]"
-		mkdir -p $DB_PATH
-	fi
-	
-	for TABLE in $STANDARD_TABLES; do
-	    mdbg "Creating standard table: $TABLE"
-	    $LOAD_CMD -T -t hash -f $DB_SCHEMA/$TABLE -h $1 $TABLE
-	    if [ $? -ne 0 ] ; then
-		merr "Creating standard tables failed!"
-		exit 1
-	    fi
-	done
-
-	get_answer $INSTALL_PRESENCE_TABLES "Install presence related tables? (y/n): "
-	if [ "$ANSWER" = "y" ]; then
-		presence_create $1
-	fi
-
-	get_answer $INSTALL_EXTRA_TABLES "Install tables for $EXTRA_MODULES? (y/n): "
-	if [ "$ANSWER" = "y" ]; then
-		extra_create $1
-	fi
-
-} # kamailio_create
-
-
-presence_create() # pars: <DB_PATH>
-{
-	if [ $# -ne 1 ] ; then
-		merr "presence_create param [DB_PATH]"
-		exit 1
-	fi
-	
-	DB_PATH=$1
-	if [ ! -d $1 ] ; then
-		# Assert: the directory should already exist
-		merr "BerkeleyDB directory does not exist at: [$1]"
-		exit 1
-	fi
-
-	if [ ! -f $1/version ] ; then
-		# Assert: directory should already contain table 'version'
-		merr "BerkeleyDB directory does not have VERSION table at: [$1]"
-		exit 1
-	fi
-	
-	for TABLE in $PRESENCE_TABLES; do
-	    mdbg "Creating presence table: $TABLE"
-	    $LOAD_CMD -T -t hash -f $DB_SCHEMA/$TABLE -h $1 $TABLE
-	    if [ $? -ne 0 ] ; then
-		merr "Creating presence tables failed!"
-		exit 1
-	    fi
-	done
-	
-}  # end presence_create
-
-
-extra_create() # pars: <DB_PATH>
-{
-
-	if [ $# -ne 1 ] ; then
-		merr "extra_create function takes one param (DB_PATH)"
-		exit 1
-	fi
-	
-	DB_PATH=$1
-	if [ ! -d $1 ] ; then
-		# Assert: the directory should already exist
-		merr "BerkeleyDB directory does not exist at: [$1]"
-		exit 1
-	fi
-
-	if [ ! -f $1/version ] ; then
-		# Assert: directory should already contain table 'version'
-		merr "BerkeleyDB directory does not have VERSION table at: [$1]"
-		exit 1
-	fi
-	
-	for TABLE in $EXTRA_TABLES; do
-	    mdbg "Creating extra table: $TABLE"
-	    $LOAD_CMD -T -t hash -f $DB_SCHEMA/$TABLE -h $1 $TABLE
-	    if [ $? -ne 0 ] ; then
-		merr "Creating extra tables failed!"
-		exit 1
-	    fi
-	done
-	
-}  # end extra_create
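Before this commit, the script above was reached through the (also removed) bdb|db_berkeley case of kamdbctl; based on the berkeley_usage text it defines, its export/import migration cycle was driven roughly like this (dump directory illustrative):

    kamdbctl bdb export /tmp/kamailio-bdb-dump    # dump each table to a plain-text file
    kamdbctl bdb import /tmp/kamailio-bdb-dump    # recreate the tables from those files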