#
# Script for maintaining Kamailio Berkeley DB tables
# Copyright (C) 2007 Cisco Systems
#
# This file is part of Kamailio, a free SIP server.
#
# Kamailio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Kamailio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
#

# constants
PATH=$PATH:/usr/local/BerkeleyDB.4.6/bin
DELIM="|"
BACKUP_CMD="tar czvf "
RESTORE_CMD="tar xzvf "

# Berkeley DB utility program that writes out a db file to plain text.
# Small hack to autodetect the db dump command; Debian prefixes the version.

for CMD in db_dump db4.4_dump db4.5_dump db4.6_dump db5.0_dump db5.1_dump ; do
	which $CMD > /dev/null
	if [ $? -eq 0 ] ; then
		# the last match wins, so versioned binaries override plain db_dump
		DUMP_CMD="$CMD"
	fi
done


# Berkeley DB utility program that imports data from a plain text file.
# Small hack to autodetect the db load command; Debian prefixes the version.

for CMD in db_load db4.4_load db4.5_load db4.6_load db5.0_load db5.1_load ; do
	which $CMD > /dev/null
	if [ $? -eq 0 ] ; then
		# the last match wins, so versioned binaries override plain db_load
		LOAD_CMD="$CMD"
	fi
done
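
# If neither probe found anything, DUMP_CMD/LOAD_CMD stay empty and every
# later operation fails mid-way with a confusing error. A minimal early
# guard (a sketch added here; not part of the original script):
if [ -z "$DUMP_CMD" ] || [ -z "$LOAD_CMD" ] ; then
	echo "ERROR: no db_dump/db_load utility found in PATH" >&2
	exit 1
fi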

# path to the database schemas
DATA_DIR="/usr/local/share/kamailio"
if [ -d "$DATA_DIR/db_berkeley/kamailio" ] ; then
	DB_SCHEMA="$DATA_DIR/db_berkeley/kamailio"
else
	DB_SCHEMA="./db_berkeley/kamailio"
fi

# path to the db_berkeley database
if [ -z "$DB_PATH" ]; then
	DB_PATH="/usr/local/etc/kamailio/db_berkeley"
fi

berkeley_usage()
{
COMMAND=`basename $0`
cat <<EOF
Script for maintaining Kamailio Berkeley DB tables
  $COMMAND list                        (lists the underlying db files in DB_PATH)
  $COMMAND cat <db>                    (db_dump the underlying db file to STDOUT)
  $COMMAND swap <db>                   (installs db.new: db -> db.old; db.new -> db)
  $COMMAND append <db> <datafile>      (appends data to an existing db; output DB_PATH/db.new)
  $COMMAND newappend <db> <datafile>   (appends data to a new instance of db; output DB_PATH/db.new)
  $COMMAND export <dump_dir>           (exports table data to plain-text files in dump_dir)
  $COMMAND import <dump_dir>           (imports plain-text table data and creates new db tables in DB_PATH)
EOF
} #usage


#
# Main entry point: dispatch <op> and its arguments to the matching helper.
#
kamailio_berkeley() # params: <op> <arg1> <arg2>
{
	case $1 in
	list|ls)
		ls -l $DB_PATH
		exit $?
		;;
	cat)
		shift
		kamailio_cat $1 $DB_PATH
		exit $?
		;;

	swap)
		shift
		kamailio_swap $1 $DB_PATH
		exit $?
		;;

	append)
		shift
		kamailio_append $1 $2 $DB_PATH
		exit $?
		;;

	newappend)
		shift
		kamailio_newappend $1 $2 $DB_PATH
		exit $?
		;;

	export)
		shift
		kamailio_export $1 $DB_PATH
		exit $?
		;;

	import)
		shift
		kamailio_import $1 $DB_PATH
		exit $?
		;;

	*)
		berkeley_usage
		exit 1
		;;
	esac
}
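
# Example (illustrative): once this file is sourced alongside its helper
# functions, the dispatcher can be driven directly, e.g. to list the db
# files and dump the schema version table:
#   kamailio_berkeley list
#   kamailio_berkeley cat version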


##
# EXPORT existing data to plain-text files in DUMP_DIR
# e.g. DB_PATH/version ---> DUMP_DIR/version.txt
#
# Export is used as part of a DB migration process to another
# major version of Berkeley DB.
kamailio_export() # params: <DUMP_DIR> [DB_PATH]
{
	if [ $# -lt 2 ]; then
		echo "kamailio_export params: <DUMP_DIR> [DB_PATH]"
		exit 1
	fi

	# Assert: the DB_PATH directory should already exist
	if [ ! -d $2 ] ; then
		merr "BerkeleyDB directory does not exist at: [$2]"
		exit 1
	fi

	# Assert: DB_PATH directory should already contain table 'version'
	if [ ! -f $2/version ] ; then
		merr "BerkeleyDB directory does not have VERSION table at: [$2]"
		exit 1
	fi

	# Create dir at <DUMP_DIR> to store the exported data
	if [ ! -d $1 ] ; then
		minfo "creating DUMP_DIR at: [$1]"
		mkdir -p $1
	else
		mdbg "Cleaning out DUMP_DIR to get ready for new data"
		rm -rf $1/*
	fi

	# DUMP_CMD will result in something like this:
	#
	# VERSION=3
	# format=print
	# type=hash
	# h_nelem=2
	# db_pagesize=4096
	# HEADER=END
	#  METADATA_COLUMNS
	#  callid(str) method(str) from_tag(str) to_tag(str) sip_code(str) sip_reason(str) time(datetime)
	#  METADATA_KEY
	#  0
	# DATA=END
	#
	# However, we are only interested in the indented stuff between
	# 'HEADER=END' and 'DATA=END', as everything else is DB instance
	# specific. That is, we are interested in this part:
	#
	# METADATA_COLUMNS
	# callid(str) method(str) from_tag(str) to_tag(str) sip_code(str) sip_reason(str) time(datetime)
	# METADATA_KEY
	# 0
	#
	# The following Perl filter does this processing:
	#
	# perl -pe 's/^\w.*// ; s/^\s(.*)/$1/'
	# Dump the STANDARD tables to plain-text files in DUMP_DIR
	for TABLE in $STANDARD_TABLES; do
		if [ -f $2/$TABLE ] ; then
			mdbg "Exporting standard table: $TABLE"
			$DUMP_CMD -p -h $2 $TABLE | perl -pe 's/^\w.*// ; s/^\s(.*)/$1/' > $1/$TABLE.txt

			# Check the return code to make sure the export worked
			if [ $? -ne 0 ] ; then
				merr "Export of standard table failed [$TABLE]"
				# There was a problem, but it is not something we can
				# handle here; we can deal with it at import time.
			fi
		else
			mwarn "Table not found: [$TABLE]"
		fi
	done

	# Dump the PRESENCE tables to plain-text files in DUMP_DIR
	for TABLE in $PRESENCE_TABLES; do
		if [ -f $2/$TABLE ] ; then
			mdbg "Exporting presence table: $TABLE"
			$DUMP_CMD -p -h $2 $TABLE | perl -pe 's/^\w.*// ; s/^\s(.*)/$1/' > $1/$TABLE.txt
			if [ $? -ne 0 ] ; then
				merr "Export of presence table failed [$TABLE]"
			fi
		else
			mwarn "Table not found: [$TABLE]"
		fi
	done

	# Dump the EXTRA tables to plain-text files in DUMP_DIR
	for TABLE in $EXTRA_TABLES; do
		if [ -f $2/$TABLE ] ; then
			mdbg "Exporting extra table: $TABLE"
			$DUMP_CMD -p -h $2 $TABLE | perl -pe 's/^\w.*// ; s/^\s(.*)/$1/' > $1/$TABLE.txt
			if [ $? -ne 0 ] ; then
				merr "Export of extra table failed [$TABLE]"
			fi
		else
			mwarn "Table not found: [$TABLE]"
		fi
	done

	mdbg "All tables are now exported to DUMP_DIR: [$1]"
	return 0
}
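
# A typical major-version migration with the functions above (the dump
# directory name is illustrative):
#   1. with the OLD Berkeley DB utilities first in PATH:
#        kamailio_berkeley export /tmp/kamailio_dump
#   2. put the NEW Berkeley DB utilities first in PATH
#   3. reload the data into freshly created tables:
#        kamailio_berkeley import /tmp/kamailio_dump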


##
# IMPORT plain-text files from DUMP_DIR into DB_PATH
# e.g. DUMP_DIR/version.txt --> DB_PATH/version
#
# Import is used as part of a DB migration to another major version of
# Berkeley DB. Note: this will overwrite anything in DB_PATH.
kamailio_import() # params: <DUMP_DIR> [DB_PATH]
{
	if [ $# -lt 2 ]; then
		echo "kamailio_import params: <DUMP_DIR> [DB_PATH]"
		exit 1
	fi

	# Assert: DUMP_DIR (source dir) already exists
	if [ ! -d $1 ] ; then
		merr "Berkeley DUMP_DIR directory does not exist: [$1]"
		exit 1
	fi

	# Assert: DUMP_DIR directory should already contain 'version.txt'
	if [ ! -f $1/version.txt ] ; then
		merr "DUMP_DIR directory does not have version.txt data at: [$1]"
		exit 1
	fi

	# Assert: destination dir exists [DB_PATH]
	if [ ! -d $2 ] ; then
		mdbg "Berkeley DB_PATH directory is being created: [$2]"
		mkdir -p $2
	else
		# Wipe out the destination dir to make room for new data
		mwarn "Berkeley DB_PATH directory is being purged at: [$2]"
		rm -rf $2/*
	fi

	# Create STANDARD tables from plain-text files in DUMP_DIR
	for TABLE in $STANDARD_TABLES; do
		if [ -s $1/$TABLE.txt ] ; then
			mdbg "Importing standard table: $TABLE"
			$LOAD_CMD -T -t hash -f $1/$TABLE.txt -h $2 $TABLE

			# Check the return code to make sure the import worked
			if [ $? -ne 0 ] ; then
				merr "Import of standard table failed [$TABLE.txt]"
				merr "Create this missing table with kambdb_recover."
			fi
		else
			merr "Import data not found for table: [$TABLE.txt]"
			merr "Create this missing table with kambdb_recover."
		fi
	done


	# Create PRESENCE tables from plain-text files in DUMP_DIR
	for TABLE in $PRESENCE_TABLES; do
		if [ -s $1/$TABLE.txt ] ; then
			mdbg "Importing presence table: $TABLE"
			$LOAD_CMD -T -t hash -f $1/$TABLE.txt -h $2 $TABLE

			# Check the return code to make sure the import worked
			if [ $? -ne 0 ] ; then
				merr "Import of presence table failed [$TABLE.txt]"
				merr "Create this missing table with kambdb_recover."
			fi
		else
			mwarn "Import data not found for table: [$TABLE.txt]"
		fi
	done

	# Create EXTRA tables from plain-text files in DUMP_DIR
	for TABLE in $EXTRA_TABLES; do
		if [ -s $1/$TABLE.txt ] ; then
			mdbg "Importing extra table: $TABLE"
			$LOAD_CMD -T -t hash -f $1/$TABLE.txt -h $2 $TABLE

			# Check the return code to make sure the import worked
			if [ $? -ne 0 ] ; then
				merr "Import of extra table failed [$TABLE.txt]"
				merr "Create this missing table with kambdb_recover."
			fi
		else
			mwarn "Import data not found for table: [$TABLE.txt]"
		fi
	done

	mdbg "All tables are now imported to DB_PATH: [$2]"
	return 0
}


# Activate a staged db: keep a backup as db.old, then move db.new into place.
kamailio_swap() # params: <db> [DB_PATH]
{
	if [ $# -lt 2 ]; then
		echo "kamailio_swap params: <db> [DB_PATH]"
		exit 1
	fi

	DB=$2/$1
	DBNEW=$DB.new
	DBOLD=$DB.old
	cp $DB $DBOLD
	mv $DBNEW $DB
}

#####
# append process is:
# 1. copy DB_PATH/db to DB_PATH/db.new
# 2. append contents of newdata to DB_PATH/db.new
#
kamailio_append() # params: <db> <newdata> [DB_PATH]
{
	if [ $# -lt 3 ]; then
		echo "kamailio_append params: <db> <newdata> [DB_PATH]"
		exit 1
	fi

	DB=$3/$1
	DBNEW=$DB.new
	if [ -f $DBNEW ] ; then
		rm $DBNEW
	fi

	cp $DB $DBNEW
#	echo "$LOAD_CMD -T -t hash -f $2 -h $3 $1.new"
	$LOAD_CMD -T -t hash -f $2 -h $3 $1.new

	# Reset the copy's file id so the DB environment does not confuse
	# it with the original database file.
#	echo "$LOAD_CMD -r fileid -h $3 $1.new"
	$LOAD_CMD -r fileid -h $3 $1.new
}


#####
# newappend process is:
# 1. create a new temp DBENV in /tmp/sc-<processID>
# 2. append contents of newdata to /tmp/sc-<processID>/db
# 3. move /tmp/sc-<processID>/db over to DB_PATH/db.new
# 4. delete temp DBENV dir /tmp/sc-<processID>
#
kamailio_newappend() # params: <db> <newdata> [DB_PATH]
{
	if [ $# -lt 3 ]; then
		echo "kamailio_newappend params: <db> <newdata> [DB_PATH]"
		exit 1
	fi

	DB=$3/$1
	DBNEW=$DB.new
	if [ -f $DBNEW ] ; then
		rm $DBNEW
	fi
	TMPENV=/tmp/sc-$$
	kamailio_create $TMPENV
	cd $OLDPWD
	$LOAD_CMD -T -t hash -f $2 -h $TMPENV $1
	mv $TMPENV/$1 $DBNEW
	rm -rf $TMPENV
}
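
# Typical staging workflow (the data file name is illustrative): build
# db.new from a plain-text data file, then activate it with swap:
#   kamailio_berkeley newappend subscriber /tmp/new_subscribers.txt
#   kamailio_berkeley swap subscriber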


# cat all rows to STDOUT
kamailio_cat() # params: <db> [DB_PATH]
{
	if [ $# -ne 2 ] ; then
		echo "kamailio_cat params: <db> [DB_PATH]"
		exit 1
	fi

	$DUMP_CMD -p -h $2 $1
}

kamailio_drop() # params: <DB_PATH>
{
	if [ $# -ne 1 ] ; then
		echo "kamailio_drop params: <DB_PATH>"
		exit 1
	fi

	if [ ! -d $1 ] ; then
		echo "Directory does not exist: $1"
		exit 1
	fi

	minfo "Dropping Berkeley DB database at: $1 ..."

	# core
	if [ -f $1/version ] ; then
		for TABLE in $STANDARD_TABLES; do
			mdbg "Dropping core table: $TABLE"
			rm -f $1/$TABLE
		done
	fi

	# presence
	if [ -f $1/presentity ] ; then
		for TABLE in $PRESENCE_TABLES; do
			mdbg "Dropping presence table: $TABLE"
			rm -f $1/$TABLE
		done
	fi

	# extra tables
	if [ -f $1/cpl ] ; then
		for TABLE in $EXTRA_TABLES; do
			mdbg "Dropping extra table: $TABLE"
			rm -f $1/$TABLE
		done
	fi

	# delete DB environment region files and the directory itself
	rm -rf $1/__db.001
	rm -rf $1/__db.002
	rm -rf $1/__db.003
	rm -rf $1/__db.004
	rmdir $1
}


kamailio_create() # params: <DB_PATH>
{
	if [ $# -ne 1 ] ; then
		echo "kamailio_create params: <DB_PATH>"
		exit 1
	fi

	DB_PATH=$1
	if [ ! -d $1 ] ; then
		minfo "creating Berkeley DB database at: [$1]"
		mkdir -p $DB_PATH
	fi

	for TABLE in $STANDARD_TABLES; do
		mdbg "Creating standard table: $TABLE"
		$LOAD_CMD -T -t hash -f $DB_SCHEMA/$TABLE -h $1 $TABLE
		if [ $? -ne 0 ] ; then
			merr "Creating standard tables failed!"
			exit 1
		fi
	done

	get_answer $INSTALL_PRESENCE_TABLES "Install presence related tables? (y/n): "
	if [ "$ANSWER" = "y" ]; then
		presence_create $1
	fi

	get_answer $INSTALL_EXTRA_TABLES "Install tables for $EXTRA_MODULES? (y/n): "
	if [ "$ANSWER" = "y" ]; then
		extra_create $1
	fi

} # kamailio_create
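
# Example (illustrative, assuming the stock kamdbctl wrapper and that its
# get_answer helper accepts a pre-seeded answer through these variables
# instead of prompting): create the database non-interactively with the
# presence tables but without the extra tables:
#   INSTALL_PRESENCE_TABLES=y INSTALL_EXTRA_TABLES=n kamdbctl create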


presence_create() # params: <DB_PATH>
{
	if [ $# -ne 1 ] ; then
		merr "presence_create params: <DB_PATH>"
		exit 1
	fi

	DB_PATH=$1
	if [ ! -d $1 ] ; then
		# Assert: the directory should already exist
		merr "BerkeleyDB directory does not exist at: [$1]"
		exit 1
	fi

	if [ ! -f $1/version ] ; then
		# Assert: directory should already contain table 'version'
		merr "BerkeleyDB directory does not have VERSION table at: [$1]"
		exit 1
	fi

	for TABLE in $PRESENCE_TABLES; do
		mdbg "Creating presence table: $TABLE"
		$LOAD_CMD -T -t hash -f $DB_SCHEMA/$TABLE -h $1 $TABLE
		if [ $? -ne 0 ] ; then
			merr "Creating presence tables failed!"
			exit 1
		fi
	done

} # end presence_create


extra_create() # params: <DB_PATH>
{
	if [ $# -ne 1 ] ; then
		merr "extra_create params: <DB_PATH>"
		exit 1
	fi

	DB_PATH=$1
	if [ ! -d $1 ] ; then
		# Assert: the directory should already exist
		merr "BerkeleyDB directory does not exist at: [$1]"
		exit 1
	fi

	if [ ! -f $1/version ] ; then
		# Assert: directory should already contain table 'version'
		merr "BerkeleyDB directory does not have VERSION table at: [$1]"
		exit 1
	fi

	for TABLE in $EXTRA_TABLES; do
		mdbg "Creating extra table: $TABLE"
		$LOAD_CMD -T -t hash -f $DB_SCHEMA/$TABLE -h $1 $TABLE
		if [ $? -ne 0 ] ; then
			merr "Creating extra tables failed!"
			exit 1
		fi
	done

} # end extra_create