kamdbctl.db_berkeley

#
# Script for maintaining Kamailio Berkeley DB tables
# Copyright (C) 2007 Cisco Systems
#
# This file is part of Kamailio, a free SIP server.
#
# Kamailio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Kamailio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
#

# constants
PATH=$PATH:/usr/local/BerkeleyDB.4.6/bin
DELIM="|"
BACKUP_CMD="tar czvf "
RESTORE_CMD="tar xzvf "

# Berkeley DB utility that dumps a db file to plain text.
# Small hack to autodetect the dump command; Debian prefixes the version.
which db_dump > /dev/null
ret=$?
if [ $ret -eq 0 ] ; then
    DUMP_CMD="db_dump"
fi
which db4.4_dump > /dev/null
ret=$?
if [ $ret -eq 0 ] ; then
    DUMP_CMD="db4.4_dump"
fi
which db4.5_dump > /dev/null
ret=$?
if [ $ret -eq 0 ] ; then
    DUMP_CMD="db4.5_dump"
fi
which db4.6_dump > /dev/null
ret=$?
if [ $ret -eq 0 ] ; then
    DUMP_CMD="db4.6_dump"
fi
which db5.0_dump > /dev/null
ret=$?
if [ $ret -eq 0 ] ; then
    DUMP_CMD="db5.0_dump"
fi
which db5.1_dump > /dev/null
ret=$?
if [ $ret -eq 0 ] ; then
    DUMP_CMD="db5.1_dump"
fi

# Berkeley DB utility that imports data from a plain text file.
# Small hack to autodetect the load command; Debian prefixes the version.
which db_load > /dev/null
ret=$?
if [ $ret -eq 0 ] ; then
    LOAD_CMD="db_load"
fi
which db4.4_load > /dev/null
ret=$?
if [ $ret -eq 0 ] ; then
    LOAD_CMD="db4.4_load"
fi
which db4.5_load > /dev/null
ret=$?
if [ $ret -eq 0 ] ; then
    LOAD_CMD="db4.5_load"
fi
which db4.6_load > /dev/null
ret=$?
if [ $ret -eq 0 ] ; then
    LOAD_CMD="db4.6_load"
fi
which db5.0_load > /dev/null
ret=$?
if [ $ret -eq 0 ] ; then
    LOAD_CMD="db5.0_load"
fi
which db5.1_load > /dev/null
ret=$?
if [ $ret -eq 0 ] ; then
    LOAD_CMD="db5.1_load"
fi
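
# Note: if none of the dump/load utilities above is found in PATH, DUMP_CMD
# and LOAD_CMD stay empty and every dump/load call below fails with a
# confusing "command not found". A minimal guard (a sketch, not part of the
# original script) could be added here:
#
#   if [ -z "$DUMP_CMD" ] || [ -z "$LOAD_CMD" ] ; then
#       echo "ERROR: no Berkeley DB dump/load utilities found in PATH" >&2
#       exit 1
#   fi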

# path to the database schemas
DATA_DIR="/usr/local/share/kamailio"
if [ -d "$DATA_DIR/db_berkeley/kamailio" ] ; then
    DB_SCHEMA="$DATA_DIR/db_berkeley/kamailio"
else
    DB_SCHEMA="./db_berkeley/kamailio"
fi

# path to the db_berkeley database
if [ -z "$DB_PATH" ]; then
    DB_PATH="/usr/local/etc/kamailio/db_berkeley"
fi
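
# DB_PATH is only defaulted when it is not already set, so the target
# directory can be overridden from the environment of the calling script
# (normally the kamdbctl wrapper that sources this file). For example
# (hypothetical path):
#
#   DB_PATH=/var/db/kamailio/db_berkeley kamdbctl create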

berkeley_usage()
{
    COMMAND=`basename $0`
    cat <<EOF
Script for maintaining Kamailio Berkeley DB tables
  $COMMAND list       (lists the underlying db files in DB_PATH)
  $COMMAND cat <db>   (db_dump the underlying db file to STDOUT)
  $COMMAND swap <db>  (installs db.new: db -> db.old; db.new -> db)
  $COMMAND append <db> <datafile>     (appends data to an existing db; output DB_PATH/db.new)
  $COMMAND newappend <db> <datafile>  (appends data to a new instance of db; output DB_PATH/db.new)
  $COMMAND export <dump_dir>  (exports table data to plain-text files in dump_dir)
  $COMMAND import <dump_dir>  (imports plain-text table data and creates new db tables in DB_PATH)
EOF
} # usage
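
# Example invocations (assuming this file is sourced by the kamdbctl wrapper,
# so the command name is kamdbctl; the dump directory is hypothetical):
#
#   kamdbctl list
#   kamdbctl cat version
#   kamdbctl export /tmp/kamailio_bdb_dump
#   kamdbctl import /tmp/kamailio_bdb_dump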
#
#
#
kamailio_berkeley()  # parms: <op> <arg1> <arg2>
{
    case $1 in
        list|ls)
            ls -l $DB_PATH
            exit $?
            ;;
        cat)
            shift
            kamailio_cat $1 $DB_PATH
            exit $?
            ;;
        swap)
            shift
            kamailio_swap $1 $DB_PATH
            exit $?
            ;;
        append)
            shift
            kamailio_append $1 $2 $DB_PATH
            exit $?
            ;;
        newappend)
            shift
            kamailio_newappend $1 $2 $DB_PATH
            exit $?
            ;;
        export)
            shift
            kamailio_export $1 $DB_PATH
            exit $?
            ;;
        import)
            shift
            kamailio_import $1 $DB_PATH
            exit $?
            ;;
        *)
            berkeley_usage
            exit 1
            ;;
    esac
}

##
# EXPORT existing data to plain-text files in DUMP_DIR,
# e.g. DB_PATH/version ---> DUMP_DIR/version.txt
#
# Export is used as part of a DB migration process to another
# major version of Berkeley DB.
kamailio_export()  # parms: <DUMP_DIR> [DB_PATH]
{
    if [ $# -lt 2 ]; then
        echo "kamailio_export parms: <DUMP_DIR> [DB_PATH]"
        exit 1
    fi

    # Assert: the DB_PATH directory should already exist
    if [ ! -d $2 ] ; then
        merr "BerkeleyDB directory does not exist at: [$2]"
        exit 1
    fi

    # Assert: the DB_PATH directory should already contain table 'version'
    if [ ! -f $2/version ] ; then
        merr "BerkeleyDB directory does not have VERSION table at: [$2]"
        exit 1
    fi

    # Create dir at <DUMP_DIR> to store the exported data
    if [ ! -d $1 ] ; then
        minfo "creating DUMP_DIR at: [$1]"
        mkdir -p $1
    else
        mdbg "Cleaning out DUMP_DIR to get ready for new data"
        rm -rf $1/*
    fi

    # DUMP_CMD will produce output like this:
    #
    # VERSION=3
    # format=print
    # type=hash
    # h_nelem=2
    # db_pagesize=4096
    # HEADER=END
    #  METADATA_COLUMNS
    #  callid(str) method(str) from_tag(str) to_tag(str) sip_code(str) sip_reason(str) time(datetime)
    #  METADATA_KEY
    #  0
    # DATA=END
    #
    # However, we are only interested in the indented lines between
    # 'HEADER=END' and 'DATA=END'; everything else is DB instance specific.
    # That is, we are interested in this part:
    #
    #  METADATA_COLUMNS
    #  callid(str) method(str) from_tag(str) to_tag(str) sip_code(str) sip_reason(str) time(datetime)
    #  METADATA_KEY
    #  0
    #
    # The following Perl filter does this processing:
    #
    #   perl -pe 's/^\w.*// ; s/^\s(.*)/$1/'
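    #
    # In other words: header lines that start with a word character (such as
    # 'type=hash' or 'db_pagesize=4096') are blanked out, while the data
    # lines, which db_dump -p prints with a leading space, are kept with that
    # leading space stripped.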

    # Dump the STANDARD tables to plain-text files in DUMP_DIR
    for TABLE in $STANDARD_TABLES; do
        if [ -f $2/$TABLE ] ; then
            mdbg "Exporting standard table: $TABLE"
            $DUMP_CMD -p -h $2 $TABLE | perl -pe 's/^\w.*// ; s/^\s(.*)/$1/' > $1/$TABLE.txt
            # Check return code to make sure the export worked ok
            if [ $? -ne 0 ] ; then
                merr "Export of standard table failed [$TABLE]"
                # there was a problem, but it is not something we can handle
                # here; we can deal with this at import time.
            fi
        else
            mwarn "Table not found: [$TABLE]"
        fi
    done

    # Dump the PRESENCE tables to plain-text files in DUMP_DIR
    for TABLE in $PRESENCE_TABLES; do
        if [ -f $2/$TABLE ] ; then
            mdbg "Exporting presence table: $TABLE"
            $DUMP_CMD -p -h $2 $TABLE | perl -pe 's/^\w.*// ; s/^\s(.*)/$1/' > $1/$TABLE.txt
            if [ $? -ne 0 ] ; then
                merr "Export of presence table failed [$TABLE]"
            fi
        else
            mwarn "Table not found: [$TABLE]"
        fi
    done

    # Dump the EXTRA tables to plain-text files in DUMP_DIR
    for TABLE in $EXTRA_TABLES; do
        if [ -f $2/$TABLE ] ; then
            mdbg "Exporting extra table: $TABLE"
            $DUMP_CMD -p -h $2 $TABLE | perl -pe 's/^\w.*// ; s/^\s(.*)/$1/' > $1/$TABLE.txt
            if [ $? -ne 0 ] ; then
                merr "Export of extra table failed [$TABLE]"
            fi
        else
            mwarn "Table not found: [$TABLE]"
        fi
    done

    mdbg "All tables are now exported to DUMP_DIR: [$1]"
    return 0
}
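
# A typical major-version migration, as described above, uses export and
# import back to back (the dump directory name is hypothetical):
#
#   1. with the old Berkeley DB utilities:  kamdbctl export /tmp/bdb_dump
#   2. upgrade the Berkeley DB library and its db*_dump / db*_load utilities
#   3. with the new utilities:              kamdbctl import /tmp/bdb_dump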

##
# IMPORT existing plain-text files from DUMP_DIR to DB_PATH,
# e.g. DUMP_DIR/version.txt --> DB_PATH/version
#
# Import is used as part of a DB migration to another major version of
# Berkeley DB. This will overwrite anything in DB_PATH.
kamailio_import()  # parms: <DUMP_DIR> [DB_PATH]
{
    if [ $# -lt 2 ]; then
        echo "kamailio_import parms: <DUMP_DIR> [DB_PATH]"
        exit 1
    fi

    # Assert: DUMP_DIR (source dir) already exists
    if [ ! -d $1 ] ; then
        merr "Berkeley DUMP_DIR directory does not exist: [$1]"
        exit 1
    fi

    # Assert: the DUMP_DIR directory should already contain 'version.txt'
    if [ ! -f $1/version.txt ] ; then
        merr "DUMP_DIR directory does not have version.txt data at: [$1]"
        exit 1
    fi

    # Assert: destination dir exists [DB_PATH]
    if [ ! -d $2 ] ; then
        mdbg "Berkeley DB_PATH directory is being created: [$2]"
        mkdir -p $2
    else
        # Wipe out the destination dir to make room for new data
        mwarn "Berkeley DB_PATH directory is being purged at: [$2]"
        rm -rf $2/*
    fi

    # Create STANDARD tables from plain-text files in DUMP_DIR
    for TABLE in $STANDARD_TABLES; do
        if [ -s $1/$TABLE.txt ] ; then
            mdbg "Importing standard table: $TABLE"
            $LOAD_CMD -T -t hash -f $1/$TABLE.txt -h $2 $TABLE
            # Check return code to make sure the import worked ok
            if [ $? -ne 0 ] ; then
                merr "Import of standard table failed [$TABLE.txt]"
                merr "Create this missing table with kambdb_recover."
            fi
        else
            merr "Import data not found for table: [$TABLE.txt]"
            merr "Create this missing table with kambdb_recover."
        fi
    done

    # Create PRESENCE tables from plain-text files in DUMP_DIR
    for TABLE in $PRESENCE_TABLES; do
        if [ -s $1/$TABLE.txt ] ; then
            mdbg "Importing presence table: $TABLE"
            $LOAD_CMD -T -t hash -f $1/$TABLE.txt -h $2 $TABLE
            # Check return code to make sure the import worked ok
            if [ $? -ne 0 ] ; then
                merr "Import of presence table failed [$TABLE.txt]"
                merr "Create this missing table with kambdb_recover."
            fi
        else
            mwarn "Import data not found for table: [$TABLE.txt]"
        fi
    done

    # Create EXTRA tables from plain-text files in DUMP_DIR
    for TABLE in $EXTRA_TABLES; do
        if [ -s $1/$TABLE.txt ] ; then
            mdbg "Importing extra table: $TABLE"
            $LOAD_CMD -T -t hash -f $1/$TABLE.txt -h $2 $TABLE
            # Check return code to make sure the import worked ok
            if [ $? -ne 0 ] ; then
                merr "Import of extra table failed [$TABLE.txt]"
                merr "Create this missing table with kambdb_recover."
            fi
        else
            mwarn "Import data not found for table: [$TABLE.txt]"
        fi
    done

    mdbg "All tables are now imported to DB_PATH: [$2]"
    return 0
}

kamailio_swap()  # parms: <db> [DB_PATH]
{
    if [ $# -lt 2 ]; then
        echo "kamailio_swap parms: <db> [DB_PATH]"
        exit 1
    fi
    DB=$2/$1
    DBNEW=$DB.new
    DBOLD=$DB.old
    cp $DB $DBOLD
    mv $DBNEW $DB
}

#####
# The append process is:
# 1. copy DB_PATH/db to DB_PATH/db.new
# 2. append the contents of newdata to DB_PATH/db.new
#
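# For example (hypothetical data file), first append into the .new copy and
# then install it with swap:
#
#   kamdbctl append subscriber /tmp/new_subscribers.txt
#   kamdbctl swap subscriber
#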
kamailio_append()  # parms: <db> <newdata> [DB_PATH]
{
    if [ $# -lt 3 ]; then
        echo "kamailio_append parms: <db> <newdata> [DB_PATH]"
        exit 1
    fi
    DB=$3/$1
    DBNEW=$DB.new
    if [ -f $DB.new ] ; then
        rm $DB.new
    fi
    cp $DB $DBNEW
    # echo "$LOAD_CMD -T -t hash -f $2 -h $3 $1.new"
    $LOAD_CMD -T -t hash -f $2 -h $3 $1.new
    # echo "$LOAD_CMD -r fileid -h $3 $1.new"
    $LOAD_CMD -r fileid -h $3 $1.new
}

#####
# The newappend process is:
# 1. create a new temp DBENV in /tmp/sc-<processID>
# 2. append the contents of newdata to /tmp/sc-<processID>/db
# 3. move /tmp/sc-<processID>/db over to DB_PATH/db.new
# 4. delete the temp DBENV dir /tmp/sc-<processID>
#
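# Unlike append, the data is loaded into a freshly created table rather than
# a copy of the existing one, so rows already in DB_PATH/db are not carried
# over into db.new. For example (hypothetical data file):
#
#   kamdbctl newappend subscriber /tmp/new_subscribers.txt
#   kamdbctl swap subscriber
#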
kamailio_newappend()  # parms: <db> <newdata> [DB_PATH]
{
    if [ $# -lt 3 ]; then
        echo "kamailio_newappend parms: <db> <newdata> [DB_PATH]"
        exit 1
    fi
    DB=$3/$1
    DBNEW=$DB.new
    if [ -f $DBNEW ] ; then
        rm $DBNEW
    fi
    TMPENV=/tmp/sc-$$
    kamailio_create $TMPENV
    cd $OLDPWD
    $LOAD_CMD -T -t hash -f $2 -h $TMPENV $1
    mv $TMPENV/$1 $DBNEW
    rm -rf $TMPENV
}

# cat all rows to STDOUT
kamailio_cat()  # pars: <database name> <DB_PATH>
{
    if [ $# -ne 2 ] ; then
        echo "kamailio_cat params: <db> [DB_PATH]"
        exit 1
    fi
    $DUMP_CMD -p -h $2 $1
}

kamailio_drop()  # pars: <DB_PATH>
{
    if [ $# -ne 1 ] ; then
        echo "kamailio_drop function takes one param"
        exit 1
    fi
    if [ ! -d $1 ] ; then
        echo "Directory does not exist: $1"
    fi
    minfo "Dropping Berkeley DB database at: $1 ..."
    # core
    if [ -f $1/version ] ; then
        for TABLE in $STANDARD_TABLES; do
            mdbg "Dropping core table: $TABLE"
            rm -f $1/$TABLE
        done
    fi
    # presence
    if [ -f $1/presentity ] ; then
        for TABLE in $PRESENCE_TABLES; do
            mdbg "Dropping presence table: $TABLE"
            rm -f $1/$TABLE
        done
    fi
    # extra tables
    if [ -f $1/cpl ] ; then
        for TABLE in $EXTRA_TABLES; do
            mdbg "Dropping extra table: $TABLE"
            rm -f $1/$TABLE
        done
    fi
    # delete db files and directory
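    # (the __db.00* files are the Berkeley DB environment's shared region
    # files, not tables, so they are removed separately here)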
    rm -rf $1/__db.001
    rm -rf $1/__db.002
    rm -rf $1/__db.003
    rm -rf $1/__db.004
    rmdir $1
}

kamailio_create()  # pars: <DB_PATH>
{
    if [ $# -ne 1 ] ; then
        echo "kamailio_create param [DB_PATH]"
        exit 1
    fi
    DB_PATH=$1
    if [ ! -d $1 ] ; then
        minfo "creating Berkeley DB database at: [$1]"
        mkdir -p $DB_PATH
    fi

    for TABLE in $STANDARD_TABLES; do
        mdbg "Creating standard table: $TABLE"
        $LOAD_CMD -T -t hash -f $DB_SCHEMA/$TABLE -h $1 $TABLE
        if [ $? -ne 0 ] ; then
            merr "Creating standard tables failed!"
            exit 1
        fi
    done

    get_answer $INSTALL_PRESENCE_TABLES "Install presence related tables? (y/n): "
    if [ "$ANSWER" = "y" ]; then
        presence_create $1
    fi
    get_answer $INSTALL_EXTRA_TABLES "Install tables for $EXTRA_MODULES? (y/n): "
    if [ "$ANSWER" = "y" ]; then
        extra_create $1
    fi
} # kamailio_create

presence_create()  # pars: <DB_PATH>
{
    if [ $# -ne 1 ] ; then
        merr "presence_create param [DB_PATH]"
        exit 1
    fi
    DB_PATH=$1
    if [ ! -d $1 ] ; then
        # Assert: the directory should already exist
        merr "BerkeleyDB directory does not exist at: [$1]"
        exit 1
    fi
    if [ ! -f $1/version ] ; then
        # Assert: the directory should already contain table 'version'
        merr "BerkeleyDB directory does not have VERSION table at: [$1]"
        exit 1
    fi
    for TABLE in $PRESENCE_TABLES; do
        mdbg "Creating presence table: $TABLE"
        $LOAD_CMD -T -t hash -f $DB_SCHEMA/$TABLE -h $1 $TABLE
        if [ $? -ne 0 ] ; then
            merr "Creating presence tables failed!"
            exit 1
        fi
    done
} # end presence_create

extra_create()  # pars: <DB_PATH>
{
    if [ $# -ne 1 ] ; then
        merr "extra_create function takes one param (DB_PATH)"
        exit 1
    fi
    DB_PATH=$1
    if [ ! -d $1 ] ; then
        # Assert: the directory should already exist
        merr "BerkeleyDB directory does not exist at: [$1]"
        exit 1
    fi
    if [ ! -f $1/version ] ; then
        # Assert: the directory should already contain table 'version'
        merr "BerkeleyDB directory does not have VERSION table at: [$1]"
        exit 1
    fi
    for TABLE in $EXTRA_TABLES; do
        mdbg "Creating extra table: $TABLE"
        $LOAD_CMD -T -t hash -f $DB_SCHEMA/$TABLE -h $1 $TABLE
        if [ $? -ne 0 ] ; then
            merr "Creating extra tables failed!"
            exit 1
        fi
    done
} # end extra_create