#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2005 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#

# $Id$

"""
Provides the Berkeley DB (BSDDB) database backend for GRAMPS.
"""

#-------------------------------------------------------------------------
#
# Standard python modules
#
#-------------------------------------------------------------------------
import os
import time
import locale
import sets
import cPickle
from gettext import gettext as _
from bsddb import dbshelve, db

#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from RelLib import *
from GrampsDbBase import *

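# Database schema version bounds: version_supported() rejects anything outside
# the [_MINVERSION, _DBVERSION] range, while need_upgrade() and gramps_upgrade()
# bring older (but still supported) databases up to _DBVERSION in place.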
_MINVERSION = 5
_DBVERSION = 9

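# Secondary-key extractor callbacks. DBShelf.associate() calls each of these
# with the primary key and the already-unpickled record, and stores the
# returned string as the key in the corresponding secondary index
# (surnames, GRAMPS IDs, event names, repository types).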
def find_surname(key,data):
    return str(data[3].get_surname())

def find_idmap(key,data):
    return str(data[1])

def find_fidmap(key,data):
    return str(data[1])

def find_eventname(key,data):
    return str(data[2])

def find_repository_type(key,data):
    return str(data[2])

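# GrampsBSDDBCursor wraps a raw Berkeley DB cursor behind the GrampsCursor
# interface used elsewhere in GRAMPS. The typical iteration pattern, as used
# by the upgrade code and the __main__ block below, is roughly:
#
#     cursor = dbase.get_person_cursor()
#     data = cursor.first()
#     while data:
#         handle, value = data
#         # ... work with the record ...
#         data = cursor.next()
#     cursor.close()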
class GrampsBSDDBCursor(GrampsCursor):

    def __init__(self,source):
        self.cursor = source.cursor()

    def first(self):
        return self.cursor.first()

    def next(self):
        return self.cursor.next()

    def close(self):
        self.cursor.close()

#-------------------------------------------------------------------------
#
# GrampsBSDDB
#
#-------------------------------------------------------------------------
class GrampsBSDDB(GrampsDbBase):
    """GRAMPS database object. This is the Berkeley DB (BSDDB) backed
    implementation of GrampsDbBase."""

    def __init__(self):
        """Creates a new GrampsBSDDB database object."""
        GrampsDbBase.__init__(self)

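    # Each table (person, family, ...) is a DBShelf: a hash database whose
    # values are the pickled serialize() tuples of the RelLib objects. dbopen()
    # opens one such named sub-database from the single database file, either
    # read-only or creating it as needed.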
    def dbopen(self,name,dbname):
        dbmap = dbshelve.DBShelf(self.env)
        dbmap.db.set_pagesize(16384)
        if self.readonly:
            dbmap.open(name, dbname, db.DB_HASH, db.DB_RDONLY)
        else:
            dbmap.open(name, dbname, db.DB_HASH, db.DB_CREATE, 0666)
        return dbmap

    def get_person_cursor(self):
        return GrampsBSDDBCursor(self.person_map)

    def get_family_cursor(self):
        return GrampsBSDDBCursor(self.family_map)

    def get_event_cursor(self):
        return GrampsBSDDBCursor(self.event_map)

    def get_place_cursor(self):
        return GrampsBSDDBCursor(self.place_map)

    def get_source_cursor(self):
        return GrampsBSDDBCursor(self.source_map)

    def get_media_cursor(self):
        return GrampsBSDDBCursor(self.media_map)

    def get_repository_cursor(self):
        return GrampsBSDDBCursor(self.repository_map)

    def version_supported(self):
        return (self.metadata.get('version',0) <= _DBVERSION and
                self.metadata.get('version',0) >= _MINVERSION)

    def need_upgrade(self):
        return not self.readonly \
               and self.metadata.get('version',0) < _DBVERSION

    def load(self,name,callback,mode="w"):
        self.load_primary(name,callback,mode)
        self.load_secondary(callback)
        if self.need_upgrade():
            self.gramps_upgrade()

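    # load_primary() sets up the Berkeley DB environment. DB_PRIVATE plus
    # DB_INIT_MPOOL means the environment is private to this process and only
    # the memory pool (cache) subsystem is initialized; no locking or
    # transaction subsystems are used. The undo log is kept in a separate
    # record-number database next to the data file and is removed in close().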
    def load_primary(self,name,callback,mode="w"):
        if self.person_map:
            self.close()

        self.readonly = mode == "r"

        self.env = db.DBEnv()
        self.env.set_cachesize(0,0x2000000)         # 32MB
        flags = db.DB_CREATE|db.DB_INIT_MPOOL|db.DB_PRIVATE

        self.undolog = "%s.log" % name
        self.env.open(os.path.dirname(name), flags)

        name = os.path.basename(name)
        self.save_name = name

        self.family_map = self.dbopen(name, "family")
        self.place_map = self.dbopen(name, "places")
        self.source_map = self.dbopen(name, "sources")
        self.media_map = self.dbopen(name, "media")
        self.event_map = self.dbopen(name, "events")
        self.metadata = self.dbopen(name, "meta")
        self.person_map = self.dbopen(name, "person")
        self.repository_map = self.dbopen(name, "repository")

        if not self.readonly:
            self.undodb = db.DB()
            self.undodb.open(self.undolog, db.DB_RECNO, db.DB_CREATE)

        self.bookmarks = self.metadata.get('bookmarks')
        self.family_event_names = sets.Set(self.metadata.get('fevent_names',[]))
        self.individual_event_names = sets.Set(self.metadata.get('pevent_names',[]))
        self.family_attributes = sets.Set(self.metadata.get('fattr_names',[]))
        self.individual_attributes = sets.Set(self.metadata.get('pattr_names',[]))

        if self.readonly:
            openflags = db.DB_RDONLY
        else:
            openflags = db.DB_CREATE

        self.surnames = db.DB(self.env)
        self.surnames.set_flags(db.DB_DUP)
        self.surnames.open(self.save_name, "surnames",
                           db.DB_HASH, flags=openflags)

        self.name_group = db.DB(self.env)
        self.name_group.set_flags(db.DB_DUP)
        self.name_group.open(self.save_name, "name_group",
                             db.DB_HASH, flags=openflags)

        self.eventnames = db.DB(self.env)
        self.eventnames.set_flags(db.DB_DUP)
        self.eventnames.open(self.save_name, "eventnames",
                             db.DB_HASH, flags=openflags)

        self.repository_types = db.DB(self.env)
        self.repository_types.set_flags(db.DB_DUP)
        self.repository_types.open(self.save_name, "repostypes",
                                   db.DB_HASH, flags=openflags)

        gstats = self.metadata.get('gender_stats')

        if not self.readonly:
            if gstats == None:
                self.metadata['version'] = _DBVERSION
            elif not self.metadata.has_key('version'):
                self.metadata['version'] = 0

        if self.bookmarks == None:
            self.bookmarks = []

        self.genderStats = GenderStats(gstats)
        return 1

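    # The *_trans tables opened by load_secondary() are secondary indices that
    # map GRAMPS IDs back to the primary records; they are what the
    # get_*_from_gramps_id() lookups read. The associate() calls that keep
    # them current are only made when the database is writable.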
    def load_secondary(self,callback):
        if self.readonly:
            openflags = db.DB_RDONLY
        else:
            openflags = db.DB_CREATE

        self.id_trans = db.DB(self.env)
        self.id_trans.set_flags(db.DB_DUP)
        self.id_trans.open(self.save_name, "idtrans",
                           db.DB_HASH, flags=openflags)

        self.fid_trans = db.DB(self.env)
        self.fid_trans.set_flags(db.DB_DUP)
        self.fid_trans.open(self.save_name, "fidtrans",
                            db.DB_HASH, flags=openflags)

        self.eid_trans = db.DB(self.env)
        self.eid_trans.set_flags(db.DB_DUP)
        self.eid_trans.open(self.save_name, "eidtrans",
                            db.DB_HASH, flags=openflags)

        self.pid_trans = db.DB(self.env)
        self.pid_trans.set_flags(db.DB_DUP)
        self.pid_trans.open(self.save_name, "pidtrans",
                            db.DB_HASH, flags=openflags)

        self.sid_trans = db.DB(self.env)
        self.sid_trans.set_flags(db.DB_DUP)
        self.sid_trans.open(self.save_name, "sidtrans",
                            db.DB_HASH, flags=openflags)

        self.oid_trans = db.DB(self.env)
        self.oid_trans.set_flags(db.DB_DUP)
        self.oid_trans.open(self.save_name, "oidtrans",
                            db.DB_HASH, flags=openflags)

        self.rid_trans = db.DB(self.env)
        self.rid_trans.set_flags(db.DB_DUP)
        self.rid_trans.open(self.save_name, "ridtrans",
                            db.DB_HASH, flags=openflags)

        if not self.readonly:
            self.person_map.associate(self.surnames, find_surname, openflags)
            self.person_map.associate(self.id_trans, find_idmap, openflags)
            self.family_map.associate(self.fid_trans, find_idmap, openflags)
            self.event_map.associate(self.eid_trans, find_idmap, openflags)
            self.repository_map.associate(self.rid_trans, find_idmap,
                                          openflags)
            self.repository_map.associate(self.repository_types,
                                          find_repository_type, openflags)
            self.place_map.associate(self.pid_trans, find_idmap, openflags)
            self.media_map.associate(self.oid_trans, find_idmap, openflags)
            self.source_map.associate(self.sid_trans, find_idmap, openflags)

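    # rebuild_secondary() repairs damaged secondary indices. For each primary
    # table it closes and truncates the related secondaries, re-associate()s
    # them, and then rewrites every primary record in place
    # (map[key] = map[key]) so that Berkeley DB regenerates the index entries.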
    def rebuild_secondary(self,callback=None):

        # Repair secondary indices related to person_map

        self.id_trans.close()
        self.surnames.close()

        self.id_trans = db.DB(self.env)
        self.id_trans.set_flags(db.DB_DUP)
        self.id_trans.open(self.save_name, "idtrans", db.DB_HASH,
                           flags=db.DB_CREATE)
        self.id_trans.truncate()

        self.surnames = db.DB(self.env)
        self.surnames.set_flags(db.DB_DUP)
        self.surnames.open(self.save_name, "surnames", db.DB_HASH,
                           flags=db.DB_CREATE)
        self.surnames.truncate()

        self.person_map.associate(self.surnames, find_surname, db.DB_CREATE)
        self.person_map.associate(self.id_trans, find_idmap, db.DB_CREATE)

        for key in self.person_map.keys():
            if callback:
                callback()
            self.person_map[key] = self.person_map[key]
        self.person_map.sync()

        # Repair secondary indices related to family_map

        self.fid_trans.close()
        self.fid_trans = db.DB(self.env)
        self.fid_trans.set_flags(db.DB_DUP)
        self.fid_trans.open(self.save_name, "fidtrans", db.DB_HASH,
                            flags=db.DB_CREATE)
        self.fid_trans.truncate()
        self.family_map.associate(self.fid_trans, find_idmap, db.DB_CREATE)

        for key in self.family_map.keys():
            if callback:
                callback()
            self.family_map[key] = self.family_map[key]
        self.family_map.sync()

        # Repair secondary indices related to place_map

        self.pid_trans.close()
        self.pid_trans = db.DB(self.env)
        self.pid_trans.set_flags(db.DB_DUP)
        self.pid_trans.open(self.save_name, "pidtrans", db.DB_HASH,
                            flags=db.DB_CREATE)
        self.pid_trans.truncate()
        self.place_map.associate(self.pid_trans, find_idmap, db.DB_CREATE)

        for key in self.place_map.keys():
            if callback:
                callback()
            self.place_map[key] = self.place_map[key]
        self.place_map.sync()

        # Repair secondary indices related to media_map

        self.oid_trans.close()
        self.oid_trans = db.DB(self.env)
        self.oid_trans.set_flags(db.DB_DUP)
        self.oid_trans.open(self.save_name, "oidtrans", db.DB_HASH,
                            flags=db.DB_CREATE)
        self.oid_trans.truncate()
        self.media_map.associate(self.oid_trans, find_idmap, db.DB_CREATE)

        for key in self.media_map.keys():
            if callback:
                callback()
            self.media_map[key] = self.media_map[key]
        self.media_map.sync()

        # Repair secondary indices related to source_map

        self.sid_trans.close()
        self.sid_trans = db.DB(self.env)
        self.sid_trans.set_flags(db.DB_DUP)
        self.sid_trans.open(self.save_name, "sidtrans", db.DB_HASH,
                            flags=db.DB_CREATE)
        self.sid_trans.truncate()
        self.source_map.associate(self.sid_trans, find_idmap, db.DB_CREATE)

        for key in self.source_map.keys():
            if callback:
                callback()
            self.source_map[key] = self.source_map[key]
        self.source_map.sync()

        # Repair secondary indices related to repository_map

        self.rid_trans.close()
        self.rid_trans = db.DB(self.env)
        self.rid_trans.set_flags(db.DB_DUP)
        self.rid_trans.open(self.save_name, "ridtrans", db.DB_HASH,
                            flags=db.DB_CREATE)
        self.rid_trans.truncate()
        self.repository_map.associate(self.rid_trans, find_idmap, db.DB_CREATE)

        for key in self.repository_map.keys():
            if callback:
                callback()
            self.repository_map[key] = self.repository_map[key]
        self.repository_map.sync()

    def abort_changes(self):
        while self.undo():
            pass
        self.close()

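    # close() writes the in-memory metadata (bookmarks, gender statistics and
    # the accumulated event/attribute name lists) back to the meta table,
    # closes every table and index, and finally discards the undo log.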
    def close(self):
        if self.person_map == None:
            return
        self.name_group.close()
        self.person_map.close()
        self.family_map.close()
        self.repository_map.close()
        self.place_map.close()
        self.source_map.close()
        self.media_map.close()
        self.event_map.close()
        if not self.readonly:
            self.metadata['bookmarks'] = self.bookmarks
            self.metadata['gender_stats'] = self.genderStats.save_stats()
            self.metadata['fevent_names'] = list(self.family_event_names)
            self.metadata['pevent_names'] = list(self.individual_event_names)
            self.metadata['fattr_names'] = list(self.family_attributes)
            self.metadata['pattr_names'] = list(self.individual_attributes)
        self.metadata.close()
        self.surnames.close()
        self.eventnames.close()
        self.repository_types.close()
        self.id_trans.close()
        self.fid_trans.close()
        self.eid_trans.close()
        self.rid_trans.close()
        self.oid_trans.close()
        self.sid_trans.close()
        self.pid_trans.close()
        self.env.close()

        if not self.readonly:
            self.undodb.close()
            try:
                os.remove(self.undolog)
            except:
                pass

        self.person_map = None
        self.family_map = None
        self.repository_map = None
        self.place_map = None
        self.source_map = None
        self.media_map = None
        self.event_map = None
        self.surnames = None
        self.env = None
        self.metadata = None

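    # Low-level deletion helpers. These remove a record by handle without
    # recording undo information or emitting signals; the transactional
    # remove_* methods further down take care of that.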
    def _del_person(self,handle):
        self.person_map.delete(str(handle))

    def _del_source(self,handle):
        self.source_map.delete(str(handle))

    def _del_repository(self,handle):
        self.repository_map.delete(str(handle))

    def _del_place(self,handle):
        self.place_map.delete(str(handle))

    def _del_media(self,handle):
        self.media_map.delete(str(handle))

    def _del_family(self,handle):
        self.family_map.delete(str(handle))

    def _del_event(self,handle):
        self.event_map.delete(str(handle))

    def set_name_group_mapping(self,name,group):
        if not self.readonly:
            name = str(name)
            if not group and self.name_group.has_key(name):
                self.name_group.delete(name)
            else:
                self.name_group[name] = group
            self.emit('person-rebuild')

    def get_surname_list(self):
        names = self.surnames.keys()
        a = {}
        for name in names:
            a[unicode(name)] = 1
        vals = a.keys()
        vals.sort(locale.strcoll)
        return vals

    def get_person_event_type_list(self):
        names = self.eventnames.keys()
        a = {}
        for name in names:
            a[unicode(name)] = 1
        vals = a.keys()
        vals.sort()
        return vals

    def get_repository_type_list(self):
        repos_types = self.repository_types.keys()
        a = {}
        for repos_type in repos_types:
            a[unicode(repos_type)] = 1
        vals = a.keys()
        vals.sort()
        return vals

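    # Transactional removal: each remove_* method records the old state in the
    # transaction (so it can be undone), emits the matching *-delete signal,
    # and then deletes the record from its table.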
    def remove_person(self,handle,transaction):
        if not self.readonly and handle and str(handle) in self.person_map:
            person = self.get_person_from_handle(handle)
            self.genderStats.uncount_person(person)
            if transaction != None:
                transaction.add(PERSON_KEY,handle,person.serialize())
            self.emit('person-delete',([str(handle)],))
            self.person_map.delete(str(handle))

    def remove_source(self,handle,transaction):
        if not self.readonly and handle and str(handle) in self.source_map:
            if transaction != None:
                old_data = self.source_map.get(str(handle))
                transaction.add(SOURCE_KEY,handle,old_data)
            self.emit('source-delete',([handle],))
            self.source_map.delete(str(handle))

    def remove_repository(self,handle,transaction):
        if not self.readonly and handle and str(handle) in self.repository_map:
            if transaction != None:
                old_data = self.repository_map.get(str(handle))
                transaction.add(REPOSITORY_KEY,handle,old_data)
            self.emit('repository-delete',([handle],))
            self.repository_map.delete(str(handle))

    def remove_family(self,handle,transaction):
        if not self.readonly and handle and str(handle) in self.family_map:
            if transaction != None:
                old_data = self.family_map.get(str(handle))
                transaction.add(FAMILY_KEY,handle,old_data)
            self.emit('family-delete',([str(handle)],))
            self.family_map.delete(str(handle))

    def remove_event(self,handle,transaction):
        if not self.readonly and handle and str(handle) in self.event_map:
            if transaction != None:
                old_data = self.event_map.get(str(handle))
                transaction.add(EVENT_KEY,handle,old_data)
            self.emit('event-delete',([str(handle)],))
            self.event_map.delete(str(handle))

    def remove_place(self,handle,transaction):
        if not self.readonly and handle and str(handle) in self.place_map:
            if transaction != None:
                old_data = self.place_map.get(str(handle))
                transaction.add(PLACE_KEY,handle,old_data)
            self.emit('place-delete',([handle],))
            self.place_map.delete(str(handle))

    def remove_object(self,handle,transaction):
        if not self.readonly and handle and str(handle) in self.media_map:
            if transaction != None:
                old_data = self.media_map.get(str(handle))
                transaction.add(MEDIA_KEY,handle,old_data)
            self.emit('media-delete',([handle],))
            self.media_map.delete(str(handle))

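    # The *_from_gramps_id lookups read the corresponding *_trans secondary
    # index, which hands back the pickled primary record for that GRAMPS ID;
    # the record is unpickled and unserialized into the matching RelLib object.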
    def get_person_from_gramps_id(self,val):
        """Finds a Person in the database from the passed GRAMPS ID.
        If no such Person exists, None is returned."""

        data = self.id_trans.get(str(val))
        if data:
            person = Person()
            person.unserialize(cPickle.loads(data))
            return person
        else:
            return None

    def get_family_from_gramps_id(self,val):
        """Finds a Family in the database from the passed GRAMPS ID.
        If no such Family exists, None is returned."""

        data = self.fid_trans.get(str(val))
        if data:
            family = Family()
            family.unserialize(cPickle.loads(data))
            return family
        else:
            return None

    def get_event_from_gramps_id(self,val):
        """Finds an Event in the database from the passed GRAMPS ID.
        If no such Event exists, None is returned."""

        data = self.eid_trans.get(str(val))
        if data:
            event = Event()
            event.unserialize(cPickle.loads(data))
            return event
        else:
            return None

    def get_place_from_gramps_id(self,val):
        """Finds a Place in the database from the passed GRAMPS ID.
        If no such Place exists, None is returned."""

        data = self.pid_trans.get(str(val))
        if data:
            place = Place()
            place.unserialize(cPickle.loads(data))
            return place
        else:
            return None

    def get_source_from_gramps_id(self,val):
        """Finds a Source in the database from the passed GRAMPS ID.
        If no such Source exists, None is returned."""

        data = self.sid_trans.get(str(val))
        if data:
            source = Source()
            source.unserialize(cPickle.loads(data))
            return source
        else:
            return None

    def get_repository_from_gramps_id(self,val):
        """Finds a Repository in the database from the passed GRAMPS ID.
        If no such Repository exists, None is returned."""

        data = self.rid_trans.get(str(val))
        if data:
            repository = Repository()
            repository.unserialize(cPickle.loads(data))
            return repository
        else:
            return None

    def get_object_from_gramps_id(self,val):
        """Finds a MediaObject in the database from the passed GRAMPS ID.
        If no such MediaObject exists, None is returned."""

        data = self.oid_trans.get(str(val))
        if data:
            obj = MediaObject()
            obj.unserialize(cPickle.loads(data))
            return obj
        else:
            return None

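    # transaction_commit() lets the base class apply the transaction, then
    # flushes every table and secondary index to disk so the commit is
    # actually persisted.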
    def transaction_commit(self,transaction,msg):
        GrampsDbBase.transaction_commit(self,transaction,msg)
        self.family_map.sync()
        self.place_map.sync()
        self.source_map.sync()
        self.repository_map.sync()
        self.repository_types.sync()
        self.media_map.sync()
        self.event_map.sync()
        self.metadata.sync()
        self.person_map.sync()
        self.surnames.sync()
        self.name_group.sync()
        self.id_trans.sync()
        self.fid_trans.sync()
        self.eid_trans.sync()
        self.pid_trans.sync()
        self.sid_trans.sync()
        self.rid_trans.sync()
        self.oid_trans.sync()
        self.undodb.sync()

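    # Schema upgrades are applied cumulatively: gramps_upgrade() reads the
    # stored version and runs each gramps_upgrade_N() step in order until the
    # database reaches _DBVERSION, then updates the version stamp.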
    def gramps_upgrade(self):
        child_rel_notrans = [
            "None", "Birth", "Adopted", "Stepchild",
            "Sponsored", "Foster", "Unknown", "Other", ]

        version = self.metadata.get('version',_MINVERSION)

        if version < 6:
            self.gramps_upgrade_6()
        if version < 7:
            self.gramps_upgrade_7()
        if version < 8:
            self.gramps_upgrade_8()
        if version < 9:
            self.gramps_upgrade_9()

        self.metadata['version'] = _DBVERSION

    def gramps_upgrade_6(self):
        print "Upgrading to DB version 6"
        order = []
        for val in self.get_media_column_order():
            if val[1] != 6:
                order.append(val)
        self.set_media_column_order(order)

    def gramps_upgrade_7(self):
        print "Upgrading to DB version 7"

        self.genderStats = GenderStats()
        cursor = self.get_person_cursor()
        data = cursor.first()
        while data:
            handle,val = data
            p = Person(val)
            self.genderStats.count_person(p,self)
            data = cursor.next()
        cursor.close()

    def gramps_upgrade_8(self):
        print "Upgrading to DB version 8"
        cursor = self.get_person_cursor()
        data = cursor.first()
        while data:
            handle,val = data
            handle_list = val[8]
            if type(handle_list) == list:
                # Check to prevent crash on corrupted data (event_list=None)
                for handle in handle_list:
                    event = self.get_event_from_handle(handle)
                    self.individual_event_names.add(event.name)
            data = cursor.next()
        cursor.close()

        cursor = self.get_family_cursor()
        data = cursor.first()
        while data:
            handle,val = data
            handle_list = val[6]
            if type(handle_list) == list:
                # Check to prevent crash on corrupted data (event_list=None)
                for handle in handle_list:
                    event = self.get_event_from_handle(handle)
                    self.family_event_names.add(event.name)
            data = cursor.next()
        cursor.close()

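    # Version 9 rewrites every Source with the new layout, converts the plain
    # event handle lists in Person and Family records to EventRef objects, and
    # re-commits Events without the old Witness data, all inside one batch
    # transaction.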
    def gramps_upgrade_9(self):
        print "Upgrading to DB version 9"
        # First, make sure the stored default person handle is str, not unicode
        try:
            handle = self.metadata['default']
            self.metadata['default'] = str(handle)
        except KeyError:
            # default person was not stored in database
            pass

        trans = Transaction("",self)
        trans.set_batch(True)

        # Change every source to have reporef_list
        cursor = self.get_source_cursor()
        data = cursor.first()
        while data:
            handle,info = data
            source = Source()
            (source.handle, source.gramps_id, source.title, source.author,
             source.pubinfo, source.note, source.media_list,
             source.abbrev, source.change, source.datamap) = info
            self.commit_source(source,trans)
            data = cursor.next()
        cursor.close()

        # Change every event handle to the EventRef
        # in Person and Family objects

        # person
        cursor = self.get_person_cursor()
        data = cursor.first()
        while data:
            changed = False
            handle,info = data
            person = Person()
            # Restore data from dbversion 8 (gramps 2.0.9)
            (person.handle, person.gramps_id, person.gender,
             person.primary_name, person.alternate_names, person.nickname,
             death_handle, birth_handle, event_list,
             person.family_list, person.parent_family_list,
             person.media_list, person.address_list, person.attribute_list,
             person.urls, person.lds_bapt, person.lds_endow, person.lds_seal,
             person.complete, person.source_list, person.note,
             person.change, person.private) = (info + (False,))[0:23]

            if birth_handle:
                event_ref = EventRef()
                event_ref.set_reference_handle(birth_handle)
                event_ref.set_role((EventRef.PRIMARY,''))
                person.birth_ref = event_ref
                changed = True

            if death_handle:
                event_ref = EventRef()
                event_ref.set_reference_handle(death_handle)
                event_ref.set_role((EventRef.PRIMARY,''))
                person.death_ref = event_ref
                changed = True

            event_ref_list = []
            for event_handle in event_list:
                event_ref = EventRef()
                event_ref.set_reference_handle(event_handle)
                event_ref.set_role((EventRef.PRIMARY,''))
                event_ref_list.append(event_ref)

            if event_ref_list:
                person.event_ref_list = event_ref_list[:]
                changed = True

            if changed:
                self.commit_person(person,trans)
            data = cursor.next()
        cursor.close()

        # family
        cursor = self.get_family_cursor()
        data = cursor.first()
        while data:
            changed = False
            handle,info = data
            family = Family()
            # Restore data from dbversion 8 (gramps 2.0.9)
            (family.handle, family.gramps_id, family.father_handle,
             family.mother_handle, family.child_list, family.type,
             event_list, family.media_list, family.attribute_list,
             family.lds_seal, family.complete, family.source_list,
             family.note, family.change) = info

            event_ref_list = []
            for event_handle in event_list:
                event_ref = EventRef()
                event_ref.set_reference_handle(event_handle)
                event_ref.set_role((EventRef.PRIMARY,''))
                event_ref_list.append(event_ref)

            if event_ref_list:
                family.event_ref_list = event_ref_list[:]
                changed = True

            if changed:
                self.commit_family(family,trans)
            data = cursor.next()
        cursor.close()

        # Remove Witness from every event and convert name to type
        cursor = self.get_event_cursor()
        data = cursor.first()
        while data:
            handle,info = data
            event = Event()
            (event.handle, event.gramps_id, name, event.date,
             event.description, event.place, event.cause, event.private,
             event.source_list, event.note, witness, event.media_list,
             event.change) = info
            self.commit_event(event,trans)
            data = cursor.next()
        cursor.close()

        self.transaction_commit(trans,"Upgrade to DB version 9")

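# A small smoke test: load the database named on the command line, print the
# handle and primary name of every person, then dump the surname index keys.
# A lookup by GRAMPS ID would work the same way, e.g. (hypothetical ID):
#
#     person = d.get_person_from_gramps_id("I0001")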
if __name__ == "__main__":

    import sys

    d = GrampsBSDDB()
    d.load(sys.argv[1],lambda x: x)

    c = d.get_person_cursor()
    data = c.first()
    while data:
        person = Person(data[1])
        print data[0], person.get_primary_name().get_name(),
        data = c.next()
    c.close()

    print d.surnames.keys()