2004-07-31 00:26:49 +05:30
|
|
|
#
|
|
|
|
# Gramps - a GTK+/GNOME based genealogy program
|
|
|
|
#
|
2005-04-01 10:04:31 +05:30
|
|
|
# Copyright (C) 2000-2005 Donald N. Allingham
|
2004-07-31 00:26:49 +05:30
|
|
|
#
|
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation; either version 2 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with this program; if not, write to the Free Software
|
|
|
|
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
|
|
|
#
|
|
|
|
|
|
|
|
# $Id$
|
|
|
|
|
2004-12-06 09:43:13 +05:30
|
|
|
"""
|
|
|
|
Provides the Berkeley DB (BSDDB) database backend for GRAMPS
|
|
|
|
"""
|
|
|
|
|
2005-04-01 10:04:31 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# Standard python modules
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2004-08-01 09:51:31 +05:30
|
|
|
import os
|
2004-08-24 09:18:15 +05:30
|
|
|
import time
|
2004-08-27 03:24:14 +05:30
|
|
|
import locale
|
2004-12-20 05:07:40 +05:30
|
|
|
from gettext import gettext as _
|
2005-04-01 10:04:31 +05:30
|
|
|
from bsddb import dbshelve, db
|
2004-08-01 09:51:31 +05:30
|
|
|
|
2005-04-01 10:04:31 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# Gramps modules
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2004-07-31 00:26:49 +05:30
|
|
|
from RelLib import *
|
2004-08-01 09:51:31 +05:30
|
|
|
from GrampsDbBase import *
|
2004-07-31 00:26:49 +05:30
|
|
|
|
2005-04-01 10:04:31 +05:30
|
|
|
_DBVERSION = 5
|
2005-02-20 03:41:51 +05:30
|
|
|
|
2004-08-01 09:51:31 +05:30
|
|
|
def find_surname(key,data):
    """Secondary-index key extractor: surname of a person record.

    *data* is an unpickled person tuple whose element 3 is the primary
    Name object; *key* is unused but required by DB.associate().
    """
    primary_name = data[3]
    return str(primary_name.get_surname())
|
2004-07-31 00:26:49 +05:30
|
|
|
|
2004-08-01 09:51:31 +05:30
|
|
|
def find_idmap(key,data):
    """Secondary-index key extractor: GRAMPS ID (element 1) of a record.

    *key* is unused but required by the DB.associate() callback signature.
    """
    gramps_id = data[1]
    return str(gramps_id)
|
|
|
|
|
2004-08-20 07:50:06 +05:30
|
|
|
def find_fidmap(key,data):
    """Secondary-index key extractor for family GRAMPS IDs.

    NOTE(review): identical in behavior to find_idmap (element 1 of the
    record); kept as a separate name for the family index association.
    """
    family_id = data[1]
    return str(family_id)
|
|
|
|
|
2004-08-01 09:51:31 +05:30
|
|
|
def find_eventname(key,data):
    """Secondary-index key extractor: event name (element 2) of an event record.

    *key* is unused but required by the DB.associate() callback signature.
    """
    event_name = data[2]
    return str(event_name)
|
2004-07-31 00:26:49 +05:30
|
|
|
|
2004-12-06 09:43:13 +05:30
|
|
|
class GrampsBSDDBCursor(GrampsCursor):
    """Adapt a raw bsddb cursor to the GrampsCursor iteration interface.

    Wraps the cursor of a bsddb DB/DBShelf table so callers can walk
    (handle, data) pairs without touching bsddb directly.
    """

    def __init__(self,source):
        # source is a bsddb DB or DBShelf; hold its newly created cursor.
        self.cursor = source.cursor()

    def first(self):
        # Position at and return the first (key, data) pair
        # (None when the table is empty, per bsddb cursor semantics).
        return self.cursor.first()

    def next(self):
        # Advance and return the next (key, data) pair, or None at the end.
        return self.cursor.next()

    def close(self):
        # Release the underlying bsddb cursor.
        self.cursor.close()
|
|
|
|
|
2004-07-31 00:26:49 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# GrampsBSDDB
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2004-08-01 09:51:31 +05:30
|
|
|
class GrampsBSDDB(GrampsDbBase):
    """GRAMPS database object backed by Berkeley DB (bsddb).

    Concrete persistence layer on top of GrampsDbBase: each record type
    (person, family, event, place, source, media) lives in its own table
    of a single BSDDB file, with secondary indexes for surnames, GRAMPS
    IDs and event names.
    """

    def __init__(self):
        """Create a new, not-yet-opened GrampsDB; call load() to open."""
        GrampsDbBase.__init__(self)
|
2004-07-31 00:26:49 +05:30
|
|
|
|
2004-10-19 08:49:25 +05:30
|
|
|
def dbopen(self,name,dbname):
|
|
|
|
dbmap = dbshelve.DBShelf(self.env)
|
2004-10-23 09:26:48 +05:30
|
|
|
dbmap.db.set_pagesize(16384)
|
2005-02-17 04:19:54 +05:30
|
|
|
if self.readonly:
|
|
|
|
dbmap.open(name, dbname, db.DB_HASH, db.DB_RDONLY)
|
|
|
|
else:
|
|
|
|
dbmap.open(name, dbname, db.DB_HASH, db.DB_CREATE, 0666)
|
2004-10-19 08:49:25 +05:30
|
|
|
return dbmap
|
|
|
|
|
2004-12-05 09:45:48 +05:30
|
|
|
    def get_person_cursor(self):
        """Return a GrampsBSDDBCursor over the person table."""
        return GrampsBSDDBCursor(self.person_map)

    def get_family_cursor(self):
        """Return a GrampsBSDDBCursor over the family table."""
        return GrampsBSDDBCursor(self.family_map)

    def get_event_cursor(self):
        """Return a GrampsBSDDBCursor over the event table."""
        return GrampsBSDDBCursor(self.event_map)

    def get_place_cursor(self):
        """Return a GrampsBSDDBCursor over the place table."""
        return GrampsBSDDBCursor(self.place_map)

    def get_source_cursor(self):
        """Return a GrampsBSDDBCursor over the source table."""
        return GrampsBSDDBCursor(self.source_map)

    def get_media_cursor(self):
        """Return a GrampsBSDDBCursor over the media-object table."""
        return GrampsBSDDBCursor(self.media_map)
|
2004-12-05 09:45:48 +05:30
|
|
|
|
2005-03-03 11:03:22 +05:30
|
|
|
def need_upgrade(self):
|
|
|
|
return self.metadata['version'] < _DBVERSION
|
|
|
|
|
2005-02-17 04:19:54 +05:30
|
|
|
    def load(self,name,callback,mode="w"):
        """Open the BSDDB database file *name*.

        mode "r" opens everything read-only; any other mode opens
        read-write, creating missing tables and secondary indexes.
        *callback* is accepted for interface compatibility but not used
        here.  Returns 1 on success.
        """
        # Re-opening over an already loaded database: close it first.
        if self.person_map:
            self.close()

        self.readonly = mode == "r"

        self.env = db.DBEnv()
        # 0x2000000 bytes = 32 MB cache (the old "2MB" comment was wrong).
        self.env.set_cachesize(0,0x2000000) # 2MB
        flags = db.DB_CREATE|db.DB_INIT_MPOOL|db.DB_PRIVATE

        # The undo log is a separate recno DB next to the main file.
        self.undolog = "%s.log" % name
        self.env.open(os.path.dirname(name), flags)

        # Within the environment, tables are addressed by base filename.
        name = os.path.basename(name)

        # Primary tables: one DBShelf per record type.
        self.family_map = self.dbopen(name, "family")
        self.place_map = self.dbopen(name, "places")
        self.source_map = self.dbopen(name, "sources")
        self.media_map = self.dbopen(name, "media")
        self.event_map = self.dbopen(name, "events")
        self.metadata = self.dbopen(name, "meta")
        self.person_map = self.dbopen(name, "person")

        if self.readonly:
            openflags = db.DB_RDONLY
        else:
            openflags = db.DB_CREATE

        # Secondary indexes.  DB_DUP: many records may share one key
        # (e.g. several people with the same surname).
        self.surnames = db.DB(self.env)
        self.surnames.set_flags(db.DB_DUP)
        self.surnames.open(name, "surnames", db.DB_HASH, flags=openflags)

        self.name_group = db.DB(self.env)
        self.name_group.set_flags(db.DB_DUP)
        self.name_group.open(name, "name_group", db.DB_HASH, flags=openflags)

        self.id_trans = db.DB(self.env)
        self.id_trans.set_flags(db.DB_DUP)
        self.id_trans.open(name, "idtrans", db.DB_HASH, flags=openflags)

        self.fid_trans = db.DB(self.env)
        self.fid_trans.set_flags(db.DB_DUP)
        self.fid_trans.open(name, "fidtrans", db.DB_HASH, flags=openflags)

        self.pid_trans = db.DB(self.env)
        self.pid_trans.set_flags(db.DB_DUP)
        self.pid_trans.open(name, "pidtrans", db.DB_HASH, flags=openflags)

        self.sid_trans = db.DB(self.env)
        self.sid_trans.set_flags(db.DB_DUP)
        self.sid_trans.open(name, "sidtrans", db.DB_HASH, flags=openflags)

        self.oid_trans = db.DB(self.env)
        self.oid_trans.set_flags(db.DB_DUP)
        self.oid_trans.open(name, "oidtrans", db.DB_HASH, flags=openflags)

        self.eventnames = db.DB(self.env)
        self.eventnames.set_flags(db.DB_DUP)
        self.eventnames.open(name, "eventnames", db.DB_HASH, flags=openflags)

        if not self.readonly:
            # Hook each secondary index to its primary table so BSDDB
            # keeps them in sync on every put/delete; the find_* callbacks
            # extract the index key from the record tuple.
            self.person_map.associate(self.surnames, find_surname, openflags)
            self.person_map.associate(self.id_trans, find_idmap, openflags)
            self.family_map.associate(self.fid_trans, find_idmap, openflags)
            self.place_map.associate(self.pid_trans, find_idmap, openflags)
            self.media_map.associate(self.oid_trans, find_idmap, openflags)
            self.source_map.associate(self.sid_trans, find_idmap, openflags)
            self.event_map.associate(self.eventnames, find_eventname, openflags)
            # Undo log lives outside the environment (plain DB, recno).
            self.undodb = db.DB()
            self.undodb.open(self.undolog, db.DB_RECNO, db.DB_CREATE)

        # NOTE(review): metadata was already opened above with the other
        # tables -- this second dbopen looks redundant; verify intent.
        self.metadata = self.dbopen(name, "meta")
        self.bookmarks = self.metadata.get('bookmarks')

        gstats = self.metadata.get('gender_stats')

        # A database with no gender stats is brand new: stamp it with the
        # current schema version.  Otherwise, an unstamped (old) database
        # gets version 0 so need_upgrade()/upgrade() will run everything.
        if not self.readonly and gstats == None:
            self.metadata['version'] = _DBVERSION
        elif not self.metadata.has_key('version'):
            self.metadata['version'] = 0

        if self.bookmarks == None:
            self.bookmarks = []

        self.genderStats = GenderStats(gstats)
        return 1
|
|
|
|
|
2004-10-13 09:21:27 +05:30
|
|
|
    def abort_changes(self):
        """Roll back every uncommitted change, then close the database.

        undo() is assumed to return a falsy value once the undo log is
        exhausted, terminating the loop.
        """
        while self.undo():
            pass
        self.close()
|
|
|
|
|
2004-07-31 00:26:49 +05:30
|
|
|
    def close(self):
        """Flush metadata, close all tables/indexes, and tear down the env.

        Order matters: primary tables and secondaries are closed before
        the environment itself.  In writable mode the bookmarks and
        gender statistics are persisted to the metadata table first, and
        the undo log file is removed afterwards.
        """
        self.name_group.close()
        self.person_map.close()
        self.family_map.close()
        self.place_map.close()
        self.source_map.close()
        self.media_map.close()
        self.event_map.close()
        if not self.readonly:
            # Persist in-memory session state before the metadata table closes.
            self.metadata['bookmarks'] = self.bookmarks
            self.metadata['gender_stats'] = self.genderStats.save_stats()
        self.metadata.close()
        self.surnames.close()
        self.eventnames.close()
        self.id_trans.close()
        self.fid_trans.close()
        self.oid_trans.close()
        self.sid_trans.close()
        self.pid_trans.close()
        self.env.close()

        if not self.readonly:
            self.undodb.close()
            # Best effort: the undo log is session-scoped scratch data.
            try:
                os.remove(self.undolog)
            except:
                pass

        # Drop references so load() can detect the closed state.
        # NOTE(review): name_group is closed above but never reset to
        # None like the others -- verify whether that is intentional.
        self.person_map = None
        self.family_map = None
        self.place_map = None
        self.source_map = None
        self.media_map = None
        self.event_map = None
        self.surnames = None
        self.env = None
        self.metadata = None
|
|
|
|
|
2004-10-01 00:02:56 +05:30
|
|
|
    def set_name_group_mapping(self,name,group):
        """Map surname *name* to grouping *group*; a falsy group removes it.

        No-op when the database is opened read-only.
        """
        if not self.readonly:
            name = str(name)
            # Falsy group means "remove any existing mapping".
            if not group and self.name_group.has_key(name):
                self.name_group.delete(name)
            else:
                self.name_group[name] = group
|
2004-10-01 00:02:56 +05:30
|
|
|
|
2004-08-13 10:04:07 +05:30
|
|
|
def get_surname_list(self):
|
2004-07-31 00:26:49 +05:30
|
|
|
names = self.surnames.keys()
|
|
|
|
a = {}
|
|
|
|
for name in names:
|
|
|
|
a[unicode(name)] = 1
|
|
|
|
vals = a.keys()
|
2004-08-27 03:24:14 +05:30
|
|
|
vals.sort(locale.strcoll)
|
2004-07-31 00:26:49 +05:30
|
|
|
return vals
|
|
|
|
|
2004-08-13 10:04:07 +05:30
|
|
|
def get_person_event_type_list(self):
|
2004-07-31 00:26:49 +05:30
|
|
|
names = self.eventnames.keys()
|
|
|
|
a = {}
|
|
|
|
for name in names:
|
2005-03-09 09:28:44 +05:30
|
|
|
|
2004-07-31 00:26:49 +05:30
|
|
|
a[unicode(name)] = 1
|
|
|
|
vals = a.keys()
|
|
|
|
vals.sort()
|
|
|
|
return vals
|
|
|
|
|
2004-08-13 10:04:07 +05:30
|
|
|
    def remove_person(self,handle,transaction):
        """Delete the person with *handle* from the database.

        Updates gender statistics, records the serialized person in the
        undo *transaction* (when one is given), and emits the
        'person-delete' signal unless the transaction is a batch.
        No-op when the database is read-only.
        """
        if not self.readonly:
            person = self.get_person_from_handle(handle)
            self.genderStats.uncount_person (person)
            if transaction != None:
                # Save the full record so the removal can be undone.
                transaction.add(PERSON_KEY,handle,person.serialize())
                if not transaction.batch:
                    self.emit('person-delete',([str(handle)],))
            self.person_map.delete(str(handle))
|
2004-07-31 00:26:49 +05:30
|
|
|
|
2004-08-13 10:04:07 +05:30
|
|
|
    def remove_source(self,handle,transaction):
        """Delete the source with *handle* from the database.

        Records the old data in the undo *transaction* (when one is
        given) and emits 'source-delete' unless the transaction is a
        batch.  No-op when the database is read-only.
        """
        if not self.readonly:
            if transaction != None:
                old_data = self.source_map.get(str(handle))
                transaction.add(SOURCE_KEY,handle,old_data)
                if not transaction.batch:
                    self.emit('source-delete',([handle],))
            self.source_map.delete(str(handle))
|
2004-07-31 00:26:49 +05:30
|
|
|
|
2004-09-19 09:41:34 +05:30
|
|
|
    def remove_family(self,handle,transaction):
        """Delete the family with *handle* from the database.

        Records the old data in the undo *transaction* (when one is
        given) and emits 'family-delete' unless the transaction is a
        batch.  No-op when the database is read-only.
        """
        if not self.readonly:
            if transaction != None:
                old_data = self.family_map.get(str(handle))
                transaction.add(FAMILY_KEY,handle,old_data)
                if not transaction.batch:
                    self.emit('family-delete',([str(handle)],))
            self.family_map.delete(str(handle))
|
2004-08-13 10:04:07 +05:30
|
|
|
|
|
|
|
    def remove_event(self,handle,transaction):
        """Delete the event with *handle* from the database.

        Records the old data in the undo *transaction* when one is given.
        No-op when the database is read-only.

        NOTE(review): unlike the sibling remove_* methods, this one emits
        no '*-delete' signal -- verify whether an event-delete signal
        exists and should be emitted here.
        """
        if not self.readonly:
            if transaction != None:
                old_data = self.event_map.get(str(handle))
                transaction.add(EVENT_KEY,handle,old_data)
            self.event_map.delete(str(handle))
|
2004-07-31 00:26:49 +05:30
|
|
|
|
2004-08-30 09:20:33 +05:30
|
|
|
    def remove_place(self,handle,transaction):
        """Delete the place with *handle* from the database.

        Records the old data in the undo *transaction* (when one is
        given) and emits 'place-delete' unless the transaction is a
        batch.  No-op when the database is read-only.
        """
        if not self.readonly:
            if transaction != None:
                # NOTE(review): siblings use get(str(handle)); here the raw
                # handle is used while the delete below stringifies -- works
                # only if handles are already strings; verify.
                old_data = self.place_map.get(handle)
                transaction.add(PLACE_KEY,handle,old_data)
                if not transaction.batch:
                    self.emit('place-delete',([handle],))
            self.place_map.delete(str(handle))
|
2004-08-30 09:20:33 +05:30
|
|
|
|
2004-12-01 09:45:08 +05:30
|
|
|
def remove_object(self,handle,transaction):
|
2005-02-17 04:19:54 +05:30
|
|
|
if not self.readonly:
|
|
|
|
if transaction != None:
|
|
|
|
old_data = self.media_map.get(handle)
|
|
|
|
transaction.add(PLACE_KEY,handle,old_data)
|
2005-04-04 05:57:06 +05:30
|
|
|
if not transaction.batch:
|
|
|
|
self.emit('media-delete',([handle],))
|
2005-02-17 04:19:54 +05:30
|
|
|
self.media_map.delete(str(handle))
|
2004-12-01 09:45:08 +05:30
|
|
|
|
2004-08-20 07:50:06 +05:30
|
|
|
    def get_person_from_gramps_id(self,val):
        """Find a Person in the database from the passed GRAMPS ID *val*.

        Returns the unserialized Person, or None if no person has that
        ID.  (The old docstring claimed a new Person would be added --
        the code adds nothing.)
        """

        # id_trans is a secondary index mapping GRAMPS ID -> pickled record.
        data = self.id_trans.get(str(val))
        if data:
            person = Person()
            person.unserialize(cPickle.loads(data))
            return person
        else:
            return None
|
|
|
|
|
|
|
|
    def get_family_from_gramps_id(self,val):
        """Find a Family in the database from the passed GRAMPS ID *val*.

        Returns the unserialized Family, or None if no family has that
        ID.  (The old docstring claimed a new Person would be added --
        the code adds nothing.)
        """

        data = self.fid_trans.get(str(val))
        if data:
            family = Family()
            family.unserialize(cPickle.loads(data))
            return family
        else:
            return None
|
2004-08-24 09:18:15 +05:30
|
|
|
|
|
|
|
    def get_place_from_gramps_id(self,val):
        """Find a Place in the database from the passed GRAMPS ID *val*.

        Returns the unserialized Place, or None if no place has that ID.
        (The old docstring claimed a new Person would be added -- the
        code adds nothing.)
        """

        data = self.pid_trans.get(str(val))
        if data:
            place = Place()
            place.unserialize(cPickle.loads(data))
            return place
        else:
            return None
|
|
|
|
|
|
|
|
    def get_source_from_gramps_id(self,val):
        """Find a Source in the database from the passed GRAMPS ID *val*.

        Returns the unserialized Source, or None if no source has that
        ID.  (The old docstring claimed a new Person would be added --
        the code adds nothing.)
        """

        data = self.sid_trans.get(str(val))
        if data:
            source = Source()
            source.unserialize(cPickle.loads(data))
            return source
        else:
            return None
|
|
|
|
|
|
|
|
    def get_object_from_gramps_id(self,val):
        """Find a MediaObject in the database from the passed GRAMPS ID *val*.

        Returns the unserialized MediaObject, or None if no media object
        has that ID.  (The old docstring claimed a new Person would be
        added -- the code adds nothing.)
        """

        data = self.oid_trans.get(str(val))
        if data:
            obj = MediaObject()
            obj.unserialize(cPickle.loads(data))
            return obj
        else:
            return None
|
2005-03-03 11:03:22 +05:30
|
|
|
|
|
|
|
    def upgrade(self):
        """Bring the on-disk schema up to _DBVERSION, one step at a time.

        Each upgrade_N migrates from schema < N; steps are cumulative,
        so a version-1 database runs all of them.  Finally the metadata
        version is stamped with the current _DBVERSION.
        """
        # Untranslated child-relation names in their historical integer
        # order; versions < 2/4 stored these strings instead of indexes.
        child_rel_notrans = [
            "None", "Birth", "Adopted", "Stepchild",
            "Sponsored", "Foster", "Unknown", "Other", ]

        version = self.metadata['version']
        if version < 2:
            self.upgrade_2(child_rel_notrans)
        if version < 3:
            self.upgrade_3()
        if version < 4:
            self.upgrade_4(child_rel_notrans)
        if version < 5:
            self.upgrade_5()
        self.metadata['version'] = _DBVERSION
        print 'Successfully finished all upgrades'
|
|
|
|
|
|
|
|
    def upgrade_2(self,child_rel_notrans):
        """Schema v2 migration: child-relation strings -> integer indexes.

        Walks every person and replaces the textual mother/father
        relation in each parent-family entry with its index into
        *child_rel_notrans*; unrecognized strings fall back to
        CHILD_REL_BIRTH.
        """
        print "Upgrading to DB version 2"
        cursor = self.get_person_cursor()
        data = cursor.first()
        while data:
            handle,info = data
            person = Person()
            person.unserialize(info)

            plist = person.get_parent_family_handle_list()
            new_list = []
            for (f,mrel,frel) in plist:
                try:
                    mrel = child_rel_notrans.index(mrel)
                except:
                    # Unknown/already-numeric value: default to birth.
                    mrel = Person.CHILD_REL_BIRTH
                try:
                    frel = child_rel_notrans.index(frel)
                except:
                    frel = Person.CHILD_REL_BIRTH
                new_list.append((f,mrel,frel))
            person.parent_family_list = new_list
            self.commit_person(person,None)
            data = cursor.next()
        cursor.close()
|
|
|
|
|
|
|
|
    def upgrade_3(self):
        """Schema v3 migration: clear the date field on every Name.

        Walks every person and resets the date of the primary name and
        all alternate names to None.
        """
        print "Upgrading to DB version 3"
        cursor = self.get_person_cursor()
        data = cursor.first()
        while data:
            handle,info = data
            person = Person()
            person.unserialize(info)

            person.primary_name.date = None
            for name in person.alternate_names:
                name.date = None
            self.commit_person(person,None)
            data = cursor.next()
        cursor.close()
|
|
|
|
|
|
|
|
    def upgrade_4(self,child_rel_notrans):
        """Schema v4 migration: fix child relations still stored as strings.

        Same mapping as upgrade_2, but only converts entries whose type
        is still str (i.e. records the earlier migration missed), and
        commits a person only when something actually changed.
        """
        print "Upgrading to DB version 4"
        cursor = self.get_person_cursor()
        data = cursor.first()
        while data:
            handle,info = data
            person = Person()
            person.unserialize(info)

            plist = person.get_parent_family_handle_list()
            new_list = []
            change = False
            for (f,mrel,frel) in plist:
                if type(mrel) == str:
                    mrel = child_rel_notrans.index(mrel)
                    change = True
                if type(frel) == str:
                    frel = child_rel_notrans.index(frel)
                    change = True
                new_list.append((f,mrel,frel))
            if change:
                person.parent_family_list = new_list
                self.commit_person(person,None)
            data = cursor.next()
        cursor.close()
|
|
|
|
|
|
|
|
    def upgrade_5(self):
        """Schema v5 migration: rename attributes across all record types.

        Renames attrlist -> attribute_list on every MediaRef and
        comments -> note on every SourceRef, in all primary and
        secondary objects; MediaObject additionally loses its old place
        attribute (hence the manual tuple unpack below).
        """
        print "Upgrading to DB version 5 -- this may take a while"
        # Need to rename:
        #        attrlist into attribute_list in MediaRefs
        #        comments into note in SourceRefs
        #        in all primary and secondary objects
        # Also MediaObject gets place attribute removed
        cursor = self.get_media_cursor()
        data = cursor.first()
        while data:
            handle,info = data
            obj = MediaObject()
            # can't use unserialize here, since the new class
            # defines tuples one element short
            (obj.handle, obj.gramps_id, obj.path, obj.mime, obj.desc,
             obj.attribute_list, obj.source_list, obj.note, obj.change,
             obj.date, junk) = info
            for src_ref in obj.source_list:
                src_ref.note = src_ref.comments
                del src_ref.comments
            for attr in obj.attribute_list:
                for src_ref in attr.source_list:
                    src_ref.note = src_ref.comments
                    del src_ref.comments
            self.commit_media_object(obj,None)
            data = cursor.next()
        cursor.close()
        # person
        cursor = self.get_person_cursor()
        data = cursor.first()
        while data:
            handle,info = data
            person = Person()
            person.unserialize(info)
            # Any of these lists being non-empty means a rename happened.
            changed = person.media_list or person.source_list or person.attribute_list
            for media_ref in person.media_list:
                media_ref.attribute_list = media_ref.attrlist
                del media_ref.attrlist
                for src_ref in media_ref.source_list:
                    src_ref.note = src_ref.comments
                    del src_ref.comments
                for attr in media_ref.attribute_list:
                    for src_ref in attr.source_list:
                        src_ref.note = src_ref.comments
                        del src_ref.comments
            for src_ref in person.source_list:
                src_ref.note = src_ref.comments
                del src_ref.comments
            for attr in person.attribute_list:
                for src_ref in attr.source_list:
                    src_ref.note = src_ref.comments
                    del src_ref.comments
            # LDS ordinances: only the non-None ones.
            for o in [o for o in [person.lds_bapt,
                                  person.lds_endow,
                                  person.lds_seal] if o]:
                for src_ref in o.source_list:
                    src_ref.note = src_ref.comments
                    del src_ref.comments
                    changed = True
            for name in person.alternate_names + [person.primary_name]:
                for src_ref in name.source_list:
                    src_ref.note = src_ref.comments
                    del src_ref.comments
                    changed = True
            for addr in person.address_list:
                for src_ref in addr.source_list:
                    src_ref.note = src_ref.comments
                    del src_ref.comments
                    changed = True
            if changed:
                self.commit_person(person,None)
            data = cursor.next()
        cursor.close()
        # family
        cursor = self.get_family_cursor()
        data = cursor.first()
        while data:
            handle,info = data
            family = Family()
            family.unserialize(info)
            changed = family.media_list or family.source_list or family.attribute_list
            for media_ref in family.media_list:
                media_ref.attribute_list = media_ref.attrlist
                del media_ref.attrlist
                for src_ref in media_ref.source_list:
                    src_ref.note = src_ref.comments
                    del src_ref.comments
                for attr in media_ref.attribute_list:
                    for src_ref in attr.source_list:
                        src_ref.note = src_ref.comments
                        del src_ref.comments
            for src_ref in family.source_list:
                src_ref.note = src_ref.comments
                del src_ref.comments
            for attr in family.attribute_list:
                for src_ref in attr.source_list:
                    src_ref.note = src_ref.comments
                    del src_ref.comments
            if family.lds_seal:
                for src_ref in family.lds_seal.source_list:
                    src_ref.note = src_ref.comments
                    del src_ref.comments
                    changed = True
            if changed:
                self.commit_family(family,None)
            data = cursor.next()
        cursor.close()
        # event
        cursor = self.get_event_cursor()
        data = cursor.first()
        while data:
            handle,info = data
            event = Event()
            event.unserialize(info)
            changed = event.media_list or event.source_list
            for media_ref in event.media_list:
                media_ref.attribute_list = media_ref.attrlist
                del media_ref.attrlist
                for src_ref in media_ref.source_list:
                    src_ref.note = src_ref.comments
                    del src_ref.comments
                for attr in media_ref.attribute_list:
                    for src_ref in attr.source_list:
                        src_ref.note = src_ref.comments
                        del src_ref.comments
            for src_ref in event.source_list:
                src_ref.note = src_ref.comments
                del src_ref.comments
            if changed:
                self.commit_event(event,None)
            data = cursor.next()
        cursor.close()
        # place
        cursor = self.get_place_cursor()
        data = cursor.first()
        while data:
            handle,info = data
            place = Place()
            place.unserialize(info)
            changed = place.media_list or place.source_list
            for media_ref in place.media_list:
                media_ref.attribute_list = media_ref.attrlist
                del media_ref.attrlist
                for src_ref in media_ref.source_list:
                    src_ref.note = src_ref.comments
                    del src_ref.comments
                for attr in media_ref.attribute_list:
                    for src_ref in attr.source_list:
                        src_ref.note = src_ref.comments
                        del src_ref.comments
            for src_ref in place.source_list:
                src_ref.note = src_ref.comments
                del src_ref.comments
            if changed:
                self.commit_place(place,None)
            data = cursor.next()
        cursor.close()
        # source
        cursor = self.get_source_cursor()
        data = cursor.first()
        while data:
            handle,info = data
            source = Source()
            source.unserialize(info)
            changed = source.media_list
            for media_ref in source.media_list:
                media_ref.attribute_list = media_ref.attrlist
                del media_ref.attrlist
                for src_ref in media_ref.source_list:
                    src_ref.note = src_ref.comments
                    del src_ref.comments
                for attr in media_ref.attribute_list:
                    for src_ref in attr.source_list:
                        src_ref.note = src_ref.comments
                        del src_ref.comments
            if changed:
                self.commit_source(source,None)
            data = cursor.next()
        cursor.close()
|