#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#

# $Id$

"""
Provides the Berkeley DB (BSDDB) database backend for GRAMPS
"""

#-------------------------------------------------------------------------
#
# Standard python modules
#
#-------------------------------------------------------------------------
import cPickle
import os
import time
import locale
import sets
from gettext import gettext as _
from bsddb import dbshelve, db
import logging

log = logging.getLogger(".GrampsDb")

# hack to use native set for python2.4
# and module sets for earlier pythons
try:
    set()
except NameError:
    from sets import Set as set

#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from RelLib import *
from _GrampsDbBase import *
import const

_MINVERSION = 5
_DBVERSION = 9
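# _MINVERSION is the oldest database version this backend can still read;
# _DBVERSION is the version written by this code and the target of
# gramps_upgrade() below.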

def find_surname(key,data):
    return str(data[3].get_surname())

def find_idmap(key,data):
    return str(data[1])

def find_fidmap(key,data):
    return str(data[1])

def find_eventname(key,data):
    return str(data[2])

def find_repository_type(key,data):
    return str(data[2])

# Secondary database key lookups for reference_map table
# reference_map data values are of the form:
#   ((primary_object_class_name, primary_object_handle),
#    (referenced_object_class_name, referenced_object_handle))
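# Illustration (not part of the original layout notes): a row recording
# that a Person with handle 'p1' references a Source with handle 's1' is
# stored, pickled, roughly as
#     ((CLASS_TO_KEY_MAP['Person'], 'p1'),
#      (CLASS_TO_KEY_MAP['Source'], 's1'))
# so find_primary_handle() below yields 'p1' and find_referenced_handle()
# yields 's1'.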

def find_primary_handle(key,data):
    return str(data[0][1])

def find_referenced_handle(key,data):
    return str(data[1][1])

class GrampsBSDDBCursor(GrampsCursor):

    def __init__(self,source,txn=None):
        self.cursor = source.cursor(txn)

    def first(self):
        return self.cursor.first()

    def next(self):
        return self.cursor.next()

    def close(self):
        self.cursor.close()

class GrampsBSDDBDupCursor(GrampsBSDDBCursor):
    """Cursor that includes handling for duplicate keys"""

    def set(self,key):
        return self.cursor.set(str(key))

    def next_dup(self):
        return self.cursor.next_dup()
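
# Illustrative sketch (assumes an open GrampsBSDDB instance 'dbase' and a
# handle 'h'): walking every duplicate row for one key is the pattern used
# by find_backlink_handles and _update_reference_map below.
#
#     cur = dbase.get_reference_map_primary_cursor()
#     ret = cur.set(h)            # first row for this key (may raise)
#     while ret is not None:
#         key,data = ret          # data must still be cPickle.loads()-ed
#         ret = cur.next_dup()    # next row with the same key
#     cur.close()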

#-------------------------------------------------------------------------
#
# GrampsBSDDB
#
#-------------------------------------------------------------------------
class GrampsBSDDB(GrampsDbBase):
    """GRAMPS database object. This is the Berkeley DB (BSDDB)
    implementation of GrampsDbBase."""

    def __init__(self):
        """creates a new GrampsDB"""
        GrampsDbBase.__init__(self)
        self.txn = None

    def open_table(self,name,dbname,no_txn=False):
        dbmap = dbshelve.DBShelf(self.env)
        dbmap.db.set_pagesize(16384)
        if self.readonly:
            dbmap.open(name, dbname, db.DB_HASH, db.DB_RDONLY)
        elif no_txn:
            dbmap.open(name, dbname, db.DB_HASH, db.DB_CREATE, 0666)
        else:
            dbmap.open(name, dbname, db.DB_HASH,
                       db.DB_CREATE|db.DB_AUTO_COMMIT, 0666)
        return dbmap

    def _all_handles(self,table):
        return table.keys(self.txn)

    def get_person_cursor(self):
        return GrampsBSDDBCursor(self.person_map,self.txn)

    def get_family_cursor(self):
        return GrampsBSDDBCursor(self.family_map,self.txn)

    def get_event_cursor(self):
        return GrampsBSDDBCursor(self.event_map,self.txn)

    def get_place_cursor(self):
        return GrampsBSDDBCursor(self.place_map,self.txn)

    def get_source_cursor(self):
        return GrampsBSDDBCursor(self.source_map,self.txn)

    def get_media_cursor(self):
        return GrampsBSDDBCursor(self.media_map,self.txn)

    def get_repository_cursor(self):
        return GrampsBSDDBCursor(self.repository_map,self.txn)

    def has_person_handle(self,handle):
        """
        returns True if the handle exists in the current Person database.
        """
        return self.person_map.get(str(handle),txn=self.txn) != None

    def has_family_handle(self,handle):
        """
        returns True if the handle exists in the current Family database.
        """
        return self.family_map.get(str(handle),txn=self.txn) != None

    def has_object_handle(self,handle):
        """
        returns True if the handle exists in the current MediaObject database.
        """
        return self.media_map.get(str(handle),txn=self.txn) != None

    def has_repository_handle(self,handle):
        """
        returns True if the handle exists in the current Repository database.
        """
        return self.repository_map.get(str(handle),txn=self.txn) != None

    def has_event_handle(self,handle):
        """
        returns True if the handle exists in the current Event database.
        """
        return self.event_map.get(str(handle),txn=self.txn) != None

    def has_place_handle(self,handle):
        """
        returns True if the handle exists in the current Place database.
        """
        return self.place_map.get(str(handle),txn=self.txn) != None

    def has_source_handle(self,handle):
        """
        returns True if the handle exists in the current Source database.
        """
        return self.source_map.get(str(handle),txn=self.txn) != None

    def get_raw_person_data(self,handle):
        return self.person_map.get(str(handle),txn=self.txn)

    def get_raw_family_data(self,handle):
        return self.family_map.get(str(handle),txn=self.txn)

    def get_raw_object_data(self,handle):
        return self.media_map.get(str(handle),txn=self.txn)

    def get_raw_place_data(self,handle):
        return self.place_map.get(str(handle),txn=self.txn)

    def get_raw_event_data(self,handle):
        return self.event_map.get(str(handle),txn=self.txn)

    def get_raw_source_data(self,handle):
        return self.source_map.get(str(handle),txn=self.txn)

    def get_raw_repository_data(self,handle):
        return self.repository_map.get(str(handle),txn=self.txn)

    # cursors for lookups in the reference_map for back reference
    # lookups. The reference_map has three indexes:
    # the main index: a tuple of (primary_handle,referenced_handle)
    # the primary_handle index: the primary_handle
    # the referenced_handle index: the referenced_handle
    # the main index is unique, the others allow duplicate entries.

    def get_reference_map_cursor(self):
        return GrampsBSDDBCursor(self.reference_map,self.txn)

    def get_reference_map_primary_cursor(self):
        return GrampsBSDDBDupCursor(self.reference_map_primary_map,self.txn)

    def get_reference_map_referenced_cursor(self):
        return GrampsBSDDBDupCursor(self.reference_map_referenced_map,self.txn)

    def version_supported(self):
        return (self.metadata.get('version',0) <= _DBVERSION and
                self.metadata.get('version',0) >= _MINVERSION)

    def need_upgrade(self):
        return not self.readonly \
               and self.metadata.get('version',0) < _DBVERSION

    def load(self,name,callback,mode="w"):
        if self.db_is_open:
            self.close()

        self.readonly = mode == "r"

        callback(0.25)

        self.env = db.DBEnv()
        self.env.set_cachesize(0,0x2000000) # 32MB
        # The DB_PRIVATE flag must go if we ever move to multi-user setup
        env_flags = db.DB_CREATE|db.DB_PRIVATE|\
                    db.DB_INIT_MPOOL|db.DB_INIT_LOCK|\
                    db.DB_INIT_LOG|db.DB_INIT_TXN|db.DB_RECOVER

        self.undolog = "%s.log" % name
        env_name = os.path.expanduser(const.bsddbenv_dir)
        if not os.path.isdir(env_name):
            os.mkdir(env_name)
        self.env.open(env_name,env_flags)

        self.full_name = os.path.abspath(name)
        self.brief_name = os.path.basename(name)

        self.family_map = self.open_table(self.full_name, "family")
        self.place_map = self.open_table(self.full_name, "places")
        self.source_map = self.open_table(self.full_name, "sources")
        self.media_map = self.open_table(self.full_name, "media")
        self.event_map = self.open_table(self.full_name, "events")
        self.metadata = self.open_table(self.full_name, "meta")
        self.person_map = self.open_table(self.full_name, "person")
        self.repository_map = self.open_table(self.full_name, "repository")
        self.reference_map = self.open_table(self.full_name, "reference_map")

        # index tables used just for speeding up searches
        if self.readonly:
            table_flags = db.DB_RDONLY
        else:
            table_flags = db.DB_CREATE|db.DB_AUTO_COMMIT

        self.surnames = db.DB(self.env)
        self.surnames.set_flags(db.DB_DUP)
        self.surnames.open(self.full_name, "surnames",
                           db.DB_HASH, flags=table_flags)

        self.name_group = db.DB(self.env)
        self.name_group.set_flags(db.DB_DUP)
        self.name_group.open(self.full_name, "name_group",
                             db.DB_HASH, flags=table_flags)

        self.id_trans = db.DB(self.env)
        self.id_trans.set_flags(db.DB_DUP)
        self.id_trans.open(self.full_name, "idtrans",
                           db.DB_HASH, flags=table_flags)

        self.fid_trans = db.DB(self.env)
        self.fid_trans.set_flags(db.DB_DUP)
        self.fid_trans.open(self.full_name, "fidtrans",
                            db.DB_HASH, flags=table_flags)

        self.eid_trans = db.DB(self.env)
        self.eid_trans.set_flags(db.DB_DUP)
        self.eid_trans.open(self.full_name, "eidtrans",
                            db.DB_HASH, flags=table_flags)

        self.pid_trans = db.DB(self.env)
        self.pid_trans.set_flags(db.DB_DUP)
        self.pid_trans.open(self.full_name, "pidtrans",
                            db.DB_HASH, flags=table_flags)

        self.sid_trans = db.DB(self.env)
        self.sid_trans.set_flags(db.DB_DUP)
        self.sid_trans.open(self.full_name, "sidtrans",
                            db.DB_HASH, flags=table_flags)

        self.oid_trans = db.DB(self.env)
        self.oid_trans.set_flags(db.DB_DUP)
        self.oid_trans.open(self.full_name, "oidtrans",
                            db.DB_HASH, flags=table_flags)

        self.rid_trans = db.DB(self.env)
        self.rid_trans.set_flags(db.DB_DUP)
        self.rid_trans.open(self.full_name, "ridtrans",
                            db.DB_HASH, flags=table_flags)

        self.eventnames = db.DB(self.env)
        self.eventnames.set_flags(db.DB_DUP)
        self.eventnames.open(self.full_name, "eventnames",
                             db.DB_HASH, flags=table_flags)

        self.repository_types = db.DB(self.env)
        self.repository_types.set_flags(db.DB_DUP)
        self.repository_types.open(self.full_name, "repostypes",
                                   db.DB_HASH, flags=table_flags)

        self.reference_map_primary_map = db.DB(self.env)
        self.reference_map_primary_map.set_flags(db.DB_DUP)
        self.reference_map_primary_map.open(self.full_name,
                                            "reference_map_primary_map",
                                            db.DB_BTREE, flags=table_flags)

        self.reference_map_referenced_map = db.DB(self.env)
        self.reference_map_referenced_map.set_flags(db.DB_DUP)
        self.reference_map_referenced_map.open(self.full_name,
                                               "reference_map_referenced_map",
                                               db.DB_BTREE, flags=table_flags)

        if not self.readonly:
            self.person_map.associate(self.surnames, find_surname, table_flags)
            self.person_map.associate(self.id_trans, find_idmap, table_flags)
            self.family_map.associate(self.fid_trans, find_idmap, table_flags)
            self.event_map.associate(self.eid_trans, find_idmap, table_flags)
            self.repository_map.associate(self.rid_trans, find_idmap,
                                          table_flags)
            self.repository_map.associate(self.repository_types,
                                          find_repository_type, table_flags)
            self.place_map.associate(self.pid_trans, find_idmap, table_flags)
            self.media_map.associate(self.oid_trans, find_idmap, table_flags)
            self.source_map.associate(self.sid_trans, find_idmap, table_flags)
            self.reference_map.associate(self.reference_map_primary_map,
                                         find_primary_handle,
                                         table_flags)
            self.reference_map.associate(self.reference_map_referenced_map,
                                         find_referenced_handle,
                                         table_flags)

        self.undodb = db.DB()
        self.undodb.open(self.undolog, db.DB_RECNO, db.DB_CREATE)

        callback(0.5)

        self.metadata = self.open_table(self.full_name, "meta", no_txn=True)
        self.bookmarks = self.metadata.get('bookmarks')
        self.family_event_names = sets.Set(self.metadata.get('fevent_names',[]))
        self.individual_event_names = sets.Set(self.metadata.get('pevent_names',[]))
        self.family_attributes = sets.Set(self.metadata.get('fattr_names',[]))
        self.individual_attributes = sets.Set(self.metadata.get('pattr_names',[]))

        gstats = self.metadata.get('gender_stats')

        if not self.readonly:
            if gstats == None:
                self.metadata['version'] = _DBVERSION
            elif not self.metadata.has_key('version'):
                self.metadata['version'] = 0

        if self.bookmarks == None:
            self.bookmarks = []

        self.genderStats = GenderStats(gstats)
        self.db_is_open = True
        return 1
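
    # Illustrative usage sketch (assumption: 'example.grdb' is a writable
    # GRAMPS database file):
    #
    #     dbase = GrampsBSDDB()
    #     dbase.load('example.grdb',lambda frac: None,mode="w")
    #     if dbase.need_upgrade():
    #         dbase.gramps_upgrade()
    #     ...
    #     dbase.close()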

    def rebuild_secondary(self,callback=None):
        table_flags = db.DB_CREATE|db.DB_AUTO_COMMIT

        # Repair secondary indices related to person_map

        self.id_trans.close()
        self.surnames.close()

        self.id_trans = db.DB(self.env)
        self.id_trans.set_flags(db.DB_DUP)
        self.id_trans.open(self.full_name, "idtrans", db.DB_HASH,
                           flags=table_flags)
        self.id_trans.truncate()

        self.surnames = db.DB(self.env)
        self.surnames.set_flags(db.DB_DUP)
        self.surnames.open(self.full_name, "surnames", db.DB_HASH,
                           flags=table_flags)
        self.surnames.truncate()

        self.person_map.associate(self.surnames, find_surname, db.DB_CREATE)
        self.person_map.associate(self.id_trans, find_idmap, db.DB_CREATE)

        for key in self.person_map.keys():
            if callback:
                callback()
            data = self.person_map.get(key,txn=self.txn)
            self.person_map.put(key,data,txn=self.txn)
        self.person_map.sync()

        # Repair secondary indices related to family_map

        self.fid_trans.close()
        self.fid_trans = db.DB(self.env)
        self.fid_trans.set_flags(db.DB_DUP)
        self.fid_trans.open(self.full_name, "fidtrans", db.DB_HASH,
                            flags=table_flags)
        self.fid_trans.truncate()
        self.family_map.associate(self.fid_trans, find_idmap, db.DB_CREATE)

        for key in self.family_map.keys():
            if callback:
                callback()
            self.family_map[key] = self.family_map[key]
        self.family_map.sync()

        # Repair secondary indices related to place_map

        self.pid_trans.close()
        self.pid_trans = db.DB(self.env)
        self.pid_trans.set_flags(db.DB_DUP)
        self.pid_trans.open(self.full_name, "pidtrans", db.DB_HASH,
                            flags=table_flags)
        self.pid_trans.truncate()
        self.place_map.associate(self.pid_trans, find_idmap, db.DB_CREATE)

        for key in self.place_map.keys():
            if callback:
                callback()
            self.place_map[key] = self.place_map[key]
        self.place_map.sync()

        # Repair secondary indices related to media_map

        self.oid_trans.close()
        self.oid_trans = db.DB(self.env)
        self.oid_trans.set_flags(db.DB_DUP)
        self.oid_trans.open(self.full_name, "oidtrans", db.DB_HASH,
                            flags=table_flags)
        self.oid_trans.truncate()
        self.media_map.associate(self.oid_trans, find_idmap, db.DB_CREATE)

        for key in self.media_map.keys():
            if callback:
                callback()
            self.media_map[key] = self.media_map[key]
        self.media_map.sync()

        # Repair secondary indices related to source_map

        self.sid_trans.close()
        self.sid_trans = db.DB(self.env)
        self.sid_trans.set_flags(db.DB_DUP)
        self.sid_trans.open(self.full_name, "sidtrans", db.DB_HASH,
                            flags=table_flags)
        self.sid_trans.truncate()
        self.source_map.associate(self.sid_trans, find_idmap, db.DB_CREATE)

        for key in self.source_map.keys():
            if callback:
                callback()
            self.source_map[key] = self.source_map[key]
        self.source_map.sync()

        # Repair secondary indices related to repository_map

        self.rid_trans.close()
        self.rid_trans = db.DB(self.env)
        self.rid_trans.set_flags(db.DB_DUP)
        self.rid_trans.open(self.full_name, "ridtrans", db.DB_HASH,
                            flags=table_flags)
        self.rid_trans.truncate()
        self.repository_map.associate(self.rid_trans, find_idmap, db.DB_CREATE)

        for key in self.repository_map.keys():
            if callback:
                callback()
            self.repository_map[key] = self.repository_map[key]
        self.repository_map.sync()

    def find_backlink_handles(self, handle, include_classes=None):
        """
        Find all objects that hold a reference to the object handle.
        Returns an iterator over a list of (class_name,handle) tuples.

        @param handle: handle of the object to search for.
        @type handle: database handle
        @param include_classes: list of class names to include in the results.
            Default: None means include all classes.
        @type include_classes: list of class names

        Note that this is a generator function, it returns an iterator for
        use in loops. If you want a list of the results use:

        result_list = [i for i in find_backlink_handles(handle)]
        """

        # Use the secondary index to locate all the reference_map entries
        # that include a reference to the object we are looking for.
        referenced_cur = self.get_reference_map_referenced_cursor()

        try:
            ret = referenced_cur.set(handle)
        except:
            ret = None

        while (ret is not None):
            (key,data) = ret

            # data values are of the form:
            #   ((primary_object_class_name, primary_object_handle),
            #    (referenced_object_class_name, referenced_object_handle))
            # so we need the first tuple to give us the type to compare
            data = cPickle.loads(data)
            if include_classes == None or KEY_TO_CLASS_MAP[data[0][0]] in include_classes:
                yield (KEY_TO_CLASS_MAP[data[0][0]],data[0][1])

            ret = referenced_cur.next_dup()

        referenced_cur.close()

        return

    def _delete_primary_from_reference_map(self, handle, transaction):
        """Remove all references to the primary object from the reference_map"""

        primary_cur = self.get_reference_map_primary_cursor()

        try:
            ret = primary_cur.set(handle)
        except:
            ret = None

        while (ret is not None):
            (key,data) = ret

            # data values are of the form:
            #   ((primary_object_class_name, primary_object_handle),
            #    (referenced_object_class_name, referenced_object_handle))
            # so we need the second tuple to give us a reference that we can
            # combine with the primary_handle to get the main key.
            main_key = (handle, cPickle.loads(data)[1][1])

            self._remove_reference(main_key,transaction)

            ret = primary_cur.next_dup()

        primary_cur.close()

    def _update_reference_map(self, obj, transaction):
        # Add references to the reference_map for all primary objects
        # referenced from the primary object 'obj' or any of its secondary
        # objects.

        # FIXME: this needs to be properly integrated into the transaction
        # framework so that the reference_map changes are part of the
        # transaction

        handle = obj.get_handle()

        # First thing to do is get hold of all rows in the reference_map
        # table that hold a reference from this primary obj. This means
        # finding all the rows that have this handle somewhere in the list
        # of (class_name,handle) pairs.
        # The primary_map secondary index allows us to look this up quickly.

        existing_references = set()

        primary_cur = self.get_reference_map_primary_cursor()

        try:
            ret = primary_cur.set(handle)
        except:
            ret = None

        while (ret is not None):
            (key,data) = ret

            # data values are of the form:
            #   ((primary_object_class_name, primary_object_handle),
            #    (referenced_object_class_name, referenced_object_handle))
            # so we need the second tuple to give us a reference that we can
            # compare with what is returned from
            # get_referenced_handles_recursively.

            # Looks like there is a bug in the set() and next_dup() methods
            # because they do not run the data through cPickle.loads before
            # returning it, so we have to here.
            existing_reference = cPickle.loads(data)[1]
            existing_references.add((KEY_TO_CLASS_MAP[existing_reference[0]],
                                     existing_reference[1]))
            ret = primary_cur.next_dup()

        primary_cur.close()

        # Once we have the list of rows that already have a reference we
        # need to compare it with the list of objects that are still
        # referenced from the primary object.

        current_references = set(obj.get_referenced_handles_recursively())

        no_longer_required_references = existing_references.difference(current_references)

        new_references = current_references.difference(existing_references)

        # handle addition of new references
        if len(new_references) > 0:
            for (ref_class_name,ref_handle) in new_references:
                # Build the reference record in the documented
                # ((class_key,handle),(class_key,handle)) form.
                data = ((CLASS_TO_KEY_MAP[obj.__class__.__name__],handle),
                        (CLASS_TO_KEY_MAP[ref_class_name],ref_handle),)
                self._add_reference((handle,ref_handle),data,transaction)

        # handle deletion of old references
        if len(no_longer_required_references) > 0:
            for (ref_class_name,ref_handle) in no_longer_required_references:
                try:
                    self._remove_reference((handle,ref_handle),transaction)
                except: # ignore missing old reference
                    pass
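
    # Worked example (illustrative): if the reference_map already records
    # person P referencing sources S1 and S2, but P now references only S2
    # and event E1, then existing_references is {('Source','S1'),
    # ('Source','S2')} and current_references is {('Source','S2'),
    # ('Event','E1')}; the S1 row is removed and an E1 row is added.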

    def _remove_reference(self,key,transaction):
        """
        Removes the reference specified by the key,
        preserving the change in the passed transaction.
        """
        if not self.readonly:
            data = self.reference_map.get(str(key),txn=self.txn)
            if not transaction.batch:
                transaction.add(REFERENCE_KEY,str(key),cPickle.dumps(data))
            transaction.reference_del.append(str(key))

    def _add_reference(self,key,data,transaction):
        """
        Adds the reference specified by the key and the data,
        preserving the change in the passed transaction.
        """

        if self.readonly or not key:
            return

        if transaction.batch:
            self.reference_map.put(str(key),data,txn=self.txn)
        else:
            transaction.add(REFERENCE_KEY,str(key),None)
            transaction.reference_add.append((str(key),data))

    def reindex_reference_map(self):
        """Reindex all primary records in the database. This will be a
        slow process for large databases.

        At present this method does not clear the reference_map before it
        reindexes. This is fine if reindex is run to index new content or
        when upgrading from a non-reference_map version of the database. But
        it might be a problem if reindex is used to repair a broken index
        because any references to primary objects that are no longer in the
        database will remain in the reference_map index. So if you want to
        reindex for repair purposes you need to clear the reference_map
        first.
        """

        # Make a dictionary of the functions and classes that we need for
        # each of the primary object tables.
        primary_tables = {'Person': {'cursor_func': self.get_person_cursor,
                                     'class_func': Person},
                          'Family': {'cursor_func': self.get_family_cursor,
                                     'class_func': Family},
                          'Event': {'cursor_func': self.get_event_cursor,
                                    'class_func': Event},
                          'Place': {'cursor_func': self.get_place_cursor,
                                    'class_func': Place},
                          'Source': {'cursor_func': self.get_source_cursor,
                                     'class_func': Source},
                          'MediaObject': {'cursor_func': self.get_media_cursor,
                                          'class_func': MediaObject},
                          'Repository': {'cursor_func': self.get_repository_cursor,
                                         'class_func': Repository},
                          }

        # Assumption: _update_reference_map needs a transaction to record
        # into, so the rebuild is wrapped in this database's own
        # transaction machinery.
        transaction = self.transaction_begin()

        # Now we use the functions and classes defined above to loop
        # through each of the primary object tables.
        for primary_table_name in primary_tables.keys():

            cursor = primary_tables[primary_table_name]['cursor_func']()
            data = cursor.first()

            # Grab the real object class here so that the lookup does
            # not happen inside the main loop.
            class_func = primary_tables[primary_table_name]['class_func']

            while data:
                found_handle,val = data
                obj = class_func()
                obj.unserialize(val)

                self._update_reference_map(obj,transaction)

                data = cursor.next()

            cursor.close()

        self.transaction_commit(transaction,_("Rebuild reference map"))

        return

    def abort_changes(self):
        while self.undo():
            pass
        self.close()

    def close(self):
        if not self.db_is_open:
            return

        self.name_group.close()
        self.person_map.close()
        self.family_map.close()
        self.repository_map.close()
        self.place_map.close()
        self.source_map.close()
        self.media_map.close()
        self.event_map.close()
        self.reference_map.close()

        if not self.readonly:
            self.metadata['bookmarks'] = self.bookmarks
            self.metadata['gender_stats'] = self.genderStats.save_stats()
            self.metadata['fevent_names'] = list(self.family_event_names)
            self.metadata['pevent_names'] = list(self.individual_event_names)
            self.metadata['fattr_names'] = list(self.family_attributes)
            self.metadata['pattr_names'] = list(self.individual_attributes)

        self.metadata.close()
        self.surnames.close()
        self.eventnames.close()
        self.repository_types.close()
        self.id_trans.close()
        self.fid_trans.close()
        self.eid_trans.close()
        self.rid_trans.close()
        self.oid_trans.close()
        self.sid_trans.close()
        self.pid_trans.close()
        self.reference_map_primary_map.close()
        self.reference_map_referenced_map.close()
        self.env.close()

        if not self.readonly:
            self.undodb.close()
            try:
                os.remove(self.undolog)
            except:
                pass

        self.person_map = None
        self.family_map = None
        self.repository_map = None
        self.place_map = None
        self.source_map = None
        self.media_map = None
        self.event_map = None
        self.surnames = None
        self.env = None
        self.metadata = None
        self.db_is_open = False

    def _del_person(self,handle):
        self.person_map.delete(str(handle),txn=self.txn)

    def _del_source(self,handle):
        self.source_map.delete(str(handle),txn=self.txn)

    def _del_repository(self,handle):
        self.repository_map.delete(str(handle),txn=self.txn)

    def _del_place(self,handle):
        self.place_map.delete(str(handle),txn=self.txn)

    def _del_media(self,handle):
        self.media_map.delete(str(handle),txn=self.txn)

    def _del_family(self,handle):
        self.family_map.delete(str(handle),txn=self.txn)

    def _del_event(self,handle):
        self.event_map.delete(str(handle),txn=self.txn)

    def set_name_group_mapping(self,name,group):
        if not self.readonly:
            name = str(name)
            if not group and self.name_group.has_key(name):
                self.name_group.delete(name)
            else:
                self.name_group[name] = group
            self.emit('person-rebuild')

    def get_surname_list(self):
        vals = [ unicode(val) for val in set(self.surnames.keys()) ]
        vals.sort(locale.strcoll)
        return vals

    def get_person_event_type_list(self):
        vals = [ unicode(val) for val in set(self.eventnames.keys()) ]
        vals.sort(locale.strcoll)
        return vals

    def get_repository_type_list(self):
        vals = list(set(self.repository_types.keys()))
        vals.sort(locale.strcoll)
        return vals

    def _get_obj_from_gramps_id(self,val,tbl,class_init):
        data = tbl.get(str(val),txn=self.txn)
        if data:
            obj = class_init()
            obj.unserialize(cPickle.loads(data))
            return obj
        else:
            return None

    def get_person_from_gramps_id(self,val):
        """finds a Person in the database from the passed gramps' ID.
        If no such Person exists, None is returned."""
        return self._get_obj_from_gramps_id(val,self.id_trans,Person)

    def get_family_from_gramps_id(self,val):
        """finds a Family in the database from the passed gramps' ID.
        If no such Family exists, None is returned."""
        return self._get_obj_from_gramps_id(val,self.fid_trans,Family)

    def get_place_from_gramps_id(self,val):
        """finds a Place in the database from the passed gramps' ID.
        If no such Place exists, None is returned."""
        return self._get_obj_from_gramps_id(val,self.pid_trans,Place)

    def get_source_from_gramps_id(self,val):
        """finds a Source in the database from the passed gramps' ID.
        If no such Source exists, None is returned."""
        return self._get_obj_from_gramps_id(val,self.sid_trans,Source)

    def get_object_from_gramps_id(self,val):
        """finds a MediaObject in the database from the passed gramps' ID.
        If no such MediaObject exists, None is returned."""
        return self._get_obj_from_gramps_id(val,self.oid_trans,MediaObject)

    def get_repository_from_gramps_id(self,val):
        """finds a Repository in the database from the passed gramps' ID.
        If no such Repository exists, None is returned."""
        return self._get_obj_from_gramps_id(val,self.rid_trans,Repository)

    def _commit_base(self, obj, data_map, key, update_list, add_list,
                     transaction, change_time):
        """
        Commits the specified object to the database, storing the changes
        as part of the transaction.
        """
        if self.readonly or not obj or not obj.handle:
            return

        if change_time:
            obj.change = int(change_time)
        else:
            obj.change = int(time.time())
        handle = str(obj.handle)

        if transaction.batch:
            data_map.put(handle,obj.serialize(),txn=self.txn)
            old_data = None
        else:
            old_data = data_map.get(handle,txn=self.txn)
            transaction.add(key,handle,old_data)
            if old_data:
                update_list.append((handle,obj.serialize()))
            else:
                add_list.append((handle,obj.serialize()))
        return old_data
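
    # Note on _commit_base: in batch mode the object is written immediately
    # under the current DBTxn; otherwise the write is deferred to
    # _do_commit() via the update/add lists, with the old value recorded in
    # the transaction for undo.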

    def _do_commit(self,add_list,db_map):
        retlist = []
        for (handle,data) in add_list:
            db_map.put(handle,data,self.txn)
            retlist.append(str(handle))
        return retlist

    def _get_from_handle(self, handle, class_type, data_map):
        try:
            data = data_map.get(str(handle),txn=self.txn)
        except:
            data = None
            log.error("Failed to get from handle",exc_info=True)
        if data:
            newobj = class_type()
            newobj.unserialize(data)
            return newobj
        return None

    def _find_from_handle(self,handle,transaction,class_type,dmap,add_func):
        obj = class_type()
        handle = str(handle)
        data = dmap.get(handle,txn=self.txn)
        if data:
            obj.unserialize(data)
        else:
            obj.set_handle(handle)
            add_func(obj,transaction)
        return obj

    def find_next_person_gramps_id(self):
        """
        Returns the next available GRAMPS' ID for a Person object based
        off the person ID prefix.
        """
        index = self.iprefix % self.pmap_index
        while self.id_trans.get(str(index),txn=self.txn):
            self.pmap_index += 1
            index = self.iprefix % self.pmap_index
        self.pmap_index += 1
        return index

    def find_next_place_gramps_id(self):
        """
        Returns the next available GRAMPS' ID for a Place object based
        off the place ID prefix.
        """
        index = self.pprefix % self.lmap_index
        while self.pid_trans.get(str(index),txn=self.txn):
            self.lmap_index += 1
            index = self.pprefix % self.lmap_index
        self.lmap_index += 1
        return index

    def find_next_event_gramps_id(self):
        """
        Returns the next available GRAMPS' ID for an Event object based
        off the event ID prefix.
        """
        index = self.eprefix % self.emap_index
        while self.eid_trans.get(str(index),txn=self.txn):
            self.emap_index += 1
            index = self.eprefix % self.emap_index
        self.emap_index += 1
        return index

    def find_next_object_gramps_id(self):
        """
        Returns the next available GRAMPS' ID for a MediaObject object based
        off the media object ID prefix.
        """
        index = self.oprefix % self.omap_index
        while self.oid_trans.get(str(index),txn=self.txn):
            self.omap_index += 1
            index = self.oprefix % self.omap_index
        self.omap_index += 1
        return index

    def find_next_source_gramps_id(self):
        """
        Returns the next available GRAMPS' ID for a Source object based
        off the source ID prefix.
        """
        index = self.sprefix % self.smap_index
        while self.sid_trans.get(str(index),txn=self.txn):
            self.smap_index += 1
            index = self.sprefix % self.smap_index
        self.smap_index += 1
        return index

    def find_next_family_gramps_id(self):
        """
        Returns the next available GRAMPS' ID for a Family object based
        off the family ID prefix.
        """
        index = self.fprefix % self.fmap_index
        while self.fid_trans.get(str(index),txn=self.txn):
            self.fmap_index += 1
            index = self.fprefix % self.fmap_index
        self.fmap_index += 1
        return index

    def find_next_repository_gramps_id(self):
        """
        Returns the next available GRAMPS' ID for a Repository object based
        off the repository ID prefix.
        """
        index = self.rprefix % self.rmap_index
        while self.rid_trans.get(str(index),txn=self.txn):
            self.rmap_index += 1
            index = self.rprefix % self.rmap_index
        self.rmap_index += 1
        return index
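
    # Illustration (assumption: a person prefix of the form "I%04d"): with
    # pmap_index == 5 and I0005 already taken, find_next_person_gramps_id
    # skips to I0006, returns it, and advances pmap_index past it.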

    def transaction_begin(self,msg=""):
        """
        Creates a new Transaction tied to the current UNDO database. The
        transaction has no effect until it is committed using the
        transaction_commit function of this database object.
        """

        # Start BSD DB transaction -- DBTxn
        self.txn = self.env.txn_begin()

        return BdbTransaction(msg,self.undodb)
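
    # Illustrative usage sketch (assumes a loaded, writable database
    # 'dbase' and a RelLib.Person instance 'person'):
    #
    #     trans = dbase.transaction_begin()
    #     dbase.commit_person(person,trans)
    #     dbase.transaction_commit(trans,_("Add Person"))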

    def transaction_commit(self,transaction,msg):

        GrampsDbBase.transaction_commit(self,transaction,msg)

        for (key,data) in transaction.reference_add:
            self.reference_map.put(str(key),data,txn=self.txn)

        # reference_del holds plain keys (see _remove_reference).
        for key in transaction.reference_del:
            self.reference_map.delete(str(key),txn=self.txn)

        # Commit BSD DB transaction -- DBTxn
        self.txn.commit()
        self.txn = None

    def undo(self):
        self.txn = self.env.txn_begin()
        # Propagate the base-class result so callers such as
        # abort_changes() can loop until nothing is left to undo.
        status = GrampsDbBase.undo(self)
        self.txn.commit()
        self.txn = None
        return status

    def undo_reference(self,data,handle):
        if data == None:
            self.reference_map.delete(handle,txn=self.txn)
        else:
            self.reference_map.put(handle,data,txn=self.txn)

    def undo_data(self,data,handle,db_map,signal_root):
        if data == None:
            self.emit(signal_root + '-delete',([handle],))
            db_map.delete(handle,txn=self.txn)
        else:
            ex_data = db_map.get(handle,txn=self.txn)
            if ex_data:
                signal = signal_root + '-update'
            else:
                signal = signal_root + '-add'
            db_map.put(handle,data,txn=self.txn)
            self.emit(signal,([handle],))

    def gramps_upgrade(self):
        child_rel_notrans = [
            "None", "Birth", "Adopted", "Stepchild",
            "Sponsored", "Foster", "Unknown", "Other", ]

        version = self.metadata.get('version',_MINVERSION)

        if version < 6:
            self.gramps_upgrade_6()
        if version < 7:
            self.gramps_upgrade_7()
        if version < 8:
            self.gramps_upgrade_8()
        if version < 9:
            self.gramps_upgrade_9()

        self.metadata['version'] = _DBVERSION
        self.metadata.sync()

    def gramps_upgrade_6(self):
        print "Upgrading to DB version 6"
        order = []
        for val in self.get_media_column_order():
            if val[1] != 6:
                order.append(val)
        self.set_media_column_order(order)

    def gramps_upgrade_7(self):
        print "Upgrading to DB version 7"

        self.genderStats = GenderStats()
        cursor = self.get_person_cursor()
        data = cursor.first()
        while data:
            handle,val = data
            p = Person(val)
            self.genderStats.count_person(p,self)
            data = cursor.next()
        cursor.close()

    def gramps_upgrade_8(self):
        print "Upgrading to DB version 8"
        cursor = self.get_person_cursor()
        data = cursor.first()
        while data:
            handle,val = data
            handle_list = val[8]
            if type(handle_list) == list:
                # Check to prevent crash on corrupted data (event_list=None)
                for handle in handle_list:
                    event = self.get_event_from_handle(handle)
                    self.individual_event_names.add(event.name)
            data = cursor.next()
        cursor.close()

        cursor = self.get_family_cursor()
        data = cursor.first()
        while data:
            handle,val = data
            handle_list = val[6]
            if type(handle_list) == list:
                # Check to prevent crash on corrupted data (event_list=None)
                for handle in handle_list:
                    event = self.get_event_from_handle(handle)
                    self.family_event_names.add(event.name)
            data = cursor.next()
        cursor.close()

    def gramps_upgrade_9(self):
        print "Upgrading to DB version 9 -- this may take a while"
        # First, make sure the stored default person handle is str, not unicode
        try:
            handle = self.metadata['default']
            self.metadata['default'] = str(handle)
        except KeyError:
            # default person was not stored in database
            pass

        # The rest of the upgrade deals with real data, not metadata,
        # so starting transaction here.
        trans = Transaction("",self)
        trans.set_batch(True)

        # This upgrade adds marker to every primary object.
        # We need to extract and commit every primary object
        # even if no other changes are made.

        # Change every Source to have reporef_list
        # (iterating over the keys directly rather than through a cursor)
        for handle in self.source_map.keys():
            info = self.source_map[handle]
            source = Source()
            source.handle = handle
            # We already have a new Source object with the reporef_list;
            # just fill in the rest of the fields for this source
            (junk_handle, source.gramps_id, source.title, source.author,
             source.pubinfo, source.note, source.media_list,
             source.abbrev, source.change, source.datamap) = info
            self.commit_source(source,trans)

        for handle in self.person_map.keys():
            info = self.person_map[handle]
            person = Person()
            person.handle = handle
            # Restore data from dbversion 8 (gramps 2.0.9)
            (junk_handle, person.gramps_id, person.gender,
             person.primary_name, person.alternate_names, person.nickname,
             death_handle, birth_handle, event_list,
             person.family_list, person.parent_family_list,
             person.media_list, person.address_list, person.attribute_list,
             person.urls, person.lds_bapt, person.lds_endow, person.lds_seal,
             complete, person.source_list, person.note,
             person.change, person.private) = (info + (False,))[0:23]

            # Convert complete flag into marker
            if complete:
                person.marker = (PrimaryObject.MARKER_COMPLETE,"")

            # Change every event handle to the EventRef
            if birth_handle:
                event_ref = EventRef()
                event_ref.ref = birth_handle
                event_ref.role = (EventRef.PRIMARY,'')
                person.birth_ref = event_ref

            if death_handle:
                event_ref = EventRef()
                event_ref.ref = death_handle
                event_ref.role = (EventRef.PRIMARY,'')
                person.death_ref = event_ref

            for event_handle in event_list:
                event_ref = EventRef()
                event_ref.ref = event_handle
                event_ref.role = (EventRef.PRIMARY,'')
                person.event_ref_list.append(event_ref)

            # In all Name instances, convert type from string to a tuple
            name_conversion = {
                "Also Known As" : (Name.AKA,""),
                "Birth Name" : (Name.BIRTH,""),
                "Married Name" : (Name.MARRIED,""),
                "Other Name" : (Name.CUSTOM,_("Other Name")),
                }
            for name in [person.primary_name] + person.alternate_names:
                old_type = name.type
                if old_type:
                    if name_conversion.has_key(old_type):
                        new_type = name_conversion[old_type]
                    else:
                        new_type = (Name.CUSTOM,old_type)
                else:
                    new_type = (Name.UNKNOWN,"")
                name.type = new_type

            # In all Attributes, convert type from string to a tuple
            for attribute in person.attribute_list:
                convert_attribute_9(attribute)
            # Cover attributes contained in MediaRefs
            for media_ref in person.media_list:
                convert_mediaref_9(media_ref)

            # In all Urls, add type attribute
            for url in person.urls:
                convert_url_9(url)

            self.commit_person(person,trans)
|
2005-12-15 11:49:37 +05:30
|
|
|
|
2005-12-18 09:00:13 +05:30
|
|
|
#cursor = self.get_family_cursor()
|
|
|
|
#data = cursor.first()
|
|
|
|
#while data:
|
|
|
|
# handle,info = data
|
|
|
|
for handle in self.family_map.keys():
|
|
|
|
info = self.family_map[handle]
|
            family = Family()
            family.handle = handle

            # Restore data from dbversion 8 (gramps 2.0.9)
            (junk_handle, family.gramps_id, family.father_handle,
             family.mother_handle, family.child_list, family.type,
             event_list, family.media_list, family.attribute_list,
             family.lds_seal, complete, family.source_list,
             family.note, family.change) = info

            if complete:
                family.marker = (PrimaryObject.MARKER_COMPLETE,"")

            # Change every event handle into an EventRef
            for event_handle in event_list:
                event_ref = EventRef()
                event_ref.ref = event_handle
                event_ref.role = (EventRef.PRIMARY,'')
                family.event_ref_list.append(event_ref)

            # In all Attributes, convert type from string to a tuple
            for attribute in family.attribute_list:
                convert_attribute_9(attribute)
            # Cover attributes contained in MediaRefs
            for media_ref in family.media_list:
                convert_mediaref_9(media_ref)

            self.commit_family(family,trans)
            # data = cursor.next()
        # cursor.close()
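
        # Map old string event types to the new (constant, custom-string)
        # tuple representation; any string not listed here becomes
        # (Event.CUSTOM, string) below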
        event_conversion = {
            "Alternate Marriage" : (Event.MARR_ALT,""),
            "Annulment" : (Event.ANNULMENT,""),
            "Divorce" : (Event.DIVORCE,""),
            "Engagement" : (Event.ENGAGEMENT,""),
            "Marriage Banns" : (Event.MARR_BANNS,""),
            "Marriage Contract" : (Event.MARR_CONTR,""),
            "Marriage License" : (Event.MARR_LIC,""),
            "Marriage Settlement" : (Event.MARR_SETTL,""),
            "Marriage" : (Event.MARRIAGE,""),
            "Adopted" : (Event.ADOPT,""),
            "Birth" : (Event.BIRTH,""),
            "Alternate Birth" : (Event.BIRTH,""),
            "Death" : (Event.DEATH,""),
            "Alternate Death" : (Event.DEATH,""),
            "Adult Christening" : (Event.ADULT_CHRISTEN,""),
            "Baptism" : (Event.BAPTISM,""),
            "Bar Mitzvah" : (Event.BAR_MITZVAH,""),
            "Bas Mitzvah" : (Event.BAS_MITZVAH,""),
            "Blessing" : (Event.BLESS,""),
            "Burial" : (Event.BURIAL,""),
            "Cause Of Death" : (Event.CAUSE_DEATH,""),
            "Census" : (Event.CENSUS,""),
            "Christening" : (Event.CHRISTEN,""),
            "Confirmation" : (Event.CONFIRMATION,""),
            "Cremation" : (Event.CREMATION,""),
            "Degree" : (Event.DEGREE,""),
            "Divorce Filing" : (Event.DIV_FILING,""),
            "Education" : (Event.EDUCATION,""),
            "Elected" : (Event.ELECTED,""),
            "Emigration" : (Event.EMIGRATION,""),
            "First Communion" : (Event.FIRST_COMMUN,""),
            "Immigration" : (Event.IMMIGRATION,""),
            "Graduation" : (Event.GRADUATION,""),
            "Medical Information" : (Event.MED_INFO,""),
            "Military Service" : (Event.MILITARY_SERV,""),
            "Naturalization" : (Event.NATURALIZATION,""),
            "Nobility Title" : (Event.NOB_TITLE,""),
            "Number of Marriages" : (Event.NUM_MARRIAGES,""),
            "Occupation" : (Event.OCCUPATION,""),
            "Ordination" : (Event.ORDINATION,""),
            "Probate" : (Event.PROBATE,""),
            "Property" : (Event.PROPERTY,""),
            "Religion" : (Event.RELIGION,""),
            "Residence" : (Event.RESIDENCE,""),
            "Retirement" : (Event.RETIREMENT,""),
            "Will" : (Event.WILL,""),
            }
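
        # Now upgrade Event objects: convert the type and media refs, and
        # fold the old Witness objects into notes or EventRefs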
        # cursor = self.get_event_cursor()
        # data = cursor.first()
        # while data:
        #     handle,info = data
        for handle in self.event_map.keys():
            info = self.event_map[handle]
            event = Event()
            event.handle = handle
            (junk_handle, event.gramps_id, old_type, event.date,
             event.description, event.place, event.cause, event.private,
             event.source_list, event.note, witness_list,
             event.media_list, event.change) = info

            # Convert the event type from a string to a tuple
            if old_type:
                if event_conversion.has_key(old_type):
                    new_type = event_conversion[old_type]
                else:
                    new_type = (Event.CUSTOM,old_type)
            else:
                new_type = (Event.UNKNOWN,"")
            event.type = new_type

            # Cover attributes contained in MediaRefs
            for media_ref in event.media_list:
                convert_mediaref_9(media_ref)

            # Upgrade witness -- no more Witness class.
            # witness.val holds the witness name for type 0 records,
            # or the handle of a Person in the database for type 1 records.
            if type(witness_list) != list:
                witness_list = []
            for witness in witness_list:
                if witness.type == 0: # witness name recorded
                    # Add name and comment to the event note
                    note_text = event.get_note() + "\n" + \
                                _("Witness name: %s") % witness.val
                    if witness.comment:
                        note_text += "\n" + _("Witness comment: %s") \
                                     % witness.comment
                    event.set_note(note_text)
                elif witness.type == 1: # witness ID recorded
                    # Add an EventRef from that person
                    # to this event using the WITNESS role
                    event_ref = EventRef()
                    event_ref.ref = event.handle
                    event_ref.role = (EventRef.WITNESS,'')
                    # Carry over privacy and comment
                    event_ref.private = witness.private
                    if witness.comment:
                        event_ref.set_note(witness.comment)
                    person = self.get_person_from_handle(witness.val)
                    person.event_ref_list.append(event_ref)
                    self.commit_person(person,trans)

            self.commit_event(event,trans)
            # data = cursor.next()
        # cursor.close()

        # Upgrade Place objects: convert MediaRef attributes and Url types
        # cursor = self.get_place_cursor()
        # data = cursor.first()
        # while data:
        #     handle,info = data
        for handle in self.place_map.keys():
            info = self.place_map[handle]
            place = Place()
            place.handle = handle
            (junk_handle, place.gramps_id, place.title, place.long, place.lat,
             place.main_loc, place.alt_loc, place.urls, place.media_list,
             place.source_list, place.note, place.change) = info

            # Cover attributes contained in MediaRefs
            for media_ref in place.media_list:
                convert_mediaref_9(media_ref)

            # In all Urls, add type attribute
            for url in place.urls:
                convert_url_9(url)

            self.commit_place(place,trans)
            # data = cursor.next()
        # cursor.close()

        # Upgrade MediaObject objects: convert attribute types
        # cursor = self.get_media_cursor()
        # data = cursor.first()
        # while data:
        #     handle,info = data
        for handle in self.media_map.keys():
            info = self.media_map[handle]
            media_object = MediaObject()
            media_object.handle = handle
            (junk_handle, media_object.gramps_id, media_object.path,
             media_object.mime, media_object.desc, media_object.attribute_list,
             media_object.source_list, media_object.note, media_object.change,
             media_object.date) = info

            # In all Attributes, convert type from string to a tuple
            for attribute in media_object.attribute_list:
                convert_attribute_9(attribute)

            self.commit_media_object(media_object,trans)
            # data = cursor.next()
        # cursor.close()

        self.transaction_commit(trans,"Upgrade to DB version 9")
        print "Done upgrading to DB version 9"
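

# Transaction subclass used by the BSDDB backend. The reference_del and
# reference_add lists collect reference_map entries removed and added
# during the transaction (presumably applied when it is committed).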
class BdbTransaction(Transaction):
    def __init__(self,msg,db):
        Transaction.__init__(self,msg,db)
        self.reference_del = []
        self.reference_add = []
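

# Map old string attribute types to the new (constant, custom-string) tuples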
_attribute_conversion_9 = {
    "Caste" : (Attribute.CASTE,""),
    "Description" : (Attribute.DESCRIPTION,""),
    "Identification Number" : (Attribute.ID,""),
    "National Origin" : (Attribute.NATIONAL,""),
    "Number of Children" : (Attribute.NUM_CHILD,""),
    "Social Security Number" : (Attribute.SSN,""),
    }
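

# Convert a single Attribute's type from the old string form to the new
# tuple form, falling back to CUSTOM for unrecognized strings and UNKNOWN
# for empty ones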
def convert_attribute_9(attribute):
    old_type = attribute.type
    if old_type:
        if _attribute_conversion_9.has_key(old_type):
            new_type = _attribute_conversion_9[old_type]
        else:
            new_type = (Attribute.CUSTOM,old_type)
    else:
        new_type = (Attribute.UNKNOWN,"")
    attribute.type = new_type
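

# MediaRefs carry their own attribute lists; convert those as well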
def convert_mediaref_9(media_ref):
    for attribute in media_ref.attribute_list:
        convert_attribute_9(attribute)


def convert_url_9(url):
    # Guess the new Url type from the path: anything that looks like an
    # email address is checked first, then the scheme decides web vs. ftp.
    # (The '@' check originally used the unstripped url.path; it now uses
    # the stripped path for consistency -- the result is the same.)
    path = url.path.strip()
    if path.find('mailto:') == 0 or path.find('@') != -1:
        url.type = (Url.EMAIL,'')
    elif path.find('http://') == 0:
        url.type = (Url.WEB_HOME,'')
    elif path.find('ftp://') == 0:
        url.type = (Url.WEB_FTP,'')
    else:
        url.type = (Url.CUSTOM,'')
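
# For example (hypothetical values): "mailto:jane@example.com" and
# "jane@example.com" both become (Url.EMAIL,''), "http://gramps-project.org"
# becomes (Url.WEB_HOME,''), and an unrecognized scheme such as "gopher://"
# falls through to (Url.CUSTOM,'').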


if __name__ == "__main__":
    # Quick manual smoke test: load the database named on the command line,
    # print every person's handle and primary name, then the surname keys.
    import sys

    d = GrampsBSDDB()
    d.load(sys.argv[1],lambda x: x)

    c = d.get_person_cursor()
    data = c.first()
    while data:
        person = Person(data[1])
        print data[0], person.get_primary_name().get_name(),
        data = c.next()
    c.close()

    print d.surnames.keys()