2004-07-31 00:26:49 +05:30
|
|
|
#
|
|
|
|
# Gramps - a GTK+/GNOME based genealogy program
|
|
|
|
#
|
2006-01-07 02:25:49 +05:30
|
|
|
# Copyright (C) 2000-2006 Donald N. Allingham
|
2004-07-31 00:26:49 +05:30
|
|
|
#
|
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation; either version 2 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with this program; if not, write to the Free Software
|
|
|
|
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
|
|
|
#
|
|
|
|
|
|
|
|
# $Id$
|
|
|
|
|
2004-12-06 09:43:13 +05:30
|
|
|
"""
|
|
|
|
Provides the Berkeley DB (BSDDB) database backend for GRAMPS
|
|
|
|
"""
|
|
|
|
|
2005-04-01 10:04:31 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# Standard python modules
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2005-12-15 23:02:10 +05:30
|
|
|
import cPickle
|
2004-08-01 09:51:31 +05:30
|
|
|
import os
|
2004-08-24 09:18:15 +05:30
|
|
|
import time
|
2004-08-27 03:24:14 +05:30
|
|
|
import locale
|
2005-08-18 11:28:28 +05:30
|
|
|
import sets
|
2006-04-07 03:32:46 +05:30
|
|
|
from gettext import gettext as _
|
2005-04-01 10:04:31 +05:30
|
|
|
from bsddb import dbshelve, db
|
2006-01-07 02:25:49 +05:30
|
|
|
import logging
|
|
|
|
log = logging.getLogger(".GrampsDb")
|
2004-08-01 09:51:31 +05:30
|
|
|
|
2005-12-16 02:56:55 +05:30
|
|
|
# hack to use native set for python2.4
|
|
|
|
# and module sets for earlier pythons
|
|
|
|
try:
|
|
|
|
set()
|
|
|
|
except NameError:
|
|
|
|
from sets import Set as set
|
|
|
|
|
2005-04-01 10:04:31 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# Gramps modules
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2004-07-31 00:26:49 +05:30
|
|
|
from RelLib import *
|
2005-12-21 16:57:05 +05:30
|
|
|
from _GrampsDbBase import *
|
2006-01-07 02:25:49 +05:30
|
|
|
import const
|
2004-07-31 00:26:49 +05:30
|
|
|
|
2005-12-15 11:49:37 +05:30
|
|
|
_MINVERSION = 5
|
2006-04-13 23:55:52 +05:30
|
|
|
_DBVERSION = 9
|
2005-02-20 03:41:51 +05:30
|
|
|
|
2004-08-01 09:51:31 +05:30
|
|
|
def find_surname(key, data):
    """Secondary-index callback: extract the surname from a raw person row.

    data[3] holds the primary-name data; index 5 within it is the
    surname field (assumed from the indexing here — confirm against the
    RelLib person serialization).
    """
    surname = data[3][5]
    return str(surname)
|
2004-07-31 00:26:49 +05:30
|
|
|
|
2004-08-01 09:51:31 +05:30
|
|
|
def find_idmap(key, data):
    """Secondary-index callback: extract the gramps ID (field 1) from a raw row."""
    gramps_id = data[1]
    return str(gramps_id)
|
|
|
|
|
2004-08-20 07:50:06 +05:30
|
|
|
def find_fidmap(key, data):
    """Secondary-index callback: extract the family gramps ID (field 1).

    NOTE(review): identical body to find_idmap; kept separate presumably
    for historical/associate-callback reasons.
    """
    family_id = data[1]
    return str(family_id)
|
|
|
|
|
2004-08-01 09:51:31 +05:30
|
|
|
def find_eventname(key, data):
    """Secondary-index callback: extract the event name (field 2) from a raw row."""
    event_name = data[2]
    return str(event_name)
|
2004-07-31 00:26:49 +05:30
|
|
|
|
2005-05-27 23:13:04 +05:30
|
|
|
def find_repository_type(key, data):
    """Secondary-index callback: extract the repository type (field 2) from a raw row."""
    repo_type = data[2]
    return str(repo_type)
|
|
|
|
|
2005-12-15 23:02:10 +05:30
|
|
|
# Secondary database key lookups for reference_map table
|
|
|
|
# reference_map data values are of the form:
|
|
|
|
# ((primary_object_class_name, primary_object_handle),
|
|
|
|
# (referenced_object_class_name, referenced_object_handle))
|
|
|
|
|
|
|
|
def find_primary_handle(key, data):
    """Secondary-index callback for the reference_map table.

    data is ((primary_class, primary_handle),
             (referenced_class, referenced_handle));
    return the primary object's handle.
    """
    primary_handle = data[0][1]
    return str(primary_handle)
|
2005-12-15 23:02:10 +05:30
|
|
|
|
|
|
|
def find_referenced_handle(key, data):
    """Secondary-index callback for the reference_map table.

    data is ((primary_class, primary_handle),
             (referenced_class, referenced_handle));
    return the referenced object's handle.
    """
    referenced_handle = data[1][1]
    return str(referenced_handle)
|
2005-12-15 23:02:10 +05:30
|
|
|
|
2006-03-20 10:47:52 +05:30
|
|
|
import cPickle as pickle
|
|
|
|
|
2004-12-06 09:43:13 +05:30
|
|
|
class GrampsBSDDBCursor(GrampsCursor):
    """Cursor over a DBShelf-backed primary table.

    Iteration yields (key, unpickled_value) pairs; None signals the
    end of the table.
    """

    def __init__(self, source, txn=None):
        # source is a DBShelf; its underlying db object supplies the cursor.
        self.cursor = source.db.cursor(txn)

    def first(self):
        """Position at the first record; return (key, object) or None."""
        entry = self.cursor.first()
        if not entry:
            return None
        key, raw = entry
        return (key, pickle.loads(raw))

    def next(self):
        """Advance to the next record; return (key, object) or None."""
        entry = self.cursor.next()
        if not entry:
            return None
        key, raw = entry
        return (key, pickle.loads(raw))

    def close(self):
        """Release the underlying BSDDB cursor."""
        self.cursor.close()

    def delete(self):
        """Delete the record at the current cursor position."""
        self.cursor.delete()
|
|
|
|
|
2006-03-21 05:35:07 +05:30
|
|
|
class GrampsBSDDBAssocCursor(GrampsCursor):
    """Cursor over an associated (secondary-index) db.DB table.

    Unlike GrampsBSDDBCursor, the source here is a raw db.DB, so the
    cursor is obtained from the table itself rather than a DBShelf.
    """

    def __init__(self, source, txn=None):
        self.cursor = source.cursor(txn)

    def first(self):
        """Position at the first record; return (key, object) or None."""
        entry = self.cursor.first()
        if not entry:
            return None
        key, raw = entry
        return (key, pickle.loads(raw))

    def next(self):
        """Advance to the next record; return (key, object) or None."""
        entry = self.cursor.next()
        if not entry:
            return None
        key, raw = entry
        return (key, pickle.loads(raw))

    def close(self):
        """Release the underlying BSDDB cursor."""
        self.cursor.close()

    def delete(self):
        """Delete the record at the current cursor position."""
        self.cursor.delete()
|
|
|
|
|
|
|
|
class GrampsBSDDBDupCursor(GrampsBSDDBAssocCursor):
    """Cursor that includes handling for duplicate keys."""

    def set(self, key):
        """Position at the first entry matching key; key is coerced to str."""
        lookup = str(key)
        return self.cursor.set(lookup)

    def next_dup(self):
        """Advance to the next entry sharing the current key, or None."""
        return self.cursor.next_dup()
|
|
|
|
|
|
|
|
|
2004-07-31 00:26:49 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# GrampsBSDDB
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2004-08-01 09:51:31 +05:30
|
|
|
class GrampsBSDDB(GrampsDbBase):
|
2004-07-31 00:26:49 +05:30
|
|
|
"""GRAMPS database object. This object is a base class for other
|
|
|
|
objects."""
|
|
|
|
|
2006-03-17 01:54:27 +05:30
|
|
|
UseTXN = True
|
2006-03-08 22:52:45 +05:30
|
|
|
|
2004-07-31 00:26:49 +05:30
|
|
|
    def __init__(self):
        """creates a new GrampsDB"""
        GrampsDbBase.__init__(self)
        # No BSDDB transaction is active until a write begins.
        self.txn = None
        # Secondary indices are attached lazily by connect_secondary().
        self.secondary_connected = False
|
2004-07-31 00:26:49 +05:30
|
|
|
|
2006-03-08 22:52:45 +05:30
|
|
|
def open_flags(self):
|
|
|
|
if self.UseTXN:
|
|
|
|
return db.DB_CREATE|db.DB_AUTO_COMMIT
|
|
|
|
else:
|
|
|
|
return db.DB_CREATE
|
|
|
|
|
2006-01-19 02:30:02 +05:30
|
|
|
    def open_table(self, name, dbname, no_txn=False, dbtype=db.DB_HASH):
        """Open (or create) the table dbname inside database file name.

        Returns a dbshelve.DBShelf wrapper around the table.  Open mode
        depends on the database state:
          - readonly database  -> DB_RDONLY
          - no_txn requested   -> plain DB_CREATE (no auto-commit),
            used e.g. for the metadata table
          - otherwise          -> flags from open_flags() (adds
            DB_AUTO_COMMIT when transactions are in use)
        """
        dbmap = dbshelve.DBShelf(self.env)
        # Larger page size for better performance on these tables.
        dbmap.db.set_pagesize(16384)
        if self.readonly:
            dbmap.open(name, dbname, dbtype, db.DB_RDONLY)
        elif no_txn:
            dbmap.open(name, dbname, dbtype, db.DB_CREATE, 0666)
        else:
            dbmap.open(name, dbname, dbtype, self.open_flags(), 0666)
        return dbmap
|
2006-01-11 03:58:09 +05:30
|
|
|
|
|
|
|
    def _all_handles(self, table):
        # Return every key in the table, read within the current transaction.
        return table.keys(self.txn)
|
2004-10-19 08:49:25 +05:30
|
|
|
|
2004-12-05 09:45:48 +05:30
|
|
|
    def get_person_cursor(self):
        """Return a cursor over the person table, bound to the current txn."""
        return GrampsBSDDBCursor(self.person_map, self.txn)
|
2004-12-10 06:58:43 +05:30
|
|
|
|
|
|
|
    def get_family_cursor(self):
        """Return a cursor over the family table, bound to the current txn."""
        return GrampsBSDDBCursor(self.family_map, self.txn)
|
2004-12-10 06:58:43 +05:30
|
|
|
|
2005-04-01 10:04:31 +05:30
|
|
|
    def get_event_cursor(self):
        """Return a cursor over the event table, bound to the current txn."""
        return GrampsBSDDBCursor(self.event_map, self.txn)
|
2005-04-01 10:04:31 +05:30
|
|
|
|
2004-12-10 06:58:43 +05:30
|
|
|
    def get_place_cursor(self):
        """Return a cursor over the place table, bound to the current txn."""
        return GrampsBSDDBCursor(self.place_map, self.txn)
|
2004-12-10 06:58:43 +05:30
|
|
|
|
|
|
|
    def get_source_cursor(self):
        """Return a cursor over the source table, bound to the current txn."""
        return GrampsBSDDBCursor(self.source_map, self.txn)
|
2004-12-10 06:58:43 +05:30
|
|
|
|
|
|
|
    def get_media_cursor(self):
        """Return a cursor over the media table, bound to the current txn."""
        return GrampsBSDDBCursor(self.media_map, self.txn)
|
2004-12-05 09:45:48 +05:30
|
|
|
|
2005-05-27 23:13:04 +05:30
|
|
|
    def get_repository_cursor(self):
        """Return a cursor over the repository table, bound to the current txn."""
        return GrampsBSDDBCursor(self.repository_map, self.txn)
|
2005-05-27 23:13:04 +05:30
|
|
|
|
2006-01-07 03:38:40 +05:30
|
|
|
def has_person_handle(self,handle):
|
|
|
|
"""
|
|
|
|
returns True if the handle exists in the current Person database.
|
|
|
|
"""
|
|
|
|
return self.person_map.get(str(handle),txn=self.txn) != None
|
|
|
|
|
|
|
|
def has_family_handle(self,handle):
|
|
|
|
"""
|
|
|
|
returns True if the handle exists in the current Family database.
|
|
|
|
"""
|
|
|
|
return self.family_map.get(str(handle),txn=self.txn) != None
|
|
|
|
|
|
|
|
def has_object_handle(self,handle):
|
|
|
|
"""
|
|
|
|
returns True if the handle exists in the current MediaObjectdatabase.
|
|
|
|
"""
|
|
|
|
return self.media_map.get(str(handle),txn=self.txn) != None
|
|
|
|
|
|
|
|
def has_repository_handle(self,handle):
|
|
|
|
"""
|
|
|
|
returns True if the handle exists in the current Repository database.
|
|
|
|
"""
|
|
|
|
return self.repository_map.get(str(handle),txn=self.txn) != None
|
|
|
|
|
|
|
|
def has_event_handle(self,handle):
|
|
|
|
"""
|
|
|
|
returns True if the handle exists in the current Repository database.
|
|
|
|
"""
|
|
|
|
return self.event_map.get(str(handle),txn=self.txn) != None
|
|
|
|
|
|
|
|
def has_place_handle(self,handle):
|
|
|
|
"""
|
|
|
|
returns True if the handle exists in the current Repository database.
|
|
|
|
"""
|
|
|
|
return self.place_map.get(str(handle),txn=self.txn) != None
|
|
|
|
|
|
|
|
def has_source_handle(self,handle):
|
|
|
|
"""
|
|
|
|
returns True if the handle exists in the current Repository database.
|
|
|
|
"""
|
|
|
|
return self.source_map.get(str(handle),txn=self.txn) != None
|
|
|
|
|
|
|
|
    def get_raw_person_data(self, handle):
        # Return the raw (unwrapped) person record, or None if absent.
        return self.person_map.get(str(handle), txn=self.txn)
|
|
|
|
|
|
|
|
    def get_raw_family_data(self, handle):
        # Return the raw (unwrapped) family record, or None if absent.
        return self.family_map.get(str(handle), txn=self.txn)
|
|
|
|
|
|
|
|
    def get_raw_object_data(self, handle):
        # Return the raw (unwrapped) media-object record, or None if absent.
        return self.media_map.get(str(handle), txn=self.txn)
|
|
|
|
|
|
|
|
    def get_raw_place_data(self, handle):
        # Return the raw (unwrapped) place record, or None if absent.
        return self.place_map.get(str(handle), txn=self.txn)
|
|
|
|
|
|
|
|
    def get_raw_event_data(self, handle):
        # Return the raw (unwrapped) event record, or None if absent.
        return self.event_map.get(str(handle), txn=self.txn)
|
|
|
|
|
|
|
|
    def get_raw_source_data(self, handle):
        # Return the raw (unwrapped) source record, or None if absent.
        return self.source_map.get(str(handle), txn=self.txn)
|
|
|
|
|
|
|
|
    def get_raw_repository_data(self, handle):
        # Return the raw (unwrapped) repository record, or None if absent.
        return self.repository_map.get(str(handle), txn=self.txn)
|
|
|
|
|
2005-12-15 23:02:10 +05:30
|
|
|
# cursors for lookups in the reference_map for back reference
|
|
|
|
# lookups. The reference_map has three indexes:
|
|
|
|
# the main index: a tuple of (primary_handle,referenced_handle)
|
|
|
|
# the primary_handle index: the primary_handle
|
|
|
|
# the referenced_handle index: the referenced_handle
|
|
|
|
# the main index is unique, the others allow duplicate entries.
|
|
|
|
|
|
|
|
    def get_reference_map_cursor(self):
        """Return a cursor over the main (unique) reference_map index."""
        return GrampsBSDDBAssocCursor(self.reference_map, self.txn)
|
2005-12-15 23:02:10 +05:30
|
|
|
|
|
|
|
    def get_reference_map_primary_cursor(self):
        """Return a duplicate-aware cursor over the primary_handle index."""
        return GrampsBSDDBDupCursor(self.reference_map_primary_map, self.txn)
|
2005-12-15 23:02:10 +05:30
|
|
|
|
|
|
|
    def get_reference_map_referenced_cursor(self):
        """Return a duplicate-aware cursor over the referenced_handle index."""
        return GrampsBSDDBDupCursor(self.reference_map_referenced_map, self.txn)
|
2005-12-15 23:02:10 +05:30
|
|
|
|
|
|
|
|
2005-06-05 09:31:56 +05:30
|
|
|
def version_supported(self):
|
2005-12-15 11:49:37 +05:30
|
|
|
return (self.metadata.get('version',0) <= _DBVERSION and
|
|
|
|
self.metadata.get('version',0) >= _MINVERSION)
|
2005-06-05 09:31:56 +05:30
|
|
|
|
2005-03-03 11:03:22 +05:30
|
|
|
def need_upgrade(self):
|
2005-12-15 11:49:37 +05:30
|
|
|
return not self.readonly \
|
|
|
|
and self.metadata.get('version',0) < _DBVERSION
|
2005-03-03 11:03:22 +05:30
|
|
|
|
2005-02-17 04:19:54 +05:30
|
|
|
    def load(self, name, callback, mode="w"):
        """Open the database file `name` and bring the object fully online.

        callback is a progress function called with a percentage (12..87).
        mode "r" opens read-only; anything else opens writable.
        Returns 1 on completion.
        """
        # Re-opening on top of an open database: close the old one first.
        if self.db_is_open:
            self.close()

        self.readonly = mode == "r"

        callback(12)

        # --- Environment setup ----------------------------------------
        self.env = db.DBEnv()
        self.env.set_cachesize(0, 0x2000000)  # 32 MiB cache (0x2000000 bytes)
        self.env.set_lk_max_locks(25000)
        self.env.set_lk_max_objects(25000)
        self.env.set_flags(db.DB_LOG_AUTOREMOVE, 1)  # clean up unused logs
        # The DB_PRIVATE flag must go if we ever move to multi-user setup

        if self.UseTXN:
            env_flags = db.DB_CREATE|db.DB_RECOVER|db.DB_PRIVATE|\
                        db.DB_INIT_MPOOL|db.DB_INIT_LOCK|\
                        db.DB_INIT_LOG|db.DB_INIT_TXN|db.DB_THREAD
        else:
            # Non-transactional mode: no locking/txn/recovery subsystems.
            env_flags = db.DB_CREATE|db.DB_PRIVATE|\
                        db.DB_INIT_MPOOL|db.DB_INIT_LOG

        self.undolog = "%s.undo" % name
        # The environment lives in a per-user directory, not next to the data.
        env_name = os.path.expanduser(const.bsddbenv_dir)
        if not os.path.isdir(env_name):
            os.mkdir(env_name)
        self.env.open(env_name, env_flags)
        if self.UseTXN:
            self.env.txn_checkpoint()

        callback(25)

        self.full_name = os.path.abspath(name)
        self.brief_name = os.path.basename(name)

        # Metadata is opened outside transactions (no_txn=True).
        self.metadata = self.open_table(self.full_name, "meta", no_txn=True)

        # --- Primary object tables ------------------------------------
        self.family_map = self.open_table(self.full_name, "family")
        self.place_map = self.open_table(self.full_name, "places")
        self.source_map = self.open_table(self.full_name, "sources")
        self.media_map = self.open_table(self.full_name, "media")
        self.event_map = self.open_table(self.full_name, "events")
        self.person_map = self.open_table(self.full_name, "person")
        self.repository_map = self.open_table(self.full_name, "repository")
        # reference_map uses BTREE so its keys can be range-scanned.
        self.reference_map = self.open_table(self.full_name, "reference_map",
                                             dbtype=db.DB_BTREE)
        callback(37)

        # --- State restored from metadata -----------------------------
        self.bookmarks = self.metadata.get('bookmarks', [])
        self.family_bookmarks = self.metadata.get('family_bookmarks', [])
        self.event_bookmarks = self.metadata.get('event_bookmarks', [])
        self.source_bookmarks = self.metadata.get('source_bookmarks', [])
        self.repo_bookmarks = self.metadata.get('repo_bookmarks', [])
        self.media_bookmarks = self.metadata.get('media_bookmarks', [])
        self.place_bookmarks = self.metadata.get('place_bookmarks', [])
        self.family_event_names = set(self.metadata.get('fevent_names', []))
        self.individual_event_names = set(self.metadata.get('pevent_names', []))
        self.family_attributes = set(self.metadata.get('fattr_names', []))
        self.individual_attributes = set(self.metadata.get('pattr_names', []))

        gstats = self.metadata.get('gender_stats')

        if not self.readonly:
            # A missing gender_stats entry means a brand-new database:
            # stamp it with the current schema version.  An existing
            # database with no version entry is pre-versioning: mark 0
            # so need_upgrade() triggers.
            if gstats == None:
                self.metadata['version'] = _DBVERSION
            elif not self.metadata.has_key('version'):
                self.metadata['version'] = 0

        self.genderStats = GenderStats(gstats)

        # Here we take care of any changes in the tables related to new code.
        # If secondary indices change, then they should removed
        # or rebuilt by upgrade as well. In any case, the
        # self.secondary_connected flag should be set accordingly.
        if self.need_upgrade():
            self.gramps_upgrade(callback)

        callback(50)

        if not self.secondary_connected:
            self.connect_secondary()

        callback(75)

        # Undo log is only meaningful for writable databases.
        if not self.readonly:
            self.undodb = db.DB()
            self.undodb.open(self.undolog, db.DB_RECNO, db.DB_CREATE)
        self.db_is_open = True

        callback(87)

        return 1
|
|
|
|
|
|
|
|
    def connect_secondary(self):
        """
        This method connects or creates secondary index tables.
        It assumes that the tables either exist and are in the right
        format or do not exist (in which case they get created).

        It is the responsibility of upgrade code to either create
        or remove invalid secondary index tables.
        """
        # index tables used just for speeding up searches
        if self.readonly:
            table_flags = db.DB_RDONLY
        else:
            table_flags = self.open_flags()

        # Surname index: sorted duplicates so surnames list alphabetically.
        self.surnames = db.DB(self.env)
        self.surnames.set_flags(db.DB_DUP|db.DB_DUPSORT)
        self.surnames.open(self.full_name, "surnames", db.DB_BTREE,
                           flags=table_flags)

        self.name_group = db.DB(self.env)
        self.name_group.set_flags(db.DB_DUP)
        self.name_group.open(self.full_name, "name_group",
                             db.DB_HASH, flags=table_flags)

        # gramps-ID -> handle translation tables, one per object type.
        self.id_trans = db.DB(self.env)
        self.id_trans.set_flags(db.DB_DUP)
        self.id_trans.open(self.full_name, "idtrans",
                           db.DB_HASH, flags=table_flags)

        self.fid_trans = db.DB(self.env)
        self.fid_trans.set_flags(db.DB_DUP)
        self.fid_trans.open(self.full_name, "fidtrans",
                            db.DB_HASH, flags=table_flags)

        self.eid_trans = db.DB(self.env)
        self.eid_trans.set_flags(db.DB_DUP)
        self.eid_trans.open(self.full_name, "eidtrans",
                            db.DB_HASH, flags=table_flags)

        self.pid_trans = db.DB(self.env)
        self.pid_trans.set_flags(db.DB_DUP)
        self.pid_trans.open(self.full_name, "pidtrans",
                            db.DB_HASH, flags=table_flags)

        self.sid_trans = db.DB(self.env)
        self.sid_trans.set_flags(db.DB_DUP)
        self.sid_trans.open(self.full_name, "sidtrans",
                            db.DB_HASH, flags=table_flags)

        self.oid_trans = db.DB(self.env)
        self.oid_trans.set_flags(db.DB_DUP)
        self.oid_trans.open(self.full_name, "oidtrans",
                            db.DB_HASH, flags=table_flags)

        self.rid_trans = db.DB(self.env)
        self.rid_trans.set_flags(db.DB_DUP)
        self.rid_trans.open(self.full_name, "ridtrans",
                            db.DB_HASH, flags=table_flags)

        self.eventnames = db.DB(self.env)
        self.eventnames.set_flags(db.DB_DUP)
        self.eventnames.open(self.full_name, "eventnames",
                             db.DB_HASH, flags=table_flags)

        self.repository_types = db.DB(self.env)
        self.repository_types.set_flags(db.DB_DUP)
        self.repository_types.open(self.full_name, "repostypes",
                                   db.DB_HASH, flags=table_flags)

        # reference_map indexes (see comments above the cursor methods):
        # primary_handle index allows duplicates...
        self.reference_map_primary_map = db.DB(self.env)
        self.reference_map_primary_map.set_flags(db.DB_DUP)
        self.reference_map_primary_map.open(self.full_name,
                                            "reference_map_primary_map",
                                            db.DB_BTREE, flags=table_flags)

        # ...the referenced_handle index keeps duplicates sorted.
        self.reference_map_referenced_map = db.DB(self.env)
        self.reference_map_referenced_map.set_flags(db.DB_DUP|db.DB_DUPSORT)
        self.reference_map_referenced_map.open(self.full_name,
                                               "reference_map_referenced_map",
                                               db.DB_BTREE, flags=table_flags)

        if not self.readonly:
            # Wire each secondary index to its primary table; bsddb then
            # maintains the index automatically via the extractor callbacks.
            self.person_map.associate(self.surnames, find_surname, table_flags)
            self.person_map.associate(self.id_trans, find_idmap, table_flags)
            self.family_map.associate(self.fid_trans, find_idmap, table_flags)
            self.event_map.associate(self.eid_trans, find_idmap, table_flags)
            self.repository_map.associate(self.rid_trans, find_idmap,
                                          table_flags)
            self.repository_map.associate(self.repository_types,
                                          find_repository_type, table_flags)
            self.place_map.associate(self.pid_trans, find_idmap, table_flags)
            self.media_map.associate(self.oid_trans, find_idmap, table_flags)
            self.source_map.associate(self.sid_trans, find_idmap, table_flags)
            self.reference_map.associate(self.reference_map_primary_map,
                                         find_primary_handle,
                                         table_flags)
            self.reference_map.associate(self.reference_map_referenced_map,
                                         find_referenced_handle,
                                         table_flags)
        self.secondary_connected = True
|
2004-07-31 00:26:49 +05:30
|
|
|
|
2005-12-16 02:56:55 +05:30
|
|
|
|
2005-08-18 11:28:28 +05:30
|
|
|
def rebuild_secondary(self,callback=None):
|
2006-02-03 21:19:59 +05:30
|
|
|
if self.readonly:
|
2006-01-26 02:36:23 +05:30
|
|
|
return
|
|
|
|
|
2006-03-08 22:52:45 +05:30
|
|
|
table_flags = self.open_flags()
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2006-01-26 02:36:23 +05:30
|
|
|
# remove existing secondary indices
|
2005-08-18 11:28:28 +05:30
|
|
|
self.id_trans.close()
|
2006-01-19 02:07:15 +05:30
|
|
|
junk = db.DB(self.env)
|
|
|
|
junk.remove(self.full_name,"idtrans")
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2006-01-19 02:07:15 +05:30
|
|
|
self.surnames.close()
|
|
|
|
junk = db.DB(self.env)
|
|
|
|
junk.remove(self.full_name,"surnames")
|
2005-08-18 11:28:28 +05:30
|
|
|
|
|
|
|
# Repair secondary indices related to family_map
|
|
|
|
self.fid_trans.close()
|
2006-01-19 02:07:15 +05:30
|
|
|
junk = db.DB(self.env)
|
|
|
|
junk.remove(self.full_name,"fidtrans")
|
2005-08-18 11:28:28 +05:30
|
|
|
|
|
|
|
# Repair secondary indices related to place_map
|
|
|
|
self.pid_trans.close()
|
2006-01-19 02:07:15 +05:30
|
|
|
junk = db.DB(self.env)
|
|
|
|
junk.remove(self.full_name,"pidtrans")
|
2005-08-18 11:28:28 +05:30
|
|
|
|
|
|
|
# Repair secondary indices related to media_map
|
|
|
|
self.oid_trans.close()
|
2006-01-19 02:07:15 +05:30
|
|
|
junk = db.DB(self.env)
|
|
|
|
junk.remove(self.full_name,"oidtrans")
|
2005-08-18 11:28:28 +05:30
|
|
|
|
|
|
|
# Repair secondary indices related to source_map
|
|
|
|
self.sid_trans.close()
|
2006-01-19 02:07:15 +05:30
|
|
|
junk = db.DB(self.env)
|
|
|
|
junk.remove(self.full_name,"sidtrans")
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2006-01-19 02:07:15 +05:30
|
|
|
# Repair secondary indices related to event_map
|
|
|
|
self.eid_trans.close()
|
|
|
|
junk = db.DB(self.env)
|
|
|
|
junk.remove(self.full_name,"eidtrans")
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2005-12-15 11:49:37 +05:30
|
|
|
# Repair secondary indices related to repository_map
|
|
|
|
self.rid_trans.close()
|
2006-01-19 02:07:15 +05:30
|
|
|
junk = db.DB(self.env)
|
|
|
|
junk.remove(self.full_name,"ridtrans")
|
|
|
|
|
|
|
|
# Repair secondary indices related to reference_map
|
|
|
|
self.reference_map_primary_map.close()
|
|
|
|
junk = db.DB(self.env)
|
|
|
|
junk.remove(self.full_name,"reference_map_primary_map")
|
|
|
|
|
|
|
|
self.reference_map_referenced_map.close()
|
|
|
|
junk = db.DB(self.env)
|
|
|
|
junk.remove(self.full_name,"reference_map_referenced_map")
|
2006-01-26 02:36:23 +05:30
|
|
|
|
|
|
|
# Set flag saying that we have removed secondary indices
|
|
|
|
# and then call the creating routine
|
|
|
|
self.secondary_connected = False
|
|
|
|
self.connect_secondary()
|
2005-12-15 11:49:37 +05:30
|
|
|
|
2005-12-15 23:02:10 +05:30
|
|
|
    def find_backlink_handles(self, handle, include_classes=None):
        """
        Find all objects that hold a reference to the object handle.
        Returns an iterator over a list of (class_name, handle) tuples.

        @param handle: handle of the object to search for.
        @type handle: database handle
        @param include_classes: list of class names to include in the results.
            Default: None means include all classes.
        @type include_classes: list of class names

        Note that this is a generator function, it returns a iterator for
        use in loops. If you want a list of the results use:

            result_list = [i for i in find_backlink_handles(handle)]
        """
        # Use the secondary index to locate all the reference_map entries
        # that include a reference to the object we are looking for.
        referenced_cur = self.get_reference_map_referenced_cursor()

        # cursor.set raises when the key is absent; treat that as "no
        # backlinks".  (Broad except kept: the specific bsddb exception
        # type varies by version.)
        try:
            ret = referenced_cur.set(handle)
        except:
            ret = None

        while (ret is not None):
            (key, data) = ret

            # data values are of the form:
            #   ((primary_object_class_name, primary_object_handle),
            #    (referenced_object_class_name, referenced_object_handle))
            # so we need the first tuple to give us the type to compare

            # Secondary DBs store the raw pickle, so unpickle by hand.
            data = cPickle.loads(data)
            if include_classes == None or KEY_TO_CLASS_MAP[data[0][0]] in include_classes:
                yield (KEY_TO_CLASS_MAP[data[0][0]], data[0][1])

            ret = referenced_cur.next_dup()

        referenced_cur.close()

        return
|
|
|
|
|
2006-04-10 07:15:36 +05:30
|
|
|
    def _delete_primary_from_reference_map(self, handle, transaction, txn=None):
        """Remove all references to the primary object from the reference_map"""
        primary_cur = self.get_reference_map_primary_cursor()

        # cursor.set raises when the key is absent; treat as "no rows".
        try:
            ret = primary_cur.set(handle)
        except:
            ret = None

        while (ret is not None):
            (key, data) = ret

            # data values are of the form:
            #   ((primary_object_class_name, primary_object_handle),
            #    (referenced_object_class_name, referenced_object_handle))
            # so we need the second tuple give us a reference that we can
            # combine with the primary_handle to get the main key.
            main_key = (handle, cPickle.loads(data)[1][1])

            self._remove_reference(main_key, transaction, txn)

            ret = primary_cur.next_dup()

        primary_cur.close()
|
|
|
|
|
2006-01-21 03:22:26 +05:30
|
|
|
    def _update_reference_map(self, obj, transaction, txn=None):
        """
        Synchronize the reference_map rows for primary object `obj`:
        add rows for newly referenced objects and drop rows whose
        references no longer exist.

        If txn is given, then changes are written right away using txn.
        """
        # Add references to the reference_map for all primary object referenced
        # from the primary object 'obj' or any of its secondary objects.
        handle = obj.handle
        # `update` is True when this object already has rows to reconcile.
        update = self.reference_map_primary_map.has_key(str(handle))

        if update:
            # FIXME: this needs to be properly integrated into the transaction
            # framework so that the reference_map changes are part of the
            # transaction

            # First thing to do is get hold of all rows in the reference_map
            # table that hold a reference from this primary obj. This means
            # finding all the rows that have this handle somewhere in the
            # list of (class_name,handle) pairs.
            # The primary_map sec index allows us to look this up quickly.
            existing_references = set()

            primary_cur = self.get_reference_map_primary_cursor()

            try:
                ret = primary_cur.set(handle)
            except:
                ret = None

            while (ret is not None):
                (key, data) = ret

                # data values are of the form:
                #   ((primary_object_class_name, primary_object_handle),
                #    (referenced_object_class_name, referenced_object_handle))
                # so we need the second tuple give us a reference that we can
                # compare with what is returned from
                # get_referenced_handles_recursively

                # secondary DBs are not DBShelf's, so we need to do pickling
                # and unpickling ourselves here
                existing_reference = cPickle.loads(data)[1]
                existing_references.add(
                    (KEY_TO_CLASS_MAP[existing_reference[0]],
                     existing_reference[1]))
                ret = primary_cur.next_dup()

            primary_cur.close()

            # Once we have the list of rows that already have a reference
            # we need to compare it with the list of objects that are
            # still references from the primary object.
            current_references = set(obj.get_referenced_handles_recursively())

            no_longer_required_references = existing_references.difference(
                current_references)

            new_references = current_references.difference(existing_references)

        else:
            # No existing rows: everything currently referenced is new.
            new_references = set(obj.get_referenced_handles_recursively())

        # handle addition of new references
        if len(new_references) > 0:
            for (ref_class_name, ref_handle) in new_references:
                data = ((CLASS_TO_KEY_MAP[obj.__class__.__name__], handle),
                        (CLASS_TO_KEY_MAP[ref_class_name], ref_handle),)
                self._add_reference((handle, ref_handle), data, transaction, txn)

        if update:
            # handle deletion of old references
            if len(no_longer_required_references) > 0:
                for (ref_class_name, ref_handle) in \
                        no_longer_required_references:
                    try:
                        self._remove_reference(
                            (handle, ref_handle), transaction, txn)
                    except:  # ignore missing old reference
                        pass
|
2005-12-15 23:02:10 +05:30
|
|
|
|
2006-01-21 03:22:26 +05:30
|
|
|
def _remove_reference(self,key,transaction,txn=None):
    """
    Removes the reference specified by the key,
    preserving the change in the passed transaction.

    In batch mode the row is deleted from reference_map immediately
    (inside the caller-supplied BSDDB txn); otherwise the old value is
    recorded in the GRAMPS transaction for undo and the key is queued
    in transaction.reference_del for deletion at commit time.
    """
    if not self.readonly:
        if transaction.batch:
            self.reference_map.delete(str(key),txn=txn)#=the_txn)
            # Without the transaction subsystem changes must be
            # flushed to disk explicitly.
            if not self.UseTXN:
                self.reference_map.sync()
        else:
            # Record the previous value so the deletion can be undone.
            old_data = self.reference_map.get(str(key),txn=self.txn)
            transaction.add(REFERENCE_KEY,str(key),old_data,None)
            transaction.reference_del.append(str(key))
|
2006-01-13 03:32:58 +05:30
|
|
|
|
2006-01-21 03:22:26 +05:30
|
|
|
def _add_reference(self,key,data,transaction,txn=None):
    """
    Adds the reference specified by the key and the data,
    preserving the change in the passed transaction.

    In batch mode the row is written to reference_map immediately;
    otherwise it is staged in the GRAMPS transaction (for undo) and in
    transaction.reference_add for insertion at commit time.
    """
    if self.readonly or not key:
        return

    if transaction.batch:
        #the_txn = self.env.txn_begin()
        self.reference_map.put(str(key),data,txn=txn)#=the_txn)
        # Without the transaction subsystem changes must be
        # flushed to disk explicitly.
        if not self.UseTXN:
            self.reference_map.sync()
        #the_txn.commit()
    else:
        transaction.add(REFERENCE_KEY,str(key),None,data)
        transaction.reference_add.append((str(key),data))
|
2006-01-13 03:32:58 +05:30
|
|
|
|
2005-12-16 17:29:13 +05:30
|
|
|
def reindex_reference_map(self):
    """Reindex all primary records in the database. This will be a
    slow process for large databases.

    At present this method does not clear the reference_map before it
    reindexes. This is fine when reindex is run to index new content or
    when upgrading from a non-reference_map version of the database. But it
    might be a problem if reindex is used to repair a broken index because any
    references to primary objects that are no longer in the database will
    remain in the reference_map index. So if you want to reindex for repair
    purposes you need to clear the reference_map first.
    """

    # Fix: the original body used an undefined name `transaction`
    # (NameError on first call). A batch transaction is created here
    # (no_magic so the secondary indices stay connected) and committed
    # once every table has been scanned.
    transaction = self.transaction_begin(batch=True,no_magic=True)

    # Make a dictionary of the functions and classes that we need for
    # each of the primary object tables.
    primary_tables = {'Person': {'cursor_func': self.get_person_cursor,
                                 'class_func': Person},
                      'Family': {'cursor_func': self.get_family_cursor,
                                 'class_func': Family},
                      'Event': {'cursor_func': self.get_event_cursor,
                                'class_func': Event},
                      'Place': {'cursor_func': self.get_place_cursor,
                                'class_func': Place},
                      'Source': {'cursor_func': self.get_source_cursor,
                                 'class_func': Source},
                      'MediaObject': {'cursor_func': self.get_media_cursor,
                                      'class_func': MediaObject},
                      'Repository': {'cursor_func': self.get_repository_cursor,
                                     'class_func': Repository},
                      }

    # Now we use the functions and classes defined above to loop
    # through each of the primary object tables.
    for primary_table_name in primary_tables.keys():

        cursor = primary_tables[primary_table_name]['cursor_func']()
        data = cursor.first()

        # Grab the real object class here so that the lookup does
        # not happen inside the main loop.
        class_func = primary_tables[primary_table_name]['class_func']

        while data:
            found_handle,val = data
            obj = class_func()
            obj.unserialize(val)

            self._update_reference_map(obj,transaction)

            data = cursor.next()

        cursor.close()

    self.transaction_commit(transaction,_("Rebuild reference map"))
    return
|
|
|
|
|
2004-07-31 00:26:49 +05:30
|
|
|
def close(self):
    """Flush session state to the metadata table, close every
    secondary index and primary table (in that order), shut down the
    environment and the undo log, and mark the database closed."""
    if not self.db_is_open:
        return
    if not self.readonly:
        # Persist bookmarks, gender statistics and the collected
        # event/attribute name lists before closing.
        self.metadata['bookmarks'] = self.bookmarks
        self.metadata['family_bookmarks'] = self.family_bookmarks
        self.metadata['event_bookmarks'] = self.event_bookmarks
        self.metadata['source_bookmarks'] = self.source_bookmarks
        self.metadata['place_bookmarks'] = self.place_bookmarks
        self.metadata['repo_bookmarks'] = self.repo_bookmarks
        self.metadata['media_bookmarks'] = self.media_bookmarks
        self.metadata['gender_stats'] = self.genderStats.save_stats()
        self.metadata['fevent_names'] = list(self.family_event_names)
        self.metadata['pevent_names'] = list(self.individual_event_names)
        self.metadata['fattr_names'] = list(self.family_attributes)
        self.metadata['pattr_names'] = list(self.individual_attributes)
        # Flush any pending transactional writes to disk.
        if self.UseTXN:
            self.env.txn_checkpoint()
    self.metadata.close()
    self.name_group.close()
    self.surnames.close()
    self.eventnames.close()
    self.repository_types.close()
    self.id_trans.close()
    self.fid_trans.close()
    self.eid_trans.close()
    self.rid_trans.close()
    self.oid_trans.close()
    self.sid_trans.close()
    self.pid_trans.close()
    self.reference_map_primary_map.close()
    self.reference_map_referenced_map.close()
    self.reference_map.close()

    # primary databases must be closed after secondary indexes, or
    # we run into problems with any active cursors.
    self.person_map.close()
    self.family_map.close()
    self.repository_map.close()
    self.place_map.close()
    self.source_map.close()
    self.media_map.close()
    self.event_map.close()
    self.env.close()

    if not self.readonly:
        self.undodb.close()
        try:
            os.remove(self.undolog)
        except:
            # Best-effort cleanup: the undo log may already be gone.
            pass

    # Drop the references so a stale handle cannot be reused.
    self.person_map = None
    self.family_map = None
    self.repository_map = None
    self.place_map = None
    self.source_map = None
    self.media_map = None
    self.event_map = None
    self.surnames = None
    self.env = None
    self.metadata = None
    self.db_is_open = False
|
2004-07-31 00:26:49 +05:30
|
|
|
|
2006-04-10 07:15:36 +05:30
|
|
|
def _do_remove_object(self,handle,transaction,data_map,key,del_list):
    """Common implementation for deleting a primary object.

    In batch mode the reference_map rows and the primary record are
    deleted at once inside a private BSDDB txn; otherwise the old data
    is recorded in the GRAMPS transaction (for undo) and the handle is
    queued in del_list for deletion at commit time.
    """
    if self.readonly or not handle:
        return

    handle = str(handle)
    if transaction.batch:
        if self.UseTXN:
            the_txn = self.env.txn_begin()
        else:
            the_txn = None
        self._delete_primary_from_reference_map(handle,transaction,
                                                txn=the_txn)
        data_map.delete(handle,txn=the_txn)
        # Without the transaction subsystem, flush explicitly.
        if not self.UseTXN:
            data_map.sync()
        if the_txn:
            the_txn.commit()
    else:
        self._delete_primary_from_reference_map(handle,transaction)
        # Keep the old record so the deletion can be undone.
        old_data = data_map.get(handle,txn=self.txn)
        transaction.add(key,handle,old_data,None)
        del_list.append(handle)
|
|
|
|
|
2005-04-18 04:04:56 +05:30
|
|
|
def _del_person(self,handle):
    """Delete the record for *handle* from the person table."""
    table = self.person_map
    table.delete(str(handle),txn=self.txn)
    # No transaction subsystem: flush the change to disk now.
    if not self.UseTXN:
        table.sync()
|
2005-04-18 04:04:56 +05:30
|
|
|
|
|
|
|
def _del_source(self,handle):
    """Delete the record for *handle* from the source table."""
    table = self.source_map
    table.delete(str(handle),txn=self.txn)
    # No transaction subsystem: flush the change to disk now.
    if not self.UseTXN:
        table.sync()
|
2005-04-18 04:04:56 +05:30
|
|
|
|
2005-05-27 23:13:04 +05:30
|
|
|
def _del_repository(self,handle):
    """Delete the record for *handle* from the repository table."""
    table = self.repository_map
    table.delete(str(handle),txn=self.txn)
    # No transaction subsystem: flush the change to disk now.
    if not self.UseTXN:
        table.sync()
|
2005-05-27 23:13:04 +05:30
|
|
|
|
2005-04-18 04:04:56 +05:30
|
|
|
def _del_place(self,handle):
    """Delete the record for *handle* from the place table."""
    table = self.place_map
    table.delete(str(handle),txn=self.txn)
    # No transaction subsystem: flush the change to disk now.
    if not self.UseTXN:
        table.sync()
|
2005-04-18 04:04:56 +05:30
|
|
|
|
|
|
|
def _del_media(self,handle):
    """Delete the record for *handle* from the media table."""
    table = self.media_map
    table.delete(str(handle),txn=self.txn)
    # No transaction subsystem: flush the change to disk now.
    if not self.UseTXN:
        table.sync()
|
2005-04-18 04:04:56 +05:30
|
|
|
|
|
|
|
def _del_family(self,handle):
    """Delete the record for *handle* from the family table."""
    table = self.family_map
    table.delete(str(handle),txn=self.txn)
    # No transaction subsystem: flush the change to disk now.
    if not self.UseTXN:
        table.sync()
|
2005-04-18 04:04:56 +05:30
|
|
|
|
|
|
|
def _del_event(self,handle):
    """Delete the record for *handle* from the event table."""
    table = self.event_map
    table.delete(str(handle),txn=self.txn)
    # No transaction subsystem: flush the change to disk now.
    if not self.UseTXN:
        table.sync()
|
2005-04-18 04:04:56 +05:30
|
|
|
|
2004-10-01 00:02:56 +05:30
|
|
|
def set_name_group_mapping(self,name,group):
    """Map a surname to a grouping name, or remove the mapping.

    An empty/None *group* combined with an existing entry deletes the
    mapping; every other combination stores (or overwrites) it. Emits
    'person-rebuild' so views refresh their grouping.
    """
    if self.readonly:
        return
    key = str(name)
    existing = self.name_group.get(key,txn=self.txn)
    if existing and not group:
        self.name_group.delete(key,txn=self.txn)
    else:
        # NOTE(review): when group is empty and no entry exists, this
        # stores an empty value -- matches the original behaviour.
        self.name_group.put(key,group,txn=self.txn)
    self.emit('person-rebuild')
|
2006-01-27 12:24:35 +05:30
|
|
|
|
2004-08-13 10:04:07 +05:30
|
|
|
def get_surname_list(self):
    """Return every distinct surname, sorted by locale collation."""
    # Decorate-sort-undecorate: collation key first, surname second.
    decorated = [ (locale.strxfrm(unicode(surname)),unicode(surname))
                  for surname in set(self.surnames.keys()) ]
    decorated.sort()
    return [surname for (sort_key,surname) in decorated]
|
2004-07-31 00:26:49 +05:30
|
|
|
|
2005-05-27 23:13:04 +05:30
|
|
|
def get_repository_type_list(self):
    """Return the distinct repository type names, locale-sorted."""
    type_names = list(set(self.repository_types.keys()))
    # cmp-style sort (Python 2) using the locale collation function.
    type_names.sort(locale.strcoll)
    return type_names
|
|
|
|
|
2005-12-22 11:10:27 +05:30
|
|
|
def _get_obj_from_gramps_id(self,val,tbl,class_init):
    """Look up a primary object by its GRAMPS ID in the secondary
    table tbl.

    Returns a fresh instance created by class_init and populated from
    the stored record, or None when the ID is not present. The ID
    tables store pickled serialized tuples, hence the cPickle.loads.
    """
    if tbl.has_key(str(val)):
        data = tbl.get(str(val),txn=self.txn)
        obj = class_init()
        obj.unserialize(cPickle.loads(data))
        return obj
    else:
        return None
|
|
|
|
|
2005-12-22 11:10:27 +05:30
|
|
|
def get_person_from_gramps_id(self,val):
    """Finds a Person in the database from the passed GRAMPS ID.

    Returns None if no such Person exists.
    """
    return self._get_obj_from_gramps_id(val,self.id_trans,Person)
|
|
|
|
|
2004-08-20 07:50:06 +05:30
|
|
|
def get_family_from_gramps_id(self,val):
    """Finds a Family in the database from the passed GRAMPS ID.

    Returns None if no such Family exists.
    """
    return self._get_obj_from_gramps_id(val,self.fid_trans,Family)
|
2005-06-08 10:10:33 +05:30
|
|
|
|
2006-01-27 12:24:35 +05:30
|
|
|
def get_event_from_gramps_id(self,val):
    """Finds an Event in the database from the passed GRAMPS ID.

    Returns None if no such Event exists.
    """
    return self._get_obj_from_gramps_id(val,self.eid_trans,Event)
|
|
|
|
|
2004-08-24 09:18:15 +05:30
|
|
|
def get_place_from_gramps_id(self,val):
    """Finds a Place in the database from the passed GRAMPS ID.

    Returns None if no such Place exists.
    """
    return self._get_obj_from_gramps_id(val,self.pid_trans,Place)
|
2004-08-24 09:18:15 +05:30
|
|
|
|
|
|
|
def get_source_from_gramps_id(self,val):
    """Finds a Source in the database from the passed GRAMPS ID.

    Returns None if no such Source exists.
    """
    return self._get_obj_from_gramps_id(val,self.sid_trans,Source)
|
2005-05-27 23:13:04 +05:30
|
|
|
|
2004-08-24 09:18:15 +05:30
|
|
|
def get_object_from_gramps_id(self,val):
    """Finds a MediaObject in the database from the passed GRAMPS ID.

    Returns None if no such MediaObject exists.
    """
    return self._get_obj_from_gramps_id(val,self.oid_trans,MediaObject)
|
2004-08-24 09:18:15 +05:30
|
|
|
|
2005-12-22 11:10:27 +05:30
|
|
|
def get_repository_from_gramps_id(self,val):
    """Finds a Repository in the database from the passed GRAMPS ID.

    Returns None if no such Repository exists.
    """
    return self._get_obj_from_gramps_id(val,self.rid_trans,Repository)
|
2005-03-03 11:03:22 +05:30
|
|
|
|
2006-01-07 02:25:49 +05:30
|
|
|
def _commit_base(self, obj, data_map, key, update_list, add_list,
                 transaction, change_time):
    """
    Commits the specified object to the database, storing the changes
    as part of the transaction.

    Returns the previous serialized record (or None when the object
    was new or written in batch mode) so callers can tell an update
    from an add.
    """
    if self.readonly or not obj or not obj.handle:
        return

    # Stamp the object with its modification time.
    if change_time:
        obj.change = int(change_time)
    else:
        obj.change = int(time.time())
    handle = str(obj.handle)

    if transaction.batch:
        # Batch mode writes straight to the table inside a private
        # BSDDB txn (when the transaction subsystem is enabled).
        if self.UseTXN:
            the_txn = self.env.txn_begin()
        else:
            the_txn = None
        self._update_reference_map(obj,transaction,txn=the_txn)
        data_map.put(handle,obj.serialize(),txn=the_txn)
        if not self.UseTXN:
            data_map.sync()
        if the_txn:
            the_txn.commit()
        old_data = None
    else:
        # Normal mode stages the change in the GRAMPS transaction so
        # it can be undone; old_data decides update vs. add.
        self._update_reference_map(obj,transaction)
        old_data = data_map.get(handle,txn=self.txn)
        new_data = obj.serialize()
        transaction.add(key,handle,old_data,new_data)
        if old_data:
            update_list.append((handle,new_data))
        else:
            add_list.append((handle,new_data))
    return old_data
|
|
|
|
|
|
|
|
def _do_commit(self,add_list,db_map):
    """Write the queued (handle,data) pairs into db_map and return the
    list of handles that were written."""
    committed = []
    for (handle,data) in add_list:
        db_map.put(handle,data,self.txn)
        # Flush when the transaction subsystem is disabled.
        if not self.UseTXN:
            db_map.sync()
        committed.append(str(handle))
    return committed
|
|
|
|
|
|
|
|
def _get_from_handle(self, handle, class_type, data_map):
    """Fetch the record for handle from data_map and return it wrapped
    in a new class_type instance, or None when the record is missing
    or the lookup fails."""
    try:
        data = data_map.get(str(handle),txn=self.txn)
    except:
        # Deliberate best-effort: any lookup failure yields None.
        data = None
        # under certain circumstances during a database reload,
        # data_map can be none. If so, then don't report an error
        if data_map:
            log.error("Failed to get from handle",exc_info=True)
    if data:
        newobj = class_type()
        newobj.unserialize(data)
        return newobj
    return None
|
|
|
|
|
|
|
|
def _find_from_handle(self,handle,transaction,class_type,dmap,add_func):
    """Return the object stored under handle in dmap; when absent,
    create an empty class_type with that handle and register it via
    add_func inside the given transaction."""
    obj = class_type()
    handle = str(handle)
    if dmap.has_key(handle):
        data = dmap.get(handle,txn=self.txn)
        obj.unserialize(data)
    else:
        # Unknown handle: add a fresh object under it.
        obj.set_handle(handle)
        add_func(obj,transaction)
    return obj
|
|
|
|
|
2006-02-02 20:23:31 +05:30
|
|
|
def transaction_begin(self,msg="",batch=False,no_magic=False):
    """
    Creates a new Transaction tied to the current UNDO database. The
    transaction has no effect until it is committed using the
    transaction_commit function of this database object.

    For batch transactions (unless no_magic is set) the surnames and
    reference_map_referenced_map secondary indices are dropped so bulk
    writes avoid index maintenance; transaction_commit rebuilds them.
    """
    transaction = BdbTransaction(msg,self.undodb,batch,no_magic)
    if transaction.batch:
        if self.UseTXN:
            self.env.txn_checkpoint()
        self.env.set_flags(db.DB_TXN_NOSYNC,1) # async txn

        if self.secondary_connected and not transaction.no_magic:
            # Disconnect unneeded secondary indices
            self.surnames.close()
            junk = db.DB(self.env)
            junk.remove(self.full_name,"surnames")

            self.reference_map_referenced_map.close()
            junk = db.DB(self.env)
            junk.remove(self.full_name,"reference_map_referenced_map")

    return transaction
|
2006-01-07 02:48:50 +05:30
|
|
|
|
|
|
|
def transaction_commit(self,transaction,msg):
    """Commit the GRAMPS transaction: wrap the work in a BSDDB txn
    (when enabled), apply the queued reference_map additions and
    deletions, and for batch transactions rebuild the secondary
    indices that transaction_begin disconnected."""
    # Start BSD DB transaction -- DBTxn
    if self.UseTXN:
        self.txn = self.env.txn_begin()
    else:
        self.txn = None

    GrampsDbBase.transaction_commit(self,transaction,msg)

    # Apply the reference_map changes queued by _add_reference and
    # _remove_reference during this transaction.
    for (key,data) in transaction.reference_add:
        self.reference_map.put(str(key),data,txn=self.txn)

    for key in transaction.reference_del:
        self.reference_map.delete(str(key),txn=self.txn)

    if (len(transaction.reference_add)+len(transaction.reference_del)) > 0\
           and not self.UseTXN:
        self.reference_map.sync()

    # Commit BSD DB transaction -- DBTxn
    if self.UseTXN:
        self.txn.commit()
    if transaction.batch:
        if self.UseTXN:
            self.env.txn_checkpoint()
            self.env.set_flags(db.DB_TXN_NOSYNC,0) # sync txn

        if not transaction.no_magic:
            # create new secondary indices to replace the ones removed
            open_flags = self.open_flags()
            dupe_flags = db.DB_DUP|db.DB_DUPSORT

            self.surnames = db.DB(self.env)
            self.surnames.set_flags(dupe_flags)
            self.surnames.open(self.full_name,"surnames",
                               db.DB_BTREE,flags=open_flags)
            self.person_map.associate(self.surnames,find_surname,
                                      open_flags)

            self.reference_map_referenced_map = db.DB(self.env)
            self.reference_map_referenced_map.set_flags(dupe_flags)
            self.reference_map_referenced_map.open(
                self.full_name,"reference_map_referenced_map",
                db.DB_BTREE,flags=open_flags)
            self.reference_map.associate(self.reference_map_referenced_map,
                                         find_referenced_handle,open_flags)
    self.txn = None
|
2005-05-24 18:38:06 +05:30
|
|
|
|
2006-01-13 03:32:58 +05:30
|
|
|
def undo(self):
    """Undo the last transaction inside a BSDDB txn (when enabled);
    the txn is committed on success and aborted on failure. Returns
    the status reported by the base class."""
    print "Undoing it"
    if self.UseTXN:
        self.txn = self.env.txn_begin()
    status = GrampsDbBase.undo(self)
    if self.UseTXN:
        if status:
            self.txn.commit()
        else:
            # The undo failed: roll back anything it wrote.
            self.txn.abort()
    self.txn = None
    return status
|
2006-01-13 03:32:58 +05:30
|
|
|
|
2006-01-13 05:04:33 +05:30
|
|
|
def redo(self):
    """Redo the last undone transaction inside a BSDDB txn (when
    enabled); the txn is committed on success and aborted on failure.
    Returns the status reported by the base class."""
    print "Redoing it"
    if self.UseTXN:
        self.txn = self.env.txn_begin()
    status = GrampsDbBase.redo(self)
    if self.UseTXN:
        if status:
            self.txn.commit()
        else:
            # The redo failed: roll back anything it wrote.
            self.txn.abort()
    self.txn = None
    return status
|
2006-01-13 05:04:33 +05:30
|
|
|
|
2006-01-13 03:32:58 +05:30
|
|
|
def undo_reference(self,data,handle):
    """Restore one reference_map row during undo/redo: write data back
    under handle, or delete the row when data is None."""
    if data is not None:
        self.reference_map.put(handle,data,txn=self.txn)
    else:
        self.reference_map.delete(handle,txn=self.txn)
|
2006-01-13 03:32:58 +05:30
|
|
|
|
|
|
|
def undo_data(self,data,handle,db_map,signal_root):
    """Restore one primary record during undo/redo and emit the
    matching '<signal_root>-delete', '-update' or '-add' signal."""
    if data is None:
        # The record should not exist: remove it and announce that.
        self.emit(signal_root + '-delete',([handle],))
        db_map.delete(handle,txn=self.txn)
        return
    # Decide the signal from whether the record already exists,
    # then write the restored data back.
    if db_map.get(handle,txn=self.txn):
        signal = signal_root + '-update'
    else:
        signal = signal_root + '-add'
    db_map.put(handle,data,txn=self.txn)
    self.emit(signal,([handle],))
|
|
|
|
|
2006-03-15 01:19:34 +05:30
|
|
|
def update_empty(self,newval):
    """Progress sink used when no callable callback was supplied."""
    return
|
|
|
|
|
|
|
|
def update_real(self,newval):
    """Forward newval to the progress callback, suppressing repeats of
    the previous value."""
    if newval == self.oldval:
        return
    self.callback(newval)
    self.oldval = newval
|
|
|
|
|
|
|
|
def gramps_upgrade(self,callback=None):
    """Upgrade the database schema step by step up to version 9,
    reporting progress through callback when it is callable."""
    self.callback = callback
    if '__call__' in dir(callback): # callback is really callable
        self.oldval = 0
        self.update = self.update_real
    else:
        self.update = self.update_empty

    # NOTE(review): child_rel_notrans is not referenced in this
    # method's visible body -- presumably used by an upgrade step
    # elsewhere; verify before removing.
    child_rel_notrans = [
        "None", "Birth", "Adopted", "Stepchild",
        "Sponsored", "Foster", "Unknown", "Other", ]

    version = self.metadata.get('version',_MINVERSION)
    t = time.time()
    # Each step is applied only when the stored version is older than
    # the step's target; every step records its own version stamp.
    if version < 6:
        self.gramps_upgrade_6()
    if version < 7:
        self.gramps_upgrade_7()
    if version < 8:
        self.gramps_upgrade_8()
    if version < 9:
        self.gramps_upgrade_9()
    # self.metadata.put('version',_DBVERSION)
    # self.metadata.sync()
    print "Upgrade time:", int(time.time()-t), "seconds"
|
2005-04-01 10:04:31 +05:30
|
|
|
|
2005-12-15 11:49:37 +05:30
|
|
|
def gramps_upgrade_6(self):
|
2005-05-24 18:38:06 +05:30
|
|
|
print "Upgrading to DB version 6"
|
|
|
|
order = []
|
|
|
|
for val in self.get_media_column_order():
|
|
|
|
if val[1] != 6:
|
|
|
|
order.append(val)
|
|
|
|
self.set_media_column_order(order)
|
2006-01-26 02:36:23 +05:30
|
|
|
self.metadata.put('version',6)
|
|
|
|
self.metadata.sync()
|
2005-05-25 09:28:27 +05:30
|
|
|
|
2005-12-15 11:49:37 +05:30
|
|
|
def gramps_upgrade_7(self):
    """Upgrade to DB version 7: rebuild the gender statistics by
    scanning every person record, then stamp the new version."""
    print "Upgrading to DB version 7"

    self.genderStats = GenderStats()
    cursor = self.get_person_cursor()
    data = cursor.first()
    while data:
        handle,val = data
        p = Person(val)
        self.genderStats.count_person(p)
        data = cursor.next()
    cursor.close()
    self.metadata.put('version',7)
    self.metadata.sync()
|
2005-07-09 01:54:54 +05:30
|
|
|
|
2005-12-15 11:49:37 +05:30
|
|
|
def gramps_upgrade_8(self):
|
2005-07-09 01:54:54 +05:30
|
|
|
print "Upgrading to DB version 8"
|
2005-08-18 11:28:28 +05:30
|
|
|
cursor = self.get_person_cursor()
|
|
|
|
data = cursor.first()
|
|
|
|
while data:
|
|
|
|
handle,val = data
|
|
|
|
handle_list = val[8]
|
|
|
|
if type(handle_list) == list:
|
|
|
|
# Check to prevent crash on corrupted data (event_list=None)
|
|
|
|
for handle in handle_list:
|
|
|
|
event = self.get_event_from_handle(handle)
|
|
|
|
self.individual_event_names.add(event.name)
|
|
|
|
data = cursor.next()
|
|
|
|
cursor.close()
|
|
|
|
|
|
|
|
cursor = self.get_family_cursor()
|
|
|
|
data = cursor.first()
|
|
|
|
while data:
|
|
|
|
handle,val = data
|
|
|
|
handle_list = val[6]
|
|
|
|
if type(handle_list) == list:
|
|
|
|
# Check to prevent crash on corrupted data (event_list=None)
|
|
|
|
for handle in handle_list:
|
|
|
|
event = self.get_event_from_handle(handle)
|
|
|
|
self.family_event_names.add(event.name)
|
|
|
|
data = cursor.next()
|
|
|
|
cursor.close()
|
2006-01-26 02:36:23 +05:30
|
|
|
self.metadata.put('version',7)
|
|
|
|
self.metadata.sync()
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2005-12-15 11:49:37 +05:30
|
|
|
def gramps_upgrade_9(self):
    """
    Upgrade the database to schema version 9.

    This is the big 2.0.x -> 2.2 migration.  In order:
      1. remove duplicate records from the primary tables (low_level_9),
      2. reset stored column metadata (column layouts changed),
      3. drop and recreate secondary indexes whose format changed,
      4. re-commit every Source, Family, Person, Event, Place and Media
         record, converting old pickled-class fields to the new
         serialized builtin representation along the way.
    All data changes run inside a single batch transaction.
    """
    print "Upgrading to DB version 9 -- this may take a while"
    # The very very first thing is to check for duplicates in the
    # primary tables and remove them.
    # NOTE(review): status is unused here; low_level_9 returns
    # (False, table_name) on failure, which would unpack into these
    # names without being acted upon -- confirm intended.
    status,length = low_level_9(self)
    # Remove column metadata, since columns have changed.
    # This will reset all columns to defaults
    for name in (PERSON_COL_KEY,CHILD_COL_KEY,PLACE_COL_KEY,SOURCE_COL_KEY,
                 MEDIA_COL_KEY,EVENT_COL_KEY,FAMILY_COL_KEY):
        try:
            self.metadata.delete(name)
        except KeyError:
            # Key not present: nothing to reset for this column set.
            pass

    # Then we remove the surname secondary index table
    # because its format changed from HASH to DUPSORTed BTREE.
    junk = db.DB(self.env)
    junk.remove(self.full_name,"surnames")

    # Create one secondary index for reference_map
    # because every commit will require this to exist
    table_flags = self.open_flags()
    self.reference_map_primary_map = db.DB(self.env)
    self.reference_map_primary_map.set_flags(db.DB_DUP)
    self.reference_map_primary_map.open(self.full_name,
                                        "reference_map_primary_map",
                                        db.DB_BTREE, flags=table_flags)
    self.reference_map.associate(self.reference_map_primary_map,
                                 find_primary_handle,
                                 table_flags)

    ### Now we're ready to proceed with the normal upgrade.
    # First, make sure the stored default person handle is str, not unicode
    try:
        handle = self.metadata['default']
        self.metadata['default'] = str(handle)
    except KeyError:
        # default person was not stored in database
        pass

    # The rest of the upgrade deals with real data, not metadata
    # so starting (batch) transaction here.
    trans = self.transaction_begin("",True)
    # Progress counter; length (total record count) came from low_level_9.
    current = 0

    # Numerous changes were made between dbversions 8 and 9.
    # If nothing else, we switched from storing pickled gramps classes
    # to storing builtin objects, via running serialize() recursively
    # until the very bottom.  Every stored object needs to be
    # re-committed here.

    # Change every Source to have reporef_list
    for handle in self.source_map.keys():
        info = self.source_map[handle]
        source = Source()
        source.handle = handle
        # We already have a new Source object with the reporef_list
        # just fill in the rest of the fields for this source
        (junk_handle, source.gramps_id, source.title, source.author,
         source.pubinfo, source.note, source.media_list,
         source.abbrev, source.change, source.datamap) = info
        self.commit_source(source,trans)
        current += 1
        self.update(100*current/length)

    # Family upgrade
    for handle in self.family_map.keys():
        info = self.family_map[handle]
        family = Family()
        family.handle = handle
        # Restore data from dbversion 8 (gramps 2.0.9)
        (junk_handle, family.gramps_id, family.father_handle,
         family.mother_handle, child_list, the_type,
         event_list, family.media_list, family.attribute_list,
         lds_seal, complete, family.source_list,
         family.note, family.change) = info

        # Convert the old boolean "complete" flag into a marker.
        if complete:
            family.marker.set(MarkerType.COMPLETE)

        # Change every event handle to the EventRef
        for event_handle in event_list:
            event_ref = EventRef()
            event_ref.ref = event_handle
            event_ref.role.set(EventRoleType.FAMILY)
            family.event_ref_list.append(event_ref)

        # Change child_list into child_ref_list
        for child_handle in child_list:
            child_ref = ChildRef()
            child_ref.ref = child_handle
            family.child_ref_list.append(child_ref)

        # Change relationship type from int to tuple
        family.type.set(the_type)

        # In all Attributes, convert type from string to a tuple
        for attribute in family.attribute_list:
            convert_attribute_9(attribute)

        # Cover attributes contained in MediaRefs
        for media_ref in family.media_list:
            convert_mediaref_9(media_ref)

        # Switch from fixed lds ords to a list
        if lds_seal:
            family.lds_ord_list = [lds_seal]

        self.commit_family(family,trans)
        current += 1
        self.update(100*current/length)

    # Person upgrade
    # Needs to be run after the family upgrade completed.
    def_rel = ChildRefType._DEFAULT
    for handle in self.person_map.keys():
        info = self.person_map[handle]
        person = Person()
        person.handle = handle
        # Restore data from dbversion 8 (gramps 2.0.9).
        # The "+ (False,)" pads a missing trailing privacy flag on
        # records written by older releases; slice keeps exactly 23 fields.
        (junk_handle, person.gramps_id, person.gender,
         person.primary_name, person.alternate_names, person.nickname,
         death_handle, birth_handle, event_list,
         person.family_list, parent_family_list,
         person.media_list, person.address_list, person.attribute_list,
         person.urls, lds_bapt, lds_endow, lds_seal,
         complete, person.source_list, person.note,
         person.change, person.private) = (info + (False,))[0:23]

        # Convert complete flag into marker
        if complete:
            person.marker.set(MarkerType.COMPLETE)

        # Change every event handle to the EventRef
        if birth_handle:
            event_ref = EventRef()
            event_ref.ref = birth_handle
            person.birth_ref = event_ref

        if death_handle:
            event_ref = EventRef()
            event_ref.ref = death_handle
            person.death_ref = event_ref

        for event_handle in event_list:
            event_ref = EventRef()
            event_ref.ref = event_handle
            person.event_ref_list.append(event_ref)

        # In all Name instances, convert type from string to a tuple
        for name in [person.primary_name] + person.alternate_names:
            old_type = name.type
            new_type = NameType()
            new_type.set_from_xml_str(old_type)
            name.type = new_type

        # Change parent_family_list into list of handles
        # and transfer the relationship info into the family's
        # child_ref (in family.child_ref_list) as tuples.
        for (family_handle,mrel,frel) in parent_family_list:
            person.parent_family_list.append(family_handle)
            # Only change the family if the relations are non-default
            if (mrel,frel) != (def_rel,def_rel):
                family = self.get_family_from_handle(family_handle)
                child_handle_list = [ref.ref for ref in
                                     family.child_ref_list]
                index = child_handle_list.index(person.handle)
                child_ref = family.child_ref_list[index]
                child_ref.frel.set(frel)
                child_ref.mrel.set(mrel)
                self.commit_family(family,trans)

        # In all Attributes, convert type from string to a tuple
        for attribute in person.attribute_list:
            convert_attribute_9(attribute)

        # Cover attributes contained in MediaRefs
        for media_ref in person.media_list:
            convert_mediaref_9(media_ref)

        # In all Urls, add type attribute
        for url in person.urls:
            convert_url_9(url)

        # Switch from fixed lds ords to a list
        person.lds_ord_list = [item for item
                               in [lds_bapt,lds_endow,lds_seal] if item]

        self.commit_person(person,trans)
        current += 1
        self.update(100*current/length)

    # Event upgrade
    # Turns out that a lot of events have duplicate gramps IDs
    # We need to fix this.  Build a temporary secondary index keyed
    # by gramps ID so duplicates can be detected cheaply.
    table_flags = self.open_flags()
    self.eid_trans = db.DB(self.env)
    self.eid_trans.set_flags(db.DB_DUP)
    self.eid_trans.open(self.full_name, "eidtrans",
                        db.DB_HASH, flags=table_flags)
    self.event_map.associate(self.eid_trans,find_idmap,table_flags)
    eid_list = self.eid_trans.keys()
    dup_ids = [eid for eid in eid_list if eid_list.count(eid) > 1 ]

    for handle in self.event_map.keys():
        info = self.event_map[handle]
        event = Event()
        event.handle = handle
        (junk_handle, event.gramps_id, old_type, event.date,
         event.description, event.place, event.cause, event.private,
         event.source_list, event.note, witness_list,
         event.media_list, event.change) = info

        # Assign a fresh gramps ID to every event whose ID is duplicated.
        if event.gramps_id in dup_ids:
            event.gramps_id = self.find_next_event_gramps_id()

        event.type.set_from_xml_str(old_type)

        # Cover attributes contained in MediaRefs
        for media_ref in event.media_list:
            convert_mediaref_9(media_ref)

        # Upgrade witness -- no more Witness class
        # Guard against corrupted data: anything that is not a list
        # is treated as "no witnesses".
        if type(witness_list) != list:
            witness_list = []
        for witness in witness_list:
            if witness.type == 0: # witness name recorded
                # Add name and comment to the event note
                note_text = event.get_note() + "\n" + \
                            _("Witness name: %s") % witness.val
                if witness.comment:
                    note_text += "\n" + _("Witness comment: %s") \
                                 % witness.comment
                event.set_note(note_text)
            elif witness.type == 1: # witness ID recorded
                person = self.get_person_from_handle(witness.val)
                if person:
                    # Add an EventRef from that person
                    # to this event using ROLE_WITNESS role
                    event_ref = EventRef()
                    event_ref.ref = event.handle
                    event_ref.role.set(EventRoleType.WITNESS)
                    # Add privacy and comment
                    event_ref.private = witness.private
                    if witness.comment:
                        event_ref.set_note(witness.comment)
                    person.event_ref_list.append(event_ref)
                    self.commit_person(person,trans)
                else:
                    # Broken witness: dangling witness handle
                    # with no corresponding person in the db
                    note_text = event.get_note() + "\n" + \
                                _("Broken witness reference detected "
                                  "while upgrading database to version 9.")
                    event.set_note(note_text)

        self.commit_event(event,trans)
        current += 1
        self.update(100*current/length)
    # The gramps-ID index was only needed for duplicate detection.
    self.eid_trans.close()

    # Place upgrade
    for handle in self.place_map.keys():
        info = self.place_map[handle]
        place = Place()
        place.handle = handle
        (junk_handle, place.gramps_id, place.title, place.long, place.lat,
         place.main_loc, place.alt_loc, place.urls, place.media_list,
         place.source_list, place.note, place.change) = info

        # Cover attributes contained in MediaRefs
        for media_ref in place.media_list:
            convert_mediaref_9(media_ref)

        # In all Urls, add type attribute
        for url in place.urls:
            convert_url_9(url)

        self.commit_place(place,trans)
        current += 1
        self.update(100*current/length)

    # Media upgrade
    for handle in self.media_map.keys():
        info = self.media_map[handle]
        media_object = MediaObject()
        media_object.handle = handle
        (junk_handle, media_object.gramps_id, media_object.path,
         media_object.mime, media_object.desc, media_object.attribute_list,
         media_object.source_list, media_object.note, media_object.change,
         media_object.date) = info

        # In all Attributes, convert type from string to a tuple
        for attribute in media_object.attribute_list:
            convert_attribute_9(attribute)

        self.commit_media_object(media_object,trans)
        current += 1
        self.update(100*current/length)

    self.transaction_commit(trans,"Upgrade to DB version 9")
    # Close secondary index
    self.reference_map_primary_map.close()
    self.metadata.put('version',9)
    self.metadata.sync()
    print "Done upgrading to DB version 9"
|
2005-12-06 12:08:09 +05:30
|
|
|
|
2006-01-13 03:32:58 +05:30
|
|
|
class BdbTransaction(Transaction):
    """
    Transaction subclass for the BSDDB backend that additionally
    collects pending reference-map index changes.
    """
    def __init__(self,msg,db,batch=False,no_magic=False):
        Transaction.__init__(self,msg,db,batch,no_magic)
        # Reference-index entries queued for removal and addition.
        # NOTE(review): these lists are consumed elsewhere in the file.
        self.reference_del, self.reference_add = [], []
|
|
|
|
|
2005-12-19 21:52:33 +05:30
|
|
|
def convert_attribute_9(attribute):
    """
    Replace the attribute's old string-valued type with an
    AttributeType instance built from that string (in place).
    """
    converted = AttributeType()
    converted.set_from_xml_str(attribute.type)
    attribute.type = converted
|
2005-12-19 21:52:33 +05:30
|
|
|
|
|
|
|
def convert_mediaref_9(media_ref):
    """
    Upgrade every attribute attached to a media reference by running
    the string-to-AttributeType conversion on each one in place.
    """
    for attr in media_ref.attribute_list:
        convert_attribute_9(attr)
|
|
|
|
|
2005-12-20 04:48:03 +05:30
|
|
|
def convert_url_9(url):
    """
    Assign a UrlType to the url (in place), classified from its path:
    mailto:/'@' -> EMAIL, http:// -> WEB_HOME, ftp:// -> WEB_FTP,
    anything else -> CUSTOM.
    """
    path = url.path.strip()
    # Fix: test the stripped path consistently -- the original mixed
    # url.path and path in the first branch (equivalent for '@', since
    # strip() only removes whitespace, but fragile and misleading).
    if path.startswith('mailto:') or ('@' in path):
        new_type = UrlType.EMAIL
    elif path.startswith('http://'):
        new_type = UrlType.WEB_HOME
    elif path.startswith('ftp://'):
        new_type = UrlType.WEB_FTP
    else:
        new_type = UrlType.CUSTOM
    url.type = UrlType(new_type)
|
2006-01-27 04:28:49 +05:30
|
|
|
|
2006-01-26 02:36:23 +05:30
|
|
|
def low_level_9(the_db):
|
|
|
|
"""
|
|
|
|
This is a low-level repair routine.
|
|
|
|
|
|
|
|
It is fixing DB inconsistencies such as duplicates.
|
|
|
|
Returns a (status,name) tuple.
|
|
|
|
The boolean status indicates the success of the procedure.
|
|
|
|
The name indicates the problematic table (empty if status is True).
|
|
|
|
"""
|
2006-03-15 01:19:34 +05:30
|
|
|
the_length = 0
|
2006-01-26 02:36:23 +05:30
|
|
|
for the_map in [('Person',the_db.person_map),
|
|
|
|
('Family',the_db.family_map),
|
|
|
|
('Event',the_db.event_map),
|
|
|
|
('Place',the_db.place_map),
|
|
|
|
('Source',the_db.source_map),
|
|
|
|
('Media',the_db.media_map)]:
|
|
|
|
|
|
|
|
print "Low-level repair: table: %s" % the_map[0]
|
2006-03-15 01:19:34 +05:30
|
|
|
status,length = _table_low_level_9(the_db.env,the_map[1])
|
|
|
|
if status:
|
2006-01-26 02:36:23 +05:30
|
|
|
print "Done."
|
2006-03-15 01:19:34 +05:30
|
|
|
the_length += length
|
2006-01-26 02:36:23 +05:30
|
|
|
else:
|
|
|
|
print "Low-level repair: Problem with table: %s" % the_map[0]
|
|
|
|
return (False,the_map[0])
|
2006-03-15 01:19:34 +05:30
|
|
|
return (True,the_length)
|
2006-01-26 02:36:23 +05:30
|
|
|
|
|
|
|
|
|
|
|
def _table_low_level_9(env,table):
    """
    Low level repair for a given db table.

    Detects handles that occur more than once in the table and deletes
    the extra copies inside a single Berkeley DB transaction.
    Returns (True, record_count) on success (including the no-dupes
    case) or (False, None) on any cursor failure, after aborting the
    transaction.
    """

    handle_list = table.keys()
    length = len(handle_list)
    # Handles appearing more than once.  NOTE(review): count() inside
    # the comprehension is O(n^2); acceptable for a one-off repair run.
    dup_handles = sets.Set(
        [ handle for handle in handle_list if handle_list.count(handle) > 1 ]
        )

    if not dup_handles:
        print " No dupes found for this table"
        return (True,length)

    # All deletions happen inside one transaction so a failure leaves
    # the table untouched (via abort below).
    the_txn = env.txn_begin()
    table_cursor = GrampsBSDDBDupCursor(table,txn=the_txn)
    # Dirty hack to prevent records from unpickling by DBShelve
    table_cursor._extract = lambda rec: rec

    for handle in dup_handles:
        print " Duplicates found for handle: %s" % handle
        try:
            # Position the cursor on the first record with this handle.
            ret = table_cursor.set(handle)
        except:
            # Deliberate best-effort: any cursor error aborts the repair.
            print " Failed setting initial cursor."
            table_cursor.close()
            the_txn.abort()
            return (False,None)

        # Delete all but one copy of the duplicated record.
        for count in range(handle_list.count(handle)-1):
            try:
                table_cursor.delete()
                print " Succesfully deleted dupe #%d" % (count+1)
            except:
                print " Failed deleting dupe."
                table_cursor.close()
                the_txn.abort()
                return (False,None)

            try:
                # Advance to the next record sharing the same key.
                ret = table_cursor.next_dup()
            except:
                print " Failed moving the cursor."
                table_cursor.close()
                the_txn.abort()
                return (False,None)

    table_cursor.close()
    the_txn.commit()
    return (True,length)
|
2006-01-26 02:36:23 +05:30
|
|
|
|
|
|
|
|
2005-12-06 12:08:09 +05:30
|
|
|
# Manual smoke test: open the database named on the command line,
# print every person's handle and primary name, then dump the
# surname index keys.
if __name__ == "__main__":

    import sys

    d = GrampsBSDDB()
    # Second argument is the progress callback; a no-op lambda here.
    d.load(sys.argv[1],lambda x: x)

    c = d.get_person_cursor()
    data = c.first()
    while data:
        # data is a (handle, serialized_person) pair.
        person = Person(data[1])
        print data[0], person.get_primary_name().get_name(),
        data = c.next()
    c.close()

    print d.surnames.keys()
|