#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2006  Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#

# $Id$

"""
Provides the Berkeley DB (BSDDB) database backend for GRAMPS.
"""

#-------------------------------------------------------------------------
#
# Standard python modules
#
#-------------------------------------------------------------------------
import cPickle as pickle
import os
import shutil
import re
import time
import locale
from gettext import gettext as _
from bsddb import dbshelve, db
import logging

log = logging.getLogger(".GrampsDb")

# Hack to use the native set type for python2.4 and later,
# and the sets module for earlier pythons.
try:
    set()
except NameError:
    from sets import Set as set

#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from RelLib import *
from _GrampsDbBase import *
from _DbUtils import db_copy
import const
import Errors
from BasicUtils import UpdateCallback

_MINVERSION = 5
_DBVERSION = 11

def find_surname(key,data):
    return str(data[3][5])

def find_idmap(key,data):
    return str(data[1])

# Secondary database key lookups for the reference_map table.
# reference_map data values are of the form:
#   ((primary_object_class_name, primary_object_handle),
#    (referenced_object_class_name, referenced_object_handle))
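#
# Conceptually (illustrative handles, not real data), an entry recording
# that a Person cites a Source looks like:
#   (('Person', 'a1b2c3'), ('Source', 'd4e5f6'))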

def find_primary_handle(key,data):
    return str(data[0][1])

def find_referenced_handle(key,data):
    return str(data[1][1])

class GrampsBSDDBCursor(GrampsCursor):

    def __init__(self,source,txn=None):
        self.cursor = source.db.cursor(txn)

    def first(self):
        d = self.cursor.first()
        if d:
            return (d[0],pickle.loads(d[1]))
        return None

    def next(self):
        d = self.cursor.next()
        if d:
            return (d[0],pickle.loads(d[1]))
        return None

    def close(self):
        self.cursor.close()

    def delete(self):
        self.cursor.delete()
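
# Typical iteration over a primary table with this cursor class (a
# sketch mirroring the loop used in reindex_reference_map below):
#   cursor = self.get_person_cursor()
#   data = cursor.first()
#   while data:
#       handle,val = data
#       # ... work with the unpickled record in val ...
#       data = cursor.next()
#   cursor.close()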

class GrampsBSDDBAssocCursor(GrampsCursor):

    def __init__(self,source,txn=None):
        self.cursor = source.cursor(txn)

    def first(self):
        d = self.cursor.first()
        if d:
            return (d[0],pickle.loads(d[1]))
        return None

    def next(self):
        d = self.cursor.next()
        if d:
            return (d[0],pickle.loads(d[1]))
        return None

    def close(self):
        self.cursor.close()

    def delete(self):
        self.cursor.delete()

class GrampsBSDDBDupCursor(GrampsBSDDBAssocCursor):
    """Cursor that includes handling for duplicate keys."""

    def set(self,key):
        return self.cursor.set(str(key))

    def next_dup(self):
        return self.cursor.next_dup()
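
# Duplicate-key lookups in GrampsBSDDB follow this pattern (a sketch of
# how _delete_primary_from_reference_map and _update_reference_map use
# this class):
#   cur = self.get_reference_map_primary_cursor()
#   ret = cur.set(handle)        # position on the first entry for handle
#   while ret is not None:
#       key,data = ret
#       # ... process one duplicate entry ...
#       ret = cur.next_dup()     # next entry sharing the same key
#   cur.close()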

#-------------------------------------------------------------------------
#
# GrampsBSDDB
#
#-------------------------------------------------------------------------
class GrampsBSDDB(GrampsDbBase,UpdateCallback):
    """GRAMPS database object. This object provides the Berkeley DB
    (BSDDB) backend implementation of GrampsDbBase."""

    def __init__(self, use_txn = True):
        """creates a new GrampsDB"""

        GrampsDbBase.__init__(self)
        self.txn = None
        self.secondary_connected = False
        self.UseTXN = use_txn

    def open_flags(self):
        if self.UseTXN:
            return db.DB_CREATE|db.DB_AUTO_COMMIT
        else:
            return db.DB_CREATE

    def open_table(self,file_name,table_name,dbtype=db.DB_HASH):
        dbmap = dbshelve.DBShelf(self.env)
        dbmap.db.set_pagesize(16384)
        if self.readonly:
            dbmap.open(file_name, table_name, dbtype, db.DB_RDONLY)
        else:
            dbmap.open(file_name, table_name, dbtype, self.open_flags(), 0666)
        return dbmap

    def _all_handles(self,table):
        return table.keys(self.txn)
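
    # Every primary table is opened through open_table; load() below
    # does, for example:
    #   self.person_map = self.open_table(self.full_name, "person")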

    def get_person_cursor(self):
        return GrampsBSDDBCursor(self.person_map,self.txn)

    def get_family_cursor(self):
        return GrampsBSDDBCursor(self.family_map,self.txn)

    def get_event_cursor(self):
        return GrampsBSDDBCursor(self.event_map,self.txn)

    def get_place_cursor(self):
        return GrampsBSDDBCursor(self.place_map,self.txn)

    def get_source_cursor(self):
        return GrampsBSDDBCursor(self.source_map,self.txn)

    def get_media_cursor(self):
        return GrampsBSDDBCursor(self.media_map,self.txn)

    def get_repository_cursor(self):
        return GrampsBSDDBCursor(self.repository_map,self.txn)

    def has_person_handle(self,handle):
        """
        returns True if the handle exists in the current Person database.
        """
        return self.person_map.get(str(handle),txn=self.txn) != None

    def has_family_handle(self,handle):
        """
        returns True if the handle exists in the current Family database.
        """
        return self.family_map.get(str(handle),txn=self.txn) != None

    def has_object_handle(self,handle):
        """
        returns True if the handle exists in the current MediaObject database.
        """
        return self.media_map.get(str(handle),txn=self.txn) != None

    def has_repository_handle(self,handle):
        """
        returns True if the handle exists in the current Repository database.
        """
        return self.repository_map.get(str(handle),txn=self.txn) != None

    def has_event_handle(self,handle):
        """
        returns True if the handle exists in the current Event database.
        """
        return self.event_map.get(str(handle),txn=self.txn) != None

    def has_place_handle(self,handle):
        """
        returns True if the handle exists in the current Place database.
        """
        return self.place_map.get(str(handle),txn=self.txn) != None

    def has_source_handle(self,handle):
        """
        returns True if the handle exists in the current Source database.
        """
        return self.source_map.get(str(handle),txn=self.txn) != None

    def get_raw_person_data(self,handle):
        return self.person_map.get(str(handle),txn=self.txn)

    def get_raw_family_data(self,handle):
        return self.family_map.get(str(handle),txn=self.txn)

    def get_raw_object_data(self,handle):
        return self.media_map.get(str(handle),txn=self.txn)

    def get_raw_place_data(self,handle):
        return self.place_map.get(str(handle),txn=self.txn)

    def get_raw_event_data(self,handle):
        return self.event_map.get(str(handle),txn=self.txn)

    def get_raw_source_data(self,handle):
        return self.source_map.get(str(handle),txn=self.txn)

    def get_raw_repository_data(self,handle):
        return self.repository_map.get(str(handle),txn=self.txn)

    # Cursors for lookups in the reference_map for back reference
    # lookups. The reference_map has three indexes:
    #   the main index: a tuple of (primary_handle,referenced_handle)
    #   the primary_handle index: the primary_handle
    #   the referenced_handle index: the referenced_handle
    # The main index is unique; the others allow duplicate entries.
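    #
    # As a sketch: to find every object that a given object refers to,
    # walk the primary_handle index via get_reference_map_primary_cursor();
    # to find every object that refers to it (backlinks), walk the
    # referenced_handle index via get_reference_map_referenced_cursor().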

    def get_reference_map_cursor(self):
        return GrampsBSDDBAssocCursor(self.reference_map,self.txn)

    def get_reference_map_primary_cursor(self):
        return GrampsBSDDBDupCursor(self.reference_map_primary_map,self.txn)

    def get_reference_map_referenced_cursor(self):
        return GrampsBSDDBDupCursor(self.reference_map_referenced_map,self.txn)

    # These methods override GrampsDbBase's methods of saving metadata
    # because we now have a txn-capable metadata table.
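    #
    # The write pattern below repeats throughout this class: when UseTXN
    # is set, each metadata write runs in its own BSDDB transaction;
    # otherwise the value is written directly and sync() is called to
    # flush the table to disk.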

    def set_default_person_handle(self, handle):
        """sets the default Person to the passed instance"""
        if not self.readonly:
            if self.UseTXN:
                # Start transaction if needed
                the_txn = self.env.txn_begin()
            else:
                the_txn = None
            self.metadata.put('default',str(handle),txn=the_txn)
            if self.UseTXN:
                the_txn.commit()
            else:
                self.metadata.sync()

    def get_default_person(self):
        """returns the default Person of the database"""
        person = self.get_person_from_handle(self.get_default_handle())
        if person:
            return person
        elif (self.metadata) and (not self.readonly):
            if self.UseTXN:
                # Start transaction if needed
                the_txn = self.env.txn_begin()
            else:
                the_txn = None
            self.metadata.put('default',None,txn=the_txn)
            if self.UseTXN:
                the_txn.commit()
            else:
                self.metadata.sync()
        return None

    def _set_column_order(self, col_list, name):
        if self.metadata and not self.readonly:
            if self.UseTXN:
                # Start transaction if needed
                the_txn = self.env.txn_begin()
            else:
                the_txn = None
            self.metadata.put(name,col_list,txn=the_txn)
            if self.UseTXN:
                the_txn.commit()
            else:
                self.metadata.sync()

    def version_supported(self):
        dbversion = self.metadata.get('version',default=0)
        return (dbversion <= _DBVERSION) and (dbversion >= _MINVERSION)

    def need_upgrade(self):
        dbversion = self.metadata.get('version',default=0)
        return not self.readonly and dbversion < _DBVERSION
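
    # Example (with _MINVERSION = 5 and _DBVERSION = 11): a database at
    # version 9 is supported but needs an upgrade; version 3 is too old
    # to open; version 12, written by a newer GRAMPS, is also rejected.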

    def load(self,name,callback,mode="w"):
        if self.db_is_open:
            self.close()

        self.readonly = mode == "r"
        if self.readonly:
            self.UseTXN = False

        callback(12)

        self.full_name = os.path.abspath(name)
        self.brief_name = os.path.basename(name)

        self.env = db.DBEnv()
        self.env.set_cachesize(0,0x2000000) # 32MB

        if self.UseTXN:
            # These env settings are only needed for the Txn environment
            self.env.set_lk_max_locks(25000)
            self.env.set_lk_max_objects(25000)
            self.env.set_flags(db.DB_LOG_AUTOREMOVE,1) # clean up unused logs

            # The DB_PRIVATE flag must go if we ever move to multi-user setup
            env_flags = db.DB_CREATE|db.DB_PRIVATE|\
                        db.DB_INIT_MPOOL|db.DB_INIT_LOCK|\
                        db.DB_INIT_LOG|db.DB_INIT_TXN|db.DB_THREAD
            # Only do recovery for existing databases
            if os.path.isfile(self.full_name):
                env_flags = env_flags | db.DB_RECOVER

            # Environment name is now based on the filename
            drive, tmp_name = os.path.splitdrive(self.full_name)
            tmp_name = tmp_name.lstrip(os.sep)
            env_name = os.path.join(os.path.expanduser(const.env_dir),
                                    tmp_name)

            # Create the env dir if it does not exist
            if not os.path.isdir(env_name):
                os.makedirs(env_name)
                # If this is not a new db, see about copying the old env dir
                if os.path.isfile(self.full_name):
                    common_env_name = os.path.expanduser(const.bsddbenv_dir)
                    # Copy the old env dir contents into the new one
                    # ONLY if the old env dir exists
                    if os.path.isdir(common_env_name):
                        shutil.rmtree(env_name)
                        shutil.copytree(common_env_name,env_name)
            else:
                # For an existing env and a new database, clean the env
                # (this means the env dir remains from a previous db
                # with the same name)
                if not os.path.isfile(self.full_name):
                    shutil.rmtree(env_name)
                    os.mkdir(env_name)

        else:
            env_flags = db.DB_CREATE|db.DB_PRIVATE|db.DB_INIT_MPOOL
            env_name = os.path.expanduser('~')

        self.env.open(env_name,env_flags)
        if self.UseTXN:
            self.env.txn_checkpoint()

        callback(25)
        self.metadata = self.open_table(self.full_name,"meta")

        # If we cannot work with this DB version,
        # it makes no sense to go further
        if not self.version_supported():
            self._close_early()

        self.family_map = self.open_table(self.full_name, "family")
        self.place_map = self.open_table(self.full_name, "places")
        self.source_map = self.open_table(self.full_name, "sources")
        self.media_map = self.open_table(self.full_name, "media")
        self.event_map = self.open_table(self.full_name, "events")
        self.person_map = self.open_table(self.full_name, "person")
        self.repository_map = self.open_table(self.full_name, "repository")
        self.reference_map = self.open_table(self.full_name, "reference_map",
                                             dbtype=db.DB_BTREE)
        callback(37)

        self._load_metadata()

        gstats = self.metadata.get('gender_stats',default=None)

        if not self.readonly:
            if self.UseTXN:
                # Start transaction if needed
                the_txn = self.env.txn_begin()
            else:
                the_txn = None

            if gstats == None:
                # New database. Set up the current version.
                self.metadata.put('version',_DBVERSION,txn=the_txn)
            elif not self.metadata.has_key('version'):
                # Not a new database, but the version is missing.
                # Use 0, but it is likely to fail anyway.
                self.metadata.put('version',0,txn=the_txn)

            if self.UseTXN:
                the_txn.commit()
            else:
                self.metadata.sync()

        self.genderStats = GenderStats(gstats)

        # Here we take care of any changes in the tables related to new code.
        # If secondary indices change, then they should be removed
        # or rebuilt by upgrade as well. In any case, the
        # self.secondary_connected flag should be set accordingly.

        if self.need_upgrade():
            self.gramps_upgrade(callback)

        callback(50)

        if not self.secondary_connected:
            self.connect_secondary()

        callback(75)

        self.open_undodb()
        self.db_is_open = True

        callback(87)

        # Re-set the undo history to a fresh session start
        self.undoindex = -1
        self.translist = [None] * len(self.translist)
        self.abort_possible = True
        self.undo_history_timestamp = time.time()

        return 1

    def load_from(self, other_database, filename, callback):
        self.load(filename,callback)
        db_copy(other_database,self,callback)
        return 1

    def _load_metadata(self):
        # name display formats
        self.name_formats = self.metadata.get('name_formats',default=[])
        # upgrade formats if they were saved in the old way:
        # an old 3-tuple gains a fourth element, which defaults to True
        for format_ix in range(len(self.name_formats)):
            format = self.name_formats[format_ix]
            if len(format) == 3:
                format = format + (True,)
            self.name_formats[format_ix] = format
        # bookmarks
        self.bookmarks = self.metadata.get('bookmarks',default=[])
        self.family_bookmarks = self.metadata.get('family_bookmarks',
                                                  default=[])
        self.event_bookmarks = self.metadata.get('event_bookmarks',
                                                 default=[])
        self.source_bookmarks = self.metadata.get('source_bookmarks',
                                                  default=[])
        self.repo_bookmarks = self.metadata.get('repo_bookmarks',
                                                default=[])
        self.media_bookmarks = self.metadata.get('media_bookmarks',
                                                 default=[])
        self.place_bookmarks = self.metadata.get('place_bookmarks',
                                                 default=[])
        # Custom type values
        self.family_event_names = set(self.metadata.get('fevent_names',
                                                        default=[]))
        self.individual_event_names = set(self.metadata.get('pevent_names',
                                                            default=[]))
        self.family_attributes = set(self.metadata.get('fattr_names',
                                                       default=[]))
        self.individual_attributes = set(self.metadata.get('pattr_names',
                                                           default=[]))
        self.marker_names = set(self.metadata.get('marker_names',default=[]))
        self.child_ref_types = set(self.metadata.get('child_refs',
                                                     default=[]))
        self.family_rel_types = set(self.metadata.get('family_rels',
                                                      default=[]))
        self.event_role_names = set(self.metadata.get('event_roles',
                                                      default=[]))
        self.name_types = set(self.metadata.get('name_types',default=[]))
        self.repository_types = set(self.metadata.get('repo_types',
                                                      default=[]))
        self.source_media_types = set(self.metadata.get('sm_types',
                                                        default=[]))
        self.url_types = set(self.metadata.get('url_types',default=[]))
        self.media_attributes = set(self.metadata.get('mattr_names',
                                                      default=[]))

    def connect_secondary(self):
        """
        This method connects or creates secondary index tables.
        It assumes that the tables either exist and are in the right
        format or do not exist (in which case they get created).

        It is the responsibility of upgrade code to either create
        or remove invalid secondary index tables.
        """

        # index tables used just for speeding up searches
        if self.readonly:
            table_flags = db.DB_RDONLY
        else:
            table_flags = self.open_flags()

        self.surnames = db.DB(self.env)
        self.surnames.set_flags(db.DB_DUP|db.DB_DUPSORT)
        self.surnames.open(self.full_name, "surnames", db.DB_BTREE,
                           flags=table_flags)

        self.name_group = db.DB(self.env)
        self.name_group.set_flags(db.DB_DUP)
        self.name_group.open(self.full_name, "name_group",
                             db.DB_HASH, flags=table_flags)

        self.id_trans = db.DB(self.env)
        self.id_trans.set_flags(db.DB_DUP)
        self.id_trans.open(self.full_name, "idtrans",
                           db.DB_HASH, flags=table_flags)

        self.fid_trans = db.DB(self.env)
        self.fid_trans.set_flags(db.DB_DUP)
        self.fid_trans.open(self.full_name, "fidtrans",
                            db.DB_HASH, flags=table_flags)

        self.eid_trans = db.DB(self.env)
        self.eid_trans.set_flags(db.DB_DUP)
        self.eid_trans.open(self.full_name, "eidtrans",
                            db.DB_HASH, flags=table_flags)

        self.pid_trans = db.DB(self.env)
        self.pid_trans.set_flags(db.DB_DUP)
        self.pid_trans.open(self.full_name, "pidtrans",
                            db.DB_HASH, flags=table_flags)

        self.sid_trans = db.DB(self.env)
        self.sid_trans.set_flags(db.DB_DUP)
        self.sid_trans.open(self.full_name, "sidtrans",
                            db.DB_HASH, flags=table_flags)

        self.oid_trans = db.DB(self.env)
        self.oid_trans.set_flags(db.DB_DUP)
        self.oid_trans.open(self.full_name, "oidtrans",
                            db.DB_HASH, flags=table_flags)

        self.rid_trans = db.DB(self.env)
        self.rid_trans.set_flags(db.DB_DUP)
        self.rid_trans.open(self.full_name, "ridtrans",
                            db.DB_HASH, flags=table_flags)

        self.reference_map_primary_map = db.DB(self.env)
        self.reference_map_primary_map.set_flags(db.DB_DUP)
        self.reference_map_primary_map.open(self.full_name,
                                            "reference_map_primary_map",
                                            db.DB_BTREE, flags=table_flags)

        self.reference_map_referenced_map = db.DB(self.env)
        self.reference_map_referenced_map.set_flags(db.DB_DUP|db.DB_DUPSORT)
        self.reference_map_referenced_map.open(self.full_name,
                                               "reference_map_referenced_map",
                                               db.DB_BTREE, flags=table_flags)

        if not self.readonly:
            self.person_map.associate(self.surnames, find_surname, table_flags)
            self.person_map.associate(self.id_trans, find_idmap, table_flags)
            self.family_map.associate(self.fid_trans, find_idmap, table_flags)
            self.event_map.associate(self.eid_trans, find_idmap, table_flags)
            self.repository_map.associate(self.rid_trans, find_idmap,
                                          table_flags)
            self.place_map.associate(self.pid_trans, find_idmap, table_flags)
            self.media_map.associate(self.oid_trans, find_idmap, table_flags)
            self.source_map.associate(self.sid_trans, find_idmap, table_flags)
            self.reference_map.associate(self.reference_map_primary_map,
                                         find_primary_handle,
                                         table_flags)
            self.reference_map.associate(self.reference_map_referenced_map,
                                         find_referenced_handle,
                                         table_flags)
        self.secondary_connected = True

    def rebuild_secondary(self,callback):
        if self.readonly:
            return

        table_flags = self.open_flags()

        # remove existing secondary indices
        self.id_trans.close()
        junk = db.DB(self.env)
        junk.remove(self.full_name,"idtrans")
        callback(1)

        self.surnames.close()
        junk = db.DB(self.env)
        junk.remove(self.full_name,"surnames")
        callback(2)

        # Repair secondary indices related to family_map
        self.fid_trans.close()
        junk = db.DB(self.env)
        junk.remove(self.full_name,"fidtrans")
        callback(3)

        # Repair secondary indices related to place_map
        self.pid_trans.close()
        junk = db.DB(self.env)
        junk.remove(self.full_name,"pidtrans")
        callback(4)

        # Repair secondary indices related to media_map
        self.oid_trans.close()
        junk = db.DB(self.env)
        junk.remove(self.full_name,"oidtrans")
        callback(5)

        # Repair secondary indices related to source_map
        self.sid_trans.close()
        junk = db.DB(self.env)
        junk.remove(self.full_name,"sidtrans")
        callback(6)

        # Repair secondary indices related to event_map
        self.eid_trans.close()
        junk = db.DB(self.env)
        junk.remove(self.full_name,"eidtrans")
        callback(7)

        # Repair secondary indices related to repository_map
        self.rid_trans.close()
        junk = db.DB(self.env)
        junk.remove(self.full_name,"ridtrans")
        callback(8)

        # Repair secondary indices related to reference_map
        self.reference_map_primary_map.close()
        junk = db.DB(self.env)
        junk.remove(self.full_name,"reference_map_primary_map")
        callback(9)

        self.reference_map_referenced_map.close()
        junk = db.DB(self.env)
        junk.remove(self.full_name,"reference_map_referenced_map")
        callback(10)

        # Set the flag saying that we have removed the secondary indices
        # and then call the creating routine
        self.secondary_connected = False
        self.connect_secondary()
        callback(11)

    def find_backlink_handles(self, handle, include_classes=None):
        """
        Find all objects that hold a reference to the object handle.
        Returns an iterator over a list of (class_name,handle) tuples.

        @param handle: handle of the object to search for.
        @type handle: database handle
        @param include_classes: list of class names to include in the results.
            Default: None means include all classes.
        @type include_classes: list of class names

        Note that this is a generator function; it returns an iterator for
        use in loops. If you want a list of the results use:

        > result_list = [i for i in find_backlink_handles(handle)]
        """

        # Use the secondary index to locate all the reference_map entries
        # that include a reference to the object we are looking for.
        referenced_cur = self.get_reference_map_referenced_cursor()

        try:
            ret = referenced_cur.set(handle)
        except:
            ret = None

        while (ret is not None):
            (key,data) = ret

            # data values are of the form:
            #   ((primary_object_class_name, primary_object_handle),
            #    (referenced_object_class_name, referenced_object_handle))
            # so we need the first tuple to give us the type to compare

            ### FIXME: this is a dirty hack that works with no
            ### sensible explanation. For some reason, for a readonly
            ### database, the secondary index returns a primary table key
            ### corresponding to the data, not the data itself.
            if self.readonly:
                data = self.reference_map.get(data)
            else:
                data = pickle.loads(data)
            if include_classes == None or \
                   KEY_TO_CLASS_MAP[data[0][0]] in include_classes:
                yield (KEY_TO_CLASS_MAP[data[0][0]],data[0][1])

            ret = referenced_cur.next_dup()

        referenced_cur.close()

        return

    def _delete_primary_from_reference_map(self,handle,transaction,txn=None):
        """
        Remove all references to the primary object from the reference_map.
        """

        primary_cur = self.get_reference_map_primary_cursor()

        try:
            ret = primary_cur.set(handle)
        except:
            ret = None

        remove_list = set()
        while (ret is not None):
            (key,data) = ret

            # data values are of the form:
            #   ((primary_object_class_name, primary_object_handle),
            #    (referenced_object_class_name, referenced_object_handle))
            # so we need the second tuple to give us a reference that we can
            # combine with the primary_handle to get the main key.

            main_key = (handle, pickle.loads(data)[1][1])

            # The trick is not to remove while inside the cursor,
            # but to collect them all and remove after the cursor is closed
            remove_list.add(main_key)

            ret = primary_cur.next_dup()

        primary_cur.close()

        # Now that the cursor is closed, we can remove things
        for main_key in remove_list:
            self._remove_reference(main_key,transaction,txn)

    def _update_reference_map(self, obj, transaction, txn=None):
        """
        If txn is given, then changes are written right away using txn.
        """

        # Add references to the reference_map for all primary objects
        # referenced from the primary object 'obj' or any of its
        # secondary objects.

        handle = obj.handle
        update = self.reference_map_primary_map.has_key(str(handle))

        if update:
            # The first thing to do is get hold of all rows in the
            # reference_map table that hold a reference from this primary
            # obj. This means finding all the rows that have this handle
            # somewhere in the list of (class_name,handle) pairs.
            # The primary_map secondary index allows us to look this up
            # quickly.

            existing_references = set()

            primary_cur = self.get_reference_map_primary_cursor()

            try:
                ret = primary_cur.set(handle)
            except:
                ret = None

            while (ret is not None):
                (key,data) = ret

                # data values are of the form:
                #   ((primary_object_class_name, primary_object_handle),
                #    (referenced_object_class_name, referenced_object_handle))
                # so we need the second tuple to give us a reference that
                # we can compare with what is returned from
                # get_referenced_handles_recursively

                # secondary DBs are not DBShelf's, so we need to do the
                # pickling and unpickling ourselves here
                existing_reference = pickle.loads(data)[1]
                existing_references.add(
                    (KEY_TO_CLASS_MAP[existing_reference[0]],
                     existing_reference[1]))
                ret = primary_cur.next_dup()

            primary_cur.close()

            # Once we have the list of rows that already have a reference
            # we need to compare it with the list of objects that are
            # still referenced from the primary object.

            current_references = set(obj.get_referenced_handles_recursively())

            no_longer_required_references = existing_references.difference(
                current_references)

            new_references = current_references.difference(existing_references)

        else:
            # No existing refs are found:
            # all we have is new, nothing to remove
            no_longer_required_references = set()
            new_references = set(obj.get_referenced_handles_recursively())
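
        # Illustrative example of the set arithmetic above: if the table
        # already held references {A, B} and the object now references
        # {B, C}, then no_longer_required_references is {A} and
        # new_references is {C}.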

        # handle addition of new references
        for (ref_class_name,ref_handle) in new_references:
            data = ((CLASS_TO_KEY_MAP[obj.__class__.__name__],handle),
                    (CLASS_TO_KEY_MAP[ref_class_name],ref_handle),)
            self._add_reference((handle,ref_handle),data,transaction,txn)

        # handle deletion of old references
        for (ref_class_name,ref_handle) in no_longer_required_references:
            try:
                self._remove_reference((handle,ref_handle),transaction,txn)
            except:
                # ignore missing old reference
                pass

    def _remove_reference(self,key,transaction,txn=None):
        """
        Removes the reference specified by the key,
        preserving the change in the passed transaction.
        """
        if not self.readonly:
            if transaction.batch:
                self.reference_map.delete(str(key),txn=txn)
                if not self.UseTXN:
                    self.reference_map.sync()
            else:
                old_data = self.reference_map.get(str(key),txn=self.txn)
                transaction.add(REFERENCE_KEY,str(key),old_data,None)
                transaction.reference_del.append(str(key))

    def _add_reference(self,key,data,transaction,txn=None):
        """
        Adds the reference specified by the key and the data,
        preserving the change in the passed transaction.
        """

        if self.readonly or not key:
            return

        if transaction.batch:
            self.reference_map.put(str(key),data,txn=txn)
            if not self.UseTXN:
                self.reference_map.sync()
        else:
            transaction.add(REFERENCE_KEY,str(key),None,data)
            transaction.reference_add.append((str(key),data))

    def reindex_reference_map(self,callback):
        """
        Reindex all primary records in the database.
        This will be a slow process for large databases.
        """

        # First, remove the reference map and related tables
        self.reference_map_referenced_map.close()
        junk = db.DB(self.env)
        junk.remove(self.full_name,"reference_map_referenced_map")
        callback(1)

        self.reference_map_primary_map.close()
        junk = db.DB(self.env)
        junk.remove(self.full_name,"reference_map_primary_map")
        callback(2)

        self.reference_map.close()
        junk = db.DB(self.env)
        junk.remove(self.full_name,"reference_map")
        callback(3)

        # Open the reference_map and primary map
        self.reference_map = self.open_table(self.full_name, "reference_map",
                                             dbtype=db.DB_BTREE)

        open_flags = self.open_flags()
        self.reference_map_primary_map = db.DB(self.env)
        self.reference_map_primary_map.set_flags(db.DB_DUP)
        self.reference_map_primary_map.open(self.full_name,
                                            "reference_map_primary_map",
                                            db.DB_BTREE, flags=open_flags)
        self.reference_map.associate(self.reference_map_primary_map,
                                     find_primary_handle,
                                     open_flags)

        # Make a dictionary of the functions and classes that we need for
        # each of the primary object tables.
        primary_tables = {
            'Person': {'cursor_func': self.get_person_cursor,
                       'class_func': Person},
            'Family': {'cursor_func': self.get_family_cursor,
                       'class_func': Family},
            'Event': {'cursor_func': self.get_event_cursor,
                      'class_func': Event},
            'Place': {'cursor_func': self.get_place_cursor,
                      'class_func': Place},
            'Source': {'cursor_func': self.get_source_cursor,
                       'class_func': Source},
            'MediaObject': {'cursor_func': self.get_media_cursor,
                            'class_func': MediaObject},
            'Repository': {'cursor_func': self.get_repository_cursor,
                           'class_func': Repository},
            }

        transaction = self.transaction_begin(batch=True,no_magic=True)
        callback(4)

        # Now we use the functions and classes defined above
        # to loop through each of the primary object tables.
        for primary_table_name in primary_tables.keys():

            cursor = primary_tables[primary_table_name]['cursor_func']()
            data = cursor.first()

            # Grab the real object class here so that the lookup does
            # not happen inside the cursor loop.
            class_func = primary_tables[primary_table_name]['class_func']
            while data:
                found_handle,val = data
                obj = class_func()
                obj.unserialize(val)

                if self.UseTXN:
                    the_txn = self.env.txn_begin()
                else:
                    the_txn = None
                self._update_reference_map(obj,transaction,the_txn)
                if not self.UseTXN:
                    self.reference_map.sync()
                if the_txn:
                    the_txn.commit()

                data = cursor.next()

            cursor.close()
        callback(5)
        self.transaction_commit(transaction,_("Rebuild reference map"))

        self.reference_map_referenced_map = db.DB(self.env)
        self.reference_map_referenced_map.set_flags(db.DB_DUP|db.DB_DUPSORT)
        self.reference_map_referenced_map.open(
            self.full_name,"reference_map_referenced_map",
            db.DB_BTREE,flags=open_flags)
        self.reference_map.associate(self.reference_map_referenced_map,
                                     find_referenced_handle,open_flags)
        callback(6)

        return

    def _close_metadata(self):
        if not self.readonly:
            if self.UseTXN:
                # Start transaction if needed
                the_txn = self.env.txn_begin()
            else:
                the_txn = None

            # name display formats
            self.metadata.put('name_formats',self.name_formats,txn=the_txn)
            # bookmarks
            self.metadata.put('bookmarks',self.bookmarks,txn=the_txn)
            self.metadata.put('family_bookmarks',self.family_bookmarks,
                              txn=the_txn)
            self.metadata.put('event_bookmarks',self.event_bookmarks,
                              txn=the_txn)
            self.metadata.put('source_bookmarks',self.source_bookmarks,
                              txn=the_txn)
            self.metadata.put('place_bookmarks',self.place_bookmarks,
                              txn=the_txn)
            self.metadata.put('repo_bookmarks',self.repo_bookmarks,txn=the_txn)
            self.metadata.put('media_bookmarks',self.media_bookmarks,
                              txn=the_txn)
            # gender stats
            self.metadata.put('gender_stats',self.genderStats.save_stats(),
                              txn=the_txn)
            # Custom type values
            self.metadata.put('fevent_names',list(self.family_event_names),
                              txn=the_txn)
            self.metadata.put('pevent_names',list(self.individual_event_names),
                              txn=the_txn)
            self.metadata.put('fattr_names',list(self.family_attributes),
                              txn=the_txn)
            self.metadata.put('pattr_names',list(self.individual_attributes),
                              txn=the_txn)
            self.metadata.put('marker_names',list(self.marker_names),
                              txn=the_txn)
            self.metadata.put('child_refs',list(self.child_ref_types),
                              txn=the_txn)
            self.metadata.put('family_rels',list(self.family_rel_types),
                              txn=the_txn)
            self.metadata.put('event_roles',list(self.event_role_names),
                              txn=the_txn)
            self.metadata.put('name_types',list(self.name_types),
                              txn=the_txn)
            self.metadata.put('repo_types',list(self.repository_types),
                              txn=the_txn)
            self.metadata.put('sm_types',list(self.source_media_types),
                              txn=the_txn)
            self.metadata.put('url_types',list(self.url_types),
                              txn=the_txn)
            self.metadata.put('mattr_names',list(self.media_attributes),
                              txn=the_txn)
            if self.UseTXN:
                the_txn.commit()
            else:
                self.metadata.sync()

        self.metadata.close()

    def _close_early(self):
        """
        Bail out if an incompatible version is discovered:
        * close cleanly so as not to damage data/env
        * raise an exception
        """
        self.metadata.close()
        self.env.close()
        self.metadata = None
        self.env = None
        self.db_is_open = False
        raise Errors.FileVersionError(
            "The database version is not supported by this "
            "version of GRAMPS.\nPlease upgrade to the "
            "corresponding version or use XML for porting "
            "data between different database versions.")

    def close(self):
        if not self.db_is_open:
            return

        if self.UseTXN:
            self.env.txn_checkpoint()

        self._close_metadata()
        self.name_group.close()
        self.surnames.close()
        self.id_trans.close()
        self.fid_trans.close()
        self.eid_trans.close()
        self.rid_trans.close()
        self.oid_trans.close()
        self.sid_trans.close()
        self.pid_trans.close()
        self.reference_map_primary_map.close()
        self.reference_map_referenced_map.close()
        self.reference_map.close()

        # primary databases must be closed after secondary indexes, or
        # we run into problems with any active cursors.
        self.person_map.close()
        self.family_map.close()
        self.repository_map.close()
        self.place_map.close()
        self.source_map.close()
        self.media_map.close()
        self.event_map.close()

        # Attempt to clear log sequence numbers, to make the database
        # portable. This will only work for python2.5 and higher.
        # Commented this out because it causes crashes.
        # To reproduce the crash, create a new DB, import example.gramps,
        # then open and close the db a few times.
        # try:
        #     self.env.lsn_reset(self.full_name)
        # except AttributeError:
        #     pass

        self.env.close()

        try:
            self.close_undodb()
        except db.DBNoSuchFileError:
            pass

        self.person_map = None
        self.family_map = None
        self.repository_map = None
        self.place_map = None
        self.source_map = None
        self.media_map = None
        self.event_map = None
        self.surnames = None
        self.env = None
        self.metadata = None
        self.db_is_open = False

    def _do_remove_object(self,handle,transaction,data_map,key,del_list):
        if self.readonly or not handle:
            return

        handle = str(handle)
        if transaction.batch:
            if self.UseTXN:
                the_txn = self.env.txn_begin()
            else:
                the_txn = None
            self._delete_primary_from_reference_map(handle,transaction,
                                                    txn=the_txn)
            data_map.delete(handle,txn=the_txn)
            if not self.UseTXN:
                data_map.sync()
            if the_txn:
                the_txn.commit()
        else:
            self._delete_primary_from_reference_map(handle,transaction)
            old_data = data_map.get(handle,txn=self.txn)
            transaction.add(key,handle,old_data,None)
            del_list.append(handle)

    def _del_person(self,handle):
        self.person_map.delete(str(handle),txn=self.txn)
        if not self.UseTXN:
            self.person_map.sync()

    def _del_source(self,handle):
        self.source_map.delete(str(handle),txn=self.txn)
        if not self.UseTXN:
            self.source_map.sync()

    def _del_repository(self,handle):
        self.repository_map.delete(str(handle),txn=self.txn)
        if not self.UseTXN:
            self.repository_map.sync()

    def _del_place(self,handle):
        self.place_map.delete(str(handle),txn=self.txn)
        if not self.UseTXN:
            self.place_map.sync()

    def _del_media(self,handle):
        self.media_map.delete(str(handle),txn=self.txn)
        if not self.UseTXN:
            self.media_map.sync()

    def _del_family(self,handle):
        self.family_map.delete(str(handle),txn=self.txn)
        if not self.UseTXN:
            self.family_map.sync()

    def _del_event(self,handle):
        self.event_map.delete(str(handle),txn=self.txn)
        if not self.UseTXN:
            self.event_map.sync()

    def set_name_group_mapping(self,name,group):
        if not self.readonly:
            if self.UseTXN:
                # Start transaction if needed
                the_txn = self.env.txn_begin()
            else:
                the_txn = None
            name = str(name)
            data = self.name_group.get(name,txn=the_txn)
            if not group and data:
                self.name_group.delete(name,txn=the_txn)
            else:
                self.name_group.put(name,group,txn=the_txn)
            if self.UseTXN:
                the_txn.commit()
            else:
                self.name_group.sync()
            self.emit('person-rebuild')

    def get_surname_list(self):
        # Sort with locale.strxfrm so that surnames order correctly for
        # the current locale (decorate-sort-undecorate).
        vals = [ (locale.strxfrm(unicode(val)),unicode(val))
                 for val in set(self.surnames.keys()) ]
        vals.sort()
        return [item[1] for item in vals]

    def _get_obj_from_gramps_id(self,val,tbl,class_init,prim_tbl):
        if tbl.has_key(str(val)):
            data = tbl.get(str(val),txn=self.txn)
            obj = class_init()
            ### FIXME: this is a dirty hack that works with no
            ### sensible explanation. For some reason, for a readonly
            ### database, the secondary index returns a primary table key
            ### corresponding to the data, not the data itself.
            if self.readonly:
                tuple_data = prim_tbl.get(data,txn=self.txn)
            else:
                tuple_data = pickle.loads(data)
            obj.unserialize(tuple_data)
            return obj
        else:
            return None
2005-12-22 11:10:27 +05:30
|
|
|
    def get_person_from_gramps_id(self,val):
        """
        Finds a Person in the database from the passed GRAMPS ID.
        If no such Person exists, None is returned.
        """
        return self._get_obj_from_gramps_id(val,self.id_trans,Person,
                                            self.person_map)

    def get_family_from_gramps_id(self,val):
        """
        Finds a Family in the database from the passed GRAMPS ID.
        If no such Family exists, None is returned.
        """
        return self._get_obj_from_gramps_id(val,self.fid_trans,Family,
                                            self.family_map)

    def get_event_from_gramps_id(self,val):
        """
        Finds an Event in the database from the passed GRAMPS ID.
        If no such Event exists, None is returned.
        """
        return self._get_obj_from_gramps_id(val,self.eid_trans,Event,
                                            self.event_map)

    def get_place_from_gramps_id(self,val):
        """
        Finds a Place in the database from the passed GRAMPS ID.
        If no such Place exists, None is returned.
        """
        return self._get_obj_from_gramps_id(val,self.pid_trans,Place,
                                            self.place_map)

    def get_source_from_gramps_id(self,val):
        """
        Finds a Source in the database from the passed GRAMPS ID.
        If no such Source exists, None is returned.
        """
        return self._get_obj_from_gramps_id(val,self.sid_trans,Source,
                                            self.source_map)

    def get_object_from_gramps_id(self,val):
        """
        Finds a MediaObject in the database from the passed GRAMPS ID.
        If no such MediaObject exists, None is returned.
        """
        return self._get_obj_from_gramps_id(val,self.oid_trans,MediaObject,
                                            self.media_map)

    def get_repository_from_gramps_id(self,val):
        """
        Finds a Repository in the database from the passed GRAMPS ID.
        If no such Repository exists, None is returned.
        """
        return self._get_obj_from_gramps_id(val,self.rid_trans,Repository,
                                            self.repository_map)

    def _commit_base(self, obj, data_map, key, update_list, add_list,
                     transaction, change_time):
        """
        Commits the specified object to the database, storing the changes
        as part of the transaction.
        """
        if self.readonly or not obj or not obj.handle:
            return

        if change_time:
            obj.change = int(change_time)
        else:
            obj.change = int(time.time())
        handle = str(obj.handle)

        if transaction.batch:
            if self.UseTXN:
                the_txn = self.env.txn_begin()
            else:
                the_txn = None
            self._update_reference_map(obj,transaction,txn=the_txn)
            data_map.put(handle,obj.serialize(),txn=the_txn)
            if not self.UseTXN:
                data_map.sync()
            if the_txn:
                the_txn.commit()
            old_data = None
        else:
            self._update_reference_map(obj,transaction)
            old_data = data_map.get(handle,txn=self.txn)
            new_data = obj.serialize()
            transaction.add(key,handle,old_data,new_data)
            if old_data:
                update_list.append((handle,new_data))
            else:
                add_list.append((handle,new_data))
        return old_data

    def _do_commit(self,add_list,db_map):
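        """
        Writes each (handle,data) pair from add_list into db_map and
        returns the list of affected handles as strings.
        """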
        retlist = []
        for (handle,data) in add_list:
            db_map.put(handle,data,self.txn)
            if not self.UseTXN:
                db_map.sync()
            retlist.append(str(handle))
        return retlist

    def _get_from_handle(self, handle, class_type, data_map):
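        """
        Fetches the raw record for handle from data_map and returns it
        unserialized into a new class_type instance, or None when the
        record does not exist.
        """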
        try:
            data = data_map.get(str(handle),txn=self.txn)
        except:
            data = None
            # under certain circumstances during a database reload,
            # data_map can be None. If so, then don't report an error
            if data_map:
                log.error("Failed to get from handle",exc_info=True)
        if data:
            newobj = class_type()
            newobj.unserialize(data)
            return newobj
        return None

    def _find_from_handle(self,handle,transaction,class_type,dmap,add_func):
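        """
        Returns the object with the given handle if it exists in dmap;
        otherwise creates an empty class_type instance with that handle
        and registers it through add_func before returning it.
        """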
        obj = class_type()
        handle = str(handle)
        if dmap.has_key(handle):
            data = dmap.get(handle,txn=self.txn)
            obj.unserialize(data)
        else:
            obj.set_handle(handle)
            add_func(obj,transaction)
        return obj

def transaction_begin(self,msg="",batch=False,no_magic=False):
|
2006-01-07 02:48:50 +05:30
|
|
|
"""
|
|
|
|
Creates a new Transaction tied to the current UNDO database. The
|
|
|
|
transaction has no effect until it is committed using the
|
|
|
|
transaction_commit function of the this database object.
|
|
|
|
"""
|
2006-01-07 02:53:27 +05:30
|
|
|
|
2006-05-04 05:06:10 +05:30
|
|
|
if batch:
|
|
|
|
# A batch transaction does not store the commits
|
|
|
|
# Aborting the session completely will become impossible.
|
|
|
|
self.abort_possible = False
|
2006-05-04 06:08:25 +05:30
|
|
|
# Undo is also impossible after batch transaction
|
|
|
|
self.undoindex = -1
|
2006-05-18 05:06:31 +05:30
|
|
|
self.translist = [None] * len(self.translist)
|
2006-02-02 20:23:31 +05:30
|
|
|
transaction = BdbTransaction(msg,self.undodb,batch,no_magic)
|
2006-01-18 02:47:14 +05:30
|
|
|
if transaction.batch:
|
2006-03-08 22:52:45 +05:30
|
|
|
if self.UseTXN:
|
|
|
|
self.env.txn_checkpoint()
|
2006-05-10 19:53:38 +05:30
|
|
|
self.env.set_flags(db.DB_TXN_NOSYNC,1) # async txn
|
2006-02-02 20:23:31 +05:30
|
|
|
|
2006-02-04 03:33:53 +05:30
|
|
|
if self.secondary_connected and not transaction.no_magic:
|
2006-02-02 20:23:31 +05:30
|
|
|
# Disconnect unneeded secondary indices
|
|
|
|
self.surnames.close()
|
|
|
|
junk = db.DB(self.env)
|
|
|
|
junk.remove(self.full_name,"surnames")
|
|
|
|
|
|
|
|
self.reference_map_referenced_map.close()
|
|
|
|
junk = db.DB(self.env)
|
|
|
|
junk.remove(self.full_name,"reference_map_referenced_map")
|
2006-01-19 22:00:45 +05:30
|
|
|
|
2006-01-18 02:47:14 +05:30
|
|
|
return transaction
|
2006-01-07 02:48:50 +05:30
|
|
|
|
|
|
|
    def transaction_commit(self,transaction,msg):
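        """
        Commits the transaction through the base class, replays the
        queued reference_map additions and deletions, and, after a
        batch transaction, rebuilds the secondary indices that
        transaction_begin dropped.
        """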
        # Start BSD DB transaction -- DBTxn
        if self.UseTXN:
            self.txn = self.env.txn_begin()
        else:
            self.txn = None

        GrampsDbBase.transaction_commit(self,transaction,msg)

        for (key,data) in transaction.reference_add:
            self.reference_map.put(str(key),data,txn=self.txn)

        for key in transaction.reference_del:
            self.reference_map.delete(str(key),txn=self.txn)

        if (len(transaction.reference_add)+len(transaction.reference_del)) > 0\
           and not self.UseTXN:
            self.reference_map.sync()

        # Commit BSD DB transaction -- DBTxn
        if self.UseTXN:
            self.txn.commit()
        if transaction.batch:
            if self.UseTXN:
                self.env.txn_checkpoint()
                self.env.set_flags(db.DB_TXN_NOSYNC,0) # sync txn

            if not transaction.no_magic:
                # create new secondary indices to replace the ones removed
                open_flags = self.open_flags()
                dupe_flags = db.DB_DUP|db.DB_DUPSORT

                self.surnames = db.DB(self.env)
                self.surnames.set_flags(dupe_flags)
                self.surnames.open(self.full_name,"surnames",
                                   db.DB_BTREE,flags=open_flags)
                self.person_map.associate(self.surnames,find_surname,
                                          open_flags)

                self.reference_map_referenced_map = db.DB(self.env)
                self.reference_map_referenced_map.set_flags(dupe_flags)
                self.reference_map_referenced_map.open(
                    self.full_name,"reference_map_referenced_map",
                    db.DB_BTREE,flags=open_flags)
                self.reference_map.associate(self.reference_map_referenced_map,
                                             find_referenced_handle,open_flags)
        self.txn = None

    def undo(self,update_history=True):
        print "Undoing it"
        if self.UseTXN:
            self.txn = self.env.txn_begin()
        status = GrampsDbBase.undo(self,update_history)
        if self.UseTXN:
            if status:
                self.txn.commit()
            else:
                self.txn.abort()
        self.txn = None
        return status

    def redo(self,update_history=True):
        print "Redoing it"
        if self.UseTXN:
            self.txn = self.env.txn_begin()
        status = GrampsDbBase.redo(self,update_history)
        if self.UseTXN:
            if status:
                self.txn.commit()
            else:
                self.txn.abort()
        self.txn = None
        return status

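    # Low-level undo helpers that put back or remove raw records;
    # undo_data also emits the matching -add/-update/-delete signal.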
    def undo_reference(self,data,handle):
        if data == None:
            self.reference_map.delete(handle,txn=self.txn)
        else:
            self.reference_map.put(handle,data,txn=self.txn)

    def undo_data(self,data,handle,db_map,signal_root):
        if data == None:
            self.emit(signal_root + '-delete',([handle],))
            db_map.delete(handle,txn=self.txn)
        else:
            ex_data = db_map.get(handle,txn=self.txn)
            if ex_data:
                signal = signal_root + '-update'
            else:
                signal = signal_root + '-add'
            db_map.put(handle,data,txn=self.txn)
            self.emit(signal,([handle],))

    def gramps_upgrade(self,callback=None):
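        """
        Dispatches the schema upgrades needed to bring the database to
        the current version. gramps_upgrade_9 already produces a
        version-11 database, so the 10 and 11 steps run only for
        databases that start out at version 9 or 10.
        """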
        UpdateCallback.__init__(self,callback)

        child_rel_notrans = [
            "None", "Birth", "Adopted", "Stepchild",
            "Sponsored", "Foster", "Unknown", "Other", ]

        version = self.metadata.get('version',default=_MINVERSION)

        t = time.time()
        if version < 6:
            self.gramps_upgrade_6()
        if version < 7:
            self.gramps_upgrade_7()
        if version < 8:
            self.gramps_upgrade_8()
        if version < 9:
            self.gramps_upgrade_9()
        else:
            if version < 10:
                self.gramps_upgrade_10()
            if version < 11:
                self.gramps_upgrade_11()
        print "Upgrade time:", int(time.time()-t), "seconds"

    def gramps_upgrade_6(self):
        print "Upgrading to DB version 6"
        order = []
        for val in self.get_media_column_order():
            if val[1] != 6:
                order.append(val)
        self.set_media_column_order(order)
        if self.UseTXN:
            # Start transaction if needed
            the_txn = self.env.txn_begin()
        else:
            the_txn = None
        self.metadata.put('version',6,txn=the_txn)
        if self.UseTXN:
            the_txn.commit()
        else:
            self.metadata.sync()

    def gramps_upgrade_7(self):
        print "Upgrading to DB version 7"

        self.genderStats = GenderStats()
        cursor = self.get_person_cursor()
        data = cursor.first()
        while data:
            handle,val = data
            p = Person(val)
            self.genderStats.count_person(p)
            data = cursor.next()
        cursor.close()
        if self.UseTXN:
            # Start transaction if needed
            the_txn = self.env.txn_begin()
        else:
            the_txn = None
        self.metadata.put('version',7,txn=the_txn)
        if self.UseTXN:
            the_txn.commit()
        else:
            self.metadata.sync()

    def gramps_upgrade_8(self):
        print "Upgrading to DB version 8"
        cursor = self.get_person_cursor()
        data = cursor.first()
        while data:
            handle,val = data
            handle_list = val[8]
            if type(handle_list) == list:
                # Check to prevent crash on corrupted data (event_list=None)
                for handle in handle_list:
                    event = self.get_event_from_handle(handle)
                    self.individual_event_names.add(event.name)
            data = cursor.next()
        cursor.close()

        cursor = self.get_family_cursor()
        data = cursor.first()
        while data:
            handle,val = data
            handle_list = val[6]
            if type(handle_list) == list:
                # Check to prevent crash on corrupted data (event_list=None)
                for handle in handle_list:
                    event = self.get_event_from_handle(handle)
                    self.family_event_names.add(event.name)
            data = cursor.next()
        cursor.close()
        if self.UseTXN:
            # Start transaction if needed
            the_txn = self.env.txn_begin()
        else:
            the_txn = None
        self.metadata.put('version',8,txn=the_txn)
        if self.UseTXN:
            the_txn.commit()
        else:
            self.metadata.sync()

    def gramps_upgrade_9(self):
        """
        This is the PIVOTAL upgrade point. Before this, things were
        stored as pickled class and attribute names. After this, things
        are stored as builtin Python objects: every class serializes
        its members recursively, until everything is in terms of
        builtins. So we end up with tuples of numbers, strings, and
        nested tuples.

        Because of this, every subsequent upgrade must also be included
        in this routine. Otherwise this routine would fail when trying
        to commit the objects, because the unpickled objects would lack
        attributes that are necessary for serialization on commit.
        """
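        # For instance, a serialized Source comes back as a plain tuple
        # like (handle, gramps_id, title, author, pubinfo, note,
        # media_list, abbrev, change, datamap) -- see the unpacking
        # below. (Illustrative; each class's serialize() defines the
        # exact layout.)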
print "Upgrading to DB version 11 -- this may take a while"
|
2006-01-26 02:36:23 +05:30
|
|
|
# The very very first thing is to check for duplicates in the
|
|
|
|
# primary tables and remove them.
|
2006-05-20 00:24:21 +05:30
|
|
|
self.set_total(7)
|
|
|
|
status,length = low_level_9(self,self.update)
|
2006-08-15 21:03:28 +05:30
|
|
|
self.reset()
|
2006-05-20 00:24:21 +05:30
|
|
|
|
|
|
|
self.set_total(length)
|
2006-01-26 02:36:23 +05:30
|
|
|
|
2006-04-24 03:02:11 +05:30
|
|
|
# Remove column metadata, since columns have changed.
|
|
|
|
# This will reset all columns to defaults
|
|
|
|
for name in (PERSON_COL_KEY,CHILD_COL_KEY,PLACE_COL_KEY,SOURCE_COL_KEY,
|
|
|
|
MEDIA_COL_KEY,EVENT_COL_KEY,FAMILY_COL_KEY):
|
|
|
|
try:
|
2006-08-13 03:25:52 +05:30
|
|
|
if self.UseTXN:
|
|
|
|
# Start transaction if needed
|
|
|
|
the_txn = self.env.txn_begin()
|
|
|
|
else:
|
|
|
|
the_txn = None
|
2006-08-12 05:16:24 +05:30
|
|
|
self.metadata.delete(name,txn=the_txn)
|
2006-08-13 03:25:52 +05:30
|
|
|
if self.UseTXN:
|
|
|
|
the_txn.commit()
|
|
|
|
else:
|
|
|
|
self.metadata.sync()
|
2006-04-24 03:02:11 +05:30
|
|
|
except KeyError:
|
2006-08-13 03:25:52 +05:30
|
|
|
if self.UseTXN:
|
|
|
|
the_txn.abort()
|
2006-04-24 03:02:11 +05:30
|
|
|
|
2006-01-26 02:36:23 +05:30
|
|
|
# Then we remove the surname secondary index table
|
|
|
|
# because its format changed from HASH to DUPSORTed BTREE.
|
|
|
|
junk = db.DB(self.env)
|
|
|
|
junk.remove(self.full_name,"surnames")
|
|
|
|
|
|
|
|
# Create one secondary index for reference_map
|
|
|
|
# because every commit will require this to exist
|
2006-03-08 22:52:45 +05:30
|
|
|
table_flags = self.open_flags()
|
2006-01-26 02:36:23 +05:30
|
|
|
self.reference_map_primary_map = db.DB(self.env)
|
|
|
|
self.reference_map_primary_map.set_flags(db.DB_DUP)
|
|
|
|
self.reference_map_primary_map.open(self.full_name,
|
|
|
|
"reference_map_primary_map",
|
|
|
|
db.DB_BTREE, flags=table_flags)
|
|
|
|
self.reference_map.associate(self.reference_map_primary_map,
|
|
|
|
find_primary_handle,
|
|
|
|
table_flags)
|
|
|
|
|
|
|
|
### Now we're ready to proceed with the normal upgrade.
|
        # First, make sure the stored default person handle is str, not unicode
        try:
            if self.UseTXN:
                # Start transaction if needed
                the_txn = self.env.txn_begin()
            else:
                the_txn = None
            handle = self.metadata.get('default',txn=the_txn)
            self.metadata.put('default',str(handle),txn=the_txn)
            if self.UseTXN:
                the_txn.commit()
            else:
                self.metadata.sync()
        except KeyError:
            # default person was not stored in database
            if self.UseTXN:
                the_txn.abort()

        # The rest of the upgrade deals with real data, not metadata,
        # so starting a (batch) transaction here.
        trans = self.transaction_begin("",True)

        # Numerous changes were made between dbversions 8 and 9.
        # If nothing else, we switched from storing pickled gramps classes
        # to storing builtin objects, via running serialize() recursively
        # until the very bottom.
        # Every stored object needs to be re-committed here.

        # Change every Source to have reporef_list
        for handle in self.source_map.keys():
            info = self.source_map[handle]
            source = Source()
            source.handle = handle
            # We already have a new Source object with the reporef_list;
            # just fill in the rest of the fields for this source
            (junk_handle, source.gramps_id, source.title, source.author,
             source.pubinfo, source.note, source.media_list,
             source.abbrev, source.change, source.datamap) = info
            self.commit_source(source,trans)
            self.update()

        # Family upgrade
        for handle in self.family_map.keys():
            info = self.family_map[handle]
            family = Family()
            family.handle = handle
            # Restore data from dbversion 8 (gramps 2.0.9)
            (junk_handle, family.gramps_id, family.father_handle,
             family.mother_handle, child_list, the_type,
             event_list, family.media_list, family.attribute_list,
             lds_seal, complete, family.source_list,
             family.note, family.change) = info

            if complete:
                family.marker.set(MarkerType.COMPLETE)

            # Change every event handle to an EventRef
            for event_handle in event_list:
                event_ref = EventRef()
                event_ref.ref = event_handle
                event_ref.role.set(EventRoleType.FAMILY)
                family.event_ref_list.append(event_ref)

            # Change child_list into child_ref_list
            for child_handle in child_list:
                child_ref = ChildRef()
                child_ref.ref = child_handle
                family.child_ref_list.append(child_ref)

            # Change relationship type from int to tuple
            family.type.set(the_type)

            # In all Attributes, convert type from string to a tuple
            for attribute in family.attribute_list:
                convert_attribute_9(attribute)

            # Cover attributes contained in MediaRefs
            for media_ref in family.media_list:
                convert_mediaref_9(media_ref)

            # Switch from fixed lds ords to a list
            if lds_seal:
                lds_seal.type = LdsOrd.SEAL_TO_SPOUSE
                lds_seal.private = False
                lds_seal.status = lds_seal_spouse_dict_9[lds_seal.status]
                family.lds_ord_list = [lds_seal]

            self.commit_family(family,trans)
            self.update()

        # Person upgrade
        # Needs to be run after the family upgrade is completed.
        def_rel = ChildRefType._DEFAULT
        for handle in self.person_map.keys():
            info = self.person_map[handle]
            person = Person()
            person.handle = handle
            # Restore data from dbversion 8 (gramps 2.0.9--2.0.11)
            (junk_handle, person.gramps_id, person.gender,
             person.primary_name, person.alternate_names, nickname,
             death_handle, birth_handle, event_list,
             person.family_list, parent_family_list,
             person.media_list, person.address_list, person.attribute_list,
             person.urls, lds_bapt, lds_endow, lds_seal,
             complete, person.source_list, person.note,
             person.change, person.private) = (info + (False,))[0:23]

            # Convert complete flag into marker
            if complete:
                person.marker.set(MarkerType.COMPLETE)

            # Change every event handle to an EventRef
            if birth_handle:
                event_ref = EventRef()
                event_ref.ref = birth_handle
                person.event_ref_list.append(event_ref)
                person.birth_ref_index = len(person.event_ref_list) - 1

            if death_handle:
                event_ref = EventRef()
                event_ref.ref = death_handle
                person.event_ref_list.append(event_ref)
                person.death_ref_index = len(person.event_ref_list) - 1

            for event_handle in event_list:
                event_ref = EventRef()
                event_ref.ref = event_handle
                person.event_ref_list.append(event_ref)

            # In all Name instances, convert type from string to a tuple
            for name in [person.primary_name] + person.alternate_names:
                old_type = name.type
                new_type = NameType()
                # Mapping "Other Name" from gramps 2.0.x to Unknown
                if old_type == 'Other Name':
                    new_type.set(NameType.UNKNOWN)
                else:
                    new_type.set_from_xml_str(old_type)
                name.type = new_type
                name.call = ''

            # Change parent_family_list into a list of handles
            # and transfer the relationship info into the family's
            # child_ref (in family.child_ref_list) as tuples.
            for (family_handle,mrel,frel) in parent_family_list:
                person.parent_family_list.append(family_handle)
                # Only change the family if the relations are non-default
                if (mrel,frel) != (def_rel,def_rel):
                    family = self.get_family_from_handle(family_handle)
                    child_handle_list = [ref.ref for ref in
                                         family.child_ref_list]
                    index = child_handle_list.index(person.handle)
                    child_ref = family.child_ref_list[index]
                    child_ref.frel.set(frel)
                    child_ref.mrel.set(mrel)
                    self.commit_family(family,trans)

            # In all Attributes, convert type from string to a tuple
            for attribute in person.attribute_list:
                convert_attribute_9(attribute)

            # Nickname becomes an attribute
            if nickname.strip():
                attr = Attribute()
                attr.set_type(AttributeType.NICKNAME)
                attr.set_value(nickname)
                person.attribute_list.append(attr)

            # Cover attributes contained in MediaRefs
            for media_ref in person.media_list:
                convert_mediaref_9(media_ref)

            # In all Urls, add type attribute
            for url in person.urls:
                convert_url_9(url)

            # Switch from fixed lds ords to a list
            if lds_bapt:
                lds_bapt.type = LdsOrd.BAPTISM
                lds_bapt.status = lds_bapt_dict_9[lds_bapt.status]
                person.lds_ord_list.append(lds_bapt)
            if lds_endow:
                lds_endow.type = LdsOrd.ENDOWMENT
                lds_endow.status = lds_bapt_dict_9[lds_endow.status]
                person.lds_ord_list.append(lds_endow)
            if lds_seal:
                lds_seal.type = LdsOrd.SEAL_TO_PARENTS
                lds_seal.status = lds_seal_parent_dict_9[lds_seal.status]
                person.lds_ord_list.append(lds_seal)
            # Old lds ords did not have a private attribute
            for item in person.lds_ord_list:
                item.private = False

            # Upgrade addresses: this is an upgrade_11 step
            for addr in person.address_list:
                addr.county = u''

            self.commit_person(person,trans)
            self.update()

        # Event upgrade
        # It turns out that a lot of events have duplicate gramps IDs.
        # We need to fix this. For some reason the secondary index gets
        # confused, so we resolve duplicate IDs manually.
        # First a quick pass via the cursor to get a list of event ids
        eid_list = []
        cursor = self.get_event_cursor()
        data = cursor.first()
        while data:
            handle,val = data
            eid_list.append(val[1])
            data = cursor.next()
        cursor.close()

        # Find the largest ID and extract the integer:
        # We can do this because in 2.0.x the event id is never exposed
        eid_list.sort()
        if len(eid_list) == 0:
            max_id_number = 0
        else:
            last_id = eid_list[-1]
            nre = re.compile(r"\D+(\d+)")
            max_id_number = int(nre.match(last_id).groups()[0])

        # get the list of all IDs that are non-unique
        dup_ids = [eid for eid in eid_list if eid_list.count(eid) > 1 ]

        for handle in self.event_map.keys():
            info = self.event_map[handle]
            event = Event()
            event.handle = handle
            (junk_handle, event.gramps_id, old_type, event.date,
             event.description, event.place, cause, event.private,
             event.source_list, event.note, witness_list,
             event.media_list, event.change) = info

            # Change the ID if it is non-unique
            if event.gramps_id in dup_ids:
                max_id_number += 1
                event.gramps_id = self.eprefix % max_id_number

            # Convert old string-based type to GrampsType
            event.type.set_from_xml_str(old_type)

            # Cover attributes contained in MediaRefs
            for media_ref in event.media_list:
                convert_mediaref_9(media_ref)

            # Upgrade witness -- no more Witness class
            if type(witness_list) != list:
                witness_list = []
            for witness in witness_list:
                if witness.type == 0: # witness name recorded
                    # Add name and comment to the event note
                    note_text = event.get_note() + "\n" + \
                                _("Witness name: %s") % witness.val
                    if witness.comment:
                        note_text += "\n" + _("Witness comment: %s") \
                                     % witness.comment
                    event.set_note(note_text)
                elif witness.type == 1: # witness ID recorded
                    person = self.get_person_from_handle(witness.val)
                    if person:
                        # Add an EventRef from that person
                        # to this event using ROLE_WITNESS role
                        event_ref = EventRef()
                        event_ref.ref = event.handle
                        event_ref.role.set(EventRoleType.WITNESS)
                        # Add privacy and comment
                        event_ref.private = witness.private
                        if witness.comment:
                            event_ref.set_note(witness.comment)
                        person.event_ref_list.append(event_ref)
                        self.commit_person(person,trans)
                    else:
                        # Broken witness: dangling witness handle
                        # with no corresponding person in the db
                        note_text = event.get_note() + "\n" + \
                                    _("Broken witness reference detected "
                                      "while upgrading database to version 9.")
                        event.set_note(note_text)

            # This is an upgrade_10 step
            if cause.strip():
                attr = Attribute()
                attr.set_type(AttributeType.CAUSE)
                attr.set_value(cause)
                event.add_attribute(attr)

            self.commit_event(event,trans)
            self.update()

        # Place upgrade
        for handle in self.place_map.keys():
            info = self.place_map[handle]
            place = Place()
            place.handle = handle
            (junk_handle, place.gramps_id, place.title, place.long, place.lat,
             place.main_loc, place.alt_loc, place.urls, place.media_list,
             place.source_list, place.note, place.change) = info

            # Cover attributes contained in MediaRefs
            for media_ref in place.media_list:
                convert_mediaref_9(media_ref)

            # In all Urls, add type attribute
            for url in place.urls:
                convert_url_9(url)

            # Upgrade locations: this is an upgrade_11 step
            if place.main_loc:
                place.main_loc.street = u''
            for l in place.alt_loc:
                l.street = u''

            self.commit_place(place,trans)
            self.update()

        # Media upgrade
        for handle in self.media_map.keys():
            info = self.media_map[handle]
            media_object = MediaObject()
            media_object.handle = handle
            (junk_handle, media_object.gramps_id, media_object.path,
             media_object.mime, media_object.desc, media_object.attribute_list,
             media_object.source_list, media_object.note, media_object.change,
             media_object.date) = info

            # In all Attributes, convert type from string to a tuple
            for attribute in media_object.attribute_list:
                convert_attribute_9(attribute)

            self.commit_media_object(media_object,trans)
            self.update()

        self.transaction_commit(trans,"Upgrade to DB version 9")
        # Close secondary index
        self.reference_map_primary_map.close()

        if self.UseTXN:
            # Separate transaction to save metadata
            the_txn = self.env.txn_begin()
        else:
            the_txn = None
        self.metadata.put('version',11,txn=the_txn)
        if self.UseTXN:
            the_txn.commit()
        else:
            self.metadata.sync()

        print "Done upgrading to DB version 11"

    def gramps_upgrade_10(self):
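        """
        Upgrades a version-9 database to version 10: events and event
        references gain an attribute list (the removed 'cause' field
        becomes a CAUSE attribute), and names lose the 'sname' field.
        """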
        print "Upgrading to DB version 10..."

        # Remove event column metadata, since columns have changed.
        # This will reset all columns to defaults in the event view
        for name in (PERSON_COL_KEY,EVENT_COL_KEY):
            try:
                if self.UseTXN:
                    # Start transaction if needed
                    the_txn = self.env.txn_begin()
                else:
                    the_txn = None
                self.metadata.delete(name,txn=the_txn)
                if self.UseTXN:
                    the_txn.commit()
                else:
                    self.metadata.sync()
            except KeyError:
                if self.UseTXN:
                    the_txn.abort()

        # This upgrade adds attribute lists to Event and EventRef objects
        length = self.get_number_of_events() + len(self.person_map) \
                 + self.get_number_of_families()
        self.set_total(length)

        for handle in self.event_map.keys():
            info = self.event_map[handle]

            (junk_handle, gramps_id, the_type, date, description,
             place, cause, source_list, note, media_list,
             change, marker, private) = info

            # Cause is removed, so we're converting it into an attribute
            if cause.strip():
                attr = Attribute()
                attr.set_type(AttributeType.CAUSE)
                attr.set_value(cause)
                attr_list = [attr.serialize()]
            else:
                attr_list = []

            info = (handle, gramps_id, the_type, date,
                    description, place, source_list, note, media_list,
                    attr_list, change, marker, private)

            if self.UseTXN:
                the_txn = self.env.txn_begin()
            else:
                the_txn = None
            self.event_map.put(str(handle),info,txn=the_txn)
            if self.UseTXN:
                the_txn.commit()
            self.update()

        if not self.UseTXN:
            self.event_map.sync()

        # Personal event references
        for handle in self.person_map.keys():
            info = self.person_map[handle]

            (junk_handle,gramps_id,gender,
             primary_name,alternate_names,death_ref_index,
             birth_ref_index,event_ref_list,family_list,
             parent_family_list,media_list,address_list,attribute_list,
             urls,lds_ord_list,source_list,note,change,marker,
             private,person_ref_list,) = info

            # Names lost the "sname" attribute
            new_primary_name = convert_name_10(primary_name)
            new_alternate_names = [convert_name_10(name)
                                   for name in alternate_names]

            # Event references gained an attribute list
            new_event_ref_list = [
                (privacy,note,[],ref,role)
                for (privacy,note,ref,role) in event_ref_list]

            info = (handle,gramps_id,gender,new_primary_name,
                    new_alternate_names,
                    death_ref_index,birth_ref_index,new_event_ref_list,
                    family_list,parent_family_list,media_list,address_list,
                    attribute_list,urls,lds_ord_list,source_list,note,
                    change,marker,private,person_ref_list,)

            if self.UseTXN:
                the_txn = self.env.txn_begin()
            else:
                the_txn = None
            self.person_map.put(str(handle),info,txn=the_txn)
            if self.UseTXN:
                the_txn.commit()
            self.update()

        if not self.UseTXN:
            self.person_map.sync()

        # Family event references
        for handle in self.family_map.keys():
            info = self.family_map[handle]

            (junk_handle,gramps_id,father_handle,
             mother_handle,child_ref_list,the_type,event_ref_list,
             media_list,attribute_list,lds_seal_list,source_list,note,
             change, marker, private) = info

            new_event_ref_list = [
                (privacy,note,[],ref,role)
                for (privacy,note,ref,role) in event_ref_list]

            info = (handle,gramps_id,father_handle,
                    mother_handle,child_ref_list,the_type,
                    new_event_ref_list,
                    media_list,attribute_list,lds_seal_list,
                    source_list,note,change, marker, private)

            if self.UseTXN:
                the_txn = self.env.txn_begin()
            else:
                the_txn = None
            self.family_map.put(str(handle),info,txn=the_txn)
            if self.UseTXN:
                the_txn.commit()
            self.update()
        if not self.UseTXN:
            self.family_map.sync()

        self.reset()

        if self.UseTXN:
            # Separate transaction to save metadata
            the_txn = self.env.txn_begin()
        else:
            the_txn = None
        self.metadata.put('version',10,txn=the_txn)
        if self.UseTXN:
            the_txn.commit()
        else:
            self.metadata.sync()

        print "Done upgrading to DB version 10"

    def gramps_upgrade_11(self):
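        """
        Upgrades a version-10 database to version 11: addresses are
        rewritten around a location tuple (gaining a county field) and
        locations gain a street field.
        """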
        print "Upgrading to DB version 11..."

        # This upgrade modifies addresses and locations
        length = len(self.person_map) + len(self.place_map) \
                 + len(self.repository_map)
        self.set_total(length)

        # Personal addresses
        for handle in self.person_map.keys():
            info = self.person_map[handle]

            (junk_handle,gramps_id,gender,
             primary_name,alternate_names,death_ref_index,
             birth_ref_index,event_ref_list,family_list,
             parent_family_list,media_list,address_list,attribute_list,
             urls,lds_ord_list,source_list,note,change,marker,
             private,person_ref_list,) = info

            new_address_list = [convert_address_11(addr)
                                for addr in address_list]

            info = (handle,gramps_id,gender,
                    primary_name,alternate_names,death_ref_index,
                    birth_ref_index,event_ref_list,family_list,
                    parent_family_list,media_list,new_address_list,
                    attribute_list,
                    urls,lds_ord_list,source_list,note,change,marker,
                    private,person_ref_list,)

            if self.UseTXN:
                the_txn = self.env.txn_begin()
            else:
                the_txn = None
            self.person_map.put(str(handle),info,txn=the_txn)
            if self.UseTXN:
                the_txn.commit()
            self.update()

        if not self.UseTXN:
            self.person_map.sync()

        # Repositories
        for handle in self.repository_map.keys():
            info = self.repository_map[handle]

            (junk_handle, gramps_id, the_type, name, note,
             address_list, urls, marker, private) = info

            new_address_list = [convert_address_11(addr)
                                for addr in address_list]

            info = (handle, gramps_id, the_type, name, note,
                    new_address_list, urls, marker, private)

            if self.UseTXN:
                the_txn = self.env.txn_begin()
            else:
                the_txn = None
            self.repository_map.put(str(handle),info,txn=the_txn)
            if self.UseTXN:
                the_txn.commit()
            self.update()

        if not self.UseTXN:
            self.repository_map.sync()

        # Places
        for handle in self.place_map.keys():
            info = self.place_map[handle]

            (junk_handle, gramps_id, title, long, lat, main_loc, alt_loc, urls,
             media_list, source_list, note, change, marker, private) = info

            if main_loc:
                main_loc = convert_location_11(main_loc)

            new_alt_loc = [convert_location_11(loc) for loc in alt_loc]

            info = (handle,gramps_id,title,long,lat,main_loc,new_alt_loc,urls,
                    media_list, source_list, note, change, marker, private)

            if self.UseTXN:
                the_txn = self.env.txn_begin()
            else:
                the_txn = None
            self.place_map.put(str(handle),info,txn=the_txn)
            if self.UseTXN:
                the_txn.commit()
            self.update()

        if not self.UseTXN:
            self.place_map.sync()

        self.reset()

        if self.UseTXN:
            # Separate transaction to save metadata
            the_txn = self.env.txn_begin()
        else:
            the_txn = None
        self.metadata.put('version', 11, txn=the_txn)
        if self.UseTXN:
            the_txn.commit()
        else:
            self.metadata.sync()

        print "Done upgrading to DB version 11"

class BdbTransaction(Transaction):
    def __init__(self,msg,db,batch=False,no_magic=False):
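        # reference_del/reference_add collect pending reference_map
        # deletions and additions recorded while the transaction runs;
        # transaction_commit replays them against the reference_map table.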
        Transaction.__init__(self,msg,db,batch,no_magic)
        self.reference_del = []
        self.reference_add = []

def convert_attribute_9(attribute):
    old_type = attribute.type
    new_type = AttributeType()
    new_type.set_from_xml_str(old_type)
    attribute.type = new_type

def convert_mediaref_9(media_ref):
    for attribute in media_ref.attribute_list:
        convert_attribute_9(attribute)

def convert_url_9(url):
    path = url.path.strip()
    if (path.find('mailto:') == 0) or (url.path.find('@') != -1):
        new_type = UrlType.EMAIL
    elif path.find('http://') == 0:
        new_type = UrlType.WEB_HOME
    elif path.find('ftp://') == 0:
        new_type = UrlType.WEB_FTP
    else:
        new_type = UrlType.CUSTOM
    url.type = UrlType(new_type)

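# Mappings from the fixed integer status codes used for LDS ordinances
# in gramps 2.0.x databases to the LdsOrd status constants.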
lds_bapt_dict_9 = {
    0: LdsOrd.STATUS_NONE,
    1: LdsOrd.STATUS_CHILD,
    2: LdsOrd.STATUS_CLEARED,
    3: LdsOrd.STATUS_COMPLETED,
    4: LdsOrd.STATUS_INFANT,
    5: LdsOrd.STATUS_PRE_1970,
    6: LdsOrd.STATUS_QUALIFIED,
    7: LdsOrd.STATUS_STILLBORN,
    8: LdsOrd.STATUS_SUBMITTED,
    9: LdsOrd.STATUS_UNCLEARED,
    }

lds_seal_parent_dict_9 = {
    0: LdsOrd.STATUS_NONE,
    1: LdsOrd.STATUS_BIC,
    2: LdsOrd.STATUS_CLEARED,
    3: LdsOrd.STATUS_COMPLETED,
    4: LdsOrd.STATUS_DNS,
    5: LdsOrd.STATUS_PRE_1970,
    6: LdsOrd.STATUS_QUALIFIED,
    7: LdsOrd.STATUS_STILLBORN,
    8: LdsOrd.STATUS_SUBMITTED,
    9: LdsOrd.STATUS_UNCLEARED,
    }

lds_seal_spouse_dict_9 = {
    0: LdsOrd.STATUS_NONE,
    1: LdsOrd.STATUS_CANCELED,
    2: LdsOrd.STATUS_CLEARED,
    3: LdsOrd.STATUS_COMPLETED,
    4: LdsOrd.STATUS_DNS,
    5: LdsOrd.STATUS_PRE_1970,
    6: LdsOrd.STATUS_QUALIFIED,
    7: LdsOrd.STATUS_DNS_CAN,
    8: LdsOrd.STATUS_SUBMITTED,
    9: LdsOrd.STATUS_UNCLEARED,
    }

def low_level_9(the_db,update):
    """
    This is a low-level repair routine.

    It fixes DB inconsistencies such as duplicates.
    Returns a (status,value) tuple.
    The boolean status indicates the success of the procedure.
    On success, value is the total number of records checked;
    on failure, it is the name of the problematic table.
    """
    the_length = 0
    for the_map in [('Person',the_db.person_map),
                    ('Family',the_db.family_map),
                    ('Event',the_db.event_map),
                    ('Place',the_db.place_map),
                    ('Source',the_db.source_map),
                    ('Media',the_db.media_map)]:

        # print "Low-level repair: table: %s" % the_map[0]
        status,length = _table_low_level_9(the_db.env,the_map[1])
        if update:
            update()
        if status:
            # print "Done."
            the_length += length
        else:
            print "Low-level repair: Problem with table: %s" % the_map[0]
            return (False,the_map[0])
    return (True,the_length)

def _table_low_level_9(env,table):
    """
    Low level repair for a given db table.
    """

    handle_list = table.keys()
    length = len(handle_list)
    dup_handles = set(
        [ handle for handle in handle_list if handle_list.count(handle) > 1 ]
        )

    if not dup_handles:
        # print "    No dupes found for this table"
        return (True,length)

    the_txn = env.txn_begin()
    table_cursor = GrampsBSDDBDupCursor(table,txn=the_txn)
    # Dirty hack to prevent records from unpickling by DBShelve
    table_cursor._extract = lambda rec: rec

    for handle in dup_handles:
        print "    Duplicates found for handle: %s" % handle
        try:
            ret = table_cursor.set(handle)
        except:
            print "    Failed setting initial cursor."
            table_cursor.close()
            the_txn.abort()
            return (False,None)

        for count in range(handle_list.count(handle)-1):
            try:
                table_cursor.delete()
                print "    Successfully deleted dupe #%d" % (count+1)
            except:
                print "    Failed deleting dupe."
                table_cursor.close()
                the_txn.abort()
                return (False,None)

            try:
                ret = table_cursor.next_dup()
            except:
                print "    Failed moving the cursor."
                table_cursor.close()
                the_txn.abort()
                return (False,None)

    table_cursor.close()
    the_txn.commit()
    return (True,length)

def convert_name_10(name):
    # Names lost the "sname" attribute
    (privacy,source_list,note,date,first_name,surname,suffix,title,name_type,
     prefix,patronymic,sname,group_as,sort_as,display_as,call) = name
    return (privacy,source_list,note,date,first_name,surname,suffix,title,
            name_type,prefix,patronymic,group_as,sort_as,display_as,call)

def convert_address_11(addr):
    # addresses got a location tuple instead of separate city,... fields
    (privacy,source_list,note,date,
     city,state,country,postal,phone,street) = addr
    county = u''
    location_base = (street,city,county,state,country,postal,phone)
    return (privacy,source_list,note,date,location_base)

def convert_location_11(loc):
    (location_base,parish,county) = loc
    (city,state,country,postal,phone) = location_base
    street = u''
    new_location_base = (street,city,county,state,country,postal,phone)
    return (new_location_base,parish)

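# Simple debugging harness: load the database given on the command line,
# print each person's handle and primary name, then dump the surname
# index keys.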
if __name__ == "__main__":
|
|
|
|
|
|
|
|
import sys
|
|
|
|
|
|
|
|
d = GrampsBSDDB()
|
|
|
|
d.load(sys.argv[1],lambda x: x)
|
|
|
|
|
|
|
|
c = d.get_person_cursor()
|
|
|
|
data = c.first()
|
|
|
|
while data:
|
|
|
|
person = Person(data[1])
|
|
|
|
print data[0], person.get_primary_name().get_name(),
|
|
|
|
data = c.next()
|
|
|
|
c.close()
|
|
|
|
|
|
|
|
print d.surnames.keys()
|