#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2008  Donald N. Allingham
# Copyright (C) 2010       Nick Hall
# Copyright (C) 2011       Tim G L Lyons
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
#
# $Id$

"""
Provide the Berkeley DB (DbBsddb) database backend for GRAMPS.
This is used since GRAMPS version 3.0
"""

#-------------------------------------------------------------------------
#
# Standard python modules
#
#-------------------------------------------------------------------------
from __future__ import with_statement
import cPickle as pickle
import os
import time
import locale
import bisect
from functools import wraps
import logging
from sys import maxint

from gen.ggettext import gettext as _
import config
if config.get('preferences.use-bsddb3'):
    from bsddb3 import dbshelve, db
else:
    from bsddb import dbshelve, db

#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from gen.lib import (GenderStats, Person, Family, Event, Place, Source,
                     Citation, MediaObject, Repository, Note, Tag)
from gen.db import (DbBsddbRead, DbWriteBase, BSDDBTxn,
                    DbTxn, BsddbBaseCursor, DbVersionError,
                    DbEnvironmentError, DbUpgradeRequiredError,
                    find_surname, find_surname_name,
                    DbUndoBSDDB as DbUndo)
from gen.db.dbconst import *
from gen.utils.callback import Callback
from gen.updatecallback import UpdateCallback
import Errors
import constfunc

_LOG = logging.getLogger(DBLOGNAME)
LOG = logging.getLogger(".citation")

_MINVERSION = 9
_DBVERSION = 16

IDTRANS = "person_id"
FIDTRANS = "family_id"
PIDTRANS = "place_id"
OIDTRANS = "media_id"
EIDTRANS = "event_id"
RIDTRANS = "repo_id"
NIDTRANS = "note_id"
SIDTRANS = "source_id"
CIDTRANS = "citation_id"
TAGTRANS = "tag_name"
SURNAMES = "surnames"
NAME_GROUP = "name_group"
META = "meta_data"

FAMILY_TBL = "family"
PLACES_TBL = "place"
SOURCES_TBL = "source"
CITATIONS_TBL = "citation"
MEDIA_TBL = "media"
EVENTS_TBL = "event"
PERSON_TBL = "person"
REPO_TBL = "repo"
NOTE_TBL = "note"
TAG_TBL = "tag"

REF_MAP = "reference_map"
REF_PRI = "primary_map"
REF_REF = "referenced_map"

DBERRS = (db.DBRunRecoveryError, db.DBAccessError,
          db.DBPageNotFoundError, db.DBInvalidArgError)

# The following two dictionaries provide fast translation
# between the primary class names and the keys used to reference
# these classes in the database tables. Beware that changing
# these maps or modifying the values of the keys will break
# existing databases.
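# For example, CLASS_TO_KEY_MAP[Person.__name__] yields PERSON_KEY, and
# KEY_TO_CLASS_MAP[PERSON_KEY] yields 'Person' again; the two maps below
# are exact inverses of each other.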
CLASS_TO_KEY_MAP = {Person.__name__: PERSON_KEY,
                    Family.__name__: FAMILY_KEY,
                    Source.__name__: SOURCE_KEY,
                    Citation.__name__: CITATION_KEY,
                    Event.__name__: EVENT_KEY,
                    MediaObject.__name__: MEDIA_KEY,
                    Place.__name__: PLACE_KEY,
                    Repository.__name__: REPOSITORY_KEY,
                    Note.__name__: NOTE_KEY,
                    Tag.__name__: TAG_KEY}

KEY_TO_CLASS_MAP = {PERSON_KEY: Person.__name__,
                    FAMILY_KEY: Family.__name__,
                    SOURCE_KEY: Source.__name__,
                    CITATION_KEY: Citation.__name__,
                    EVENT_KEY: Event.__name__,
                    MEDIA_KEY: MediaObject.__name__,
                    PLACE_KEY: Place.__name__,
                    REPOSITORY_KEY: Repository.__name__,
                    NOTE_KEY: Note.__name__,
                    TAG_KEY: Tag.__name__}

KEY_TO_NAME_MAP = {PERSON_KEY: 'person',
                   FAMILY_KEY: 'family',
                   EVENT_KEY: 'event',
                   SOURCE_KEY: 'source',
                   CITATION_KEY: 'citation',
                   PLACE_KEY: 'place',
                   MEDIA_KEY: 'media',
                   REPOSITORY_KEY: 'repository',
                   #REFERENCE_KEY: 'reference',
                   NOTE_KEY: 'note',
                   TAG_KEY: 'tag'}

#-------------------------------------------------------------------------
#
# Helper functions
#
#-------------------------------------------------------------------------
def find_idmap(key, data):
    return str(data[1])

# Secondary database key lookups for reference_map table
# reference_map data values are of the form:
#   ((primary_object_class_name, primary_object_handle),
#    (referenced_object_class_name, referenced_object_handle))

def find_primary_handle(key, data):
    return str(data[0][1])

def find_referenced_handle(key, data):
    return str(data[1][1])

#-------------------------------------------------------------------------
#
# BsddbWriteCursor
#
#-------------------------------------------------------------------------
class BsddbWriteCursor(BsddbBaseCursor):

    def __init__(self, source, txn=None, **kwargs):
        BsddbBaseCursor.__init__(self, txn=txn, **kwargs)
        self.cursor = source.db.cursor(txn)
        self.source = source

#-------------------------------------------------------------------------
#
# DbBsddbAssocCursor
#
#-------------------------------------------------------------------------
class DbBsddbAssocCursor(BsddbBaseCursor):

    def __init__(self, source, txn=None, **kwargs):
        BsddbBaseCursor.__init__(self, txn=txn, **kwargs)
        self.cursor = source.cursor(txn)
        self.source = source

#-------------------------------------------------------------------------
#
# DbBsddb
#
#-------------------------------------------------------------------------
class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
    """
    GRAMPS database write access object.
    """

    # Set up dictionary for callback signal handler
    # ---------------------------------------------
    # 1. Signals for primary objects
    __signals__ = dict(
        (obj+'-'+op, signal)
        for obj in ['person', 'family', 'event', 'place', 'source',
                    'citation', 'media', 'note', 'repository', 'tag']
        for op, signal in zip(['add', 'update', 'delete', 'rebuild'],
                              [(list,), (list,), (list,), None])
    )

    # 2. Signals for long operations
    __signals__.update(('long-op-'+op, signal)
                       for op, signal in zip(['start', 'heartbeat', 'end'],
                                             [(object,), None, None]))

    # 3. Special signal for change in home person
    __signals__['home-person-changed'] = None

    # 4. Signal for change in person group name; parameters are the name
    #    and the new group name
    __signals__['person-groupname-rebuild'] = (unicode, unicode)

    def __init__(self):
        """Create a new GrampsDB."""

        self.txn = None
        DbBsddbRead.__init__(self)
        DbWriteBase.__init__(self)
        #UpdateCallback.__init__(self)
        self.secondary_connected = False
        self.has_changed = False
        self.brief_name = None

    def catch_db_error(func):
        """
        Decorator function for catching database errors.

        If *func* throws one of the exceptions in DBERRS, the error is
        logged and a DbError exception is raised.
        """
        @wraps(func)
        def try_(self, *args, **kwargs):
            try:
                return func(self, *args, **kwargs)
            except DBERRS, msg:
                self.__log_error()
                raise Errors.DbError(msg)
        return try_

    def __open_db(self, file_name, table_name, dbtype=db.DB_HASH, flags=0):
        dbmap = db.DB(self.env)
        dbmap.set_flags(flags)

        fname = os.path.join(file_name, table_name + DBEXT)

        if self.readonly:
            dbmap.open(fname, table_name, dbtype, DBFLAGS_R)
        else:
            dbmap.open(fname, table_name, dbtype, DBFLAGS_O, DBMODE)
        return dbmap

    def __open_shelf(self, file_name, table_name, dbtype=db.DB_HASH):
        dbmap = dbshelve.DBShelf(self.env)

        fname = os.path.join(file_name, table_name + DBEXT)

        if self.readonly:
            dbmap.open(fname, table_name, dbtype, DBFLAGS_R)
        else:
            dbmap.open(fname, table_name, dbtype, DBFLAGS_O, DBMODE)
        return dbmap

    def __all_handles(self, table):
        return table.keys(self.txn)

    def __log_error(self):
        mypath = os.path.join(self.get_save_path(), DBRECOVFN)
        ofile = open(mypath, "w")
        ofile.close()
        try:
            clear_lock_file(self.get_save_path())
        except:
            pass

    _log_error = __log_error

    # Override get_cursor method from the superclass to add update
    # capability

    @catch_db_error
    def get_cursor(self, table, txn=None, update=False, commit=False):
        """ Helper function to return a cursor over a table """
        if update and not txn:
            txn = self.env.txn_begin(self.txn)
        return BsddbWriteCursor(table, txn=txn or self.txn,
                                update=update, commit=commit)

    # cursors for lookups in the reference_map for back reference
    # lookups. The reference_map has three indexes:
    #   the main index: a tuple of (primary_handle, referenced_handle)
    #   the primary_handle index: the primary_handle
    #   the referenced_handle index: the referenced_handle
    # the main index is unique, the others allow duplicate entries.
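    # Illustrative example (hypothetical handles, comment only): a stored
    # reference_map entry keyed by str(('p1', 's1')) whose value unpickles
    # to ((PERSON_KEY, 'p1'), (SOURCE_KEY, 's1')) can be reached through
    # get_reference_map_primary_cursor().set('p1') via the primary_map
    # index, or through get_reference_map_referenced_cursor().set('s1')
    # via the referenced_map index.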
    @catch_db_error
    def get_reference_map_cursor(self):
        """
        Returns a reference to a cursor over the reference map
        """
        return DbBsddbAssocCursor(self.reference_map, self.txn)

    @catch_db_error
    def get_reference_map_primary_cursor(self):
        """
        Returns a reference to a cursor over the reference map primary map
        """
        return DbBsddbAssocCursor(self.reference_map_primary_map,
                                  self.txn)

    @catch_db_error
    def get_reference_map_referenced_cursor(self):
        """
        Returns a reference to a cursor over the reference map referenced
        map
        """
        return DbBsddbAssocCursor(self.reference_map_referenced_map,
                                  self.txn)

    # These are overriding the DbBsddbRead's methods of saving metadata
    # because we now have a txn-capable metadata table

    @catch_db_error
    def set_default_person_handle(self, handle):
        """Set the default Person to the passed instance."""
        if not self.readonly:
            # Start transaction
            with BSDDBTxn(self.env, self.metadata) as txn:
                txn.put('default', str(handle))
            self.emit('home-person-changed')

    @catch_db_error
    def get_default_person(self):
        """Return the default Person of the database."""
        person = self.get_person_from_handle(self.get_default_handle())
        if person:
            return person
        elif (self.metadata) and (not self.readonly):
            # Start transaction
            with BSDDBTxn(self.env, self.metadata) as txn:
                txn.put('default', None)
        return None

    def set_mediapath(self, path):
        """Set the default media path for database, path should be utf-8."""
        if self.metadata and not self.readonly:
            # Start transaction
            with BSDDBTxn(self.env, self.metadata) as txn:
                txn.put('mediapath', path)

    @catch_db_error
    def version_supported(self):
        dbversion = self.metadata.get('version', default=0)
        return ((dbversion <= _DBVERSION) and (dbversion >= _MINVERSION))

    @catch_db_error
    def need_upgrade(self):
        dbversion = self.metadata.get('version', default=0)
        return not self.readonly and dbversion < _DBVERSION

    def __check_readonly(self, name):
        """
        Return True if we don't have read/write access to the database,
        otherwise return False (that is, we DO have read/write access)
        """

        # See if we can write to the target directory at all
        if not os.access(name, os.W_OK):
            return True

        # See if we lack write access to any files in the directory
        for base in [FAMILY_TBL, PLACES_TBL, SOURCES_TBL, CITATIONS_TBL,
                     MEDIA_TBL, EVENTS_TBL, PERSON_TBL, REPO_TBL,
                     NOTE_TBL, REF_MAP, META]:
            path = os.path.join(name, base + DBEXT)
            if os.path.isfile(path) and not os.access(path, os.W_OK):
                return True

        # All tests passed.
        # Inform the caller that we are NOT read only.
        return False

    @catch_db_error
    def load(self, name, callback, mode=DBMODE_W, upgrade=False):

        if self.__check_readonly(name):
            mode = DBMODE_R
        else:
            write_lock_file(name)

        if self.db_is_open:
            self.close()

        self.readonly = mode == DBMODE_R
        #super(DbBsddbRead, self).load(name, callback, mode)
        if callback:
            callback(12)

        # Save full path and base file name
        self.full_name = os.path.abspath(name)
        self.path = self.full_name
        self.brief_name = os.path.basename(name)

        # Set up database environment
        self.env = db.DBEnv()
        self.env.set_cachesize(0, DBCACHE)

        # These env settings are only needed for Txn environment
        self.env.set_lk_max_locks(DBLOCKS)
        self.env.set_lk_max_objects(DBOBJECTS)

        # Set to auto remove stale logs
        self.set_auto_remove()

        # Do not flush to disk synchronously. This greatly speeds up
        # database changes, but comes at the cost of durability: after a
        # power loss the database may need recovery, see the BSDDB manual.
        ## NOTE: due to a pre-4.8 bsddb bug, this flag must be set before
        ## the env is opened, #16492 -
        ## http://download.oracle.com/docs/cd/E17076_02/html/installation/changelog_4_8.html
        self.env.set_flags(db.DB_TXN_WRITE_NOSYNC, 1)

        # The DB_PRIVATE flag must go if we ever move to multi-user setup
        env_flags = db.DB_CREATE | db.DB_PRIVATE |\
                    db.DB_INIT_MPOOL | db.DB_INIT_LOCK |\
                    db.DB_INIT_LOG | db.DB_INIT_TXN | db.DB_THREAD

        # As opposed to before, we always try recovery on databases
        env_flags |= db.DB_RECOVER

        # Environment name is now based on the filename
        env_name = name

        try:
            self.env.open(env_name, env_flags)
        except Exception, msg:
            try:
                self.__close_early()
            except:
                pass
            raise DbEnvironmentError(msg)

        self.env.txn_checkpoint()

        if callback:
            callback(25)

        # Process metadata
        self.metadata = self.__open_shelf(self.full_name, META)

        # If we cannot work with this DB version,
        # it makes no sense to go further
        if not self.version_supported():
            self.__close_early()
            raise DbVersionError()

        self.__load_metadata()
        gstats = self.metadata.get('gender_stats', default=None)

        # Ensure version info in metadata
        if not self.readonly:
            # Start transaction
            with BSDDBTxn(self.env, self.metadata) as txn:
                if gstats is None:
                    # New database. Set up the current version.
                    #self.metadata.put('version', _DBVERSION, txn=the_txn)
                    txn.put('version', _DBVERSION)
                elif 'version' not in self.metadata:
                    # Not a new database, but the version is missing.
                    # Use 0, but it is likely to fail anyway.
                    txn.put('version', 0)

        self.genderStats = GenderStats(gstats)

        # Open main tables in gramps database
        db_maps = [
            ("family_map", FAMILY_TBL, db.DB_HASH),
            ("place_map", PLACES_TBL, db.DB_HASH),
            ("source_map", SOURCES_TBL, db.DB_HASH),
            ("citation_map", CITATIONS_TBL, db.DB_HASH),
            ("media_map", MEDIA_TBL, db.DB_HASH),
            ("event_map", EVENTS_TBL, db.DB_HASH),
            ("person_map", PERSON_TBL, db.DB_HASH),
            ("repository_map", REPO_TBL, db.DB_HASH),
            ("note_map", NOTE_TBL, db.DB_HASH),
            ("tag_map", TAG_TBL, db.DB_HASH),
            ("reference_map", REF_MAP, db.DB_BTREE),
        ]

        dbflags = DBFLAGS_R if self.readonly else DBFLAGS_O
        for (dbmap, dbname, dbtype) in db_maps:
            _db = self.__open_shelf(self.full_name, dbname, dbtype)
            setattr(self, dbmap, _db)

        if callback:
            callback(37)

        # Open name grouping database
        self.name_group = self.__open_db(self.full_name, NAME_GROUP,
                                         db.DB_HASH, db.DB_DUP)

        # Here we take care of any changes in the tables related to new code.
        # If secondary indices change, then they should be removed
        # or rebuilt by upgrade as well. In any case, the
        # self.secondary_connected flag should be set accordingly.
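        # Illustrative flow (version numbers assumed for the example): a
        # database at schema version 14 opened with upgrade=True runs
        # gramps_upgrade() below, which applies gramps_upgrade_14/15/16 in
        # order; opened with upgrade=False it raises DbUpgradeRequiredError
        # instead.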
        if self.need_upgrade():
            if upgrade:
                self.gramps_upgrade(callback)
            else:
                self.__close_early()
                raise DbUpgradeRequiredError()

        if callback:
            callback(50)

        # Connect secondary indices
        if not self.secondary_connected:
            self.__connect_secondary()

        if callback:
            callback(75)

        # Open undo database
        self.__open_undodb()
        self.db_is_open = True

        if callback:
            callback(87)

        self.abort_possible = True
        return 1

    def __open_undodb(self):
        """
        Open the undo database
        """
        if not self.readonly:
            self.undolog = os.path.join(self.full_name, DBUNDOFN)
            self.undodb = DbUndo(self, self.undolog)
            self.undodb.open()

    def __close_undodb(self):
        if not self.readonly:
            try:
                self.undodb.close()
            except db.DBNoSuchFileError:
                pass

    def get_undodb(self):
        """
        Return the database that keeps track of Undo/Redo operations.
        """
        return self.undodb

    def __load_metadata(self):
        # name display formats
        self.name_formats = self.metadata.get('name_formats', default=[])
        # upgrade formats if they were saved in the old way
        for format_ix in range(len(self.name_formats)):
            format = self.name_formats[format_ix]
            if len(format) == 3:
                format = format + (True,)
                self.name_formats[format_ix] = format

        # database owner
        try:
            owner_data = self.metadata.get('researcher')
            if owner_data:
                if len(owner_data[0]) == 7:  # Pre-3.3 format
                    owner_data = upgrade_researcher(owner_data)
                self.owner.unserialize(owner_data)
        except ImportError:  # handle problems with pre-alpha 3.0
            pass

        # bookmarks
        meta = lambda meta: self.metadata.get(meta, default=[])

        self.bookmarks.set(meta('bookmarks'))
        self.family_bookmarks.set(meta('family_bookmarks'))
        self.event_bookmarks.set(meta('event_bookmarks'))
        self.source_bookmarks.set(meta('source_bookmarks'))
        self.citation_bookmarks.set(meta('citation_bookmarks'))
        self.repo_bookmarks.set(meta('repo_bookmarks'))
        self.media_bookmarks.set(meta('media_bookmarks'))
        self.place_bookmarks.set(meta('place_bookmarks'))
        self.note_bookmarks.set(meta('note_bookmarks'))

        # Custom type values
        self.family_event_names = set(meta('fevent_names'))
        self.individual_event_names = set(meta('pevent_names'))
        self.family_attributes = set(meta('fattr_names'))
        self.individual_attributes = set(meta('pattr_names'))
        self.marker_names = set(meta('marker_names'))
        self.child_ref_types = set(meta('child_refs'))
        self.family_rel_types = set(meta('family_rels'))
        self.event_role_names = set(meta('event_roles'))
        self.name_types = set(meta('name_types'))
        self.origin_types = set(meta('origin_types'))
        self.repository_types = set(meta('repo_types'))
        self.note_types = set(meta('note_types'))
        self.source_media_types = set(meta('sm_types'))
        self.url_types = set(meta('url_types'))
        self.media_attributes = set(meta('mattr_names'))

        # surname list
        self.surname_list = meta('surname_list')

    def __connect_secondary(self):
        """
        Connect or create secondary index tables.

        It assumes that the tables either exist and are in the right format
        or do not exist (in which case they get created).

        It is the responsibility of upgrade code to either create
        or remove invalid secondary index tables.
""" # index tables used just for speeding up searches self.surnames = self.__open_db(self.full_name, SURNAMES, db.DB_BTREE, db.DB_DUP | db.DB_DUPSORT) db_maps = [ ("id_trans", IDTRANS, db.DB_HASH, 0), ("fid_trans", FIDTRANS, db.DB_HASH, 0), ("eid_trans", EIDTRANS, db.DB_HASH, 0), ("pid_trans", PIDTRANS, db.DB_HASH, 0), ("sid_trans", SIDTRANS, db.DB_HASH, 0), ("cid_trans", CIDTRANS, db.DB_HASH, 0), ("oid_trans", OIDTRANS, db.DB_HASH, 0), ("rid_trans", RIDTRANS, db.DB_HASH, 0), ("nid_trans", NIDTRANS, db.DB_HASH, 0), ("tag_trans", TAGTRANS, db.DB_HASH, 0), ("reference_map_primary_map", REF_PRI, db.DB_BTREE, 0), ("reference_map_referenced_map", REF_REF, db.DB_BTREE, db.DB_DUPSORT), ] for (dbmap, dbname, dbtype, dbflags) in db_maps: _db = self.__open_db(self.full_name, dbname, dbtype, db.DB_DUP | dbflags) setattr(self, dbmap, _db) if not self.readonly: assoc = [ (self.person_map, self.surnames, find_surname), (self.person_map, self.id_trans, find_idmap), (self.family_map, self.fid_trans, find_idmap), (self.event_map, self.eid_trans, find_idmap), (self.place_map, self.pid_trans, find_idmap), (self.source_map, self.sid_trans, find_idmap), (self.citation_map, self.cid_trans, find_idmap), (self.media_map, self.oid_trans, find_idmap), (self.repository_map, self.rid_trans, find_idmap), (self.note_map, self.nid_trans, find_idmap), (self.tag_map, self.tag_trans, find_idmap), (self.reference_map, self.reference_map_primary_map, find_primary_handle), (self.reference_map, self.reference_map_referenced_map, find_referenced_handle), ] flags = DBFLAGS_R if self.readonly else DBFLAGS_O for (dbmap, a_map, a_find) in assoc: dbmap.associate(a_map, a_find, flags=flags) self.secondary_connected = True self.smap_index = len(self.source_map) self.cmap_index = len(self.citation_map) self.emap_index = len(self.event_map) self.pmap_index = len(self.person_map) self.fmap_index = len(self.family_map) self.lmap_index = len(self.place_map) self.omap_index = len(self.media_map) self.rmap_index = len(self.repository_map) self.nmap_index = len(self.note_map) @catch_db_error def rebuild_secondary(self, callback=None): if self.readonly: return table_flags = DBFLAGS_O # remove existing secondary indices items = [ ( self.id_trans, IDTRANS ), ( self.surnames, SURNAMES ), ( self.fid_trans, FIDTRANS ), ( self.pid_trans, PIDTRANS ), ( self.oid_trans, OIDTRANS ), ( self.eid_trans, EIDTRANS ), ( self.rid_trans, RIDTRANS ), ( self.nid_trans, NIDTRANS ), ( self.cid_trans, CIDTRANS ), ( self.tag_trans, TAGTRANS ), ( self.reference_map_primary_map, REF_PRI), ( self.reference_map_referenced_map, REF_REF), ] index = 1 for (database, name) in items: database.close() _db = db.DB(self.env) try: _db.remove(_mkname(self.full_name, name), name) except db.DBNoSuchFileError: pass if callback: callback(index) index += 1 if callback: callback(11) # Set flag saying that we have removed secondary indices # and then call the creating routine self.secondary_connected = False self.__connect_secondary() if callback: callback(12) @catch_db_error def find_backlink_handles(self, handle, include_classes=None): """ Find all objects that hold a reference to the object handle. Returns an interator over a list of (class_name, handle) tuples. :param handle: handle of the object to search for. :type handle: database handle :param include_classes: list of class names to include in the results. Default: None means include all classes. :type include_classes: list of class names Note that this is a generator function, it returns a iterator for use in loops. 

        If you want a list of the results use::

            result_list = list(find_backlink_handles(handle))
        """
        handle = str(handle)
        # Use the secondary index to locate all the reference_map entries
        # that include a reference to the object we are looking for.
        referenced_cur = self.get_reference_map_referenced_cursor()

        try:
            ret = referenced_cur.set(handle)
        except:
            ret = None

        while (ret is not None):
            (key, data) = ret

            # data values are of the form:
            #   ((primary_object_class_name, primary_object_handle),
            #    (referenced_object_class_name, referenced_object_handle))
            # so we need the first tuple to give us the type to compare

            ### FIXME: this is a dirty hack that works, with no sensible
            ### explanation. For some reason, for a readonly database,
            ### the secondary index returns a primary table key
            ### corresponding to the data, not the data itself.
            if self.readonly:
                data = self.reference_map.get(data)
            else:
                data = pickle.loads(data)

            key, handle = data[0][:2]
            name = KEY_TO_CLASS_MAP[key]
            assert name == KEY_TO_CLASS_MAP[data[0][0]]
            assert handle == data[0][1]
            if (include_classes is None or name in include_classes):
                yield (name, handle)

            ret = referenced_cur.next_dup()

        referenced_cur.close()

    def delete_primary_from_reference_map(self, handle, transaction,
                                          txn=None):
        """
        Remove all references to the primary object from the reference_map.
        """
        primary_cur = self.get_reference_map_primary_cursor()

        try:
            ret = primary_cur.set(handle)
        except:
            ret = None

        remove_list = set()
        while (ret is not None):
            (key, data) = ret

            # data values are of the form:
            #   ((primary_object_class_name, primary_object_handle),
            #    (referenced_object_class_name, referenced_object_handle))
            # so we need the second tuple to give us a reference that we
            # can combine with the primary_handle to get the main key.
            main_key = (handle, pickle.loads(data)[1][1])

            # The trick is not to remove while inside the cursor,
            # but collect them all and remove after the cursor is closed
            remove_list.add(main_key)

            ret = primary_cur.next_dup()

        primary_cur.close()

        # Now that the cursor is closed, we can remove things
        for main_key in remove_list:
            self.__remove_reference(main_key, transaction, txn)

    def update_reference_map(self, obj, transaction, txn=None):
        """
        If txn is given, then changes are written right away using txn.
        """

        # Add references to the reference_map for all primary objects
        # referenced from the primary object 'obj' or any of its
        # secondary objects.
        handle = obj.handle
        existing_references = set()
        primary_cur = self.get_reference_map_primary_cursor()
        try:
            ret = primary_cur.set(handle)
        except:
            ret = None

        while (ret is not None):
            (key, data) = ret

            # data values are of the form:
            #   ((primary_object_class_name, primary_object_handle),
            #    (referenced_object_class_name, referenced_object_handle))
            # so we need the second tuple to give us a reference that we
            # can compare with what is returned from
            # get_referenced_handles_recursively.

            # secondary DBs are not DBShelf's, so we need to do pickling
            # and unpickling ourselves here
            existing_reference = pickle.loads(data)[1]
            existing_references.add(
                (KEY_TO_CLASS_MAP[existing_reference[0]],
                 existing_reference[1]))
            ret = primary_cur.next_dup()

        primary_cur.close()

        # Once we have the list of rows that already have a reference
        # we need to compare it with the list of objects that are
        # still referenced from the primary object.
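        # Illustrative example (hypothetical handles): if 'obj' previously
        # referenced only note 'n1' but now references only note 'n2',
        # then ('Note', 'n1') ends up in no_longer_required_references and
        # ('Note', 'n2') in new_references computed below.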
        current_references = set(obj.get_referenced_handles_recursively())
        no_longer_required_references = existing_references.difference(
            current_references)
        new_references = current_references.difference(existing_references)

        # handle addition of new references
        for (ref_class_name, ref_handle) in new_references:
            data = ((CLASS_TO_KEY_MAP[obj.__class__.__name__], handle),
                    (CLASS_TO_KEY_MAP[ref_class_name], ref_handle),)
            self.__add_reference((handle, ref_handle), data, transaction,
                                 txn)

        # handle deletion of old references
        for (ref_class_name, ref_handle) in no_longer_required_references:
            try:
                self.__remove_reference((handle, ref_handle), transaction,
                                        txn)
            except:
                # ignore missing old reference
                pass

    def __remove_reference(self, key, transaction, txn):
        """
        Remove the reference specified by the key, preserving the change in
        the passed transaction.
        """
        if not self.readonly:
            if not transaction.batch:
                old_data = self.reference_map.get(str(key), txn=txn)
                transaction.add(REFERENCE_KEY, TXNDEL, str(key), old_data,
                                None)
                #transaction.reference_del.append(str(key))
            self.reference_map.delete(str(key), txn=txn)

    def __add_reference(self, key, data, transaction, txn):
        """
        Add the reference specified by the key and the data, preserving the
        change in the passed transaction.
        """

        if self.readonly or not key:
            return

        self.reference_map.put(str(key), data, txn=txn)
        if not transaction.batch:
            transaction.add(REFERENCE_KEY, TXNADD, str(key), None, data)
            #transaction.reference_add.append((str(key), data))

    @catch_db_error
    def reindex_reference_map(self, callback):
        """
        Reindex all primary records in the database.

        This will be a slow process for large databases.
        """

        # First, remove the reference map and related tables
        db_maps = [
            ("reference_map_referenced_map", REF_REF),
            ("reference_map_primary_map", REF_PRI),
            ("reference_map", REF_MAP),
        ]
        for index, (dbmap, dbname) in enumerate(db_maps):
            getattr(self, dbmap).close()
            _db = db.DB(self.env)
            try:
                _db.remove(_mkname(self.full_name, dbname), dbname)
            except db.DBNoSuchFileError:
                pass
            callback(index+1)

        # Open reference_map and primary map
        self.reference_map = self.__open_shelf(self.full_name, REF_MAP,
                                               dbtype=db.DB_BTREE)

        self.reference_map_primary_map = self.__open_db(
            self.full_name, REF_PRI, db.DB_BTREE, db.DB_DUP)

        self.reference_map.associate(self.reference_map_primary_map,
                                     find_primary_handle, DBFLAGS_O)

        # Make a tuple of the functions and classes that we need for
        # each of the primary object tables.
        with DbTxn(_("Rebuild reference map"), self, batch=True,
                   no_magic=True) as transaction:
            callback(4)

            primary_table = (
                (self.get_person_cursor, Person),
                (self.get_family_cursor, Family),
                (self.get_event_cursor, Event),
                (self.get_place_cursor, Place),
                (self.get_source_cursor, Source),
                (self.get_citation_cursor, Citation),
                (self.get_media_cursor, MediaObject),
                (self.get_repository_cursor, Repository),
                (self.get_note_cursor, Note),
                (self.get_tag_cursor, Tag),
            )

            # Now we use the functions and classes defined above
            # to loop through each of the primary object tables.
            for cursor_func, class_func in primary_table:
                with cursor_func() as cursor:
                    for found_handle, val in cursor:
                        obj = class_func()
                        obj.unserialize(val)
                        with BSDDBTxn(self.env) as txn:
                            self.update_reference_map(obj, transaction,
                                                      txn.txn)

            callback(5)

        self.reference_map_referenced_map = self.__open_db(
            self.full_name, REF_REF, db.DB_BTREE,
            db.DB_DUP | db.DB_DUPSORT)

        flags = DBFLAGS_R if self.readonly else DBFLAGS_O
        self.reference_map.associate(self.reference_map_referenced_map,
                                     find_referenced_handle, flags=flags)
        callback(6)

    def __close_metadata(self):
        if not self.readonly:
            # Start transaction
            with BSDDBTxn(self.env, self.metadata) as txn:

                # name display formats
                txn.put('name_formats', self.name_formats)

                # database owner
                owner_data = self.owner.serialize()
                txn.put('researcher', owner_data)

                # bookmarks
                txn.put('bookmarks', self.bookmarks.get())
                txn.put('family_bookmarks', self.family_bookmarks.get())
                txn.put('event_bookmarks', self.event_bookmarks.get())
                txn.put('source_bookmarks', self.source_bookmarks.get())
                txn.put('citation_bookmarks', self.citation_bookmarks.get())
                txn.put('place_bookmarks', self.place_bookmarks.get())
                txn.put('repo_bookmarks', self.repo_bookmarks.get())
                txn.put('media_bookmarks', self.media_bookmarks.get())
                txn.put('note_bookmarks', self.note_bookmarks.get())

                # gender stats
                txn.put('gender_stats', self.genderStats.save_stats())

                # Custom type values
                txn.put('fevent_names', list(self.family_event_names))
                txn.put('pevent_names', list(self.individual_event_names))
                txn.put('fattr_names', list(self.family_attributes))
                txn.put('pattr_names', list(self.individual_attributes))
                txn.put('marker_names', list(self.marker_names))
                txn.put('child_refs', list(self.child_ref_types))
                txn.put('family_rels', list(self.family_rel_types))
                txn.put('event_roles', list(self.event_role_names))
                txn.put('name_types', list(self.name_types))
                txn.put('origin_types', list(self.origin_types))
                txn.put('repo_types', list(self.repository_types))
                txn.put('note_types', list(self.note_types))
                txn.put('sm_types', list(self.source_media_types))
                txn.put('url_types', list(self.url_types))
                txn.put('mattr_names', list(self.media_attributes))

                # surname list
                txn.put('surname_list', self.surname_list)

        self.metadata.close()

    def __close_early(self):
        """
        Bail out if an incompatible version is discovered:
        * close cleanly to not damage data/env
        """
        if hasattr(self, 'metadata') and self.metadata:
            self.metadata.close()
        self.env.close()
        self.metadata = None
        self.env = None
        self.db_is_open = False

    @catch_db_error
    def close(self):
        if not self.db_is_open:
            return
        if self.txn:
            self.transaction_abort(self.transaction)
        self.env.txn_checkpoint()

        lockstats = self.env.lock_stat()
        _LOG.debug("lock occupancy: %d%%, locked object occupancy: %d%%" % (
            round(lockstats['maxnlocks']*100/lockstats['maxlocks']),
            round(lockstats['maxnobjects']*100/lockstats['maxobjects'])))

        self.__close_metadata()
        self.name_group.close()
        self.surnames.close()
        self.id_trans.close()
        self.fid_trans.close()
        self.eid_trans.close()
        self.rid_trans.close()
        self.nid_trans.close()
        self.oid_trans.close()
        self.sid_trans.close()
        self.cid_trans.close()
        self.pid_trans.close()
        self.tag_trans.close()
        self.reference_map_primary_map.close()
        self.reference_map_referenced_map.close()
        self.reference_map.close()
        self.secondary_connected = False

        # primary databases must be closed after secondary indexes, or
        # we run into problems with any active cursors.
        self.person_map.close()
        self.family_map.close()
        self.repository_map.close()
        self.note_map.close()
        self.place_map.close()
        self.source_map.close()
        self.citation_map.close()
        self.media_map.close()
        self.event_map.close()
        self.tag_map.close()
        self.env.close()
        self.__close_undodb()

        self.person_map = None
        self.family_map = None
        self.repository_map = None
        self.note_map = None
        self.place_map = None
        self.source_map = None
        self.citation_map = None
        self.media_map = None
        self.event_map = None
        self.tag_map = None
        self.surnames = None
        self.env = None
        self.metadata = None
        self.db_is_open = False
        self.surname_list = None

        DbBsddbRead.close(self)

        self.person_map = None
        self.family_map = None
        self.repository_map = None
        self.note_map = None
        self.place_map = None
        self.source_map = None
        self.citation_map = None
        self.media_map = None
        self.event_map = None
        self.tag_map = None
        self.reference_map_primary_map = None
        self.reference_map_referenced_map = None
        self.reference_map = None
        self.undo_callback = None
        self.redo_callback = None
        self.undo_history_callback = None
        self.undodb = None

        try:
            clear_lock_file(self.get_save_path())
        except IOError:
            pass

    def create_id(self):
        return "%08x%08x" % (int(time.time()*10000),
                             self.rand.randint(0, maxint))

    def __add_object(self, obj, transaction, find_next_func, commit_func):
        if find_next_func and not obj.gramps_id:
            obj.gramps_id = find_next_func()
        if not obj.handle:
            obj.handle = self.create_id()
        commit_func(obj, transaction)
        return obj.handle

    def add_person(self, person, transaction, set_gid=True):
        """
        Add a Person to the database, assigning internal IDs if they have
        not already been defined.

        If not set_gid, then gramps_id is not set.
        """
        handle = self.__add_object(person, transaction,
                    self.find_next_person_gramps_id if set_gid else None,
                    self.commit_person)
        self.genderStats.count_person(person)
        return handle

    def add_family(self, family, transaction, set_gid=True):
        """
        Add a Family to the database, assigning internal IDs if they have
        not already been defined.

        If not set_gid, then gramps_id is not set.
        """
        return self.__add_object(family, transaction,
                    self.find_next_family_gramps_id if set_gid else None,
                    self.commit_family)

    def add_source(self, source, transaction, set_gid=True):
        """
        Add a Source to the database, assigning internal IDs if they have
        not already been defined.

        If not set_gid, then gramps_id is not set.
        """
        return self.__add_object(source, transaction,
                    self.find_next_source_gramps_id if set_gid else None,
                    self.commit_source)

    def add_citation(self, citation, transaction, set_gid=True):
        """
        Add a Citation to the database, assigning internal IDs if they have
        not already been defined.

        If not set_gid, then gramps_id is not set.
        """
        return self.__add_object(citation, transaction,
                    self.find_next_citation_gramps_id if set_gid else None,
                    self.commit_citation)

    def add_event(self, event, transaction, set_gid=True):
        """
        Add an Event to the database, assigning internal IDs if they have
        not already been defined.

        If not set_gid, then gramps_id is not set.
        """
        return self.__add_object(event, transaction,
                    self.find_next_event_gramps_id if set_gid else None,
                    self.commit_event)

    def add_person_event(self, event, transaction):
        """
        Add an Event to the database, assigning internal IDs if they have
        not already been defined.
        """
        if event.type.is_custom():
            self.individual_event_names.add(str(event.type))
        return self.add_event(event, transaction)

    def add_family_event(self, event, transaction):
        """
        Add an Event to the database, assigning internal IDs if they have
        not already been defined.
""" if event.type.is_custom(): self.family_event_names.add(str(event.type)) return self.add_event(event, transaction) def add_place(self, place, transaction, set_gid=True): """ Add a Place to the database, assigning internal IDs if they have not already been defined. If not set_gid, then gramps_id is not set. """ return self.__add_object(place, transaction, self.find_next_place_gramps_id if set_gid else None, self.commit_place) def add_object(self, obj, transaction, set_gid=True): """ Add a MediaObject to the database, assigning internal IDs if they have not already been defined. If not set_gid, then gramps_id is not set. """ return self.__add_object(obj, transaction, self.find_next_object_gramps_id if set_gid else None, self.commit_media_object) def add_repository(self, obj, transaction, set_gid=True): """ Add a Repository to the database, assigning internal IDs if they have not already been defined. If not set_gid, then gramps_id is not set. """ return self.__add_object(obj, transaction, self.find_next_repository_gramps_id if set_gid else None, self.commit_repository) def add_note(self, obj, transaction, set_gid=True): """ Add a Note to the database, assigning internal IDs if they have not already been defined. If not set_gid, then gramps_id is not set. """ return self.__add_object(obj, transaction, self.find_next_note_gramps_id if set_gid else None, self.commit_note) def add_tag(self, obj, transaction): """ Add a Tag to the database, assigning a handle if it has not already been defined. """ return self.__add_object(obj, transaction, None, self.commit_tag) def __do_remove(self, handle, transaction, data_map, key): if self.readonly or not handle: return handle = str(handle) if transaction.batch: with BSDDBTxn(self.env, data_map) as txn: self.delete_primary_from_reference_map(handle, transaction, txn=txn.txn) txn.delete(handle) else: self.delete_primary_from_reference_map(handle, transaction, txn=self.txn) old_data = data_map.get(handle, txn=self.txn) data_map.delete(handle, txn=self.txn) transaction.add(key, TXNDEL, handle, old_data, None) def remove_person(self, handle, transaction): """ Remove the Person specified by the database handle from the database, preserving the change in the passed transaction. """ if self.readonly or not handle: return person = self.get_person_from_handle(handle) self.genderStats.uncount_person (person) self.remove_from_surname_list(person) if transaction.batch: with BSDDBTxn(self.env, self.person_map) as txn: self.delete_primary_from_reference_map(handle, transaction, txn=txn.txn) txn.delete(handle) else: self.delete_primary_from_reference_map(handle, transaction, txn=self.txn) self.person_map.delete(str(handle), txn=self.txn) transaction.add(PERSON_KEY, TXNDEL, handle, person.serialize(), None) def remove_source(self, handle, transaction): """ Remove the Source specified by the database handle from the database, preserving the change in the passed transaction. """ self.__do_remove(handle, transaction, self.source_map, SOURCE_KEY) def remove_citation(self, handle, transaction): """ Remove the Citation specified by the database handle from the database, preserving the change in the passed transaction. """ self.__do_remove(handle, transaction, self.citation_map, CITATION_KEY) def remove_event(self, handle, transaction): """ Remove the Event specified by the database handle from the database, preserving the change in the passed transaction. 
""" self.__do_remove(handle, transaction, self.event_map, EVENT_KEY) def remove_object(self, handle, transaction): """ Remove the MediaObjectPerson specified by the database handle from the database, preserving the change in the passed transaction. """ self.__do_remove(handle, transaction, self.media_map, MEDIA_KEY) def remove_place(self, handle, transaction): """ Remove the Place specified by the database handle from the database, preserving the change in the passed transaction. """ self.__do_remove(handle, transaction, self.place_map, PLACE_KEY) def remove_family(self, handle, transaction): """ Remove the Family specified by the database handle from the database, preserving the change in the passed transaction. """ self.__do_remove(handle, transaction, self.family_map, FAMILY_KEY) def remove_repository(self, handle, transaction): """ Remove the Repository specified by the database handle from the database, preserving the change in the passed transaction. """ self.__do_remove(handle, transaction, self.repository_map, REPOSITORY_KEY) def remove_note(self, handle, transaction): """ Remove the Note specified by the database handle from the database, preserving the change in the passed transaction. """ self.__do_remove(handle, transaction, self.note_map, NOTE_KEY) def remove_tag(self, handle, transaction): """ Remove the Tag specified by the database handle from the database, preserving the change in the passed transaction. """ self.__do_remove(handle, transaction, self.tag_map, TAG_KEY) @catch_db_error def set_name_group_mapping(self, name, group): if not self.readonly: # Start transaction with BSDDBTxn(self.env, self.name_group) as txn: sname = str(name) data = txn.get(sname) if data is not None: txn.delete(sname) if group is not None: txn.put(sname, group) if group == None: grouppar = u'' else: grouppar = group self.emit('person-groupname-rebuild', (name, grouppar)) def sort_surname_list(self): self.surname_list.sort(key=locale.strxfrm) @catch_db_error def build_surname_list(self): """ Build surname list for use in autocompletion """ self.surname_list = sorted(map(unicode, set(self.surnames.keys())), key=locale.strxfrm) def add_to_surname_list(self, person, batch_transaction): """ Add surname to surname list """ if batch_transaction: return name = unicode(find_surname_name(person.handle, person.get_primary_name().serialize())) i = bisect.bisect(self.surname_list, name) if 0 < i <= len(self.surname_list): if self.surname_list[i-1] != name: self.surname_list.insert(i, name) else: self.surname_list.insert(i, name) @catch_db_error def remove_from_surname_list(self, person): """ Check whether there are persons with the same surname left in the database. If not then we need to remove the name from the list. The function must be overridden in the derived class. """ name = str(find_surname_name(person.handle, person.get_primary_name().serialize())) try: cursor = self.surnames.cursor(txn=self.txn) cursor_position = cursor.set(name) if cursor_position is not None and cursor.count() == 1: i = bisect.bisect(self.surname_list, name) if 0 <= i-1 < len(self.surname_list): del self.surname_list[i-1] except db.DBError, err: if str(err) == "(0, 'DB object has been closed')": pass # A batch transaction closes the surnames db table. else: raise finally: if 'cursor' in locals(): cursor.close() def commit_base(self, obj, data_map, key, transaction, change_time): """ Commit the specified object to the database, storing the changes as part of the transaction. 
""" if self.readonly or not obj or not obj.handle: return obj.change = int(change_time or time.time()) handle = str(obj.handle) self.update_reference_map(obj, transaction, self.txn) new_data = obj.serialize() old_data = None if not transaction.batch: old_data = data_map.get(handle, txn=self.txn) op = TXNUPD if old_data else TXNADD transaction.add(key, op, handle, old_data, new_data) data_map.put(handle, new_data, txn=self.txn) return old_data def commit_person(self, person, transaction, change_time=None): """ Commit the specified Person to the database, storing the changes as part of the transaction. """ old_data = self.commit_base( person, self.person_map, PERSON_KEY, transaction, change_time) if old_data: old_person = Person(old_data) # Update gender statistics if necessary if (old_person.gender != person.gender or old_person.primary_name.first_name != person.primary_name.first_name): self.genderStats.uncount_person(old_person) self.genderStats.count_person(person) # Update surname list if necessary if (find_surname_name(old_person.handle, old_person.primary_name.serialize()) != find_surname_name(person.handle, person.primary_name.serialize())): self.remove_from_surname_list(old_person) self.add_to_surname_list(person, transaction.batch) else: self.genderStats.count_person(person) self.add_to_surname_list(person, transaction.batch) self.individual_attributes.update( [str(attr.type) for attr in person.attribute_list if attr.type.is_custom() and str(attr.type)]) self.event_role_names.update([str(eref.role) for eref in person.event_ref_list if eref.role.is_custom()]) self.name_types.update([str(name.type) for name in ([person.primary_name] + person.alternate_names) if name.type.is_custom()]) all_surn = [] # new list we will use for storage all_surn += person.primary_name.get_surname_list() for asurname in person.alternate_names: all_surn += asurname.get_surname_list() self.origin_types.update([str(surn.origintype) for surn in all_surn if surn.origintype.is_custom()]) all_surn = None self.url_types.update([str(url.type) for url in person.urls if url.type.is_custom()]) attr_list = [] for mref in person.media_list: attr_list += [str(attr.type) for attr in mref.attribute_list if attr.type.is_custom() and str(attr.type)] self.media_attributes.update(attr_list) def commit_media_object(self, obj, transaction, change_time=None): """ Commit the specified MediaObject to the database, storing the changes as part of the transaction. """ self.commit_base(obj, self.media_map, MEDIA_KEY, transaction, change_time) self.media_attributes.update( [str(attr.type) for attr in obj.attribute_list if attr.type.is_custom() and str(attr.type)]) def commit_source(self, source, transaction, change_time=None): """ Commit the specified Source to the database, storing the changes as part of the transaction. """ self.commit_base(source, self.source_map, SOURCE_KEY, transaction, change_time) self.source_media_types.update( [str(ref.media_type) for ref in source.reporef_list if ref.media_type.is_custom()]) attr_list = [] for mref in source.media_list: attr_list += [str(attr.type) for attr in mref.attribute_list if attr.type.is_custom() and str(attr.type)] self.media_attributes.update(attr_list) def commit_citation(self, citation, transaction, change_time=None): """ Commit the specified Citation to the database, storing the changes as part of the transaction. 
""" self.commit_base(citation, self.citation_map, CITATION_KEY, transaction, change_time) attr_list = [] for mref in citation.media_list: attr_list += [str(attr.type) for attr in mref.attribute_list if attr.type.is_custom() and str(attr.type)] self.media_attributes.update(attr_list) def commit_place(self, place, transaction, change_time=None): """ Commit the specified Place to the database, storing the changes as part of the transaction. """ self.commit_base(place, self.place_map, PLACE_KEY, transaction, change_time) self.url_types.update([str(url.type) for url in place.urls if url.type.is_custom()]) attr_list = [] for mref in place.media_list: attr_list += [str(attr.type) for attr in mref.attribute_list if attr.type.is_custom() and str(attr.type)] self.media_attributes.update(attr_list) def commit_personal_event(self, event, transaction, change_time=None): if event.type.is_custom(): self.individual_event_names.add(str(event.type)) self.commit_event(event, transaction, change_time) def commit_family_event(self, event, transaction, change_time=None): if event.type.is_custom(): self.family_event_names.add(str(event.type)) self.commit_event(event, transaction, change_time) def commit_event(self, event, transaction, change_time=None): """ Commit the specified Event to the database, storing the changes as part of the transaction. """ self.commit_base(event, self.event_map, EVENT_KEY, transaction, change_time) attr_list = [] for mref in event.media_list: attr_list += [str(attr.type) for attr in mref.attribute_list if attr.type.is_custom() and str(attr.type)] self.media_attributes.update(attr_list) def commit_family(self, family, transaction, change_time=None): """ Commit the specified Family to the database, storing the changes as part of the transaction. """ self.commit_base(family, self.family_map, FAMILY_KEY, transaction, change_time) self.family_attributes.update( [str(attr.type) for attr in family.attribute_list if attr.type.is_custom() and str(attr.type)]) rel_list = [] for ref in family.child_ref_list: if ref.frel.is_custom(): rel_list.append(str(ref.frel)) if ref.mrel.is_custom(): rel_list.append(str(ref.mrel)) self.child_ref_types.update(rel_list) self.event_role_names.update( [str(eref.role) for eref in family.event_ref_list if eref.role.is_custom()]) if family.type.is_custom(): self.family_rel_types.add(str(family.type)) attr_list = [] for mref in family.media_list: attr_list += [str(attr.type) for attr in mref.attribute_list if attr.type.is_custom() and str(attr.type)] self.media_attributes.update(attr_list) def commit_repository(self, repository, transaction, change_time=None): """ Commit the specified Repository to the database, storing the changes as part of the transaction. """ self.commit_base(repository, self.repository_map, REPOSITORY_KEY, transaction, change_time) if repository.type.is_custom(): self.repository_types.add(str(repository.type)) self.url_types.update([str(url.type) for url in repository.urls if url.type.is_custom()]) def commit_note(self, note, transaction, change_time=None): """ Commit the specified Note to the database, storing the changes as part of the transaction. """ self.commit_base(note, self.note_map, NOTE_KEY, transaction, change_time) if note.type.is_custom(): self.note_types.add(str(note.type)) def commit_tag(self, tag, transaction, change_time=None): """ Commit the specified Tag to the database, storing the changes as part of the transaction. 
""" self.commit_base(tag, self.tag_map, TAG_KEY, transaction, change_time) def get_from_handle(self, handle, class_type, data_map): try: data = data_map.get(str(handle), txn=self.txn) except: data = None # under certain circumstances during a database reload, # data_map can be none. If so, then don't report an error if data_map: _LOG.error("Failed to get from handle", exc_info=True) if data: newobj = class_type() newobj.unserialize(data) return newobj return None @catch_db_error def transaction_begin(self, transaction): """ Prepare the database for the start of a new Transaction. Supported transaction parameters: no_magic: Boolean, defaults to False, indicating if secondary indices should be disconnected. """ if self.txn is not None: msg = self.transaction.get_description() self.transaction_abort(self.transaction) raise Errors.DbError(_('A second transaction is started while there' ' is still a transaction, "%s", active in the database.') % msg) if not isinstance(transaction, DbTxn) or len(transaction) != 0: raise TypeError("transaction_begin must be called with an empty " "instance of DbTxn which typically happens by using the " "DbTxn instance as a context manager.") self.transaction = transaction if transaction.batch: # A batch transaction does not store the commits # Aborting the session completely will become impossible. self.abort_possible = False # Undo is also impossible after batch transaction self.undodb.clear() self.env.txn_checkpoint() if (self.secondary_connected and not getattr(transaction, 'no_magic', False)): # Disconnect unneeded secondary indices self.surnames.close() _db = db.DB(self.env) try: _db.remove(_mkname(self.full_name, SURNAMES), SURNAMES) except db.DBNoSuchFileError: pass self.reference_map_referenced_map.close() _db = db.DB(self.env) try: _db.remove(_mkname(self.full_name, REF_REF), REF_REF) except db.DBNoSuchFileError: pass else: self.bsddbtxn = BSDDBTxn(self.env) self.txn = self.bsddbtxn.begin() return transaction @catch_db_error def transaction_commit(self, transaction): """ Make the changes to the database final and add the content of the transaction to the undo database. """ msg = transaction.get_description() if self._LOG_ALL: _LOG.debug("%s: Transaction commit '%s'\n" % (self.__class__.__name__, msg)) if self.readonly: return if self.txn is not None: assert msg != '' self.bsddbtxn.commit() self.bsddbtxn = None self.txn = None self.env.log_flush() if not transaction.batch: emit = self.__emit for obj_type, obj_name in KEY_TO_NAME_MAP.iteritems(): emit(transaction, obj_type, TXNADD, obj_name, '-add') emit(transaction, obj_type, TXNUPD, obj_name, '-update') emit(transaction, obj_type, TXNDEL, obj_name, '-delete') self.transaction = None transaction.clear() self.undodb.commit(transaction, msg) self.__after_commit(transaction) self.has_changed = True def __emit(self, transaction, obj_type, trans_type, obj, suffix): """ Define helper function to do the actual emits """ if (obj_type, trans_type) in transaction: if trans_type == TXNDEL: handles = [handle for handle, data in transaction[(obj_type, trans_type)]] else: handles = [handle for handle, data in transaction[(obj_type, trans_type)] if (handle, None) not in transaction[(obj_type, TXNDEL)]] if handles: self.emit(obj + suffix, (handles, )) def transaction_abort(self, transaction): """ Revert the changes made to the database so far during the transaction. 
""" if self._LOG_ALL: _LOG.debug("%s: Transaction abort '%s'\n" % (self.__class__.__name__, transaction.get_description())) if self.readonly: return if self.txn is not None: self.bsddbtxn.abort() self.bsddbtxn = None self.txn = None if not transaction.batch: # It can occur that the listview is already updated because of # the "model-treeview automatic update" combined with a # "while gtk.events_pending(): gtk.main_iteration() loop" # (typically used in a progress bar), so emit rebuild signals # to correct that. object_types = set([x[0] for x in transaction.keys()]) for object_type in object_types: if object_type == REFERENCE_KEY: continue self.emit('%s-rebuild' % KEY_TO_NAME_MAP[object_type], ()) self.transaction = None transaction.clear() transaction.first = None transaction.last = None self.__after_commit(transaction) def __after_commit(self, transaction): """ Post-transaction commit processing """ if transaction.batch: self.env.txn_checkpoint() if not getattr(transaction, 'no_magic', False): # create new secondary indices to replace the ones removed self.surnames = self.__open_db(self.full_name, SURNAMES, db.DB_BTREE, db.DB_DUP | db.DB_DUPSORT) self.person_map.associate(self.surnames, find_surname, DBFLAGS_O) self.reference_map_referenced_map = self.__open_db(self.full_name, REF_REF, db.DB_BTREE, db.DB_DUP|db.DB_DUPSORT) self.reference_map.associate(self.reference_map_referenced_map, find_referenced_handle, DBFLAGS_O) # Only build surname list after surname index is surely back self.build_surname_list() # Reset callbacks if necessary if transaction.batch or not len(transaction): return if self.undo_callback: self.undo_callback(_("_Undo %s") % transaction.get_description()) if self.redo_callback: self.redo_callback(None) if self.undo_history_callback: self.undo_history_callback() def undo(self, update_history=True): self.undodb.undo(update_history) return def redo(self, update_history=True): self.undodb.redo(update_history) return def gramps_upgrade(self, callback=None): UpdateCallback.__init__(self, callback) version = self.metadata.get('version', default=_MINVERSION) t = time.time() import upgrade if version < 14: upgrade.gramps_upgrade_14(self) if version < 15: upgrade.gramps_upgrade_15(self) if version < 16: self.__connect_secondary() # Open undo database self.__open_undodb() self.db_is_open = True upgrade.gramps_upgrade_16(self) self.reset() self.set_total(6) self.reindex_reference_map(self.update) self.reset() # Close undo database self.__close_undodb() self.db_is_open = False _LOG.debug("Upgrade time: %d seconds" % int(time.time()-t)) def set_auto_remove(self): """ BSDDB change log settings using new method with renamed attributes """ autoremove_flag = None autoremove_method = None for flag in ["DB_LOG_AUTO_REMOVE", "DB_LOG_AUTOREMOVE"]: if hasattr(db, flag): autoremove_flag = getattr(db, flag) break for method in ["log_set_config", "set_flags"]: if hasattr(self.env, method): autoremove_method = getattr(self.env, method) break if autoremove_method and autoremove_flag: autoremove_method(autoremove_flag, 1) else: _LOG.debug("Failed to set autoremove flag") def write_version(self, name): """Write version number for a newly created DB.""" full_name = os.path.abspath(name) self.env = db.DBEnv() self.env.set_cachesize(0, DBCACHE) # These env settings are only needed for Txn environment self.env.set_lk_max_locks(DBLOCKS) self.env.set_lk_max_objects(DBOBJECTS) # clean up unused logs self.set_auto_remove() # The DB_PRIVATE flag must go if we ever move to multi-user setup env_flags = 
        env_flags = db.DB_CREATE | db.DB_PRIVATE |\
                    db.DB_INIT_MPOOL | db.DB_INIT_LOCK |\
                    db.DB_INIT_LOG | db.DB_INIT_TXN | db.DB_THREAD

        # As opposed to before, we always try recovery on databases
        env_flags |= db.DB_RECOVER

        # Environment name is now based on the filename
        env_name = name

        self.env.open(env_name, env_flags)
        self.env.txn_checkpoint()

        self.metadata = self.__open_shelf(full_name, META)

        with BSDDBTxn(self.env, self.metadata) as txn:
            txn.put('version', _DBVERSION)

        self.metadata.close()
        self.env.close()

    def get_dbid(self):
        """
        In BSDDB, we use the file directory name as the unique ID for
        this database on this computer.
        """
        return self.brief_name


def _mkname(path, name):
    return os.path.join(path, name + DBEXT)

def clear_lock_file(name):
    try:
        os.unlink(os.path.join(name, DBLOCKFN))
    except OSError:
        return

def write_lock_file(name):
    if not os.path.isdir(name):
        os.mkdir(name)
    f = open(os.path.join(name, DBLOCKFN), "w")
    if constfunc.win():
        user = os.environ['USERNAME']
        try:
            host = os.environ['USERDOMAIN']
        except:
            host = ""
    else:
        host = os.uname()[1]
        # An ugly workaround for os.getlogin() issue with Konsole
        try:
            user = os.getlogin()
        except:
            user = os.environ.get('USER')
    if host:
        text = "%s@%s" % (user, host)
    else:
        text = user
    # Save only the username and host, so the message can be printed with
    # the correct locale in DbManager.py when a lock is found
    f.write(text)
    f.close()

def upgrade_researcher(owner_data):
    """
    Upgrade researcher data to include a locality field in the address.
    This should be called for databases prior to Gramps 3.3.
    """
    addr = tuple([owner_data[0][0], ''] + list(owner_data[0][1:]))
    return (addr, owner_data[1], owner_data[2], owner_data[3])


if __name__ == "__main__":

    import os, sys, pdb

    d = DbBsddb()
    if len(sys.argv) > 1:
        # use the path given on the command line
        db_path = sys.argv[1]
    else:
        db_home = os.path.join(os.environ['HOME'], '.gramps', 'grampsdb')
        for dir in os.listdir(db_home):
            db_path = os.path.join(db_home, dir)
            db_fn = os.path.join(db_path, 'name.txt')
            if os.stat(db_fn):
                f = open(db_fn)
                db_name = f.read()
                if db_name == 'Small Example':
                    break
    print "loading", db_path
    d.load(db_path, lambda x: x)

    print d.get_default_person()

    with d.get_person_cursor() as c:
        for key, data in c:
            person = Person(data)
            print key, person.get_primary_name().get_name(),

    print d.surnames.keys()
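
# A minimal write-session sketch (illustrative only: the database path is
# an assumption and error handling is omitted; DbTxn is used as a context
# manager, as transaction_begin above requires):
#
#     d = DbBsddb()
#     d.load('/path/to/.gramps/grampsdb/xxxxxxxx', lambda percent: None)
#     with DbTxn(_("Add person"), d) as trans:
#         person = Person()
#         d.add_person(person, trans)  # assigns gramps_id and handle
#     d.close()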