#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2009 Gerald W. Britton
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#

# $Id: bsddbtxn.py 12786 2009-07-11 15:32:37Z gbritton $

"""
BSDDBTxn class: Wrapper for BSDDB transaction-oriented methods
"""

#-------------------------------------------------------------------------
#
# BSDDBTxn
#
#-------------------------------------------------------------------------

class BSDDBTxn(object):
    """
    Wrapper for BSDDB methods that set up and manage transactions.  Implements
    context management functionality allowing constructs like:

        with BSDDBTxn(env) as txn:
            DB.get(txn=txn)
            DB.put(txn=txn)
            DB.delete(txn=txn)

    and other transaction-oriented DB access methods, where "env" is a
    BSDDB DBEnv object and "DB" is a BSDDB database object.

    Transactions are automatically begun when the "with" statement is
    executed and automatically committed when control flows off the end of
    the "with" statement context.  If an exception escapes the "with" body
    the transaction is aborted (releasing its locks) and the exception is
    re-raised.
    """

    __slots__ = ['env', 'db', 'txn', 'parent']

    def __init__(self, env, db=None):
        """
        Initialize transaction instance.

        :param env: BSDDB DBEnv object within which transactions run
        :param db: optional BSDDB database object used by the DB-level
                   convenience methods (get/pget/put/delete)
        """
        self.env = env
        self.db = db
        self.txn = None
        # BUGFIX: 'parent' is declared in __slots__ but was previously set
        # only in __enter__; touching it before entering the context
        # raised AttributeError.
        self.parent = None

    # Context manager methods

    def __enter__(self, parent=None, **kwargs):
        """
        Context manager entry method.

        Begin the transaction.  NOTE: the "with" statement always calls
        this with no arguments; 'parent' is only usable when __enter__ is
        invoked explicitly.
        """
        self.txn = self.begin(parent, **kwargs)
        self.parent = parent
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """
        Context manager exit method.

        Commit the transaction if no exception occurred; otherwise abort
        it so its locks and per-process resources are released, then let
        the exception propagate (return False).
        """
        if exc_type is not None:
            # BUGFIX: the original left the transaction dangling here,
            # leaking the DBTxn and any locks it held.
            self.abort()
            return False
        if self.txn:
            self.commit()
        return True

    # Methods implementing txn_ methods in DBEnv

    def begin(self, *args, **kwargs):
        """
        Create and begin a new transaction.  A DBTxn object is returned.
        """
        self.txn = self.env.txn_begin(*args, **kwargs)
        return self.txn

    def checkpoint(self, *args, **kwargs):
        """
        Flush the underlying memory pool, write a checkpoint record to the
        log and then flush the log.
        """
        if self.env:
            self.env.txn_checkpoint(*args, **kwargs)

    def stat(self):
        """
        Return a dictionary of transaction statistics.
        """
        if self.env:
            return self.env.txn_stat()

    def recover(self):
        """
        Return a list of tuples (GID, TXN) of transactions prepared but
        still unresolved.
        """
        if self.env:
            return self.env.txn_recover()

    # Methods implementing DBTxn methods

    def abort(self):
        """
        Abort the transaction.
        """
        if self.txn:
            self.txn.abort()
            self.txn = None

    def commit(self, flags=0):
        """
        End the transaction, committing any changes to the databases.
        """
        if self.txn:
            self.txn.commit(flags)
            self.txn = None

    def id(self):
        """
        Return the unique transaction id associated with the specified
        transaction.
        """
        if self.txn:
            return self.txn.id()

    def prepare(self, gid):
        """
        Initiate the beginning of a two-phase commit.
        """
        if self.txn:
            self.txn.prepare(gid)

    def discard(self):
        """
        Release all the per-process resources associated with the specified
        transaction, neither committing nor aborting the transaction.
        """
        if self.txn:
            self.txn.discard()
            self.txn = None

    # Methods implementing DB methods within the transaction context

    def get(self, key, default=None, txn=None, **kwargs):
        """
        Return the data object associated with key.
        """
        # BUGFIX: identity comparison ('is None', not '== None')
        if txn is None:
            txn = self.txn
        return self.db.get(key, default, txn, **kwargs)

    def pget(self, key, default=None, txn=None, **kwargs):
        """
        Return the primary key, given the secondary one, and associated data.
        """
        if txn is None:
            txn = self.txn
        return self.db.pget(key, default, txn, **kwargs)

    def put(self, key, data, txn=None, **kwargs):
        """
        Store the key/data pair in the database.
        """
        if txn is None:
            txn = self.txn
        return self.db.put(key, data, txn, **kwargs)

    def delete(self, key, txn=None, **kwargs):
        """
        Remove a key/data pair from the database.
        """
        if txn is None:
            txn = self.txn
        self.db.delete(key, txn, **kwargs)

# test code
if __name__ == "__main__":
    # BUGFIX: print statements converted to the parenthesized form, which
    # is valid in both Python 2 and Python 3.
    print("1")
    from bsddb import db, dbshelve
    print("2")
    x = db.DBEnv()
    print("3")
    x.open('/tmp', db.DB_CREATE | db.DB_PRIVATE |\
                   db.DB_INIT_MPOOL | db.DB_INIT_LOCK |\
                   db.DB_INIT_LOG | db.DB_INIT_TXN | db.DB_THREAD)
    print("4")
    d = dbshelve.DBShelf(x)
    print("5")
    #from tran import BSDDBTxn as T
    print("6")
    T = BSDDBTxn
    with T(x) as tx:
        print("stat %s" % tx.stat())
        print("id %s" % tx.id())
        tx.checkpoint()

# ---- next hunk of the original patch: src/gen/db/read.py (new file) ----
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N.
Allingham +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +# + +# $Id: read.py 12786 2009-07-11 15:32:37Z gburto01 $ + +""" +Read class for the GRAMPS databases. +""" +from __future__ import with_statement +#------------------------------------------------------------------------- +# +# libraries +# +#------------------------------------------------------------------------- +import cPickle +import time +import random +import locale +import os +from sys import maxint +from bsddb import db +from gettext import gettext as _ + +import logging + +#------------------------------------------------------------------------- +# +# GRAMPS libraries +# +#------------------------------------------------------------------------- +from gen.lib import (MediaObject, Person, Family, Source, Event, Place, + Repository, Note, GenderStats, Researcher) +from gen.db.dbconst import * +from gen.utils.callback import Callback +from gen.db.cursor import GrampsCursor +from gen.db.iterator import CursorIterator +from gen.db.base import GrampsDbBase +from Utils import create_id +import Errors + +LOG = logging.getLogger(DBLOGNAME) +#------------------------------------------------------------------------- +# +# constants +# +#------------------------------------------------------------------------- +from gen.db.dbconst import * + +_SIGBASE = 
('person', 'family', 'source', 'event', + 'media', 'place', 'repository', 'reference', 'note') + +DBERRS = (db.DBRunRecoveryError, db.DBAccessError, + db.DBPageNotFoundError, db.DBInvalidArgError) + +class GrampsDbBookmarks(object): + def __init__(self, default=[]): + self.bookmarks = list(default) # want a copy (not an alias) + + def set(self, new_list): + self.bookmarks = list(new_list) + + def get(self): + return self.bookmarks + + def append(self, item): + self.bookmarks.append(item) + + def append_list(self, blist): + self.bookmarks += blist + + def remove(self, item): + self.bookmarks.remove(item) + + def pop(self, item): + return self.bookmarks.pop(item) + + def insert(self, pos, item): + self.bookmarks.insert(pos, item) + +#------------------------------------------------------------------------- +# +# GrampsDBReadCursor +# +#------------------------------------------------------------------------- +class GrampsDbReadCursor(GrampsCursor): + + def __init__(self, source, txn=None): + self.cursor = source.db.cursor(txn) + self.source = source + +class GrampsDbRead(GrampsDbBase, Callback): + """ + GRAMPS database read access object. + """ + + # This holds a reference to the gramps Config module if + # it is available, it is setup by the factory methods. + __config__ = None + __signals__ = {} + # If this is True logging will be turned on. + try: + _LOG_ALL = int(os.environ.get('GRAMPS_SIGNAL', "0")) == 1 + except: + _LOG_ALL = False + + def __init__(self): + """ + Create a new GrampsDbRead instance. 
+ """ + + GrampsDbBase.__init__(self) + #Callback.__init__(self) + + self.set_person_id_prefix('I%04d') + self.set_object_id_prefix('O%04d') + self.set_family_id_prefix('F%04d') + self.set_source_id_prefix('S%04d') + self.set_place_id_prefix('P%04d') + self.set_event_id_prefix('E%04d') + self.set_repository_id_prefix('R%04d') + self.set_note_id_prefix('N%04d') + + self.readonly = False + self.rand = random.Random(time.time()) + self.smap_index = 0 + self.emap_index = 0 + self.pmap_index = 0 + self.fmap_index = 0 + self.lmap_index = 0 + self.omap_index = 0 + self.rmap_index = 0 + self.nmap_index = 0 + self.db_is_open = False + + self.family_event_names = set() + self.individual_event_names = set() + self.individual_attributes = set() + self.family_attributes = set() + self.marker_names = set() + self.child_ref_types = set() + self.family_rel_types = set() + self.event_role_names = set() + self.name_types = set() + self.repository_types = set() + self.note_types = set() + self.source_media_types = set() + self.url_types = set() + self.media_attributes = set() + + self.open = 0 + self.genderStats = GenderStats() + + self.undodb = [] + self.id_trans = {} + self.fid_trans = {} + self.pid_trans = {} + self.sid_trans = {} + self.oid_trans = {} + self.rid_trans = {} + self.nid_trans = {} + self.eid_trans = {} + self.env = None + self.person_map = {} + self.family_map = {} + self.place_map = {} + self.source_map = {} + self.repository_map = {} + self.note_map = {} + self.media_map = {} + self.event_map = {} + self.metadata = {} + self.name_group = {} + self.undo_callback = None + self.redo_callback = None + self.undo_history_callback = None + self.modified = 0 + + #self.undoindex = -1 + #self.translist = [None] * DBUNDO + self.abort_possible = True + #self.undo_history_timestamp = 0 + self.default = None + self.owner = Researcher() + self.name_formats = [] + self.bookmarks = GrampsDbBookmarks() + self.family_bookmarks = GrampsDbBookmarks() + self.event_bookmarks = 
GrampsDbBookmarks() + self.place_bookmarks = GrampsDbBookmarks() + self.source_bookmarks = GrampsDbBookmarks() + self.repo_bookmarks = GrampsDbBookmarks() + self.media_bookmarks = GrampsDbBookmarks() + self.note_bookmarks = GrampsDbBookmarks() + self._bm_changes = 0 + self.path = "" + self.surname_list = [] + self.txn = None + self.has_changed = False + + def set_prefixes(self, person, media, family, source, place, event, + repository, note): + self.person_prefix = self._validated_id_prefix(person, 'I') + self.mediaobject_prefix = self._validated_id_prefix(media, 'M') + self.family_prefix = self._validated_id_prefix(family, 'F') + self.source_prefix = self._validated_id_prefix(source, 'S') + self.place_prefix = self._validated_id_prefix(place, 'P') + self.event_prefix = self._validated_id_prefix(event, 'E') + self.repository_prefix = self._validated_id_prefix(repository, 'R') + self.note_prefix = self._validated_id_prefix(note, 'N') + + def version_supported(self): + """Return True when the file has a supported version.""" + return True + + def __get_cursor(self, table): + try: + return GrampsDbReadCursor(table, self.txn) + except DBERRS, msg: + self.__log_error() + raise Errors.DbError(msg) + + def get_person_cursor(self): + return self.__get_cursor(self.person_map) + + def get_family_cursor(self): + return self.__get_cursor(self.family_map) + + def get_event_cursor(self): + return self.__get_cursor(self.event_map) + + def get_place_cursor(self): + return self.__get_cursor(self.place_map) + + def get_source_cursor(self): + return self.__get_cursor(self.source_map) + + def get_media_cursor(self): + return self.__get_cursor(self.media_map) + + def get_repository_cursor(self): + return self.__get_cursor(self.repository_map) + + def get_note_cursor(self): + return self.__get_cursor(self.note_map) + + def get_person_cursor_iter(self, msg=_("Processing Person records")): + return CursorIterator(self, self.get_person_cursor(), msg) + + def get_family_cursor_iter(self, 
msg=_("Processing Family records")): + return CursorIterator(self, self.get_family_cursor(), msg) + + def get_event_cursor_iter(self, msg=_("Processing Event records")): + return CursorIterator(self, self.get_event_cursor(), msg) + + def get_place_cursor_iter(self, msg=_("Processing Place records")): + return CursorIterator(self, self.get_place_cursor(), msg) + + def get_source_cursor_iter(self, msg=_("Processing Source records")): + return CursorIterator(self, self.get_source_cursor(), msg) + + def get_media_cursor_iter(self, msg=_("Processing Media records")): + return CursorIterator(self, self.get_media_cursor(), msg) + + def get_repository_cursor_iter(self, msg=_("Processing Repository records")): + return CursorIterator(self, self.get_repository_cursor(), msg) + + def get_note_cursor_iter(self, msg=_("Processing Note records")): + return CursorIterator(self, self.get_note_cursor(), msg) + + def load(self, name, callback, mode=DBMODE_R): + """ + Open the specified database. + + The method needs to be overridden in the derived class. + """ + raise NotImplementedError + + def load_from(self, other_database, filename, callback): + """ + Load data from the other database into itself. + + The filename is the name of the file for the newly created database. + The method needs to be overridden in the derived class. + """ + raise NotImplementedError + + def close(self): + """ + Close the specified database. + + The method needs to be overridden in the derived class. + """ + pass + + def is_open(self): + """ + Return 1 if the database has been opened. + """ + return self.db_is_open + + def request_rebuild(self): + """ + Notify clients that the data has changed significantly, and that all + internal data dependent on the database should be rebuilt. 
+ """ + self.emit('person-rebuild') + self.emit('family-rebuild') + self.emit('place-rebuild') + self.emit('source-rebuild') + self.emit('media-rebuild') + self.emit('event-rebuild') + self.emit('repository-rebuild') + self.emit('note-rebuild') + + @staticmethod + def __find_next_gramps_id(prefix, map_index, trans): + """ + Helper function for find_next__gramps_id methods + """ + index = prefix % map_index + while trans.has_key(str(index)): + map_index += 1 + index = prefix % map_index + map_index += 1 + return index + + def find_next_person_gramps_id(self): + """ + Return the next available GRAMPS' ID for a Person object based off the + person ID prefix. + """ + return self.__find_next_gramps_id(self.person_prefix, + self.pmap_index, self.id_trans) + + def find_next_place_gramps_id(self): + """ + Return the next available GRAMPS' ID for a Place object based off the + place ID prefix. + """ + return self.__find_next_gramps_id(self.place_prefix, + self.lmap_index, self.pid_trans) + + def find_next_event_gramps_id(self): + """ + Return the next available GRAMPS' ID for a Event object based off the + event ID prefix. + """ + return self.__find_next_gramps_id(self.event_prefix, + self.emap_index, self.eid_trans) + + def find_next_object_gramps_id(self): + """ + Return the next available GRAMPS' ID for a MediaObject object based + off the media object ID prefix. + """ + return self.__find_next_gramps_id(self.mediaobject_prefix, + self.omap_index, self.oid_trans) + + def find_next_source_gramps_id(self): + """ + Return the next available GRAMPS' ID for a Source object based off the + source ID prefix. + """ + return self.__find_next_gramps_id(self.source_prefix, + self.smap_index, self.sid_trans) + + def find_next_family_gramps_id(self): + """ + Return the next available GRAMPS' ID for a Family object based off the + family ID prefix. 
+ """ + return self.__find_next_gramps_id(self.family_prefix, + self.fmap_index, self.fid_trans) + + def find_next_repository_gramps_id(self): + """ + Return the next available GRAMPS' ID for a Respository object based + off the repository ID prefix. + """ + return self.__find_next_gramps_id(self.repository_prefix, + self.rmap_index, self.rid_trans) + + def find_next_note_gramps_id(self): + """ + Return the next available GRAMPS' ID for a Note object based off the + note ID prefix. + """ + return self.__find_next_gramps_id(self.note_prefix, + self.nmap_index, self.nid_trans) + + def get_from_handle(self, handle, class_type, data_map): + data = data_map.get(str(handle)) + if data: + newobj = class_type() + newobj.unserialize(data) + return newobj + return None + + def get_person_from_handle(self, handle): + """ + Find a Person in the database from the passed gramps' ID. + + If no such Person exists, None is returned. + """ + return self.get_from_handle(handle, Person, self.person_map) + + def get_source_from_handle(self, handle): + """ + Find a Source in the database from the passed gramps' ID. + + If no such Source exists, None is returned. + """ + return self.get_from_handle(handle, Source, self.source_map) + + def get_object_from_handle(self, handle): + """ + Find an Object in the database from the passed gramps' ID. + + If no such Object exists, None is returned. + """ + return self.get_from_handle(handle, MediaObject, self.media_map) + + def get_place_from_handle(self, handle): + """ + Find a Place in the database from the passed gramps' ID. + + If no such Place exists, None is returned. + """ + return self.get_from_handle(handle, Place, self.place_map) + + def get_event_from_handle(self, handle): + """ + Find a Event in the database from the passed gramps' ID. + + If no such Event exists, None is returned. 
+ """ + return self.get_from_handle(handle, Event, self.event_map) + + def get_family_from_handle(self, handle): + """ + Find a Family in the database from the passed gramps' ID. + + If no such Family exists, None is returned. + """ + return self.get_from_handle(handle, Family, self.family_map) + + def get_repository_from_handle(self, handle): + """ + Find a Repository in the database from the passed gramps' ID. + + If no such Repository exists, None is returned. + """ + return self.get_from_handle(handle, Repository, self.repository_map) + + def get_note_from_handle(self, handle): + """ + Find a Note in the database from the passed gramps' ID. + + If no such Note exists, None is returned. + """ + return self.get_from_handle(handle, Note, self.note_map) + + def __get_obj_from_gramps_id(self, val, tbl, class_, prim_tbl): + try: + if tbl.has_key(str(val)): + data = tbl.get(str(val), txn=self.txn) + obj = class_() + ### FIXME: this is a dirty hack that works without no + ### sensible explanation. For some reason, for a readonly + ### database, secondary index returns a primary table key + ### corresponding to the data, not the data. + if self.readonly: + tuple_data = prim_tbl.get(data, txn=self.txn) + else: + tuple_data = cPickle.loads(data) + obj.unserialize(tuple_data) + return obj + else: + return None + except DBERRS, msg: + self.__log_error() + raise Errors.DbError(msg) + + def get_person_from_gramps_id(self, val): + """ + Find a Person in the database from the passed gramps' ID. + + If no such Person exists, None is returned. + """ + return self.__get_obj_from_gramps_id(val, self.id_trans, Person, + self.person_map) + + def get_family_from_gramps_id(self, val): + """ + Find a Family in the database from the passed gramps' ID. + + If no such Family exists, None is return. 
+ """ + return self.__get_obj_from_gramps_id(val, self.fid_trans, Family, + self.family_map) + + def get_event_from_gramps_id(self, val): + """ + Find an Event in the database from the passed gramps' ID. + + If no such Family exists, None is returned. + """ + return self.__get_obj_from_gramps_id(val, self.eid_trans, Event, + self.event_map) + + def get_place_from_gramps_id(self, val): + """ + Find a Place in the database from the passed gramps' ID. + + If no such Place exists, None is returned. + """ + return self.__get_obj_from_gramps_id(val, self.pid_trans, Place, + self.place_map) + + def get_source_from_gramps_id(self, val): + """ + Find a Source in the database from the passed gramps' ID. + + If no such Source exists, None is returned. + """ + return self.__get_obj_from_gramps_id(val, self.sid_trans, Source, + self.source_map) + + def get_object_from_gramps_id(self, val): + """ + Find a MediaObject in the database from the passed gramps' ID. + + If no such MediaObject exists, None is returned. + """ + return self.__get_obj_from_gramps_id(val, self.oid_trans, MediaObject, + self.media_map) + + def get_repository_from_gramps_id(self, val): + """ + Find a Repository in the database from the passed gramps' ID. + + If no such Repository exists, None is returned. + """ + return self.__get_obj_from_gramps_id(val, self.rid_trans, Repository, + self.repository_map) + + def get_note_from_gramps_id(self, val): + """ + Find a Note in the database from the passed gramps' ID. + + If no such Note exists, None is returned. + """ + return self.__get_obj_from_gramps_id(val, self.nid_trans, Note, + self.note_map) + + def get_name_group_mapping(self, name): + """ + Return the default grouping name for a surname. + """ + return unicode(self.name_group.get(str(name), name)) + + def get_name_group_keys(self): + """ + Return the defined names that have been assigned to a default grouping. 
+ """ + return [unicode(k) for k in self.name_group.keys()] + + def has_name_group_key(self, name): + """ + Return if a key exists in the name_group table. + """ + return self.name_group.has_key(str(name)) + + def set_name_group_mapping(self, name, group): + """ + Set the default grouping name for a surname. + + Needs to be overridden in the derived class. + """ + raise NotImplementedError + + @staticmethod + def get_number_of_records(table): + return len(table) + + def get_number_of_people(self): + """ + Return the number of people currently in the database. + """ + if self.db_is_open: + return self.get_number_of_records(self.person_map) + #return len(self.person_map) + else: + return 0 + + def get_number_of_families(self): + """ + Return the number of families currently in the database. + """ + return self.get_number_of_records(self.family_map) + + def get_number_of_events(self): + """ + Return the number of events currently in the database. + """ + return self.get_number_of_records(self.event_map) + + def get_number_of_places(self): + """ + Return the number of places currently in the database. + """ + return self.get_number_of_records(self.place_map) + + def get_number_of_sources(self): + """ + Return the number of sources currently in the database. + """ + return self.get_number_of_records(self.source_map) + + def get_number_of_media_objects(self): + """ + Return the number of media objects currently in the database. + """ + return len(self.media_map) + + def get_number_of_repositories(self): + """ + Return the number of source repositories currently in the database. + """ + return self.get_number_of_records(self.repository_map) + + def get_number_of_notes(self): + """ + Return the number of notes currently in the database. 
+ """ + return self.get_number_of_records(self.note_map) + + def all_handles(self, table): + return table.keys() + + def get_person_handles(self, sort_handles=True): + """ + Return a list of database handles, one handle for each Person in + the database. + + If sort_handles is True, the list is sorted by surnames. + """ + if self.db_is_open: + if sort_handles: + with self.get_person_cursor() as cursor: + slist = sorted((data[3][5], key) for key, data in cursor) + return [x[1] for x in slist] + else: + return self.all_handles(self.person_map) + return [] + + def iter_person_handles(self): + """ + Return an iterator over handles for Persons in the database + """ + with self.get_person_cursor() as cursor: + for key, data in cursor: + yield key + + def iter_people(self): + """ + Return an iterator over handles and objects for Persons in the database + """ + with self.get_person_cursor() as cursor: + for handle, data in cursor: + person = Person() + person.unserialize(data) + yield handle, person + + def get_place_handles(self, sort_handles=True): + """ + Return a list of database handles, one handle for each Place in + the database. + + If sort_handles is True, the list is sorted by Place title. + """ + if self.db_is_open: + if sort_handles: + with self.get_place_cursor() as cursor: + slist = sorted((data[2], key) for key, data in cursor) + return [x[1] for x in slist] + else: + return self.all_handles(self.place_map) + return [] + + def iter_place_handles(self): + """ + Return an iterator over handles for Places in the database + """ + with self.get_place_cursor() as cursor: + for key, data in cursor: + yield key + + def get_source_handles(self, sort_handles=True): + """ + Return a list of database handles, one handle for each Source in + the database. + + If sort_handles is True, the list is sorted by Source title. 
+ """ + if self.db_is_open: + handle_list = self.all_handles(self.source_map) + if sort_handles: + handle_list.sort(key=self.__sortbysource_key) + return handle_list + return [] + + def iter_source_handles(self): + """ + Return an iterator over handles for Sources in the database + """ + with self.get_source_cursor() as cursor: + for key, data in cursor: + yield key + + def get_media_object_handles(self, sort_handles=True): + """ + Return a list of database handles, one handle for each MediaObject in + the database. + + If sort_handles is True, the list is sorted by title. + """ + if self.db_is_open: + handle_list = self.all_handles(self.media_map) + if sort_handles: + handle_list.sort(key=self.__sortbymedia_key) + return handle_list + return [] + + def iter_media_object_handles(self): + """ + Return an iterator over handles for Media in the database + """ + with self.get_media_cursor() as cursor: + for key, data in cursor: + yield key + + def get_event_handles(self): + """ + Return a list of database handles, one handle for each Event in the + database. + """ + if self.db_is_open: + return self.all_handles(self.event_map) + return [] + + def iter_event_handles(self): + """ + Return an iterator over handles for Events in the database + """ + with self.get_event_cursor() as cursor: + for key, data in cursor: + yield key + + def get_family_handles(self): + """ + Return a list of database handles, one handle for each Family in + the database. + """ + if self.db_is_open: + return self.all_handles(self.family_map) + return [] + + def iter_family_handles(self): + """ + Return an iterator over handles for Families in the database + """ + with self.get_family_cursor() as cursor: + for key, data in cursor: + yield key + + def get_repository_handles(self): + """ + Return a list of database handles, one handle for each Repository in + the database. 
+ """ + if self.db_is_open: + return self.all_handles(self.repository_map) + return [] + + def iter_repository_handles(self): + """ + Return an iterator over handles for Repositories in the database + """ + with self.get_repository_cursor() as cursor: + for key, data in cursor: + yield key + + def get_note_handles(self): + """ + Return a list of database handles, one handle for each Note in the + database. + """ + if self.db_is_open: + return self.all_handles(self.note_map) + return [] + + def iter_note_handles(self): + """ + Return an iterator over handles for Notes in the database + """ + with self.get_note_cursor() as cursor: + for key, data in cursor: + yield key + + def get_gramps_ids(self, obj_key): + key2table = { + PERSON_KEY: self.id_trans, + FAMILY_KEY: self.fid_trans, + SOURCE_KEY: self.sid_trans, + EVENT_KEY: self.eid_trans, + MEDIA_KEY: self.oid_trans, + PLACE_KEY: self.pid_trans, + REPOSITORY_KEY: self.rid_trans, + NOTE_KEY: self.nid_trans, + } + + table = key2table[obj_key] + return table.keys() + + def has_gramps_id(self, obj_key, gramps_id): + key2table = { + PERSON_KEY: self.id_trans, + FAMILY_KEY: self.fid_trans, + SOURCE_KEY: self.sid_trans, + EVENT_KEY: self.eid_trans, + MEDIA_KEY: self.oid_trans, + PLACE_KEY: self.pid_trans, + REPOSITORY_KEY: self.rid_trans, + NOTE_KEY: self.nid_trans, + } + + table = key2table[obj_key] + #return str(gramps_id) in table + return table.has_key(str(gramps_id)) + + def find_initial_person(self): + person = self.get_default_person() + if not person: + the_ids = self.get_gramps_ids(PERSON_KEY) + if the_ids: + person = self.get_person_from_gramps_id(min(the_ids)) + return person + + def _validated_id_prefix(self, val, default): + if isinstance(val, basestring) and val: + try: + str_ = val % 1 + except TypeError: # missing conversion specifier + prefix_var = val + "%d" + else: + prefix_var = val # OK as given + else: + prefix_var = default+"%04d" # not a string or empty string + return prefix_var + + def 
def set_person_id_prefix(self, val):
    """
    Set the naming template for GRAMPS Person ID values.

    The string is expected to be in the form of a simple text string, or
    in a format that contains a C/Python style format string using %d,
    such as I%d or I%04d.
    """
    self.person_prefix = self._validated_id_prefix(val, "I")

def set_source_id_prefix(self, val):
    """
    Set the naming template for GRAMPS Source ID values.

    The string is expected to be in the form of a simple text string, or
    in a format that contains a C/Python style format string using %d,
    such as S%d or S%04d.
    """
    self.source_prefix = self._validated_id_prefix(val, "S")

def set_object_id_prefix(self, val):
    """
    Set the naming template for GRAMPS MediaObject ID values.

    The string is expected to be in the form of a simple text string, or
    in a format that contains a C/Python style format string using %d,
    such as O%d or O%04d.
    """
    self.mediaobject_prefix = self._validated_id_prefix(val, "O")

def set_place_id_prefix(self, val):
    """
    Set the naming template for GRAMPS Place ID values.

    The string is expected to be in the form of a simple text string, or
    in a format that contains a C/Python style format string using %d,
    such as P%d or P%04d.
    """
    self.place_prefix = self._validated_id_prefix(val, "P")

def set_family_id_prefix(self, val):
    """
    Set the naming template for GRAMPS Family ID values.

    The string is expected to be in the form of a simple text string, or
    in a format that contains a C/Python style format string using %d,
    such as F%d or F%04d.
    """
    self.family_prefix = self._validated_id_prefix(val, "F")

def set_event_id_prefix(self, val):
    """
    Set the naming template for GRAMPS Event ID values.

    The string is expected to be in the form of a simple text string, or
    in a format that contains a C/Python style format string using %d,
    such as E%d or E%04d.
    """
    self.event_prefix = self._validated_id_prefix(val, "E")

def set_repository_id_prefix(self, val):
    """
    Set the naming template for GRAMPS Repository ID values.

    The string is expected to be in the form of a simple text string, or
    in a format that contains a C/Python style format string using %d,
    such as R%d or R%04d.
    """
    self.repository_prefix = self._validated_id_prefix(val, "R")

def set_note_id_prefix(self, val):
    """
    Set the naming template for GRAMPS Note ID values.

    The string is expected to be in the form of a simple text string, or
    in a format that contains a C/Python style format string using %d,
    such as N%d or N%04d.
    """
    self.note_prefix = self._validated_id_prefix(val, "N")

def set_undo_callback(self, callback):
    """
    Define the callback function that is called whenever an undo operation
    is executed.

    The callback function receives a single argument that is a text string
    that defines the operation.
    """
    self.undo_callback = callback

def set_redo_callback(self, callback):
    """
    Define the callback function that is called whenever a redo operation
    is executed.

    The callback function receives a single argument that is a text string
    that defines the operation.
    """
    self.redo_callback = callback

def get_surname_list(self):
    """
    Return the list of locale-sorted surnames contained in the database.
    """
    return self.surname_list

def build_surname_list(self):
    """
    Build the list of locale-sorted surnames contained in the database.

    The function must be overridden in the derived class.
    """
    raise NotImplementedError

def sort_surname_list(self):
    """
    Sort the surname list in place.

    The function must be overridden in the derived class.
    """
    raise NotImplementedError

def add_to_surname_list(self, person, batch_transaction):
    """
    Check to see if the surname of the given person is already in
    the surname list.

    If not then we need to add the name to the list.
    The function must be overridden in the derived class.
    """
    raise NotImplementedError

def remove_from_surname_list(self, person):
    """
    Check whether there are persons with the same surname left in
    the database.

    If not then we need to remove the name from the list.
    The function must be overridden in the derived class.
    """
    raise NotImplementedError

def get_bookmarks(self):
    """Return the list of Person handles in the bookmarks."""
    return self.bookmarks

def get_family_bookmarks(self):
    """Return the list of Family handles in the bookmarks."""
    return self.family_bookmarks

def get_event_bookmarks(self):
    """Return the list of Event handles in the bookmarks."""
    return self.event_bookmarks

def get_place_bookmarks(self):
    """Return the list of Place handles in the bookmarks."""
    return self.place_bookmarks

def get_source_bookmarks(self):
    """Return the list of Source handles in the bookmarks."""
    return self.source_bookmarks

def get_media_bookmarks(self):
    """Return the list of MediaObject handles in the bookmarks."""
    return self.media_bookmarks

def get_repo_bookmarks(self):
    """Return the list of Repository handles in the bookmarks."""
    return self.repo_bookmarks

def get_note_bookmarks(self):
    """Return the list of Note handles in the bookmarks."""
    return self.note_bookmarks

def set_researcher(self, owner):
    """Set the information about the owner of the database."""
    self.owner.set_from(owner)

def get_researcher(self):
    """
    Return the Researcher instance, providing information about the owner
    of the database.
    """
    return self.owner

def get_default_person(self):
    """Return the default Person of the database."""
    person = self.get_person_from_handle(self.get_default_handle())
    if person:
        return person
    elif (self.metadata is not None) and (not self.readonly):
        # The stored default handle no longer resolves to a person;
        # clear the stale entry so it is not returned again.
        self.metadata['default'] = None
    return None

def get_default_handle(self):
    """Return the handle of the default Person of the database."""
    if self.metadata is not None:
        return self.metadata.get('default')
    return None

def get_save_path(self):
    """Return the save path of the file, or "" if one does not exist."""
    return self.path

def set_save_path(self, path):
    """Set the save path for the database."""
    self.path = path

def get_person_event_types(self):
    """
    Return a list of all Event types associated with Person instances in
    the database.
    """
    return list(self.individual_event_names)

def get_person_attribute_types(self):
    """
    Return a list of all Attribute types associated with Person instances
    in the database.
    """
    return list(self.individual_attributes)

def get_family_attribute_types(self):
    """
    Return a list of all Attribute types associated with Family instances
    in the database.
    """
    return list(self.family_attributes)

def get_family_event_types(self):
    """
    Return a list of all Event types associated with Family instances in
    the database.
    """
    return list(self.family_event_names)

def get_marker_types(self):
    """
    Return a list of all marker types available in the database.
    """
    return list(self.marker_names)

def get_media_attribute_types(self):
    """
    Return a list of all Attribute types associated with Media and MediaRef
    instances in the database.
    """
    return list(self.media_attributes)

def get_family_relation_types(self):
    """
    Return a list of all relationship types associated with Family
    instances in the database.
    """
    return list(self.family_rel_types)

def get_child_reference_types(self):
    """
    Return a list of all child reference types associated with Family
    instances in the database.
    """
    return list(self.child_ref_types)

def get_event_roles(self):
    """
    Return a list of all custom event role names associated with Event
    instances in the database.
    """
    return list(self.event_role_names)

def get_name_types(self):
    """
    Return a list of all custom name types associated with Person
    instances in the database.
    """
    return list(self.name_types)

def get_repository_types(self):
    """
    Return a list of all custom repository types associated with Repository
    instances in the database.
    """
    return list(self.repository_types)

def get_note_types(self):
    """
    Return a list of all custom note types associated with Note instances
    in the database.
    """
    return list(self.note_types)

def get_source_media_types(self):
    """
    Return a list of all custom source media types associated with Source
    instances in the database.
    """
    return list(self.source_media_types)

def get_url_types(self):
    """
    Return a list of all custom name types associated with Url instances
    in the database.
    """
    return list(self.url_types)
+ """ + return list(self.url_types) + + def __log_error(self): + pass + + def __get_raw_data(self, table, handle): + """ + Helper method for get_raw__data methods + """ + try: + return table.get(str(handle), txn=self.txn) + except DBERRS, msg: + self.__log_error() + raise Errors.DbError(msg) + + def get_raw_person_data(self, handle): + return self.__get_raw_data(self.person_map, handle) + + def get_raw_family_data(self, handle): + return self.__get_raw_data(self.family_map, handle) + + def get_raw_object_data(self, handle): + return self.__get_raw_data(self.media_map, handle) + + def get_raw_place_data(self, handle): + return self.__get_raw_data(self.place_map, handle) + + def get_raw_event_data(self, handle): + return self.__get_raw_data(self.event_map, handle) + + def get_raw_source_data(self, handle): + return self.__get_raw_data(self.source_map, handle) + + def get_raw_repository_data(self, handle): + return self.__get_raw_data(self.repository_map, handle) + + def get_raw_note_data(self, handle): + return self.__get_raw_data(self.note_map, handle) + + def __has_handle(self, table, handle): + """ + Helper function for has__handle methods + """ + try: + return table.get(str(handle), txn=self.txn) is not None + except DBERRS, msg: + self.__log_error() + raise Errors.DbError(msg) + + def has_person_handle(self, handle): + """ + Return True if the handle exists in the current Person database. + """ + return self.__has_handle(self.person_map, handle) + + def has_family_handle(self, handle): + """ + Return True if the handle exists in the current Family database. + """ + return self.__has_handle(self.family_map, handle) + + def has_object_handle(self, handle): + """ + Return True if the handle exists in the current MediaObjectdatabase. + """ + return self.__has_handle(self.media_map, handle) + + def has_repository_handle(self, handle): + """ + Return True if the handle exists in the current Repository database. 
+ """ + return self.__has_handle(self.repository_map, handle) + + def has_note_handle(self, handle): + """ + Return True if the handle exists in the current Note database. + """ + return self.__has_handle(self.note_map, handle) + + def has_event_handle(self, handle): + """ + Return True if the handle exists in the current Event database. + """ + return self.__has_handle(self.event_map, handle) + + def has_place_handle(self, handle): + """ + Return True if the handle exists in the current Place database. + """ + return self.__has_handle(self.place_map, handle) + + def has_source_handle(self, handle): + """ + Return True if the handle exists in the current Source database. + """ + return self.__has_handle(self.source_map, handle) + + def __sortbyplace(self, first, second): + return locale.strcoll(self.place_map.get(str(first))[2], + self.place_map.get(str(second))[2]) + + def __sortbyplace_key(self, place): + return locale.strxfrm(self.place_map.get(str(place))[2]) + + def __sortbysource(self, first, second): + source1 = unicode(self.source_map[str(first)][2]) + source2 = unicode(self.source_map[str(second)][2]) + return locale.strcoll(source1, source2) + + def __sortbysource_key(self, key): + source = unicode(self.source_map[str(key)][2]) + return locale.strxfrm(source) + + def __sortbymedia(self, first, second): + media1 = self.media_map[str(first)][4] + media2 = self.media_map[str(second)][4] + return locale.strcoll(media1, media2) + + def __sortbymedia_key(self, key): + media = self.media_map[str(key)][4] + return locale.strxfrm(media) + + def set_mediapath(self, path): + """Set the default media path for database, path should be utf-8.""" + if (self.metadata is not None) and (not self.readonly): + self.metadata['mediapath'] = path + + def get_mediapath(self): + """Return the default media path of the database.""" + if self.metadata is not None: + return self.metadata.get('mediapath', None) + return None + + def set_column_order(self, col_list, name): + if 
def set_person_column_order(self, col_list):
    """
    Store the Person display column information in the database's metadata.
    """
    self.set_column_order(col_list, PERSON_COL_KEY)

def set_family_list_column_order(self, col_list):
    """
    Store the Family display column information in the database's metadata.
    """
    self.set_column_order(col_list, FAMILY_COL_KEY)

def set_child_column_order(self, col_list):
    """
    Store the Child display column information in the database's metadata.
    """
    self.set_column_order(col_list, CHILD_COL_KEY)

def set_place_column_order(self, col_list):
    """
    Store the Place display column information in the database's metadata.
    """
    self.set_column_order(col_list, PLACE_COL_KEY)

def set_source_column_order(self, col_list):
    """
    Store the Source display column information in the database's metadata.
    """
    self.set_column_order(col_list, SOURCE_COL_KEY)

def set_media_column_order(self, col_list):
    """
    Store the Media display column information in the database's metadata.
    """
    self.set_column_order(col_list, MEDIA_COL_KEY)

def set_event_column_order(self, col_list):
    """
    Store the Event display column information in the database's metadata.
    """
    self.set_column_order(col_list, EVENT_COL_KEY)

def set_repository_column_order(self, col_list):
    """
    Store the Repository display column information in the database's
    metadata.
    """
    self.set_column_order(col_list, REPOSITORY_COL_KEY)

def set_note_column_order(self, col_list):
    """
    Store the Note display column information in the database's metadata.
    """
    self.set_column_order(col_list, NOTE_COL_KEY)

def __get_column_order(self, name, default):
    """
    Return the column-order list stored under *name*, padded with the
    trailing entries of *default* if the stored list is shorter.
    """
    if self.metadata is None:
        return default
    else:
        cols = self.metadata.get(name, default)
        if len(cols) != len(default):
            # Stored list predates newly added columns; extend it with
            # the defaults for the missing tail.
            return cols + default[len(cols):]
        else:
            return cols

def get_person_column_order(self):
    """
    Return the Person display column information stored in the database's
    metadata.
    """
    default = [(1, 1, 100), (1, 2, 100), (1, 3, 150), (0, 4, 150),
               (1, 5, 150), (0, 6, 150), (0, 7, 100), (0, 8, 100),
               ]
    return self.__get_column_order(PERSON_COL_KEY, default)

def __get_columns(self, key, default):
    """
    Like __get_column_order, but upgrade legacy 2-tuple entries to
    3-tuples by borrowing the width from the matching default entry.
    """
    values = self.__get_column_order(key, default)
    new = []
    for val in values:
        if len(val) == 2:
            for x in default:
                if val[1] == x[1]:
                    new.append((val[0], val[1], x[2]))
                    break
        else:
            new.append(val)
    return new

def get_family_list_column_order(self):
    """
    Return the Family display column information stored in the database's
    metadata.
    """
    default = [(1, 0, 75), (1, 1, 200), (1, 2, 200), (1, 3, 100),
               (0, 4, 100)]
    return self.__get_columns(FAMILY_COL_KEY, default)

def get_child_column_order(self):
    """
    Return the Child display column information stored in the database's
    metadata.
    """
    default = [(1, 0), (1, 1), (1, 2), (1, 3), (1, 4), (1, 5),
               (0, 6), (0, 7)]
    return self.__get_column_order(CHILD_COL_KEY, default)

def get_place_column_order(self):
    """
    Return the Place display column information stored in the database's
    metadata.
    """
    default = [(1, 0, 250), (1, 1, 75), (1, 11, 100), (0, 3, 100),
               (1, 4, 100, ), (0, 5, 150), (1, 6, 150), (0, 7, 150),
               (0, 8, 150), (0, 9, 150), (0, 10, 150), (0, 2, 100)]
    return self.__get_columns(PLACE_COL_KEY, default)

def get_source_column_order(self):
    """
    Return the Source display column information stored in the database's
    metadata.
    """
    default = [(1, 0, 200), (1, 1, 75), (1, 2, 150), (0, 3, 100),
               (1, 4, 150), (0, 5, 100)]
    return self.__get_columns(SOURCE_COL_KEY, default)

def get_media_column_order(self):
    """
    Return the MediaObject display column information stored in the
    database's metadata.
    """
    default = [(1, 0, 200, ), (1, 1, 75), (1, 2, 100), (1, 3, 200),
               (1, 5, 150), (0, 4, 150)]
    return self.__get_columns(MEDIA_COL_KEY, default)

def get_event_column_order(self):
    """
    Return the Event display column information stored in the database's
    metadata.
    """
    default = [(1, 0, 200), (1, 1, 75), (1, 2, 100), (1, 3, 150),
               (1, 4, 200), (0, 5, 100)]
    return self.__get_columns(EVENT_COL_KEY, default)

def get_repository_column_order(self):
    """
    Return the Repository display column information stored in the
    database's metadata.
    """
    default = [(1, 0, 200), (1, 1, 75), (0, 5, 100), (0, 6, 100),
               (1, 2, 100), (1, 3, 250), (1, 4, 100), (0, 7, 100),
               (0, 8, 100), (0, 9, 100), (0, 10, 100), (0, 12, 100)]
    return self.__get_columns(REPOSITORY_COL_KEY, default)

def get_note_column_order(self):
    """
    Return the Note display column information stored in the database's
    metadata.
    """
    default = [(1, 0, 350), (1, 1, 75), (1, 2, 100), (1, 3, 100)]
    return self.__get_columns(NOTE_COL_KEY, default)

def delete_primary_from_reference_map(self, handle, transaction):
    """
    Called each time an object is removed from the database.

    This can be used by subclasses to update any additional index tables
    that might need to be changed.
    """
    pass

def find_backlink_handles(self, handle, include_classes=None):
    """
    Find all objects that hold a reference to the object handle.

    Returns an iterator over a list of (class_name, handle) tuples.

    @param handle: handle of the object to search for.
    @type handle: database handle
    @param include_classes: list of class names to include in the results.
        Default: None means include all classes.
    @type include_classes: list of class names

    This default implementation does a sequential scan through all
    the primary object databases and is very slow. Backends can
    override this method to provide much faster implementations that
    make use of additional capabilities of the backend.

    Note that this is a generator function, it returns an iterator for
    use in loops. If you want a list of the results use:

    > result_list = list(find_backlink_handles(handle))
    """
    # This fallback path is deliberately disabled: backends are expected
    # to override this method.
    assert False, "read:find_backlink_handles -- shouldn't get here!!!"
    # Make a dictionary of the functions and classes that we need for
    # each of the primary object tables.
    primary_tables = {
        'Person': {'cursor_func': self.get_person_cursor,
                   'class_func': Person},
        'Family': {'cursor_func': self.get_family_cursor,
                   'class_func': Family},
        'Event': {'cursor_func': self.get_event_cursor,
                  'class_func': Event},
        'Place': {'cursor_func': self.get_place_cursor,
                  'class_func': Place},
        'Source': {'cursor_func': self.get_source_cursor,
                   'class_func': Source},
        'MediaObject': {'cursor_func': self.get_media_cursor,
                        'class_func': MediaObject},
        'Repository': {'cursor_func': self.get_repository_cursor,
                       'class_func': Repository},
        'Note': {'cursor_func': self.get_note_cursor,
                 'class_func': Note},
        }

    # Find which tables to iterate over
    if include_classes is None:
        the_tables = primary_tables.keys()
    else:
        the_tables = include_classes

    # Now we use the functions and classes defined above to loop through
    # each of the existing primary object tables.
    # BUGFIX: the original iterated ``the_tables.iteritems()``, but
    # the_tables is a *list* of class names (not a dict), so that raised
    # AttributeError. Iterate the names and look up the funcs instead.
    for primary_table_name in the_tables:
        funcs = primary_tables[primary_table_name]
        with funcs['cursor_func']() as cursor:

            # Grab the real object class here so that the lookup does
            # not happen inside the main loop.
            class_func = funcs['class_func']
            for found_handle, val in cursor:
                obj = class_func()
                obj.unserialize(val)

                # Now we need to loop over all object types
                # that have been requested in the include_classes list
                for classname in primary_tables:
                    if obj.has_handle_reference(classname, handle):
                        yield (primary_table_name, found_handle)
    return

def report_bm_change(self):
    """
    Add 1 to the number of bookmark changes during this session.
    """
    self._bm_changes += 1

def db_has_bm_changes(self):
    """
    Return whether there were bookmark changes during the session.
    """
    return self._bm_changes > 0

# ===== begin file: src/gen/db/txn.py =====
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2004-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#

# $Id: txn.py 12672 2009-06-16 15:49:17Z gbritton $
+""" + +#------------------------------------------------------------------------- +# +# Standard python modules +# +#------------------------------------------------------------------------- +from __future__ import with_statement +import cPickle as pickle +from bsddb import dbshelve, db +import logging + +#------------------------------------------------------------------------- +# +# Gramps modules +# +#------------------------------------------------------------------------- +from gen.db.dbconst import * +from gen.db import BSDDBTxn +import Errors + +_LOG = logging.getLogger(DBLOGNAME) + +#------------------------------------------------------------------------- +# +# Gramps transaction class +# +#------------------------------------------------------------------------- +class GrampsDbTxn(dict): + """ + Define a group of database commits that define a single logical operation. + This class should not be used directly, but subclassed to reference a real + database + """ + + __slots__ = ('msg', 'commitdb', 'db', 'first', + 'last', 'timestamp', 'db_maps') + + def get_db_txn(self, value): + """ + Return a transaction object from the database + """ + raise NotImplementedError + + def __enter__(self): + """ + Context manager entry method + """ + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + """ + Context manager exit method + """ + if exc_type is None: + self.commit() + return exc_type is None + + def __init__(self, msg, commitdb, grampsdb): + """ + Create a new transaction. + + A Transaction instance should not be created directly, but by the + GrampsDbBase class or classes derived from GrampsDbBase. The commitdb + parameter is a list-like interface that stores the commit data. This + could be a simple list, or a RECNO-style database object. The grampsdb + parameter is a reference to the GrampsDbWrite object to which this + transaction will be applied. 
+ + The data structure used to handle the transactions is a Python + dictionary where: + + key = (object type, transaction type) where: + object type = the numeric type of an object. These are + defined as PERSON_KEY = 0, FAMILY_KEY = 1, etc. + as imported from dbconst. + transaction type = a numeric representation of the type of + transaction: TXNADD = 0, TXNUPD = 1, TXNDEL = 2 + + data = Python list where: + list element = (handle, data) where: + handle = handle (database key) of the object in the transaction + data = pickled representation of the object + """ + + super(GrampsDbTxn, self).__init__({}) + + self.msg = msg + self.commitdb = commitdb + self.db = grampsdb + self.first = None + self.last = None + self.timestamp = 0 + + # Dictionary to enable table-driven logic in the class + self.db_maps = { + PERSON_KEY: (self.db.person_map, 'person'), + FAMILY_KEY: (self.db.family_map, 'family'), + EVENT_KEY: (self.db.event_map, 'event'), + SOURCE_KEY: (self.db.source_map, 'source'), + PLACE_KEY: (self.db.place_map, 'place'), + MEDIA_KEY: (self.db.media_map, 'media'), + REPOSITORY_KEY: (self.db.repository_map, 'repository'), + #REFERENCE_KEY: (self.db.reference_map, 'reference'), + NOTE_KEY: (self.db.note_map, 'note'), + } + + def get_description(self): + """ + Return the text string that describes the logical operation performed + by the Transaction. + """ + return self.msg + + def set_description(self, msg): + """ + Set the text string that describes the logical operation performed by + the Transaction. + """ + self.msg = msg + + def add(self, obj_type, trans_type, handle, old_data, new_data): + """ + Add a commit operation to the Transaction. + + The obj_type is a constant that indicates what type of PrimaryObject + is being added. The handle is the object's database handle, and the + data is the tuple returned by the object's serialize method. 
+ """ + self.last = self.commitdb.append( + pickle.dumps((obj_type, trans_type, handle, old_data, new_data), 1)) + if self.last is None: + self.last = len(self.commitdb) -1 + if self.first is None: + self.first = self.last + if (obj_type, trans_type) in self: + self[(obj_type, trans_type)] += [(handle, new_data)] + else: + self[(obj_type, trans_type)] = [(handle, new_data)] + + def get_recnos(self, reverse=False): + """ + Return a list of record numbers associated with the transaction. + + While the list is an arbitrary index of integers, it can be used + to indicate record numbers for a database. + """ + if not reverse: + return xrange(self.first, self.last+1) + else: + return xrange(self.last, self.first-1, -1) + + def get_record(self, recno): + """ + Return a tuple representing the PrimaryObject type, database handle + for the PrimaryObject, and a tuple representing the data created by + the object's serialize method. + """ + return pickle.loads(self.commitdb[recno]) + + def __len__(self): + """ + Return the number of commits associated with the Transaction. + """ + if self.first is None or self.last is None: + return 0 + return self.last - self.first + 1 + + def commit(self, msg=None): + """ + Commit the transaction to the assocated commit database. 
+ """ + if msg is not None: + self.msg = msg + + if not len(self) or self.db.readonly: + return + + # Begin new database transaction + txn = self.get_db_txn(self.db.env) + self.db.txn = txn.begin() + + # Commit all add transactions to the database + db_map = lambda key: self.db_maps[key][0] + for (obj_type, trans_type), data in self.iteritems(): + if trans_type == TXNADD and obj_type in self.db_maps: + for handle, new_data in data: + assert handle == str(handle) + db_map(obj_type).put(handle, new_data, txn=txn.txn) + + # Commit all update transactions to the database + for (obj_type, trans_type), data in self.iteritems(): + if trans_type == TXNUPD and obj_type in self.db_maps: + for handle, new_data in data: + assert handle == str(handle) + db_map(obj_type).put(handle, new_data, txn=txn.txn) + + # Before we commit delete transactions, emit signals as required + + # Loop through the data maps, emitting signals as required + emit = self.__emit + for obj_type, (m_, obj_name) in self.db_maps.iteritems(): + # Do an emit for each object and transaction type as required + emit(obj_type, TXNADD, obj_name, '-add') + emit(obj_type, TXNUPD, obj_name, '-update') + emit(obj_type, TXNDEL, obj_name, '-delete') + + # Commit all delete transactions to the database + for (obj_type, trans_type), data in self.iteritems(): + if trans_type == TXNDEL and obj_type in self.db_maps: + for handle, n_ in data: + assert handle == str(handle) + db_map(obj_type).delete(handle, txn=txn.txn) + + # Add new reference keys as required + db_map = self.db.reference_map + if (REFERENCE_KEY, TXNADD) in self: + for handle, new_data in self[(REFERENCE_KEY, TXNADD)]: + assert handle == str(handle) + db_map.put(handle, new_data, txn=txn.txn) + + # Delete old reference keys as required + if (REFERENCE_KEY, TXNDEL) in self: + for handle, none_ in self[(REFERENCE_KEY, TXNDEL)]: + assert handle == str(handle) + db_map.delete(handle, txn=txn.txn) + + # Commit database transaction + txn.commit() + self.db.txn = 
None + self.clear() + return + + # Define helper function to do the actual emits + def __emit(self,obj_type, trans_type, obj, suffix): + if (obj_type, trans_type) in self: + handles = [handle for handle, data in + self[(obj_type, trans_type)]] + if handles: + self.db.emit(obj + suffix, (handles, )) + +# Test functions + +def testtxn(): + """ + Test suite + """ + class M(dict): + """Fake database map with just two methods""" + def put(self, key, data, txn=None): + super(M, self).__setitem__(key, data) + def delete(self, key, txn=None): + super(M, self).__delitem__(key) + + class D: + """Fake gramps database""" + def __init__(self): + self.person_map = M() + self.family_map = M() + self.source_map = M() + self.event_map = M() + self.media_map = M() + self.place_map = M() + self.note_map = M() + self.repository_map = M() + self.reference_map = M() + self.readonly = False + self.env = None + def emit(self, obj, value): + pass + + class C(list): + """ Fake commit database""" + pass + + class G(GrampsDbTxn): + """Derived transacton class""" + def get_db_txn(self, env): + return T() + + class T(): + """Fake DBMS transaction class""" + def __init__(self): + self.txn = None + def begin(self): + return self + def commit(self): + pass + + commitdb = C() + grampsdb = D() + trans = G("Test Transaction", commitdb, grampsdb) + trans.add(0, TXNADD, '1', None, "data1") + trans.add(0, TXNADD, '2', None, "data2") + trans.add(0, TXNUPD, '2', None, "data3") + trans.add(0, TXNDEL, '1', None, None) + + print trans + print trans.get_description() + print trans.set_description("new text") + print trans.get_description() + for i in trans.get_recnos(): + print trans.get_record(i) + print list(trans.get_recnos()) + print list(trans.get_recnos(reverse=True)) + trans.commit("test") + print grampsdb.person_map + +if __name__ == '__main__': + testtxn() diff --git a/src/gen/db/undoredo.py b/src/gen/db/undoredo.py new file mode 100644 index 000000000..052f2f2fd --- /dev/null +++ 
b/src/gen/db/undoredo.py @@ -0,0 +1,509 @@ +# +# Gramps - a GTK+/GNOME based genealogy program +# +# Copyright (C) 2004-2006 Donald N. Allingham +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +# + +# $Id: undoredo.py 12672 2009-06-16 15:49:17Z gbritton $ + +""" +Exports the GrampsDbUndo class for managing Gramps transactions +undos and redos. 
+""" + +#------------------------------------------------------------------------- +# +# Standard python modules +# +#------------------------------------------------------------------------- +import time, os +import cPickle as pickle +from bsddb import db +from gettext import gettext as _ + +#------------------------------------------------------------------------- +# +# Gramps modules +# +#------------------------------------------------------------------------- +from gen.db.dbconst import * +from gen.db import BSDDBTxn +import Errors + +#------------------------------------------------------------------------- +# +# Local Constants +# +#------------------------------------------------------------------------- +DBERRS = (db.DBRunRecoveryError, db.DBAccessError, + db.DBPageNotFoundError, db.DBInvalidArgError) + +_SIGBASE = ('person', 'family', 'source', 'event', 'media', + 'place', 'repository', 'reference', 'note') +#------------------------------------------------------------------------- +# +# GrampsDbUndo class +# +#------------------------------------------------------------------------- +class GrampsDbUndo(object): + """ + Base class for the gramps undo/redo manager. Needs to be subclassed + for use with a real backend. + """ + + __slots__ = ['undodb', 'db', 'mapbase', 'translist', 'undoindex', + 'undo_history_timestamp', 'txn'] + + def __init__(self, grampsdb): + """ + Class constructor. 
Set up main instance variables + """ + self.db = grampsdb + self.clear() + self.mapbase = ( + self.db.person_map, + self.db.family_map, + self.db.source_map, + self.db.event_map, + self.db.media_map, + self.db.place_map, + self.db.repository_map, + self.db.reference_map, + self.db.note_map, + ) + + def clear(self): + """ + Clear the undo/redo list (but not the backing storage) + """ + self.translist = [] + self.undoindex = -1 + self.undo_history_timestamp = time.time() + self.txn = None + + def __enter__(self, value): + """ + Context manager method to establish the context + """ + self.open(value) + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + """ + Context manager method to finish the context + """ + if exc_type is None: + self.close() + return exc_type is None + + def open(self, value): + """ + Open the backing storage. Needs to be overridden in the derived + class. + """ + raise NotImplementedError + + def close(self): + """ + Close the backing storage. Needs to be overridden in the derived + class. + """ + raise NotImplementedError + + def append(self, value): + """ + Add a new entry on the end. Needs to be overridden in the derived + class. + """ + raise NotImplementedError + + def __getitem__(self, index): + """ + Returns an entry by index number. Needs to be overridden in the + derived class. + """ + raise NotImplementedError + + def __setitem__(self, index, value): + """ + Set an entry to a value. Needs to be overridden in the derived class. + """ + raise NotImplementedError + + def __len__(self): + """ + Returns the number of entries. Needs to be overridden in the derived + class. + """ + raise NotImplementedError + + def commit(self, txn, msg): + """ + Commit the transaction to the undo/redo database. 
"txn" should be + an instance of gramps gramps transaction class + """ + txn.set_description(msg) + txn.timestamp = time.time() + + # If we're within our undo limit, add this transaction + self.undoindex += 1 + if self.undoindex < DBUNDO: + if self.undoindex >= len(self.translist): + self.translist.append(txn) + else: + self.translist[self.undoindex] = txn + del self.translist[self.undoindex+1:] + + # Otherwise, we've exceeded our undo limit + else: + self.db.abort_possible = False + self.undo_history_timestamp = time.time() + self.translist[-1] = txn + + def undo_available(self): + """ + Return boolean of whether or not there's a possibility of undo. + """ + if 0 <= self.undoindex < len(self.translist): + return True + return False + + def redo_available(self): + """ + Return boolean of whether or not there's a possibility of redo. + """ + if 0 <= self.undoindex+1 < len(self.translist): + return True + return False + + def undo(self, update_history=True): + """ + Undo a previously committed transaction + """ + if self.db.readonly or not self.undo_available(): + return False + return self.__undoredo(update_history, self.__undo) + + def redo(self, update_history=True): + """ + Redo a previously committed, then undone, transaction + """ + if self.db.readonly or not self.redo_available(): + return False + return self.__undoredo(update_history, self.__redo) + + def __undoredo(self, update_history, func): + """ + Helper method used by both undo and redo methods. + """ + try: + with BSDDBTxn(self.db.env) as txn: + self.txn = self.db.txn = txn.txn + status = func(update_history) + if not status: + txn.abort() + self.db.txn = None + return status + + except DBERRS, msg: + self.db._log_error() + raise Errors.DbError(msg) + + def __undo(self, update_history=True): + """ + Access the last committed transaction, and revert the data to the + state before the transaction was committed. 
+        """
+        transaction = self.translist[self.undoindex]
+        db = self.db
+        self.undoindex -= 1
+        subitems = transaction.get_recnos(reverse=True)
+
+        # Process all records in the transaction
+        for record_id in subitems:
+            (key, trans_type, handle, old_data, new_data) = \
+                pickle.loads(self.undodb[record_id])
+
+            if key == REFERENCE_KEY:
+                self.undo_reference(old_data, handle, self.mapbase[key])
+            else:
+                self.undo_data(old_data, handle, self.mapbase[key],
+                               db.emit, _SIGBASE[key])
+        # Notify listeners
+        if db.undo_callback:
+            if self.undo_available():
+                db.undo_callback(_("_Undo %s")
+                                 % transaction.get_description())
+            else:
+                db.undo_callback(None)
+
+        if db.redo_callback:
+            db.redo_callback(_("_Redo %s")
+                             % transaction.get_description())
+
+        if update_history and db.undo_history_callback:
+            db.undo_history_callback()
+        return True
+
+    def __redo(self, db=None, update_history=True):
+        """
+        Access the last undone transaction, and revert the data to the state
+        before the transaction was undone.
+ """ + + self.undoindex += 1 + transaction = self.translist[self.undoindex] + db = self.db + subitems = transaction.get_recnos() + + # Process all records in the transaction + for record_id in subitems: + (key, trans_type, handle, old_data, new_data) = \ + pickle.loads(self.undodb[record_id]) + + if key == REFERENCE_KEY: + self.undo_reference(new_data, handle, self.mapbase[key]) + else: + self.undo_data(new_data, handle, self.mapbase[key], + db.emit, _SIGBASE[key]) + # Notify listeners + if db.undo_callback: + db.undo_callback(_("_Undo %s") + % transaction.get_description()) + + if db.redo_callback: + if self.redo_available(): + new_transaction = self.translist[self.undoindex+1] + db.redo_callback(_("_Redo %s") + % new_transaction.get_description()) + else: + db.redo_callback(None) + + if update_history and db.undo_history_callback: + db.undo_history_callback() + return True + + def undo_reference(self, data, handle, db_map): + """ + Helper method to undo a reference map entry + """ + try: + if data is None: + db_map.delete(handle, txn=self.txn) + else: + db_map.put(handle, data, txn=self.txn) + + except DBERRS, msg: + self.db._log_error() + raise Errors.DbError(msg) + + def undo_data(self, data, handle, db_map, emit, signal_root): + """ + Helper method to undo/redo the changes made + """ + try: + if data is None: + emit(signal_root + '-delete', ([handle],)) + db_map.delete(handle, txn=self.txn) + else: + ex_data = db_map.get(handle, txn=self.txn) + if ex_data: + signal = signal_root + '-update' + else: + signal = signal_root + '-add' + db_map.put(handle, data, txn=self.txn) + emit(signal, ([handle],)) + + except DBERRS, msg: + self.db._log_error() + raise Errors.DbError(msg) + +class GrampsDbUndoList(GrampsDbUndo): + """ + Implementation of the gramps undo database using a Python list + """ + def __init__(self, grampsdb): + """ + Class constructor + """ + super(GrampsDbUndoList, self).__init__(grampsdb) + self.undodb = [] + + def open(self): + """ + A list does 
not need to be opened
+        """
+        pass
+
+    def close(self):
+        """
+        Close the list by resetting it to empty
+        """
+        self.undodb = []
+        self.clear()
+
+    def append(self, value):
+        """
+        Add an entry on the end of the list
+        """
+        self.undodb.append(value)
+        return len(self.undodb)-1
+
+    def __getitem__(self, index):
+        """
+        Return an item at the specified index
+        """
+        return self.undodb[index]
+
+    def __setitem__(self, index, value):
+        """
+        Set an item at the specified index to the given value
+        """
+        self.undodb[index] = value
+
+    def __iter__(self):
+        """
+        Iterator
+        """
+        for item in self.undodb:
+            yield item
+
+    def __len__(self):
+        """
+        Return number of entries in the list
+        """
+        return len(self.undodb)
+
+class GrampsDbUndoBSDDB(GrampsDbUndo):
+    """
+    Class constructor for gramps undo/redo database using a bsddb recno
+    database as the backing store.
+    """
+
+    def __init__(self, grampsdb, path):
+        """
+        Class constructor
+        """
+        super(GrampsDbUndoBSDDB, self).__init__(grampsdb)
+        self.undodb = db.DB()
+        self.path = path
+
+    def open(self):
+        """
+        Open the undo/redo database
+        """
+        self.undodb.open(self.path, db.DB_RECNO, db.DB_CREATE)
+
+    def close(self):
+        """
+        Close the undo/redo database
+        """
+        self.undodb.close()
+        try:
+            os.remove(self.path)
+        except OSError:
+            pass
+        self.clear()
+
+    def append(self, value):
+        """
+        Add an entry on the end of the database
+        """
+        return self.undodb.append(value)
+
+    def __len__(self):
+        """
+        Returns the number of entries in the database
+        """
+        x = self.undodb.stat()['nkeys']
+        y = len(self.undodb)
+        assert x == y
+        return x
+
+    def __getitem__(self, index):
+        """
+        Returns the entry stored at the specified index
+        """
+        return self.undodb.get(index)
+
+    def __setitem__(self, index, value):
+        """
+        Sets the entry stored at the specified index to the value given.
+ """ + self.undodb.put(index, value) + + def __iter__(self): + """ + Iterator + """ + cursor = self.undodb.cursor() + data = cursor.first() + while data: + yield data + data = cursor.next() + +def testundo(): + class T: + def __init__(self): + self.msg = '' + self.timetstamp = 0 + def set_description(self, msg): + self.msg = msg + + class D: + def __init__(self): + self.person_map = {} + self.family_map = {} + self.source_map = {} + self.event_map = {} + self.media_map = {} + self.place_map = {} + self.note_map = {} + self.repository_map = {} + self.reference_map = {} + + print "list tests" + undo = GrampsDbUndoList(D()) + print undo.append('foo') + print undo.append('bar') + print undo[0] + undo[0] = 'foobar' + print undo[0] + print "len", len(undo) + print "iter" + for data in undo: + print data + print + print "bsddb tests" + undo = GrampsDbUndoBSDDB(D(), '/tmp/testundo') + undo.open() + print undo.append('foo') + print undo.append('fo2') + print undo.append('fo3') + print undo[1] + undo[1] = 'bar' + print undo[1] + for data in undo: + print data + print "len", len(undo) + + print "test commit" + undo.commit(T(), msg="test commit") + undo.close() + +if __name__ == '__main__': + testundo() diff --git a/src/gen/db/upgrade.py b/src/gen/db/upgrade.py new file mode 100644 index 000000000..e80808aa3 --- /dev/null +++ b/src/gen/db/upgrade.py @@ -0,0 +1,305 @@ +from gen.db import BSDDBTxn +def gramps_upgrade_14(self): + """Upgrade database from version 13 to 14.""" + # This upgrade modifies notes and dates + length = (len(self.note_map) + len(self.person_map) + + len(self.event_map) + len(self.family_map) + + len(self.repository_map) + len(self.media_map) + + len(self.place_map) + len(self.source_map)) + self.set_total(length) + + # --------------------------------- + # Modify Notes + # --------------------------------- + # replace clear text with StyledText in Notes + for handle in self.note_map.keys(): + note = self.note_map[handle] + (junk_handle, gramps_id, text, 
format, note_type, + change, marker, private) = note + styled_text = (text, []) + new_note = (handle, gramps_id, styled_text, format, note_type, + change, marker, private) + with BSDDBTxn(self.env, self.note_map) as txn: + txn.put(str(handle), new_note) + self.update() + + # --------------------------------- + # Modify Event + # --------------------------------- + # update dates with newyear + for handle in self.event_map.keys(): + event = self.event_map[handle] + (junk_handle, gramps_id, the_type, date, description, place, + source_list, note_list, media_list, attribute_list, + change, marker, private) = event + new_date = convert_date_14(date) + new_source_list = new_source_list_14(source_list) + new_media_list = new_media_list_14(media_list) + new_attribute_list = new_attribute_list_14(attribute_list) + new_event = (junk_handle, gramps_id, the_type, new_date, + description, place, new_source_list, note_list, + new_media_list, new_attribute_list, change,marker,private) + with BSDDBTxn(self.env, self.event_map) as txn: + txn.put(str(handle), new_event) + self.update() + + # --------------------------------- + # Modify Person + # --------------------------------- + # update dates with newyear + for handle in self.person_map.keys(): + person = self.person_map[handle] + (junk_handle, # 0 + gramps_id, # 1 + gender, # 2 + primary_name, # 3 + alternate_names, # 4 + death_ref_index, # 5 + birth_ref_index, # 6 + event_ref_list, # 7 + family_list, # 8 + parent_family_list, # 9 + media_list, # 10 + address_list, # 11 + attribute_list, # 12 + urls, # 13 + lds_ord_list, # 14 + psource_list, # 15 + pnote_list, # 16 + change, # 17 + marker, # 18 + pprivate, # 19 + person_ref_list, # 20 + ) = person + + new_address_list = [] + for address in address_list: + (privacy, asource_list, anote_list, date, location) = address + new_date = convert_date_14(date) + new_asource_list = new_source_list_14(asource_list) + new_address_list.append((privacy, new_asource_list, anote_list, + 
new_date, location)) + new_ord_list = [] + for ldsord in lds_ord_list: + (lsource_list, lnote_list, date, type, place, + famc, temple, status, lprivate) = ldsord + new_date = convert_date_14(date) + new_lsource_list = new_source_list_14(lsource_list) + new_ord_list.append( (new_lsource_list, lnote_list, new_date, type, + place, famc, temple, status, lprivate)) + + new_primary_name = convert_name_14(primary_name) + + new_alternate_names = [convert_name_14(name) for name + in alternate_names] + + new_media_list = new_media_list_14(media_list) + new_psource_list = new_source_list_14(psource_list) + new_attribute_list = new_attribute_list_14(attribute_list) + new_person_ref_list = new_person_ref_list_14(person_ref_list) + + new_person = (junk_handle, # 0 + gramps_id, # 1 + gender, # 2 + new_primary_name, # 3 + new_alternate_names, # 4 + death_ref_index, # 5 + birth_ref_index, # 6 + event_ref_list, # 7 + family_list, # 8 + parent_family_list, # 9 + new_media_list, # 10 + new_address_list, # 11 + new_attribute_list, # 12 + urls, # 13 + new_ord_list, # 14 + new_psource_list, # 15 + pnote_list, # 16 + change, # 17 + marker, # 18 + pprivate, # 19 + new_person_ref_list, # 20 + ) + + with BSDDBTxn(self.env, self.person_map) as txn: + txn.put(str(handle), new_person) + self.update() + + # --------------------------------- + # Modify Family + # --------------------------------- + # update dates with newyear + for handle in self.family_map.keys(): + family = self.family_map[handle] + (junk_handle, gramps_id, father_handle, mother_handle, + child_ref_list, the_type, event_ref_list, media_list, + attribute_list, lds_seal_list, source_list, note_list, + change, marker, private) = family + new_child_ref_list = new_child_ref_list_14(child_ref_list) + new_media_list = new_media_list_14(media_list) + new_source_list = new_source_list_14(source_list) + new_attribute_list = new_attribute_list_14(attribute_list) + new_seal_list = [] + for ldsord in lds_seal_list: + (lsource_list, 
lnote_list, date, type, place, + famc, temple, status, lprivate) = ldsord + new_date = convert_date_14(date) + new_lsource_list = new_source_list_14(lsource_list) + new_seal_list.append( (new_lsource_list, lnote_list, new_date, type, + place, famc, temple, status, lprivate)) + + new_family = (junk_handle, gramps_id, father_handle, mother_handle, + new_child_ref_list, the_type, event_ref_list, new_media_list, + new_attribute_list, new_seal_list, new_source_list, note_list, + change, marker, private) + + with BSDDBTxn(self.env, self.family_map) as txn: + txn.put(str(handle), new_family) + self.update() + + # --------------------------------- + # Modify Repository + # --------------------------------- + # update dates with newyear + for handle in self.repository_map.keys(): + repository = self.repository_map[handle] + # address + (junk_handle, gramps_id, the_type, name, note_list, + address_list, urls, change, marker, private) = repository + + new_address_list = [] + for address in address_list: + (privacy, asource_list, anote_list, date, location) = address + new_date = convert_date_14(date) + new_asource_list = new_source_list_14(asource_list) + new_address_list.append((privacy, new_asource_list, anote_list, + new_date, location)) + + new_repository = (junk_handle, gramps_id, the_type, name, note_list, + new_address_list, urls, change, marker, private) + + with BSDDBTxn(self.env, self.repository_map) as txn: + txn.put(str(handle), new_repository) + self.update() + + # --------------------------------- + # Modify Media + # --------------------------------- + for media_handle in self.media_map.keys(): + media = self.media_map[media_handle] + (handle, gramps_id, path, mime, desc, + attribute_list, source_list, note_list, change, + date, marker, private) = media + new_source_list = new_source_list_14(source_list) + new_date = convert_date_14(date) + new_media = (handle, gramps_id, path, mime, desc, + attribute_list, new_source_list, note_list, change, + new_date, 
marker, private) + + with BSDDBTxn(self.env, self.media_map) as txn: + txn.put(str(handle), new_media) + self.update() + + # --------------------------------- + # Modify Place + # --------------------------------- + for place_handle in self.place_map.keys(): + place = self.place_map[place_handle] + (handle, gramps_id, title, long, lat, + main_loc, alt_loc, urls, media_list, source_list, note_list, + change, marker, private) = place + new_media_list = new_media_list_14(media_list) + new_source_list = new_source_list_14(source_list) + new_place = (handle, gramps_id, title, long, lat, + main_loc, alt_loc, urls, new_media_list, + new_source_list, note_list, change, marker, private) + + with BSDDBTxn(self.env, self.place_map) as txn: + txn.put(str(handle), new_place) + self.update() + + # --------------------------------- + # Modify Source + # --------------------------------- + for source_handle in self.source_map.keys(): + source = self.source_map[source_handle] + (handle, gramps_id, title, author, + pubinfo, note_list, media_list, + abbrev, change, datamap, reporef_list, + marker, private) = source + new_media_list = new_media_list_14(media_list) + new_source = (handle, gramps_id, title, author, + pubinfo, note_list, new_media_list, + abbrev, change, datamap, reporef_list, + marker, private) + + with BSDDBTxn(self.env, self.source_map) as txn: + txn.put(str(handle), new_source) + self.update() + + # Bump up database version. Separate transaction to save metadata. 
+ with BSDDBTxn(self.env, self.metadata) as txn: + txn.put('version', 14) + +def new_source_list_14(source_list): + new_source_list = [] + for source in source_list: + (date, private, note_list, confidence, ref, page) = source + new_date = convert_date_14(date) + new_source_list.append((new_date, private, note_list, confidence, ref, page)) + return new_source_list + +def new_attribute_list_14(attribute_list): + new_attribute_list = [] + for attribute in attribute_list: + (private, asource_list, note_list, the_type, value) = attribute + new_asource_list = new_source_list_14(asource_list) + new_attribute_list.append((private, new_asource_list, note_list, the_type, value)) + return new_attribute_list + +def new_media_list_14(media_list): + # --------------------------------- + # Event Media list + # --------------------------------- + new_media_list = [] + for media in media_list: + (private, source_list, note_list,attribute_list,ref,role) = media + new_source_list = new_source_list_14(source_list) + new_attribute_list = new_attribute_list_14(attribute_list) + new_media_list.append((private, new_source_list, note_list, new_attribute_list, ref, role)) + return new_media_list + +def new_person_ref_list_14(person_ref_list): + new_person_ref_list = [] + for person_ref in person_ref_list: + (private, source_list, note_list, ref, rel) = person_ref + new_source_list = new_source_list_14(source_list) + new_person_ref_list.append((private, new_source_list, note_list, ref, rel)) + return new_person_ref_list + +def new_child_ref_list_14(child_ref_list): + new_child_ref_list = [] + for data in child_ref_list: + (private, source_list, note_list, ref, frel, mrel) = data + new_source_list = new_source_list_14(source_list) + new_child_ref_list.append((private, new_source_list, note_list, ref, frel, mrel)) + return new_child_ref_list + +def convert_date_14(date): + if date: + (calendar, modifier, quality, dateval, text, sortval) = date + return (calendar, modifier, quality, dateval, 
text, sortval, 0) + else: + return None + +def convert_name_14(name): + (privacy, source_list, note_list, date, + first_name, surname, suffix, title, + name_type, prefix, patronymic, + group_as, sort_as, display_as, call) = name + new_date = convert_date_14(date) + new_source_list = new_source_list_14(source_list) + return (privacy, new_source_list, note_list, new_date, + first_name, surname, suffix, title, + name_type, prefix, patronymic, + group_as, sort_as, display_as, call) diff --git a/src/gen/db/write.py b/src/gen/db/write.py new file mode 100644 index 000000000..6755e5e2c --- /dev/null +++ b/src/gen/db/write.py @@ -0,0 +1,1857 @@ +# +# Gramps - a GTK+/GNOME based genealogy program +# +# Copyright (C) 2000-2008 Donald N. Allingham +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +# + +# $Id: write.py 12672 2009-06-16 15:49:17Z gbritton $ + +""" +Provide the Berkeley DB (DBDir) database backend for GRAMPS. 
+This is used since GRAMPS version 3.0 +""" + +#------------------------------------------------------------------------- +# +# Standard python modules +# +#------------------------------------------------------------------------- +from __future__ import with_statement +import cPickle as pickle +import os +import sys +import time +import locale +import bisect +from types import InstanceType + +from gettext import gettext as _ +from bsddb import dbshelve, db +import logging +from sys import maxint + +#------------------------------------------------------------------------- +# +# Gramps modules +# +#------------------------------------------------------------------------- +from gen.lib import (GenderStats, Person, Family, Event, Place, Source, + MediaObject, Repository, Note) +from gen.db import (GrampsDbRead, BSDDBTxn) +from grampsdbtxn import GrampsDbTxn +from undoredo import GrampsDbUndoBSDDB as GrampsDbUndo +from gen.db.dbconst import * +from gen.utils.callback import Callback +from BasicUtils import UpdateCallback +from gen.db.cursor import GrampsCursor +from gen.db.exceptions import FileVersionError, FileVersionDeclineToUpgrade +import Errors +from QuestionDialog import QuestionDialog2 + +_LOG = logging.getLogger(DBLOGNAME) +_MINVERSION = 9 +_DBVERSION = 14 + +IDTRANS = "person_id" +FIDTRANS = "family_id" +PIDTRANS = "place_id" +OIDTRANS = "media_id" +EIDTRANS = "event_id" +RIDTRANS = "repo_id" +NIDTRANS = "note_id" +SIDTRANS = "source_id" +SURNAMES = "surnames" +NAME_GROUP = "name_group" +META = "meta_data" + +FAMILY_TBL = "family" +PLACES_TBL = "place" +SOURCES_TBL = "source" +MEDIA_TBL = "media" +EVENTS_TBL = "event" +PERSON_TBL = "person" +REPO_TBL = "repo" +NOTE_TBL = "note" + +REF_MAP = "reference_map" +REF_PRI = "primary_map" +REF_REF = "referenced_map" + +DBERRS = (db.DBRunRecoveryError, db.DBAccessError, + db.DBPageNotFoundError, db.DBInvalidArgError) + +# The following two dictionaries provide fast translation +# between the primary class names and 
the keys used to reference +# these classes in the database tables. Beware that changing +# these maps or modifying the values of the keys will break +# existing databases. + +CLASS_TO_KEY_MAP = {Person.__name__: PERSON_KEY, + Family.__name__: FAMILY_KEY, + Source.__name__: SOURCE_KEY, + Event.__name__: EVENT_KEY, + MediaObject.__name__: MEDIA_KEY, + Place.__name__: PLACE_KEY, + Repository.__name__:REPOSITORY_KEY, + Note.__name__: NOTE_KEY} + +KEY_TO_CLASS_MAP = {PERSON_KEY: Person.__name__, + FAMILY_KEY: Family.__name__, + SOURCE_KEY: Source.__name__, + EVENT_KEY: Event.__name__, + MEDIA_KEY: MediaObject.__name__, + PLACE_KEY: Place.__name__, + REPOSITORY_KEY: Repository.__name__, + NOTE_KEY: Note.__name__} + +#------------------------------------------------------------------------- +# +# Helper functions +# +#------------------------------------------------------------------------- + +def find_surname(key, data): + return str(data[3][5]) + +def find_idmap(key, data): + return str(data[1]) + +# Secondary database key lookups for reference_map table +# reference_map data values are of the form: +# ((primary_object_class_name, primary_object_handle), +# (referenced_object_class_name, referenced_object_handle)) + +def find_primary_handle(key, data): + return str((data)[0][1]) + +def find_referenced_handle(key, data): + return str((data)[1][1]) + +#------------------------------------------------------------------------- +# +# GrampsDBDirCursor +# +#------------------------------------------------------------------------- +class GrampsDBDirCursor(GrampsCursor): + + def __init__(self, source, txn=None): + self.cursor = source.db.cursor(txn) + self.source = source + +#------------------------------------------------------------------------- +# +# GrampsDBDirAssocCursor +# +#------------------------------------------------------------------------- +class GrampsDBDirAssocCursor(GrampsCursor): + + def __init__(self, source, txn=None): + self.cursor = source.cursor(txn) + 
self.source = source + +#------------------------------------------------------------------------- +# +# GrampsDBDir +# +#------------------------------------------------------------------------- +class GrampsDBDir(GrampsDbRead, Callback, UpdateCallback): + """ + GRAMPS database write access object. + """ + + # Set up dictionary for callback signal handler + # --------------------------------------------- + # 1. Signals for primary objects + __signals__ = dict((obj+'-'+op, signal) + for obj in + ['person', 'family', 'event', 'place', + 'source', 'media', 'note', 'repository'] + for op, signal in zip( + ['add', 'update', 'delete', 'rebuild'], + [(list,), (list,), (list,), None] + ) + ) + + # 2. Signals for long operations + __signals__.update(('long-op-'+op, signal) for op, signal in zip( + ['start', 'heartbeat', 'end'], + [(object,), None, None] + )) + + # 3. Special signal for change in home person + __signals__['home-person-changed'] = None + + def __init__(self): + """Create a new GrampsDB.""" + + self.txn = None + GrampsDbRead.__init__(self) + Callback.__init__(self) + self.secondary_connected = False + self.has_changed = False + + def __open_db(self, file_name, table_name, dbtype=db.DB_HASH, flags=0): + dbmap = db.DB(self.env) + dbmap.set_pagesize(DBPAGE) + dbmap.set_flags(flags) + + fname = os.path.join(file_name, table_name + DBEXT) + + if self.readonly: + dbmap.open(fname, table_name, dbtype, DBFLAGS_R) + else: + dbmap.open(fname, table_name, dbtype, DBFLAGS_O, DBMODE) + return dbmap + + def __open_shelf(self, file_name, table_name, dbtype=db.DB_HASH): + dbmap = dbshelve.DBShelf(self.env) + dbmap.db.set_pagesize(DBPAGE) + + fname = os.path.join(file_name, table_name + DBEXT) + + if self.readonly: + dbmap.open(fname, table_name, dbtype, DBFLAGS_R) + else: + dbmap.open(fname, table_name, dbtype, DBFLAGS_O, DBMODE) + return dbmap + + def __all_handles(self, table): + return table.keys(self.txn) + + def __log_error(self): + mypath = 
os.path.join(self.get_save_path(),DBRECOVFN) + ofile = open(mypath, "w") + ofile.close() + try: + clear_lock_file(self.get_save_path()) + except: + pass + + _log_error = __log_error + + # cursors for lookups in the reference_map for back reference + # lookups. The reference_map has three indexes: + # the main index: a tuple of (primary_handle, referenced_handle) + # the primary_handle index: the primary_handle + # the referenced_handle index: the referenced_handle + # the main index is unique, the others allow duplicate entries. + + def get_reference_map_cursor(self): + try: + return GrampsDBDirAssocCursor(self.reference_map, self.txn) + except DBERRS, msg: + self.__log_error() + raise Errors.DbError(msg) + + def get_reference_map_primary_cursor(self): + try: + return GrampsDBDirAssocCursor(self.reference_map_primary_map, + self.txn) + except DBERRS, msg: + self.__log_error() + raise Errors.DbError(msg) + + def get_reference_map_referenced_cursor(self): + try: + return GrampsDBDirAssocCursor(self.reference_map_referenced_map, + self.txn) + except DBERRS, msg: + self.__log_error() + raise Errors.DbError(msg) + + # These are overriding the GrampsDbRead's methods of saving metadata + # because we now have txn-capable metadata table + def set_default_person_handle(self, handle): + try: + return self.__set_default_person_handle(handle) + except DBERRS, msg: + self.__log_error() + raise Errors.DbError(msg) + + def __set_default_person_handle(self, handle): + """Set the default Person to the passed instance.""" + if not self.readonly: + # Start transaction + with BSDDBTxn(self.env, self.metadata) as txn: + txn.put('default', str(handle)) + self.emit('home-person-changed') + + def get_default_person(self): + try: + return self.__get_default_person() + except DBERRS, msg: + self.__log_error() + raise Errors.DbError(msg) + + def __get_default_person(self): + """Return the default Person of the database.""" + person = self.get_person_from_handle(self.get_default_handle()) + 
if person: + return person + elif (self.metadata) and (not self.readonly): + # Start transaction + with BSDDBTxn(self.env, self.metadata) as txn: + txn.put('default', None) + return None + + def set_mediapath(self, path): + """Set the default media path for database, path should be utf-8.""" + if self.metadata and not self.readonly: + # Start transaction + with BSDDBTxn(self.env, self.metadata) as txn: + txn.put('mediapath', path) + + def set_column_order(self, col_list, name): + if self.metadata and not self.readonly: + # Start transaction + with BSDDBTxn(self.env, self.metadata) as txn: + txn.put(name, col_list) + + def version_supported(self): + try: + dbversion = self.metadata.get('version', default=0) + return ((dbversion <= _DBVERSION) and (dbversion >= _MINVERSION)) + except DBERRS, msg: + self.__log_error() + raise Errors.DbError(msg) + + def need_upgrade(self): + try: + dbversion = self.metadata.get('version', default=0) + return not self.readonly and dbversion < _DBVERSION + except DBERRS, msg: + self.__log_error() + raise Errors.DbError(msg) + + def load(self, name, callback, mode=DBMODE_W): + try: + if self.__check_readonly(name): + mode = DBMODE_R + else: + write_lock_file(name) + return self.__load(name, callback, mode) + + except DBERRS, msg: + self.__log_error() + raise Errors.DbError(msg) + + def __check_readonly(self, name): + """ + Return True if we don't have read/write access to the database, + otherwise return False (that is, we DO have read/write access) + """ + + # See if we write to the target directory at all? + if not os.access(name, os.W_OK): + return True + + # See if we lack write access to any files in the directory + for base in [FAMILY_TBL, PLACES_TBL, SOURCES_TBL, MEDIA_TBL, + EVENTS_TBL, PERSON_TBL, REPO_TBL, NOTE_TBL, REF_MAP, META]: + path = os.path.join(name, base + DBEXT) + if os.path.isfile(path) and not os.access(path, os.W_OK): + return True + + # All tests passed. 
Inform caller that we are NOT read only + return False + + def __load(self, name, callback, mode=DBMODE_W): + + if self.db_is_open: + self.close() + + self.readonly = mode == DBMODE_R + #super(GrampsDbRead, self).load(name, callback, mode) + if callback: + callback(12) + + # Save full path and base file name + self.full_name = os.path.abspath(name) + self.path = self.full_name + self.brief_name = os.path.basename(name) + + # Set up database environment + self.env = db.DBEnv() + self.env.set_cachesize(0, DBCACHE) + + # These env settings are only needed for Txn environment + self.env.set_lk_max_locks(DBLOCKS) + self.env.set_lk_max_objects(DBOBJECTS) + + self.set_auto_remove() + + # The DB_PRIVATE flag must go if we ever move to multi-user setup + env_flags = db.DB_CREATE | db.DB_PRIVATE |\ + db.DB_INIT_MPOOL | db.DB_INIT_LOCK |\ + db.DB_INIT_LOG | db.DB_INIT_TXN | db.DB_THREAD + + # As opposed to before, we always try recovery on databases + env_flags |= db.DB_RECOVER + + # Environment name is now based on the filename + env_name = name + + self.env.open(env_name, env_flags) + self.env.txn_checkpoint() + + if callback: + callback(25) + + # Process metadata + self.metadata = self.__open_shelf(self.full_name, META) + + # If we cannot work with this DB version, + # it makes no sense to go further + if not self.version_supported(): + self.__close_early() + + self.__load_metadata() + gstats = self.metadata.get('gender_stats', default=None) + + # Ensure version info in metadata + if not self.readonly: + # Start transaction + with BSDDBTxn(self.env, self.metadata) as txn: + if gstats is None: + # New database. Set up the current version. + #self.metadata.put('version', _DBVERSION, txn=the_txn) + txn.put('version', _DBVERSION) + elif 'version' not in self.metadata: + # Not new database, but the version is missing. + # Use 0, but it is likely to fail anyway. 
+ txn.put('version', 0) + + self.genderStats = GenderStats(gstats) + + # Open main tables in gramps database + db_maps = [ + ("family_map", FAMILY_TBL, db.DB_HASH), + ("place_map", PLACES_TBL, db.DB_HASH), + ("source_map", SOURCES_TBL, db.DB_HASH), + ("media_map", MEDIA_TBL, db.DB_HASH), + ("event_map", EVENTS_TBL, db.DB_HASH), + ("person_map", PERSON_TBL, db.DB_HASH), + ("repository_map", REPO_TBL, db.DB_HASH), + ("note_map", NOTE_TBL, db.DB_HASH), + ("reference_map", REF_MAP, db.DB_BTREE), + ] + + dbflags = DBFLAGS_R if self.readonly else DBFLAGS_O + for (dbmap, dbname, dbtype) in db_maps: + _db = self.__open_shelf(self.full_name, dbname, dbtype) + setattr(self, dbmap, _db) + + if callback: + callback(37) + + # Open name grouping database + self.name_group = self.__open_db(self.full_name, NAME_GROUP, + db.DB_HASH, db.DB_DUP) + + # Here we take care of any changes in the tables related to new code. + # If secondary indices change, then they should removed + # or rebuilt by upgrade as well. In any case, the + # self.secondary_connected flag should be set accordingly. 
+ + if self.need_upgrade(): + if QuestionDialog2(_("Need to upgrade database!"), + _("You cannot open this database " + "without upgrading it.\n" + "If you upgrade then you won't be able " + "to use previous versions of GRAMPS.\n" + "You might want to make a backup copy " + "first."), + _("Upgrade now"), + _("Cancel")).run(): + self.gramps_upgrade(callback) + else: + raise FileVersionDeclineToUpgrade() + + if callback: + callback(50) + + # Connect secondary indices + if not self.secondary_connected: + self.__connect_secondary() + + if callback: + callback(75) + + # Open undo database + self.__open_undodb() + self.db_is_open = True + + if callback: + callback(87) + + self.abort_possible = True + return 1 + + def __open_undodb(self): + """ + Open the undo database + """ + if not self.readonly: + self.undolog = os.path.join(self.full_name, DBUNDOFN) + self.undodb = GrampsDbUndo(self, self.undolog) + self.undodb.open() + + def __close_undodb(self): + if not self.readonly: + self.undodb.close() + + def load_from(self, other_database, filename, callback): + try: + self.load(filename, callback) + from gen.utils import db_copy + db_copy(other_database, self, callback) + return 1 + except DBERRS, msg: + self.__log_error() + raise Errors.DbError(msg) + + def __load_metadata(self): + # name display formats + self.name_formats = self.metadata.get('name_formats', default=[]) + # upgrade formats if they were saved in the old way + for format_ix in range(len(self.name_formats)): + format = self.name_formats[format_ix] + if len(format) == 3: + format = format + (True,) + self.name_formats[format_ix] = format + + # database owner + try: + owner_data = self.metadata.get('researcher') + if owner_data: + self.owner.unserialize(owner_data) + except ImportError: #handle problems with pre-alpha 3.0 + pass + + # bookmarks + meta = lambda meta: self.metadata.get(meta, default=[]) + + self.bookmarks.set(meta('bookmarks')) + self.family_bookmarks.set(meta('family_bookmarks')) + 
self.event_bookmarks.set(meta('event_bookmarks')) + self.source_bookmarks.set(meta('source_bookmarks')) + self.repo_bookmarks.set(meta('repo_bookmarks')) + self.media_bookmarks.set(meta('media_bookmarks')) + self.place_bookmarks.set(meta('place_bookmarks')) + self.note_bookmarks.set(meta('note_bookmarks')) + + # Custom type values + self.family_event_names = set(meta('fevent_names')) + self.individual_event_names = set(meta('pevent_names')) + self.family_attributes = set(meta('fattr_names')) + self.individual_attributes = set(meta('pattr_names')) + self.marker_names = set(meta('marker_names')) + self.child_ref_types = set(meta('child_refs')) + self.family_rel_types = set(meta('family_rels')) + self.event_role_names = set(meta('event_roles')) + self.name_types = set(meta('name_types')) + self.repository_types = set(meta('repo_types')) + self.note_types = set(meta('note_types')) + self.source_media_types = set(meta('sm_types')) + self.url_types = set(meta('url_types')) + self.media_attributes = set(meta('mattr_names')) + + # surname list + self.surname_list = meta('surname_list') + + def __connect_secondary(self): + """ + Connect or creates secondary index tables. + + It assumes that the tables either exist and are in the right + format or do not exist (in which case they get created). + + It is the responsibility of upgrade code to either create + or remove invalid secondary index tables. 
+ """ + + # index tables used just for speeding up searches + self.surnames = self.__open_db(self.full_name, SURNAMES, db.DB_BTREE, + db.DB_DUP | db.DB_DUPSORT) + + db_maps = [ + ("id_trans", IDTRANS, db.DB_HASH, 0), + ("fid_trans", FIDTRANS, db.DB_HASH, 0), + ("eid_trans", EIDTRANS, db.DB_HASH, 0), + ("pid_trans", PIDTRANS, db.DB_HASH, 0), + ("sid_trans", SIDTRANS, db.DB_HASH, 0), + ("oid_trans", OIDTRANS, db.DB_HASH, 0), + ("rid_trans", RIDTRANS, db.DB_HASH, 0), + ("nid_trans", NIDTRANS, db.DB_HASH, 0), + ("reference_map_primary_map", REF_PRI, db.DB_BTREE, 0), + ("reference_map_referenced_map", REF_REF, db.DB_BTREE, db.DB_DUPSORT), + ] + + for (dbmap, dbname, dbtype, dbflags) in db_maps: + _db = self.__open_db(self.full_name, dbname, dbtype, + db.DB_DUP | dbflags) + setattr(self, dbmap, _db) + + if not self.readonly: + + assoc = [ + (self.person_map, self.surnames, find_surname), + (self.person_map, self.id_trans, find_idmap), + (self.family_map, self.fid_trans, find_idmap), + (self.event_map, self.eid_trans, find_idmap), + (self.place_map, self.pid_trans, find_idmap), + (self.source_map, self.sid_trans, find_idmap), + (self.media_map, self.oid_trans, find_idmap), + (self.repository_map, self.rid_trans, find_idmap), + (self.note_map, self.nid_trans, find_idmap), + (self.reference_map, self.reference_map_primary_map, + find_primary_handle), + (self.reference_map, self.reference_map_referenced_map, + find_referenced_handle), + ] + + flags = DBFLAGS_R if self.readonly else DBFLAGS_O + for (dbmap, a_map, a_find) in assoc: + dbmap.associate(a_map, a_find, flags=flags) + + self.secondary_connected = True + self.smap_index = len(self.source_map) + self.emap_index = len(self.event_map) + self.pmap_index = len(self.person_map) + self.fmap_index = len(self.family_map) + self.lmap_index = len(self.place_map) + self.omap_index = len(self.media_map) + self.rmap_index = len(self.repository_map) + self.nmap_index = len(self.note_map) + + def rebuild_secondary(self, 
callback=None): + try: + self.__rebuild_secondary(callback) + except DBERRS, msg: + self.__log_error() + raise Errors.DbError(msg) + + def __rebuild_secondary(self, callback=None): + if self.readonly: + return + + table_flags = DBFLAGS_O + + # remove existing secondary indices + + items = [ + ( self.id_trans, IDTRANS ), + ( self.surnames, SURNAMES ), + ( self.fid_trans, FIDTRANS ), + ( self.pid_trans, PIDTRANS ), + ( self.oid_trans, OIDTRANS ), + ( self.eid_trans, EIDTRANS ), + ( self.rid_trans, RIDTRANS ), + ( self.nid_trans, NIDTRANS ), + ( self.reference_map_primary_map, REF_PRI), + ( self.reference_map_referenced_map, REF_REF), + ] + + index = 1 + for (database, name) in items: + database.close() + _db = db.DB(self.env) + _db.remove(_mkname(self.full_name, name), name) + if callback: + callback(index) + index += 1 + + if callback: + callback(11) + + # Set flag saying that we have removed secondary indices + # and then call the creating routine + self.secondary_connected = False + self.__connect_secondary() + if callback: + callback(12) + + def find_backlink_handles(self, handle, include_classes=None): + try: + return self.__find_backlink_handles(handle, include_classes) + except DBERRS, msg: + self.__log_error() + raise Errors.DbError(msg) + + def __find_backlink_handles(self, handle, include_classes=None): + """ + Find all objects that hold a reference to the object handle. + + Returns an interator over a list of (class_name, handle) tuples. + + @param handle: handle of the object to search for. + @type handle: database handle + @param include_classes: list of class names to include in the results. + Default: None means include all classes. + @type include_classes: list of class names + + Note that this is a generator function, it returns a iterator for + use in loops. 
If you want a list of the results use: + + > result_list = list(find_backlink_handles(handle)) + """ + + # Use the secondary index to locate all the reference_map entries + # that include a reference to the object we are looking for. + referenced_cur = self.get_reference_map_referenced_cursor() + + try: + ret = referenced_cur.set(handle) + except: + ret = None + + while (ret is not None): + (key, data) = ret + + # data values are of the form: + # ((primary_object_class_name, primary_object_handle), + # (referenced_object_class_name, referenced_object_handle)) + # so we need the first tuple to give us the type to compare + + ### FIXME: this is a dirty hack that works without no + ### sensible explanation. For some reason, for a readonly + ### database, secondary index returns a primary table key + ### corresponding to the data, not the data. + if self.readonly: + data = self.reference_map.get(data) + else: + data = pickle.loads(data) + + key, handle = data[0][:2] + name = KEY_TO_CLASS_MAP[key] + assert name == KEY_TO_CLASS_MAP[data[0][0]] + assert handle == data[0][1] + if (include_classes is None or + name in include_classes): + yield (name, handle) + + ret = referenced_cur.next_dup() + + referenced_cur.close() + + def delete_primary_from_reference_map(self, handle, transaction, txn=None): + """ + Remove all references to the primary object from the reference_map. + """ + primary_cur = self.get_reference_map_primary_cursor() + + try: + ret = primary_cur.set(handle) + except: + ret = None + + remove_list = set() + while (ret is not None): + (key, data) = ret + + # data values are of the form: + # ((primary_object_class_name, primary_object_handle), + # (referenced_object_class_name, referenced_object_handle)) + + # so we need the second tuple give us a reference that we can + # combine with the primary_handle to get the main key. 
+ + main_key = (handle, pickle.loads(data)[1][1]) + + # The trick is not to remove while inside the cursor, + # but collect them all and remove after the cursor is closed + remove_list.add(main_key) + + ret = primary_cur.next_dup() + + primary_cur.close() + + # Now that the cursor is closed, we can remove things + for main_key in remove_list: + self.__remove_reference(main_key, transaction, txn) + + def update_reference_map(self, obj, transaction, txn=None): + """ + If txn is given, then changes are written right away using txn. + """ + + # Add references to the reference_map for all primary object referenced + # from the primary object 'obj' or any of its secondary objects. + handle = obj.handle + update = self.reference_map_primary_map.has_key(str(handle)) + + if update: + # First thing to do is get hold of all rows in the reference_map + # table that hold a reference from this primary obj. This means + # finding all the rows that have this handle somewhere in the + # list of (class_name, handle) pairs. + # The primary_map sec index allows us to look this up quickly. 
+ + existing_references = set() + + primary_cur = self.get_reference_map_primary_cursor() + + try: + ret = primary_cur.set(handle) + except: + ret = None + + while (ret is not None): + (key, data) = ret + + # data values are of the form: + # ((primary_object_class_name, primary_object_handle), + # (referenced_object_class_name, referenced_object_handle)) + # so we need the second tuple give us a reference that we can + # compare with what is returned from + # get_referenced_handles_recursively + + # secondary DBs are not DBShelf's, so we need to do pickling + # and unpicking ourselves here + existing_reference = pickle.loads(data)[1] + existing_references.add( + (KEY_TO_CLASS_MAP[existing_reference[0]], + existing_reference[1])) + ret = primary_cur.next_dup() + + primary_cur.close() + + # Once we have the list of rows that already have a reference + # we need to compare it with the list of objects that are + # still references from the primary object. + + current_references = set(obj.get_referenced_handles_recursively()) + + no_longer_required_references = existing_references.difference( + current_references) + + new_references = current_references.difference(existing_references) + + else: + # No existing refs are found: + # all we have is new, nothing to remove + no_longer_required_references = set() + new_references = set(obj.get_referenced_handles_recursively()) + + # handle addition of new references + for (ref_class_name, ref_handle) in new_references: + data = ((CLASS_TO_KEY_MAP[obj.__class__.__name__], handle), + (CLASS_TO_KEY_MAP[ref_class_name], ref_handle),) + self.__add_reference((handle, ref_handle), data, transaction, txn) + + # handle deletion of old references + for (ref_class_name, ref_handle) in no_longer_required_references: + try: + self.__remove_reference((handle, ref_handle), transaction, txn) + except: + # ignore missing old reference + pass + + def __remove_reference(self, key, transaction, txn=None): + """ + Remove the reference specified by 
the key, preserving the change in + the passed transaction. + """ + if not self.readonly: + if transaction.batch: + self.reference_map.delete(str(key), txn=txn) + else: + old_data = self.reference_map.get(str(key), txn=self.txn) + transaction.add(REFERENCE_KEY, TXNDEL, str(key), old_data, None) + #transaction.reference_del.append(str(key)) + + def __add_reference(self, key, data, transaction, txn=None): + """ + Add the reference specified by the key and the data, preserving the + change in the passed transaction. + """ + + if self.readonly or not key: + return + + if transaction.batch: + self.reference_map.put(str(key), data, txn=txn) + else: + transaction.add(REFERENCE_KEY, TXNADD, str(key), None, data) + #transaction.reference_add.append((str(key), data)) + + def reindex_reference_map(self, callback): + try: + self.__reindex_reference_map(callback) + except DBERRS, msg: + self.__log_error() + raise Errors.DbError(msg) + + def __reindex_reference_map(self, callback): + """ + Reindex all primary records in the database. + + This will be a slow process for large databases. + """ + + # First, remove the reference map and related tables + + db_maps = [ + ("reference_map_referenced_map", REF_REF), + ("reference_map_primary_map", REF_PRI), + ("reference_map", REF_MAP), + ] + + for index, (dbmap, dbname) in enumerate(db_maps): + getattr(self, dbmap).close() + _db = db.DB(self.env) + _db.remove(_mkname(self.full_name, dbname), dbname) + callback(index+1) + + # Open reference_map and primary map + self.reference_map = self.__open_shelf(self.full_name, REF_MAP, + dbtype=db.DB_BTREE) + + self.reference_map_primary_map = self.__open_db(self.full_name, + REF_PRI, db.DB_BTREE, db.DB_DUP) + + self.reference_map.associate(self.reference_map_primary_map, + find_primary_handle, DBFLAGS_O) + + # Make a tuple of the functions and classes that we need for + # each of the primary object tables. 
+ + transaction = self.transaction_begin(batch=True, no_magic=True) + callback(4) + + primary_table = ( + (self.get_person_cursor, Person), + (self.get_family_cursor, Family), + (self.get_event_cursor, Event), + (self.get_place_cursor, Place), + (self.get_source_cursor, Source), + (self.get_media_cursor, MediaObject), + (self.get_repository_cursor, Repository), + (self.get_note_cursor, Note), + ) + + # Now we use the functions and classes defined above + # to loop through each of the primary object tables. + + for cursor_func, class_func in primary_table: + with cursor_func() as cursor: + for found_handle, val in cursor: + obj = class_func() + obj.unserialize(val) + with BSDDBTxn(self.env) as txn: + self.update_reference_map(obj, transaction, txn.txn) + + callback(5) + self.transaction_commit(transaction, _("Rebuild reference map")) + + self.reference_map_referenced_map = self.__open_db(self.full_name, + REF_REF, db.DB_BTREE, db.DB_DUP|db.DB_DUPSORT) + + flags = DBFLAGS_R if self.readonly else DBFLAGS_O + self.reference_map.associate(self.reference_map_referenced_map, + find_referenced_handle, flags=flags) + callback(6) + + def __close_metadata(self): + if not self.readonly: + # Start transaction + with BSDDBTxn(self.env, self.metadata) as txn: + + # name display formats + txn.put('name_formats', self.name_formats) + + # database owner + owner_data = self.owner.serialize() + txn.put('researcher', owner_data) + + # bookmarks + txn.put('bookmarks', self.bookmarks.get()) + txn.put('family_bookmarks', self.family_bookmarks.get()) + txn.put('event_bookmarks', self.event_bookmarks.get()) + txn.put('source_bookmarks', self.source_bookmarks.get()) + txn.put('place_bookmarks', self.place_bookmarks.get()) + txn.put('repo_bookmarks', self.repo_bookmarks.get()) + txn.put('media_bookmarks', self.media_bookmarks.get()) + txn.put('note_bookmarks', self.note_bookmarks.get()) + + # gender stats + txn.put('gender_stats', self.genderStats.save_stats()) + + # Custom type values + 
txn.put('fevent_names', list(self.family_event_names)) + txn.put('pevent_names', list(self.individual_event_names)) + txn.put('fattr_names', list(self.family_attributes)) + txn.put('pattr_names', list(self.individual_attributes)) + txn.put('marker_names', list(self.marker_names)) + txn.put('child_refs', list(self.child_ref_types)) + txn.put('family_rels', list(self.family_rel_types)) + txn.put('event_roles', list(self.event_role_names)) + txn.put('name_types', list(self.name_types)) + txn.put('repo_types', list(self.repository_types)) + txn.put('note_types', list(self.note_types)) + txn.put('sm_types', list(self.source_media_types)) + txn.put('url_types', list(self.url_types)) + txn.put('mattr_names', list(self.media_attributes)) + + # name display formats + txn.put('surname_list', self.surname_list) + + self.metadata.close() + + def __close_early(self): + """ + Bail out if the incompatible version is discovered: + * close cleanly to not damage data/env + * raise exception + """ + self.metadata.close() + self.env.close() + self.metadata = None + self.env = None + self.db_is_open = False + raise FileVersionError( + _("The database version is not supported by this " + "version of GRAMPS.\nPlease upgrade to the " + "corresponding version or use XML for porting " + "data between different database versions.")) + + def close(self): + try: + self.__close() + clear_lock_file(self.get_save_path()) + except DBERRS, msg: + self.__log_error() + raise Errors.DbError(msg) + except IOError: + pass + + def __close(self): + if not self.db_is_open: + return + + self.env.txn_checkpoint() + + self.__close_metadata() + self.name_group.close() + self.surnames.close() + self.id_trans.close() + self.fid_trans.close() + self.eid_trans.close() + self.rid_trans.close() + self.nid_trans.close() + self.oid_trans.close() + self.sid_trans.close() + self.pid_trans.close() + self.reference_map_primary_map.close() + self.reference_map_referenced_map.close() + self.reference_map.close() + 
self.secondary_connected = False + + # primary databases must be closed after secondary indexes, or + # we run into problems with any active cursors. + self.person_map.close() + self.family_map.close() + self.repository_map.close() + self.note_map.close() + self.place_map.close() + self.source_map.close() + self.media_map.close() + self.event_map.close() + self.env.close() + + try: + self.__close_undodb() + except db.DBNoSuchFileError: + pass + + self.person_map = None + self.family_map = None + self.repository_map = None + self.note_map = None + self.place_map = None + self.source_map = None + self.media_map = None + self.event_map = None + self.surnames = None + self.name_group = None + self.env = None + self.metadata = None + self.db_is_open = False + + def create_id(self): + return "%08x%08x" % ( int(time.time()*10000), + self.rand.randint(0, maxint)) + + def __add_object(self, obj, transaction, find_next_func, commit_func): + if find_next_func and not obj.gramps_id: + obj.gramps_id = find_next_func() + if not obj.handle: + obj.handle = self.create_id() + commit_func(obj, transaction) + if obj.__class__.__name__ == 'Person': + self.genderStats.count_person (obj) + return obj.handle + + def add_person(self, person, transaction, set_gid=True): + """ + Add a Person to the database, assigning internal IDs if they have + not already been defined. + + If not set_gid, then gramps_id is not set. + """ + return self.__add_object(person, transaction, + self.find_next_person_gramps_id if set_gid else None, + self.commit_person) + + def add_family(self, family, transaction, set_gid=True): + """ + Add a Family to the database, assigning internal IDs if they have + not already been defined. + + If not set_gid, then gramps_id is not set. 
+ """ + return self.__add_object(family, transaction, + self.find_next_family_gramps_id if set_gid else None, + self.commit_family) + + def add_source(self, source, transaction, set_gid=True): + """ + Add a Source to the database, assigning internal IDs if they have + not already been defined. + + If not set_gid, then gramps_id is not set. + """ + return self.__add_object(source, transaction, + self.find_next_source_gramps_id if set_gid else None, + self.commit_source) + + def add_event(self, event, transaction, set_gid=True): + """ + Add an Event to the database, assigning internal IDs if they have + not already been defined. + + If not set_gid, then gramps_id is not set. + """ + return self.__add_object(event, transaction, + self.find_next_event_gramps_id if set_gid else None, + self.commit_event) + + def add_person_event(self, event, transaction): + """ + Add an Event to the database, assigning internal IDs if they have + not already been defined. + """ + if event.type.is_custom(): + self.individual_event_names.add(str(event.type)) + return self.add_event(event, transaction) + + def add_family_event(self, event, transaction): + """ + Add an Event to the database, assigning internal IDs if they have + not already been defined. + """ + if event.type.is_custom(): + self.family_event_names.add(str(event.type)) + return self.add_event(event, transaction) + + def add_place(self, place, transaction, set_gid=True): + """ + Add a Place to the database, assigning internal IDs if they have + not already been defined. + + If not set_gid, then gramps_id is not set. + """ + return self.__add_object(place, transaction, + self.find_next_place_gramps_id if set_gid else None, + self.commit_place) + + def add_object(self, obj, transaction, set_gid=True): + """ + Add a MediaObject to the database, assigning internal IDs if they have + not already been defined. + + If not set_gid, then gramps_id is not set. 
+ """ + return self.__add_object(obj, transaction, + self.find_next_object_gramps_id if set_gid else None, + self.commit_media_object) + + def add_repository(self, obj, transaction, set_gid=True): + """ + Add a Repository to the database, assigning internal IDs if they have + not already been defined. + + If not set_gid, then gramps_id is not set. + """ + return self.__add_object(obj, transaction, + self.find_next_repository_gramps_id if set_gid else None, + self.commit_repository) + + def add_note(self, obj, transaction, set_gid=True): + """ + Add a Note to the database, assigning internal IDs if they have + not already been defined. + + If not set_gid, then gramps_id is not set. + """ + return self.__add_object(obj, transaction, + self.find_next_note_gramps_id if set_gid else None, + self.commit_note) + + def do_remove_object(self, handle, transaction, data_map, key): + if self.readonly or not handle: + return + + handle = str(handle) + if transaction.batch: + with BSDDBTxn(self.env, data_map) as txn: + self.delete_primary_from_reference_map(handle, transaction, + txn=txn.txn) + txn.delete(handle) + else: + self.delete_primary_from_reference_map(handle, transaction) + old_data = data_map.get(handle, txn=self.txn) + transaction.add(key, TXNDEL, handle, old_data, None) + #del_list.append(handle) + + def remove_person(self, handle, transaction): + """ + Remove the Person specified by the database handle from the database, + preserving the change in the passed transaction. 
+ """ + + if self.readonly or not handle: + return + self.delete_primary_from_reference_map(handle, transaction) + person = self.get_person_from_handle(handle) + self.genderStats.uncount_person (person) + self.remove_from_surname_list(person) + if transaction.batch: + with BSDDBTxn(self.env, self.person_map) as txn: + txn.delete(handle) + else: + transaction.add(PERSON_KEY, TXNDEL, handle, person.serialize(), None) + #transaction.person_del.append(str(handle)) + + def remove_source(self, handle, transaction): + """ + Remove the Source specified by the database handle from the + database, preserving the change in the passed transaction. + """ + self.do_remove_object(handle, transaction, self.source_map, + SOURCE_KEY) + + def remove_event(self, handle, transaction): + """ + Remove the Event specified by the database handle from the + database, preserving the change in the passed transaction. + """ + self.do_remove_object(handle, transaction, self.event_map, + EVENT_KEY) + + def remove_object(self, handle, transaction): + """ + Remove the MediaObjectPerson specified by the database handle from the + database, preserving the change in the passed transaction. + """ + self.do_remove_object(handle, transaction, self.media_map, + MEDIA_KEY) + + def remove_place(self, handle, transaction): + """ + Remove the Place specified by the database handle from the + database, preserving the change in the passed transaction. + """ + self.do_remove_object(handle, transaction, self.place_map, + PLACE_KEY) + + def remove_family(self, handle, transaction): + """ + Remove the Family specified by the database handle from the + database, preserving the change in the passed transaction. + """ + self.do_remove_object(handle, transaction, self.family_map, + FAMILY_KEY) + + def remove_repository(self, handle, transaction): + """ + Remove the Repository specified by the database handle from the + database, preserving the change in the passed transaction. 
+ """ + self.do_remove_object(handle, transaction, self.repository_map, + REPOSITORY_KEY) + + def remove_note(self, handle, transaction): + """ + Remove the Note specified by the database handle from the + database, preserving the change in the passed transaction. + """ + self.do_remove_object(handle, transaction, self.note_map, + NOTE_KEY) + + def __set_name_group_mapping(self, name, group): + if not self.readonly: + # Start transaction + with BSDDBTxn(self.env, self.name_group) as txn: + name = str(name) + data = txn.get(name) + if data is not None: + txn.delete(name) + if group is not None: + txn.put(name, group) + self.emit('person-rebuild') + + def sort_surname_list(self): + self.surname_list.sort(key=locale.strxfrm) + + def build_surname_list(self): + try: + self.surname_list = sorted(map(unicode, set(self.surnames.keys())), key=locale.strxfrm) + except DBERRS, msg: + self.__log_error() + raise Errors.DbError(msg) + + def add_to_surname_list(self, person, batch_transaction): + if batch_transaction: + return + name = unicode(person.get_primary_name().get_surname()) + i = bisect.bisect(self.surname_list, name) + if 0 < i < len(self.surname_list): + if self.surname_list[i-1] != name: + self.surname_list.insert(i, name) + else: + self.surname_list.insert(i, name) + + def remove_from_surname_list(self, person): + """ + Check whether there are persons with the same surname left in + the database. + + If not then we need to remove the name from the list. + The function must be overridden in the derived class. 
+ """ + name = str(person.get_primary_name().get_surname()) + try: + cursor = self.surnames.cursor(txn=self.txn) + cursor.set(name) + if cursor.count() == 1: + i = bisect.bisect(self.surname_list, name) + assert 0 <= i-1 < len(self.surname_list) + del self.surname_list[i-1] + except ValueError: + pass + except DBERRS, msg: + self.__log_error() + raise Errors.DbError(msg) + finally: + cursor.close() + + def commit_base(self, obj, data_map, key, transaction, change_time): + """ + Commit the specified object to the database, storing the changes as + part of the transaction. + """ + if self.readonly or not obj or not obj.handle: + return + + obj.change = int(change_time if change_time else time.time()) + handle = str(obj.handle) + + # If this is a batch operation, just write the data + if transaction.batch: + with BSDDBTxn(self.env, data_map) as txn: + self.update_reference_map(obj, transaction, txn=txn.txn) + txn.put(handle, obj.serialize()) + old_data = None + + # Otherwise, this is a non-batch operation, so queue the transaction + else: + self.update_reference_map(obj, transaction) + old_data = data_map.get(handle, txn=self.txn) + new_data = obj.serialize() + op = TXNUPD if old_data else TXNADD + transaction.add(key, op, handle, old_data, new_data) + return old_data + + def do_commit(self, add_list, db_map): + retlist = [] + for (handle, data) in add_list: + db_map.put(handle, data, self.txn) + retlist.append(str(handle)) + return retlist + + def commit_person(self, person, transaction, change_time=None): + """ + Commit the specified Person to the database, storing the changes as + part of the transaction. 
+ """ + old_data = self.commit_base( + person, self.person_map, PERSON_KEY, transaction, change_time) + + if old_data: + old_person = Person(old_data) + + # Update gender statistics if necessary + if (old_person.gender != person.gender or + old_person.primary_name.first_name != + person.primary_name.first_name): + + self.genderStats.uncount_person(old_person) + self.genderStats.count_person(person) + + # Update surname list if necessary + if (old_person.primary_name.surname !=person.primary_name.surname): + self.remove_from_surname_list(old_person) + self.add_to_surname_list(person, transaction.batch) + else: + self.genderStats.count_person(person) + self.add_to_surname_list(person, transaction.batch) + + self.individual_attributes.update( + [str(attr.type) for attr in person.attribute_list + if attr.type.is_custom() and str(attr.type)]) + + if person.marker.is_custom(): + self.marker_names.add(str(person.marker)) + + self.event_role_names.update([str(eref.role) + for eref in person.event_ref_list + if eref.role.is_custom()]) + + self.name_types.update([str(name.type) + for name in ([person.primary_name] + + person.alternate_names) + if name.type.is_custom()]) + + self.url_types.update([str(url.type) for url in person.urls + if url.type.is_custom()]) + + attr_list = [] + for mref in person.media_list: + attr_list += [str(attr.type) for attr in mref.attribute_list + if attr.type.is_custom() and str(attr.type)] + self.media_attributes.update(attr_list) + + def commit_media_object(self, obj, transaction, change_time=None): + """ + Commit the specified MediaObject to the database, storing the changes + as part of the transaction. 
+ """ + self.commit_base(obj, self.media_map, MEDIA_KEY, + transaction, change_time) + + self.media_attributes.update( + [str(attr.type) for attr in obj.attribute_list + if attr.type.is_custom() and str(attr.type)]) + + def commit_source(self, source, transaction, change_time=None): + """ + Commit the specified Source to the database, storing the changes as + part of the transaction. + """ + self.commit_base(source, self.source_map, SOURCE_KEY, + transaction, change_time) + + self.source_media_types.update( + [str(ref.media_type) for ref in source.reporef_list + if ref.media_type.is_custom()]) + + attr_list = [] + for mref in source.media_list: + attr_list += [str(attr.type) for attr in mref.attribute_list + if attr.type.is_custom() and str(attr.type)] + self.media_attributes.update(attr_list) + + def commit_place(self, place, transaction, change_time=None): + """ + Commit the specified Place to the database, storing the changes as + part of the transaction. + """ + self.commit_base(place, self.place_map, PLACE_KEY, + transaction, change_time) + + self.url_types.update([str(url.type) for url in place.urls + if url.type.is_custom()]) + + attr_list = [] + for mref in place.media_list: + attr_list += [str(attr.type) for attr in mref.attribute_list + if attr.type.is_custom() and str(attr.type)] + self.media_attributes.update(attr_list) + + def commit_personal_event(self, event, transaction, change_time=None): + if event.type.is_custom(): + self.individual_event_names.add(str(event.type)) + self.commit_event(event, transaction, change_time) + + def commit_family_event(self, event, transaction, change_time=None): + if event.type.is_custom(): + self.family_event_names.add(str(event.type)) + self.commit_event(event, transaction, change_time) + + def commit_event(self, event, transaction, change_time=None): + """ + Commit the specified Event to the database, storing the changes as + part of the transaction. 
+ """ + self.commit_base(event, self.event_map, EVENT_KEY, + transaction, change_time) + attr_list = [] + for mref in event.media_list: + attr_list += [str(attr.type) for attr in mref.attribute_list + if attr.type.is_custom() and str(attr.type)] + self.media_attributes.update(attr_list) + + def commit_family(self, family, transaction, change_time=None): + """ + Commit the specified Family to the database, storing the changes as + part of the transaction. + """ + self.commit_base(family, self.family_map, FAMILY_KEY, + transaction, change_time) + + self.family_attributes.update( + [str(attr.type) for attr in family.attribute_list + if attr.type.is_custom() and str(attr.type)]) + + rel_list = [] + for ref in family.child_ref_list: + if ref.frel.is_custom(): + rel_list.append(str(ref.frel)) + if ref.mrel.is_custom(): + rel_list.append(str(ref.mrel)) + self.child_ref_types.update(rel_list) + + self.event_role_names.update( + [str(eref.role) for eref in family.event_ref_list + if eref.role.is_custom()]) + + if family.type.is_custom(): + self.family_rel_types.add(str(family.type)) + + attr_list = [] + for mref in family.media_list: + attr_list += [str(attr.type) for attr in mref.attribute_list + if attr.type.is_custom() and str(attr.type)] + self.media_attributes.update(attr_list) + + def commit_repository(self, repository, transaction, change_time=None): + """ + Commit the specified Repository to the database, storing the changes + as part of the transaction. + """ + self.commit_base(repository, self.repository_map, REPOSITORY_KEY, + transaction, change_time) + + if repository.type.is_custom(): + self.repository_types.add(str(repository.type)) + + self.url_types.update([str(url.type) for url in repository.urls + if url.type.is_custom()]) + + def commit_note(self, note, transaction, change_time=None): + """ + Commit the specified Note to the database, storing the changes as part + of the transaction. 
+ """ + self.commit_base(note, self.note_map, NOTE_KEY, + transaction, change_time) + + if note.type.is_custom(): + self.note_types.add(str(note.type)) + + def get_from_handle(self, handle, class_type, data_map): + try: + data = data_map.get(str(handle), txn=self.txn) + except: + data = None + # under certain circumstances during a database reload, + # data_map can be none. If so, then don't report an error + if data_map: + _LOG.error("Failed to get from handle", exc_info=True) + if data: + newobj = class_type() + newobj.unserialize(data) + return newobj + return None + + def find_from_handle(self, handle, transaction, class_type, dmap, add_func): + """ + Find a object of class_type in the database from the passed handle. + + If no object exists, a new object is added to the database. + + @return: Returns a tuple, first the object, second a bool which is True + if the object is new + @rtype: tuple + """ + obj = class_type() + handle = str(handle) + new = True + if handle in dmap: + data = dmap.get(handle, txn=self.txn) + obj.unserialize(data) + #references create object with id None before object is really made + if obj.gramps_id is not None: + new = False + else: + obj.set_handle(handle) + add_func(obj, transaction) + return obj, new + + def transaction_begin(self, msg="", batch=False, no_magic=False): + try: + return self.__transaction_begin(msg, batch, no_magic) + except DBERRS, msg: + self.__log_error() + raise Errors.DbError(msg) + + def __transaction_begin(self, msg="", batch=False, no_magic=False): + """ + Create a new Transaction tied to the current UNDO database. + + The transaction has no effect until it is committed using the + transaction_commit function of the this database object. + """ + + transaction = BdbTransaction(msg, self.undodb, self, batch, no_magic) + if batch: + # A batch transaction does not store the commits + # Aborting the session completely will become impossible. 
            self.abort_possible = False
            # Undo is also impossible after batch transaction
            self.undodb.clear()
            self.env.txn_checkpoint()

            # Older BSDDB (< 4.7): turn off synchronous txn flushing for the
            # duration of the batch; __after_commit restores it.
            if db.version() < (4, 7):
                self.env.set_flags(db.DB_TXN_NOSYNC, 1) # async txn

            if self.secondary_connected and not no_magic:
                # Disconnect unneeded secondary indices
                self.surnames.close()
                _db = db.DB(self.env)
                _db.remove(_mkname(self.full_name, SURNAMES), SURNAMES)

                self.reference_map_referenced_map.close()
                _db = db.DB(self.env)
                _db.remove(_mkname(self.full_name, REF_REF), REF_REF)
        return transaction

    def transaction_commit(self, transaction, msg):
        # Commit the transaction (no-op on read-only databases); BSDDB
        # errors are logged and surfaced as Errors.DbError.
        if self._LOG_ALL:
            LOG.debug("%s: Transaction commit '%s'\n"
                      % (self.__class__.__name__, str(msg)))

        if self.readonly:
            return

        try:
            transaction.commit(msg)
            self.undodb.commit(transaction, msg)
            self.__after_commit(transaction, msg)
            self.has_changed = True
        except DBERRS, msg:
            self.__log_error()
            raise Errors.DbError(msg)

    def __after_commit(self, transaction, msg):
        """
        Post-transaction commit processing
        """
        # For batch transactions: checkpoint, restore synchronous txn mode
        # (pre-4.7 BSDDB), and rebuild the secondary indices that
        # __transaction_begin removed when no_magic was False.
        if transaction.batch:
            self.env.txn_checkpoint()
            if db.version() < (4, 7):
                self.env.set_flags(db.DB_TXN_NOSYNC, 0) # sync txn

            if not transaction.no_magic:
                # create new secondary indices to replace the ones removed

                self.surnames = self.__open_db(self.full_name, SURNAMES,
                                               db.DB_BTREE,
                                               db.DB_DUP | db.DB_DUPSORT)

                self.person_map.associate(self.surnames, find_surname,
                                          DBFLAGS_O)

                self.reference_map_referenced_map = self.__open_db(
                    self.full_name,
                    REF_REF, db.DB_BTREE, db.DB_DUP|db.DB_DUPSORT)

                self.reference_map.associate(self.reference_map_referenced_map,
                                             find_referenced_handle, DBFLAGS_O)

                # Only build surname list after surname index is surely back
                self.build_surname_list()

        # Reset callbacks if necessary
        if transaction.batch or not len(transaction):
            return
        if self.undo_callback:
            self.undo_callback(_("_Undo %s") % transaction.get_description())
        if self.redo_callback:
self.redo_callback(None) + if self.undo_history_callback: + self.undo_history_callback() + + def undo(self, update_history=True): + self.undodb.undo(update_history) + return + + def redo(self, update_history=True): + self.undodb.redo(update_history) + return + + def gramps_upgrade(self, callback=None): + UpdateCallback.__init__(self, callback) + + version = self.metadata.get('version', default=_MINVERSION) + + t = time.time() + + if version < 14: + import upgrade + upgrade.gramps_upgrade_14(self) + + print "Upgrade time:", int(time.time()-t), "seconds" + + def set_auto_remove(self): + """ + BSDDB change log settings using new method with renamed attributes + """ + if db.version() < (4, 7): + # by the book: old method with old attribute + self.env.set_flags(db.DB_LOG_AUTOREMOVE, 1) + else: # look at python interface + # TODO test with new version of pybsddb + try: + # try numeric compare, just first 2 digits + # this won't work with something like "4.10a", but + # hopefully they won't do that + old_version = map(int, db.__version__.split(".",2)[:2]) < (4, 7) + except: + # fallback, weak string compare + old_version = db.__version__ < "4.7" + if old_version: + # undocumented: old method with new attribute + self.env.set_flags(db.DB_LOG_AUTO_REMOVE, 1) + else: + # by the book: new method with new attribute + self.env.log_set_config(db.DB_LOG_AUTO_REMOVE, 1) + + def write_version(self, name): + """Write version number for a newly created DB.""" + full_name = os.path.abspath(name) + + self.env = db.DBEnv() + self.env.set_cachesize(0, DBCACHE) + + # These env settings are only needed for Txn environment + self.env.set_lk_max_locks(DBLOCKS) + self.env.set_lk_max_objects(DBOBJECTS) + + # clean up unused logs + self.set_auto_remove() + + # The DB_PRIVATE flag must go if we ever move to multi-user setup + env_flags = db.DB_CREATE | db.DB_PRIVATE |\ + db.DB_INIT_MPOOL | db.DB_INIT_LOCK |\ + db.DB_INIT_LOG | db.DB_INIT_TXN | db.DB_THREAD + + # As opposed to before, we always 
        # try recovery on databases
        env_flags = env_flags | db.DB_RECOVER

        # Environment name is now based on the filename
        env_name = name

        self.env.open(env_name, env_flags)
        self.env.txn_checkpoint()

        self.metadata = self.__open_shelf(full_name, META)

        # Store the schema version under a real BSDDB transaction so a
        # half-written metadata record can never survive a crash.
        with BSDDBTxn(self.env, self.metadata) as txn:
            txn.put('version', _DBVERSION)

        self.metadata.close()
        self.env.close()

#-------------------------------------------------------------------------
#
# BdbTransaction
#
#-------------------------------------------------------------------------
class BdbTransaction(GrampsDbTxn):
    """
    The batch parameter is set to True for large transactions. For such
    transactions, the list of changes is not maintained, and no undo
    is possible.

    The no_magic parameter is ignored for non-batch transactions, and
    is also of no importance for DB backends other than BSD DB. For
    the BSDDB, when this parameter is set to True, some secondary
    indices will be removed at the beginning and then rebuilt at
    the end of such transaction (only if it is batch).
+ """ + + __slots__ = ('batch', 'no_magic') + + def __init__(self, msg, undodb, grampsdb, batch=False, no_magic=False): + GrampsDbTxn.__init__(self, msg, undodb, grampsdb) + self.batch = batch + self.no_magic = no_magic + + def get_db_txn(self, value): + return BSDDBTxn(value) + +def _mkname(path, name): + return os.path.join(path, name + DBEXT) + +def clear_lock_file(name): + try: + os.unlink(os.path.join(name, DBLOCKFN)) + except OSError: + return + +def write_lock_file(name): + if not os.path.isdir(name): + os.mkdir(name) + f = open(os.path.join(name, DBLOCKFN), "w") + if os.name == 'nt': + text = os.environ['USERNAME'] + else: + host = os.uname()[1] + # An ugly workaround for os.getlogin() issue with Konsole + try: + user = os.getlogin() + except: + user = os.environ.get('USER') + text = "%s@%s" % (user, host) + # Save only the username and host, so the massage can be + # printed with correct locale in DbManager.py when a lock is found + f.write(text) + f.close() + +if __name__ == "__main__": + + import sys + + d = GrampsDBDir() + d.load(sys.argv[1], lambda x: x) + + with d.get_person_cursor() as c: + for key, data in c: + person = Person(data) + print key, person.get_primary_name().get_name(), + + print d.surnames.keys()