Merge pull request #26 from gramps-project/geps/gep-032-database-backend

Geps/gep 032 database backend

Commit 45673e847d
@ -44,7 +44,6 @@ import sys
|
||||
#-------------------------------------------------------------------------
|
||||
from gramps.gen.recentfiles import recent_files
|
||||
from gramps.gen.utils.file import rm_tempdir, get_empty_tempdir
|
||||
from gramps.gen.db import DbBsddb
|
||||
from .clidbman import CLIDbManager, NAME_FILE, find_locker_name
|
||||
|
||||
from gramps.gen.plug import BasePluginManager
|
||||
@ -491,7 +490,8 @@ class ArgHandler(object):
|
||||
self.imp_db_path, title = self.dbman.create_new_db_cli()
|
||||
else:
|
||||
self.imp_db_path = get_empty_tempdir("import_dbdir")
|
||||
newdb = DbBsddb()
|
||||
|
||||
newdb = self.dbstate.make_database("bsddb")
|
||||
newdb.write_version(self.imp_db_path)
|
||||
|
||||
try:
|
||||
|
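The hunk above replaces the hard-coded DbBsddb() instantiation in ArgHandler with the new DbState factory. A minimal sketch of the new call pattern, assuming a DbState instance named dbstate, a registered "bsddb" backend, and an illustrative target directory:

    from gramps.gen.dbstate import DbState

    dbstate = DbState()
    newdb = dbstate.make_database("bsddb")     # backend selected by plugin id
    newdb.write_version("/tmp/import_dbdir")   # hypothetical import directory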
@ -54,7 +54,6 @@ _LOG = logging.getLogger(DBLOGNAME)
|
||||
#-------------------------------------------------------------------------
|
||||
from gramps.gen.const import GRAMPS_LOCALE as glocale
|
||||
_ = glocale.translation.gettext
|
||||
from gramps.gen.db import DbBsddb
|
||||
from gramps.gen.plug import BasePluginManager
|
||||
from gramps.gen.config import config
|
||||
from gramps.gen.constfunc import win, conv_to_unicode
|
||||
@ -134,61 +133,42 @@ class CLIDbManager(object):
|
||||
|
||||
def get_dbdir_summary(self, dirpath, name):
|
||||
"""
|
||||
Returns (people_count, bsddb_version, schema_version) of
|
||||
current DB.
|
||||
Returns ("Unknown", "Unknown", "Unknown") if invalid DB or other error.
|
||||
dirpath: full path to database
|
||||
name: proper name of family tree
|
||||
|
||||
Returns dictionary of summary item.
|
||||
Should include at least, if possible:
|
||||
|
||||
_("Path")
|
||||
_("Family Tree")
|
||||
_("Last accessed")
|
||||
_("Database backend")
|
||||
_("Locked?")
|
||||
|
||||
and these details:
|
||||
|
||||
_("Number of people")
|
||||
_("Version")
|
||||
_("Schema version")
|
||||
"""
|
||||
from bsddb3 import dbshelve, db
|
||||
|
||||
from gramps.gen.db import META, PERSON_TBL
|
||||
from gramps.gen.db.dbconst import BDBVERSFN
|
||||
|
||||
bdbversion_file = os.path.join(dirpath, BDBVERSFN)
|
||||
if os.path.isfile(bdbversion_file):
|
||||
vers_file = open(bdbversion_file)
|
||||
bsddb_version = vers_file.readline().strip()
|
||||
else:
|
||||
return "Unknown", "Unknown", "Unknown"
|
||||
|
||||
current_bsddb_version = str(db.version())
|
||||
if bsddb_version != current_bsddb_version:
|
||||
return "Unknown", bsddb_version, "Unknown"
|
||||
|
||||
env = db.DBEnv()
|
||||
flags = db.DB_CREATE | db.DB_PRIVATE |\
|
||||
db.DB_INIT_MPOOL |\
|
||||
db.DB_INIT_LOG | db.DB_INIT_TXN
|
||||
dbid = "bsddb"
|
||||
dbid_path = os.path.join(dirpath, "database.txt")
|
||||
if os.path.isfile(dbid_path):
|
||||
dbid = open(dbid_path).read().strip()
|
||||
try:
|
||||
env.open(dirpath, flags)
|
||||
database = self.dbstate.make_database(dbid)
|
||||
database.load(dirpath, None)
|
||||
retval = database.get_summary()
|
||||
except Exception as msg:
|
||||
LOG.warning("Error opening db environment for '%s': %s" %
|
||||
(name, str(msg)))
|
||||
try:
|
||||
env.close()
|
||||
except Exception as msg:
|
||||
LOG.warning("Error closing db environment for '%s': %s" %
|
||||
(name, str(msg)))
|
||||
return "Unknown", bsddb_version, "Unknown"
|
||||
dbmap1 = dbshelve.DBShelf(env)
|
||||
fname = os.path.join(dirpath, META + ".db")
|
||||
try:
|
||||
dbmap1.open(fname, META, db.DB_HASH, db.DB_RDONLY)
|
||||
except:
|
||||
env.close()
|
||||
return "Unknown", bsddb_version, "Unknown"
|
||||
schema_version = dbmap1.get(b'version', default=None)
|
||||
dbmap1.close()
|
||||
dbmap2 = dbshelve.DBShelf(env)
|
||||
fname = os.path.join(dirpath, PERSON_TBL + ".db")
|
||||
try:
|
||||
dbmap2.open(fname, PERSON_TBL, db.DB_HASH, db.DB_RDONLY)
|
||||
except:
|
||||
env.close()
|
||||
return "Unknown", bsddb_version, schema_version
|
||||
count = len(dbmap2)
|
||||
dbmap2.close()
|
||||
env.close()
|
||||
return (count, bsddb_version, schema_version)
|
||||
retval = {"Unavailable": str(msg)[:74] + "..."}
|
||||
retval.update({
|
||||
_("Family Tree"): name,
|
||||
_("Path"): dirpath,
|
||||
_("Database backend"): dbid,
|
||||
_("Last accessed"): time_val(dirpath)[1],
|
||||
_("Locked?"): self.is_locked(dirpath),
|
||||
})
|
||||
return retval
|
||||
|
||||
def family_tree_summary(self):
|
||||
"""
|
||||
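With this change get_dbdir_summary no longer returns a (count, bsddb_version, schema_version) tuple; it returns the backend's get_summary() dictionary extended with the manager-level keys listed in the docstring. A hedged sketch of the result shape (path and values are illustrative only, dbman is an assumed CLIDbManager):

    summary = dbman.get_dbdir_summary("/home/user/.gramps/grampsdb/tree1", "Example Tree")
    # Illustrative shape of the returned dictionary:
    # {
    #     "Family Tree": "Example Tree",
    #     "Path": "/home/user/.gramps/grampsdb/tree1",
    #     "Database backend": "bsddb",
    #     "Last accessed": "01/01/15 12:00:00",
    #     "Locked?": False,
    #     "Number of people": 42,     # supplied by database.get_summary()
    #     "Schema version": 17,       # likewise backend-supplied
    # }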
@ -199,19 +179,7 @@ class CLIDbManager(object):
|
||||
for item in self.current_names:
|
||||
(name, dirpath, path_name, last,
|
||||
tval, enable, stock_id) = item
|
||||
count, bsddb_version, schema_version = self.get_dbdir_summary(dirpath, name)
|
||||
retval = {}
|
||||
retval[_("Number of people")] = count
|
||||
if enable:
|
||||
retval[_("Locked?")] = _("yes")
|
||||
else:
|
||||
retval[_("Locked?")] = _("no")
|
||||
retval[_("Bsddb version")] = bsddb_version
|
||||
retval[_("Schema version")] = schema_version
|
||||
retval[_("Family Tree")] = name
|
||||
retval[_("Path")] = dirpath
|
||||
retval[_("Last accessed")] = time.strftime('%x %X',
|
||||
time.localtime(tval))
|
||||
retval = self.get_dbdir_summary(dirpath, name)
|
||||
summary_list.append( retval )
|
||||
return summary_list
|
||||
|
||||
@ -275,7 +243,7 @@ class CLIDbManager(object):
|
||||
"""
|
||||
print(_('Import finished...'))
|
||||
|
||||
def create_new_db_cli(self, title=None, create_db=True):
|
||||
def create_new_db_cli(self, title=None, create_db=True, dbid=None):
|
||||
"""
|
||||
Create a new database.
|
||||
"""
|
||||
@ -294,7 +262,9 @@ class CLIDbManager(object):
|
||||
|
||||
if create_db:
|
||||
# write the version number into metadata
|
||||
newdb = DbBsddb()
|
||||
if dbid is None:
|
||||
dbid = "bsddb"
|
||||
newdb = self.dbstate.make_database(dbid)
|
||||
newdb.write_version(new_path)
|
||||
|
||||
(tval, last) = time_val(new_path)
|
||||
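create_new_db_cli and _create_new_db now take an optional dbid that falls back to "bsddb" when omitted. A minimal usage sketch, assuming dbman is a CLIDbManager instance:

    # Default backend (dbid=None is treated as "bsddb"):
    new_path, title = dbman.create_new_db_cli("My Tree")

    # Explicit backend, by registered DATABASE plugin id:
    new_path, title = dbman.create_new_db_cli("My Tree", dbid="bsddb")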
@ -303,11 +273,11 @@ class CLIDbManager(object):
|
||||
last, tval, False, ""))
|
||||
return new_path, title
|
||||
|
||||
def _create_new_db(self, title=None):
|
||||
def _create_new_db(self, title=None, dbid=None):
|
||||
"""
|
||||
Create a new database, do extra stuff needed
|
||||
"""
|
||||
return self.create_new_db_cli(title)
|
||||
return self.create_new_db_cli(title, dbid=dbid)
|
||||
|
||||
def import_new_db(self, filename, user):
|
||||
"""
|
||||
@ -360,8 +330,8 @@ class CLIDbManager(object):
|
||||
|
||||
# Create a new database
|
||||
self.__start_cursor(_("Importing data..."))
|
||||
dbclass = DbBsddb
|
||||
dbase = dbclass()
|
||||
|
||||
dbase = self.dbstate.make_database("bsddb")
|
||||
dbase.load(new_path, user.callback)
|
||||
|
||||
import_function = plugin.get_import_function()
|
||||
|
@ -47,9 +47,9 @@ LOG = logging.getLogger(".grampscli")
|
||||
from gramps.gen.display.name import displayer as name_displayer
|
||||
from gramps.gen.config import config
|
||||
from gramps.gen.const import PLUGINS_DIR, USER_PLUGINS
|
||||
from gramps.gen.db.dbconst import DBBACKEND
|
||||
from gramps.gen.errors import DbError
|
||||
from gramps.gen.dbstate import DbState
|
||||
from gramps.gen.db import DbBsddb
|
||||
from gramps.gen.db.exceptions import (DbUpgradeRequiredError,
|
||||
BsddbDowngradeError,
|
||||
DbVersionError,
|
||||
@ -152,9 +152,16 @@ class CLIDbLoader(object):
|
||||
else:
|
||||
mode = 'w'
|
||||
|
||||
dbclass = DbBsddb
|
||||
dbid_path = os.path.join(filename, DBBACKEND)
|
||||
if os.path.isfile(dbid_path):
|
||||
with open(dbid_path) as fp:
|
||||
dbid = fp.read().strip()
|
||||
else:
|
||||
dbid = "bsddb"
|
||||
|
||||
db = self.dbstate.make_database(dbid)
|
||||
|
||||
self.dbstate.change_database(dbclass())
|
||||
self.dbstate.change_database(db)
|
||||
self.dbstate.db.disable_signals()
|
||||
|
||||
self._begin_progress()
|
||||
|
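The loader now picks the backend by reading the DBBACKEND file ("database.txt") in the tree directory, defaulting to "bsddb" for trees that predate it. The lookup in isolation, as a standalone sketch (read_backend_id is a hypothetical helper, not part of this commit):

    import os
    from gramps.gen.db.dbconst import DBBACKEND  # "database.txt"

    def read_backend_id(tree_dir):
        """Return the database backend plugin id recorded for a family tree."""
        dbid_path = os.path.join(tree_dir, DBBACKEND)
        if os.path.isfile(dbid_path):
            with open(dbid_path) as fp:
                return fp.read().strip()
        return "bsddb"  # older trees have no database.txt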
@ -86,11 +86,27 @@ More details can be found in the manual's
|
||||
|
||||
from .base import *
|
||||
from .dbconst import *
|
||||
from .cursor import *
|
||||
from .read import *
|
||||
from .bsddbtxn import *
|
||||
from .txn import *
|
||||
from .undoredo import *
|
||||
from .exceptions import *
|
||||
from .write import *
|
||||
from .backup import backup, restore
|
||||
from .undoredo import *
|
||||
|
||||
def find_surname_name(key, data):
|
||||
"""
|
||||
Creating a surname from raw name, to use for sort and index
|
||||
returns a byte string
|
||||
"""
|
||||
return __index_surname(data[5])
|
||||
|
||||
def __index_surname(surn_list):
|
||||
"""
|
||||
All non pa/matronymic surnames are used in indexing.
|
||||
pa/matronymic not as they change for every generation!
|
||||
returns a byte string
|
||||
"""
|
||||
from gramps.gen.lib import NameOriginType
|
||||
if surn_list:
|
||||
surn = " ".join([x[0] for x in surn_list if not (x[3][0] in [
|
||||
NameOriginType.PATRONYMIC, NameOriginType.MATRONYMIC]) ])
|
||||
else:
|
||||
surn = ""
|
||||
return surn
|
||||
|
@ -1,213 +0,0 @@
|
||||
#
|
||||
# Gramps - a GTK+/GNOME based genealogy program
|
||||
#
|
||||
# Copyright (C) 2007 Donald N. Allingham
|
||||
# Copyright (C) 2011 Tim G L Lyons
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# gen/db/backup.py
|
||||
|
||||
"""
|
||||
Description
|
||||
===========
|
||||
|
||||
This module Provides backup and restore functions for a database. The
|
||||
backup function saves the data into backup files, while the restore
|
||||
function loads the data back into a database.
|
||||
|
||||
You should only restore the data into an empty database.
|
||||
|
||||
Implementation
|
||||
==============
|
||||
|
||||
Not all of the database tables need to be backed up, since many are
|
||||
automatically generated from the others. The tables that are backed up
|
||||
are the primary tables and the metadata table.
|
||||
|
||||
The database consists of a table of "pickled" tuples. Each of the
|
||||
primary tables is "walked", and the pickled tuple is extracted, and
|
||||
written to the backup file.
|
||||
|
||||
Restoring the data is just as simple. The backup file is parsed an
|
||||
entry at a time, and inserted into the associated database table. The
|
||||
derived tables are built automatically as the items are entered into
|
||||
db.
|
||||
"""
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# load standard python libraries
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
import os
|
||||
import pickle
|
||||
|
||||
#------------------------------------------------------------------------
|
||||
#
|
||||
# Gramps libs
|
||||
#
|
||||
#------------------------------------------------------------------------
|
||||
from .exceptions import DbException
|
||||
from .write import FAMILY_TBL, PLACES_TBL, SOURCES_TBL, MEDIA_TBL, \
|
||||
EVENTS_TBL, PERSON_TBL, REPO_TBL, NOTE_TBL, TAG_TBL, META, CITATIONS_TBL
|
||||
|
||||
#------------------------------------------------------------------------
|
||||
#
|
||||
# Set up logging
|
||||
#
|
||||
#------------------------------------------------------------------------
|
||||
import logging
|
||||
LOG = logging.getLogger(".Backup")
|
||||
|
||||
def backup(database):
|
||||
"""
|
||||
Exports the database to a set of backup files. These files consist
|
||||
of the pickled database tables, one file for each table.
|
||||
|
||||
The heavy lifting is done by the private :py:func:`__do__export` function.
|
||||
The purpose of this function is to catch any exceptions that occur.
|
||||
|
||||
:param database: database instance to backup
|
||||
:type database: DbDir
|
||||
"""
|
||||
try:
|
||||
__do_export(database)
|
||||
except (OSError, IOError) as msg:
|
||||
raise DbException(str(msg))
|
||||
|
||||
def __mk_backup_name(database, base):
|
||||
"""
|
||||
Return the backup name of the database table
|
||||
|
||||
:param database: database instance
|
||||
:type database: DbDir
|
||||
:param base: base name of the table
|
||||
:type base: str
|
||||
"""
|
||||
return os.path.join(database.get_save_path(), base + ".gbkp")
|
||||
|
||||
def __mk_tmp_name(database, base):
|
||||
"""
|
||||
Return the temporary backup name of the database table
|
||||
|
||||
:param database: database instance
|
||||
:type database: DbDir
|
||||
:param base: base name of the table
|
||||
:type base: str
|
||||
"""
|
||||
return os.path.join(database.get_save_path(), base + ".gbkp.new")
|
||||
|
||||
def __do_export(database):
|
||||
"""
|
||||
Loop through each table of the database, saving the pickled data
|
||||
a file.
|
||||
|
||||
:param database: database instance to backup
|
||||
:type database: DbDir
|
||||
"""
|
||||
try:
|
||||
for (base, tbl) in __build_tbl_map(database):
|
||||
backup_name = __mk_tmp_name(database, base)
|
||||
backup_table = open(backup_name, 'wb')
|
||||
|
||||
cursor = tbl.cursor()
|
||||
data = cursor.first()
|
||||
while data:
|
||||
pickle.dump(data, backup_table, 2)
|
||||
data = cursor.next()
|
||||
cursor.close()
|
||||
backup_table.close()
|
||||
except (IOError,OSError):
|
||||
return
|
||||
|
||||
for (base, tbl) in __build_tbl_map(database):
|
||||
new_name = __mk_backup_name(database, base)
|
||||
old_name = __mk_tmp_name(database, base)
|
||||
if os.path.isfile(new_name):
|
||||
os.unlink(new_name)
|
||||
os.rename(old_name, new_name)
|
||||
|
||||
def restore(database):
|
||||
"""
|
||||
Restores the database to a set of backup files. These files consist
|
||||
of the pickled database tables, one file for each table.
|
||||
|
||||
The heavy lifting is done by the private :py:func:`__do__restore` function.
|
||||
The purpose of this function is to catch any exceptions that occur.
|
||||
|
||||
:param database: database instance to restore
|
||||
:type database: DbDir
|
||||
"""
|
||||
try:
|
||||
__do_restore(database)
|
||||
except (OSError, IOError) as msg:
|
||||
raise DbException(str(msg))
|
||||
|
||||
def __do_restore(database):
|
||||
"""
|
||||
Loop through each table of the database, restoring the pickled data
|
||||
to the appropriate database file.
|
||||
|
||||
:param database: database instance to backup
|
||||
:type database: DbDir
|
||||
"""
|
||||
for (base, tbl) in __build_tbl_map(database):
|
||||
backup_name = __mk_backup_name(database, base)
|
||||
backup_table = open(backup_name, 'rb')
|
||||
__load_tbl_txn(database, backup_table, tbl)
|
||||
|
||||
database.rebuild_secondary()
|
||||
|
||||
def __load_tbl_txn(database, backup_table, tbl):
|
||||
"""
|
||||
Return the temporary backup name of the database table
|
||||
|
||||
:param database: database instance
|
||||
:type database: DbDir
|
||||
:param backup_table: file containing the backup data
|
||||
:type backup_table: file
|
||||
:param tbl: Berkeley db database table
|
||||
:type tbl: Berkeley db database table
|
||||
"""
|
||||
try:
|
||||
while True:
|
||||
data = pickle.load(backup_table)
|
||||
txn = database.env.txn_begin()
|
||||
tbl.put(data[0], data[1], txn=txn)
|
||||
txn.commit()
|
||||
except EOFError:
|
||||
backup_table.close()
|
||||
|
||||
def __build_tbl_map(database):
|
||||
"""
|
||||
Builds a table map of names to database tables.
|
||||
|
||||
:param database: database instance to backup
|
||||
:type database: DbDir
|
||||
"""
|
||||
return [
|
||||
( PERSON_TBL, database.person_map.db),
|
||||
( FAMILY_TBL, database.family_map.db),
|
||||
( PLACES_TBL, database.place_map.db),
|
||||
( SOURCES_TBL, database.source_map.db),
|
||||
( CITATIONS_TBL, database.citation_map.db),
|
||||
( REPO_TBL, database.repository_map.db),
|
||||
( NOTE_TBL, database.note_map.db),
|
||||
( MEDIA_TBL, database.media_map.db),
|
||||
( EVENTS_TBL, database.event_map.db),
|
||||
( TAG_TBL, database.tag_map.db),
|
||||
( META, database.metadata.db),
|
||||
]
|
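The docstring of the removed module describes the .gbkp format: every primary table is walked with a cursor and each (key, pickled-data) pair is written with pickle.dump. A hedged sketch of reading such a file back (iter_backup is a hypothetical helper, shown only to illustrate the format):

    import pickle

    def iter_backup(path):
        """Yield (key, value) pairs from a .gbkp backup file until EOF."""
        with open(path, "rb") as backup_table:
            while True:
                try:
                    yield pickle.load(backup_table)
                except EOFError:
                    return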
@ -28,26 +28,23 @@ Declare constants used by database modules
|
||||
# constants
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
__all__ = (
|
||||
('DBPAGE', 'DBMODE', 'DBCACHE', 'DBLOCKS', 'DBOBJECTS', 'DBUNDO',
|
||||
'DBEXT', 'DBMODE_R', 'DBMODE_W', 'DBUNDOFN', 'DBLOCKFN',
|
||||
'DBRECOVFN','BDBVERSFN', 'DBLOGNAME', 'DBFLAGS_O', 'DBFLAGS_R',
|
||||
'DBFLAGS_D', 'SCHVERSFN', 'PCKVERSFN'
|
||||
) +
|
||||
|
||||
('PERSON_KEY', 'FAMILY_KEY', 'SOURCE_KEY', 'CITATION_KEY',
|
||||
'EVENT_KEY', 'MEDIA_KEY', 'PLACE_KEY', 'REPOSITORY_KEY',
|
||||
'NOTE_KEY', 'REFERENCE_KEY', 'TAG_KEY'
|
||||
) +
|
||||
|
||||
('TXNADD', 'TXNUPD', 'TXNDEL')
|
||||
)
|
||||
__all__ = ( 'DBPAGE', 'DBMODE', 'DBCACHE', 'DBLOCKS', 'DBOBJECTS', 'DBUNDO',
|
||||
'DBEXT', 'DBMODE_R', 'DBMODE_W', 'DBUNDOFN', 'DBLOCKFN',
|
||||
'DBRECOVFN','BDBVERSFN', 'DBLOGNAME', 'SCHVERSFN', 'PCKVERSFN',
|
||||
'DBBACKEND',
|
||||
'PERSON_KEY', 'FAMILY_KEY', 'SOURCE_KEY', 'CITATION_KEY',
|
||||
'EVENT_KEY', 'MEDIA_KEY', 'PLACE_KEY', 'REPOSITORY_KEY',
|
||||
'NOTE_KEY', 'REFERENCE_KEY', 'TAG_KEY',
|
||||
'TXNADD', 'TXNUPD', 'TXNDEL',
|
||||
"CLASS_TO_KEY_MAP", "KEY_TO_CLASS_MAP", "KEY_TO_NAME_MAP"
|
||||
)
|
||||
|
||||
DBEXT = ".db" # File extension to be used for database files
|
||||
DBUNDOFN = "undo.db" # File name of 'undo' database
|
||||
DBLOCKFN = "lock" # File name of lock file
|
||||
DBRECOVFN = "need_recover" # File name of recovery file
|
||||
BDBVERSFN = "bdbversion.txt"# File name of Berkeley DB version file
|
||||
DBBACKEND = "database.txt" # File name of Database backend file
|
||||
SCHVERSFN = "schemaversion.txt"# File name of schema version file
|
||||
PCKVERSFN = "pickleupgrade.txt" # Indicator that pickle has been upgrade t Python3
|
||||
DBLOGNAME = ".Db" # Name of logger
|
||||
@ -60,18 +57,6 @@ DBLOCKS = 100000 # Maximum number of locks supported
|
||||
DBOBJECTS = 100000 # Maximum number of simultaneously locked objects
|
||||
DBUNDO = 1000 # Maximum size of undo buffer
|
||||
|
||||
try:
|
||||
from bsddb3.db import DB_CREATE, DB_AUTO_COMMIT, DB_DUP, DB_DUPSORT, DB_RDONLY
|
||||
DBFLAGS_O = DB_CREATE | DB_AUTO_COMMIT # Default flags for database open
|
||||
DBFLAGS_R = DB_RDONLY # Flags to open a database read-only
|
||||
DBFLAGS_D = DB_DUP | DB_DUPSORT # Default flags for duplicate keys
|
||||
except:
|
||||
print("WARNING: no bsddb support")
|
||||
# FIXME: make this more abstract to deal with other backends, or do not import
|
||||
DBFLAGS_O = DB_CREATE = DB_AUTO_COMMIT = 0
|
||||
DBFLAGS_R = DB_RDONLY = 0
|
||||
DBFLAGS_D = DB_DUP = DB_DUPSORT = 0
|
||||
|
||||
PERSON_KEY = 0
|
||||
FAMILY_KEY = 1
|
||||
SOURCE_KEY = 2
|
||||
@ -85,3 +70,37 @@ TAG_KEY = 9
|
||||
CITATION_KEY = 10
|
||||
|
||||
TXNADD, TXNUPD, TXNDEL = 0, 1, 2
|
||||
|
||||
CLASS_TO_KEY_MAP = {"Person": PERSON_KEY,
|
||||
"Family": FAMILY_KEY,
|
||||
"Source": SOURCE_KEY,
|
||||
"Citation": CITATION_KEY,
|
||||
"Event": EVENT_KEY,
|
||||
"MediaObject": MEDIA_KEY,
|
||||
"Place": PLACE_KEY,
|
||||
"Repository": REPOSITORY_KEY,
|
||||
"Note" : NOTE_KEY,
|
||||
"Tag": TAG_KEY}
|
||||
|
||||
KEY_TO_CLASS_MAP = {PERSON_KEY: "Person",
|
||||
FAMILY_KEY: "Family",
|
||||
SOURCE_KEY: "Source",
|
||||
CITATION_KEY: "Citation",
|
||||
EVENT_KEY: "Event",
|
||||
MEDIA_KEY: "MediaObject",
|
||||
PLACE_KEY: "Place",
|
||||
REPOSITORY_KEY: "Repository",
|
||||
NOTE_KEY: "Note",
|
||||
TAG_KEY: "Tag"}
|
||||
|
||||
KEY_TO_NAME_MAP = {PERSON_KEY: 'person',
|
||||
FAMILY_KEY: 'family',
|
||||
EVENT_KEY: 'event',
|
||||
SOURCE_KEY: 'source',
|
||||
CITATION_KEY: 'citation',
|
||||
PLACE_KEY: 'place',
|
||||
MEDIA_KEY: 'media',
|
||||
REPOSITORY_KEY: 'repository',
|
||||
#REFERENCE_KEY: 'reference',
|
||||
NOTE_KEY: 'note',
|
||||
TAG_KEY: 'tag'}
|
||||
|
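The three new maps give a fixed translation between class names, integer table keys, and lower-case table names, so the constants above can be used interchangeably. For example:

    from gramps.gen.db.dbconst import (CLASS_TO_KEY_MAP, KEY_TO_CLASS_MAP,
                                       KEY_TO_NAME_MAP)

    key = CLASS_TO_KEY_MAP["Person"]          # 0, i.e. PERSON_KEY
    assert KEY_TO_CLASS_MAP[key] == "Person"
    assert KEY_TO_NAME_MAP[key] == "person"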
@ -1,77 +1,12 @@
|
||||
#
|
||||
# Gramps - a GTK+/GNOME based genealogy program
|
||||
#
|
||||
# Copyright (C) 2004-2006 Donald N. Allingham
|
||||
# Copyright (C) 2011 Tim G L Lyons
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
|
||||
"""
|
||||
Exports the DbUndo class for managing Gramps transactions
|
||||
undos and redos.
|
||||
"""
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# Standard python modules
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
import time, os
|
||||
import time
|
||||
import pickle
|
||||
from collections import deque
|
||||
|
||||
try:
|
||||
from bsddb3 import db
|
||||
except:
|
||||
# FIXME: make this more abstract to deal with other backends
|
||||
class db:
|
||||
DBRunRecoveryError = 0
|
||||
DBAccessError = 0
|
||||
DBPageNotFoundError = 0
|
||||
DBInvalidArgError = 0
|
||||
|
||||
from ..const import GRAMPS_LOCALE as glocale
|
||||
_ = glocale.translation.gettext
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# Gramps modules
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
from ..constfunc import conv_to_unicode, handle2internal, win
|
||||
from .dbconst import *
|
||||
from . import BSDDBTxn
|
||||
from ..errors import DbError
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# Local Constants
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
DBERRS = (db.DBRunRecoveryError, db.DBAccessError,
|
||||
db.DBPageNotFoundError, db.DBInvalidArgError)
|
||||
|
||||
_SIGBASE = ('person', 'family', 'source', 'event', 'media',
|
||||
'place', 'repository', 'reference', 'note', 'tag', 'citation')
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# DbUndo class
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
class DbUndo(object):
|
||||
"""
|
||||
Base class for the Gramps undo/redo manager. Needs to be subclassed
|
||||
@ -100,7 +35,6 @@ class DbUndo(object):
|
||||
self.db.media_map,
|
||||
self.db.place_map,
|
||||
self.db.repository_map,
|
||||
self.db.reference_map,
|
||||
self.db.note_map,
|
||||
self.db.tag_map,
|
||||
self.db.citation_map,
|
||||
@ -171,6 +105,16 @@ class DbUndo(object):
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def __redo(self, update_history):
|
||||
"""
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def __undo(self, update_history):
|
||||
"""
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def commit(self, txn, msg):
|
||||
"""
|
||||
Commit the transaction to the undo/redo database. "txn" should be
|
||||
@ -196,105 +140,6 @@ class DbUndo(object):
|
||||
return False
|
||||
return self.__redo(update_history)
|
||||
|
||||
def undoredo(func):
|
||||
"""
|
||||
Decorator function to wrap undo and redo operations within a bsddb
|
||||
transaction. It also catches bsddb errors and raises an exception
|
||||
as appropriate
|
||||
"""
|
||||
def try_(self, *args, **kwargs):
|
||||
try:
|
||||
with BSDDBTxn(self.db.env) as txn:
|
||||
self.txn = self.db.txn = txn.txn
|
||||
status = func(self, *args, **kwargs)
|
||||
if not status:
|
||||
txn.abort()
|
||||
self.db.txn = None
|
||||
return status
|
||||
|
||||
except DBERRS as msg:
|
||||
self.db._log_error()
|
||||
raise DbError(msg)
|
||||
|
||||
return try_
|
||||
|
||||
@undoredo
|
||||
def __undo(self, update_history=True):
|
||||
"""
|
||||
Access the last committed transaction, and revert the data to the
|
||||
state before the transaction was committed.
|
||||
"""
|
||||
txn = self.undoq.pop()
|
||||
self.redoq.append(txn)
|
||||
transaction = txn
|
||||
db = self.db
|
||||
subitems = transaction.get_recnos(reverse=True)
|
||||
|
||||
# Process all records in the transaction
|
||||
for record_id in subitems:
|
||||
(key, trans_type, handle, old_data, new_data) = \
|
||||
pickle.loads(self.undodb[record_id])
|
||||
|
||||
if key == REFERENCE_KEY:
|
||||
self.undo_reference(old_data, handle, self.mapbase[key])
|
||||
else:
|
||||
self.undo_data(old_data, handle, self.mapbase[key],
|
||||
db.emit, _SIGBASE[key])
|
||||
# Notify listeners
|
||||
if db.undo_callback:
|
||||
if self.undo_count > 0:
|
||||
db.undo_callback(_("_Undo %s")
|
||||
% self.undoq[-1].get_description())
|
||||
else:
|
||||
db.undo_callback(None)
|
||||
|
||||
if db.redo_callback:
|
||||
db.redo_callback(_("_Redo %s")
|
||||
% transaction.get_description())
|
||||
|
||||
if update_history and db.undo_history_callback:
|
||||
db.undo_history_callback()
|
||||
return True
|
||||
|
||||
@undoredo
|
||||
def __redo(self, db=None, update_history=True):
|
||||
"""
|
||||
Access the last undone transaction, and revert the data to the state
|
||||
before the transaction was undone.
|
||||
"""
|
||||
txn = self.redoq.pop()
|
||||
self.undoq.append(txn)
|
||||
transaction = txn
|
||||
db = self.db
|
||||
subitems = transaction.get_recnos()
|
||||
|
||||
# Process all records in the transaction
|
||||
for record_id in subitems:
|
||||
(key, trans_type, handle, old_data, new_data) = \
|
||||
pickle.loads(self.undodb[record_id])
|
||||
|
||||
if key == REFERENCE_KEY:
|
||||
self.undo_reference(new_data, handle, self.mapbase[key])
|
||||
else:
|
||||
self.undo_data(new_data, handle, self.mapbase[key],
|
||||
db.emit, _SIGBASE[key])
|
||||
# Notify listeners
|
||||
if db.undo_callback:
|
||||
db.undo_callback(_("_Undo %s")
|
||||
% transaction.get_description())
|
||||
|
||||
if db.redo_callback:
|
||||
if self.redo_count > 1:
|
||||
new_transaction = self.redoq[-2]
|
||||
db.redo_callback(_("_Redo %s")
|
||||
% new_transaction.get_description())
|
||||
else:
|
||||
db.redo_callback(None)
|
||||
|
||||
if update_history and db.undo_history_callback:
|
||||
db.undo_history_callback()
|
||||
return True
|
||||
|
||||
def undo_reference(self, data, handle, db_map):
|
||||
"""
|
||||
Helper method to undo a reference map entry
|
||||
@ -332,185 +177,3 @@ class DbUndo(object):
|
||||
|
||||
undo_count = property(lambda self:len(self.undoq))
|
||||
redo_count = property(lambda self:len(self.redoq))
|
||||
|
||||
class DbUndoList(DbUndo):
|
||||
"""
|
||||
Implementation of the Gramps undo database using a Python list
|
||||
"""
|
||||
def __init__(self, grampsdb):
|
||||
"""
|
||||
Class constructor
|
||||
"""
|
||||
super(DbUndoList, self).__init__(grampsdb)
|
||||
self.undodb = []
|
||||
|
||||
def open(self):
|
||||
"""
|
||||
A list does not need to be opened
|
||||
"""
|
||||
pass
|
||||
|
||||
def close(self):
|
||||
"""
|
||||
Close the list by resetting it to empty
|
||||
"""
|
||||
self.undodb = []
|
||||
self.clear()
|
||||
|
||||
def append(self, value):
|
||||
"""
|
||||
Add an entry on the end of the list
|
||||
"""
|
||||
self.undodb.append(value)
|
||||
return len(self.undodb)-1
|
||||
|
||||
def __getitem__(self, index):
|
||||
"""
|
||||
Return an item at the specified index
|
||||
"""
|
||||
return self.undodb[index]
|
||||
|
||||
def __setitem__(self, index, value):
|
||||
"""
|
||||
Set an item at the speficied index to the given value
|
||||
"""
|
||||
self.undodb[index] = value
|
||||
|
||||
def __iter__(self):
|
||||
"""
|
||||
Iterator
|
||||
"""
|
||||
for item in self.undodb:
|
||||
yield item
|
||||
|
||||
def __len__(self):
|
||||
"""
|
||||
Return number of entries in the list
|
||||
"""
|
||||
return len(self.undodb)
|
||||
|
||||
class DbUndoBSDDB(DbUndo):
|
||||
"""
|
||||
Class constructor for Gramps undo/redo database using a bsddb recno
|
||||
database as the backing store.
|
||||
"""
|
||||
|
||||
def __init__(self, grampsdb, path):
|
||||
"""
|
||||
Class constructor
|
||||
"""
|
||||
super(DbUndoBSDDB, self).__init__(grampsdb)
|
||||
self.undodb = db.DB()
|
||||
self.path = path
|
||||
|
||||
def open(self):
|
||||
"""
|
||||
Open the undo/redo database
|
||||
"""
|
||||
path = self.path
|
||||
self.undodb.open(path, db.DB_RECNO, db.DB_CREATE)
|
||||
|
||||
def close(self):
|
||||
"""
|
||||
Close the undo/redo database
|
||||
"""
|
||||
self.undodb.close()
|
||||
self.undodb = None
|
||||
self.mapbase = None
|
||||
self.db = None
|
||||
|
||||
try:
|
||||
os.remove(self.path)
|
||||
except OSError:
|
||||
pass
|
||||
self.clear()
|
||||
|
||||
def append(self, value):
|
||||
"""
|
||||
Add an entry on the end of the database
|
||||
"""
|
||||
return self.undodb.append(value)
|
||||
|
||||
def __len__(self):
|
||||
"""
|
||||
Returns the number of entries in the database
|
||||
"""
|
||||
x = self.undodb.stat()['nkeys']
|
||||
y = len(self.undodb)
|
||||
assert x == y
|
||||
return x
|
||||
|
||||
def __getitem__(self, index):
|
||||
"""
|
||||
Returns the entry stored at the specified index
|
||||
"""
|
||||
return self.undodb.get(index)
|
||||
|
||||
def __setitem__(self, index, value):
|
||||
"""
|
||||
Sets the entry stored at the specified index to the value given.
|
||||
"""
|
||||
self.undodb.put(index, value)
|
||||
|
||||
def __iter__(self):
|
||||
"""
|
||||
Iterator
|
||||
"""
|
||||
cursor = self.undodb.cursor()
|
||||
data = cursor.first()
|
||||
while data:
|
||||
yield data
|
||||
data = next(cursor)
|
||||
|
||||
def testundo():
|
||||
class T:
|
||||
def __init__(self):
|
||||
self.msg = ''
|
||||
self.timetstamp = 0
|
||||
def set_description(self, msg):
|
||||
self.msg = msg
|
||||
|
||||
class D:
|
||||
def __init__(self):
|
||||
self.person_map = {}
|
||||
self.family_map = {}
|
||||
self.source_map = {}
|
||||
self.event_map = {}
|
||||
self.media_map = {}
|
||||
self.place_map = {}
|
||||
self.note_map = {}
|
||||
self.tag_map = {}
|
||||
self.repository_map = {}
|
||||
self.reference_map = {}
|
||||
|
||||
print("list tests")
|
||||
undo = DbUndoList(D())
|
||||
print(undo.append('foo'))
|
||||
print(undo.append('bar'))
|
||||
print(undo[0])
|
||||
undo[0] = 'foobar'
|
||||
print(undo[0])
|
||||
print("len", len(undo))
|
||||
print("iter")
|
||||
for data in undo:
|
||||
print(data)
|
||||
print()
|
||||
print("bsddb tests")
|
||||
undo = DbUndoBSDDB(D(), '/tmp/testundo')
|
||||
undo.open()
|
||||
print(undo.append('foo'))
|
||||
print(undo.append('fo2'))
|
||||
print(undo.append('fo3'))
|
||||
print(undo[1])
|
||||
undo[1] = 'bar'
|
||||
print(undo[1])
|
||||
for data in undo:
|
||||
print(data)
|
||||
print("len", len(undo))
|
||||
|
||||
print("test commit")
|
||||
undo.commit(T(), msg="test commit")
|
||||
undo.close()
|
||||
|
||||
if __name__ == '__main__':
|
||||
testundo()
|
||||
|
@ -22,13 +22,23 @@
|
||||
"""
|
||||
Provide the database state class
|
||||
"""
|
||||
import sys
|
||||
import os
|
||||
import io
|
||||
|
||||
from .db import DbBsddbRead
|
||||
from .db import DbReadBase
|
||||
from .proxy.proxybase import ProxyDbBase
|
||||
from .utils.callback import Callback
|
||||
from .config import config
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# set up logging
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
import logging
|
||||
LOG = logging.getLogger(".dbstate")
|
||||
|
||||
class DbState(Callback):
|
||||
"""
|
||||
Provide a class to encapsulate the state of the database.
|
||||
@ -45,7 +55,7 @@ class DbState(Callback):
|
||||
just a place holder until a real DB is assigned.
|
||||
"""
|
||||
Callback.__init__(self)
|
||||
self.db = DbBsddbRead()
|
||||
self.db = self.make_database("bsddb")
|
||||
self.open = False
|
||||
self.stack = []
|
||||
|
||||
@ -54,9 +64,10 @@ class DbState(Callback):
|
||||
Closes the existing db, and opens a new one.
|
||||
Retained for backward compatibility.
|
||||
"""
|
||||
self.emit('no-database', ())
|
||||
self.db.close()
|
||||
self.change_database_noclose(database)
|
||||
if database:
|
||||
self.emit('no-database', ())
|
||||
self.db.close()
|
||||
self.change_database_noclose(database)
|
||||
|
||||
def change_database_noclose(self, database):
|
||||
"""
|
||||
@ -88,7 +99,7 @@ class DbState(Callback):
|
||||
"""
|
||||
self.emit('no-database', ())
|
||||
self.db.close()
|
||||
self.db = DbBsddbRead()
|
||||
self.db = self.make_database("bsddb")
|
||||
self.db.db_is_open = False
|
||||
self.open = False
|
||||
self.emit('database-changed', (self.db, ))
|
||||
@ -122,3 +133,100 @@ class DbState(Callback):
|
||||
"""
|
||||
self.db = self.stack.pop()
|
||||
self.emit('database-changed', (self.db, ))
|
||||
|
||||
def make_database(self, id):
|
||||
"""
|
||||
Make a database, given a plugin id.
|
||||
"""
|
||||
from .plug import BasePluginManager
|
||||
from .const import PLUGINS_DIR, USER_PLUGINS
|
||||
|
||||
pmgr = BasePluginManager.get_instance()
|
||||
pdata = pmgr.get_plugin(id)
|
||||
|
||||
if not pdata:
|
||||
# This might happen if using gramps from outside, and
|
||||
# we haven't loaded plugins yet
|
||||
pmgr.reg_plugins(PLUGINS_DIR, self, None)
|
||||
pmgr.reg_plugins(USER_PLUGINS, self, None, load_on_reg=True)
|
||||
pdata = pmgr.get_plugin(id)
|
||||
|
||||
if pdata:
|
||||
if pdata.reset_system:
|
||||
if self.modules_is_set():
|
||||
self.reset_modules()
|
||||
else:
|
||||
self.save_modules()
|
||||
mod = pmgr.load_plugin(pdata)
|
||||
database = getattr(mod, pdata.databaseclass)
|
||||
return database()
|
||||
|
||||
def open_database(self, dbname, force_unlock=False, callback=None):
|
||||
"""
|
||||
Open a database by name and return the database.
|
||||
"""
|
||||
data = self.lookup_family_tree(dbname)
|
||||
database = None
|
||||
if data:
|
||||
dbpath, locked, locked_by, backend = data
|
||||
if (not locked) or (locked and force_unlock):
|
||||
database = self.make_database(backend)
|
||||
database.load(dbpath, callback=callback)
|
||||
return database
|
||||
|
||||
def lookup_family_tree(self, dbname):
|
||||
"""
|
||||
Find a Family Tree given its name, and return properties.
|
||||
"""
|
||||
dbdir = os.path.expanduser(config.get('behavior.database-path'))
|
||||
for dpath in os.listdir(dbdir):
|
||||
dirpath = os.path.join(dbdir, dpath)
|
||||
path_name = os.path.join(dirpath, "name.txt")
|
||||
if os.path.isfile(path_name):
|
||||
file = io.open(path_name, 'r', encoding='utf8')
|
||||
name = file.readline().strip()
|
||||
file.close()
|
||||
if dbname == name:
|
||||
locked = False
|
||||
locked_by = None
|
||||
backend = None
|
||||
fname = os.path.join(dirpath, "database.txt")
|
||||
if os.path.isfile(fname):
|
||||
ifile = io.open(fname, 'r', encoding='utf8')
|
||||
backend = ifile.read().strip()
|
||||
ifile.close()
|
||||
else:
|
||||
backend = "bsddb"
|
||||
try:
|
||||
fname = os.path.join(dirpath, "lock")
|
||||
ifile = io.open(fname, 'r', encoding='utf8')
|
||||
locked_by = ifile.read().strip()
|
||||
locked = True
|
||||
ifile.close()
|
||||
except (OSError, IOError):
|
||||
pass
|
||||
return (dirpath, locked, locked_by, backend)
|
||||
return None
|
||||
|
||||
## Work-around for databases that need sys refresh (django):
|
||||
def modules_is_set(self):
|
||||
LOG.info("modules_is_set?")
|
||||
if hasattr(self, "_modules"):
|
||||
return self._modules != None
|
||||
else:
|
||||
self._modules = None
|
||||
return False
|
||||
|
||||
def reset_modules(self):
|
||||
LOG.info("reset_modules!")
|
||||
# First, clear out old modules:
|
||||
for key in list(sys.modules.keys()):
|
||||
del(sys.modules[key])
|
||||
# Next, restore previous:
|
||||
for key in self._modules:
|
||||
sys.modules[key] = self._modules[key]
|
||||
|
||||
def save_modules(self):
|
||||
LOG.info("save_modules!")
|
||||
self._modules = sys.modules.copy()
|
||||
|
||||
|
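Taken together, lookup_family_tree and open_database let a caller open a tree without knowing its backend in advance: the backend id is read from database.txt and fed to make_database. A minimal sketch, assuming the database plugins are already registered and a tree named "Example Tree" exists under behavior.database-path:

    from gramps.gen.dbstate import DbState

    dbstate = DbState()

    info = dbstate.lookup_family_tree("Example Tree")    # None if unknown
    if info:
        dbpath, locked, locked_by, backend = info
        print(backend)                                   # e.g. "bsddb"

    database = dbstate.open_database("Example Tree")     # None if locked or missing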
@ -28,7 +28,7 @@ from gramps.cli.user import User
|
||||
from ..dbstate import DbState
|
||||
from gramps.cli.grampscli import CLIManager
|
||||
from ..plug import BasePluginManager
|
||||
from ..db.dictionary import DictionaryDb
|
||||
from gramps.plugins.database.dictionarydb import DictionaryDb
|
||||
from gramps.gen.lib.handle import HandleClass, Handle
|
||||
from gramps.gen.lib import *
|
||||
from gramps.gen.lib.personref import PersonRef
|
||||
|
@ -70,7 +70,6 @@ class Gramplet(object):
|
||||
self.connect(self.gui.textview, "motion-notify-event",
|
||||
self.gui.on_motion)
|
||||
self.connect_signal('Person', self._active_changed)
|
||||
|
||||
self._db_changed(self.dbstate.db)
|
||||
active_person = self.get_active('Person')
|
||||
if active_person: # already changed
|
||||
@ -321,8 +320,6 @@ class Gramplet(object):
|
||||
self._idle_id = 0
|
||||
LOG.debug("gramplet updater: %s : One time, done!" % self.gui.title)
|
||||
return False
|
||||
# FIXME: find out why Data Entry has this error, or just ignore it
|
||||
import bsddb3 as bsddb
|
||||
try:
|
||||
retval = next(self._generator)
|
||||
if not retval:
|
||||
@ -333,10 +330,6 @@ class Gramplet(object):
|
||||
LOG.debug("gramplet updater: %s: return %s" %
|
||||
(self.gui.title, retval))
|
||||
return retval
|
||||
except bsddb.db.DBCursorClosedError:
|
||||
# not sure why---caused by Data Entry Gramplet
|
||||
LOG.warn("bsddb.db.DBCursorClosedError in: %s" % self.gui.title)
|
||||
return False
|
||||
except StopIteration:
|
||||
self._idle_id = 0
|
||||
self._generator.close()
|
||||
|
@ -412,6 +412,11 @@ class BasePluginManager(object):
|
||||
"""
|
||||
return self.__pgr.sidebar_plugins()
|
||||
|
||||
def get_reg_databases(self):
|
||||
""" Return list of registered database backends
|
||||
"""
|
||||
return self.__pgr.database_plugins()
|
||||
|
||||
def get_external_opt_dict(self):
|
||||
""" Return the dictionary of external options. """
|
||||
return self.__external_opt_dict
|
||||
|
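get_reg_databases exposes the new DATABASE plugin category, so callers can enumerate the available backends. A small sketch, assuming plugins have already been registered:

    from gramps.gen.plug import BasePluginManager

    pmgr = BasePluginManager.get_instance()
    for pdata in pmgr.get_reg_databases():
        print(pdata.id, pdata.name, pdata.databaseclass)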
@ -70,8 +70,9 @@ VIEW = 8
|
||||
RELCALC = 9
|
||||
GRAMPLET = 10
|
||||
SIDEBAR = 11
|
||||
DATABASE = 12
|
||||
PTYPE = [REPORT , QUICKREPORT, TOOL, IMPORT, EXPORT, DOCGEN, GENERAL,
|
||||
MAPSERVICE, VIEW, RELCALC, GRAMPLET, SIDEBAR]
|
||||
MAPSERVICE, VIEW, RELCALC, GRAMPLET, SIDEBAR, DATABASE]
|
||||
PTYPE_STR = {
|
||||
REPORT: _('Report') ,
|
||||
QUICKREPORT: _('Quickreport'),
|
||||
@ -85,6 +86,7 @@ PTYPE_STR = {
|
||||
RELCALC: _('Relationships'),
|
||||
GRAMPLET: _('Gramplet'),
|
||||
SIDEBAR: _('Sidebar'),
|
||||
DATABASE: _('Database'),
|
||||
}
|
||||
|
||||
#possible report categories
|
||||
@ -206,7 +208,7 @@ class PluginData(object):
|
||||
The python path where the plugin implementation can be found
|
||||
.. attribute:: ptype
|
||||
The plugin type. One of REPORT , QUICKREPORT, TOOL, IMPORT,
|
||||
EXPORT, DOCGEN, GENERAL, MAPSERVICE, VIEW, GRAMPLET
|
||||
EXPORT, DOCGEN, GENERAL, MAPSERVICE, VIEW, GRAMPLET, DATABASE
|
||||
.. attribute:: authors
|
||||
List of authors of the plugin, default=[]
|
||||
.. attribute:: authors_email
|
||||
@ -349,6 +351,14 @@ class PluginData(object):
|
||||
the plugin is appended to the list of plugins. If START, then the
|
||||
plugin is prepended. Only set START if you want a plugin to be the
|
||||
first in the order of plugins
|
||||
|
||||
Attributes for DATABASE plugins
|
||||
|
||||
.. attribute:: databaseclass
|
||||
The class in the module that is the database class
|
||||
.. attribute:: reset_system
|
||||
Boolean to indicate that the system (sys.modules) should
|
||||
be reset.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
@ -421,6 +431,9 @@ class PluginData(object):
|
||||
self._menu_label = ''
|
||||
#VIEW and SIDEBAR attr
|
||||
self._order = END
|
||||
#DATABASE attr
|
||||
self._databaseclass = None
|
||||
self._reset_system = False
|
||||
#GENERAL attr
|
||||
self._data = []
|
||||
self._process = None
|
||||
@ -931,6 +944,26 @@ class PluginData(object):
|
||||
|
||||
order = property(_get_order, _set_order)
|
||||
|
||||
#DATABASE attributes
|
||||
def _set_databaseclass(self, databaseclass):
|
||||
if not self._ptype == DATABASE:
|
||||
raise ValueError('databaseclass may only be set for DATABASE plugins')
|
||||
self._databaseclass = databaseclass
|
||||
|
||||
def _get_databaseclass(self):
|
||||
return self._databaseclass
|
||||
|
||||
def _set_reset_system(self, reset_system):
|
||||
if not self._ptype == DATABASE:
|
||||
raise ValueError('reset_system may only be set for DATABASE plugins')
|
||||
self._reset_system = reset_system
|
||||
|
||||
def _get_reset_system(self):
|
||||
return self._reset_system
|
||||
|
||||
databaseclass = property(_get_databaseclass, _set_databaseclass)
|
||||
reset_system = property(_get_reset_system, _set_reset_system)
|
||||
|
||||
#GENERAL attr
|
||||
def _set_data(self, data):
|
||||
if not self._ptype in (GENERAL,):
|
||||
@ -1032,6 +1065,7 @@ def make_environment(**kwargs):
|
||||
'REPORT_MODE_CLI': REPORT_MODE_CLI,
|
||||
'TOOL_MODE_GUI': TOOL_MODE_GUI,
|
||||
'TOOL_MODE_CLI': TOOL_MODE_CLI,
|
||||
'DATABASE': DATABASE,
|
||||
'GRAMPSVERSION': GRAMPSVERSION,
|
||||
'START': START,
|
||||
'END': END,
|
||||
@ -1297,6 +1331,12 @@ class PluginRegister(object):
|
||||
"""
|
||||
return self.type_plugins(SIDEBAR)
|
||||
|
||||
def database_plugins(self):
|
||||
"""
|
||||
Return a list of :class:`PluginData` that are of type DATABASE
|
||||
"""
|
||||
return self.type_plugins(DATABASE)
|
||||
|
||||
def filter_load_on_reg(self):
|
||||
"""
|
||||
Return a list of :class:`PluginData` that have load_on_reg == True
|
||||
|
@ -303,7 +303,8 @@ class CallbackManager(object):
|
||||
Do a custom db connect signal outside of the primary object ones
|
||||
managed automatically.
|
||||
"""
|
||||
self.custom_signal_keys.append(self.database.connect(name, callback))
|
||||
if self.database:
|
||||
self.custom_signal_keys.append(self.database.connect(name, callback))
|
||||
|
||||
def __callbackcreator(self, signal, noarg=False):
|
||||
"""
|
||||
|
@ -136,14 +136,6 @@ if not sys.version_info >= MIN_PYTHON_VERSION :
|
||||
'v3': MIN_PYTHON_VERSION[2]})
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
import bsddb3
|
||||
except ImportError:
|
||||
logging.warning(_("\nYou don't have the python3 bsddb3 package installed."
|
||||
" This package is needed to start Gramps.\n\n"
|
||||
"Gramps will terminate now."))
|
||||
sys.exit(1)
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# Gramps libraries
|
||||
|
@ -28,7 +28,6 @@
|
||||
import os
|
||||
import sys
|
||||
import io
|
||||
import bsddb3 as bsddb
|
||||
|
||||
##import logging
|
||||
##_LOG = logging.getLogger(".GrampsAboutDialog")
|
||||
@ -60,6 +59,20 @@ _ = glocale.translation.gettext
|
||||
from gramps.gen.constfunc import get_env_var
|
||||
from .display import display_url
|
||||
|
||||
def ellipses(text):
|
||||
"""
|
||||
Ellipsize text on length 40
|
||||
"""
|
||||
if len(text) > 40:
|
||||
return text[:40] + "..."
|
||||
return text
|
||||
|
||||
try:
|
||||
import bsddb3 as bsddb ## ok, in try/except
|
||||
BSDDB_STR = ellipses(str(bsddb.__version__) + " " + str(bsddb.db.version()))
|
||||
except:
|
||||
BSDDB_STR = 'not found'
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# GrampsAboutDialog
|
||||
@ -125,19 +138,11 @@ class GrampsAboutDialog(Gtk.AboutDialog):
|
||||
"Distribution: %s")
|
||||
% (ellipses(str(VERSION)),
|
||||
ellipses(str(sys.version).replace('\n','')),
|
||||
ellipses(str(bsddb.__version__) + " " + str(bsddb.db.version())),
|
||||
BSDDB_STR,
|
||||
ellipses(get_env_var('LANG','')),
|
||||
ellipses(operatingsystem),
|
||||
ellipses(distribution)))
|
||||
|
||||
def ellipses(text):
|
||||
"""
|
||||
Ellipsize text on length 40
|
||||
"""
|
||||
if len(text) > 40:
|
||||
return text[:40] + "..."
|
||||
return text
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# AuthorParser
|
||||
|
@ -52,10 +52,10 @@ from gi.repository import GObject
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
from gramps.gen.const import GRAMPS_LOCALE as glocale
|
||||
from gramps.gen.db.dbconst import DBBACKEND
|
||||
_ = glocale.translation.gettext
|
||||
from gramps.cli.grampscli import CLIDbLoader
|
||||
from gramps.gen.config import config
|
||||
from gramps.gen.db import DbBsddb
|
||||
from gramps.gen.db.exceptions import (DbUpgradeRequiredError,
|
||||
BsddbDowngradeError,
|
||||
DbVersionError,
|
||||
@ -305,7 +305,14 @@ class DbLoader(CLIDbLoader):
|
||||
else:
|
||||
mode = 'w'
|
||||
|
||||
db = DbBsddb()
|
||||
dbid_path = os.path.join(filename, DBBACKEND)
|
||||
if os.path.isfile(dbid_path):
|
||||
with open(dbid_path) as fp:
|
||||
dbid = fp.read().strip()
|
||||
else:
|
||||
dbid = "bsddb"
|
||||
|
||||
db = self.dbstate.make_database(dbid)
|
||||
db.disable_signals()
|
||||
self.dbstate.no_database()
|
||||
|
||||
|
@ -69,20 +69,18 @@ from gi.repository import Pango
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
from gramps.gen.const import GRAMPS_LOCALE as glocale
|
||||
from gramps.gen.plug import BasePluginManager
|
||||
_ = glocale.translation.gettext
|
||||
from gramps.gen.const import URL_WIKISTRING
|
||||
from .user import User
|
||||
from .dialog import ErrorDialog, QuestionDialog, QuestionDialog2
|
||||
from gramps.gen.db import DbBsddb
|
||||
from .dialog import ErrorDialog, QuestionDialog, QuestionDialog2, ICON
|
||||
from .pluginmanager import GuiPluginManager
|
||||
from gramps.cli.clidbman import CLIDbManager, NAME_FILE, time_val
|
||||
from .ddtargets import DdTargets
|
||||
from gramps.gen.recentfiles import rename_filename, remove_filename
|
||||
from .glade import Glade
|
||||
from gramps.gen.db.backup import restore
|
||||
from gramps.gen.db.exceptions import DbException
|
||||
|
||||
|
||||
_RETURN = Gdk.keyval_from_name("Return")
|
||||
_KP_ENTER = Gdk.keyval_from_name("KP_Enter")
|
||||
|
||||
@ -106,6 +104,25 @@ ICON_COL = 6
|
||||
|
||||
RCS_BUTTON = { True : _('_Extract'), False : _('_Archive') }
|
||||
|
||||
class DatabaseDialog(Gtk.MessageDialog):
|
||||
def __init__(self, parent, options):
|
||||
"""
|
||||
options = [(pdata, number), ...]
|
||||
"""
|
||||
Gtk.MessageDialog.__init__(self,
|
||||
parent,
|
||||
flags=Gtk.DialogFlags.MODAL,
|
||||
type=Gtk.MessageType.QUESTION,
|
||||
)
|
||||
self.set_icon(ICON)
|
||||
self.set_title('')
|
||||
self.set_markup('<span size="larger" weight="bold">%s</span>' %
|
||||
_('Database Backend for New Tree'))
|
||||
self.format_secondary_text(
|
||||
_("Please select a database backend type:"))
|
||||
for option, number in options:
|
||||
self.add_button(option.name, number)
|
||||
|
||||
class DbManager(CLIDbManager):
|
||||
"""
|
||||
Database Manager. Opens a database manager window that allows users to
|
||||
@ -531,8 +548,8 @@ class DbManager(CLIDbManager):
|
||||
new_path, newname = self._create_new_db("%s : %s" % (parent_name, name))
|
||||
|
||||
self.__start_cursor(_("Extracting archive..."))
|
||||
dbclass = DbBsddb
|
||||
dbase = dbclass()
|
||||
|
||||
dbase = self.dbstate.make_database("bsddb")
|
||||
dbase.load(new_path, None)
|
||||
|
||||
self.__start_cursor(_("Importing archive..."))
|
||||
@ -719,18 +736,17 @@ class DbManager(CLIDbManager):
|
||||
fname = os.path.join(dirname, filename)
|
||||
os.unlink(fname)
|
||||
|
||||
newdb = DbBsddb()
|
||||
newdb = self.dbstate.make_database("bsddb")
|
||||
newdb.write_version(dirname)
|
||||
|
||||
dbclass = DbBsddb
|
||||
dbase = dbclass()
|
||||
dbase = self.dbstate.make_database("bsddb")
|
||||
dbase.set_save_path(dirname)
|
||||
dbase.load(dirname, None)
|
||||
|
||||
self.__start_cursor(_("Rebuilding database from backup files"))
|
||||
|
||||
try:
|
||||
restore(dbase)
|
||||
dbase.restore()
|
||||
except DbException as msg:
|
||||
DbManager.ERROR(_("Error restoring backup data"), msg)
|
||||
|
||||
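Restoring an archive no longer goes through the module-level gramps.gen.db.backup.restore(); the backend instance now provides restore() itself (and, further down, db.backup() replaces the old backup() helper in ViewManager). The calling convention in the hunk above, restated as a sketch with dbstate and dirname assumed:

    dbase = dbstate.make_database("bsddb")
    dbase.set_save_path(dirname)     # dirname: tree directory being rebuilt
    dbase.load(dirname, None)
    dbase.restore()                  # was: restore(dbase) from gramps.gen.db.backup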
@ -764,19 +780,37 @@ class DbManager(CLIDbManager):
|
||||
message.
|
||||
"""
|
||||
self.new.set_sensitive(False)
|
||||
try:
|
||||
self._create_new_db()
|
||||
except (OSError, IOError) as msg:
|
||||
DbManager.ERROR(_("Could not create Family Tree"),
|
||||
str(msg))
|
||||
dbid = None
|
||||
pmgr = BasePluginManager.get_instance()
|
||||
pdata = pmgr.get_reg_databases()
|
||||
# If just one database backend, just use it:
|
||||
if len(pdata) == 0:
|
||||
DbManager.ERROR(_("No available database backends"),
|
||||
_("Please check your dependencies."))
|
||||
elif len(pdata) == 1:
|
||||
dbid = pdata[0].id
|
||||
elif len(pdata) > 1:
|
||||
options = sorted(list(zip(pdata, range(1, len(pdata) + 1))), key=lambda items: items[0].name)
|
||||
d = DatabaseDialog(self.top, options)
|
||||
number = d.run()
|
||||
d.destroy()
|
||||
if number >= 0:
|
||||
dbid = [option[0].id for option in options if option[1] == number][0]
|
||||
### Now, let's load it up
|
||||
if dbid:
|
||||
try:
|
||||
self._create_new_db(dbid=dbid)
|
||||
except (OSError, IOError) as msg:
|
||||
DbManager.ERROR(_("Could not create Family Tree"),
|
||||
str(msg))
|
||||
self.new.set_sensitive(True)
|
||||
|
||||
def _create_new_db(self, title=None, create_db=True):
|
||||
def _create_new_db(self, title=None, create_db=True, dbid=None):
|
||||
"""
|
||||
Create a new database, append to model
|
||||
"""
|
||||
new_path, title = self.create_new_db_cli(conv_to_unicode(title, 'utf8'),
|
||||
create_db)
|
||||
create_db, dbid)
|
||||
path_name = os.path.join(new_path, NAME_FILE)
|
||||
(tval, last) = time_val(new_path)
|
||||
node = self.model.append(None, [title, new_path, path_name,
|
||||
|
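DatabaseDialog receives (pdata, number) pairs and adds one response button per backend, so run() returns the number of the chosen entry. A sketch of the wiring used in __new_family_tree above (parent_window stands in for self.top; pmgr is the plugin manager):

    pdata = pmgr.get_reg_databases()                     # registered DATABASE plugins
    options = sorted(zip(pdata, range(1, len(pdata) + 1)),
                     key=lambda item: item[0].name)
    dialog = DatabaseDialog(parent_window, options)
    number = dialog.run()
    dialog.destroy()
    if number >= 0:
        dbid = [pd.id for (pd, num) in options if num == number][0]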
@ -26,7 +26,6 @@
|
||||
# python modules
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
from bsddb3 import db as bsddb_db
|
||||
import pickle
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
@ -1026,10 +1025,11 @@ class EditFamily(EditPrimary):
|
||||
)
|
||||
|
||||
def save(self, *obj):
|
||||
try:
|
||||
self.__do_save()
|
||||
except bsddb_db.DBRunRecoveryError as msg:
|
||||
RunDatabaseRepair(msg[1])
|
||||
## FIXME: how to catch a specific error?
|
||||
#try:
|
||||
self.__do_save()
|
||||
#except bsddb_db.DBRunRecoveryError as msg:
|
||||
# RunDatabaseRepair(msg[1])
|
||||
|
||||
def __do_save(self):
|
||||
self.ok_button.set_sensitive(False)
|
||||
|
@ -30,7 +30,12 @@ from gi.repository import GdkPixbuf
|
||||
from gi.repository import GObject
|
||||
import cairo
|
||||
import sys, os
|
||||
import bsddb3 as bsddb
|
||||
|
||||
try:
|
||||
import bsddb3 as bsddb # ok, in try/except
|
||||
BSDDB_STR = str(bsddb.__version__) + " " + str(bsddb.db.version())
|
||||
except:
|
||||
BSDDB_STR = 'not found'
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
@ -166,7 +171,7 @@ class ErrorReportAssistant(Gtk.Assistant):
|
||||
"gobject version: %s\n"\
|
||||
"cairo version : %s"\
|
||||
% (str(sys.version).replace('\n',''),
|
||||
str(bsddb.__version__) + " " + str(bsddb.db.version()),
|
||||
BSDDB_STR,
|
||||
str(VERSION),
|
||||
get_env_var('LANG',''),
|
||||
operatingsystem,
|
||||
|
@ -205,7 +205,12 @@ class GuiPluginManager(Callback):
|
||||
return [plg for plg in self.basemgr.get_reg_docgens()
|
||||
if plg.id not in self.__hidden_plugins]
|
||||
|
||||
def get_reg_databases(self):
|
||||
""" Return list of non hidden registered database backends
|
||||
"""
|
||||
return [plg for plg in self.basemgr.get_reg_databases()
|
||||
if plg.id not in self.__hidden_plugins]
|
||||
|
||||
def get_reg_general(self, category=None):
|
||||
return [plg for plg in self.basemgr.get_reg_general(category)
|
||||
if plg.id not in self.__hidden_plugins]
|
||||
|
||||
|
@ -87,7 +87,6 @@ from gramps.gen.utils.file import media_path_full
|
||||
from .dbloader import DbLoader
|
||||
from .display import display_help, display_url
|
||||
from .configure import GrampsPreferences
|
||||
from gramps.gen.db.backup import backup
|
||||
from gramps.gen.db.exceptions import DbException
|
||||
from .aboutdialog import GrampsAboutDialog
|
||||
from .navigator import Navigator
|
||||
@ -762,7 +761,7 @@ class ViewManager(CLIManager):
|
||||
self.uistate.progress.show()
|
||||
self.uistate.push_message(self.dbstate, _("Autobackup..."))
|
||||
try:
|
||||
backup(self.dbstate.db)
|
||||
self.dbstate.db.backup()
|
||||
except DbException as msg:
|
||||
ErrorDialog(_("Error saving backup data"), msg)
|
||||
self.uistate.set_busy_cursor(False)
|
||||
@ -1594,6 +1593,11 @@ def run_plugin(pdata, dbstate, uistate):
|
||||
mod = pmgr.load_plugin(pdata)
|
||||
if not mod:
|
||||
#import of plugin failed
|
||||
failed = pmgr.get_fail_list()
|
||||
if failed:
|
||||
error_msg = failed[-1][1][1]
|
||||
else:
|
||||
error_msg = "(no error message)"
|
||||
ErrorDialog(
|
||||
_('Failed Loading Plugin'),
|
||||
_('The plugin %(name)s did not load and reported an error.\n\n'
|
||||
@ -1608,7 +1612,7 @@ def run_plugin(pdata, dbstate, uistate):
|
||||
'gramps_bugtracker_url' : URL_BUGHOME,
|
||||
'firstauthoremail': pdata.authors_email[0] if
|
||||
pdata.authors_email else '...',
|
||||
'error_msg': pmgr.get_fail_list()[-1][1][1]})
|
||||
'error_msg': error_msg})
|
||||
return
|
||||
|
||||
if pdata.ptype == REPORT:
|
||||
|
gramps/plugins/database/bsddb.gpr.py (new file, 31 lines)
@ -0,0 +1,31 @@
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2015 Douglas Blank <doug.blank@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

plg = newplugin()
plg.id = 'bsddb'
plg.name = _("BSDDB Database Backend")
plg.name_accell = _("_BSDDB Database Backend")
plg.description = _("Berkeley Software Distribution Database Backend")
plg.version = '1.0'
plg.gramps_target_version = "5.0"
plg.status = STABLE
plg.fname = 'bsddb.py'
plg.ptype = DATABASE
plg.databaseclass = 'DbBsddb'
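bsddb.gpr.py above is the reference registration for a DATABASE plugin. A third-party backend would register the same way; every name below (id, file name, class name) is hypothetical and only illustrates the required fields:

    plg = newplugin()
    plg.id = 'examplesql'                         # hypothetical plugin id
    plg.name = _("Example SQL Database Backend")
    plg.description = _("Illustrative third-party backend registration")
    plg.version = '1.0'
    plg.gramps_target_version = "5.0"
    plg.status = STABLE
    plg.fname = 'examplesql.py'                   # module that defines the class below
    plg.ptype = DATABASE
    plg.databaseclass = 'DbExampleSQL'            # hypothetical class name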
gramps/plugins/database/bsddb.py (new file, 3 lines)
@ -0,0 +1,3 @@

from bsddb_support import DbBsddb

gramps/plugins/database/bsddb_support/__init__.py (new file, 95 lines)
@ -0,0 +1,95 @@
|
||||
#
|
||||
# Gramps - a GTK+/GNOME based genealogy program
|
||||
#
|
||||
# Copyright (C) 2000-2007 Donald N. Allingham
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
|
||||
"""
|
||||
Gramps Database API.
|
||||
|
||||
Database Architecture
|
||||
=====================
|
||||
|
||||
Access to the database is made through Python classes. Exactly
|
||||
what functionality you have is dependent on the properties of the
|
||||
database. For example, if you are accessing a read-only view, then
|
||||
you will only have access to a subset of the methods available.
|
||||
|
||||
At the root of any database interface is either :py:class:`.DbReadBase` and/or
|
||||
:py:class:`.DbWriteBase`. These define the methods to read and write to a
|
||||
database, respectively.
|
||||
|
||||
The full database hierarchy is:
|
||||
|
||||
- :py:class:`.DbBsddb` - read and write implementation to BSDDB databases
|
||||
|
||||
* :py:class:`.DbWriteBase` - virtual and implementation-independent methods
|
||||
for writing data
|
||||
|
||||
* :py:class:`.DbBsddbRead` - read-only (accessors, getters) implementation
|
||||
to BSDDB databases
|
||||
|
||||
+ :py:class:`.DbReadBase` - virtual and implementation-independent
|
||||
methods for reading data
|
||||
|
||||
+ :py:class:`.Callback` - callback and signal functions
|
||||
|
||||
* :py:class:`.UpdateCallback` - callback functionality
|
||||
|
||||
- :py:class:`.DbDjango` - read and write implementation to Django-based
|
||||
databases
|
||||
|
||||
* :py:class:`.DbWriteBase` - virtual and implementation-independent methods
|
||||
for writing data
|
||||
|
||||
* :py:class:`.DbReadBase` - virtual and implementation-independent methods
|
||||
for reading data
|
||||
|
||||
DbBsddb
|
||||
=======
|
||||
|
||||
The :py:class:`.DbBsddb` interface defines a hierarchical database
|
||||
(non-relational) written in
|
||||
`PyBSDDB <http://www.jcea.es/programacion/pybsddb.htm>`_. There is no
|
||||
such thing as a database schema, and the meaning of the data is
|
||||
defined in the Python classes above. The data is stored as pickled
|
||||
tuples and unserialized into the primary data types (below).
|
||||
|
||||
DbDjango
|
||||
========
|
||||
|
||||
The DbDjango interface defines the Gramps data in terms of
|
||||
*models* and *relations* from the
|
||||
`Django project <http://www.djangoproject.com/>`_. The database
|
||||
backend can be any implementation that supports Django, including
|
||||
such popular SQL implementations as sqlite, MySQL, Postgresql, and
|
||||
Oracle. The data is retrieved from the SQL fields, serialized and
|
||||
then unserialized into the primary data types (below).
|
||||
|
||||
More details can be found in the manual's
|
||||
`Using database API <http://www.gramps-project.org/wiki/index.php?title=Using_database_API>`_.
|
||||
"""
|
||||
|
||||
from gramps.gen.db.base import *
|
||||
from gramps.gen.db.dbconst import *
|
||||
from .cursor import *
|
||||
from .read import *
|
||||
from .bsddbtxn import *
|
||||
from gramps.gen.db.txn import *
|
||||
from .undoredo import *
|
||||
from gramps.gen.db.exceptions import *
|
||||
from .write import *
|
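As a quick illustration of the hierarchy documented above: the wildcard import of .write exposes DbBsddb from this package, and that class implements both the read and the write interfaces. A minimal sketch, assuming a Gramps source tree is importable and that DbBsddb can still be constructed without arguments:

from gramps.plugins.database.bsddb_support import DbBsddb
from gramps.gen.db.base import DbReadBase, DbWriteBase

db = DbBsddb()
# read-only accessors come from DbBsddbRead -> DbReadBase,
# the mutating methods from DbWriteBase
assert isinstance(db, DbReadBase) and isinstance(db, DbWriteBase)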
74
gramps/plugins/database/bsddb_support/backup.py
Normal file
@ -0,0 +1,74 @@
|
||||
#
|
||||
# Gramps - a GTK+/GNOME based genealogy program
|
||||
#
|
||||
# Copyright (C) 2007 Donald N. Allingham
|
||||
# Copyright (C) 2011 Tim G L Lyons
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# gen/db/backup.py
|
||||
|
||||
"""
|
||||
Description
|
||||
===========
|
||||
|
||||
This module provides backup and restore functions for a database. The
|
||||
backup function saves the data into backup files, while the restore
|
||||
function loads the data back into a database.
|
||||
|
||||
You should only restore the data into an empty database.
|
||||
|
||||
Implementation
|
||||
==============
|
||||
|
||||
Not all of the database tables need to be backed up, since many are
|
||||
automatically generated from the others. The tables that are backed up
|
||||
are the primary tables and the metadata table.
|
||||
|
||||
The database consists of a table of "pickled" tuples. Each of the
|
||||
primary tables is "walked", and the pickled tuple is extracted, and
|
||||
written to the backup file.
|
||||
|
||||
Restoring the data is just as simple. The backup file is parsed one
entry at a time, and each entry is inserted into the associated database table. The
|
||||
derived tables are built automatically as the items are entered into
|
||||
the database.
|
||||
"""
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# load standard python libraries
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
import os
|
||||
import pickle
|
||||
|
||||
#------------------------------------------------------------------------
|
||||
#
|
||||
# Gramps libs
|
||||
#
|
||||
#------------------------------------------------------------------------
|
||||
from gramps.gen.db.exceptions import DbException
|
||||
from .write import FAMILY_TBL, PLACES_TBL, SOURCES_TBL, MEDIA_TBL, \
|
||||
EVENTS_TBL, PERSON_TBL, REPO_TBL, NOTE_TBL, TAG_TBL, META, CITATIONS_TBL
|
||||
|
||||
#------------------------------------------------------------------------
|
||||
#
|
||||
# Set up logging
|
||||
#
|
||||
#------------------------------------------------------------------------
|
||||
import logging
|
||||
LOG = logging.getLogger(".Backup")
|
||||
|
@ -53,30 +53,32 @@ import logging
|
||||
# GRAMPS libraries
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
from ..lib.mediaobj import MediaObject
|
||||
from ..lib.person import Person
|
||||
from ..lib.family import Family
|
||||
from ..lib.src import Source
|
||||
from ..lib.citation import Citation
|
||||
from ..lib.event import Event
|
||||
from ..lib.place import Place
|
||||
from ..lib.repo import Repository
|
||||
from ..lib.note import Note
|
||||
from ..lib.tag import Tag
|
||||
from ..lib.genderstats import GenderStats
|
||||
from ..lib.researcher import Researcher
|
||||
from ..lib.nameorigintype import NameOriginType
|
||||
from gramps.gen.lib.mediaobj import MediaObject
|
||||
from gramps.gen.lib.person import Person
|
||||
from gramps.gen.lib.family import Family
|
||||
from gramps.gen.lib.src import Source
|
||||
from gramps.gen.lib.citation import Citation
|
||||
from gramps.gen.lib.event import Event
|
||||
from gramps.gen.lib.place import Place
|
||||
from gramps.gen.lib.repo import Repository
|
||||
from gramps.gen.lib.note import Note
|
||||
from gramps.gen.lib.tag import Tag
|
||||
from gramps.gen.lib.genderstats import GenderStats
|
||||
from gramps.gen.lib.researcher import Researcher
|
||||
from gramps.gen.lib.nameorigintype import NameOriginType
|
||||
|
||||
from .dbconst import *
|
||||
from ..utils.callback import Callback
|
||||
from ..utils.cast import conv_dbstr_to_unicode
|
||||
from . import (BsddbBaseCursor, DbReadBase)
|
||||
from ..utils.id import create_id
|
||||
from ..errors import DbError
|
||||
from ..constfunc import handle2internal, get_env_var
|
||||
from ..const import GRAMPS_LOCALE as glocale
|
||||
from gramps.gen.utils.callback import Callback
|
||||
from gramps.gen.utils.cast import conv_dbstr_to_unicode
|
||||
from . import BsddbBaseCursor
|
||||
from gramps.gen.db.base import DbReadBase
|
||||
from gramps.gen.utils.id import create_id
|
||||
from gramps.gen.errors import DbError
|
||||
from gramps.gen.constfunc import handle2internal, get_env_var
|
||||
from gramps.gen.const import GRAMPS_LOCALE as glocale
|
||||
_ = glocale.translation.gettext
|
||||
|
||||
from gramps.gen.db.dbconst import *
|
||||
|
||||
LOG = logging.getLogger(DBLOGNAME)
|
||||
LOG = logging.getLogger(".citation")
|
||||
#-------------------------------------------------------------------------
|
||||
@ -84,7 +86,6 @@ LOG = logging.getLogger(".citation")
|
||||
# constants
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
from .dbconst import *
|
||||
|
||||
_SIGBASE = ('person', 'family', 'source', 'citation',
|
||||
'event', 'media', 'place', 'repository',
|
||||
@ -1975,3 +1976,16 @@ class DbBsddbRead(DbReadBase, Callback):
|
||||
self.__log_error()
|
||||
name = None
|
||||
return name
|
||||
|
||||
def get_summary(self):
|
||||
"""
|
||||
Return a dictionary of summary items.
|
||||
Should include, if possible:
|
||||
|
||||
_("Number of people")
|
||||
_("Version")
|
||||
_("Schema version")
|
||||
"""
|
||||
return {
|
||||
_("Number of people"): self.get_number_of_people(),
|
||||
}
|
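A short usage sketch for the new method, assuming db is an already-loaded DbBsddbRead (or subclass) instance:

summary = db.get_summary()
for item, value in summary.items():
    print("%s: %s" % (item, value))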
62
gramps/plugins/database/bsddb_support/summary.py
Normal file
@ -0,0 +1,62 @@
|
||||
## Removed from clidbman.py
|
||||
## specific to bsddb
|
||||
|
||||
from bsddb3 import dbshelve, db
|
||||
import os
|
||||
|
||||
from gramps.gen.db import META, PERSON_TBL
|
||||
from gramps.gen.db.dbconst import BDBVERSFN
|
||||
|
||||
def get_dbdir_summary(dirpath, name):
|
||||
"""
|
||||
Returns (people_count, bsddb_version, schema_version) of
|
||||
current DB.
|
||||
Returns ("Unknown", "Unknown", "Unknown") if invalid DB or other error.
|
||||
"""
|
||||
|
||||
bdbversion_file = os.path.join(dirpath, BDBVERSFN)
|
||||
if os.path.isfile(bdbversion_file):
|
||||
vers_file = open(bdbversion_file)
|
||||
bsddb_version = vers_file.readline().strip()
|
||||
else:
|
||||
return "Unknown", "Unknown", "Unknown"
|
||||
|
||||
current_bsddb_version = str(db.version())
|
||||
if bsddb_version != current_bsddb_version:
|
||||
return "Unknown", bsddb_version, "Unknown"
|
||||
|
||||
env = db.DBEnv()
|
||||
flags = db.DB_CREATE | db.DB_PRIVATE |\
|
||||
db.DB_INIT_MPOOL |\
|
||||
db.DB_INIT_LOG | db.DB_INIT_TXN
|
||||
try:
|
||||
env.open(dirpath, flags)
|
||||
except Exception as msg:
|
||||
LOG.warning("Error opening db environment for '%s': %s" %
|
||||
(name, str(msg)))
|
||||
try:
|
||||
env.close()
|
||||
except Exception as msg:
|
||||
LOG.warning("Error closing db environment for '%s': %s" %
|
||||
(name, str(msg)))
|
||||
return "Unknown", bsddb_version, "Unknown"
|
||||
dbmap1 = dbshelve.DBShelf(env)
|
||||
fname = os.path.join(dirpath, META + ".db")
|
||||
try:
|
||||
dbmap1.open(fname, META, db.DB_HASH, db.DB_RDONLY)
|
||||
except:
|
||||
env.close()
|
||||
return "Unknown", bsddb_version, "Unknown"
|
||||
schema_version = dbmap1.get(b'version', default=None)
|
||||
dbmap1.close()
|
||||
dbmap2 = dbshelve.DBShelf(env)
|
||||
fname = os.path.join(dirpath, PERSON_TBL + ".db")
|
||||
try:
|
||||
dbmap2.open(fname, PERSON_TBL, db.DB_HASH, db.DB_RDONLY)
|
||||
except:
|
||||
env.close()
|
||||
return "Unknown", bsddb_version, schema_version
|
||||
count = len(dbmap2)
|
||||
dbmap2.close()
|
||||
env.close()
|
||||
return (count, bsddb_version, schema_version)
|
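A minimal sketch of calling the helper above; the tree path and name are placeholders, and on any error the function falls back to the "Unknown" values described in its docstring:

people, bsddb_version, schema_version = get_dbdir_summary("/path/to/tree", "My Tree")
print(people, bsddb_version, schema_version)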
516
gramps/plugins/database/bsddb_support/undoredo.py
Normal file
@ -0,0 +1,516 @@
|
||||
#
|
||||
# Gramps - a GTK+/GNOME based genealogy program
|
||||
#
|
||||
# Copyright (C) 2004-2006 Donald N. Allingham
|
||||
# Copyright (C) 2011 Tim G L Lyons
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
|
||||
"""
|
||||
Exports the DbUndo class for managing Gramps transactions
|
||||
undos and redos.
|
||||
"""
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# Standard python modules
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
import time, os
|
||||
import pickle
|
||||
from collections import deque
|
||||
|
||||
try:
|
||||
from bsddb3 import db
|
||||
except:
|
||||
# FIXME: make this more abstract to deal with other backends
|
||||
class db:
|
||||
DBRunRecoveryError = 0
|
||||
DBAccessError = 0
|
||||
DBPageNotFoundError = 0
|
||||
DBInvalidArgError = 0
|
||||
|
||||
from gramps.gen.const import GRAMPS_LOCALE as glocale
|
||||
_ = glocale.translation.gettext
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# Gramps modules
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
from gramps.gen.constfunc import conv_to_unicode, handle2internal, win
|
||||
from gramps.gen.db.dbconst import *
|
||||
from . import BSDDBTxn
|
||||
from gramps.gen.errors import DbError
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# Local Constants
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
DBERRS = (db.DBRunRecoveryError, db.DBAccessError,
|
||||
db.DBPageNotFoundError, db.DBInvalidArgError)
|
||||
|
||||
_SIGBASE = ('person', 'family', 'source', 'event', 'media',
|
||||
'place', 'repository', 'reference', 'note', 'tag', 'citation')
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# DbUndo class
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
class DbUndo(object):
|
||||
"""
|
||||
Base class for the Gramps undo/redo manager. Needs to be subclassed
|
||||
for use with a real backend.
|
||||
"""
|
||||
|
||||
__slots__ = ('undodb', 'db', 'mapbase', 'undo_history_timestamp',
|
||||
'txn', 'undoq', 'redoq')
|
||||
|
||||
def __init__(self, grampsdb):
|
||||
"""
|
||||
Class constructor. Set up main instance variables
|
||||
"""
|
||||
self.db = grampsdb
|
||||
self.undoq = deque()
|
||||
self.redoq = deque()
|
||||
self.undo_history_timestamp = time.time()
|
||||
self.txn = None
|
||||
# N.B. the databases have to be in the same order as the numbers in
|
||||
# xxx_KEY in gen/db/dbconst.py
|
||||
self.mapbase = (
|
||||
self.db.person_map,
|
||||
self.db.family_map,
|
||||
self.db.source_map,
|
||||
self.db.event_map,
|
||||
self.db.media_map,
|
||||
self.db.place_map,
|
||||
self.db.repository_map,
|
||||
self.db.reference_map,
|
||||
self.db.note_map,
|
||||
self.db.tag_map,
|
||||
self.db.citation_map,
|
||||
)
|
||||
|
||||
def clear(self):
|
||||
"""
|
||||
Clear the undo/redo list (but not the backing storage)
|
||||
"""
|
||||
self.undoq.clear()
|
||||
self.redoq.clear()
|
||||
self.undo_history_timestamp = time.time()
|
||||
self.txn = None
|
||||
|
||||
def __enter__(self, value):
|
||||
"""
|
||||
Context manager method to establish the context
|
||||
"""
|
||||
self.open(value)
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
"""
|
||||
Context manager method to finish the context
|
||||
"""
|
||||
if exc_type is None:
|
||||
self.close()
|
||||
return exc_type is None
|
||||
|
||||
def open(self, value):
|
||||
"""
|
||||
Open the backing storage. Needs to be overridden in the derived
|
||||
class.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def close(self):
|
||||
"""
|
||||
Close the backing storage. Needs to be overridden in the derived
|
||||
class.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def append(self, value):
|
||||
"""
|
||||
Add a new entry on the end. Needs to be overridden in the derived
|
||||
class.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def __getitem__(self, index):
|
||||
"""
|
||||
Returns an entry by index number. Needs to be overridden in the
|
||||
derived class.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def __setitem__(self, index, value):
|
||||
"""
|
||||
Set an entry to a value. Needs to be overridden in the derived class.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def __len__(self):
|
||||
"""
|
||||
Returns the number of entries. Needs to be overridden in the derived
|
||||
class.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def commit(self, txn, msg):
|
||||
"""
|
||||
Commit the transaction to the undo/redo database. "txn" should be
|
||||
an instance of the Gramps transaction class
|
||||
"""
|
||||
txn.set_description(msg)
|
||||
txn.timestamp = time.time()
|
||||
self.undoq.append(txn)
|
||||
|
||||
def undo(self, update_history=True):
|
||||
"""
|
||||
Undo a previously committed transaction
|
||||
"""
|
||||
if self.db.readonly or self.undo_count == 0:
|
||||
return False
|
||||
return self.__undo(update_history)
|
||||
|
||||
def redo(self, update_history=True):
|
||||
"""
|
||||
Redo a previously committed, then undone, transaction
|
||||
"""
|
||||
if self.db.readonly or self.redo_count == 0:
|
||||
return False
|
||||
return self.__redo(update_history)
|
||||
|
||||
def undoredo(func):
|
||||
"""
|
||||
Decorator function to wrap undo and redo operations within a bsddb
|
||||
transaction. It also catches bsddb errors and raises an exception
|
||||
as appropriate
|
||||
"""
|
||||
def try_(self, *args, **kwargs):
|
||||
try:
|
||||
with BSDDBTxn(self.db.env) as txn:
|
||||
self.txn = self.db.txn = txn.txn
|
||||
status = func(self, *args, **kwargs)
|
||||
if not status:
|
||||
txn.abort()
|
||||
self.db.txn = None
|
||||
return status
|
||||
|
||||
except DBERRS as msg:
|
||||
self.db._log_error()
|
||||
raise DbError(msg)
|
||||
|
||||
return try_
|
||||
|
||||
@undoredo
|
||||
def __undo(self, update_history=True):
|
||||
"""
|
||||
Access the last committed transaction, and revert the data to the
|
||||
state before the transaction was committed.
|
||||
"""
|
||||
txn = self.undoq.pop()
|
||||
self.redoq.append(txn)
|
||||
transaction = txn
|
||||
db = self.db
|
||||
subitems = transaction.get_recnos(reverse=True)
|
||||
|
||||
# Process all records in the transaction
|
||||
for record_id in subitems:
|
||||
(key, trans_type, handle, old_data, new_data) = \
|
||||
pickle.loads(self.undodb[record_id])
|
||||
|
||||
if key == REFERENCE_KEY:
|
||||
self.undo_reference(old_data, handle, self.mapbase[key])
|
||||
else:
|
||||
self.undo_data(old_data, handle, self.mapbase[key],
|
||||
db.emit, _SIGBASE[key])
|
||||
# Notify listeners
|
||||
if db.undo_callback:
|
||||
if self.undo_count > 0:
|
||||
db.undo_callback(_("_Undo %s")
|
||||
% self.undoq[-1].get_description())
|
||||
else:
|
||||
db.undo_callback(None)
|
||||
|
||||
if db.redo_callback:
|
||||
db.redo_callback(_("_Redo %s")
|
||||
% transaction.get_description())
|
||||
|
||||
if update_history and db.undo_history_callback:
|
||||
db.undo_history_callback()
|
||||
return True
|
||||
|
||||
@undoredo
|
||||
def __redo(self, db=None, update_history=True):
|
||||
"""
|
||||
Access the last undone transaction, and revert the data to the state
|
||||
before the transaction was undone.
|
||||
"""
|
||||
txn = self.redoq.pop()
|
||||
self.undoq.append(txn)
|
||||
transaction = txn
|
||||
db = self.db
|
||||
subitems = transaction.get_recnos()
|
||||
|
||||
# Process all records in the transaction
|
||||
for record_id in subitems:
|
||||
(key, trans_type, handle, old_data, new_data) = \
|
||||
pickle.loads(self.undodb[record_id])
|
||||
|
||||
if key == REFERENCE_KEY:
|
||||
self.undo_reference(new_data, handle, self.mapbase[key])
|
||||
else:
|
||||
self.undo_data(new_data, handle, self.mapbase[key],
|
||||
db.emit, _SIGBASE[key])
|
||||
# Notify listeners
|
||||
if db.undo_callback:
|
||||
db.undo_callback(_("_Undo %s")
|
||||
% transaction.get_description())
|
||||
|
||||
if db.redo_callback:
|
||||
if self.redo_count > 1:
|
||||
new_transaction = self.redoq[-2]
|
||||
db.redo_callback(_("_Redo %s")
|
||||
% new_transaction.get_description())
|
||||
else:
|
||||
db.redo_callback(None)
|
||||
|
||||
if update_history and db.undo_history_callback:
|
||||
db.undo_history_callback()
|
||||
return True
|
||||
|
||||
def undo_reference(self, data, handle, db_map):
|
||||
"""
|
||||
Helper method to undo a reference map entry
|
||||
"""
|
||||
try:
|
||||
if data is None:
|
||||
db_map.delete(handle, txn=self.txn)
|
||||
else:
|
||||
db_map.put(handle, data, txn=self.txn)
|
||||
|
||||
except DBERRS as msg:
|
||||
self.db._log_error()
|
||||
raise DbError(msg)
|
||||
|
||||
def undo_data(self, data, handle, db_map, emit, signal_root):
|
||||
"""
|
||||
Helper method to undo/redo the changes made
|
||||
"""
|
||||
try:
|
||||
if data is None:
|
||||
emit(signal_root + '-delete', ([handle2internal(handle)],))
|
||||
db_map.delete(handle, txn=self.txn)
|
||||
else:
|
||||
ex_data = db_map.get(handle, txn=self.txn)
|
||||
if ex_data:
|
||||
signal = signal_root + '-update'
|
||||
else:
|
||||
signal = signal_root + '-add'
|
||||
db_map.put(handle, data, txn=self.txn)
|
||||
emit(signal, ([handle2internal(handle)],))
|
||||
|
||||
except DBERRS as msg:
|
||||
self.db._log_error()
|
||||
raise DbError(msg)
|
||||
|
||||
undo_count = property(lambda self:len(self.undoq))
|
||||
redo_count = property(lambda self:len(self.redoq))
|
||||
|
||||
class DbUndoList(DbUndo):
|
||||
"""
|
||||
Implementation of the Gramps undo database using a Python list
|
||||
"""
|
||||
def __init__(self, grampsdb):
|
||||
"""
|
||||
Class constructor
|
||||
"""
|
||||
super(DbUndoList, self).__init__(grampsdb)
|
||||
self.undodb = []
|
||||
|
||||
def open(self):
|
||||
"""
|
||||
A list does not need to be opened
|
||||
"""
|
||||
pass
|
||||
|
||||
def close(self):
|
||||
"""
|
||||
Close the list by resetting it to empty
|
||||
"""
|
||||
self.undodb = []
|
||||
self.clear()
|
||||
|
||||
def append(self, value):
|
||||
"""
|
||||
Add an entry on the end of the list
|
||||
"""
|
||||
self.undodb.append(value)
|
||||
return len(self.undodb)-1
|
||||
|
||||
def __getitem__(self, index):
|
||||
"""
|
||||
Return an item at the specified index
|
||||
"""
|
||||
return self.undodb[index]
|
||||
|
||||
def __setitem__(self, index, value):
|
||||
"""
|
||||
Set an item at the specified index to the given value
|
||||
"""
|
||||
self.undodb[index] = value
|
||||
|
||||
def __iter__(self):
|
||||
"""
|
||||
Iterator
|
||||
"""
|
||||
for item in self.undodb:
|
||||
yield item
|
||||
|
||||
def __len__(self):
|
||||
"""
|
||||
Return number of entries in the list
|
||||
"""
|
||||
return len(self.undodb)
|
||||
|
||||
class DbUndoBSDDB(DbUndo):
|
||||
"""
|
||||
Gramps undo/redo database using a bsddb recno
|
||||
database as the backing store.
|
||||
"""
|
||||
|
||||
def __init__(self, grampsdb, path):
|
||||
"""
|
||||
Class constructor
|
||||
"""
|
||||
super(DbUndoBSDDB, self).__init__(grampsdb)
|
||||
self.undodb = db.DB()
|
||||
self.path = path
|
||||
|
||||
def open(self):
|
||||
"""
|
||||
Open the undo/redo database
|
||||
"""
|
||||
path = self.path
|
||||
self.undodb.open(path, db.DB_RECNO, db.DB_CREATE)
|
||||
|
||||
def close(self):
|
||||
"""
|
||||
Close the undo/redo database
|
||||
"""
|
||||
self.undodb.close()
|
||||
self.undodb = None
|
||||
self.mapbase = None
|
||||
self.db = None
|
||||
|
||||
try:
|
||||
os.remove(self.path)
|
||||
except OSError:
|
||||
pass
|
||||
self.clear()
|
||||
|
||||
def append(self, value):
|
||||
"""
|
||||
Add an entry on the end of the database
|
||||
"""
|
||||
return self.undodb.append(value)
|
||||
|
||||
def __len__(self):
|
||||
"""
|
||||
Returns the number of entries in the database
|
||||
"""
|
||||
x = self.undodb.stat()['nkeys']
|
||||
y = len(self.undodb)
|
||||
assert x == y
|
||||
return x
|
||||
|
||||
def __getitem__(self, index):
|
||||
"""
|
||||
Returns the entry stored at the specified index
|
||||
"""
|
||||
return self.undodb.get(index)
|
||||
|
||||
def __setitem__(self, index, value):
|
||||
"""
|
||||
Sets the entry stored at the specified index to the value given.
|
||||
"""
|
||||
self.undodb.put(index, value)
|
||||
|
||||
def __iter__(self):
|
||||
"""
|
||||
Iterator
|
||||
"""
|
||||
cursor = self.undodb.cursor()
|
||||
data = cursor.first()
|
||||
while data:
|
||||
yield data
|
||||
data = next(cursor)
|
||||
|
||||
def testundo():
|
||||
class T:
|
||||
def __init__(self):
|
||||
self.msg = ''
|
||||
self.timestamp = 0
|
||||
def set_description(self, msg):
|
||||
self.msg = msg
|
||||
|
||||
class D:
|
||||
def __init__(self):
|
||||
self.person_map = {}
|
||||
self.family_map = {}
|
||||
self.source_map = {}
|
||||
self.event_map = {}
|
||||
self.media_map = {}
|
||||
self.place_map = {}
|
||||
self.note_map = {}
|
||||
self.tag_map = {}
|
||||
self.repository_map = {}
|
||||
self.reference_map = {}
|
||||
|
||||
print("list tests")
|
||||
undo = DbUndoList(D())
|
||||
print(undo.append('foo'))
|
||||
print(undo.append('bar'))
|
||||
print(undo[0])
|
||||
undo[0] = 'foobar'
|
||||
print(undo[0])
|
||||
print("len", len(undo))
|
||||
print("iter")
|
||||
for data in undo:
|
||||
print(data)
|
||||
print()
|
||||
print("bsddb tests")
|
||||
undo = DbUndoBSDDB(D(), '/tmp/testundo')
|
||||
undo.open()
|
||||
print(undo.append('foo'))
|
||||
print(undo.append('fo2'))
|
||||
print(undo.append('fo3'))
|
||||
print(undo[1])
|
||||
undo[1] = 'bar'
|
||||
print(undo[1])
|
||||
for data in undo:
|
||||
print(data)
|
||||
print("len", len(undo))
|
||||
|
||||
print("test commit")
|
||||
undo.commit(T(), msg="test commit")
|
||||
undo.close()
|
||||
|
||||
if __name__ == '__main__':
|
||||
testundo()
|
@ -39,23 +39,24 @@ from bsddb3 import db
|
||||
# Gramps modules
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
from ..const import GRAMPS_LOCALE as glocale
|
||||
from gramps.gen.const import GRAMPS_LOCALE as glocale
|
||||
_ = glocale.translation.gettext
|
||||
from ..constfunc import handle2internal
|
||||
from ..lib.markertype import MarkerType
|
||||
from ..lib.nameorigintype import NameOriginType
|
||||
from ..lib.place import Place
|
||||
from ..lib.placeref import PlaceRef
|
||||
from ..lib.placetype import PlaceType
|
||||
from ..lib.placename import PlaceName
|
||||
from ..lib.eventtype import EventType
|
||||
from ..lib.tag import Tag
|
||||
from ..utils.file import create_checksum
|
||||
from ..utils.id import create_id
|
||||
from gramps.gen.constfunc import handle2internal
|
||||
from gramps.gen.lib.markertype import MarkerType
|
||||
from gramps.gen.lib.nameorigintype import NameOriginType
|
||||
from gramps.gen.lib.place import Place
|
||||
from gramps.gen.lib.placeref import PlaceRef
|
||||
from gramps.gen.lib.placetype import PlaceType
|
||||
from gramps.gen.lib.placename import PlaceName
|
||||
from gramps.gen.lib.eventtype import EventType
|
||||
from gramps.gen.lib.tag import Tag
|
||||
from gramps.gen.utils.file import create_checksum
|
||||
from gramps.gen.utils.id import create_id
|
||||
from . import BSDDBTxn
|
||||
from .write import _mkname, SURNAMES
|
||||
from .dbconst import (PERSON_KEY, FAMILY_KEY, EVENT_KEY,
|
||||
MEDIA_KEY, PLACE_KEY, REPOSITORY_KEY, SOURCE_KEY)
|
||||
from gramps.gen.db.dbconst import (PERSON_KEY, FAMILY_KEY, EVENT_KEY,
|
||||
MEDIA_KEY, PLACE_KEY, REPOSITORY_KEY,
|
||||
SOURCE_KEY)
|
||||
from gramps.gui.dialog import (InfoDialog)
|
||||
|
||||
LOG = logging.getLogger(".upgrade")
|
||||
@ -359,7 +360,7 @@ def upgrade_datamap_17(datamap):
|
||||
"""
|
||||
new_srcattr_list = []
|
||||
private = False
|
||||
from ..lib.srcattrtype import SrcAttributeType
|
||||
from gramps.gen.lib.srcattrtype import SrcAttributeType
|
||||
for (key, value) in datamap.items():
|
||||
the_type = SrcAttributeType(key).serialize()
|
||||
new_srcattr_list.append((private, the_type, value))
|
@ -40,49 +40,46 @@ from functools import wraps
|
||||
import logging
|
||||
from sys import maxsize, getfilesystemencoding, version_info
|
||||
|
||||
try:
|
||||
from bsddb3 import dbshelve, db
|
||||
except:
|
||||
# FIXME: make this more abstract to deal with other backends
|
||||
class db:
|
||||
DB_HASH = 0
|
||||
DBRunRecoveryError = 0
|
||||
DBAccessError = 0
|
||||
DBPageNotFoundError = 0
|
||||
DBInvalidArgError = 0
|
||||
from bsddb3 import dbshelve, db
|
||||
from bsddb3.db import DB_CREATE, DB_AUTO_COMMIT, DB_DUP, DB_DUPSORT, DB_RDONLY
|
||||
|
||||
DBFLAGS_O = DB_CREATE | DB_AUTO_COMMIT # Default flags for database open
|
||||
DBFLAGS_R = DB_RDONLY # Flags to open a database read-only
|
||||
DBFLAGS_D = DB_DUP | DB_DUPSORT # Default flags for duplicate keys
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# Gramps modules
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
from ..lib.person import Person
|
||||
from ..lib.family import Family
|
||||
from ..lib.src import Source
|
||||
from ..lib.citation import Citation
|
||||
from ..lib.event import Event
|
||||
from ..lib.place import Place
|
||||
from ..lib.repo import Repository
|
||||
from ..lib.mediaobj import MediaObject
|
||||
from ..lib.note import Note
|
||||
from ..lib.tag import Tag
|
||||
from ..lib.genderstats import GenderStats
|
||||
from ..lib.researcher import Researcher
|
||||
from gramps.gen.lib.person import Person
|
||||
from gramps.gen.lib.family import Family
|
||||
from gramps.gen.lib.src import Source
|
||||
from gramps.gen.lib.citation import Citation
|
||||
from gramps.gen.lib.event import Event
|
||||
from gramps.gen.lib.place import Place
|
||||
from gramps.gen.lib.repo import Repository
|
||||
from gramps.gen.lib.mediaobj import MediaObject
|
||||
from gramps.gen.lib.note import Note
|
||||
from gramps.gen.lib.tag import Tag
|
||||
from gramps.gen.lib.genderstats import GenderStats
|
||||
from gramps.gen.lib.researcher import Researcher
|
||||
|
||||
from . import (DbBsddbRead, DbWriteBase, BSDDBTxn,
|
||||
DbTxn, BsddbBaseCursor, BsddbDowngradeError, DbVersionError,
|
||||
DbEnvironmentError, DbUpgradeRequiredError, find_surname,
|
||||
find_byte_surname, find_surname_name, DbUndoBSDDB as DbUndo,
|
||||
exceptions)
|
||||
from .dbconst import *
|
||||
from ..utils.callback import Callback
|
||||
from ..utils.cast import conv_dbstr_to_unicode
|
||||
from ..utils.id import create_id
|
||||
from ..updatecallback import UpdateCallback
|
||||
from ..errors import DbError
|
||||
from ..constfunc import (win, conv_to_unicode, handle2internal,
|
||||
find_byte_surname, find_surname_name, DbUndoBSDDB as DbUndo)
|
||||
|
||||
from gramps.gen.db import exceptions
|
||||
from gramps.gen.db.dbconst import *
|
||||
from gramps.gen.utils.callback import Callback
|
||||
from gramps.gen.utils.cast import conv_dbstr_to_unicode
|
||||
from gramps.gen.utils.id import create_id
|
||||
from gramps.gen.updatecallback import UpdateCallback
|
||||
from gramps.gen.errors import DbError
|
||||
from gramps.gen.constfunc import (win, conv_to_unicode, handle2internal,
|
||||
get_env_var)
|
||||
from ..const import HOME_DIR, GRAMPS_LOCALE as glocale
|
||||
from gramps.gen.const import HOME_DIR, GRAMPS_LOCALE as glocale
|
||||
_ = glocale.translation.gettext
|
||||
|
||||
_LOG = logging.getLogger(DBLOGNAME)
|
||||
@ -133,39 +130,6 @@ DBERRS = (db.DBRunRecoveryError, db.DBAccessError,
|
||||
# these maps or modifying the values of the keys will break
|
||||
# existing databases.
|
||||
|
||||
CLASS_TO_KEY_MAP = {Person.__name__: PERSON_KEY,
|
||||
Family.__name__: FAMILY_KEY,
|
||||
Source.__name__: SOURCE_KEY,
|
||||
Citation.__name__: CITATION_KEY,
|
||||
Event.__name__: EVENT_KEY,
|
||||
MediaObject.__name__: MEDIA_KEY,
|
||||
Place.__name__: PLACE_KEY,
|
||||
Repository.__name__:REPOSITORY_KEY,
|
||||
Note.__name__: NOTE_KEY,
|
||||
Tag.__name__: TAG_KEY}
|
||||
|
||||
KEY_TO_CLASS_MAP = {PERSON_KEY: Person.__name__,
|
||||
FAMILY_KEY: Family.__name__,
|
||||
SOURCE_KEY: Source.__name__,
|
||||
CITATION_KEY: Citation.__name__,
|
||||
EVENT_KEY: Event.__name__,
|
||||
MEDIA_KEY: MediaObject.__name__,
|
||||
PLACE_KEY: Place.__name__,
|
||||
REPOSITORY_KEY: Repository.__name__,
|
||||
NOTE_KEY: Note.__name__,
|
||||
TAG_KEY: Tag.__name__}
|
||||
|
||||
KEY_TO_NAME_MAP = {PERSON_KEY: 'person',
|
||||
FAMILY_KEY: 'family',
|
||||
EVENT_KEY: 'event',
|
||||
SOURCE_KEY: 'source',
|
||||
CITATION_KEY: 'citation',
|
||||
PLACE_KEY: 'place',
|
||||
MEDIA_KEY: 'media',
|
||||
REPOSITORY_KEY: 'repository',
|
||||
#REFERENCE_KEY: 'reference',
|
||||
NOTE_KEY: 'note',
|
||||
TAG_KEY: 'tag'}
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# Helper functions
|
||||
@ -689,7 +653,7 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
|
||||
return False
|
||||
|
||||
@catch_db_error
|
||||
def load(self, name, callback, mode=DBMODE_W, force_schema_upgrade=False,
|
||||
def load(self, name, callback=None, mode=DBMODE_W, force_schema_upgrade=False,
|
||||
force_bsddb_upgrade=False, force_bsddb_downgrade=False,
|
||||
force_python_upgrade=False):
|
||||
|
||||
@ -2433,6 +2397,11 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
|
||||
version = str(_DBVERSION)
|
||||
version_file.write(version)
|
||||
|
||||
versionpath = os.path.join(name, str(DBBACKEND))
|
||||
_LOG.debug("Write database backend file to 'bsddb'")
|
||||
with open(versionpath, "w") as version_file:
|
||||
version_file.write("bsddb")
|
||||
|
||||
self.metadata.close()
|
||||
self.env.close()
|
||||
|
||||
@ -2449,6 +2418,180 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
|
||||
"""
|
||||
return DbTxn
|
||||
|
||||
def backup(self):
|
||||
"""
|
||||
Exports the database to a set of backup files. These files consist
|
||||
of the pickled database tables, one file for each table.
|
||||
|
||||
The heavy lifting is done by the :py:func:`do_export` function.
|
||||
The purpose of this function is to catch any exceptions that occur.
|
||||
|
||||
:param database: database instance to backup
|
||||
:type database: DbDir
|
||||
"""
|
||||
try:
|
||||
do_export(self)
|
||||
except (OSError, IOError) as msg:
|
||||
raise DbException(str(msg))
|
||||
|
||||
def restore(self):
|
||||
"""
|
||||
Restores the database from a set of backup files. These files consist
|
||||
of the pickled database tables, one file for each table.
|
||||
|
||||
The heavy lifting is done by the :py:func:`do_restore` function.
|
||||
The purpose of this function is to catch any exceptions that occur.
|
||||
|
||||
:param database: database instance to restore
|
||||
:type database: DbDir
|
||||
"""
|
||||
try:
|
||||
do_restore(self)
|
||||
except (OSError, IOError) as msg:
|
||||
raise DbException(str(msg))
|
||||
|
||||
def get_summary(self):
|
||||
"""
|
||||
Return a dictionary of summary items.
|
||||
Should include, if possible:
|
||||
|
||||
_("Number of people")
|
||||
_("Version")
|
||||
_("Schema version")
|
||||
"""
|
||||
schema_version = self.metadata.get(b'version', default=None)
|
||||
bdbversion_file = os.path.join(self.path, BDBVERSFN)
|
||||
if os.path.isfile(bdbversion_file):
|
||||
vers_file = open(bdbversion_file)
|
||||
bsddb_version = vers_file.readline().strip()
|
||||
else:
|
||||
bsddb_version = _("Unknown")
|
||||
return {
|
||||
_("Number of people"): self.get_number_of_people(),
|
||||
_("Schema version"): schema_version,
|
||||
_("Version"): bsddb_version,
|
||||
}
|
||||
|
||||
def prepare_import(self):
|
||||
"""
|
||||
Initialization before imports
|
||||
"""
|
||||
pass
|
||||
|
||||
def commit_import(self):
|
||||
"""
|
||||
Post process after imports
|
||||
"""
|
||||
pass
|
||||
|
||||
def mk_backup_name(database, base):
|
||||
"""
|
||||
Return the backup name of the database table
|
||||
|
||||
:param database: database instance
|
||||
:type database: DbDir
|
||||
:param base: base name of the table
|
||||
:type base: str
|
||||
"""
|
||||
return os.path.join(database.get_save_path(), base + ".gbkp")
|
||||
|
||||
def mk_tmp_name(database, base):
|
||||
"""
|
||||
Return the temporary backup name of the database table
|
||||
|
||||
:param database: database instance
|
||||
:type database: DbDir
|
||||
:param base: base name of the table
|
||||
:type base: str
|
||||
"""
|
||||
return os.path.join(database.get_save_path(), base + ".gbkp.new")
|
||||
|
||||
def do_export(database):
|
||||
"""
|
||||
Loop through each table of the database, saving the pickled data
|
||||
to a file.
|
||||
|
||||
:param database: database instance to backup
|
||||
:type database: DbDir
|
||||
"""
|
||||
try:
|
||||
for (base, tbl) in build_tbl_map(database):
|
||||
backup_name = mk_tmp_name(database, base)
|
||||
backup_table = open(backup_name, 'wb')
|
||||
|
||||
cursor = tbl.cursor()
|
||||
data = cursor.first()
|
||||
while data:
|
||||
pickle.dump(data, backup_table, 2)
|
||||
data = cursor.next()
|
||||
cursor.close()
|
||||
backup_table.close()
|
||||
except (IOError,OSError):
|
||||
return
|
||||
|
||||
for (base, tbl) in build_tbl_map(database):
|
||||
new_name = mk_backup_name(database, base)
|
||||
old_name = mk_tmp_name(database, base)
|
||||
if os.path.isfile(new_name):
|
||||
os.unlink(new_name)
|
||||
os.rename(old_name, new_name)
|
||||
|
||||
def do_restore(database):
|
||||
"""
|
||||
Loop through each table of the database, restoring the pickled data
|
||||
to the appropriate database file.
|
||||
|
||||
:param database: database instance to backup
|
||||
:type database: DbDir
|
||||
"""
|
||||
for (base, tbl) in build_tbl_map(database):
|
||||
backup_name = mk_backup_name(database, base)
|
||||
backup_table = open(backup_name, 'rb')
|
||||
load_tbl_txn(database, backup_table, tbl)
|
||||
|
||||
database.rebuild_secondary()
|
||||
|
||||
def load_tbl_txn(database, backup_table, tbl):
|
||||
"""
|
||||
Load the backup data from the backup file into the given database table.
|
||||
|
||||
:param database: database instance
|
||||
:type database: DbDir
|
||||
:param backup_table: file containing the backup data
|
||||
:type backup_table: file
|
||||
:param tbl: Berkeley db database table
|
||||
:type tbl: Berkeley db database table
|
||||
"""
|
||||
try:
|
||||
while True:
|
||||
data = pickle.load(backup_table)
|
||||
txn = database.env.txn_begin()
|
||||
tbl.put(data[0], data[1], txn=txn)
|
||||
txn.commit()
|
||||
except EOFError:
|
||||
backup_table.close()
|
||||
|
||||
def build_tbl_map(database):
|
||||
"""
|
||||
Builds a table map of names to database tables.
|
||||
|
||||
:param database: database instance to backup
|
||||
:type database: DbDir
|
||||
"""
|
||||
return [
|
||||
( PERSON_TBL, database.person_map.db),
|
||||
( FAMILY_TBL, database.family_map.db),
|
||||
( PLACES_TBL, database.place_map.db),
|
||||
( SOURCES_TBL, database.source_map.db),
|
||||
( CITATIONS_TBL, database.citation_map.db),
|
||||
( REPO_TBL, database.repository_map.db),
|
||||
( NOTE_TBL, database.note_map.db),
|
||||
( MEDIA_TBL, database.media_map.db),
|
||||
( EVENTS_TBL, database.event_map.db),
|
||||
( TAG_TBL, database.tag_map.db),
|
||||
( META, database.metadata.db),
|
||||
]
|
||||
|
||||
def _mkname(path, name):
|
||||
return os.path.join(path, name + DBEXT)
|
||||
|
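A brief usage sketch for the backup() and restore() methods added above, assuming db is an open, writable DbBsddb instance and that restore() is only run against an empty tree, as the backup module's docstring requires:

from gramps.gen.db.exceptions import DbException

try:
    db.backup()      # writes one <table>.gbkp file per primary table
except DbException as err:
    print("Backup failed:", err)

# later, on a freshly created, empty tree opened as db:
# db.restore()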
31
gramps/plugins/database/dictionarydb.gpr.py
Normal file
@ -0,0 +1,31 @@
|
||||
#
|
||||
# Gramps - a GTK+/GNOME based genealogy program
|
||||
#
|
||||
# Copyright (C) 2015 Douglas Blank <doug.blank@gmail.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
|
||||
plg = newplugin()
|
||||
plg.id = 'dictionarydb'
|
||||
plg.name = _("Dictionary Database Backend")
|
||||
plg.name_accell = _("Di_ctionary Database Backend")
|
||||
plg.description = _("Dictionary (in-memory) Database Backend")
|
||||
plg.version = '1.0'
|
||||
plg.gramps_target_version = "5.0"
|
||||
plg.status = STABLE
|
||||
plg.fname = 'dictionarydb.py'
|
||||
plg.ptype = DATABASE
|
||||
plg.databaseclass = 'DictionaryDb'
|
File diff suppressed because it is too large
@ -31,7 +31,6 @@ Show uncollected objects in a window.
|
||||
#------------------------------------------------------------------------
|
||||
from gramps.gen.const import GRAMPS_LOCALE as glocale
|
||||
_ = glocale.translation.gettext
|
||||
from bsddb3.db import DBError
|
||||
|
||||
#------------------------------------------------------------------------
|
||||
#
|
||||
@ -155,6 +154,13 @@ class Leak(Gramplet):
|
||||
parent=self.uistate.window)
|
||||
|
||||
def display(self):
|
||||
try:
|
||||
from bsddb3.db import DBError
|
||||
except:
|
||||
class DBError(Exception):
|
||||
"""
|
||||
Dummy.
|
||||
"""
|
||||
gc.collect(2)
|
||||
self.model.clear()
|
||||
count = 0
|
||||
|
@ -103,8 +103,10 @@ def importData(dbase, filename, user):
|
||||
parser = CSVParser(dbase, user, (config.get('preferences.tag-on-import-format') if
|
||||
config.get('preferences.tag-on-import') else None))
|
||||
try:
|
||||
dbase.prepare_import()
|
||||
with open(filename, 'r') as filehandle:
|
||||
parser.parse(filehandle)
|
||||
dbase.commit_import()
|
||||
except EnvironmentError as err:
|
||||
user.notify_error(_("%s could not be opened\n") % filename, str(err))
|
||||
return
|
||||
|
@ -131,7 +131,9 @@ def importData(database, filename, user):
|
||||
try:
|
||||
read_only = database.readonly
|
||||
database.readonly = False
|
||||
database.prepare_import()
|
||||
gedparse.parse_gedcom_file(False)
|
||||
database.commit_import()
|
||||
database.readonly = read_only
|
||||
ifile.close()
|
||||
except IOError as msg:
|
||||
|
@ -154,7 +154,9 @@ def importData(database, filename, user):
|
||||
return
|
||||
|
||||
try:
|
||||
database.prepare_import()
|
||||
status = g.parse_geneweb_file()
|
||||
database.commit_import()
|
||||
except IOError as msg:
|
||||
errmsg = _("%s could not be opened\n") % filename
|
||||
user.notify_error(errmsg,str(msg))
|
||||
|
@ -93,7 +93,9 @@ def impData(database, name, user):
|
||||
imp_db_name = os.path.join(tmpdir_path, XMLFILE)
|
||||
|
||||
importer = importData
|
||||
database.prepare_import()
|
||||
info = importer(database, imp_db_name, user)
|
||||
database.commit_import()
|
||||
|
||||
newmediapath = database.get_mediapath()
|
||||
#import of gpkg should not change media path as all media has new paths!
|
||||
|
@ -75,7 +75,9 @@ def _importData(database, filename, user):
|
||||
return
|
||||
|
||||
try:
|
||||
database.prepare_import()
|
||||
status = g.parse_progen_file()
|
||||
database.commit_import()
|
||||
except ProgenError as msg:
|
||||
user.notify_error(_("Pro-Gen data error"), str(msg))
|
||||
return
|
||||
|
@ -63,8 +63,10 @@ def importData(database, filename, user):
|
||||
"""Function called by Gramps to import data on persons in VCard format."""
|
||||
parser = VCardParser(database)
|
||||
try:
|
||||
database.prepare_import()
|
||||
with OpenFileOrStdin(filename) as filehandle:
|
||||
parser.parse(filehandle)
|
||||
database.commit_import()
|
||||
except EnvironmentError as msg:
|
||||
user.notify_error(_("%s could not be opened\n") % filename, str(msg))
|
||||
return
|
||||
|
@ -57,7 +57,7 @@ from gramps.gen.lib import (Address, Attribute, AttributeType, ChildRef,
|
||||
SrcAttribute, SrcAttributeType, StyledText,
|
||||
StyledTextTag, StyledTextTagType, Surname, Tag, Url)
|
||||
from gramps.gen.db import DbTxn
|
||||
from gramps.gen.db.write import CLASS_TO_KEY_MAP
|
||||
#from gramps.gen.db.write import CLASS_TO_KEY_MAP
|
||||
from gramps.gen.errors import GrampsImportError
|
||||
from gramps.gen.utils.id import create_id
|
||||
from gramps.gen.utils.db import family_name
|
||||
@ -68,7 +68,7 @@ from gramps.gen.display.name import displayer as name_displayer
|
||||
from gramps.gen.db.dbconst import (PERSON_KEY, FAMILY_KEY, SOURCE_KEY,
|
||||
EVENT_KEY, MEDIA_KEY, PLACE_KEY,
|
||||
REPOSITORY_KEY, NOTE_KEY, TAG_KEY,
|
||||
CITATION_KEY)
|
||||
CITATION_KEY, CLASS_TO_KEY_MAP)
|
||||
from gramps.gen.updatecallback import UpdateCallback
|
||||
from gramps.version import VERSION
|
||||
from gramps.gen.config import config
|
||||
@ -122,6 +122,7 @@ def importData(database, filename, user):
|
||||
line_cnt = 0
|
||||
person_cnt = 0
|
||||
|
||||
database.prepare_import()
|
||||
with ImportOpenFileContextManager(filename, user) as xml_file:
|
||||
if xml_file is None:
|
||||
return
|
||||
@ -162,6 +163,7 @@ def importData(database, filename, user):
|
||||
"valid Gramps database."))
|
||||
return
|
||||
|
||||
database.commit_import()
|
||||
database.readonly = read_only
|
||||
return info
|
||||
|
||||
|
@ -85,9 +85,9 @@ def import_file(db, filename, user):
|
||||
print("ERROR:", name, exception)
|
||||
return False
|
||||
import_function = getattr(mod, pdata.import_function)
|
||||
db.prepare_import()
|
||||
#db.prepare_import()
|
||||
retval = import_function(db, filename, user)
|
||||
db.commit_import()
|
||||
#db.commit_import()
|
||||
return retval
|
||||
return False
|
||||
|
||||
|
@ -143,8 +143,12 @@ def get_person_from_handle(db, handle):
|
||||
return None
|
||||
|
||||
def probably_alive(handle):
|
||||
## FIXME: need to call after save?
|
||||
person = db.get_person_from_handle(handle)
|
||||
return alive(person, db)
|
||||
if person:
|
||||
return alive(person, db)
|
||||
else:
|
||||
return True
|
||||
|
||||
def format_number(number, with_grouping=True):
|
||||
if number != "":
|
||||