Update with latest trunk changes
svn: r20754
This commit is contained in:
parent 3eb9594a98
commit c2092a71bd
@@ -55,16 +55,21 @@ db.
#
#-------------------------------------------------------------------------
import os
import cPickle as pickle
import sys
if sys.version_info[0] < 3:
import cPickle as pickle
else:
import pickle

#------------------------------------------------------------------------
#
# Gramps libs
#
#------------------------------------------------------------------------
from exceptions import DbException
from write import FAMILY_TBL, PLACES_TBL, LOCATION_TBL, SOURCES_TBL, MEDIA_TBL,\
EVENTS_TBL, PERSON_TBL, REPO_TBL, NOTE_TBL, TAG_TBL, META, CITATIONS_TBL
from .exceptions import DbException
from .write import (FAMILY_TBL, PLACES_TBL, LOCATION_TBL, SOURCES_TBL,
MEDIA_TBL, EVENTS_TBL, PERSON_TBL, REPO_TBL, NOTE_TBL,
TAG_TBL, META, CITATIONS_TBL)

#------------------------------------------------------------------------
#
@@ -87,7 +92,7 @@ def backup(database):
"""
try:
__do_export(database)
except (OSError, IOError), msg:
except (OSError, IOError) as msg:
raise DbException(str(msg))

def __mk_backup_name(database, base):
@@ -155,7 +160,7 @@ def restore(database):
"""
try:
__do_restore(database)
except (OSError, IOError), msg:
except (OSError, IOError) as msg:
raise DbException(str(msg))

def __do_restore(database):
@@ -41,8 +41,8 @@ from ..ggettext import gettext as _
#-------------------------------------------------------------------------
from ..lib.childreftype import ChildRefType
from ..lib.childref import ChildRef
from txn import DbTxn
from exceptions import DbTransactionCancel
from .txn import DbTxn
from .exceptions import DbTransactionCancel

class DbReadBase(object):
"""
@@ -30,6 +30,7 @@ Declare constants used by database modules
# standard python modules
#
#-------------------------------------------------------------------------
import sys

#-------------------------------------------------------------------------
#
@@ -60,14 +61,14 @@ DBLOGNAME = ".Db" # Name of logger
DBMODE_R = "r" # Read-only access
DBMODE_W = "w" # Full Read/Write access
DBPAGE = 16384 # Size of the pages used to hold items in the database
DBMODE = 0666 # Unix mode for database creation
DBMODE = 0o666 # Unix mode for database creation
DBCACHE = 0x4000000 # Size of the shared memory buffer pool
DBLOCKS = 100000 # Maximum number of locks supported
DBOBJECTS = 100000 # Maximum number of simultaneously locked objects
DBUNDO = 1000 # Maximum size of undo buffer

from ..config import config
if config.get('preferences.use-bsddb3'):
if config.get('preferences.use-bsddb3') or sys.version_info[0] >= 3:
from bsddb3.db import DB_CREATE, DB_AUTO_COMMIT, DB_DUP, DB_DUPSORT, DB_RDONLY
else:
from bsddb.db import DB_CREATE, DB_AUTO_COMMIT, DB_DUP, DB_DUPSORT, DB_RDONLY
@@ -25,21 +25,27 @@
"""
Read classes for the GRAMPS databases.
"""
from __future__ import with_statement

#-------------------------------------------------------------------------
#
# libraries
#
#-------------------------------------------------------------------------
import cPickle
from __future__ import print_function, with_statement

import sys
if sys.version_info[0] < 3:
import cPickle as pickle
else:
import pickle
import time
import random
import locale
import os
from sys import maxint
from sys import maxsize

from ..config import config
if config.get('preferences.use-bsddb3'):
if config.get('preferences.use-bsddb3') or sys.version_info[0] >= 3:
from bsddb3 import db
else:
from bsddb import db
@@ -68,12 +74,13 @@ from ..lib.genderstats import GenderStats
from ..lib.researcher import Researcher
from ..lib.nameorigintype import NameOriginType

from dbconst import *
from .dbconst import *
from ..utils.callback import Callback
from ..utils.cast import conv_dbstr_to_unicode
from . import (BsddbBaseCursor, DbReadBase)
from ..utils.id import create_id
from ..errors import DbError
from ..constfunc import UNITYPE, STRTYPE, cuni

LOG = logging.getLogger(DBLOGNAME)
LOG = logging.getLogger(".citation")
@@ -82,7 +89,7 @@ LOG = logging.getLogger(".citation")
# constants
#
#-------------------------------------------------------------------------
from dbconst import *
from .dbconst import *

_SIGBASE = ('person', 'family', 'source', 'citation',
'event', 'media', 'place', 'location', 'repository',
@@ -99,12 +106,14 @@ DBERRS = (db.DBRunRecoveryError, db.DBAccessError,
def find_surname(key, data):
"""
Creating a surname from raw data of a person, to use for sort and index
returns a byte string
"""
return __index_surname(data[3][5])

def find_surname_name(key, data):
"""
Creating a surname from raw name, to use for sort and index
returns a byte string
"""
return __index_surname(data[5])

@@ -112,12 +121,13 @@ def __index_surname(surn_list):
"""
All non pa/matronymic surnames are used in indexing.
pa/matronymic not as they change for every generation!
returns a byte string
"""
if surn_list:
surn = u" ".join([x[0] for x in surn_list if not (x[3][0] in [
surn = " ".join([x[0] for x in surn_list if not (x[3][0] in [
NameOriginType.PATRONYMIC, NameOriginType.MATRONYMIC]) ])
else:
surn = u""
surn = ""
return surn.encode('utf-8')


@@ -187,7 +197,7 @@ class DbBsddbTreeCursor(BsddbBaseCursor):
data = self.set(str(to_do.pop()))
_n = self.next_dup
while data:
payload = cPickle.loads(data[1])
payload = pickle.loads(data[1])
yield (payload[0], payload)
to_do.append(payload[0])
data = _n()
@@ -461,7 +471,7 @@ class DbBsddbRead(DbReadBase, Callback):

def get_table_names(self):
"""Return a list of valid table names."""
return self._tables.keys()
return list(self._tables.keys())

def get_table_metadata(self, table_name):
"""Return the metadata for a valid table name."""
@@ -472,7 +482,7 @@ class DbBsddbRead(DbReadBase, Callback):
def get_cursor(self, table, *args, **kwargs):
try:
return DbReadCursor(table, self.txn)
except DBERRS, msg:
except DBERRS as msg:
self.__log_error()
raise DbError(msg)

@@ -564,9 +574,12 @@ class DbBsddbRead(DbReadBase, Callback):
Helper function for find_next_<object>_gramps_id methods
"""
index = prefix % map_index
while trans.get(str(index), txn=self.txn) is not None:
#in bytes
bindex = index.encode('utf-8')
while trans.get(bindex, txn=self.txn) is not None:
map_index += 1
index = prefix % map_index
bindex = index.encode('utf-8')
map_index += 1
return (map_index, index)

@@ -652,7 +665,9 @@ class DbBsddbRead(DbReadBase, Callback):
return gid

def get_from_handle(self, handle, class_type, data_map):
data = data_map.get(str(handle))
if isinstance(handle, UNITYPE):
handle = handle.encode('utf-8')
data = data_map.get(handle)
if data:
newobj = class_type()
newobj.unserialize(data)
@@ -777,8 +792,10 @@ class DbBsddbRead(DbReadBase, Callback):
return self.get_from_handle(handle, Location, self.location_map)

def __get_obj_from_gramps_id(self, val, tbl, class_, prim_tbl):
if isinstance(val, UNITYPE):
val = val.encode('utf-8')
try:
data = tbl.get(str(val), txn=self.txn)
data = tbl.get(val, txn=self.txn)
if data is not None:
obj = class_()
### FIXME: this is a dirty hack that works without no
@@ -788,12 +805,12 @@ class DbBsddbRead(DbReadBase, Callback):
if self.readonly:
tuple_data = prim_tbl.get(data, txn=self.txn)
else:
tuple_data = cPickle.loads(data)
tuple_data = pickle.loads(data)
obj.unserialize(tuple_data)
return obj
else:
return None
except DBERRS, msg:
except DBERRS as msg:
self.__log_error()
raise DbError(msg)

@@ -892,17 +909,15 @@ class DbBsddbRead(DbReadBase, Callback):
Return the default grouping name for a surname.
Return type is a unicode object
"""
if isinstance(surname, unicode):
ssurname = surname.encode('utf-8')
return conv_dbstr_to_unicode(self.name_group.get(ssurname, ssurname))
else:
if isinstance(surname, UNITYPE):
surname = surname.encode('utf-8')
return conv_dbstr_to_unicode(self.name_group.get(surname, surname))

def get_name_group_keys(self):
"""
Return the defined names that have been assigned to a default grouping.
"""
return map(conv_dbstr_to_unicode, self.name_group.keys())
return list(map(conv_dbstr_to_unicode, list(self.name_group.keys())))

def has_name_group_key(self, name):
"""
@@ -910,10 +925,9 @@ class DbBsddbRead(DbReadBase, Callback):
"""
# The use of has_key seems allright because there is no write lock
# on the name_group table when this is called.
if isinstance(name, unicode):
return self.name_group.has_key(name.encode('utf-8'))
else:
return self.name_group.has_key(name)
if isinstance(name, UNITYPE):
name = name.encode('utf-8')
return name in self.name_group

def get_number_of_records(self, table):
if not self.db_is_open:
@@ -1188,7 +1202,7 @@ class DbBsddbRead(DbReadBase, Callback):
}

table = key2table[obj_key]
return table.keys()
return list(table.keys())

def has_gramps_id(self, obj_key, gramps_id):
key2table = {
@@ -1204,8 +1218,9 @@ class DbBsddbRead(DbReadBase, Callback):
}

table = key2table[obj_key]
#return str(gramps_id) in table
return table.get(str(gramps_id), txn=self.txn) is not None
if isinstance(gramps_id, UNITYPE):
gramps_id = gramps_id.encode('utf-8')
return table.get(gramps_id, txn=self.txn) is not None

def find_initial_person(self):
person = self.get_default_person()
@@ -1217,7 +1232,7 @@ class DbBsddbRead(DbReadBase, Callback):

@staticmethod
def _validated_id_prefix(val, default):
if isinstance(val, basestring) and val:
if isinstance(val, STRTYPE) and val:
try:
str_ = val % 1
except TypeError: # missing conversion specifier
@@ -1239,22 +1254,23 @@ class DbBsddbRead(DbReadBase, Callback):
pattern_match = re.match(r"(.*)%[0 ](\d+)[diu]$", id_pattern)
if pattern_match:
str_prefix = pattern_match.group(1)
nr_width = pattern_match.group(2)
##nr_width = pattern_match.group(2)
def closure_func(gramps_id):
if gramps_id and gramps_id.startswith(str_prefix):
id_number = gramps_id[len(str_prefix):]
if id_number.isdigit():
id_value = int(id_number, 10)
if len(str(id_value)) > nr_width:
# The ID to be imported is too large to fit in the
# users format. For now just create a new ID,
# because that is also what happens with IDs that
# are identical to IDs already in the database. If
# the problem of colliding import and already
# present IDs is solved the code here also needs
# some solution.
gramps_id = id_pattern % 1
else:
## this code never ran, as an int compared to str with > is False!
## if len(cuni(id_value)) > nr_width:
## # The ID to be imported is too large to fit in the
## # users format. For now just create a new ID,
## # because that is also what happens with IDs that
## # are identical to IDs already in the database. If
## # the problem of colliding import and already
## # present IDs is solved the code here also needs
## # some solution.
## gramps_id = id_pattern % 1
## else:
gramps_id = id_pattern % id_value
return gramps_id
else:
@@ -1439,13 +1455,13 @@ class DbBsddbRead(DbReadBase, Callback):
if person:
return person
elif (self.metadata is not None) and (not self.readonly):
self.metadata['default'] = None
self.metadata[b'default'] = None
return None

def get_default_handle(self):
"""Return the default Person of the database."""
if self.metadata is not None:
return self.metadata.get('default')
return self.metadata.get(b'default')
return None

def get_save_path(self):
@@ -1561,9 +1577,11 @@ class DbBsddbRead(DbReadBase, Callback):
"""
Helper method for get_raw_<object>_data methods
"""
if isinstance(handle, UNITYPE):
handle = handle.encode('utf-8')
try:
return table.get(str(handle), txn=self.txn)
except DBERRS, msg:
return table.get(handle, txn=self.txn)
except DBERRS as msg:
self.__log_error()
raise DbError(msg)

@@ -1604,9 +1622,11 @@ class DbBsddbRead(DbReadBase, Callback):
"""
Helper function for has_<object>_handle methods
"""
if isinstance(handle, UNITYPE):
handle = handle.encode('utf-8')
try:
return table.get(str(handle), txn=self.txn) is not None
except DBERRS, msg:
return table.get(handle, txn=self.txn) is not None
except DBERRS as msg:
self.__log_error()
raise DbError(msg)

@@ -1676,62 +1696,94 @@ class DbBsddbRead(DbReadBase, Callback):
"""
return self.__has_handle(self.location_map, handle)

def __sortbyperson_key(self, person):
return locale.strxfrm(find_surname(str(person),
self.person_map.get(str(person))))
def __sortbyperson_key(self, handle):
if isinstance(handle, UNITYPE):
handle = handle.encode('utf-8')
return locale.strxfrm(find_surname(handle,
self.person_map.get(handle)))

def __sortbyplace(self, first, second):
return locale.strcoll(self.place_map.get(str(first))[2],
self.place_map.get(str(second))[2])
if isinstance(first, UNITYPE):
first = first.encode('utf-8')
if isinstance(second, UNITYPE):
second = second.encode('utf-8')
return locale.strcoll(self.place_map.get(first)[2],
self.place_map.get(second)[2])

def __sortbyplace_key(self, place):
return locale.strxfrm(self.place_map.get(str(place))[2])
if isinstance(place, UNITYPE):
place = place.encode('utf-8')
return locale.strxfrm(self.place_map.get(place)[2])

def __sortbysource(self, first, second):
source1 = unicode(self.source_map[str(first)][2])
source2 = unicode(self.source_map[str(second)][2])
if isinstance(first, UNITYPE):
first = first.encode('utf-8')
if isinstance(second, UNITYPE):
second = second.encode('utf-8')
source1 = cuni(self.source_map[first][2])
source2 = cuni(self.source_map[second][2])
return locale.strcoll(source1, source2)

def __sortbysource_key(self, key):
source = unicode(self.source_map[str(key)][2])
if isinstance(key, UNITYPE):
key = key.encode('utf-8')
source = cuni(self.source_map[key][2])
return locale.strxfrm(source)

def __sortbycitation(self, first, second):
citation1 = unicode(self.citation_map[str(first)][3])
citation2 = unicode(self.citation_map[str(second)][3])
if isinstance(first, UNITYPE):
first = first.encode('utf-8')
if isinstance(second, UNITYPE):
second = second.encode('utf-8')
citation1 = cuni(self.citation_map[first][3])
citation2 = cuni(self.citation_map[second][3])
return locale.strcoll(citation1, citation2)

def __sortbycitation_key(self, key):
citation = unicode(self.citation_map[str(key)][3])
if isinstance(key, UNITYPE):
key = key.encode('utf-8')
citation = cuni(self.citation_map[key][3])
return locale.strxfrm(citation)

def __sortbymedia(self, first, second):
media1 = self.media_map[str(first)][4]
media2 = self.media_map[str(second)][4]
if isinstance(first, UNITYPE):
first = first.encode('utf-8')
if isinstance(second, UNITYPE):
second = second.encode('utf-8')
media1 = self.media_map[first][4]
media2 = self.media_map[second][4]
return locale.strcoll(media1, media2)

def __sortbymedia_key(self, key):
media = self.media_map[str(key)][4]
if isinstance(key, UNITYPE):
key = key.encode('utf-8')
media = self.media_map[key][4]
return locale.strxfrm(media)

def __sortbytag(self, first, second):
tag1 = self.tag_map[str(first)][1]
tag2 = self.tag_map[str(second)][1]
if isinstance(first, UNITYPE):
first = first.encode('utf-8')
if isinstance(second, UNITYPE):
second = second.encode('utf-8')
tag1 = self.tag_map[first][1]
tag2 = self.tag_map[second][1]
return locale.strcoll(tag1, tag2)

def __sortbytag_key(self, key):
tag = self.tag_map[str(key)][1]
if isinstance(key, UNITYPE):
key = key.encode('utf-8')
tag = self.tag_map[key][1]
return locale.strxfrm(tag)

def set_mediapath(self, path):
"""Set the default media path for database, path should be utf-8."""
if (self.metadata is not None) and (not self.readonly):
self.metadata['mediapath'] = path
self.metadata[b'mediapath'] = path

def get_mediapath(self):
"""Return the default media path of the database."""
if self.metadata is not None:
return self.metadata.get('mediapath', None)
return self.metadata.get(b'mediapath', None)
return None

def find_backlink_handles(self, handle, include_classes=None):
@@ -1808,13 +1860,13 @@ class DbBsddbRead(DbReadBase, Callback):

# Find which tables to iterate over
if (include_classes is None):
the_tables = primary_tables.keys()
the_tables = list(primary_tables.keys())
else:
the_tables = include_classes

# Now we use the functions and classes defined above to loop through
# each of the existing primary object tables
for primary_table_name, funcs in the_tables.iteritems():
for primary_table_name, funcs in the_tables.items():
with funcs['cursor_func']() as cursor:

# Grab the real object class here so that the lookup does
@@ -1859,7 +1911,7 @@ class DbBsddbRead(DbReadBase, Callback):
name_file = open(filepath, "r")
name = name_file.read()
name_file.close()
except (OSError, IOError), msg:
except (OSError, IOError) as msg:
self.__log_error()
name = None
return name
@@ -31,13 +31,18 @@ undos and redos.
# Standard python modules
#
#-------------------------------------------------------------------------
from __future__ import with_statement
from __future__ import print_function, with_statement

import time, os
import cPickle as pickle
import sys
if sys.version_info[0] < 3:
import cPickle as pickle
else:
import pickle
from collections import deque

from ..config import config
if config.get('preferences.use-bsddb3'):
if config.get('preferences.use-bsddb3') or sys.version_info[0] >= 3:
from bsddb3 import db
else:
from bsddb import db
@@ -48,7 +53,7 @@ from ..ggettext import gettext as _
# Gramps modules
#
#-------------------------------------------------------------------------
from dbconst import *
from .dbconst import *
from . import BSDDBTxn
from ..errors import DbError

@@ -208,7 +213,7 @@ class DbUndo(object):
self.db.txn = None
return status

except DBERRS, msg:
except DBERRS as msg:
self.db._log_error()
raise DbError(msg)

@@ -301,7 +306,7 @@ class DbUndo(object):
else:
db_map.put(handle, data, txn=self.txn)

except DBERRS, msg:
except DBERRS as msg:
self.db._log_error()
raise DbError(msg)

@@ -322,7 +327,7 @@ class DbUndo(object):
db_map.put(handle, data, txn=self.txn)
emit(signal, ([handle],))

except DBERRS, msg:
except DBERRS as msg:
self.db._log_error()
raise DbError(msg)

@@ -455,7 +460,7 @@ class DbUndoBSDDB(DbUndo):
data = cursor.first()
while data:
yield data
data = cursor.next()
data = next(cursor)

def testundo():
class T:
@@ -479,32 +484,32 @@ def testundo():
self.repository_map = {}
self.reference_map = {}

print "list tests"
print("list tests")
undo = DbUndoList(D())
print undo.append('foo')
print undo.append('bar')
print undo[0]
print(undo.append('foo'))
print(undo.append('bar'))
print(undo[0])
undo[0] = 'foobar'
print undo[0]
print "len", len(undo)
print "iter"
print(undo[0])
print("len", len(undo))
print("iter")
for data in undo:
print data
print
print "bsddb tests"
print(data)
print()
print("bsddb tests")
undo = DbUndoBSDDB(D(), '/tmp/testundo')
undo.open()
print undo.append('foo')
print undo.append('fo2')
print undo.append('fo3')
print undo[1]
print(undo.append('foo'))
print(undo.append('fo2'))
print(undo.append('fo3'))
print(undo[1])
undo[1] = 'bar'
print undo[1]
print(undo[1])
for data in undo:
print data
print "len", len(undo)
print(data)
print("len", len(undo))

print "test commit"
print("test commit")
undo.commit(T(), msg="test commit")
undo.close()
@@ -21,8 +21,9 @@

# $Id$

from __future__ import with_statement
from __future__ import with_statement, unicode_literals

import sys
from ..lib.markertype import MarkerType
from ..lib.tag import Tag
import time
@@ -30,20 +31,21 @@ import logging
LOG = logging.getLogger(".citation")

from ..ggettext import gettext as _
from ..constfunc import cuni

"""
methods to upgrade a database from version 13 to current version
"""
from ..config import config
if config.get('preferences.use-bsddb3'):
if config.get('preferences.use-bsddb3') or sys.version_info[0] >= 3:
from bsddb3 import db
else:
from bsddb import db
from . import BSDDBTxn
from ..lib.nameorigintype import NameOriginType
from write import _mkname, SURNAMES
from dbconst import (PERSON_KEY, FAMILY_KEY, EVENT_KEY,
MEDIA_KEY, PLACE_KEY, LOCATION_KEY, REPOSITORY_KEY)
from .write import _mkname, SURNAMES
from .dbconst import (PERSON_KEY, FAMILY_KEY, EVENT_KEY, MEDIA_KEY, PLACE_KEY,
LOCATION_KEY, REPOSITORY_KEY)
from gramps.gui.dialog import (InfoDialog)

def gramps_upgrade_17(self):
@@ -70,7 +72,7 @@ def gramps_upgrade_17(self):
self.update()

with BSDDBTxn(self.env, self.metadata) as txn:
txn.put('version', 17)
txn.put(b'version', 17)

def add_reference(self, pri_handle, ref_handle):
key = (PLACE_KEY, pri_handle)
@@ -101,7 +103,7 @@ def match_location(self, parent, item, lat_long):
self.children[handle] = []
self.children[parent].append(handle)
new_location = (handle,
parent,
str(parent),
item[1], # Name
item[0]+1, # Type
lat_long[0],
@@ -211,10 +213,10 @@ def gramps_upgrade_16(self):
self.update()

LOG.debug("%d persons upgraded with %d citations in %d seconds. " %
(len(self.person_map.keys()),
(len(list(self.person_map.keys())),
self.cmap_index - start_num_citations,
time.time() - start_time))
data_upgradeobject[key2data[PERSON_KEY]] = (len(self.person_map.keys()),
data_upgradeobject[key2data[PERSON_KEY]] = (len(list(self.person_map.keys())),
self.cmap_index - start_num_citations,
time.time() - start_time)

@@ -247,7 +249,7 @@ def gramps_upgrade_16(self):
LOG.debug("Media upgrade %d citations upgraded in %d seconds" %
(self.cmap_index - start_num_citations,
int(time.time() - start_time)))
data_upgradeobject[key2data[MEDIA_KEY]] = (len(self.media_map.keys()),
data_upgradeobject[key2data[MEDIA_KEY]] = (len(list(self.media_map.keys())),
self.cmap_index - start_num_citations,
time.time() - start_time)

@@ -258,7 +260,7 @@ def gramps_upgrade_16(self):
start_time = time.time()
for place_handle in self.place_map.keys():
place = self.place_map[place_handle]
(handle, gramps_id, title, long, lat,
(handle, gramps_id, title, int, lat,
main_loc, alt_loc, urls, media_list, source_list, note_list,
change, private) = place
if source_list:
@@ -271,7 +273,7 @@ def gramps_upgrade_16(self):
self, media_list)
if source_list or media_list:
new_place = (handle, gramps_id, title,
long, lat, main_loc, alt_loc, urls,
int, lat, main_loc, alt_loc, urls,
media_list, new_citation_list, note_list,
change, private)
with BSDDBTxn(self.env, self.place_map) as txn:
@@ -279,10 +281,10 @@ def gramps_upgrade_16(self):
self.update()

LOG.debug("%d places upgraded with %d citations in %d seconds. " %
(len(self.place_map.keys()),
(len(list(self.place_map.keys())),
self.cmap_index - start_num_citations,
time.time() - start_time))
data_upgradeobject[key2data[PLACE_KEY]] = (len(self.place_map.keys()),
data_upgradeobject[key2data[PLACE_KEY]] = (len(list(self.place_map.keys())),
self.cmap_index - start_num_citations,
time.time() - start_time)

@@ -327,10 +329,10 @@ def gramps_upgrade_16(self):
self.update()

LOG.debug("%d familys upgraded with %d citations in %d seconds. " %
(len(self.family_map.keys()),
(len(list(self.family_map.keys())),
self.cmap_index - start_num_citations,
time.time() - start_time))
data_upgradeobject[key2data[FAMILY_KEY]] = (len(self.family_map.keys()),
data_upgradeobject[key2data[FAMILY_KEY]] = (len(list(self.family_map.keys())),
self.cmap_index - start_num_citations,
time.time() - start_time)
# ---------------------------------
@@ -372,10 +374,10 @@ def gramps_upgrade_16(self):

LOG.debug("%d events upgraded with %d citations in %d seconds. "
"Backlinks took %d seconds" %
(len(self.event_map.keys()),
(len(list(self.event_map.keys())),
self.cmap_index - start_num_citations,
int(upgrade_time), int(backlink_time)))
data_upgradeobject[key2data[EVENT_KEY]] = (len(self.event_map.keys()),
data_upgradeobject[key2data[EVENT_KEY]] = (len(list(self.event_map.keys())),
self.cmap_index - start_num_citations,
time.time() - start_time)

@@ -399,10 +401,10 @@ def gramps_upgrade_16(self):
self.update()

LOG.debug("%d repositorys upgraded with %d citations in %d seconds. " %
(len(self.repository_map.keys()),
(len(list(self.repository_map.keys())),
self.cmap_index - start_num_citations,
time.time() - start_time))
data_upgradeobject[key2data[REPOSITORY_KEY]] = (len(self.repository_map.keys()),
data_upgradeobject[key2data[REPOSITORY_KEY]] = (len(list(self.repository_map.keys())),
self.cmap_index - start_num_citations,
time.time() - start_time)
# ---------------------------------
@@ -475,7 +477,7 @@ def gramps_upgrade_16(self):

# Bump up database version. Separate transaction to save metadata.
with BSDDBTxn(self.env, self.metadata) as txn:
txn.put('version', 16)
txn.put(b'version', 16)

LOG.debug([data_upgradeobject])
txt = _("Number of new objects upgraded:\n")
@@ -657,9 +659,9 @@ def gramps_upgrade_15(self):
tags = [tag_handle]
else:
tags = []
address_list = map(convert_address, address_list)
address_list = list(map(convert_address, address_list))
new_primary_name = convert_name_15(primary_name)
new_alternate_names = map(convert_name_15, alternate_names)
new_alternate_names = list(map(convert_name_15, alternate_names))
new_person = (junk_handle, # 0
gramps_id, # 1
gender, # 2
@@ -763,7 +765,7 @@ def gramps_upgrade_15(self):
new_place = list(place)
if new_place[5] is not None:
new_place[5] = convert_location(new_place[5])
new_place[6] = map(convert_location, new_place[6])
new_place[6] = list(map(convert_location, new_place[6]))
new_place = new_place[:12] + new_place[13:]
new_place = tuple(new_place)
with BSDDBTxn(self.env, self.place_map) as txn:
@@ -791,7 +793,7 @@ def gramps_upgrade_15(self):
repository = self.repository_map[handle]
new_repository = list(repository)
new_repository = new_repository[:8] + new_repository[9:]
new_repository[5] = map(convert_address, new_repository[5])
new_repository[5] = list(map(convert_address, new_repository[5]))
new_repository = tuple(new_repository)
with BSDDBTxn(self.env, self.repository_map) as txn:
txn.put(str(handle), new_repository)
@@ -799,13 +801,13 @@ def gramps_upgrade_15(self):

# Bump up database version. Separate transaction to save metadata.
with BSDDBTxn(self.env, self.metadata) as txn:
txn.put('version', 15)
txn.put(b'version', 15)

def convert_marker(self, marker_field):
"""Convert a marker into a tag."""
marker = MarkerType()
marker.unserialize(marker_field)
tag_name = unicode(marker)
tag_name = cuni(marker)

if tag_name != '':
if tag_name not in self.tags:
@@ -824,7 +826,7 @@ def convert_marker(self, marker_field):

def convert_locbase(loc):
"""Convert location base to include an empty locality field."""
return tuple([loc[0], u''] + list(loc[1:]))
return tuple([loc[0], ''] + list(loc[1:]))

def convert_location(loc):
"""Convert a location into the new format."""
@@ -840,26 +842,26 @@ def convert_name_15(name):
name_type, prefix, patronymic,
group_as, sort_as, display_as, call) = name

connector = u""
origintype = (NameOriginType.NONE, u"")
patorigintype = (NameOriginType.PATRONYMIC, u"")
connector = ""
origintype = (NameOriginType.NONE, "")
patorigintype = (NameOriginType.PATRONYMIC, "")

if patronymic.strip() == u"":
if patronymic.strip() == "":
#no patronymic, create a single surname
surname_list = [(surname, prefix, True, origintype, connector)]
else:
#a patronymic, if no surname or equal as patronymic, a single surname
if (surname.strip() == u"") or (surname == patronymic and prefix == u""):
if (surname.strip() == "") or (surname == patronymic and prefix == ""):
surname_list = [(patronymic, prefix, True, patorigintype, connector)]
else:
#two surnames, first patronymic, then surname which is primary
surname_list = [(patronymic, u"", False, patorigintype, u""),
surname_list = [(patronymic, "", False, patorigintype, ""),
(surname, prefix, True, origintype, connector)]

#return new value, add two empty strings for nick and family nick
return (privacy, source_list, note_list, date,
first_name, surname_list, suffix, title, name_type,
group_as, sort_as, display_as, call, u"", u"")
group_as, sort_as, display_as, call, "", "")

def gramps_upgrade_14(self):
"""Upgrade database from version 13 to 14."""
@@ -1067,12 +1069,12 @@ def gramps_upgrade_14(self):
# ---------------------------------
for place_handle in self.place_map.keys():
place = self.place_map[place_handle]
(handle, gramps_id, title, long, lat,
(handle, gramps_id, title, int, lat,
main_loc, alt_loc, urls, media_list, source_list, note_list,
change, marker, private) = place
new_media_list = new_media_list_14(media_list)
new_source_list = new_source_list_14(source_list)
new_place = (handle, gramps_id, title, long, lat,
new_place = (handle, gramps_id, title, int, lat,
main_loc, alt_loc, urls, new_media_list,
new_source_list, note_list, change, marker, private)

@@ -1101,7 +1103,7 @@ def gramps_upgrade_14(self):

# Bump up database version. Separate transaction to save metadata.
with BSDDBTxn(self.env, self.metadata) as txn:
txn.put('version', 14)
txn.put(b'version', 14)

def new_source_list_14(source_list):
new_source_list = []
@@ -32,19 +32,23 @@ This is used since GRAMPS version 3.0
# Standard python modules
#
#-------------------------------------------------------------------------
from __future__ import with_statement
import cPickle as pickle
from __future__ import print_function, with_statement
import sys
if sys.version_info[0] < 3:
import cPickle as pickle
else:
import pickle
import os
import time
import locale
import bisect
from functools import wraps
import logging
from sys import maxint
from sys import maxsize

from ..ggettext import gettext as _
from ..config import config
if config.get('preferences.use-bsddb3'):
if config.get('preferences.use-bsddb3') or sys.version_info[0] >= 3:
from bsddb3 import dbshelve, db
else:
from bsddb import dbshelve, db
@@ -72,12 +76,12 @@ from . import (DbBsddbRead, DbWriteBase, BSDDBTxn,
DbTxn, BsddbBaseCursor, BsddbDowngradeError, DbVersionError,
DbEnvironmentError, DbUpgradeRequiredError, find_surname,
find_surname_name, DbUndoBSDDB as DbUndo)
from dbconst import *
from .dbconst import *
from ..utils.callback import Callback
from ..utils.cast import (conv_unicode_tosrtkey, conv_dbstr_to_unicode)
from ..updatecallback import UpdateCallback
from ..errors import DbError
from ..constfunc import win
from ..constfunc import win, conv_to_unicode, cuni, UNITYPE

_LOG = logging.getLogger(DBLOGNAME)
LOG = logging.getLogger(".citation")
@@ -168,13 +172,27 @@ KEY_TO_NAME_MAP = {PERSON_KEY: 'person',
#-------------------------------------------------------------------------

def find_idmap(key, data):
return str(data[1])
""" return id for association of secondary index.
returns a byte string
"""
val = data[1]
if isinstance(val, UNITYPE):
val = val.encode('utf-8')
return val

def find_parent(key, data):
return str(data[1])
val = data[1]
if isinstance(val, UNITYPE):
val = val.encode('utf-8')
return val

def find_name(key, data):
return str(data[2]).upper()
val = data[2].upper()
if isinstance(val, UNITYPE):
val = val.encode('utf-8')
return val

# Secondary database key lookups for reference_map table
# reference_map data values are of the form:
@@ -182,10 +200,22 @@ def find_name(key, data):
# (referenced_object_class_name, referenced_object_handle))

def find_primary_handle(key, data):
return str((data)[0][1])
""" return handle for association of indexes
returns byte string
"""
val = (data)[0][1]
if isinstance(val, UNITYPE):
val = val.encode('utf-8')
return val

def find_referenced_handle(key, data):
return str((data)[1][1])
""" return handle for association of indexes
returns byte string
"""
val = (data)[1][1]
if isinstance(val, UNITYPE):
val = val.encode('utf-8')
return val

#-------------------------------------------------------------------------
#
@@ -244,7 +274,10 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
__signals__['home-person-changed'] = None

# 4. Signal for change in person group name, parameters are
if sys.version_info[0] < 3:
__signals__['person-groupname-rebuild'] = (unicode, unicode)
else:
__signals__['person-groupname-rebuild'] = (str, str)

def __init__(self):
"""Create a new GrampsDB."""
@@ -268,7 +301,7 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
def try_(self, *args, **kwargs):
try:
return func(self, *args, **kwargs)
except DBERRS, msg:
except DBERRS as msg:
self.__log_error()
raise DbError(msg)
return try_
@@ -371,10 +404,13 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
@catch_db_error
def set_default_person_handle(self, handle):
"""Set the default Person to the passed instance."""
#we store a byte string!
if isinstance(handle, UNITYPE):
handle = handle.encode('utf-8')
if not self.readonly:
# Start transaction
with BSDDBTxn(self.env, self.metadata) as txn:
txn.put('default', str(handle))
txn.put(b'default', handle)
self.emit('home-person-changed')

@catch_db_error
@@ -386,7 +422,7 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
elif (self.metadata) and (not self.readonly):
# Start transaction
with BSDDBTxn(self.env, self.metadata) as txn:
txn.put('default', None)
txn.put(b'default', None)
return None

def set_mediapath(self, path):
@@ -394,7 +430,7 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
if self.metadata and not self.readonly:
# Start transaction
with BSDDBTxn(self.env, self.metadata) as txn:
txn.put('mediapath', path)
txn.put(b'mediapath', path)

def __check_bdb_version(self, name):
"""Older version of Berkeley DB can't read data created by a newer
@@ -409,6 +445,9 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
except:
# Just assume that the Berkeley DB version is OK.
pass
if not env_version:
#empty file, assume it is ok to open
env_version = (0, 0, 0)
if (env_version[0] > bdb_version[0]) or \
(env_version[0] == bdb_version[0] and
env_version[1] > bdb_version[1]):
@@ -419,12 +458,12 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):

@catch_db_error
def version_supported(self):
dbversion = self.metadata.get('version', default=0)
dbversion = self.metadata.get(b'version', default=0)
return ((dbversion <= _DBVERSION) and (dbversion >= _MINVERSION))

@catch_db_error
def need_upgrade(self):
dbversion = self.metadata.get('version', default=0)
dbversion = self.metadata.get(b'version', default=0)
return not self.readonly and dbversion < _DBVERSION

def __check_readonly(self, name):
@@ -502,7 +541,7 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):

try:
self.env.open(env_name, env_flags)
except Exception, msg:
except Exception as msg:
_LOG.warning("Error opening db environment: " + str(msg))
try:
self.__close_early()
@@ -525,7 +564,7 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
raise DbVersionError()

self.__load_metadata()
gstats = self.metadata.get('gender_stats', default=None)
gstats = self.metadata.get(b'gender_stats', default=None)

# Ensure version info in metadata
if not self.readonly:
@@ -533,12 +572,12 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
with BSDDBTxn(self.env, self.metadata) as txn:
if gstats is None:
# New database. Set up the current version.
#self.metadata.put('version', _DBVERSION, txn=the_txn)
txn.put('version', _DBVERSION)
elif 'version' not in self.metadata:
#self.metadata.put(b'version', _DBVERSION, txn=the_txn)
txn.put(b'version', _DBVERSION)
elif b'version' not in self.metadata:
# Not new database, but the version is missing.
# Use 0, but it is likely to fail anyway.
txn.put('version', 0)
txn.put(b'version', 0)

self.genderStats = GenderStats(gstats)

@@ -626,7 +665,7 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):

def __load_metadata(self):
# name display formats
self.name_formats = self.metadata.get('name_formats', default=[])
self.name_formats = self.metadata.get(b'name_formats', default=[])
# upgrade formats if they were saved in the old way
for format_ix in range(len(self.name_formats)):
format = self.name_formats[format_ix]
@@ -636,7 +675,7 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):

# database owner
try:
owner_data = self.metadata.get('researcher')
owner_data = self.metadata.get(b'researcher')
if owner_data:
if len(owner_data[0]) == 7: # Pre-3.3 format
owner_data = upgrade_researcher(owner_data)
@@ -647,35 +686,35 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
# bookmarks
meta = lambda meta: self.metadata.get(meta, default=[])

self.bookmarks.set(meta('bookmarks'))
self.family_bookmarks.set(meta('family_bookmarks'))
self.event_bookmarks.set(meta('event_bookmarks'))
self.source_bookmarks.set(meta('source_bookmarks'))
self.citation_bookmarks.set(meta('citation_bookmarks'))
self.repo_bookmarks.set(meta('repo_bookmarks'))
self.media_bookmarks.set(meta('media_bookmarks'))
self.place_bookmarks.set(meta('place_bookmarks'))
self.note_bookmarks.set(meta('note_bookmarks'))
self.bookmarks.set(meta(b'bookmarks'))
self.family_bookmarks.set(meta(b'family_bookmarks'))
self.event_bookmarks.set(meta(b'event_bookmarks'))
self.source_bookmarks.set(meta(b'source_bookmarks'))
self.citation_bookmarks.set(meta(b'citation_bookmarks'))
self.repo_bookmarks.set(meta(b'repo_bookmarks'))
self.media_bookmarks.set(meta(b'media_bookmarks'))
self.place_bookmarks.set(meta(b'place_bookmarks'))
self.note_bookmarks.set(meta(b'note_bookmarks'))

# Custom type values
self.family_event_names = set(meta('fevent_names'))
self.individual_event_names = set(meta('pevent_names'))
self.family_attributes = set(meta('fattr_names'))
self.individual_attributes = set(meta('pattr_names'))
self.marker_names = set(meta('marker_names'))
self.child_ref_types = set(meta('child_refs'))
self.family_rel_types = set(meta('family_rels'))
self.event_role_names = set(meta('event_roles'))
self.name_types = set(meta('name_types'))
self.origin_types = set(meta('origin_types'))
self.repository_types = set(meta('repo_types'))
self.note_types = set(meta('note_types'))
self.source_media_types = set(meta('sm_types'))
self.url_types = set(meta('url_types'))
self.media_attributes = set(meta('mattr_names'))
self.family_event_names = set(meta(b'fevent_names'))
self.individual_event_names = set(meta(b'pevent_names'))
self.family_attributes = set(meta(b'fattr_names'))
self.individual_attributes = set(meta(b'pattr_names'))
self.marker_names = set(meta(b'marker_names'))
self.child_ref_types = set(meta(b'child_refs'))
self.family_rel_types = set(meta(b'family_rels'))
self.event_role_names = set(meta(b'event_roles'))
self.name_types = set(meta(b'name_types'))
self.origin_types = set(meta(b'origin_types'))
self.repository_types = set(meta(b'repo_types'))
self.note_types = set(meta(b'note_types'))
self.source_media_types = set(meta(b'sm_types'))
self.url_types = set(meta(b'url_types'))
self.media_attributes = set(meta(b'mattr_names'))

# surname list
self.surname_list = meta('surname_list')
self.surname_list = meta(b'surname_list')

def __connect_secondary(self):
"""
@@ -871,8 +910,8 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):

result_list = list(find_backlink_handles(handle))
"""

handle = str(handle)
if isinstance(handle, UNITYPE):
handle = handle.encode('utf-8')
# Use the secondary index to locate all the reference_map entries
# that include a reference to the object we are looking for.
referenced_cur = self.get_reference_map_referenced_cursor()
@@ -1006,25 +1045,34 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
Remove the reference specified by the key, preserving the change in
the passed transaction.
"""
if isinstance(key, tuple):
#create a string key
key = str(key)
if isinstance(key, UNITYPE):
key = key.encode('utf-8')
if not self.readonly:
if not transaction.batch:
old_data = self.reference_map.get(str(key), txn=txn)
transaction.add(REFERENCE_KEY, TXNDEL, str(key), old_data, None)
old_data = self.reference_map.get(key, txn=txn)
transaction.add(REFERENCE_KEY, TXNDEL, key, old_data, None)
#transaction.reference_del.append(str(key))
self.reference_map.delete(str(key), txn=txn)
self.reference_map.delete(key, txn=txn)

def __add_reference(self, key, data, transaction, txn):
"""
Add the reference specified by the key and the data, preserving the
change in the passed transaction.
"""

if isinstance(key, tuple):
#create a string key
key = str(key)
if isinstance(key, UNITYPE):
key = key.encode('utf-8')
if self.readonly or not key:
return

self.reference_map.put(str(key), data, txn=txn)
self.reference_map.put(key, data, txn=txn)
if not transaction.batch:
transaction.add(REFERENCE_KEY, TXNADD, str(key), None, data)
transaction.add(REFERENCE_KEY, TXNADD, key, None, data)
#transaction.reference_add.append((str(key), data))

@catch_db_error
@@ -1111,45 +1159,45 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
with BSDDBTxn(self.env, self.metadata) as txn:

# name display formats
txn.put('name_formats', self.name_formats)
txn.put(b'name_formats', self.name_formats)

# database owner
owner_data = self.owner.serialize()
txn.put('researcher', owner_data)
txn.put(b'researcher', owner_data)

# bookmarks
txn.put('bookmarks', self.bookmarks.get())
txn.put('family_bookmarks', self.family_bookmarks.get())
txn.put('event_bookmarks', self.event_bookmarks.get())
txn.put('source_bookmarks', self.source_bookmarks.get())
txn.put('citation_bookmarks', self.citation_bookmarks.get())
txn.put('place_bookmarks', self.place_bookmarks.get())
txn.put('repo_bookmarks', self.repo_bookmarks.get())
txn.put('media_bookmarks', self.media_bookmarks.get())
txn.put('note_bookmarks', self.note_bookmarks.get())
txn.put(b'bookmarks', self.bookmarks.get())
txn.put(b'family_bookmarks', self.family_bookmarks.get())
txn.put(b'event_bookmarks', self.event_bookmarks.get())
txn.put(b'source_bookmarks', self.source_bookmarks.get())
txn.put(b'citation_bookmarks', self.citation_bookmarks.get())
txn.put(b'place_bookmarks', self.place_bookmarks.get())
txn.put(b'repo_bookmarks', self.repo_bookmarks.get())
txn.put(b'media_bookmarks', self.media_bookmarks.get())
txn.put(b'note_bookmarks', self.note_bookmarks.get())

# gender stats
txn.put('gender_stats', self.genderStats.save_stats())
txn.put(b'gender_stats', self.genderStats.save_stats())

# Custom type values
txn.put('fevent_names', list(self.family_event_names))
txn.put('pevent_names', list(self.individual_event_names))
txn.put('fattr_names', list(self.family_attributes))
txn.put('pattr_names', list(self.individual_attributes))
txn.put('marker_names', list(self.marker_names))
txn.put('child_refs', list(self.child_ref_types))
txn.put('family_rels', list(self.family_rel_types))
txn.put('event_roles', list(self.event_role_names))
txn.put('name_types', list(self.name_types))
txn.put('origin_types', list(self.origin_types))
txn.put('repo_types', list(self.repository_types))
txn.put('note_types', list(self.note_types))
txn.put('sm_types', list(self.source_media_types))
txn.put('url_types', list(self.url_types))
txn.put('mattr_names', list(self.media_attributes))
txn.put(b'fevent_names', list(self.family_event_names))
txn.put(b'pevent_names', list(self.individual_event_names))
txn.put(b'fattr_names', list(self.family_attributes))
txn.put(b'pattr_names', list(self.individual_attributes))
txn.put(b'marker_names', list(self.marker_names))
txn.put(b'child_refs', list(self.child_ref_types))
txn.put(b'family_rels', list(self.family_rel_types))
txn.put(b'event_roles', list(self.event_role_names))
txn.put(b'name_types', list(self.name_types))
txn.put(b'origin_types', list(self.origin_types))
txn.put(b'repo_types', list(self.repository_types))
txn.put(b'note_types', list(self.note_types))
txn.put(b'sm_types', list(self.source_media_types))
txn.put(b'url_types', list(self.url_types))
txn.put(b'mattr_names', list(self.media_attributes))

# name display formats
txn.put('surname_list', self.surname_list)
txn.put(b'surname_list', self.surname_list)

self.metadata.close()
@@ -1256,9 +1304,14 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
versionpath = os.path.join(self.path, BDBVERSFN)
try:
with open(versionpath, "w") as version_file:
version_file.write(str(db.version()))
version = str(db.version())
if sys.version_info[0] < 3:
if isinstance(version, UNITYPE):
version = version.encode('utf-8')
version_file.write(version)
except:
# Storing the version of Berkeley Db is not really vital.
print ("Error storing berkeley db version")
pass

try:
@@ -1268,7 +1321,7 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):

def create_id(self):
return "%08x%08x" % ( int(time.time()*10000),
self.rand.randint(0, maxint))
self.rand.randint(0, maxsize))

def __add_object(self, obj, transaction, find_next_func, commit_func):
if find_next_func and not obj.gramps_id:
@@ -1415,7 +1468,8 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
if self.readonly or not handle:
return

handle = str(handle)
if isinstance(handle, UNITYPE):
handle = handle.encode('utf-8')
if transaction.batch:
with BSDDBTxn(self.env, data_map) as txn:
self.delete_primary_from_reference_map(handle, transaction,
@@ -1439,6 +1493,8 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
person = self.get_person_from_handle(handle)
self.genderStats.uncount_person (person)
self.remove_from_surname_list(person)
if isinstance(handle, UNITYPE):
handle = handle.encode('utf-8')
if transaction.batch:
with BSDDBTxn(self.env, self.person_map) as txn:
self.delete_primary_from_reference_map(handle, transaction,
@@ -1447,7 +1503,7 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
else:
self.delete_primary_from_reference_map(handle, transaction,
txn=self.txn)
self.person_map.delete(str(handle), txn=self.txn)
self.person_map.delete(handle, txn=self.txn)
transaction.add(PERSON_KEY, TXNDEL, handle, person.serialize(), None)

def remove_source(self, handle, transaction):
@@ -1542,7 +1598,7 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
if group is not None:
txn.put(sname, group)
if group == None:
grouppar = u''
grouppar = ''
else:
grouppar = group
self.emit('person-groupname-rebuild', (name, grouppar))
@@ -1568,7 +1624,7 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
"""
if batch_transaction:
return
name = unicode(find_surname_name(person.handle,
name = conv_to_unicode(find_surname_name(person.handle,
person.get_primary_name().serialize()), 'utf-8')
i = bisect.bisect(self.surname_list, name)
if 0 < i <= len(self.surname_list):
@@ -1588,11 +1644,18 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
"""
name = find_surname_name(person.handle,
person.get_primary_name().serialize())
if sys.version_info[0] < 3:
if isinstance(name, unicode):
uname = name
name = str(name)
else:
uname = unicode(name, 'utf-8')
else:
if isinstance(name, str):
uname = name
name = name.encode('utf-8')
else:
uname = str(name)
try:
cursor = self.surnames.cursor(txn=self.txn)
cursor_position = cursor.set(name)
@@ -1601,7 +1664,7 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
i = bisect.bisect(self.surname_list, uname)
if 0 <= i-1 < len(self.surname_list):
del self.surname_list[i-1]
except db.DBError, err:
except db.DBError as err:
if str(err) == "(0, 'DB object has been closed')":
pass # A batch transaction closes the surnames db table.
else:
@@ -1619,7 +1682,9 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
return

obj.change = int(change_time or time.time())
handle = str(obj.handle)
handle = obj.handle
if isinstance(handle, UNITYPE):
handle = handle.encode('utf-8')

self.update_reference_map(obj, transaction, self.txn)

@@ -1850,8 +1915,10 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
transaction, change_time)

def get_from_handle(self, handle, class_type, data_map):
if isinstance(handle, UNITYPE):
handle = handle.encode('utf-8')
try:
data = data_map.get(str(handle), txn=self.txn)
data = data_map.get(handle, txn=self.txn)
except:
data = None
# under certain circumstances during a database reload,
@@ -1936,7 +2003,7 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
self.env.log_flush()
if not transaction.batch:
emit = self.__emit
for obj_type, obj_name in KEY_TO_NAME_MAP.iteritems():
for obj_type, obj_name in KEY_TO_NAME_MAP.items():
emit(transaction, obj_type, TXNADD, obj_name, '-add')
emit(transaction, obj_type, TXNUPD, obj_name, '-update')
emit(transaction, obj_type, TXNDEL, obj_name, '-delete')
@@ -1983,7 +2050,7 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
# "while Gtk.events_pending(): Gtk.main_iteration() loop"
# (typically used in a progress bar), so emit rebuild signals
# to correct that.
object_types = set([x[0] for x in transaction.keys()])
object_types = set([x[0] for x in list(transaction.keys())])
for object_type in object_types:
if object_type == REFERENCE_KEY:
continue
@@ -2040,17 +2107,19 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
def gramps_upgrade(self, callback=None):
UpdateCallback.__init__(self, callback)

version = self.metadata.get('version', default=_MINVERSION)
version = self.metadata.get(b'version', default=_MINVERSION)

t = time.time()

import upgrade
from . import upgrade
if version < 14:
upgrade.gramps_upgrade_14(self)
if version < 15:
upgrade.gramps_upgrade_15(self)
if version < 16:
upgrade.gramps_upgrade_16(self)
if version < 17:
upgrade.gramps_upgrade_17(self)

self.reset()
self.set_total(6)
@@ -2064,9 +2133,6 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
self.__close_undodb()
self.db_is_open = False

if version < 17:
self.__connect_secondary()
upgrade.gramps_upgrade_17(self)

_LOG.debug("Upgrade time: %d seconds" % int(time.time()-t))

@@ -2120,7 +2186,7 @@ class DbBsddb(DbBsddbRead, DbWriteBase, UpdateCallback):
self.metadata = self.__open_shelf(full_name, META)

with BSDDBTxn(self.env, self.metadata) as txn:
txn.put('version', _DBVERSION)
txn.put(b'version', _DBVERSION)

self.metadata.close()
self.env.close()
@@ -2193,13 +2259,14 @@ if __name__ == "__main__":
db_name = f.read()
if db_name == 'Small Example':
break
print "loading", db_path
print("loading", db_path)
d.load(db_path, lambda x: x)

print d.get_default_person()
print(d.get_default_person())
out = ''
with d.get_person_cursor() as c:
for key, data in c:
person = Person(data)
print key, person.get_primary_name().get_name(),
out += key + person.get_primary_name().get_name()

print d.surnames.keys()
print(out, list(d.surnames.keys()))
@ -25,25 +25,25 @@
|
||||
Package providing filter rules for GRAMPS.
|
||||
"""
|
||||
|
||||
from _allplaces import AllPlaces
|
||||
from _hascitation import HasCitation
|
||||
from _hasgallery import HasGallery
|
||||
from _hasidof import HasIdOf
|
||||
from _regexpidof import RegExpIdOf
|
||||
from _hasnote import HasNote
|
||||
from _hasnoteregexp import HasNoteRegexp
|
||||
from _hasnotematchingsubstringof import HasNoteMatchingSubstringOf
|
||||
from _hasreferencecountof import HasReferenceCountOf
|
||||
from _hassourcecount import HasSourceCount
|
||||
from _hassourceof import HasSourceOf
|
||||
from _placeprivate import PlacePrivate
|
||||
from _matchesfilter import MatchesFilter
|
||||
from _haslocation import HasLocation
|
||||
from _hasnolatorlon import HasNoLatOrLon
|
||||
from _inlatlonneighborhood import InLatLonNeighborhood
|
||||
from _matcheseventfilter import MatchesEventFilter
|
||||
from _matchessourceconfidence import MatchesSourceConfidence
|
||||
from _changedsince import ChangedSince
|
||||
from ._allplaces import AllPlaces
|
||||
from ._hascitation import HasCitation
|
||||
from ._hasgallery import HasGallery
|
||||
from ._hasidof import HasIdOf
|
||||
from ._regexpidof import RegExpIdOf
|
||||
from ._hasnote import HasNote
|
||||
from ._hasnoteregexp import HasNoteRegexp
|
||||
from ._hasnotematchingsubstringof import HasNoteMatchingSubstringOf
|
||||
from ._hasreferencecountof import HasReferenceCountOf
|
||||
from ._hassourcecount import HasSourceCount
|
||||
from ._hassourceof import HasSourceOf
|
||||
from ._placeprivate import PlacePrivate
|
||||
from ._matchesfilter import MatchesFilter
|
||||
from ._haslocation import HasLocation
|
||||
from ._hasnolatorlon import HasNoLatOrLon
|
||||
from ._inlatlonneighborhood import InLatLonNeighborhood
|
||||
from ._matcheseventfilter import MatchesEventFilter
|
||||
from ._matchessourceconfidence import MatchesSourceConfidence
|
||||
from ._changedsince import ChangedSince
|
||||
|
||||
editor_rule_list = [
|
||||
AllPlaces,
|
||||
|
@ -25,18 +25,19 @@
|
||||
"""
|
||||
Place object for GRAMPS.
|
||||
"""
|
||||
from __future__ import unicode_literals
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# GRAMPS modules
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
from primaryobj import PrimaryObject
|
||||
from citationbase import CitationBase
|
||||
from notebase import NoteBase
|
||||
from mediabase import MediaBase
|
||||
from urlbase import UrlBase
|
||||
from location import Location
|
||||
from .primaryobj import PrimaryObject
|
||||
from .citationbase import CitationBase
|
||||
from .notebase import NoteBase
|
||||
from .mediabase import MediaBase
|
||||
from .urlbase import UrlBase
|
||||
from .location import Location
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
|
@ -61,6 +61,7 @@ from gramps.gen.db.exceptions import (DbUpgradeRequiredError,
|
||||
BsddbDowngradeError,
|
||||
DbVersionError,
|
||||
DbEnvironmentError)
|
||||
from gramps.gen.constfunc import STRTYPE
|
||||
from gramps.gen.utils.file import get_unicode_path_from_file_chooser
|
||||
from .pluginmanager import GuiPluginManager
|
||||
from .dialog import (DBErrorDialog, ErrorDialog, QuestionDialog2,
|
||||
@ -132,11 +133,13 @@ class DbLoader(CLIDbLoader):
|
||||
|
||||
pmgr = GuiPluginManager.get_instance()
|
||||
|
||||
import_dialog = Gtk.FileChooserDialog(_('Gramps: Import database'),
|
||||
import_dialog = Gtk.FileChooserDialog(_('Gramps: Import Family Tree'),
|
||||
self.uistate.window,
|
||||
Gtk.FileChooserAction.OPEN,
|
||||
(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
|
||||
'gramps-import', Gtk.ResponseType.OK))
|
||||
(Gtk.STOCK_CANCEL,
|
||||
Gtk.ResponseType.CANCEL,
|
||||
_('Import'),
|
||||
Gtk.ResponseType.OK))
|
||||
import_dialog.set_local_only(False)
|
||||
|
||||
# Always add automatic (match all files) filter
|
||||
@ -205,8 +208,7 @@ class DbLoader(CLIDbLoader):
|
||||
In this process, a warning dialog can pop up.
|
||||
|
||||
"""
|
||||
|
||||
if not isinstance(filename, basestring):
|
||||
if not isinstance(filename, STRTYPE):
|
||||
return True
|
||||
|
||||
filename = os.path.normpath(os.path.abspath(filename))
|
||||
@ -249,7 +251,7 @@ class DbLoader(CLIDbLoader):
|
||||
User(callback=self._pulse_progress))
|
||||
dirname = os.path.dirname(filename) + os.path.sep
|
||||
config.set('paths.recent-import-dir', dirname)
|
||||
except UnicodeError, msg:
|
||||
except UnicodeError as msg:
|
||||
ErrorDialog(
|
||||
_("Could not import file: %s") % filename,
|
||||
_("This file incorrectly identifies its character "
|
||||
@ -266,7 +268,7 @@ class DbLoader(CLIDbLoader):
|
||||
is returned
|
||||
"""
|
||||
if self.import_info is None:
|
||||
return u""
|
||||
return ""
|
||||
return self.import_info.info_text()
|
||||
|
||||
def read_file(self, filename):
|
||||
@ -306,7 +308,7 @@ class DbLoader(CLIDbLoader):
|
||||
db.load(filename, self._pulse_progress,
|
||||
mode, upgrade=False)
|
||||
self.dbstate.change_database(db)
|
||||
except DbUpgradeRequiredError, msg:
|
||||
except DbUpgradeRequiredError as msg:
|
||||
if QuestionDialog2(_("Need to upgrade database!"),
|
||||
str(msg),
|
||||
_("Upgrade now"),
|
||||
@ -319,20 +321,20 @@ class DbLoader(CLIDbLoader):
|
||||
self.dbstate.change_database(db)
|
||||
else:
|
||||
self.dbstate.no_database()
|
||||
except BsddbDowngradeError, msg:
|
||||
except BsddbDowngradeError as msg:
|
||||
self.dbstate.no_database()
|
||||
self._errordialog( _("Cannot open database"), str(msg))
|
||||
except DbVersionError, msg:
|
||||
except DbVersionError as msg:
|
||||
self.dbstate.no_database()
|
||||
self._errordialog( _("Cannot open database"), str(msg))
|
||||
except DbEnvironmentError, msg:
|
||||
except DbEnvironmentError as msg:
|
||||
self.dbstate.no_database()
|
||||
self._errordialog( _("Cannot open database"), str(msg))
|
||||
except OSError, msg:
|
||||
except OSError as msg:
|
||||
self.dbstate.no_database()
|
||||
self._errordialog(
|
||||
_("Could not open file: %s") % filename, str(msg))
|
||||
except DbError, msg:
|
||||
except DbError as msg:
|
||||
self.dbstate.no_database()
|
||||
self._dberrordialog(msg)
|
||||
except Exception as newerror:
|
||||
|
@ -36,8 +36,8 @@ from gi.repository import GObject
|
||||
from gramps.gen.lib import Location
|
||||
from gramps.gen.errors import WindowActiveError
|
||||
from ...ddtargets import DdTargets
|
||||
from locationmodel import LocationModel
|
||||
from embeddedlist import EmbeddedList
|
||||
from .locationmodel import LocationModel
|
||||
from .embeddedlist import EmbeddedList
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
|
@ -28,7 +28,7 @@
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
from gramps.gen.db import DbTxn
|
||||
from editsecondary import EditSecondary
|
||||
from .editsecondary import EditSecondary
|
||||
from ..glade import Glade
|
||||
from gramps.gen.errors import ValidationError
|
||||
from gramps.gen.utils.place import conv_lat_lon
|
||||
|
@ -44,10 +44,10 @@ from gi.repository import Gtk
|
||||
# gramps modules
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
from gramps.gen.lib import NoteType, Place
|
||||
from gramps.gen.lib import NoteType, Place, Location
|
||||
from gramps.gen.db import DbTxn
|
||||
from editprimary import EditPrimary
|
||||
from displaytabs import (GrampsTab, LocationEmbedList, CitationEmbedList,
|
||||
from .editprimary import EditPrimary
|
||||
from .displaytabs import (GrampsTab, LocationEmbedList, CitationEmbedList,
|
||||
GalleryTab, NoteTab, WebEmbedList, PlaceBackRefList)
|
||||
from ..widgets import MonitoredEntry, PrivacyButton, LocationEntry
|
||||
from ..dialog import ErrorDialog
|
||||
@ -252,7 +252,7 @@ class EditPlace(EditPrimary):
|
||||
handle, new_locations = self.lentry.get_result()
|
||||
with DbTxn(_('Add location'), self.dbstate.db) as trans:
|
||||
for loc_type, name in new_locations:
|
||||
new_location = gen.lib.Location()
|
||||
new_location = Location()
|
||||
new_location.parent = handle
|
||||
new_location.name = name
|
||||
new_location.set_type(loc_type)
|
||||
|
@ -71,6 +71,7 @@ from ..selectors import SelectorFactory
|
||||
from gramps.gen.display.name import displayer as _nd
|
||||
from gramps.gen.utils.db import family_name
|
||||
from gramps.gen.utils.string import confidence
|
||||
from gramps.gen.constfunc import cuni
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
@ -106,14 +107,6 @@ _name2typeclass = {
|
||||
_('Surname origin type:'): NameOriginType,
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# Sorting function for the filter rules
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
def by_rule_name(f, s):
|
||||
return cmp(f.name, s.name)
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
# MyBoolean - check button with standard interface
|
||||
@ -337,7 +330,7 @@ class MyID(Gtk.Box):
|
||||
self.set_text(val.get_gramps_id())
|
||||
|
||||
def get_text(self):
|
||||
return unicode(self.entry.get_text())
|
||||
return cuni(self.entry.get_text())
|
||||
|
||||
def name_from_gramps_id(self, gramps_id):
|
||||
if self.namespace == 'Person':
|
||||
@ -576,7 +569,7 @@ class EditRule(ManagedWindow):
|
||||
taglist = taglist + [tag.get_name() for tag in dbstate.db.iter_tags()]
|
||||
t = MyList(taglist, taglist)
|
||||
elif v == _('Confidence level:'):
|
||||
t = MyList(map(str, range(5)),
|
||||
t = MyList(list(map(str, list(range(5)))),
|
||||
[confidence[i] for i in range(5)])
|
||||
elif v == _('Location:'):
|
||||
t = MyLocation(dbstate)
|
||||
@ -617,7 +610,7 @@ class EditRule(ManagedWindow):
|
||||
else:
|
||||
self.sel_class = None
|
||||
|
||||
keys = sorted(the_map, by_rule_name, reverse=True)
|
||||
keys = sorted(the_map, key=lambda x: x.name, reverse=True)
|
||||
catlist = sorted(set(class_obj.category for class_obj in keys))
|
||||
|
||||
for category in catlist:
|
||||
@ -642,7 +635,7 @@ class EditRule(ManagedWindow):
|
||||
self.notebook.set_current_page(page)
|
||||
self.display_values(self.active_rule.__class__)
|
||||
(class_obj, vallist, tlist) = self.page[page]
|
||||
r = self.active_rule.values()
|
||||
r = list(self.active_rule.values())
|
||||
for i in range(0, min(len(tlist), len(r))):
|
||||
tlist[i].set_text(r[i])
|
||||
|
||||
@ -729,7 +722,7 @@ class EditRule(ManagedWindow):
|
||||
try:
|
||||
page = self.notebook.get_current_page()
|
||||
(class_obj, vallist, tlist) = self.page[page]
|
||||
value_list = [unicode(sclass.get_text()) for sclass in tlist]
|
||||
value_list = [cuni(sclass.get_text()) for sclass in tlist]
|
||||
new_rule = class_obj(value_list)
|
||||
|
||||
self.update_rule(self.active_rule, new_rule)
|
||||
@ -811,7 +804,7 @@ class EditFilter(ManagedWindow):
|
||||
self.close()
|
||||
|
||||
def filter_name_changed(self, obj):
|
||||
name = unicode(self.fname.get_text())
|
||||
name = cuni(self.fname.get_text())
|
||||
# Make sure that the name is not empty
|
||||
# and not in the list of existing filters (excluding this one)
|
||||
names = [filt.get_name()
|
||||
@ -834,14 +827,14 @@ class EditFilter(ManagedWindow):
|
||||
self.rlist.add([r.name,r.display_values()],r)
|
||||
|
||||
def on_ok_clicked(self, obj):
|
||||
n = unicode(self.fname.get_text()).strip()
|
||||
n = cuni(self.fname.get_text()).strip()
|
||||
if n == '':
|
||||
return
|
||||
if n != self.filter.get_name():
|
||||
self.uistate.emit('filter-name-changed',
|
||||
(self.namespace,unicode(self.filter.get_name()), n))
|
||||
(self.namespace, cuni(self.filter.get_name()), n))
|
||||
self.filter.set_name(n)
|
||||
self.filter.set_comment(unicode(self.comment.get_text()).strip())
|
||||
self.filter.set_comment(cuni(self.comment.get_text()).strip())
|
||||
for f in self.filterdb.get_filters(self.namespace)[:]:
|
||||
if n == f.get_name():
|
||||
self.filterdb.get_filters(self.namespace).remove(f)
|
||||
@ -974,7 +967,7 @@ class ShowResults(ManagedWindow):
|
||||
gid = repo.get_gramps_id()
|
||||
elif self.namespace == 'Note':
|
||||
note = self.db.get_note_from_handle(handle)
|
||||
name = note.get().replace(u'\n', u' ')
|
||||
name = note.get().replace('\n', ' ')
|
||||
if len(name) > 80:
|
||||
name = name[:80]+"..."
|
||||
gid = note.get_gramps_id()
|
||||
@ -1149,7 +1142,7 @@ class FilterEditor(ManagedWindow):
|
||||
|
||||
# Remove what we found
|
||||
filters = self.filterdb.get_filters(space)
|
||||
map(filters.remove, filter_set)
|
||||
list(map(filters.remove, filter_set))
|
||||
|
||||
def _find_dependent_filters(self, space, gfilter, filter_set):
|
||||
"""
|
||||
@ -1165,7 +1158,7 @@ class FilterEditor(ManagedWindow):
|
||||
if the_filter.get_name() == name:
|
||||
continue
|
||||
for rule in the_filter.get_rules():
|
||||
values = rule.values()
|
||||
values = list(rule.values())
|
||||
if issubclass(rule.__class__, MatchesFilterBase) \
|
||||
and (name in values):
|
||||
self._find_dependent_filters(space, the_filter, filter_set)
|
||||
@ -1203,7 +1196,7 @@ class FilterEditor(ManagedWindow):
|
||||
|
||||
for the_filter in self.filterdb.get_filters(space):
|
||||
for rule in the_filter.get_rules():
|
||||
values = rule.values()
|
||||
values = list(rule.values())
|
||||
if issubclass(rule.__class__, MatchesFilterBase) \
|
||||
and (old_name in values):
|
||||
ind = values.index(old_name)
|
||||
@ -1212,7 +1205,7 @@ class FilterEditor(ManagedWindow):
|
||||
def check_recursive_filters(self, space, name):
|
||||
for the_filter in self.filterdb.get_filters(space):
|
||||
for rule in the_filter.get_rules():
|
||||
values = rule.values()
|
||||
values = list(rule.values())
|
||||
if issubclass(rule.__class__, MatchesFilterBase) \
|
||||
and (name in values):
|
||||
return True
|
||||
|
@ -44,6 +44,7 @@ from gi.repository import Gtk
|
||||
from ... import widgets
|
||||
from .. import build_filter_model
|
||||
from . import SidebarFilter
|
||||
from gramps.gen.constfunc import cuni
|
||||
from gramps.gen.filters import GenericFilterFactory, rules
|
||||
from gramps.gen.filters.rules.place import (RegExpIdOf, HasIdOf, HasLocation,
|
||||
HasNoteRegexp, HasNoteMatchingSubstringOf,
|
||||
@ -94,10 +95,10 @@ class PlaceSidebarFilter(SidebarFilter):
|
||||
self.generic.set_active(0)
|
||||
|
||||
def get_filter(self):
|
||||
gid = unicode(self.filter_id.get_text()).strip()
|
||||
title = unicode(self.filter_title.get_text()).strip()
|
||||
gid = cuni(self.filter_id.get_text()).strip()
|
||||
title = cuni(self.filter_title.get_text()).strip()
|
||||
location = self.filter_location.get_handle()
|
||||
note = unicode(self.filter_note.get_text()).strip()
|
||||
note = cuni(self.filter_note.get_text()).strip()
|
||||
regex = self.filter_regex.get_active()
|
||||
gen = self.generic.get_active() > 0
|
||||
|
||||
@ -127,7 +128,7 @@ class PlaceSidebarFilter(SidebarFilter):
|
||||
if self.generic.get_active() != 0:
|
||||
model = self.generic.get_model()
|
||||
node = self.generic.get_active_iter()
|
||||
obj = unicode(model.get_value(node, 0))
|
||||
obj = cuni(model.get_value(node, 0))
|
||||
rule = MatchesFilter([obj])
|
||||
generic_filter.add_rule(rule)
|
||||
|
||||
|
@ -21,38 +21,38 @@
|
||||
|
||||
# $Id$
|
||||
|
||||
from selectorexceptions import SelectorException
|
||||
from .selectorexceptions import SelectorException
|
||||
|
||||
def SelectorFactory(classname):
|
||||
if classname == 'Person':
|
||||
from selectperson import SelectPerson
|
||||
from .selectperson import SelectPerson
|
||||
cls = SelectPerson
|
||||
elif classname == 'Family':
|
||||
from selectfamily import SelectFamily
|
||||
from .selectfamily import SelectFamily
|
||||
cls = SelectFamily
|
||||
elif classname == 'Event':
|
||||
from selectevent import SelectEvent
|
||||
from .selectevent import SelectEvent
|
||||
cls = SelectEvent
|
||||
elif classname == 'Place':
|
||||
from selectplace import SelectPlace
|
||||
from .selectplace import SelectPlace
|
||||
cls = SelectPlace
|
||||
elif classname == 'Location':
|
||||
from selectlocation import SelectLocation
|
||||
from .selectlocation import SelectLocation
|
||||
cls = SelectLocation
|
||||
elif classname == 'Source':
|
||||
from selectsource import SelectSource
|
||||
from .selectsource import SelectSource
|
||||
cls = SelectSource
|
||||
elif classname == 'Citation':
|
||||
from selectcitation import SelectCitation
|
||||
from .selectcitation import SelectCitation
|
||||
cls = SelectCitation
|
||||
elif classname in ['MediaObject', 'Media']:
|
||||
from selectobject import SelectObject
|
||||
from .selectobject import SelectObject
|
||||
cls = SelectObject
|
||||
elif classname == 'Repository':
|
||||
from selectrepository import SelectRepository
|
||||
from .selectrepository import SelectRepository
|
||||
cls = SelectRepository
|
||||
elif classname == 'Note':
|
||||
from selectnote import SelectNote
|
||||
from .selectnote import SelectNote
|
||||
cls = SelectNote
|
||||
else:
|
||||
raise SelectorException("Attempt to create unknown "
|
||||
|
@ -35,7 +35,7 @@ from gramps.gen.ggettext import gettext as _
|
||||
#
|
||||
#-------------------------------------------------------------------------
|
||||
from ..views.treemodels.placemodel import PlaceListModel
|
||||
from baseselector import BaseSelector
|
||||
from .baseselector import BaseSelector
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
|
@ -24,15 +24,15 @@
|
||||
Package init for the treemodels package.
|
||||
"""
|
||||
|
||||
from peoplemodel import PeopleBaseModel, PersonListModel, PersonTreeModel
|
||||
from familymodel import FamilyModel
|
||||
from eventmodel import EventModel
|
||||
from sourcemodel import SourceModel
|
||||
from placemodel import PlaceBaseModel, PlaceListModel, PlaceTreeModel
|
||||
from locationmodel import LocationTreeModel
|
||||
from mediamodel import MediaModel
|
||||
from repomodel import RepositoryModel
|
||||
from notemodel import NoteModel
|
||||
from citationbasemodel import CitationBaseModel
|
||||
from citationlistmodel import CitationListModel
|
||||
from citationtreemodel import CitationTreeModel
|
||||
from .peoplemodel import PeopleBaseModel, PersonListModel, PersonTreeModel
|
||||
from .familymodel import FamilyModel
|
||||
from .eventmodel import EventModel
|
||||
from .sourcemodel import SourceModel
|
||||
from .placemodel import PlaceBaseModel, PlaceListModel, PlaceTreeModel
|
||||
from .locationmodel import LocationTreeModel
|
||||
from .mediamodel import MediaModel
|
||||
from .repomodel import RepositoryModel
|
||||
from .notemodel import NoteModel
|
||||
from .citationbasemodel import CitationBaseModel
|
||||
from .citationlistmodel import CitationListModel
|
||||
from .citationtreemodel import CitationTreeModel
|
||||
|
@ -48,6 +48,7 @@ from gi.repository import Gtk
|
||||
#-------------------------------------------------------------------------
|
||||
from gramps.gen.datehandler import format_time
|
||||
from gramps.gen.utils.place import conv_lat_lon
|
||||
from gramps.gen.constfunc import cuni
|
||||
from .flatbasemodel import FlatBaseModel
|
||||
from .treebasemodel import TreeBaseModel
|
||||
|
||||
@ -112,19 +113,19 @@ class PlaceBaseModel(object):
|
||||
return len(self.fmap)+1
|
||||
|
||||
def column_handle(self, data):
|
||||
return unicode(data[0])
|
||||
return cuni(data[0])
|
||||
|
||||
def column_place_name(self, data):
|
||||
return unicode(data[2])
|
||||
return cuni(data[2])
|
||||
|
||||
def column_id(self, data):
|
||||
return unicode(data[1])
|
||||
return cuni(data[1])
|
||||
|
||||
def column_location(self, data):
|
||||
try:
|
||||
loc = self.db.get_location_from_handle(data[3])
|
||||
lines = [loc.name]
|
||||
while loc.parent is not None:
|
||||
while loc.parent != str(None):
|
||||
loc = self.db.get_location_from_handle(loc.parent)
|
||||
lines.append(loc.name)
|
||||
return ', '.join(lines)
|
||||
@ -164,7 +165,7 @@ class PlaceListModel(PlaceBaseModel, FlatBaseModel):
|
||||
FlatBaseModel.destroy(self)
|
||||
|
||||
def column_name(self, data):
|
||||
return unicode(data[2])
|
||||
return cuni(data[2])
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
@ -254,16 +255,16 @@ class PlaceTreeModel(PlaceBaseModel, TreeBaseModel):
|
||||
if data[5] is not None:
|
||||
level = [data[5][0][i] for i in range(5,-1,-1)]
|
||||
if not (level[3] or level[4] or level[5]):
|
||||
name = unicode(level[2] or level[1] or level[0])
|
||||
name = cuni(level[2] or level[1] or level[0])
|
||||
else:
|
||||
name = ', '.join([item for item in level[3:] if item])
|
||||
if not name:
|
||||
name = unicode(data[2])
|
||||
name = cuni(data[2])
|
||||
|
||||
if name:
|
||||
return cgi.escape(name)
|
||||
else:
|
||||
return u"<i>%s<i>" % cgi.escape(_("<no name>"))
|
||||
return "<i>%s<i>" % cgi.escape(_("<no name>"))
|
||||
|
||||
def column_header(self, node):
|
||||
"""
|
||||
|
@ -965,7 +965,7 @@ class TreeBaseModel(GObject.Object, Gtk.TreeModel):
|
||||
pathlist.append(index)
|
||||
node = parent
|
||||
|
||||
if pathlist is not None:
|
||||
if pathlist:
|
||||
pathlist.reverse()
|
||||
return Gtk.TreePath(tuple(pathlist))
|
||||
else:
|
||||
|
@ -23,25 +23,25 @@
|
||||
|
||||
"""Custom widgets."""
|
||||
|
||||
from basicentry import *
|
||||
from buttons import *
|
||||
from expandcollapsearrow import *
|
||||
from labels import *
|
||||
from locationentry import *
|
||||
from locationentry2 import *
|
||||
from linkbox import *
|
||||
from photo import *
|
||||
from monitoredwidgets import *
|
||||
from shortlistcomboentry import *
|
||||
from springseparator import *
|
||||
from statusbar import Statusbar
|
||||
from styledtextbuffer import *
|
||||
from styledtexteditor import *
|
||||
from toolcomboentry import *
|
||||
from undoablebuffer import *
|
||||
from undoableentry import *
|
||||
from undoablestyledbuffer import *
|
||||
from validatedcomboentry import *
|
||||
from validatedmaskedentry import *
|
||||
from valueaction import *
|
||||
from valuetoolitem import *
|
||||
from .basicentry import *
|
||||
from .buttons import *
|
||||
from .expandcollapsearrow import *
|
||||
from .labels import *
|
||||
from .locationentry import *
|
||||
from .locationentry2 import *
|
||||
from .linkbox import *
|
||||
from .photo import *
|
||||
from .monitoredwidgets import *
|
||||
from .shortlistcomboentry import *
|
||||
from .springseparator import *
|
||||
from .statusbar import Statusbar
|
||||
from .styledtextbuffer import *
|
||||
from .styledtexteditor import *
|
||||
from .toolcomboentry import *
|
||||
from .undoablebuffer import *
|
||||
from .undoableentry import *
|
||||
from .undoablestyledbuffer import *
|
||||
from .validatedcomboentry import *
|
||||
from .validatedmaskedentry import *
|
||||
from .valueaction import *
|
||||
from .valuetoolitem import *
|
||||
|
@ -69,7 +69,7 @@ class LocationEntry(object):
|
||||
if handle:
|
||||
locs = []
|
||||
loc = db.get_location_from_handle(handle)
|
||||
while loc.parent is not None:
|
||||
while loc.parent != str(None):
|
||||
locs.append(loc)
|
||||
loc = db.get_location_from_handle(loc.parent)
|
||||
locs.append(loc)
|
||||
|
@ -132,7 +132,7 @@ class LocationEntry2(Gtk.Entry):
|
||||
def get_location_text(self, handle):
|
||||
loc = self.dbstate.db.get_location_from_handle(handle)
|
||||
lines = [loc.name]
|
||||
while loc.parent is not None:
|
||||
while loc.parent != str(None):
|
||||
loc = self.dbstate.db.get_location_from_handle(loc.parent)
|
||||
lines.append(loc.name)
|
||||
return ', '.join(lines)
|
||||
|
@ -125,7 +125,7 @@ class PlaceDetails(Gramplet):
|
||||
"""
|
||||
loc = self.dbstate.db.get_location_from_handle(handle)
|
||||
lines = [loc.name]
|
||||
while loc.parent is not None:
|
||||
while loc.parent != str(None):
|
||||
loc = self.dbstate.db.get_location_from_handle(loc.parent)
|
||||
lines.append(loc.name)
|
||||
self.add_row(_('Location'), '\n'.join(lines))
|
||||
|
@ -33,7 +33,6 @@ Geography for events
|
||||
from gramps.gen.ggettext import gettext as _
|
||||
import os
|
||||
import sys
|
||||
import urlparse
|
||||
import operator
|
||||
import locale
|
||||
from gi.repository import Gdk
|
||||
|
@ -33,7 +33,6 @@ Geography for one family
|
||||
from gramps.gen.ggettext import gettext as _
|
||||
import os
|
||||
import sys
|
||||
import urlparse
|
||||
import operator
|
||||
import locale
|
||||
from gi.repository import Gdk
|
||||
|
@ -33,7 +33,6 @@ Geography for one person
|
||||
from gramps.gen.ggettext import gettext as _
|
||||
import os
|
||||
import sys
|
||||
import urlparse
|
||||
import operator
|
||||
import locale
|
||||
from gi.repository import Gdk
|
||||
|
@ -34,7 +34,6 @@ from gramps.gen.ggettext import gettext as _
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import urlparse
|
||||
import operator
|
||||
import locale
|
||||
from gi.repository import Gdk
|
||||
|
@ -49,6 +49,7 @@ from gramps.gui.ddtargets import DdTargets
|
||||
from gramps.gui.dialog import ErrorDialog
|
||||
from gramps.gui.editors import EditLocation
|
||||
from gramps.gen.plug import CATEGORY_QR_PLACE
|
||||
from gramps.gui.views.listview import LISTTREE
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
#
|
||||
@ -114,6 +115,12 @@ class LocationView(ListView):
|
||||
|
||||
self.additional_uis.append(self.additional_ui())
|
||||
|
||||
def type_list(self):
|
||||
"""
|
||||
set the listtype, this governs eg keybinding
|
||||
"""
|
||||
return LISTTREE
|
||||
|
||||
def navigation_type(self):
|
||||
return 'Place'
|
||||
|
||||
|
@ -38,7 +38,7 @@ version = '1.0',
|
||||
gramps_target_version = '4.0',
|
||||
status = STABLE,
|
||||
fname = 'eventview.py',
|
||||
authors = [u"The Gramps project"],
|
||||
authors = ["The Gramps project"],
|
||||
authors_email = ["http://gramps-project.org"],
|
||||
category = ("Events", _("Events")),
|
||||
viewclass = 'EventView',
|
||||
@ -53,7 +53,7 @@ version = '1.0',
|
||||
gramps_target_version = '4.0',
|
||||
status = STABLE,
|
||||
fname = 'familyview.py',
|
||||
authors = [u"The Gramps project"],
|
||||
authors = ["The Gramps project"],
|
||||
authors_email = ["http://gramps-project.org"],
|
||||
category = ("Families", _("Families")),
|
||||
viewclass = 'FamilyView',
|
||||
@ -68,7 +68,7 @@ version = '1.0',
|
||||
gramps_target_version = '4.0',
|
||||
status = STABLE,
|
||||
fname = 'grampletview.py',
|
||||
authors = [u"The Gramps project"],
|
||||
authors = ["The Gramps project"],
|
||||
authors_email = ["http://gramps-project.org"],
|
||||
category = ("Gramplets", _("Gramplets")),
|
||||
viewclass = 'GrampletView',
|
||||
@ -83,7 +83,7 @@ version = '1.0',
|
||||
gramps_target_version = '4.0',
|
||||
status = STABLE,
|
||||
fname = 'mediaview.py',
|
||||
authors = [u"The Gramps project"],
|
||||
authors = ["The Gramps project"],
|
||||
authors_email = ["http://gramps-project.org"],
|
||||
category = ("Media", _("Media")),
|
||||
viewclass = 'MediaView',
|
||||
@ -98,7 +98,7 @@ version = '1.0',
|
||||
gramps_target_version = '4.0',
|
||||
status = STABLE,
|
||||
fname = 'noteview.py',
|
||||
authors = [u"The Gramps project"],
|
||||
authors = ["The Gramps project"],
|
||||
authors_email = ["http://gramps-project.org"],
|
||||
category = ("Notes", _("Notes")),
|
||||
viewclass = 'NoteView',
|
||||
@ -113,7 +113,7 @@ version = '1.0',
|
||||
gramps_target_version = '4.0',
|
||||
status = STABLE,
|
||||
fname = 'relview.py',
|
||||
authors = [u"The Gramps project"],
|
||||
authors = ["The Gramps project"],
|
||||
authors_email = ["http://gramps-project.org"],
|
||||
category = ("Relationships", _("Relationships")),
|
||||
viewclass = 'RelationshipView',
|
||||
@ -128,7 +128,7 @@ version = '1.0',
|
||||
gramps_target_version = '4.0',
|
||||
status = STABLE,
|
||||
fname = 'pedigreeview.py',
|
||||
authors = [u"The Gramps project"],
|
||||
authors = ["The Gramps project"],
|
||||
authors_email = ["http://gramps-project.org"],
|
||||
category = ("Ancestry", _("Charts")),
|
||||
viewclass = 'PedigreeView',
|
||||
@ -145,7 +145,7 @@ version = '1.0',
|
||||
gramps_target_version = '4.0',
|
||||
status = STABLE,
|
||||
fname = 'fanchartview.py',
|
||||
authors = [u"Douglas S. Blank", u"B. Malengier"],
|
||||
authors = ["Douglas S. Blank", "B. Malengier"],
|
||||
authors_email = ["doug.blank@gmail.com", "benny.malengier@gmail.com"],
|
||||
viewclass = 'FanChartView',
|
||||
stock_icon = 'gramps-fanchart',
|
||||
@ -160,7 +160,7 @@ version = '1.0',
|
||||
gramps_target_version = '4.0',
|
||||
status = STABLE,
|
||||
fname = 'fanchartdescview.py',
|
||||
authors = [u"B. Malengier"],
|
||||
authors = ["B. Malengier"],
|
||||
authors_email = ["benny.malengier@gmail.com"],
|
||||
viewclass = 'FanChartDescView',
|
||||
stock_icon = 'gramps-fanchartdesc',
|
||||
@ -175,7 +175,7 @@ version = '1.0',
|
||||
gramps_target_version = '4.0',
|
||||
status = STABLE,
|
||||
fname = 'persontreeview.py',
|
||||
authors = [u"The Gramps project"],
|
||||
authors = ["The Gramps project"],
|
||||
authors_email = ["http://gramps-project.org"],
|
||||
category = ("People", _("People")),
|
||||
viewclass = 'PersonTreeView',
|
||||
@ -192,7 +192,7 @@ version = '1.0',
|
||||
gramps_target_version = '4.0',
|
||||
status = STABLE,
|
||||
fname = 'personlistview.py',
|
||||
authors = [u"The Gramps project"],
|
||||
authors = ["The Gramps project"],
|
||||
authors_email = ["http://gramps-project.org"],
|
||||
category = ("People", _("People")),
|
||||
viewclass = 'PersonListView',
|
||||
@ -208,7 +208,7 @@ version = '1.0',
|
||||
gramps_target_version = '4.0',
|
||||
status = STABLE,
|
||||
fname = 'placelistview.py',
|
||||
authors = [u"The Gramps project"],
|
||||
authors = ["The Gramps project"],
|
||||
authors_email = ["http://gramps-project.org"],
|
||||
category = ("Places", _("Places")),
|
||||
viewclass = 'PlaceListView',
|
||||
@ -224,7 +224,7 @@ stock_icon = 'gramps-tree-list',
|
||||
#gramps_target_version = '4.0',
|
||||
#status = STABLE,
|
||||
#fname = 'placetreeview.py',
|
||||
#authors = [u"Donald N. Allingham", u"Gary Burton", u"Nick Hall"],
|
||||
#authors = ["Donald N. Allingham", "Gary Burton", "Nick Hall"],
|
||||
#authors_email = [""],
|
||||
#category = ("Places", _("Places")),
|
||||
#viewclass = 'PlaceTreeView',
|
||||
@ -239,7 +239,7 @@ version = '1.0',
|
||||
gramps_target_version = '4.0',
|
||||
status = STABLE,
|
||||
fname = 'repoview.py',
|
||||
authors = [u"The Gramps project"],
|
||||
authors = ["The Gramps project"],
|
||||
authors_email = ["http://gramps-project.org"],
|
||||
category = ("Repositories", _("Repositories")),
|
||||
viewclass = 'RepositoryView',
|
||||
@ -254,7 +254,7 @@ version = '1.0',
|
||||
gramps_target_version = '4.0',
|
||||
status = STABLE,
|
||||
fname = 'sourceview.py',
|
||||
authors = [u"The Gramps project"],
|
||||
authors = ["The Gramps project"],
|
||||
authors_email = ["http://gramps-project.org"],
|
||||
category = ("Sources", _("Sources")),
|
||||
viewclass = 'SourceView',
|
||||
@ -270,7 +270,7 @@ version = '1.0',
|
||||
gramps_target_version = '4.0',
|
||||
status = STABLE,
|
||||
fname = 'citationlistview.py',
|
||||
authors = [u"The Gramps project"],
|
||||
authors = ["The Gramps project"],
|
||||
authors_email = ["http://gramps-project.org"],
|
||||
category = ("Citations", _("Citations")),
|
||||
viewclass = 'CitationListView',
|
||||
@ -285,7 +285,7 @@ version = '1.0',
|
||||
gramps_target_version = '4.0',
|
||||
status = STABLE,
|
||||
fname = 'citationtreeview.py',
|
||||
authors = [u"Tim G L Lyons", u"Nick Hall"],
|
||||
authors = ["Tim G L Lyons", "Nick Hall"],
|
||||
authors_email = [""],
|
||||
category = ("Sources", _("Sources")),
|
||||
viewclass = 'CitationTreeView',
|
||||
|
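A note on two idioms that recur throughout the hunks above. The `except SomeError, msg:` form is Python 2 only syntax, while `except SomeError as msg:` is accepted by Python 2.6+ and Python 3, which is why every handler is rewritten. Similarly, `dict.iteritems()` no longer exists and `map()` returns an iterator in Python 3, so call sites that need a list wrap the result in `list()`. A minimal standalone sketch of both patterns (illustrative only, not code from the Gramps tree):

    try:
        raise IOError("disk full")
    except (OSError, IOError) as msg:           # portable exception syntax
        print("failed:", msg)

    key_map = {1: 'Person', 2: 'Family'}        # hypothetical mapping
    for obj_type, obj_name in key_map.items():  # .iteritems() is gone in Python 3
        print(obj_type, obj_name)

    names = list(map(str.upper, ['a', 'b']))    # map() is lazy; force a list when one is needed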
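The repeated guard `if isinstance(handle, UNITYPE): handle = handle.encode('utf-8')` exists because BSDDB keys must be byte strings while handles arrive as text, and the text type differs between Python 2 (`unicode`) and Python 3 (`str`). A minimal sketch of that conversion, with `UNITYPE` defined locally purely for illustration (the real alias lives elsewhere in the Gramps source):

    import sys

    if sys.version_info[0] < 3:
        UNITYPE = unicode              # Python 2 text type
    else:
        UNITYPE = str                  # Python 3 text type

    def to_db_key(handle):
        """Return the byte-string form of a handle, as a BSDDB table expects."""
        if isinstance(handle, UNITYPE):
            handle = handle.encode('utf-8')
        return handle

    print(to_db_key('abc123'))         # b'abc123' under Python 3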
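The dropped `u''` prefixes and the switch to `b'version'` for metadata keys follow from Python 3 string semantics: plain literals are already unicode (the `u` prefix was not accepted again until Python 3.3), while keys that must be bytes need an explicit `b` prefix. A short illustration with hypothetical values:

    from __future__ import unicode_literals   # no-op on Python 3; makes plain literals unicode on Python 2

    title = "Small Example"                    # text on both Python 2 and 3, no u'' needed
    version_key = b'version'                   # explicit bytes, suitable as a BSDDB key
    print(type(title).__name__, type(version_key).__name__)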