2002-10-20 19:55:16 +05:30
|
|
|
#
|
|
|
|
# Gramps - a GTK+/GNOME based genealogy program
|
|
|
|
#
|
2007-06-28 11:11:40 +05:30
|
|
|
# Copyright (C) 2000-2007 Donald N. Allingham
|
2009-07-13 01:15:17 +05:30
|
|
|
# Copyright (C) 2009 Gary Burton
|
2002-10-20 19:55:16 +05:30
|
|
|
#
|
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation; either version 2 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
2007-09-10 08:33:46 +05:30
|
|
|
# This program is distributed in the hope that it will be useful,
|
2002-10-20 19:55:16 +05:30
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with this program; if not, write to the Free Software
|
|
|
|
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
|
|
|
#
|
|
|
|
|
2003-11-07 21:59:27 +05:30
|
|
|
# $Id$
|
|
|
|
|
2009-07-04 03:30:58 +05:30
|
|
|
"""
|
|
|
|
Non GUI/GTK related utility functions
|
|
|
|
"""
|
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# Standard python modules
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
import os
|
2005-12-06 12:08:09 +05:30
|
|
|
import sys
|
2003-06-15 05:35:43 +05:30
|
|
|
import locale
|
2006-03-23 04:33:57 +05:30
|
|
|
import random
|
|
|
|
import time
|
2009-07-04 03:30:58 +05:30
|
|
|
import shutil
|
2009-11-10 09:03:10 +05:30
|
|
|
import uuid
|
2010-09-16 18:03:23 +05:30
|
|
|
import logging
|
|
|
|
LOG = logging.getLogger(".")
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# Gramps modules
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2010-01-14 09:38:04 +05:30
|
|
|
from gen.display.name import displayer as name_displayer
|
2007-10-08 22:11:39 +05:30
|
|
|
import gen.lib
|
2005-12-06 12:08:09 +05:30
|
|
|
import Errors
|
2009-07-04 03:30:58 +05:30
|
|
|
from GrampsLocale import codeset
|
2010-05-20 23:17:31 +05:30
|
|
|
from Date import Date
|
|
|
|
import DateHandler
|
2002-10-20 19:55:16 +05:30
|
|
|
|
2010-02-08 13:57:45 +05:30
|
|
|
from const import TEMP_DIR, USER_HOME, GRAMPS_UUID
|
|
|
|
import constfunc
|
2010-01-18 10:12:17 +05:30
|
|
|
from gen.ggettext import sgettext as _
|
2007-10-14 08:59:12 +05:30
|
|
|
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# Constants from config .ini keys
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2009-07-20 02:20:46 +05:30
|
|
|
# cache values; use refresh_constants() if they change
|
2007-10-14 08:59:12 +05:30
|
|
|
# Read the probably-alive tuning values from the user's configuration when
# running inside GRAMPS; fall back to hard-coded defaults when this module
# is used standalone (config is a GRAMPS-local module).
try:
    import config
    # Maximum age (years) a person is assumed to possibly reach.
    _MAX_AGE_PROB_ALIVE = config.get('behavior.max-age-prob-alive')
    # Maximum plausible age difference (years) between siblings.
    _MAX_SIB_AGE_DIFF = config.get('behavior.max-sib-age-diff')
    # Average number of years between two generations.
    _AVG_GENERATION_GAP = config.get('behavior.avg-generation-gap')
except ImportError:
    # Utils used as module not part of GRAMPS
    _MAX_AGE_PROB_ALIVE = 110
    _MAX_SIB_AGE_DIFF = 20
    _AVG_GENERATION_GAP = 20
|
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
2005-05-31 02:11:43 +05:30
|
|
|
# Integer to String mappings for constants
|
2002-10-20 19:55:16 +05:30
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2005-05-31 02:11:43 +05:30
|
|
|
# Translated display strings keyed by the gen.lib.Person gender constants.
gender = {
    gen.lib.Person.MALE    : _("male"),
    gen.lib.Person.FEMALE  : _("female"),
    gen.lib.Person.UNKNOWN : _("gender|unknown"),
    }
|
2008-01-18 01:54:44 +05:30
|
|
|
|
2006-03-23 02:49:32 +05:30
|
|
|
def format_gender(type):
    """Return the translated gender string for a gender type.

    ``type`` is indexed at [0] to obtain the integer code; unknown codes
    yield the translated string "Invalid".
    """
    code = type[0]
    return gender.get(code, _("Invalid"))
|
2005-05-31 02:11:43 +05:30
|
|
|
|
|
|
|
# Translated display strings keyed by the gen.lib.SourceRef confidence levels.
confidence = {
    gen.lib.SourceRef.CONF_VERY_HIGH : _("Very High"),
    gen.lib.SourceRef.CONF_HIGH      : _("High"),
    gen.lib.SourceRef.CONF_NORMAL    : _("Normal"),
    gen.lib.SourceRef.CONF_LOW       : _("Low"),
    gen.lib.SourceRef.CONF_VERY_LOW  : _("Very Low"),
    }
|
|
|
|
|
|
|
|
# Human-readable, translated descriptions keyed by the
# gen.lib.FamilyRelType relationship constants.
family_rel_descriptions = {
    gen.lib.FamilyRelType.MARRIED     : _("A legal or common-law relationship "
                                          "between a husband and wife"),
    gen.lib.FamilyRelType.UNMARRIED   : _("No legal or common-law relationship "
                                          "between man and woman"),
    gen.lib.FamilyRelType.CIVIL_UNION : _("An established relationship between "
                                          "members of the same sex"),
    gen.lib.FamilyRelType.UNKNOWN     : _("Unknown relationship between a man "
                                          "and woman"),
    gen.lib.FamilyRelType.CUSTOM      : _("An unspecified relationship between "
                                          "a man and woman"),
    }
|
|
|
|
|
2006-03-23 02:49:32 +05:30
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# modified flag
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2003-11-07 21:59:27 +05:30
|
|
|
# Module-level flag: nonzero once the navigation history has been invalidated.
_history_brokenFlag = 0


def history_broken():
    """Mark the navigation history as broken."""
    global _history_brokenFlag
    _history_brokenFlag = 1
|
|
|
|
|
2005-03-12 06:14:11 +05:30
|
|
|
# Message shown to the user when edits can only be recovered by Undo or by
# quitting without saving.
data_recover_msg = _('The data can only be recovered by Undo operation '
                     'or by quitting with abandoning changes.')
|
|
|
|
|
2005-12-06 12:08:09 +05:30
|
|
|
def fix_encoding(value):
    """Return *value* as a unicode object.

    Unicode input passes through unchanged.  Byte strings are first decoded
    with the default codec; if that fails, the locale's preferred encoding
    (falling back to UTF-8) is used.
    """
    if isinstance(value, unicode):
        return value
    try:
        return unicode(value)
    except:
        # NOTE(review): this local name shadows the module-level 'codeset'
        # imported from GrampsLocale -- appears deliberate (local fallback).
        try:
            codeset = locale.getpreferredencoding()
        except:
            codeset = "UTF-8"
        return unicode(value, codeset)
|
|
|
|
|
2005-12-13 07:37:16 +05:30
|
|
|
def xml_lang():
    """Return the current locale as an XML language code (e.g. 'en-US').

    Returns the empty string when no locale is set.
    """
    lang = locale.getlocale()[0]
    if lang is None:
        return ""
    return lang.replace('_', '-')
|
2005-12-13 07:37:16 +05:30
|
|
|
|
2003-04-04 11:18:25 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# force_unicode
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
|
|
|
|
def force_unicode(n):
    """Return a (lowercased, original) unicode pair for case-insensitive sorting."""
    if isinstance(n, unicode):
        return (n.lower(), n)
    converted = unicode(n)
    return (converted.lower(), converted)
|
2003-04-04 11:18:25 +05:30
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# Clears the modified flag. Should be called after data is saved.
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2003-11-07 21:59:27 +05:30
|
|
|
def clearHistory_broken():
    """Reset the broken-history flag; call after data has been saved."""
    global _history_brokenFlag
    _history_brokenFlag = 0


def wasHistory_broken():
    """Return a true value if the history was marked broken."""
    return _history_brokenFlag
|
|
|
|
|
2010-10-25 01:51:37 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# Preset a name with a name of family member
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
|
|
|
|
def preset_name(basepers, name, sibling=False):
    """Fill up name with all family common names of basepers.

    If sibling=True, pa/matronymics are retained.
    """
    primname = basepers.get_primary_name()
    inherited_types = (gen.lib.NameOriginType.PATRONYMIC,
                       gen.lib.NameOriginType.MATRONYMIC)
    surnlist = []
    prim = False
    for surn in primname.get_surname_list():
        # Pa/matronymics are not passed on to other family members,
        # except between siblings.
        if not sibling and surn.get_origintype().value in inherited_types:
            continue
        surnlist.append(gen.lib.Surname(source=surn))
        if surn.primary:
            prim = True
    if not surnlist:
        surnlist = [gen.lib.Surname()]
    name.set_surname_list(surnlist)
    # If the primary surname was filtered out, fall back to the first one.
    if not prim:
        name.set_primary_surname(0)
    name.set_family_nick_name(primname.get_family_nick_name())
    name.set_group_as(primname.get_group_as())
    name.set_sort_as(primname.get_sort_as())
|
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# Short hand function to return either the person's name, or an empty
|
|
|
|
# string if the person is None
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2003-10-12 09:56:00 +05:30
|
|
|
|
2007-04-02 04:07:10 +05:30
|
|
|
def family_name(family, db, noname=_("unknown")):
    """Builds a name for the family from the parents names"""
    father = db.get_person_from_handle(family.get_father_handle())
    mother = db.get_person_from_handle(family.get_mother_handle())
    if father and mother:
        return _("%(father)s and %(mother)s") % {
            "father" : name_displayer.display(father),
            "mother" : name_displayer.display(mother)}
    if father:
        return name_displayer.display(father)
    if mother:
        return name_displayer.display(mother)
    # No parents recorded at all.
    return noname
|
|
|
|
|
2007-04-02 04:07:10 +05:30
|
|
|
def family_upper_name(family, db):
    """Builds an upper-case name for the family from the parents names.

    :param family: the family object whose parents are combined
    :param db: database used to resolve the parent handles
    :returns: "FATHER and MOTHER", a single parent's upper name, or the
        translated "unknown" when neither parent is present.
    """
    father_handle = family.get_father_handle()
    mother_handle = family.get_mother_handle()
    father = db.get_person_from_handle(father_handle)
    mother = db.get_person_from_handle(mother_handle)
    if father and mother:
        fname = father.get_primary_name().get_upper_name()
        mname = mother.get_primary_name().get_upper_name()
        name = _("%(father)s and %(mother)s") % {
            'father' : fname,
            'mother' : mname
            }
    elif father:
        name = father.get_primary_name().get_upper_name()
    elif mother:
        name = mother.get_primary_name().get_upper_name()
    else:
        # BUG FIX: the original 'else' dereferenced mother unconditionally and
        # raised AttributeError for a family with no parents; mirror the
        # fallback used by family_name() instead.
        name = _("unknown")
    return name
|
2009-07-04 03:30:58 +05:30
|
|
|
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# String Encoding functions
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
|
|
|
|
def encodingdefs():
    """Documentation stub for the four module-level sort-key converters.

    Each converter produces a byte string usable as a locale-aware sort key.
    The key is sortable but NOT human readable -- never display it.
    When gtk is loaded the default python encoding is utf-8 rather than
    ascii, so the *_ongtk variants can skip some conversion work.

    conv_utf8_tosrtkey: convert a utf8 encoded string to sortkey usable string

    conv_unicode_tosrtkey: convert a unicode object to sortkey usable string

    conv_utf8_tosrtkey_ongtk: convert a utf8 encoded string to sortkey usable
        string when gtk is loaded or utf-8 is default python encoding

    conv_unicode_tosrtkey_ongtk: convert a unicode object to sortkey usable
        string when gtk is loaded or utf-8 is default python encoding
    """
    pass
|
|
|
|
|
2010-02-08 13:57:45 +05:30
|
|
|
if constfunc.win():
    # Python's default encoding is ascii, but the C library functions need
    # to receive the Windows codeset, so convert over to it.
    def conv_utf8_tosrtkey(x):
        return locale.strxfrm(x.decode("utf-8").encode(codeset))

    def conv_unicode_tosrtkey(x):
        return locale.strxfrm(x.encode(codeset))

    # When gtk is imported the python default encoding is utf-8,
    # so no need to specify it.
    def conv_utf8_tosrtkey_ongtk(x):
        return locale.strxfrm(unicode(x).encode(codeset))

    def conv_unicode_tosrtkey_ongtk(x):
        return locale.strxfrm(x.encode(codeset, 'replace'))
else:
    # On unix the C functions need to receive utf-8.  The default
    # conversion would use ascii, so be explicit about the encoding.
    def conv_utf8_tosrtkey(x):
        return locale.strxfrm(x)

    def conv_unicode_tosrtkey(x):
        return locale.strxfrm(x.encode("utf-8"))

    # When gtk is loaded, the default encoding (sys.getdefaultencoding)
    # is utf-8, so the default conversion already does the right thing.
    def conv_utf8_tosrtkey_ongtk(x):
        return locale.strxfrm(x)

    def conv_unicode_tosrtkey_ongtk(x):
        return locale.strxfrm(x)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
#
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2005-12-06 12:08:09 +05:30
|
|
|
def find_file(filename):
    """Return *filename* if it exists on disk, trying alternate encodings.

    The name is first tested as given; then it is re-encoded with the
    filesystem encoding, the locale's preferred encoding, UTF-8 and
    ISO-8859-1 and each candidate is tested.  Returns '' when nothing
    matches.
    """
    # Try the filename exactly as we got it.
    try:
        if os.path.isfile(filename):
            return filename
    except Exception:
        pass

    # Build the set of alternate encodings.
    encodings = set()
    for get_encoding in (sys.getfilesystemencoding,
                         locale.getpreferredencoding):
        try:
            # BUG FIX: the original added the *function object* itself
            # (encodings.add(enc)) instead of calling it, so these two
            # encodings were never actually tried.
            encodings.add(get_encoding())
        except Exception:
            pass
    encodings.add('UTF-8')
    encodings.add('ISO-8859-1')

    for enc in encodings:
        try:
            fname = filename.encode(enc)
            if os.path.isfile(fname):
                return fname
        except Exception:
            pass

    # not found
    return ''
|
|
|
|
|
|
|
|
def find_folder(filename):
    """Return *filename* if it is an existing directory, trying alternate
    encodings; return '' when nothing matches.
    """
    # Try the name exactly as we got it.
    try:
        if os.path.isdir(filename):
            return filename
    except:
        pass

    # Build the list of alternate encodings.
    try:
        candidates = [sys.getfilesystemencoding(),
                      locale.getpreferredencoding(),
                      'UTF-8', 'ISO-8859-1']
    except:
        candidates = [sys.getfilesystemencoding(), 'UTF-8', 'ISO-8859-1']
    for enc in set(candidates):
        try:
            encoded = filename.encode(enc)
            if os.path.isdir(encoded):
                return encoded
        except:
            pass

    # not found
    return ''
|
|
|
|
|
2010-10-09 18:03:47 +05:30
|
|
|
def get_unicode_path_from_file_chooser(path):
    """
    Return the Unicode version of a path string.

    :type path: str
    :param path: The path to be converted to Unicode
    :rtype: unicode
    :returns: The Unicode version of path.
    """
    # Only convert values of type 'str'; anything else passes through.
    if not isinstance(path, str):
        return path

    if constfunc.win():
        # On Windows the filechooser officially returns utf-8,
        # not the filesystem encoding.
        try:
            return unicode(path)
        except:
            LOG.warn("Problem encountered converting string: %s." % path)
            return unicode(path, sys.getfilesystemencoding(),
                           errors='replace')
    try:
        return unicode(path, sys.getfilesystemencoding())
    except:
        LOG.warn("Problem encountered converting string: %s." % path)
        return unicode(path, sys.getfilesystemencoding(), errors='replace')
|
|
|
|
|
2010-10-09 18:03:47 +05:30
|
|
|
def get_unicode_path_from_env_var(path):
    """
    Return the Unicode version of a path string.

    :type path: str
    :param path: The path to be converted to Unicode
    :rtype: unicode
    :returns: The Unicode version of path.
    """
    # Only convert values of type 'str'; anything else passes through.
    if not isinstance(path, str):
        return path

    if constfunc.win():
        # On Windows a path/filename taken from an environment variable is
        # in the filesystem encoding.
        try:
            return unicode(path, sys.getfilesystemencoding())
        except:
            LOG.warn("Problem encountered converting string: %s." % path)
            return unicode(path, sys.getfilesystemencoding(),
                           errors='replace')
    try:
        return unicode(path)
    except:
        LOG.warn("Problem encountered converting string: %s." % path)
        return unicode(path, sys.getfilesystemencoding(), errors='replace')
|
|
|
|
|
2007-11-16 22:15:45 +05:30
|
|
|
|
2003-01-10 10:51:32 +05:30
|
|
|
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# Iterate over ancestors.
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2005-01-12 04:11:15 +05:30
|
|
|
def for_each_ancestor(db, start, func, data):
    """
    Recursively iterate (breadth-first) over ancestors of
    people listed in start.
    Call func(data, pid) for the Id of each person encountered.
    Exit and return 1, as soon as func returns true.
    Return 0 otherwise.
    """
    todo = start
    visited = set()
    while todo:
        handle = todo.pop()
        person = db.get_person_from_handle(handle)
        # Don't process the same handle twice.  This can happen when there
        # is a cycle in the database, or when the initial list contains
        # X together with some of X's ancestors.
        if handle in visited:
            continue
        visited.add(handle)
        if func(data, handle):
            return 1
        for fam_handle in person.get_parent_family_handle_list():
            fam = db.get_family_from_handle(fam_handle)
            if not fam:
                continue
            for parent_handle in (fam.get_father_handle(),
                                  fam.get_mother_handle()):
                if parent_handle:
                    todo.append(parent_handle)
    return 0
|
2003-03-05 11:31:31 +05:30
|
|
|
|
|
|
|
def title(n):
    """Wrap *n* in Pango markup for a bold, larger heading."""
    markup = '<span weight="bold" size="larger">%s</span>'
    return markup % n
|
|
|
|
|
2007-09-10 08:33:46 +05:30
|
|
|
def set_title_label(xmlobj, t):
    """Set the 'title' label widget of *xmlobj* to bold, larger markup text."""
    label = xmlobj.get_widget('title')
    label.set_text('<span weight="bold" size="larger">%s</span>' % t)
    label.set_use_markup(True)
|
2003-03-06 11:42:51 +05:30
|
|
|
|
2006-04-24 03:48:01 +05:30
|
|
|
from warnings import warn


def set_titles(window, title, t, msg=None):
    """Deprecated no-op: window titles are handled by ManagedWindow."""
    warn('The Utils.set_titles is deprecated. Use ManagedWindow methods')
|
2003-05-10 11:47:07 +05:30
|
|
|
|
2003-11-25 23:15:34 +05:30
|
|
|
def search_for(name):
    """Return 1 when an executable *name* can be found on PATH, else 0.

    A quoted name has its quotes stripped; otherwise only the first
    whitespace-separated word is looked up.
    """
    if name.startswith('"'):
        name = name.split('"')[1]
    else:
        name = name.split()[0]

    on_windows = constfunc.win()
    separator = ';' if on_windows else ':'
    for directory in os.environ['PATH'].split(separator):
        fname = os.path.join(directory, name)
        if os.access(fname, os.X_OK) and not os.path.isdir(fname):
            return 1
    # On Windows also accept an absolute/relative executable path as given.
    if on_windows:
        if os.access(name, os.X_OK) and not os.path.isdir(name):
            return 1
    return 0
|
2004-04-25 10:18:02 +05:30
|
|
|
|
2004-06-30 09:36:10 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# create_id
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
# Module-level RNG seeded once at import time; shared by create_id().
rand = random.Random(time.time())


def create_id():
    """Return a new random hexadecimal id string.

    The id combines the current time (tenths of milliseconds) with a
    random integer, both formatted as zero-padded hex.
    """
    # PORTABILITY FIX: sys.maxint is Python-2-only (removed in Python 3);
    # sys.maxsize exists since Python 2.6 and is a drop-in replacement here.
    return "%08x%08x" % (int(time.time() * 10000),
                         rand.randint(0, sys.maxsize))
|
2004-10-08 09:29:55 +05:30
|
|
|
|
2009-11-10 09:03:10 +05:30
|
|
|
def create_uid(self, handle=None):
    """Return a new uppercase hexadecimal UID.

    When *handle* is given, the UID is derived deterministically from it
    (uuid5 under the GRAMPS namespace); otherwise a random uuid4 is used.

    NOTE(review): 'self' is unused on this module-level function -- looks
    like a copy-paste from a method; kept for caller compatibility.
    """
    if handle:
        source = uuid.uuid5(GRAMPS_UUID, handle)
    else:
        source = uuid.uuid4()
    return source.hex.upper()
|
|
|
|
|
2010-01-20 19:09:37 +05:30
|
|
|
class ProbablyAlive(object):
|
|
|
|
"""
|
|
|
|
An object to hold the parameters for considering someone alive.
|
|
|
|
"""
|
|
|
|
|
|
|
|
def __init__(self,
|
|
|
|
db,
|
2010-01-24 21:48:37 +05:30
|
|
|
max_sib_age_diff=None,
|
|
|
|
max_age_prob_alive=None,
|
|
|
|
avg_generation_gap=None):
|
2010-01-20 19:09:37 +05:30
|
|
|
self.db = db
|
2010-01-24 21:48:37 +05:30
|
|
|
if max_sib_age_diff is None:
|
|
|
|
max_sib_age_diff = _MAX_SIB_AGE_DIFF
|
|
|
|
if max_age_prob_alive is None:
|
|
|
|
max_age_prob_alive = _MAX_AGE_PROB_ALIVE
|
|
|
|
if avg_generation_gap is None:
|
|
|
|
avg_generation_gap = _AVG_GENERATION_GAP
|
2010-01-20 19:09:37 +05:30
|
|
|
self.MAX_SIB_AGE_DIFF = max_sib_age_diff
|
|
|
|
self.MAX_AGE_PROB_ALIVE = max_age_prob_alive
|
|
|
|
self.AVG_GENERATION_GAP = avg_generation_gap
|
|
|
|
|
|
|
|
def probably_alive_range(self, person, is_spouse=False):
|
2010-01-27 19:02:53 +05:30
|
|
|
# FIXME: some of these computed dates need to be a span. For
|
|
|
|
# example, if a person could be born +/- 20 yrs around
|
|
|
|
# a date then it should be a span, and yr_offset should
|
|
|
|
# deal with it as well ("between 1920 and 1930" + 10 =
|
|
|
|
# "between 1930 and 1940")
|
2010-01-20 19:09:37 +05:30
|
|
|
if person is None:
|
|
|
|
return (None, None, "", None)
|
|
|
|
birth_ref = person.get_birth_ref()
|
|
|
|
death_ref = person.get_death_ref()
|
|
|
|
death_date = None
|
|
|
|
birth_date = None
|
|
|
|
explain = ""
|
|
|
|
# If the recorded death year is before current year then
|
|
|
|
# things are simple.
|
|
|
|
if death_ref and death_ref.get_role().is_primary():
|
2010-03-08 08:29:49 +05:30
|
|
|
if death_ref:
|
|
|
|
death = self.db.get_event_from_handle(death_ref.ref)
|
|
|
|
if death and death.get_date_object().get_start_date() != gen.lib.Date.EMPTY:
|
|
|
|
death_date = death.get_date_object()
|
2010-01-20 19:09:37 +05:30
|
|
|
|
|
|
|
# Look for Cause Of Death, Burial or Cremation events.
|
|
|
|
# These are fairly good indications that someone's not alive.
|
|
|
|
if not death_date:
|
|
|
|
for ev_ref in person.get_primary_event_ref_list():
|
2010-03-08 08:29:49 +05:30
|
|
|
if ev_ref:
|
|
|
|
ev = self.db.get_event_from_handle(ev_ref.ref)
|
|
|
|
if ev and ev.type.is_death_fallback():
|
|
|
|
death_date = ev.get_date_object()
|
|
|
|
explain = _("death-related evidence")
|
2010-01-20 19:09:37 +05:30
|
|
|
|
|
|
|
# If they were born within X years before current year then
|
|
|
|
# assume they are alive (we already know they are not dead).
|
|
|
|
if not birth_date:
|
|
|
|
if birth_ref and birth_ref.get_role().is_primary():
|
|
|
|
birth = self.db.get_event_from_handle(birth_ref.ref)
|
|
|
|
if birth and birth.get_date_object().get_start_date() != gen.lib.Date.EMPTY:
|
|
|
|
birth_date = birth.get_date_object()
|
|
|
|
|
|
|
|
# Look for Baptism, etc events.
|
|
|
|
# These are fairly good indications that someone's birth.
|
|
|
|
if not birth_date:
|
|
|
|
for ev_ref in person.get_primary_event_ref_list():
|
|
|
|
ev = self.db.get_event_from_handle(ev_ref.ref)
|
|
|
|
if ev and ev.type.is_birth_fallback():
|
|
|
|
birth_date = ev.get_date_object()
|
|
|
|
explain = _("birth-related evidence")
|
|
|
|
|
|
|
|
if not birth_date and death_date:
|
|
|
|
# person died more than MAX after current year
|
|
|
|
birth_date = death_date.copy_offset_ymd(year=-self.MAX_AGE_PROB_ALIVE)
|
|
|
|
explain = _("death date")
|
|
|
|
|
|
|
|
if not death_date and birth_date:
|
|
|
|
# person died more than MAX after current year
|
|
|
|
death_date = birth_date.copy_offset_ymd(year=self.MAX_AGE_PROB_ALIVE)
|
|
|
|
explain = _("birth date")
|
|
|
|
|
|
|
|
if death_date and birth_date:
|
2010-01-27 11:36:35 +05:30
|
|
|
return (birth_date, death_date, explain, person) # direct self evidence
|
2010-01-20 19:09:37 +05:30
|
|
|
|
|
|
|
# Neither birth nor death events are available. Try looking
|
|
|
|
# at siblings. If a sibling was born more than X years past,
|
|
|
|
# or more than Z future, then probably this person is
|
|
|
|
# not alive. If the sibling died more than X years
|
|
|
|
# past, or more than X years future, then probably not alive.
|
|
|
|
|
|
|
|
family_list = person.get_parent_family_handle_list()
|
|
|
|
for family_handle in family_list:
|
|
|
|
family = self.db.get_family_from_handle(family_handle)
|
2010-05-30 17:44:26 +05:30
|
|
|
if family is None:
|
|
|
|
continue
|
2010-01-20 19:09:37 +05:30
|
|
|
for child_ref in family.get_child_ref_list():
|
|
|
|
child_handle = child_ref.ref
|
|
|
|
child = self.db.get_person_from_handle(child_handle)
|
2010-05-30 17:44:26 +05:30
|
|
|
if child is None:
|
|
|
|
continue
|
2010-01-20 19:09:37 +05:30
|
|
|
# Go through once looking for direct evidence:
|
|
|
|
for ev_ref in child.get_primary_event_ref_list():
|
|
|
|
ev = self.db.get_event_from_handle(ev_ref.ref)
|
|
|
|
if ev and ev.type.is_birth():
|
|
|
|
dobj = ev.get_date_object()
|
|
|
|
if dobj.get_start_date() != gen.lib.Date.EMPTY:
|
|
|
|
# if sibling birth date too far away, then not alive:
|
|
|
|
year = dobj.get_year()
|
|
|
|
if year != 0:
|
|
|
|
# sibling birth date
|
|
|
|
return (gen.lib.Date().copy_ymd(year - self.MAX_SIB_AGE_DIFF),
|
2010-01-27 19:02:53 +05:30
|
|
|
gen.lib.Date().copy_ymd(year - self.MAX_SIB_AGE_DIFF + self.MAX_AGE_PROB_ALIVE),
|
2010-01-20 19:09:37 +05:30
|
|
|
_("sibling birth date"),
|
|
|
|
child)
|
|
|
|
elif ev and ev.type.is_death():
|
|
|
|
dobj = ev.get_date_object()
|
|
|
|
if dobj.get_start_date() != gen.lib.Date.EMPTY:
|
|
|
|
# if sibling death date too far away, then not alive:
|
|
|
|
year = dobj.get_year()
|
|
|
|
if year != 0:
|
|
|
|
# sibling death date
|
|
|
|
return (gen.lib.Date().copy_ymd(year - self.MAX_SIB_AGE_DIFF - self.MAX_AGE_PROB_ALIVE),
|
2010-01-27 19:02:53 +05:30
|
|
|
gen.lib.Date().copy_ymd(year - self.MAX_SIB_AGE_DIFF - self.MAX_AGE_PROB_ALIVE
|
|
|
|
+ self.MAX_AGE_PROB_ALIVE),
|
2010-01-20 19:09:37 +05:30
|
|
|
_("sibling death date"),
|
|
|
|
child)
|
|
|
|
# Go through again looking for fallback:
|
|
|
|
for ev_ref in child.get_primary_event_ref_list():
|
|
|
|
ev = self.db.get_event_from_handle(ev_ref.ref)
|
|
|
|
if ev and ev.type.is_birth_fallback():
|
|
|
|
dobj = ev.get_date_object()
|
|
|
|
if dobj.get_start_date() != gen.lib.Date.EMPTY:
|
|
|
|
# if sibling birth date too far away, then not alive:
|
|
|
|
year = dobj.get_year()
|
|
|
|
if year != 0:
|
|
|
|
# sibling birth date
|
|
|
|
return (gen.lib.Date().copy_ymd(year - self.MAX_SIB_AGE_DIFF),
|
2010-01-27 19:02:53 +05:30
|
|
|
gen.lib.Date().copy_ymd(year - self.MAX_SIB_AGE_DIFF + self.MAX_AGE_PROB_ALIVE),
|
2010-01-20 19:09:37 +05:30
|
|
|
_("sibling birth-related date"),
|
|
|
|
child)
|
|
|
|
elif ev and ev.type.is_death_fallback():
|
|
|
|
dobj = ev.get_date_object()
|
|
|
|
if dobj.get_start_date() != gen.lib.Date.EMPTY:
|
|
|
|
# if sibling death date too far away, then not alive:
|
|
|
|
year = dobj.get_year()
|
|
|
|
if year != 0:
|
|
|
|
# sibling death date
|
|
|
|
return (gen.lib.Date().copy_ymd(year - self.MAX_SIB_AGE_DIFF - self.MAX_AGE_PROB_ALIVE),
|
2010-01-27 19:02:53 +05:30
|
|
|
gen.lib.Date().copy_ymd(year - self.MAX_SIB_AGE_DIFF - self.MAX_AGE_PROB_ALIVE + self.MAX_AGE_PROB_ALIVE),
|
2010-01-20 19:09:37 +05:30
|
|
|
_("sibling death-related date"),
|
|
|
|
child)
|
|
|
|
|
|
|
|
if not is_spouse: # if you are not in recursion, let's recurse:
|
|
|
|
for family_handle in person.get_family_handle_list():
|
|
|
|
family = self.db.get_family_from_handle(family_handle)
|
|
|
|
if family:
|
|
|
|
mother_handle = family.get_mother_handle()
|
|
|
|
father_handle = family.get_father_handle()
|
|
|
|
if mother_handle == person.handle and father_handle:
|
|
|
|
father = self.db.get_person_from_handle(father_handle)
|
|
|
|
date1, date2, explain, other = self.probably_alive_range(father, is_spouse=True)
|
|
|
|
if date1 and date2:
|
|
|
|
return date1, date2, _("a spouse, ") + explain, other
|
|
|
|
elif father_handle == person.handle and mother_handle:
|
|
|
|
mother = self.db.get_person_from_handle(mother_handle)
|
|
|
|
date1, date2, explain, other = self.probably_alive_range(mother, is_spouse=True)
|
|
|
|
if date1 and date2:
|
|
|
|
return date1, date2, _("a spouse, ") + explain, other
|
2010-01-27 11:36:35 +05:30
|
|
|
# Let's check the family events and see if we find something
|
|
|
|
for ref in family.get_event_ref_list():
|
|
|
|
if ref:
|
|
|
|
event = self.db.get_event_from_handle(ref.ref)
|
|
|
|
if event:
|
|
|
|
date = event.get_date_object()
|
|
|
|
year = date.get_year()
|
|
|
|
if year != 0:
|
|
|
|
other = None
|
|
|
|
if person.handle == mother_handle and father_handle:
|
|
|
|
other = self.db.get_person_from_handle(father_handle)
|
|
|
|
elif person.handle == father_handle and mother_handle:
|
|
|
|
other = self.db.get_person_from_handle(mother_handle)
|
|
|
|
return (gen.lib.Date().copy_ymd(year - self.AVG_GENERATION_GAP),
|
2010-01-27 19:02:53 +05:30
|
|
|
gen.lib.Date().copy_ymd(year - self.AVG_GENERATION_GAP +
|
|
|
|
self.MAX_AGE_PROB_ALIVE),
|
|
|
|
|
2010-01-27 11:36:35 +05:30
|
|
|
_("event with spouse"), other)
|
2010-01-20 19:09:37 +05:30
|
|
|
|
|
|
|
# Try looking for descendants that were born more than a lifespan
|
|
|
|
# ago.
|
|
|
|
|
|
|
|
def descendants_too_old (person, years):
|
|
|
|
for family_handle in person.get_family_handle_list():
|
|
|
|
family = self.db.get_family_from_handle(family_handle)
|
2010-04-02 05:06:21 +05:30
|
|
|
if not family:
|
|
|
|
# can happen with LivingProxyDb(PrivateProxyDb(db))
|
|
|
|
continue
|
2010-01-20 19:09:37 +05:30
|
|
|
for child_ref in family.get_child_ref_list():
|
|
|
|
child_handle = child_ref.ref
|
|
|
|
child = self.db.get_person_from_handle(child_handle)
|
|
|
|
child_birth_ref = child.get_birth_ref()
|
|
|
|
if child_birth_ref:
|
|
|
|
child_birth = self.db.get_event_from_handle(child_birth_ref.ref)
|
|
|
|
dobj = child_birth.get_date_object()
|
|
|
|
if dobj.get_start_date() != gen.lib.Date.EMPTY:
|
|
|
|
d = gen.lib.Date(dobj)
|
|
|
|
val = d.get_start_date()
|
|
|
|
val = d.get_year() - years
|
|
|
|
d.set_year(val)
|
|
|
|
return (d, d.copy_offset_ymd(self.MAX_AGE_PROB_ALIVE),
|
|
|
|
_("descendent birth date"),
|
|
|
|
child)
|
|
|
|
child_death_ref = child.get_death_ref()
|
|
|
|
if child_death_ref:
|
|
|
|
child_death = self.db.get_event_from_handle(child_death_ref.ref)
|
|
|
|
dobj = child_death.get_date_object()
|
|
|
|
if dobj.get_start_date() != gen.lib.Date.EMPTY:
|
|
|
|
return (dobj.copy_offset_ymd(- self.AVG_GENERATION_GAP),
|
|
|
|
dobj.copy_offset_ymd(- self.AVG_GENERATION_GAP + self.MAX_AGE_PROB_ALIVE),
|
|
|
|
_("descendent death date"),
|
|
|
|
child)
|
|
|
|
date1, date2, explain, other = descendants_too_old (child, years + self.AVG_GENERATION_GAP)
|
|
|
|
if date1 and date2:
|
|
|
|
return date1, date2, explain, other
|
|
|
|
# Check fallback data:
|
|
|
|
for ev_ref in child.get_primary_event_ref_list():
|
|
|
|
ev = self.db.get_event_from_handle(ev_ref.ref)
|
|
|
|
if ev and ev.type.is_birth_fallback():
|
|
|
|
dobj = ev.get_date_object()
|
|
|
|
if dobj.get_start_date() != gen.lib.Date.EMPTY:
|
|
|
|
d = gen.lib.Date(dobj)
|
|
|
|
val = d.get_start_date()
|
|
|
|
val = d.get_year() - years
|
|
|
|
d.set_year(val)
|
|
|
|
return (d, d.copy_offset_ymd(self.MAX_AGE_PROB_ALIVE),
|
|
|
|
_("descendent birth-related date"),
|
|
|
|
child)
|
|
|
|
|
|
|
|
elif ev and ev.type.is_death_fallback():
|
|
|
|
dobj = ev.get_date_object()
|
|
|
|
if dobj.get_start_date() != gen.lib.Date.EMPTY:
|
|
|
|
return (dobj.copy_offset_ymd(- self.AVG_GENERATION_GAP),
|
|
|
|
dobj.copy_offset_ymd(- self.AVG_GENERATION_GAP + self.MAX_AGE_PROB_ALIVE),
|
|
|
|
_("descendent death-related date"),
|
|
|
|
child)
|
|
|
|
|
|
|
|
return (None, None, "", None)
|
|
|
|
|
|
|
|
# If there are descendants that are too old for the person to have
|
|
|
|
# been alive in the current year then they must be dead.
|
|
|
|
|
|
|
|
date1, date2, explain, other = None, None, "", None
|
|
|
|
try:
|
|
|
|
date1, date2, explain, other = descendants_too_old(person, self.AVG_GENERATION_GAP)
|
|
|
|
except RuntimeError:
|
|
|
|
raise Errors.DatabaseError(
|
|
|
|
_("Database error: %s is defined as his or her own ancestor") %
|
|
|
|
name_displayer.display(person))
|
|
|
|
|
|
|
|
if date1 and date2:
|
|
|
|
return (date1, date2, explain, other)
|
|
|
|
|
|
|
|
def ancestors_too_old(person, year):
|
|
|
|
family_handle = person.get_main_parents_family_handle()
|
|
|
|
if family_handle:
|
|
|
|
family = self.db.get_family_from_handle(family_handle)
|
2010-04-02 05:06:21 +05:30
|
|
|
if not family:
|
|
|
|
# can happen with LivingProxyDb(PrivateProxyDb(db))
|
|
|
|
return (None, None, "", None)
|
2010-01-20 19:09:37 +05:30
|
|
|
father_handle = family.get_father_handle()
|
|
|
|
if father_handle:
|
|
|
|
father = self.db.get_person_from_handle(father_handle)
|
|
|
|
father_birth_ref = father.get_birth_ref()
|
|
|
|
if father_birth_ref and father_birth_ref.get_role().is_primary():
|
|
|
|
father_birth = self.db.get_event_from_handle(
|
|
|
|
father_birth_ref.ref)
|
|
|
|
dobj = father_birth.get_date_object()
|
|
|
|
if dobj.get_start_date() != gen.lib.Date.EMPTY:
|
|
|
|
return (dobj.copy_offset_ymd(- year),
|
|
|
|
dobj.copy_offset_ymd(- year + self.MAX_AGE_PROB_ALIVE),
|
|
|
|
_("ancestor birth date"),
|
|
|
|
father)
|
|
|
|
father_death_ref = father.get_death_ref()
|
|
|
|
if father_death_ref and father_death_ref.get_role().is_primary():
|
|
|
|
father_death = self.db.get_event_from_handle(
|
|
|
|
father_death_ref.ref)
|
|
|
|
dobj = father_death.get_date_object()
|
|
|
|
if dobj.get_start_date() != gen.lib.Date.EMPTY:
|
|
|
|
return (dobj.copy_offset_ymd(- year - self.MAX_AGE_PROB_ALIVE),
|
|
|
|
dobj.copy_offset_ymd(- year - self.MAX_AGE_PROB_ALIVE + self.MAX_AGE_PROB_ALIVE),
|
|
|
|
_("ancestor death date"),
|
|
|
|
father)
|
|
|
|
|
|
|
|
# Check fallback data:
|
|
|
|
for ev_ref in father.get_primary_event_ref_list():
|
|
|
|
ev = self.db.get_event_from_handle(ev_ref.ref)
|
|
|
|
if ev and ev.type.is_birth_fallback():
|
|
|
|
dobj = ev.get_date_object()
|
|
|
|
if dobj.get_start_date() != gen.lib.Date.EMPTY:
|
|
|
|
return (dobj.copy_offset_ymd(- year),
|
|
|
|
dobj.copy_offset_ymd(- year + self.MAX_AGE_PROB_ALIVE),
|
|
|
|
_("ancestor birth-related date"),
|
|
|
|
father)
|
|
|
|
|
|
|
|
elif ev and ev.type.is_death_fallback():
|
|
|
|
dobj = ev.get_date_object()
|
|
|
|
if dobj.get_start_date() != gen.lib.Date.EMPTY:
|
|
|
|
return (dobj.copy_offset_ymd(- year - self.MAX_AGE_PROB_ALIVE),
|
|
|
|
dobj.copy_offset_ymd(- year - self.MAX_AGE_PROB_ALIVE + self.MAX_AGE_PROB_ALIVE),
|
|
|
|
_("ancestor death-related date"),
|
|
|
|
father)
|
|
|
|
|
|
|
|
date1, date2, explain, other = ancestors_too_old (father, year - self.AVG_GENERATION_GAP)
|
|
|
|
if date1 and date2:
|
|
|
|
return date1, date2, explain, other
|
|
|
|
|
|
|
|
mother_handle = family.get_mother_handle()
|
|
|
|
if mother_handle:
|
|
|
|
mother = self.db.get_person_from_handle(mother_handle)
|
|
|
|
mother_birth_ref = mother.get_birth_ref()
|
|
|
|
if mother_birth_ref and mother_birth_ref.get_role().is_primary():
|
|
|
|
mother_birth = self.db.get_event_from_handle(mother_birth_ref.ref)
|
|
|
|
dobj = mother_birth.get_date_object()
|
|
|
|
if dobj.get_start_date() != gen.lib.Date.EMPTY:
|
|
|
|
return (dobj.copy_offset_ymd(- year),
|
|
|
|
dobj.copy_offset_ymd(- year + self.MAX_AGE_PROB_ALIVE),
|
|
|
|
_("ancestor birth date"),
|
|
|
|
mother)
|
|
|
|
mother_death_ref = mother.get_death_ref()
|
|
|
|
if mother_death_ref and mother_death_ref.get_role().is_primary():
|
|
|
|
mother_death = self.db.get_event_from_handle(
|
|
|
|
mother_death_ref.ref)
|
|
|
|
dobj = mother_death.get_date_object()
|
|
|
|
if dobj.get_start_date() != gen.lib.Date.EMPTY:
|
|
|
|
return (dobj.copy_offset_ymd(- year - self.MAX_AGE_PROB_ALIVE),
|
|
|
|
dobj.copy_offset_ymd(- year - self.MAX_AGE_PROB_ALIVE + self.MAX_AGE_PROB_ALIVE),
|
|
|
|
_("ancestor death date"),
|
|
|
|
mother)
|
|
|
|
|
|
|
|
# Check fallback data:
|
|
|
|
for ev_ref in mother.get_primary_event_ref_list():
|
|
|
|
ev = self.db.get_event_from_handle(ev_ref.ref)
|
|
|
|
if ev and ev.type.is_birth_fallback():
|
|
|
|
dobj = ev.get_date_object()
|
|
|
|
if dobj.get_start_date() != gen.lib.Date.EMPTY:
|
|
|
|
return (dobj.copy_offset_ymd(- year),
|
|
|
|
dobj.copy_offset_ymd(- year + self.MAX_AGE_PROB_ALIVE),
|
|
|
|
_("ancestor birth-related date"),
|
|
|
|
mother)
|
|
|
|
|
|
|
|
elif ev and ev.type.is_death_fallback():
|
|
|
|
dobj = ev.get_date_object()
|
|
|
|
if dobj.get_start_date() != gen.lib.Date.EMPTY:
|
|
|
|
return (dobj.copy_offset_ymd(- year - self.MAX_AGE_PROB_ALIVE),
|
|
|
|
dobj.copy_offset_ymd(- year - self.MAX_AGE_PROB_ALIVE + self.MAX_AGE_PROB_ALIVE),
|
|
|
|
_("ancestor death-related date"),
|
|
|
|
mother)
|
|
|
|
|
|
|
|
date1, date2, explain, other = ancestors_too_old (mother, year - self.AVG_GENERATION_GAP)
|
|
|
|
if date1 and date2:
|
|
|
|
return (date1, date2, explain, other)
|
|
|
|
|
|
|
|
return (None, None, "", None)
|
|
|
|
|
|
|
|
# If there are ancestors that would be too old in the current year
|
|
|
|
# then assume our person must be dead too.
|
|
|
|
date1, date2, explain, other = ancestors_too_old (person, - self.AVG_GENERATION_GAP)
|
|
|
|
if date1 and date2:
|
|
|
|
return (date1, date2, explain, other)
|
|
|
|
|
|
|
|
# If we can't find any reason to believe that they are dead we
|
|
|
|
# must assume they are alive.
|
|
|
|
|
|
|
|
return (None, None, "", None)
|
|
|
|
|
2007-11-21 20:19:50 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# probably_alive
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2010-01-20 19:09:37 +05:30
|
|
|
def probably_alive(person, db,
                   current_date=None,
                   limit=0,
                   max_sib_age_diff=None,
                   max_age_prob_alive=None,
                   avg_generation_gap=None,
                   return_range=False):
    """
    Return True if the person may be alive on current_date.

    This works by a process of elimination. If we can't find a good
    reason to believe that someone is dead then we assume they must
    be alive.

    :param current_date: a date object that is not estimated or modified
                         (defaults to today)
    :param limit: number of years to check beyond death_date
    :param max_sib_age_diff: maximum sibling age difference, in years
    :param max_age_prob_alive: maximum age of a person, in years
    :param avg_generation_gap: average generation gap, in years
    :param return_range: if True, return the 5-tuple
                         (alive, birth, death, explain, relative)
                         instead of just the boolean
    """
    # First, get the real database to use all people
    # for determining alive status:
    basedb = db.basedb
    # Now, we create a wrapper for doing work:
    pb = ProbablyAlive(basedb, max_sib_age_diff,
                       max_age_prob_alive,
                       avg_generation_gap)
    birth, death, explain, relative = pb.probably_alive_range(person)
    if current_date is None:
        current_date = gen.lib.date.Today()
    if not birth or not death:
        # no evidence, must consider alive
        # BUG FIX: previously this path returned the 5-tuple
        # unconditionally, even when the caller asked for a plain
        # boolean (return_range=False), breaking boolean callers.
        if return_range:
            return (True, None, None, _("no evidence"), None)
        return True
    # must have dates from here:
    if limit:
        death += limit # add these years to death
    # Finally, check to see if current_date is between dates
    result = (current_date.match(birth, ">=") and
              current_date.match(death, "<="))
    if return_range:
        return (result, birth, death, explain, relative)
    else:
        return result
|
2010-01-20 19:09:37 +05:30
|
|
|
|
|
|
|
def probably_alive_range(person, db,
                         max_sib_age_diff=None,
                         max_age_prob_alive=None,
                         avg_generation_gap=None):
    """
    Computes estimated birth and death dates.
    Returns: (birth_date, death_date, explain_text, related_person)
    """
    # Unwrap any proxy layers first, so that the full set of people in
    # the real database is available for determining alive status:
    from gen.proxy.proxybase import ProxyDbBase
    real_db = db
    while isinstance(real_db, ProxyDbBase):
        real_db = real_db.db
    # Delegate the actual estimation to the worker class:
    worker = ProbablyAlive(real_db, max_sib_age_diff,
                           max_age_prob_alive, avg_generation_gap)
    return worker.probably_alive_range(person)
|
2010-01-20 19:09:37 +05:30
|
|
|
|
2005-03-12 02:35:46 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
2010-01-24 21:48:37 +05:30
|
|
|
# Other util functions
|
2005-03-12 02:35:46 +05:30
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2007-03-27 23:58:26 +05:30
|
|
|
def get_referents(handle, db, primary_objects):
    """ Find objects that refer to an object.

    This function is the base for other get_<object>_referents functions.
    It returns a tuple with one list per requested primary class; each
    list holds the handles of referring objects of that class.

    """
    # A single pass through the reference map grabs every backlink
    backlinks = list(db.find_backlink_handles(handle))

    # Partition the backlinks into one handle-list per primary class
    return tuple(
        [obj_handle for (obj_type, obj_handle) in backlinks
         if obj_type == primary]
        for primary in primary_objects)
|
2005-03-12 02:35:46 +05:30
|
|
|
|
2007-03-27 23:58:26 +05:30
|
|
|
def get_source_referents(source_handle, db):
    """ Find objects that refer the source.

    This function finds all primary objects that refer (directly or through
    secondary child-objects) to a given source handle in a given database.

    """
    # every primary class can carry source references
    return get_referents(source_handle, db,
                         ('Person', 'Family', 'Event', 'Place',
                          'Source', 'MediaObject', 'Repository'))
|
2005-03-12 02:35:46 +05:30
|
|
|
|
2007-09-10 08:33:46 +05:30
|
|
|
def get_media_referents(media_handle, db):
    """ Find objects that refer the media object.

    This function finds all primary objects that refer
    to a given media handle in a given database.

    """
    # only these primary classes can carry media references
    return get_referents(media_handle, db,
                         ('Person', 'Family', 'Event', 'Place', 'Source'))
|
2005-03-12 06:14:11 +05:30
|
|
|
|
2007-03-27 23:58:26 +05:30
|
|
|
def get_note_referents(note_handle, db):
    """ Find objects that refer a note object.

    This function finds all primary objects that refer
    to a given note handle in a given database.

    """
    # every primary class can carry note references
    return get_referents(note_handle, db,
                         ('Person', 'Family', 'Event', 'Place',
                          'Source', 'MediaObject', 'Repository'))
|
2005-03-12 06:14:11 +05:30
|
|
|
|
2004-07-09 23:49:47 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
#
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2006-05-10 04:45:38 +05:30
|
|
|
# template for auto-numbered default file names: <folder><sep>Untitled_<n>.<ext>
_NEW_NAME_PATTERN = '%s%sUntitled_%d.%s'

def get_new_filename(ext, folder='~/'):
    """Return the first free 'Untitled_<n>.<ext>' path inside folder.

    Counts upward from 1 until a name is found that does not already
    exist as a file; '~' in folder is expanded to the user's home.
    """
    index = 1
    while True:
        candidate = os.path.expanduser(
            _NEW_NAME_PATTERN % (folder, os.path.sep, index, ext))
        if not os.path.isfile(candidate):
            return candidate
        index += 1
|
2004-12-22 07:26:37 +05:30
|
|
|
|
2007-11-26 11:11:04 +05:30
|
|
|
def get_empty_tempdir(dirname):
    """Create and return TEMP_DIR/dirname as a guaranteed-empty directory.

    Any existing directory tree at that location is removed first, and
    intervening directories are created as required.  Fails if a plain
    _file_ by that name already exists, or for inadequate permissions to
    delete dir/files or create dir(s).
    """
    target = os.path.join(TEMP_DIR, dirname)
    if os.path.isdir(target):
        # wipe previous contents so the caller starts from a clean slate
        shutil.rmtree(target)
    os.makedirs(target)
    # normalize to a unicode path for downstream consumers
    return get_unicode_path_from_env_var(target)
|
|
|
|
|
|
|
|
def rm_tempdir(path):
    """Remove a tempdir created with get_empty_tempdir"""
    # only delete directories that really live under TEMP_DIR,
    # as a guard against removing arbitrary trees
    if os.path.isdir(path) and path.startswith(TEMP_DIR):
        shutil.rmtree(path)
|
|
|
|
|
2007-09-05 08:32:50 +05:30
|
|
|
def cast_to_bool(val):
    """Return True only when val is the canonical string form of True."""
    return val == str(True)
|
|
|
|
|
2004-12-22 07:26:37 +05:30
|
|
|
def get_type_converter(val):
    """
    Return a callable that parses strings into the type of val.

    Returns None implicitly for unsupported types (e.g. dict).
    NOTE: relies on the Python 2 `unicode` builtin.
    """
    kind = type(val)
    if kind in (str, unicode):
        return unicode
    if kind is int:
        return int
    if kind is float:
        return float
    if kind is bool:
        return cast_to_bool
    if kind in (list, tuple):
        return list
|
|
|
|
|
|
|
|
def type_name(val):
    """
    Return the name of the type of val.

    Only numbers and strings are supported.
    The rest becomes strings (unicode).
    """
    kind = type(val)
    if kind is int:
        return 'int'
    if kind is float:
        return 'float'
    if kind is bool:
        return 'bool'
    # strings and everything else are reported as unicode
    return 'unicode'
|
|
|
|
|
|
|
|
def get_type_converter_by_name(val_str):
    """
    Return function that converts strings into the type given by val_str.

    Only numbers and strings are supported.
    The rest becomes strings (unicode).
    """
    if val_str == 'int':
        return int
    if val_str == 'float':
        return float
    if val_str == 'bool':
        return cast_to_bool
    # 'str', 'unicode' and anything unknown all convert to unicode
    return unicode
|
2005-07-09 01:54:54 +05:30
|
|
|
|
2006-02-05 04:59:44 +05:30
|
|
|
def relative_path(original, base):
    """
    Calculate the relative path from base to original, with base a directory,
    and original an absolute path
    On problems, original is returned unchanged
    """
    if not os.path.isdir(base):
        return original
    # original and base must be absolute paths
    if not os.path.isabs(base):
        return original
    if not os.path.isabs(original):
        return original
    original = os.path.normpath(original)
    base = os.path.normpath(base)

    # If the db_dir and obj_dir are on different drives (win only)
    # then there cannot be a relative path. Return original obj_path
    (base_drive, base) = os.path.splitdrive(base)
    (orig_drive, orig_name) = os.path.splitdrive(original)
    if base_drive.upper() != orig_drive.upper():
        return original

    # Starting from the filepath root, work out how much of the filepath is
    # shared by base and target.
    # Drop empty components so '/home/person' and 'c:/home/person' both
    # give ['home', 'person']  (was filter(None, ...))
    base_list = [part for part in base.split(os.sep) if part]
    target_list = [part for part in orig_name.split(os.sep) if part]
    i = -1      # stays -1 when one of the lists is empty
    for i in range(min(len(base_list), len(target_list))):
        # FIX: original used the deprecated '<>' operator (invalid in
        # Python 3); '!=' is equivalent in Python 2
        if base_list[i] != target_list[i]:
            break
    else:
        # if break did not happen we are here at end, and add 1.
        i += 1
    # climb out of the unshared part of base, then descend into target
    rel_list = [os.pardir] * (len(base_list) - i) + target_list[i:]
    return os.path.join(*rel_list)
|
|
|
|
|
2008-02-12 03:57:24 +05:30
|
|
|
def media_path(db):
    """
    Given a database, return the mediapath to use as basedir for media
    """
    mpath = db.get_mediapath()
    if mpath is None:
        # no mediapath configured: fall back to the user's home directory
        return USER_HOME
    return mpath
|
|
|
|
|
|
|
|
def media_path_full(db, filename):
    """
    Given a database and a filename of a media, return the media filename
    is full form, eg 'graves/tomb.png' becomes '/home/me/genea/graves/tomb.png
    """
    # absolute paths pass through untouched
    if os.path.isabs(filename):
        return filename
    return os.path.join(media_path(db), filename)
|
2006-03-30 08:54:04 +05:30
|
|
|
|
2007-09-10 08:33:46 +05:30
|
|
|
def profile(func, *args):
    """Run func(*args) under the hotshot profiler and print a report.

    Profile data is written to 'mystats.profile' in the current working
    directory; the top 100 entries sorted by time and call count, and the
    top 100 callers, are printed to stdout.
    NOTE: hotshot (and the print-statement syntax here) is Python 2 only.
    """
    import hotshot.stats

    prf = hotshot.Profile('mystats.profile')
    print "Start"
    prf.runcall(func, *args)
    print "Finished"
    # close flushes the profile data to disk before we reload it
    prf.close()
    print "Loading profile"
    stats = hotshot.stats.load('mystats.profile')
    print "done"
    stats.strip_dirs()
    stats.sort_stats('time', 'calls')
    stats.print_stats(100)
    stats.print_callers(100)
|
2007-01-19 10:46:41 +05:30
|
|
|
|
2007-12-12 09:29:18 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# Keyword translation interface
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
|
|
|
|
# Lookup tables for name-display format keywords: each entry maps a
# keyword (and its one-letter %-code) to its translated standard and
# upper-case display forms.
# Entry layout: (keyword, code, translated standard, translated upper)
KEYWORDS = [("title", "t", _("Person|Title"), _("Person|TITLE")),
            ("given", "f", _("Given"), _("GIVEN")),
            ("surname", "l", _("Surname"), _("SURNAME")),
            ("call", "c", _("Name|Call"), _("Name|CALL")),
            ("common", "x", _("Name|Common"), _("Name|COMMON")),
            ("initials", "i", _("Initials"), _("INITIALS")),
            ("suffix", "s", _("Suffix"), _("SUFFIX")),
            ("rawsurnames", "q", _("Rawsurnames"), _("RAWSURNAMES")),
            ("patronymic", "y", _("Patronymic"), _("PATRONYMIC")),
            ("notpatronymic", "o", _("Notpatronymic"),_("NOTPATRONYMIC")),
            ("primary", "m", _("Primary"), _("PRIMARY")),
            ("prefix", "p", _("Prefix"), _("PREFIX")),
            ("nickname", "n", _("Nickname"), _("NICKNAME")),
            ("familynick", "g", _("Familynick"), _("FAMILYNICK")),
            ]
# keyword / KEYWORD / "%c" / "%C"  ->  translated form
KEY_TO_TRANS = {}
# translated form (standard, lowercased standard, upper)  ->  keyword
TRANS_TO_KEY = {}
for (key, code, standard, upper) in KEYWORDS:
    KEY_TO_TRANS[key] = standard
    KEY_TO_TRANS[key.upper()] = upper
    KEY_TO_TRANS["%" + ("%s" % code)] = standard
    KEY_TO_TRANS["%" + ("%s" % code.upper())] = upper
    # lowercase form also maps back, so reverse lookup is case-tolerant
    TRANS_TO_KEY[standard.lower()] = key
    TRANS_TO_KEY[standard] = key
    TRANS_TO_KEY[upper] = key.upper()
|
2007-12-12 09:29:18 +05:30
|
|
|
|
|
|
|
def get_translation_from_keyword(keyword):
    """ Return the translation of keyword """
    try:
        return KEY_TO_TRANS[keyword]
    except KeyError:
        # unknown keywords pass through unchanged
        return keyword
|
|
|
|
|
|
|
|
def get_keyword_from_translation(word):
    """ Return the keyword of translation """
    try:
        return TRANS_TO_KEY[word]
    except KeyError:
        # unknown translations pass through unchanged
        return word
|
|
|
|
|
|
|
|
def get_keywords():
    """ Get all keywords, longest to shortest """
    # stable sort keeps ties in original order, matching the old
    # -cmp(len(a), len(b)) comparator
    return sorted(KEY_TO_TRANS.keys(), key=len, reverse=True)
|
|
|
|
|
|
|
|
def get_translations():
    """ Get all translations, longest to shortest """
    # stable sort keeps ties in original order, matching the old
    # -cmp(len(a), len(b)) comparator
    return sorted(TRANS_TO_KEY.keys(), key=len, reverse=True)
|
2009-06-19 20:53:58 +05:30
|
|
|
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
2009-07-20 02:20:46 +05:30
|
|
|
# Config-based functions
|
2009-06-19 20:53:58 +05:30
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
def get_researcher():
    """
    Return a new database owner populated with the researcher defaults
    from the config file.
    """
    owner = gen.lib.Researcher()
    owner.set_name(config.get('researcher.researcher-name'))
    owner.set_address(config.get('researcher.researcher-addr'))
    owner.set_locality(config.get('researcher.researcher-locality'))
    owner.set_city(config.get('researcher.researcher-city'))
    owner.set_state(config.get('researcher.researcher-state'))
    owner.set_country(config.get('researcher.researcher-country'))
    owner.set_postal_code(config.get('researcher.researcher-postal'))
    owner.set_phone(config.get('researcher.researcher-phone'))
    owner.set_email(config.get('researcher.researcher-email'))
    return owner
|
2009-07-20 02:20:46 +05:30
|
|
|
|
|
|
|
def update_constants():
    """
    Used to update the constants that are cached in this module.
    """
    import config
    global _MAX_AGE_PROB_ALIVE, _MAX_SIB_AGE_DIFF, _AVG_GENERATION_GAP
    _MAX_AGE_PROB_ALIVE, _MAX_SIB_AGE_DIFF, _AVG_GENERATION_GAP = (
        config.get('behavior.max-age-prob-alive'),
        config.get('behavior.max-sib-age-diff'),
        config.get('behavior.avg-generation-gap'))
|
2009-07-20 02:20:46 +05:30
|
|
|
|
2009-08-20 03:34:57 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# Function to return the name of the main participant of an event
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2009-07-20 02:20:46 +05:30
|
|
|
|
2009-08-20 03:34:57 +05:30
|
|
|
def get_participant_from_event(db, event_handle):
    """
    Obtain the first primary or family participant to an event we find in the
    database. Note that an event can have more than one primary or
    family participant, only one is returned, adding ellipses if there are
    more.
    """
    participant = ""
    ellipses = False
    # one backlink pass collects every person/family referencing the event
    result_list = list(db.find_backlink_handles(event_handle,
                             include_classes=['Person', 'Family']))
    #obtain handles without duplicates
    people = set([x[1] for x in result_list if x[0] == 'Person'])
    families = set([x[1] for x in result_list if x[0] == 'Family'])
    # first preference: a person with a primary role in the event
    for personhandle in people:
        person = db.get_person_from_handle(personhandle)
        if not person:
            continue
        for event_ref in person.get_event_ref_list():
            if event_handle == event_ref.ref and \
                    event_ref.get_role().is_primary():
                if participant:
                    # a second match: remember to add ellipses, keep first
                    ellipses = True
                else:
                    participant = name_displayer.display(person)
                break
        if ellipses:
            break
    if ellipses:
        return _('%s, ...') % participant

    # otherwise look for a family with a family role in the event
    for familyhandle in families:
        family = db.get_family_from_handle(familyhandle)
        for event_ref in family.get_event_ref_list():
            if event_handle == event_ref.ref and \
                    event_ref.get_role().is_family():
                if participant:
                    # a second match (person or family): add ellipses
                    ellipses = True
                else:
                    participant = family_name(family, db)
                break
        if ellipses:
            break

    if ellipses:
        return _('%s, ...') % participant
    else:
        return participant
|
2009-11-13 04:02:52 +05:30
|
|
|
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# Function to return children's list of a person
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
def find_children(db, p):
    """
    Return the list of all children's IDs for a person.
    """
    return [child_ref.ref
            for family_handle in p.get_family_handle_list()
            for child_ref in
                db.get_family_from_handle(family_handle).get_child_ref_list()]
|
|
|
|
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# Function to return parent's list of a person
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
def find_parents(db, p):
    """
    Return the unique list of all parents' IDs for a person.
    """
    parentlist = []
    for family_handle in p.get_parent_family_handle_list():
        family = db.get_family_from_handle(family_handle)
        # father first, then mother, each added only once
        # (an absent handle is None and is kept, matching prior behavior)
        for parent_handle in (family.get_father_handle(),
                              family.get_mother_handle()):
            if parent_handle not in parentlist:
                parentlist.append(parent_handle)
    return parentlist
|
|
|
|
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# Function to return persons, that share the same event.
|
|
|
|
# This for example links witnesses to the tree
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
def find_witnessed_people(db, p):
    """
    Return the list of handles of people who share an event with person p.

    This links, for example, witnesses of an event into the tree: it
    collects people back-linked to p's personal events, the parents of
    families back-linked to those events, people back-linked to the
    events of p's own families, and the targets of p's person
    references.  p itself is never included.
    """
    people = []
    # people and families referring to p's personal events
    for event_ref in p.get_event_ref_list():
        for l in db.find_backlink_handles( event_ref.ref):
            if l[0] == 'Person' and l[1] != p.get_handle() and l[1] not in people:
                people.append(l[1])
            if l[0] == 'Family':
                fam = db.get_family_from_handle(l[1])
                if fam:
                    father_handle = fam.get_father_handle()
                    if father_handle and father_handle != p.get_handle() and father_handle not in people:
                        people.append(father_handle)
                    mother_handle = fam.get_mother_handle()
                    if mother_handle and mother_handle != p.get_handle() and mother_handle not in people:
                        people.append(mother_handle)
    # people referring to the events of p's own families
    for f in p.get_family_handle_list():
        family = db.get_family_from_handle(f)
        for event_ref in family.get_event_ref_list():
            for l in db.find_backlink_handles( event_ref.ref):
                if l[0] == 'Person' and l[1] != p.get_handle() and l[1] not in people:
                    people.append(l[1])
    # direct person references (e.g. associations)
    for pref in p.get_person_ref_list():
        # BUG FIX: the original compared pref.ref against the bound method
        # p.get_handle (missing call parentheses), which is always unequal,
        # so p could be returned as its own witness.
        if pref.ref != p.get_handle() and pref.ref not in people:
            people.append(pref.ref)
    return people
|
2010-01-11 00:49:33 +05:30
|
|
|
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# Function to return a label to display the active object in the status bar
|
|
|
|
# and to describe bookmarked objects.
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
def navigation_label(db, nav_type, handle):
    """
    Build a short label describing the active object, for display in the
    status bar and for describing bookmarked objects.

    @param db: the database to look the object up in
    @param nav_type: one of 'Person', 'Family', 'Event', 'Place', 'Source',
        'Repository', 'Media' or 'Note'
    @param handle: handle of the object of that type
    @return: (label, obj) tuple; obj is None when the type is unknown or
        the handle does not resolve
    """
    # Simple types share one pattern: fetch the object, then pull a single
    # descriptive string from it.
    simple = {
        'Person':     (db.get_person_from_handle,
                       lambda o: name_displayer.display(o)),
        'Family':     (db.get_family_from_handle,
                       lambda o: family_name(o, db)),
        'Place':      (db.get_place_from_handle,
                       lambda o: o.get_title()),
        'Source':     (db.get_source_from_handle,
                       lambda o: o.get_title()),
        'Repository': (db.get_repository_from_handle,
                       lambda o: o.get_name()),
        'Media':      (db.get_object_from_handle,
                       lambda o: o.get_description()),
        'Note':       (db.get_note_from_handle,
                       lambda o: o.get()),
    }

    obj = None
    label = None
    if nav_type == 'Event':
        # Events combine type, description and participant into one label.
        obj = db.get_event_from_handle(handle)
        if obj:
            who = get_participant_from_event(db, handle)
            desc = obj.get_description()
            label = obj.get_type()
            if desc:
                label = '%s - %s' % (label, desc)
            if who:
                label = '%s - %s' % (label, who)
    elif nav_type in simple:
        fetch, describe = simple[nav_type]
        obj = fetch(handle)
        if obj:
            label = describe(obj)

    # Force the label to unicode before truncating; slicing a byte string
    # could otherwise cut inside a multi-byte utf-8 sequence.
    label = unicode(label)
    # Collapse all runs of whitespace (including newlines) to single spaces.
    label = " ".join(label.split())
    if len(label) > 40:
        label = label[:40] + "..."

    if label and obj:
        label = '[%s] %s' % (obj.get_gramps_id(), label)

    return (label, obj)
|
2010-05-20 23:17:31 +05:30
|
|
|
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# Format the date and time displayed in the Last Changed column in views.
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
def format_time(secs):
    """
    Format a time in seconds as a date in the preferred date format and a
    24 hour time as hh:mm:ss.
    """
    local = time.localtime(secs)
    day = Date(local.tm_year, local.tm_mon, local.tm_mday)
    # Preferred-format date first, then the locale's 24 hour clock time.
    return DateHandler.displayer.display(day) + time.strftime(' %X', local)
|