#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2004 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#

# $Id$
|
2003-10-23 21:03:57 +05:30
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
"Import from GEDCOM"
|
|
|
|
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# standard python modules
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
import os
|
|
|
|
import re
|
|
|
|
import string
|
|
|
|
import const
|
|
|
|
import time
|
|
|
|
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# GTK/GNOME Modules
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
import gtk
|
|
|
|
import gtk.glade
|
|
|
|
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# GRAMPS modules
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2003-01-15 10:55:50 +05:30
|
|
|
import Errors
|
2003-01-10 11:09:40 +05:30
|
|
|
import RelLib
|
2002-10-20 19:55:16 +05:30
|
|
|
import Date
|
2004-09-17 09:00:04 +05:30
|
|
|
import DateParser
|
2002-11-10 00:14:58 +05:30
|
|
|
from ansel_utf8 import ansel_to_utf8
|
2002-10-20 19:55:16 +05:30
|
|
|
import latin_utf8
|
|
|
|
import Utils
|
|
|
|
from GedcomInfo import *
|
2002-11-03 12:05:06 +05:30
|
|
|
from QuestionDialog import ErrorDialog
|
2003-08-17 07:44:33 +05:30
|
|
|
from gettext import gettext as _
|
2002-10-20 19:55:16 +05:30
|
|
|
|
2003-01-19 11:55:20 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# constants
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2002-10-20 19:55:16 +05:30
|
|
|
ANSEL = 1
|
|
|
|
UNICODE = 2
|
2003-01-19 11:55:20 +05:30
|
|
|
UPDATE = 25
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
callback = None
|
|
|
|
|
2004-04-11 08:07:13 +05:30
|
|
|
_title_string = _("GEDCOM")
|
2003-07-15 22:47:58 +05:30
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
def nocnv(s):
|
2003-01-02 10:01:52 +05:30
|
|
|
return unicode(s)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
2003-03-31 07:03:40 +05:30
|
|
|
file_systems = {
|
|
|
|
'VFAT' : _('Windows 9x file system'),
|
|
|
|
'FAT' : _('Windows 9x file system'),
|
|
|
|
"NTFS" : _('Windows NT file system'),
|
|
|
|
"ISO9660" : _('CD ROM'),
|
|
|
|
"SMBFS" : _('Networked Windows file system')
|
|
|
|
}
|
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# GEDCOM events to GRAMPS events conversion
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
ged2gramps = {}
|
|
|
|
for _val in const.personalConstantEvents.keys():
|
|
|
|
_key = const.personalConstantEvents[_val]
|
|
|
|
if _key != "":
|
|
|
|
ged2gramps[_key] = _val
|
|
|
|
|
|
|
|
ged2fam = {}
|
|
|
|
for _val in const.familyConstantEvents.keys():
|
|
|
|
_key = const.familyConstantEvents[_val]
|
|
|
|
if _key != "":
|
|
|
|
ged2fam[_key] = _val
|
|
|
|
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# regular expressions
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
intRE = re.compile(r"\s*(\d+)\s*$")
|
|
|
|
lineRE = re.compile(r"\s*(\d+)\s+(\S+)\s*(.*)$")
|
|
|
|
headRE = re.compile(r"\s*(\d+)\s+HEAD")
|
|
|
|
nameRegexp= re.compile(r"/?([^/]*)(/([^/]*)(/([^/]*))?)?")
|
2004-01-19 23:13:35 +05:30
|
|
|
snameRegexp= re.compile(r"/([^/]*)/([^/]*)")
|
2002-10-20 19:55:16 +05:30
|
|
|
calRegexp = re.compile(r"\s*(ABT|BEF|AFT)?\s*@#D([^@]+)@\s*(.*)$")
|
2004-09-17 09:00:04 +05:30
|
|
|
rangeRegexp = re.compile(r"\s*BET\s+@#D([^@]+)@\s*(.*)\s+AND\s+@#D([^@]+)@\s*(.*)$")
|
|
|
|
spanRegexp = re.compile(r"\s*FROM\s+@#D([^@]+)@\s*(.*)\s+TO\s+@#D([^@]+)@\s*(.*)$")
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
#
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2004-10-11 02:46:44 +05:30
|
|
|
def importData(database, filename, cb=None, use_trans=True):
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
global callback
|
2004-10-16 10:40:35 +05:30
|
|
|
|
2004-09-25 03:35:46 +05:30
|
|
|
f = open(filename,"r")
|
|
|
|
|
|
|
|
ansel = False
|
|
|
|
gramps = False
|
|
|
|
for index in range(0,50):
|
|
|
|
line = f.readline().split()
|
2004-10-11 02:46:44 +05:30
|
|
|
if len(line) == 0:
|
|
|
|
break
|
2004-09-25 03:35:46 +05:30
|
|
|
if line[1] == 'CHAR' and line[2] == "ANSEL":
|
|
|
|
ansel = True
|
|
|
|
if line[1] == 'SOUR' and line[2] == "GRAMPS":
|
|
|
|
gramps = True
|
|
|
|
f.close()
|
|
|
|
|
|
|
|
if not gramps and ansel:
|
|
|
|
glade_file = "%s/gedcomimport.glade" % os.path.dirname(__file__)
|
|
|
|
top = gtk.glade.XML(glade_file,'encoding','gramps')
|
|
|
|
code = top.get_widget('codeset')
|
|
|
|
code.set_active(0)
|
|
|
|
dialog = top.get_widget('encoding')
|
|
|
|
dialog.run()
|
|
|
|
codeset = code.get_active()
|
|
|
|
dialog.destroy()
|
|
|
|
else:
|
|
|
|
codeset = None
|
2004-10-11 02:46:44 +05:30
|
|
|
import2(database, filename, cb, codeset, use_trans)
|
2004-09-25 03:35:46 +05:30
|
|
|
|
|
|
|
|
2004-10-11 02:46:44 +05:30
|
|
|
def import2(database, filename, cb, codeset, use_trans):
|
2002-10-20 19:55:16 +05:30
|
|
|
# add some checking here
|
|
|
|
|
|
|
|
glade_file = "%s/gedcomimport.glade" % os.path.dirname(__file__)
|
2004-08-01 09:51:31 +05:30
|
|
|
if not os.path.isfile(glade_file):
|
|
|
|
glade_file = "plugins/gedcomimport.glade"
|
2002-10-20 19:55:16 +05:30
|
|
|
|
2003-08-17 07:44:33 +05:30
|
|
|
statusTop = gtk.glade.XML(glade_file,"status","gramps")
|
2002-10-20 19:55:16 +05:30
|
|
|
statusWindow = statusTop.get_widget("status")
|
2003-03-17 10:51:40 +05:30
|
|
|
|
|
|
|
Utils.set_titles(statusWindow,statusTop.get_widget('title'),
|
|
|
|
_('GEDCOM import status'))
|
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
statusTop.get_widget("close").set_sensitive(0)
|
|
|
|
statusTop.signal_autoconnect({
|
|
|
|
"destroy_passed_object" : Utils.destroy_passed_object
|
|
|
|
})
|
|
|
|
|
|
|
|
try:
|
2004-09-25 03:35:46 +05:30
|
|
|
g = GedcomParser(database,filename,statusTop, codeset)
|
2002-10-20 19:55:16 +05:30
|
|
|
except IOError,msg:
|
|
|
|
Utils.destroy_passed_object(statusWindow)
|
2003-05-16 07:19:50 +05:30
|
|
|
ErrorDialog(_("%s could not be opened\n") % filename,str(msg))
|
2002-10-20 19:55:16 +05:30
|
|
|
return
|
2003-01-10 11:09:40 +05:30
|
|
|
except:
|
|
|
|
Utils.destroy_passed_object(statusWindow)
|
2004-11-27 08:24:31 +05:30
|
|
|
DisplayTrace.DisplayTrace()
|
2003-01-10 11:09:40 +05:30
|
|
|
return
|
2002-10-20 19:55:16 +05:30
|
|
|
|
2004-10-19 08:49:25 +05:30
|
|
|
if database.get_number_of_people() == 0:
|
|
|
|
use_trans = False
|
|
|
|
|
2003-01-15 10:55:50 +05:30
|
|
|
try:
|
2004-10-11 02:46:44 +05:30
|
|
|
close = g.parse_gedcom_file(use_trans)
|
2003-01-15 10:55:50 +05:30
|
|
|
except IOError,msg:
|
|
|
|
Utils.destroy_passed_object(statusWindow)
|
|
|
|
errmsg = _("%s could not be opened\n") % filename
|
2003-05-16 07:19:50 +05:30
|
|
|
ErrorDialog(errmsg,str(msg))
|
2003-01-15 10:55:50 +05:30
|
|
|
return
|
|
|
|
except Errors.GedcomError, val:
|
2003-05-16 07:19:50 +05:30
|
|
|
(m1,m2) = val.messages()
|
2003-01-15 10:55:50 +05:30
|
|
|
Utils.destroy_passed_object(statusWindow)
|
2003-05-16 07:19:50 +05:30
|
|
|
ErrorDialog(m1,m2)
|
2003-01-15 10:55:50 +05:30
|
|
|
return
|
|
|
|
except:
|
|
|
|
import DisplayTrace
|
|
|
|
Utils.destroy_passed_object(statusWindow)
|
|
|
|
DisplayTrace.DisplayTrace()
|
|
|
|
return
|
2002-11-10 00:14:58 +05:30
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
statusTop.get_widget("close").set_sensitive(1)
|
|
|
|
if close:
|
|
|
|
statusWindow.destroy()
|
2004-10-23 09:26:48 +05:30
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
if cb:
|
|
|
|
statusWindow.destroy()
|
|
|
|
cb(1)
|
|
|
|
elif callback:
|
2003-07-03 08:26:34 +05:30
|
|
|
callback()
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
#
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
class DateStruct:
|
|
|
|
def __init__(self):
|
|
|
|
self.date = ""
|
|
|
|
self.time = ""
|
|
|
|
|
2004-12-26 23:53:50 +05:30
|
|
|
class GedcomDateParser(DateParser.DateParser):
|
|
|
|
|
|
|
|
month_to_int = {
|
|
|
|
'jan' : 1, 'feb' : 2, 'mar' : 3, 'apr' : 4,
|
|
|
|
'may' : 5, 'jun' : 6, 'jul' : 7, 'aug' : 8,
|
|
|
|
'sep' : 9, 'oct' : 10, 'nov' : 11, 'dec' : 12,
|
|
|
|
}
|
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
#
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
class GedcomParser:
|
|
|
|
|
|
|
|
SyntaxError = "Syntax Error"
|
|
|
|
BadFile = "Not a GEDCOM file"
|
|
|
|
|
2004-11-27 08:24:31 +05:30
|
|
|
def __init__(self, dbase, filename, window, codeset):
|
2004-12-26 23:53:50 +05:30
|
|
|
self.dp = GedcomDateParser()
|
2003-01-10 11:09:40 +05:30
|
|
|
self.db = dbase
|
2002-10-20 19:55:16 +05:30
|
|
|
self.person = None
|
2004-11-25 07:36:05 +05:30
|
|
|
self.media_map = {}
|
2002-10-20 19:55:16 +05:30
|
|
|
self.fmap = {}
|
|
|
|
self.smap = {}
|
|
|
|
self.nmap = {}
|
|
|
|
self.share_note = []
|
|
|
|
self.refn = {}
|
|
|
|
self.added = {}
|
|
|
|
self.gedmap = GedcomInfoDB()
|
|
|
|
self.gedsource = None
|
2004-11-27 05:01:50 +05:30
|
|
|
self.def_src = RelLib.Source()
|
2004-11-27 08:24:31 +05:30
|
|
|
fname = os.path.basename(filename).split('\\')[-1]
|
|
|
|
self.def_src.set_title(_("Import from %s") % unicode(fname))
|
|
|
|
self.dir_path = os.path.dirname(filename)
|
2002-10-20 19:55:16 +05:30
|
|
|
self.localref = 0
|
|
|
|
self.placemap = {}
|
|
|
|
self.broken_conc_list = [ 'FamilyOrigins', 'FTW' ]
|
|
|
|
self.broken_conc = 0
|
|
|
|
self.is_ftw = 0
|
2004-06-27 08:40:06 +05:30
|
|
|
self.idswap = {}
|
2004-08-01 09:51:31 +05:30
|
|
|
self.gid2id = {}
|
2004-08-11 09:12:38 +05:30
|
|
|
self.sid2id = {}
|
|
|
|
self.lid2id = {}
|
|
|
|
self.fid2id = {}
|
2002-10-20 19:55:16 +05:30
|
|
|
|
2004-11-27 08:24:31 +05:30
|
|
|
self.f = open(filename,"rU")
|
|
|
|
self.filename = filename
|
2002-10-20 19:55:16 +05:30
|
|
|
self.index = 0
|
|
|
|
self.backoff = 0
|
2004-10-16 22:56:04 +05:30
|
|
|
self.override = codeset
|
2004-09-25 03:35:46 +05:30
|
|
|
|
2004-10-19 08:49:25 +05:30
|
|
|
if self.db.get_number_of_people() == 0:
|
|
|
|
self.map_gid = self.map_gid_empty
|
|
|
|
else:
|
|
|
|
self.map_gid = self.map_gid_not_empty
|
|
|
|
|
2004-10-16 22:56:04 +05:30
|
|
|
if self.override != 0:
|
|
|
|
if self.override == 1:
|
2004-09-25 03:35:46 +05:30
|
|
|
self.cnv = ansel_to_utf8
|
2004-10-16 22:56:04 +05:30
|
|
|
elif self.override == 2:
|
2004-09-25 03:35:46 +05:30
|
|
|
self.cnv = latin_utf8.latin_to_utf8
|
|
|
|
else:
|
|
|
|
self.cnv = nocnv
|
|
|
|
else:
|
|
|
|
self.cnv = nocnv
|
2002-10-20 19:55:16 +05:30
|
|
|
|
2004-11-27 08:24:31 +05:30
|
|
|
self.geddir = os.path.dirname(os.path.normpath(os.path.abspath(filename)))
|
2003-03-31 07:03:40 +05:30
|
|
|
|
2004-06-29 08:03:25 +05:30
|
|
|
self.transtable = string.maketrans('','')
|
|
|
|
self.delc = self.transtable[0:31]
|
|
|
|
self.transtable2 = self.transtable[0:128] + ('?' * 128)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
self.window = window
|
2003-01-19 11:55:20 +05:30
|
|
|
if window:
|
|
|
|
self.file_obj = window.get_widget("file")
|
|
|
|
self.encoding_obj = window.get_widget("encoding")
|
|
|
|
self.created_obj = window.get_widget("created")
|
|
|
|
self.version_obj = window.get_widget("version")
|
|
|
|
self.families_obj = window.get_widget("families")
|
|
|
|
self.people_obj = window.get_widget("people")
|
|
|
|
self.errors_obj = window.get_widget("errors")
|
|
|
|
self.close_done = window.get_widget('close_done')
|
|
|
|
self.error_text_obj = window.get_widget("error_text")
|
2003-03-31 07:03:40 +05:30
|
|
|
self.info_text_obj = window.get_widget("info_text")
|
2003-01-19 11:55:20 +05:30
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
self.error_count = 0
|
|
|
|
|
2004-10-06 09:12:54 +05:30
|
|
|
amap = const.personalConstantAttributes
|
|
|
|
self.attrs = amap.values()
|
2002-10-20 19:55:16 +05:30
|
|
|
self.gedattr = {}
|
2004-10-06 09:12:54 +05:30
|
|
|
for val in amap.keys():
|
|
|
|
self.gedattr[amap[val]] = val
|
2003-01-19 11:55:20 +05:30
|
|
|
|
|
|
|
if self.window:
|
2004-11-27 08:24:31 +05:30
|
|
|
self.update(self.file_obj,os.path.basename(filename))
|
2003-01-19 11:55:20 +05:30
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
self.search_paths = []
|
|
|
|
|
|
|
|
try:
|
2003-03-31 07:03:40 +05:30
|
|
|
mypaths = []
|
|
|
|
f = open("/proc/mounts","r")
|
2002-10-20 19:55:16 +05:30
|
|
|
|
2004-05-04 10:04:48 +05:30
|
|
|
for line in f.xreadlines():
|
2004-10-23 09:26:48 +05:30
|
|
|
paths = line.split()
|
2003-03-31 07:03:40 +05:30
|
|
|
ftype = paths[2].upper()
|
|
|
|
if ftype in file_systems.keys():
|
|
|
|
mypaths.append((paths[1],file_systems[ftype]))
|
2002-10-20 19:55:16 +05:30
|
|
|
self.search_paths.append(paths[1])
|
|
|
|
f.close()
|
2003-03-31 07:03:40 +05:30
|
|
|
|
|
|
|
if len(mypaths):
|
|
|
|
self.infomsg(_("Windows style path names for images will use the following mount "
|
|
|
|
"points to try to find the images. These paths are based on Windows "
|
|
|
|
"compatible file systems available on this system:\n\n"))
|
|
|
|
for p in mypaths:
|
|
|
|
self.infomsg("\t%s : %s\n" % p)
|
|
|
|
|
|
|
|
self.infomsg('\n')
|
|
|
|
self.infomsg(_("Images that cannot be found in the specfied path in the GEDCOM file "
|
|
|
|
"will be searched for in the same directory in which the GEDCOM file "
|
|
|
|
"exists (%s).\n") % self.geddir)
|
2002-10-20 19:55:16 +05:30
|
|
|
except:
|
|
|
|
pass
|
|
|
|
|
2003-02-10 09:41:01 +05:30
|
|
|
def errmsg(self,msg):
|
2003-05-20 01:28:52 +05:30
|
|
|
if self.window:
|
|
|
|
try:
|
|
|
|
self.error_text_obj.get_buffer().insert_at_cursor(msg)
|
|
|
|
except TypeError:
|
|
|
|
self.error_text_obj.get_buffer().insert_at_cursor(msg,len(msg))
|
|
|
|
else:
|
|
|
|
print msg
|
2003-02-10 09:41:01 +05:30
|
|
|
|
2003-03-31 07:03:40 +05:30
|
|
|
def infomsg(self,msg):
|
2003-05-20 01:28:52 +05:30
|
|
|
if self.window:
|
|
|
|
try:
|
|
|
|
self.info_text_obj.get_buffer().insert_at_cursor(msg)
|
|
|
|
except TypeError:
|
|
|
|
self.info_text_obj.get_buffer().insert_at_cursor(msg,len(msg))
|
|
|
|
else:
|
|
|
|
print msg
|
2003-03-31 07:03:40 +05:30
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
def find_file(self,fullname,altpath):
|
2003-03-31 07:03:40 +05:30
|
|
|
tries = []
|
2004-10-23 09:26:48 +05:30
|
|
|
fullname = fullname.replace('\\','/')
|
2003-03-31 07:03:40 +05:30
|
|
|
tries.append(fullname)
|
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
if os.path.isfile(fullname):
|
2003-03-31 07:03:40 +05:30
|
|
|
return (1,fullname)
|
2004-11-24 09:25:25 +05:30
|
|
|
other = os.path.join(altpath,fullname)
|
|
|
|
tries.append(other)
|
|
|
|
if os.path.isfile(other):
|
|
|
|
return (1,other)
|
2002-10-20 19:55:16 +05:30
|
|
|
other = os.path.join(altpath,os.path.basename(fullname))
|
2003-03-31 07:03:40 +05:30
|
|
|
tries.append(other)
|
2002-10-20 19:55:16 +05:30
|
|
|
if os.path.isfile(other):
|
2003-03-31 07:03:40 +05:30
|
|
|
return (1,other)
|
2002-10-20 19:55:16 +05:30
|
|
|
if len(fullname) > 3:
|
|
|
|
if fullname[1] == ':':
|
|
|
|
fullname = fullname[2:]
|
|
|
|
for path in self.search_paths:
|
2003-03-31 07:03:40 +05:30
|
|
|
other = os.path.normpath("%s/%s" % (path,fullname))
|
|
|
|
tries.append(other)
|
2002-10-20 19:55:16 +05:30
|
|
|
if os.path.isfile(other):
|
2003-03-31 07:03:40 +05:30
|
|
|
return (1,other)
|
|
|
|
return (0,tries)
|
2002-10-20 19:55:16 +05:30
|
|
|
else:
|
2003-03-31 07:03:40 +05:30
|
|
|
return (0,tries)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
def update(self,field,text):
|
|
|
|
field.set_text(text)
|
|
|
|
while gtk.events_pending():
|
2004-05-04 10:04:48 +05:30
|
|
|
gtk.main_iteration()
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
def get_next(self):
|
|
|
|
if self.backoff == 0:
|
2003-01-19 11:55:20 +05:30
|
|
|
next_line = self.f.readline()
|
2004-05-20 10:11:55 +05:30
|
|
|
try:
|
2004-06-29 08:03:25 +05:30
|
|
|
self.text = string.translate(next_line.strip(),self.transtable,self.delc)
|
2004-05-20 10:11:55 +05:30
|
|
|
except:
|
|
|
|
self.text = next_line.strip()
|
2003-01-24 09:17:05 +05:30
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
try:
|
|
|
|
self.text = self.cnv(self.text)
|
|
|
|
except:
|
2004-06-29 08:03:25 +05:30
|
|
|
self.text = string.translate(self.text,self.transtable2)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
2003-01-24 09:17:05 +05:30
|
|
|
self.index += 1
|
2004-10-23 09:26:48 +05:30
|
|
|
l = self.text.split(None, 2)
|
2002-10-20 19:55:16 +05:30
|
|
|
ln = len(l)
|
|
|
|
try:
|
|
|
|
if ln == 2:
|
|
|
|
self.groups = (int(l[0]),l[1],"")
|
|
|
|
else:
|
|
|
|
self.groups = (int(l[0]),l[1],l[2])
|
|
|
|
except:
|
2003-12-16 09:34:08 +05:30
|
|
|
if self.text == "":
|
|
|
|
msg = _("Warning: line %d was blank, so it was ignored.\n") % self.index
|
|
|
|
else:
|
|
|
|
msg = _("Warning: line %d was not understood, so it was ignored.") % self.index
|
|
|
|
msg = "%s\n\t%s\n" % (msg,self.text)
|
2003-02-10 09:41:01 +05:30
|
|
|
self.errmsg(msg)
|
2002-10-20 19:55:16 +05:30
|
|
|
self.error_count = self.error_count + 1
|
|
|
|
self.groups = (999, "XXX", "XXX")
|
|
|
|
self.backoff = 0
|
|
|
|
return self.groups
|
|
|
|
|
|
|
|
def barf(self,level):
|
2004-01-05 09:27:01 +05:30
|
|
|
msg = _("Warning: line %d was not understood, so it was ignored.") % self.index
|
2003-05-21 03:28:24 +05:30
|
|
|
self.errmsg(msg)
|
2002-10-20 19:55:16 +05:30
|
|
|
msg = "\n\t%s\n" % self.text
|
2003-01-19 11:55:20 +05:30
|
|
|
|
2003-05-21 03:28:24 +05:30
|
|
|
self.errmsg(msg)
|
|
|
|
self.error_count = self.error_count + 1
|
2002-10-20 19:55:16 +05:30
|
|
|
self.ignore_sub_junk(level)
|
|
|
|
|
|
|
|
def warn(self,msg):
|
2003-05-21 03:28:24 +05:30
|
|
|
self.errmsg(msg)
|
|
|
|
self.error_count = self.error_count + 1
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
def backup(self):
|
|
|
|
self.backoff = 1
|
|
|
|
|
2004-10-11 02:46:44 +05:30
|
|
|
def parse_gedcom_file(self,use_trans=True):
|
|
|
|
|
|
|
|
if use_trans:
|
|
|
|
self.trans = self.db.transaction_begin()
|
|
|
|
else:
|
|
|
|
self.trans = None
|
2002-10-20 19:55:16 +05:30
|
|
|
t = time.time()
|
|
|
|
self.index = 0
|
|
|
|
self.fam_count = 0
|
|
|
|
self.indi_count = 0
|
2004-10-16 10:40:35 +05:30
|
|
|
self.source_count = 0
|
2002-10-20 19:55:16 +05:30
|
|
|
try:
|
|
|
|
self.parse_header()
|
|
|
|
self.parse_submitter()
|
2004-11-27 05:01:50 +05:30
|
|
|
self.db.add_source(self.def_src,self.trans)
|
2002-10-20 19:55:16 +05:30
|
|
|
self.parse_record()
|
|
|
|
self.parse_trailer()
|
2003-01-15 10:55:50 +05:30
|
|
|
except Errors.GedcomError, err:
|
2003-02-10 09:41:01 +05:30
|
|
|
self.errmsg(str(err))
|
2002-10-20 19:55:16 +05:30
|
|
|
|
2003-01-19 11:55:20 +05:30
|
|
|
if self.window:
|
|
|
|
self.update(self.families_obj,str(self.fam_count))
|
|
|
|
self.update(self.people_obj,str(self.indi_count))
|
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
self.break_note_links()
|
|
|
|
t = time.time() - t
|
2002-11-03 12:05:06 +05:30
|
|
|
msg = _('Import Complete: %d seconds') % t
|
2004-05-20 10:11:55 +05:30
|
|
|
|
2004-10-11 02:46:44 +05:30
|
|
|
if use_trans:
|
|
|
|
self.db.transaction_commit(self.trans,_("GEDCOM import"))
|
2004-05-20 10:11:55 +05:30
|
|
|
|
2003-01-19 11:55:20 +05:30
|
|
|
if self.window:
|
2003-03-31 07:03:40 +05:30
|
|
|
self.infomsg("\n%s" % msg)
|
2003-01-19 11:55:20 +05:30
|
|
|
else:
|
|
|
|
print msg
|
2003-05-20 01:28:52 +05:30
|
|
|
print "Families: %d" % self.fam_count
|
|
|
|
print "Individuals: %d" % self.indi_count
|
2003-01-19 11:55:20 +05:30
|
|
|
return None
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
def break_note_links(self):
|
|
|
|
for o in self.share_note:
|
|
|
|
o.unique_note()
|
|
|
|
|
|
|
|
def parse_trailer(self):
|
2004-01-05 09:27:01 +05:30
|
|
|
matches = self.get_next()
|
2002-10-20 19:55:16 +05:30
|
|
|
if matches[1] != "TRLR":
|
2004-01-05 09:27:01 +05:30
|
|
|
self.barf(0)
|
|
|
|
self.f.close()
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
def parse_header(self):
|
2004-01-05 09:27:01 +05:30
|
|
|
self.parse_header_head()
|
2002-10-20 19:55:16 +05:30
|
|
|
self.parse_header_source()
|
|
|
|
|
|
|
|
def parse_submitter(self):
|
2004-01-05 09:27:01 +05:30
|
|
|
matches = self.get_next()
|
2002-10-20 19:55:16 +05:30
|
|
|
if matches[2] != "SUBM":
|
|
|
|
self.backup()
|
2004-01-05 09:27:01 +05:30
|
|
|
return
|
2002-10-20 19:55:16 +05:30
|
|
|
else:
|
2004-11-27 05:01:50 +05:30
|
|
|
self.parse_submitter_data(1)
|
|
|
|
|
|
|
|
def parse_submitter_data(self,level):
|
|
|
|
while(1):
|
|
|
|
matches = self.get_next()
|
|
|
|
if int(matches[0]) < level:
|
|
|
|
self.backup()
|
|
|
|
return
|
|
|
|
elif matches[1] == "NAME":
|
|
|
|
self.def_src.set_author(unicode(matches[2]))
|
|
|
|
elif matches[1] == ["ADDR"]:
|
|
|
|
self.ignore_sub_junk(level+1)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
def parse_source(self,name,level):
|
2004-08-11 09:12:38 +05:30
|
|
|
self.source = self.find_or_create_source(name[1:-1])
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
note = ""
|
|
|
|
while 1:
|
|
|
|
matches = self.get_next()
|
2004-01-05 09:27:01 +05:30
|
|
|
if int(matches[0]) < level:
|
2002-10-20 19:55:16 +05:30
|
|
|
if note:
|
2004-02-14 11:10:30 +05:30
|
|
|
self.source.set_note(note)
|
|
|
|
if not self.source.get_title():
|
2004-08-21 23:43:18 +05:30
|
|
|
self.source.set_title("No title - ID %s" % self.source.get_gramps_id())
|
2004-05-20 10:11:55 +05:30
|
|
|
self.db.commit_source(self.source, self.trans)
|
2002-10-20 19:55:16 +05:30
|
|
|
self.backup()
|
|
|
|
return
|
|
|
|
elif matches[1] == "TITL":
|
|
|
|
title = matches[2] + self.parse_continue_data(level+1)
|
2004-10-23 09:26:48 +05:30
|
|
|
title = title.replace('\n',' ')
|
2004-02-14 11:10:30 +05:30
|
|
|
self.source.set_title(title)
|
2002-10-20 19:55:16 +05:30
|
|
|
elif matches[1] == "TAXT" or matches[1] == "PERI": # EasyTree Sierra On-Line
|
2004-02-14 11:10:30 +05:30
|
|
|
if self.source.get_title() == "":
|
2002-10-20 19:55:16 +05:30
|
|
|
title = matches[2] + self.parse_continue_data(level+1)
|
2004-10-23 09:26:48 +05:30
|
|
|
title = title.replace('\n',' ')
|
2004-02-14 11:10:30 +05:30
|
|
|
self.source.set_title(title)
|
2002-10-20 19:55:16 +05:30
|
|
|
elif matches[1] == "AUTH":
|
2004-02-14 11:10:30 +05:30
|
|
|
self.source.set_author(matches[2] + self.parse_continue_data(level+1))
|
2002-10-20 19:55:16 +05:30
|
|
|
elif matches[1] == "PUBL":
|
2004-02-14 11:10:30 +05:30
|
|
|
self.source.set_publication_info(matches[2] + self.parse_continue_data(level+1))
|
2002-10-20 19:55:16 +05:30
|
|
|
elif matches[1] == "OBJE":
|
|
|
|
self.ignore_sub_junk(2)
|
|
|
|
elif matches[1] == "NOTE":
|
|
|
|
note = self.parse_note(matches,self.source,level+1,note)
|
|
|
|
elif matches[1] == "TEXT":
|
2004-02-14 11:10:30 +05:30
|
|
|
note = self.source.get_note()
|
2002-10-20 19:55:16 +05:30
|
|
|
d = self.parse_continue_data(level+1)
|
|
|
|
if note:
|
|
|
|
note = "%s\n%s %s%s" % (note,matches[1],matches[2],d)
|
|
|
|
else:
|
|
|
|
note = "%s %s%s" % (matches[1],matches[2],d)
|
2003-07-25 16:15:24 +05:30
|
|
|
elif matches[1] == "ABBR":
|
2004-02-21 11:41:59 +05:30
|
|
|
self.source.set_abbreviation(matches[2] + self.parse_continue_data(level+1))
|
2002-10-20 19:55:16 +05:30
|
|
|
else:
|
2004-02-14 11:10:30 +05:30
|
|
|
note = self.source.get_note()
|
2002-10-20 19:55:16 +05:30
|
|
|
if note:
|
|
|
|
note = "%s\n%s %s" % (note,matches[1],matches[2])
|
|
|
|
else:
|
|
|
|
note = "%s %s" % (matches[1],matches[2])
|
|
|
|
|
|
|
|
def parse_record(self):
|
|
|
|
while 1:
|
2004-01-05 09:27:01 +05:30
|
|
|
matches = self.get_next()
|
2002-10-20 19:55:16 +05:30
|
|
|
if matches[2] == "FAM":
|
2003-01-19 11:55:20 +05:30
|
|
|
if self.fam_count % UPDATE == 0 and self.window:
|
2002-10-20 19:55:16 +05:30
|
|
|
self.update(self.families_obj,str(self.fam_count))
|
|
|
|
self.fam_count = self.fam_count + 1
|
2004-08-22 02:13:34 +05:30
|
|
|
self.family = self.find_or_create_family(matches[1][1:-1])
|
2002-10-20 19:55:16 +05:30
|
|
|
self.parse_family()
|
|
|
|
if self.addr != None:
|
2004-08-21 23:43:18 +05:30
|
|
|
father_handle = self.family.get_father_handle()
|
|
|
|
father = self.db.get_person_from_handle(father_handle)
|
2002-10-20 19:55:16 +05:30
|
|
|
if father:
|
2004-02-14 11:10:30 +05:30
|
|
|
father.add_address(self.addr)
|
2004-05-20 10:11:55 +05:30
|
|
|
self.db.commit_person(father, self.trans)
|
2004-08-21 23:43:18 +05:30
|
|
|
mother_handle = self.family.get_mother_handle()
|
|
|
|
mother = self.db.get_person_from_handle(mother_handle)
|
2002-10-20 19:55:16 +05:30
|
|
|
if mother:
|
2004-02-14 11:10:30 +05:30
|
|
|
mother.add_address(self.addr)
|
2004-05-20 10:11:55 +05:30
|
|
|
self.db.commit_person(mother, self.trans)
|
2004-08-21 23:43:18 +05:30
|
|
|
for child_handle in self.family.get_child_handle_list():
|
|
|
|
child = self.db.get_person_from_handle(child_handle)
|
|
|
|
if child:
|
|
|
|
child.add_address(self.addr)
|
|
|
|
self.db.commit_person(child, self.trans)
|
2004-11-27 08:24:31 +05:30
|
|
|
if len(self.family.get_source_references()) == 0:
|
|
|
|
sref = RelLib.SourceRef()
|
|
|
|
sref.set_base_handle(self.def_src.get_handle())
|
|
|
|
self.family.add_source_reference(sref)
|
2004-05-20 10:11:55 +05:30
|
|
|
self.db.commit_family(self.family, self.trans)
|
2004-05-04 10:04:48 +05:30
|
|
|
del self.family
|
2002-10-20 19:55:16 +05:30
|
|
|
elif matches[2] == "INDI":
|
2003-01-19 11:55:20 +05:30
|
|
|
if self.indi_count % UPDATE == 0 and self.window:
|
2002-10-20 19:55:16 +05:30
|
|
|
self.update(self.people_obj,str(self.indi_count))
|
|
|
|
self.indi_count = self.indi_count + 1
|
2004-10-06 09:12:54 +05:30
|
|
|
gid = matches[1]
|
|
|
|
gid = gid[1:-1]
|
|
|
|
self.person = self.find_or_create_person(self.map_gid(gid))
|
2004-07-28 07:59:07 +05:30
|
|
|
self.added[self.person.get_handle()] = 1
|
2002-10-20 19:55:16 +05:30
|
|
|
self.parse_individual()
|
2004-11-27 05:01:50 +05:30
|
|
|
if len(self.person.get_source_references()) == 0:
|
|
|
|
sref = RelLib.SourceRef()
|
|
|
|
sref.set_base_handle(self.def_src.get_handle())
|
|
|
|
self.person.add_source_reference(sref)
|
2004-05-20 10:11:55 +05:30
|
|
|
self.db.commit_person(self.person, self.trans)
|
2004-05-04 10:04:48 +05:30
|
|
|
del self.person
|
2003-01-19 11:55:20 +05:30
|
|
|
elif matches[2] in ["SUBM","SUBN","REPO"]:
|
2002-10-20 19:55:16 +05:30
|
|
|
self.ignore_sub_junk(1)
|
2003-07-21 05:39:12 +05:30
|
|
|
elif matches[1] in ["SUBM","SUBN","OBJE","_EVENT_DEFN"]:
|
2002-10-20 19:55:16 +05:30
|
|
|
self.ignore_sub_junk(1)
|
|
|
|
elif matches[2] == "SOUR":
|
|
|
|
self.parse_source(matches[1],1)
|
|
|
|
elif matches[2][0:4] == "NOTE":
|
|
|
|
if self.nmap.has_key(matches[1]):
|
|
|
|
noteobj = self.nmap[matches[1]]
|
|
|
|
else:
|
2003-01-10 11:09:40 +05:30
|
|
|
noteobj = RelLib.Note()
|
2002-10-20 19:55:16 +05:30
|
|
|
self.nmap[matches[1]] = noteobj
|
|
|
|
text = matches[2][4:]
|
2003-12-16 09:34:08 +05:30
|
|
|
# noteobj.append(text + self.parse_continue_data(1))
|
|
|
|
noteobj.append(text + self.parse_note_continue(1))
|
2002-10-20 19:55:16 +05:30
|
|
|
self.parse_note_data(1)
|
2004-01-05 09:27:01 +05:30
|
|
|
elif matches[1] == "TRLR":
|
2002-10-20 19:55:16 +05:30
|
|
|
self.backup()
|
|
|
|
return
|
|
|
|
else:
|
2004-01-05 09:27:01 +05:30
|
|
|
self.barf(1)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
2004-10-19 08:49:25 +05:30
|
|
|
def map_gid_empty(self,gid):
|
|
|
|
return gid
|
|
|
|
|
|
|
|
def map_gid_not_empty(self,gid):
|
2004-10-06 09:12:54 +05:30
|
|
|
if self.idswap.get(gid):
|
|
|
|
return self.idswap[gid]
|
2002-10-20 19:55:16 +05:30
|
|
|
else:
|
2004-10-06 09:12:54 +05:30
|
|
|
if self.db.id_trans.get(str(gid)):
|
|
|
|
self.idswap[gid] = self.db.find_next_gid()
|
2004-06-27 08:40:06 +05:30
|
|
|
else:
|
2004-10-06 09:12:54 +05:30
|
|
|
self.idswap[gid] = gid
|
|
|
|
return self.idswap[gid]
|
2004-06-27 08:40:06 +05:30
|
|
|
|
2004-08-01 09:51:31 +05:30
|
|
|
def find_or_create_person(self,gramps_id):
|
|
|
|
person = RelLib.Person()
|
|
|
|
intid = self.gid2id.get(gramps_id)
|
|
|
|
if self.db.person_map.has_key(intid):
|
|
|
|
person.unserialize(self.db.person_map.get(intid))
|
|
|
|
else:
|
2004-10-19 08:49:25 +05:30
|
|
|
intid = self.find_person_handle(gramps_id)
|
2004-08-01 09:51:31 +05:30
|
|
|
person.set_handle(intid)
|
|
|
|
person.set_gramps_id(gramps_id)
|
2002-10-20 19:55:16 +05:30
|
|
|
return person
|
|
|
|
|
2004-10-19 08:49:25 +05:30
|
|
|
def find_person_handle(self,gramps_id):
|
|
|
|
intid = self.gid2id.get(gramps_id)
|
|
|
|
if not intid:
|
2004-10-23 09:26:48 +05:30
|
|
|
intid = create_id()
|
2004-10-19 08:49:25 +05:30
|
|
|
self.gid2id[gramps_id] = intid
|
|
|
|
return intid
|
|
|
|
|
|
|
|
def find_or_create_family(self,gramps_id):
|
|
|
|
family = RelLib.Family()
|
|
|
|
intid = self.fid2id.get(gramps_id)
|
|
|
|
if self.db.family_map.has_key(intid):
|
|
|
|
family.unserialize(self.db.family_map.get(intid))
|
|
|
|
else:
|
|
|
|
intid = self.find_family_handle(gramps_id)
|
|
|
|
family.set_handle(intid)
|
|
|
|
family.set_gramps_id(gramps_id)
|
|
|
|
return family
|
|
|
|
|
|
|
|
def find_family_handle(self,gramps_id):
|
|
|
|
intid = self.fid2id.get(gramps_id)
|
|
|
|
if not intid:
|
2004-10-23 09:26:48 +05:30
|
|
|
intid = create_id()
|
2004-10-19 08:49:25 +05:30
|
|
|
self.fid2id[gramps_id] = intid
|
|
|
|
return intid
|
|
|
|
|
2004-08-11 09:12:38 +05:30
|
|
|
def find_or_create_source(self,gramps_id):
|
|
|
|
source = RelLib.Source()
|
|
|
|
intid = self.sid2id.get(gramps_id)
|
|
|
|
if self.db.source_map.has_key(intid):
|
|
|
|
source.unserialize(self.db.source_map.get(intid))
|
|
|
|
else:
|
2004-10-23 09:26:48 +05:30
|
|
|
intid = create_id()
|
2004-08-11 09:12:38 +05:30
|
|
|
source.set_handle(intid)
|
|
|
|
source.set_gramps_id(gramps_id)
|
2004-10-18 04:47:30 +05:30
|
|
|
self.db.add_source(source,self.trans)
|
2004-08-11 09:12:38 +05:30
|
|
|
self.sid2id[gramps_id] = intid
|
|
|
|
return source
|
|
|
|
|
|
|
|
def find_or_create_place(self,gramps_id):
|
|
|
|
place = RelLib.Place()
|
|
|
|
intid = self.lid2id.get(gramps_id)
|
|
|
|
if self.db.place_map.has_key(intid):
|
|
|
|
place.unserialize(self.db.place_map.get(intid))
|
|
|
|
else:
|
2004-10-23 09:26:48 +05:30
|
|
|
intid = create_id()
|
2004-08-11 09:12:38 +05:30
|
|
|
place.set_handle(intid)
|
|
|
|
place.set_title(gramps_id)
|
|
|
|
place.set_gramps_id(self.db.find_next_place_gramps_id())
|
2004-10-16 22:56:04 +05:30
|
|
|
self.db.add_place(place,self.trans)
|
2004-08-11 09:12:38 +05:30
|
|
|
self.lid2id[gramps_id] = intid
|
|
|
|
return place
|
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
def parse_cause(self,event,level):
|
|
|
|
while 1:
|
|
|
|
matches = self.get_next()
|
2004-01-05 09:27:01 +05:30
|
|
|
if int(matches[0]) < level:
|
2002-10-20 19:55:16 +05:30
|
|
|
self.backup()
|
|
|
|
return
|
|
|
|
elif matches[1] == "SOUR":
|
2004-02-14 11:10:30 +05:30
|
|
|
event.add_source_reference(self.handle_source(matches,level+1))
|
2002-10-20 19:55:16 +05:30
|
|
|
else:
|
2004-01-05 09:27:01 +05:30
|
|
|
self.barf(1)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
def parse_note_data(self,level):
|
|
|
|
while 1:
|
|
|
|
matches = self.get_next()
|
2004-01-05 09:27:01 +05:30
|
|
|
if int(matches[0]) < level:
|
2002-10-20 19:55:16 +05:30
|
|
|
self.backup()
|
|
|
|
return
|
|
|
|
elif matches[1] in ["SOUR","CHAN","REFN"]:
|
|
|
|
self.ignore_sub_junk(level+1)
|
|
|
|
elif matches[1] == "RIN":
|
|
|
|
pass
|
|
|
|
else:
|
|
|
|
self.barf(level+1)
|
|
|
|
|
|
|
|
def parse_ftw_relations(self,level):
|
|
|
|
mrel = "Birth"
|
|
|
|
frel = "Birth"
|
|
|
|
|
|
|
|
while 1:
|
|
|
|
matches = self.get_next()
|
2004-01-05 09:27:01 +05:30
|
|
|
if int(matches[0]) < level:
|
2002-10-20 19:55:16 +05:30
|
|
|
self.backup()
|
|
|
|
return (mrel,frel)
|
|
|
|
# FTW
|
|
|
|
elif matches[1] == "_FREL":
|
2004-10-23 09:26:48 +05:30
|
|
|
if matches[2].lower() != "natural":
|
|
|
|
frel = matches[2].capitalize()
|
2002-10-20 19:55:16 +05:30
|
|
|
# FTW
|
|
|
|
elif matches[1] == "_MREL":
|
2004-10-23 09:26:48 +05:30
|
|
|
if matches[2].lower() != "natural":
|
2002-10-20 19:55:16 +05:30
|
|
|
mrel = matches[2]
|
|
|
|
elif matches[1] == "ADOP":
|
|
|
|
mrel = "Adopted"
|
|
|
|
frel = "Adopted"
|
|
|
|
# Legacy
|
|
|
|
elif matches[1] == "_STAT":
|
|
|
|
mrel = matches[2]
|
|
|
|
frel = matches[2]
|
|
|
|
# Legacy _PREF
|
|
|
|
elif matches[1][0] == "_":
|
|
|
|
pass
|
|
|
|
else:
|
|
|
|
self.barf(level+1)
|
2004-10-06 09:12:54 +05:30
|
|
|
return None
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
def parse_family(self):
|
|
|
|
self.addr = None
|
|
|
|
note = ""
|
|
|
|
while 1:
|
2004-01-05 09:27:01 +05:30
|
|
|
matches = self.get_next()
|
2002-10-20 19:55:16 +05:30
|
|
|
|
2004-01-05 09:27:01 +05:30
|
|
|
if int(matches[0]) == 0:
|
2002-10-20 19:55:16 +05:30
|
|
|
self.backup()
|
|
|
|
return
|
2004-01-05 09:27:01 +05:30
|
|
|
elif matches[1] == "HUSB":
|
2004-10-06 09:12:54 +05:30
|
|
|
gid = matches[2]
|
2004-10-19 08:49:25 +05:30
|
|
|
handle = self.find_person_handle(self.map_gid(gid[1:-1]))
|
|
|
|
self.family.set_father_handle(handle)
|
2002-10-20 19:55:16 +05:30
|
|
|
self.ignore_sub_junk(2)
|
2004-01-05 09:27:01 +05:30
|
|
|
elif matches[1] == "WIFE":
|
2004-10-06 09:12:54 +05:30
|
|
|
gid = matches[2]
|
2004-10-19 08:49:25 +05:30
|
|
|
handle = self.find_person_handle(self.map_gid(gid[1:-1]))
|
|
|
|
self.family.set_mother_handle(handle)
|
2002-10-20 19:55:16 +05:30
|
|
|
self.ignore_sub_junk(2)
|
2004-01-05 09:27:01 +05:30
|
|
|
elif matches[1] == "SLGS":
|
2004-10-06 09:12:54 +05:30
|
|
|
lds_ord = RelLib.LdsOrd()
|
|
|
|
self.family.set_lds_sealing(lds_ord)
|
|
|
|
self.parse_ord(lds_ord,2)
|
2004-01-05 09:27:01 +05:30
|
|
|
elif matches[1] == "ADDR":
|
2003-01-10 11:09:40 +05:30
|
|
|
self.addr = RelLib.Address()
|
2004-02-14 11:10:30 +05:30
|
|
|
self.addr.set_street(matches[2] + self.parse_continue_data(1))
|
2002-10-20 19:55:16 +05:30
|
|
|
self.parse_address(self.addr,2)
|
2004-01-05 09:27:01 +05:30
|
|
|
elif matches[1] == "CHIL":
|
2002-10-20 19:55:16 +05:30
|
|
|
mrel,frel = self.parse_ftw_relations(2)
|
2004-10-06 09:12:54 +05:30
|
|
|
gid = matches[2]
|
|
|
|
child = self.find_or_create_person(self.map_gid(gid[1:-1]))
|
2004-07-28 07:59:07 +05:30
|
|
|
self.family.add_child_handle(child.get_handle())
|
2002-10-20 19:55:16 +05:30
|
|
|
|
2004-07-28 07:59:07 +05:30
|
|
|
for f in child.get_parent_family_handle_list():
|
|
|
|
if f[0] == self.family.get_handle():
|
2002-10-20 19:55:16 +05:30
|
|
|
break
|
|
|
|
else:
|
|
|
|
if (mrel=="Birth" or mrel=="") and (frel=="Birth" or frel==""):
|
2004-07-28 07:59:07 +05:30
|
|
|
child.set_main_parent_family_handle(self.family.get_handle())
|
2002-10-20 19:55:16 +05:30
|
|
|
else:
|
2004-07-28 07:59:07 +05:30
|
|
|
if child.get_main_parents_family_handle() == self.family:
|
|
|
|
child.set_main_parent_family_handle(None)
|
|
|
|
child.add_parent_family_handle(self.family.get_handle(),mrel,frel)
|
2004-05-20 10:11:55 +05:30
|
|
|
self.db.commit_person(child, self.trans)
|
2004-01-05 09:27:01 +05:30
|
|
|
elif matches[1] == "NCHI":
|
2003-01-10 11:09:40 +05:30
|
|
|
a = RelLib.Attribute()
|
2004-02-14 11:10:30 +05:30
|
|
|
a.set_type("Number of Children")
|
|
|
|
a.set_value(matches[2])
|
|
|
|
self.family.add_attribute(a)
|
2002-10-20 19:55:16 +05:30
|
|
|
elif matches[1] in ["RIN", "SUBM", "REFN","CHAN","SOUR"]:
|
|
|
|
self.ignore_sub_junk(2)
|
2004-01-05 09:27:01 +05:30
|
|
|
elif matches[1] == "OBJE":
|
2002-10-20 19:55:16 +05:30
|
|
|
if matches[2] and matches[2][0] == '@':
|
|
|
|
self.barf(2)
|
|
|
|
else:
|
|
|
|
self.parse_family_object(2)
|
|
|
|
elif matches[1] == "_COMM":
|
2004-10-23 09:26:48 +05:30
|
|
|
note = matches[2].strip() + self.parse_continue_data(1)
|
2004-02-14 11:10:30 +05:30
|
|
|
self.family.set_note(note)
|
2002-10-20 19:55:16 +05:30
|
|
|
self.ignore_sub_junk(2)
|
|
|
|
elif matches[1] == "NOTE":
|
|
|
|
note = self.parse_note(matches,self.family,1,note)
|
|
|
|
else:
|
2003-01-10 11:09:40 +05:30
|
|
|
event = RelLib.Event()
|
2002-10-20 19:55:16 +05:30
|
|
|
try:
|
2004-02-14 11:10:30 +05:30
|
|
|
event.set_name(ged2fam[matches[1]])
|
2002-10-20 19:55:16 +05:30
|
|
|
except:
|
2004-02-14 11:10:30 +05:30
|
|
|
event.set_name(matches[1])
|
|
|
|
if event.get_name() == "Marriage":
|
2004-12-25 00:16:34 +05:30
|
|
|
self.family.set_relationship(RelLib.Family.MARRIED)
|
2004-05-20 10:11:55 +05:30
|
|
|
self.db.add_event(event,self.trans)
|
2004-07-28 07:59:07 +05:30
|
|
|
self.family.add_event_handle(event.get_handle())
|
2004-01-05 09:27:01 +05:30
|
|
|
self.parse_family_event(event,2)
|
2004-05-20 10:11:55 +05:30
|
|
|
self.db.commit_event(event, self.trans)
|
2004-05-04 10:04:48 +05:30
|
|
|
del event
|
2002-10-20 19:55:16 +05:30
|
|
|
|
2003-07-21 05:39:12 +05:30
|
|
|
    def parse_note_base(self,matches,obj,level,old_note,task):
        """Common NOTE-record handling for parse_note/parse_comment.

        matches  -- current parsed GEDCOM line tuple
        obj      -- object receiving the note (person, event, ...)
        level    -- GEDCOM nesting level of the note body
        old_note -- text accumulated from earlier NOTE records
        task     -- setter called with the combined text (e.g. obj.set_note)

        Returns the accumulated note text.
        """
        note = old_note
        if matches[2] and matches[2][0] == "@":
            # "@N123@": reference to a shared, separately defined note.
            if self.nmap.has_key(matches[2]):
                # Already seen: attach the shared Note object.
                self.share_note.append(obj)
                obj.set_note_object(self.nmap[matches[2]])
            else:
                # Forward reference: create a placeholder Note that the
                # later definition will fill in.
                noteobj = RelLib.Note()
                self.nmap[matches[2]] = noteobj
                self.share_note.append(obj)
                obj.set_note_object(noteobj)
        else:
            # Inline note text, possibly spanning CONC/CONT lines.
            if old_note:
                note = "%s\n%s%s" % (old_note,matches[2],self.parse_continue_data(level))
            else:
                note = matches[2] + self.parse_continue_data(level)
            task(note)
            self.ignore_sub_junk(level+1)
        return note
|
|
|
|
|
2003-07-21 05:39:12 +05:30
|
|
|
def parse_note(self,matches,obj,level,old_note):
|
2004-04-06 08:50:04 +05:30
|
|
|
return self.parse_note_base(matches,obj,level,old_note,obj.set_note)
|
2003-07-21 05:39:12 +05:30
|
|
|
|
|
|
|
def parse_comment(self,matches,obj,level,old_note):
|
2004-04-06 08:50:04 +05:30
|
|
|
return self.parse_note_base(matches,obj,level,old_note,obj.set_comments)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
    def parse_individual(self):
        """Parse one INDI record into self.person.

        Dispatches on the level-1 tag: names, aliases, media, notes,
        gender, LDS ordinances, family links (FAMS/FAMC), addresses,
        vital events (BIRT/DEAT/ADOP), generic events and attributes.
        Returns when a level-0 record appears (pushed back for the
        caller).
        """
        name_cnt = 0          # number of NAME records seen so far
        note = ""             # accumulated note text
        while 1:
            matches = self.get_next()

            if int(matches[0]) == 0:
                self.backup()
                return
            elif matches[1] == "NAME":
                name = RelLib.Name()
                # Try the "surname, given" form first, then the regular
                # "given /surname/ suffix" form.
                m = snameRegexp.match(matches[2])
                if m:
                    n = m.groups()[0]
                    n2 = m.groups()[1]
                    names = (n2,'',n,'','')
                else:
                    try:
                        names = nameRegexp.match(matches[2]).groups()
                    except:
                        # Unparseable: treat the whole field as given name.
                        names = (matches[2],"","","","")
                if names[0]:
                    name.set_first_name(names[0].strip())
                if names[2]:
                    name.set_surname(names[2].strip())
                if names[4]:
                    name.set_suffix(names[4].strip())
                # First NAME becomes the primary name; the rest are
                # alternates.
                if name_cnt == 0:
                    self.person.set_primary_name(name)
                else:
                    self.person.add_alternate_name(name)
                name_cnt = name_cnt + 1
                self.parse_name(name,2)
            elif matches[1] in ["ALIA","_ALIA"]:
                # Alias records become alternate names (no .strip() here,
                # unlike the NAME branch).
                aka = RelLib.Name()
                try:
                    names = nameRegexp.match(matches[2]).groups()
                except:
                    names = (matches[2],"","","","")
                if names[0]:
                    aka.set_first_name(names[0])
                if names[2]:
                    aka.set_surname(names[2])
                if names[4]:
                    aka.set_suffix(names[4])
                self.person.add_alternate_name(aka)
            elif matches[1] == "OBJE":
                if matches[2] and matches[2][0] == '@':
                    # Media references by id are not supported.
                    self.barf(2)
                else:
                    self.parse_person_object(2)
            elif matches[1] in ["NOTE","_COMM"]:
                note = self.parse_note(matches,self.person,1,note)
            elif matches[1] == "SEX":
                if matches[2] == '':
                    self.person.set_gender(RelLib.Person.unknown)
                elif matches[2][0] == "M":
                    self.person.set_gender(RelLib.Person.male)
                else:
                    self.person.set_gender(RelLib.Person.female)
            elif matches[1] in [ "BAPL", "ENDL", "SLGC" ]:
                # LDS ordinances: baptism, endowment, sealing-to-parents.
                lds_ord = RelLib.LdsOrd()
                if matches[1] == "BAPL":
                    self.person.set_lds_baptism(lds_ord)
                elif matches[1] == "ENDL":
                    self.person.set_lds_endowment(lds_ord)
                else:
                    self.person.set_lds_sealing(lds_ord)
                self.parse_ord(lds_ord,2)
            elif matches[1] == "FAMS":
                # Family in which this person is a spouse.
                handle = self.find_family_handle(matches[2][1:-1])
                self.person.add_family_handle(handle)
                if note == "":
                    note = self.parse_optional_note(2)
                else:
                    note = "%s\n\n%s" % (note,self.parse_optional_note(2))
            elif matches[1] == "FAMC":
                # Family in which this person is a child.  NOTE(review):
                # this overwrites the accumulated 'note' with the FAMC
                # note — presumably intentional; confirm.
                ftype,note = self.parse_famc_type(2)
                handle = self.find_family_handle(matches[2][1:-1])

                # Skip if this family is already in the parent list.
                for f in self.person.get_parent_family_handle_list():
                    if f[0] == handle:
                        break
                else:
                    if ftype == "" or ftype == "Birth":
                        # Birth family: make it the main parent family
                        # unless one is already set.
                        if self.person.get_main_parents_family_handle() == None:
                            self.person.set_main_parent_family_handle(handle)
                        else:
                            self.person.add_parent_family_handle(handle,"Unknown","Unknown")
                    else:
                        # Non-birth relation: demote if currently main.
                        if self.person.get_main_parents_family_handle() == handle:
                            self.person.set_main_parent_family_handle(None)
                        self.person.add_parent_family_handle(handle,ftype,ftype)
            elif matches[1] == "RESI":
                addr = RelLib.Address()
                self.person.add_address(addr)
                self.parse_residence(addr,2)
            elif matches[1] == "ADDR":
                addr = RelLib.Address()
                addr.set_street(matches[2] + self.parse_continue_data(1))
                self.parse_address(addr,2)
                self.person.add_address(addr)
            elif matches[1] == "PHON":
                # A bare phone number becomes an address with a dummy
                # street so it is visible in the UI.
                addr = RelLib.Address()
                addr.set_street("Unknown")
                addr.set_phone(matches[2])
                self.person.add_address(addr)
            elif matches[1] == "BIRT":
                event = RelLib.Event()
                self.db.add_event(event, self.trans)
                # Second and later births are kept as alternates.
                if self.person.get_birth_handle():
                    event.set_name("Alternate Birth")
                    self.person.add_event_handle(event.get_handle())
                else:
                    event.set_name("Birth")
                    self.person.set_birth_handle(event.get_handle())
                self.parse_person_event(event,2)
                self.db.commit_event(event, self.trans)
            elif matches[1] == "ADOP":
                event = RelLib.Event()
                event.set_name("Adopted")
                self.person.add_event_handle(event.get_handle())
                self.parse_adopt_event(event,2)
                self.db.add_event(event, self.trans)
            elif matches[1] == "DEAT":
                event = RelLib.Event()
                self.db.add_event(event, self.trans)
                if self.person.get_death_handle():
                    event.set_name("Alternate Death")
                    self.person.add_event_handle(event.get_handle())
                else:
                    event.set_name("Death")
                    self.person.set_death_handle(event.get_handle())
                self.parse_person_event(event,2)
                self.db.commit_event(event, self.trans)
            elif matches[1] == "EVEN":
                # Generic event; may turn out to be an attribute once its
                # TYPE is known.
                event = RelLib.Event()
                if matches[2]:
                    event.set_description(matches[2])
                self.parse_person_event(event,2)
                n = event.get_name().strip()
                if n in self.attrs:
                    # Event name maps to an attribute type: store it as
                    # an attribute and drop the event.
                    attr = RelLib.Attribute()
                    attr.set_type(self.gedattr[n])
                    attr.set_value(event.get_description())
                    self.person.add_attribute(attr)
                else:
                    self.db.add_event(event, self.trans)
                    self.person.add_event_handle(event.get_handle())
            elif matches[1] == "SOUR":
                source_ref = self.handle_source(matches,2)
                self.person.add_source_reference(source_ref)
            elif matches[1] == "REFN":
                # Numeric user reference numbers are kept for later
                # remapping of gramps ids.
                if intRE.match(matches[2]):
                    try:
                        self.refn[self.person.get_handle()] = int(matches[2])
                    except:
                        pass
            elif matches[1] in ["AFN","RFN","_UID"]:
                attr = RelLib.Attribute()
                attr.set_type(matches[1])
                attr.set_value(matches[2])
                self.person.add_attribute(attr)
            elif matches[1] in ["CHAN","ASSO","ANCI","DESI","RIN"]:
                self.ignore_sub_junk(2)
            else:
                # Unknown tag: try the event map, the attribute map, and
                # finally the source-specific tag table.
                event = RelLib.Event()
                n = matches[1].strip()
                if ged2gramps.has_key(n):
                    event.set_name(ged2gramps[n])
                elif self.gedattr.has_key(n):
                    attr = RelLib.Attribute()
                    attr.set_type(self.gedattr[n])
                    attr.set_value(event.get_description())
                    self.person.add_attribute(attr)
                    self.parse_person_attr(attr,2)
                    continue
                else:
                    val = self.gedsource.tag2gramps(n)
                    if val:
                        event.set_name(val)
                    else:
                        event.set_name(n)

                self.parse_person_event(event,2)
                if matches[2]:
                    event.set_description(matches[2])
                self.db.add_event(event, self.trans)
                self.person.add_event_handle(event.get_handle())
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
def parse_optional_note(self,level):
|
|
|
|
note = ""
|
|
|
|
while 1:
|
|
|
|
matches = self.get_next()
|
|
|
|
|
2004-01-05 09:27:01 +05:30
|
|
|
if int(matches[0]) < level:
|
2002-10-20 19:55:16 +05:30
|
|
|
self.backup()
|
|
|
|
return note
|
|
|
|
elif matches[1] == "NOTE":
|
2004-10-23 09:26:48 +05:30
|
|
|
if not matches[2].strip() or matches[2] and matches[2][0] != "@":
|
2002-10-20 19:55:16 +05:30
|
|
|
note = matches[2] + self.parse_continue_data(level+1)
|
|
|
|
self.parse_note_data(level+1)
|
|
|
|
else:
|
|
|
|
self.ignore_sub_junk(level+1)
|
|
|
|
else:
|
2004-01-05 09:27:01 +05:30
|
|
|
self.barf(level+1)
|
2004-10-06 09:12:54 +05:30
|
|
|
return None
|
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
def parse_famc_type(self,level):
|
2004-10-06 09:12:54 +05:30
|
|
|
ftype = ""
|
2002-10-20 19:55:16 +05:30
|
|
|
note = ""
|
|
|
|
while 1:
|
|
|
|
matches = self.get_next()
|
|
|
|
|
2004-01-05 09:27:01 +05:30
|
|
|
if int(matches[0]) < level:
|
2002-10-20 19:55:16 +05:30
|
|
|
self.backup()
|
2004-10-23 09:26:48 +05:30
|
|
|
return (ftype.capitalize(),note)
|
2002-10-20 19:55:16 +05:30
|
|
|
elif matches[1] == "PEDI":
|
2004-10-06 09:12:54 +05:30
|
|
|
ftype = matches[2]
|
2003-07-21 05:39:12 +05:30
|
|
|
elif matches[1] == "SOUR":
|
2004-01-06 02:09:09 +05:30
|
|
|
source_ref = self.handle_source(matches,level+1)
|
2004-02-14 11:10:30 +05:30
|
|
|
self.person.get_primary_name().add_source_reference(source_ref)
|
2002-10-20 19:55:16 +05:30
|
|
|
elif matches[1] == "_PRIMARY":
|
2004-02-29 10:39:23 +05:30
|
|
|
pass #type = matches[1]
|
2002-10-20 19:55:16 +05:30
|
|
|
elif matches[1] == "NOTE":
|
2004-10-23 09:26:48 +05:30
|
|
|
if not matches[2].strip() or matches[2] and matches[2][0] != "@":
|
2002-10-20 19:55:16 +05:30
|
|
|
note = matches[2] + self.parse_continue_data(level+1)
|
|
|
|
self.parse_note_data(level+1)
|
|
|
|
else:
|
|
|
|
self.ignore_sub_junk(level+1)
|
|
|
|
else:
|
2004-01-05 09:27:01 +05:30
|
|
|
self.barf(level+1)
|
2004-10-06 09:12:54 +05:30
|
|
|
return None
|
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
    def parse_person_object(self,level):
        """Parse an OBJE (media) substructure attached to a person.

        FORM "url" entries become RelLib.Url records; file entries are
        resolved on disk and attached as media references (reusing a
        previously imported MediaObject for the same path).
        """
        form = ""
        filename = ""
        title = "no title"
        note = ""            # NOTE(review): collected but never used — confirm
        while 1:
            matches = self.get_next()
            # Tag checks come before the level check, so FORM/TITL/FILE/
            # NOTE at any level are consumed here.
            if matches[1] == "FORM":
                form = string.lower(matches[2])
            elif matches[1] == "TITL":
                title = matches[2]
            elif matches[1] == "FILE":
                filename = matches[2]
            elif matches[1] == "NOTE":
                note = matches[2] + self.parse_continue_data(level+1)
            elif matches[1][0] == "_":
                # Vendor extensions are skipped.
                self.ignore_sub_junk(level+1)
            elif int(matches[0]) < level:
                self.backup()
                break
            else:
                self.barf(level+1)

        if form == "url":
            url = RelLib.Url()
            url.set_path(filename)
            url.set_description(title)
            self.person.add_url(url)
        else:
            # Locate the media file relative to the import directory.
            (ok,path) = self.find_file(filename,self.dir_path)
            if not ok:
                self.warn(_("Warning: could not import %s") % filename + "\n")
                self.warn(_("\tThe following paths were tried:\n\t\t"))
                self.warn(string.join(path,"\n\t\t"))
                self.warn('\n')
            else:
                # Reuse an already-imported MediaObject for this path.
                photo_handle = self.media_map.get(path)
                if photo_handle == None:
                    photo = RelLib.MediaObject()
                    photo.set_path(path)
                    photo.set_description(title)
                    photo.set_mime_type(Utils.get_mime_type(path))
                    self.db.add_object(photo, self.trans)
                    self.media_map[path] = photo.get_handle()
                else:
                    photo = self.db.get_object_from_handle(photo_handle)
                oref = RelLib.MediaRef()
                oref.set_reference_handle(photo.get_handle())
                self.person.add_media_reference(oref)
                self.db.commit_person(self.person, self.trans)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
def parse_family_object(self,level):
|
|
|
|
form = ""
|
2004-10-06 09:12:54 +05:30
|
|
|
filename = ""
|
2002-10-20 19:55:16 +05:30
|
|
|
title = ""
|
|
|
|
note = ""
|
|
|
|
while 1:
|
|
|
|
matches = self.get_next()
|
|
|
|
if matches[1] == "FORM":
|
|
|
|
form = string.lower(matches[2])
|
|
|
|
elif matches[1] == "TITL":
|
|
|
|
title = matches[2]
|
|
|
|
elif matches[1] == "FILE":
|
2004-10-06 09:12:54 +05:30
|
|
|
filename = matches[2]
|
2002-10-20 19:55:16 +05:30
|
|
|
elif matches[1] == "NOTE":
|
|
|
|
note = matches[2] + self.parse_continue_data(level+1)
|
2004-01-05 09:27:01 +05:30
|
|
|
elif int(matches[0]) < level:
|
2002-10-20 19:55:16 +05:30
|
|
|
self.backup()
|
|
|
|
break
|
|
|
|
else:
|
2004-01-05 09:27:01 +05:30
|
|
|
self.barf(level+1)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
if form:
|
2004-10-06 09:12:54 +05:30
|
|
|
(ok,path) = self.find_file(filename,self.dir_path)
|
2003-03-31 07:03:40 +05:30
|
|
|
if not ok:
|
2004-10-06 09:12:54 +05:30
|
|
|
self.warn(_("Warning: could not import %s") % filename + "\n")
|
2003-03-31 07:03:40 +05:30
|
|
|
self.warn(_("\tThe following paths were tried:\n\t\t"))
|
|
|
|
self.warn(string.join(path,"\n\t\t"))
|
|
|
|
self.warn('\n')
|
2002-10-20 19:55:16 +05:30
|
|
|
else:
|
2004-02-21 11:41:59 +05:30
|
|
|
photo = RelLib.MediaObject()
|
2004-02-14 11:10:30 +05:30
|
|
|
photo.set_path(path)
|
|
|
|
photo.set_description(title)
|
|
|
|
photo.set_mime_type(Utils.get_mime_type(path))
|
2004-05-20 10:11:55 +05:30
|
|
|
self.db.add_object(photo, self.trans)
|
2004-02-21 11:41:59 +05:30
|
|
|
oref = RelLib.MediaRef()
|
2004-07-28 07:59:07 +05:30
|
|
|
oref.set_reference_handle(photo.get_handle())
|
2004-02-22 10:27:06 +05:30
|
|
|
self.family.add_media_reference(photo)
|
2004-05-20 10:11:55 +05:30
|
|
|
self.db.commit_family(self.family, self.trans)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
    def parse_residence(self,address,level):
        """Parse a RESI record's substructure into *address*.

        DATE, ADDR/PLAC (street), PHON, SOUR and NOTE children are
        stored; a list of known-but-ignored tags is skipped; any record
        above this level ends the scan and is pushed back.
        """
        note = ""
        while 1:
            matches = self.get_next()

            if int(matches[0]) < level:
                self.backup()
                return
            elif matches[1] == "DATE":
                address.set_date_object(self.extract_date(matches[2]))
            elif matches[1] == "ADDR":
                # Street may continue across CONC/CONT lines.
                address.set_street(matches[2] + self.parse_continue_data(level+1))
                self.parse_address(address,level+1)
            elif matches[1] in ["AGE","AGNC","CAUS","STAT","TEMP","OBJE","TYPE","_DATE2"]:
                self.ignore_sub_junk(level+1)
            elif matches[1] == "SOUR":
                address.add_source_reference(self.handle_source(matches,level+1))
            elif matches[1] == "PLAC":
                # A place name is treated as the street line here.
                address.set_street(matches[2])
                self.parse_address(address,level+1)
            elif matches[1] == "PHON":
                # Bare phone number: use a dummy street so it shows up.
                address.set_street("Unknown")
                address.set_phone(matches[2])
            elif matches[1] == "NOTE":
                note = self.parse_note(matches,address,level+1,note)
            else:
                self.barf(level+1)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
    def parse_address(self,address,level):
        """Parse ADDR substructure lines into *address*.

        ADDR/ADR1/ADR2 lines are accumulated into the street field
        (comma-joined after the first); CITY/STAE/POST/CTRY map to their
        fields.  A PHON line that appears *above* this level is still
        captured before the scan ends.
        """
        first = 0      # 0 until the first street line has been stored
        note = ""
        while 1:
            matches = self.get_next()
            if int(matches[0]) < level:
                if matches[1] == "PHON":
                    # Phone at the parent level still belongs to this
                    # address; consume it and keep scanning.
                    address.set_phone(matches[2])
                else:
                    self.backup()
                    return
            elif matches[1] in [ "ADDR", "ADR1", "ADR2" ]:
                val = address.get_street()
                data = self.parse_continue_data(level+1)
                if first == 0:
                    val = "%s %s" % (matches[2],data)
                    first = 1
                else:
                    val = "%s,%s %s" % (val,matches[2],data)
                address.set_street(val)
            elif matches[1] == "CITY":
                address.set_city(matches[2])
            elif matches[1] == "STAE":
                address.set_state(matches[2])
            elif matches[1] == "POST":
                address.set_postal_code(matches[2])
            elif matches[1] == "CTRY":
                address.set_country(matches[2])
            else:
                self.barf(level+1)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
2004-10-06 09:12:54 +05:30
|
|
|
    def parse_ord(self,lds_ord,level):
        """Parse an LDS ordinance substructure into *lds_ord*.

        Handles TEMP (temple code), DATE, FAMC (sealing family), PLAC,
        SOUR, NOTE and STAT children; the scan ends when a record above
        this level appears (pushed back for the caller).
        """
        note = ""
        while 1:
            matches = self.get_next()
            if int(matches[0]) < level:
                self.backup()
                break
            elif matches[1] == "TEMP":
                value = extract_temple(matches)
                if value:
                    lds_ord.set_temple(value)
            elif matches[1] == "DATE":
                lds_ord.set_date_object(self.extract_date(matches[2]))
            elif matches[1] == "FAMC":
                # "@F123@" -> strip the @-delimiters to get the id.
                lds_ord.set_family_handle(self.find_family_handle(matches[2][1:-1]))
            elif matches[1] == "PLAC":
                try:
                    place = self.find_or_create_place(matches[2])
                    place.set_title(matches[2])
                    place_handle = place.get_handle()
                    lds_ord.set_place_handle(place_handle)
                    self.ignore_sub_junk(level+1)
                except NameError:
                    pass
            elif matches[1] == "SOUR":
                lds_ord.add_source_reference(self.handle_source(matches,level+1))
            elif matches[1] == "NOTE":
                note = self.parse_note(matches,lds_ord,level+1,note)
            elif matches[1] == "STAT":
                # Only known status strings are stored; unknown values
                # are silently ignored.
                if const.lds_status.has_key(matches[2]):
                    lds_ord.set_status(const.lds_status[matches[2]])
            else:
                self.barf(level+1)
|
|
|
|
|
|
|
|
def parse_person_event(self,event,level):
|
|
|
|
note = ""
|
|
|
|
while 1:
|
|
|
|
matches = self.get_next()
|
|
|
|
if int(matches[0]) < level:
|
2003-01-19 11:55:20 +05:30
|
|
|
if note:
|
2004-02-14 11:10:30 +05:30
|
|
|
event.set_note(note)
|
2002-10-20 19:55:16 +05:30
|
|
|
self.backup()
|
|
|
|
break
|
|
|
|
elif matches[1] == "TYPE":
|
2004-02-14 11:10:30 +05:30
|
|
|
if event.get_name() == "":
|
2002-10-20 19:55:16 +05:30
|
|
|
if ged2gramps.has_key(matches[2]):
|
|
|
|
name = ged2gramps[matches[2]]
|
|
|
|
else:
|
|
|
|
val = self.gedsource.tag2gramps(matches[2])
|
|
|
|
if val:
|
|
|
|
name = val
|
|
|
|
else:
|
|
|
|
name = matches[2]
|
2004-02-14 11:10:30 +05:30
|
|
|
event.set_name(name)
|
2004-09-12 04:54:08 +05:30
|
|
|
else:
|
|
|
|
event.set_description(matches[2])
|
2002-10-20 19:55:16 +05:30
|
|
|
elif matches[1] == "DATE":
|
2004-02-14 11:10:30 +05:30
|
|
|
event.set_date_object(self.extract_date(matches[2]))
|
2002-10-20 19:55:16 +05:30
|
|
|
elif matches[1] == "SOUR":
|
2004-02-14 11:10:30 +05:30
|
|
|
event.add_source_reference(self.handle_source(matches,level+1))
|
2002-10-20 19:55:16 +05:30
|
|
|
elif matches[1] == "PLAC":
|
|
|
|
val = matches[2]
|
2005-01-01 09:57:15 +05:30
|
|
|
n = event.get_name().strip()
|
2002-10-20 19:55:16 +05:30
|
|
|
if self.is_ftw and n in ["Occupation","Degree","SSN"]:
|
2004-02-14 11:10:30 +05:30
|
|
|
event.set_description(val)
|
2002-10-20 19:55:16 +05:30
|
|
|
self.ignore_sub_junk(level+1)
|
|
|
|
else:
|
2004-08-11 09:12:38 +05:30
|
|
|
place = self.find_or_create_place(val)
|
|
|
|
place_handle = place.get_handle()
|
|
|
|
place.set_title(matches[2])
|
2004-07-28 07:59:07 +05:30
|
|
|
event.set_place_handle(place_handle)
|
2002-10-20 19:55:16 +05:30
|
|
|
self.ignore_sub_junk(level+1)
|
|
|
|
elif matches[1] == "CAUS":
|
|
|
|
info = matches[2] + self.parse_continue_data(level+1)
|
2004-02-14 11:10:30 +05:30
|
|
|
event.set_cause(info)
|
2002-10-20 19:55:16 +05:30
|
|
|
self.parse_cause(event,level+1)
|
|
|
|
elif matches[1] == "NOTE" or matches[1] == 'OFFI':
|
|
|
|
info = matches[2] + self.parse_continue_data(level+1)
|
|
|
|
if note == "":
|
|
|
|
note = info
|
|
|
|
else:
|
|
|
|
note = "\n%s" % info
|
2004-01-05 09:27:01 +05:30
|
|
|
elif matches[1] == "CONC":
|
2004-02-14 11:10:30 +05:30
|
|
|
d = event.get_description()
|
2002-10-20 19:55:16 +05:30
|
|
|
if self.broken_conc:
|
2004-02-14 11:10:30 +05:30
|
|
|
event.set_description("%s %s" % (d, matches[2]))
|
2002-10-20 19:55:16 +05:30
|
|
|
else:
|
2004-02-14 11:10:30 +05:30
|
|
|
event.set_description("%s%s" % (d, matches[2]))
|
2004-01-05 09:27:01 +05:30
|
|
|
elif matches[1] == "CONT":
|
2004-02-14 11:10:30 +05:30
|
|
|
event.set_description("%s\n%s" % (event.get_description(),matches[2]))
|
2003-07-21 05:39:12 +05:30
|
|
|
elif matches[1] in ["RELI", "TIME","ADDR","AGE","AGNC","STAT","TEMP","OBJE","_DATE2"]:
|
2002-10-20 19:55:16 +05:30
|
|
|
self.ignore_sub_junk(level+1)
|
|
|
|
else:
|
2004-01-05 09:27:01 +05:30
|
|
|
self.barf(level+1)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
def parse_adopt_event(self,event,level):
    """Parse the sub-records of an ADOP event into *event*.

    Consumes records until one appears above *level*, then backs up.
    Side effects: may update self.person's parent-family links (FAMC)
    and create/lookup Place and Source objects.
    """
    note = ""
    while 1:
        matches = self.get_next()
        if int(matches[0]) < level:
            # End of this event's sub-records: flush the accumulated
            # note and push the record back for the caller.
            if note != "":
                event.set_note(note)
            self.backup()
            break
        elif matches[1] == "DATE":
            event.set_date_object(self.extract_date(matches[2]))
        elif matches[1] in ["TIME","ADDR","AGE","AGNC","STAT","TEMP","OBJE"]:
            self.ignore_sub_junk(level+1)
        elif matches[1] == "SOUR":
            event.add_source_reference(self.handle_source(matches,level+1))
        elif matches[1] == "FAMC":
            # Adoption into a family: strip the surrounding '@' from the
            # xref and re-link the person as an adopted child.
            handle = self.find_family_handle(matches[2][1:-1])
            mrel,frel = self.parse_adopt_famc(level+1)
            if self.person.get_main_parents_family_handle() == handle:
                self.person.set_main_parent_family_handle(None)
            self.person.add_parent_family_handle(handle,mrel,frel)
        elif matches[1] == "PLAC":
            val = matches[2]
            place = self.find_or_create_place(val)
            place_handle = place.get_handle()
            place.set_title(matches[2])
            event.set_place_handle(place_handle)
            self.ignore_sub_junk(level+1)
        elif matches[1] == "TYPE":
            # eventually do something intelligent here
            pass
        elif matches[1] == "CAUS":
            info = matches[2] + self.parse_continue_data(level+1)
            event.set_cause(info)
            self.parse_cause(event,level+1)
        elif matches[1] == "NOTE":
            info = matches[2] + self.parse_continue_data(level+1)
            if note == "":
                note = info
            else:
                # BUGFIX: previously "\n%s" % info, which discarded any
                # earlier note text instead of appending to it.
                note = "%s\n%s" % (note,info)
        elif matches[1] == "CONC":
            # Continuation on the same line; broken_conc producers need a
            # joining space, conforming ones do not.
            d = event.get_description()
            if self.broken_conc:
                event.set_description("%s %s" % (d, matches[2]))
            else:
                event.set_description("%s%s" % (d, matches[2]))
        elif matches[1] == "CONT":
            event.set_description("%s\n%s" % (event.get_description(),matches[2]))
        elif matches[1] in ["RELI", "TIME","ADDR","AGE","AGNC","STAT","TEMP","OBJE","_DATE2"]:
            self.ignore_sub_junk(level+1)
        else:
            self.barf(level+1)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
def parse_adopt_famc(self,level):
    """Parse the FAMC sub-structure of an adoption event.

    Returns a (mother_relation, father_relation) tuple.  Both default
    to "Adopted"; a GEDCOM "ADOP HUSB" record means only the husband
    adopted (so the mother relation reverts to "Birth") and vice versa.
    """
    mother_rel = "Adopted"
    father_rel = "Adopted"
    while 1:
        rec = self.get_next()
        if int(rec[0]) < level:
            self.backup()
            return (mother_rel, father_rel)
        if rec[1] == "ADOP":
            who = rec[2]
            if who == "HUSB":
                mother_rel = "Birth"
            elif who == "WIFE":
                father_rel = "Birth"
        else:
            self.barf(level+1)
    # Unreachable; kept for lint symmetry with sibling parsers.
    return None
|
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
def parse_person_attr(self,attr,level):
    """Parse the sub-records of a person attribute into *attr*.

    Accumulates NOTE/DATE text into a local note that is attached to
    the attribute once the record group ends.
    """
    note = ""
    while 1:
        matches = self.get_next()
        if int(matches[0]) < level:
            self.backup()
            break
        elif matches[1] == "TYPE":
            if attr.get_type() == "":
                # Map the GEDCOM tag to a GRAMPS name: builtin table
                # first, then the per-source map, else keep the raw tag.
                if ged2gramps.has_key(matches[2]):
                    name = ged2gramps[matches[2]]
                else:
                    val = self.gedsource.tag2gramps(matches[2])
                    if val:
                        name = val
                    else:
                        name = matches[2]
                attr.set_name(name)
        elif matches[1] in ["CAUS", "DATE","TIME","ADDR","AGE","AGNC","STAT","TEMP","OBJE"]:
            self.ignore_sub_junk(level+1)
        elif matches[1] == "SOUR":
            attr.add_source_reference(self.handle_source(matches,level+1))
        elif matches[1] == "PLAC":
            val = matches[2]
            if attr.get_value() == "":
                attr.set_value(val)
            self.ignore_sub_junk(level+1)
        elif matches[1] == "DATE":
            # NOTE(review): unreachable — "DATE" is already consumed by
            # the ignore list above; kept verbatim pending a decision on
            # which behavior was intended.
            note = "%s\n\n" % ("Date : %s" % matches[2])
        elif matches[1] == "NOTE":
            info = matches[2] + self.parse_continue_data(level+1)
            if note == "":
                note = info
            else:
                note = "%s\n\n%s" % (note,info)
        elif matches[1] == "CONC":
            if self.broken_conc:
                attr.set_value("%s %s" % (attr.get_value(), matches[2]))
            else:
                # BUGFIX: both branches previously inserted a space; a
                # conforming CONC must concatenate with NO space, as in
                # every other CONC handler in this file.
                attr.set_value("%s%s" % (attr.get_value(), matches[2]))
        elif matches[1] == "CONT":
            attr.set_value("%s\n%s" % (attr.get_value(),matches[2]))
        else:
            self.barf(level+1)
    if note != "":
        attr.set_note(note)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
def parse_family_event(self,event,level):
    """Parse the sub-records of a family event into *event*.

    Stops (and backs up) at the first record above *level*; any
    accumulated note text is attached to the event at that point.
    """
    note = ""
    while 1:
        rec = self.get_next()
        tag = rec[1]
        if int(rec[0]) < level:
            if note:
                event.set_note(note)
            self.backup()
            break
        if tag == "TYPE":
            current = event.get_name()
            if current == "" or current == 'EVEN':
                # Translate through the FTW family-event schema when
                # possible, otherwise keep the raw value.
                try:
                    event.set_name(ged2fam[rec[2]])
                except:
                    event.set_name(rec[2])
            else:
                note = 'Status = %s\n' % rec[2]
        elif tag == "DATE":
            event.set_date_object(self.extract_date(rec[2]))
        elif tag == "CAUS":
            cause_text = rec[2] + self.parse_continue_data(level+1)
            event.set_cause(cause_text)
            self.parse_cause(event,level+1)
        elif tag in ("TIME","AGE","AGNC","ADDR","STAT",
                     "TEMP","HUSB","WIFE","OBJE","_CHUR"):
            self.ignore_sub_junk(level+1)
        elif tag == "SOUR":
            event.add_source_reference(self.handle_source(rec,level+1))
        elif tag == "PLAC":
            place = self.find_or_create_place(rec[2])
            place.set_title(rec[2])
            event.set_place_handle(place.get_handle())
            self.ignore_sub_junk(level+1)
        elif tag == 'OFFI':
            if note == "":
                note = rec[2]
            else:
                note = note + "\n" + rec[2]
        elif tag == "NOTE":
            note = self.parse_note(rec,event,level+1,note)
        else:
            self.barf(level+1)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
def parse_source_reference(self,source,level):
    """Reads the data associated with a SOUR reference."""
    note = ""
    while 1:
        rec = self.get_next()
        tag = rec[1]
        if int(rec[0]) < level:
            self.backup()
            return
        if tag == "PAGE":
            source.set_page(rec[2] + self.parse_continue_data(level+1))
        elif tag == "DATA":
            date, text = self.parse_source_data(level+1)
            source.set_date(self.dp.parse(date))
            source.set_text(text)
        elif tag in ("OBJE","REFN","TEXT"):
            self.ignore_sub_junk(level+1)
        elif tag == "QUAY":
            quality = int(rec[2])
            # GEDCOM QUAY 0-3 vs GRAMPS confidence 0-4: values above 1
            # shift up by one to skip the extra GRAMPS level.
            if quality > 1:
                source.set_confidence_level(quality + 1)
            else:
                source.set_confidence_level(quality)
        elif tag == "NOTE":
            note = self.parse_comment(rec,source,level+1,note)
        else:
            self.barf(level+1)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
def parse_source_data(self,level):
    """Parses the source data, returning a (date, text) tuple."""
    date_text = ""
    body_text = ""
    while 1:
        rec = self.get_next()
        if int(rec[0]) < level:
            self.backup()
            return (date_text, body_text)
        if rec[1] == "DATE":
            date_text = rec[2]
        elif rec[1] == "TEXT":
            body_text = rec[2] + self.parse_continue_data(level+1)
        else:
            self.barf(level+1)
    # Unreachable; kept for lint symmetry with sibling parsers.
    return None
|
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
def parse_name(self,name,level):
    """Parse the person's name information into *name*.

    Also attaches alternate names (ALIA/_AKA/_MARNM) and a nickname to
    self.person as side effects.
    """
    note = ""
    while 1:
        matches = self.get_next()
        if int(matches[0]) < level:
            self.backup()
            return
        elif matches[1] in ["ALIA","_ALIA"]:
            # An alias record: split "Given /Surname/ Suffix" with the
            # name regexp, falling back to treating it all as given name.
            aka = RelLib.Name()
            try:
                names = nameRegexp.match(matches[2]).groups()
            except:
                names = (matches[2],"","","","")
            if names[0]:
                aka.set_first_name(names[0])
            if names[2]:
                aka.set_surname(names[2])
            if names[4]:
                aka.set_suffix(names[4])
            self.person.add_alternate_name(aka)
        elif matches[1] == "NPFX":
            name.set_title(matches[2])
        elif matches[1] == "GIVN":
            name.set_first_name(matches[2])
        elif matches[1] == "SPFX":
            name.set_surname_prefix(matches[2])
        elif matches[1] == "SURN":
            name.set_surname(matches[2])
        elif matches[1] == "_MARNM":
            self.parse_marnm(self.person,matches[2].strip())
        elif matches[1] == "TITL":
            name.set_suffix(matches[2])
        elif matches[1] == "NSFX":
            if name.get_suffix() == "":
                name.set_suffix(matches[2])
        elif matches[1] == "NICK":
            self.person.set_nick_name(matches[2])
        elif matches[1] == "_AKA":
            lname = string.split(matches[2])
            l = len(lname)
            if l == 1:
                self.person.set_nick_name(matches[2])
            else:
                # BUGFIX: this branch previously rebound the *name*
                # parameter, so every later GIVN/SURN/... tag modified
                # the alias instead of the primary name.  Use a fresh
                # local for the alternate name.
                aka_name = RelLib.Name()
                aka_name.set_surname(lname[-1])
                aka_name.set_first_name(string.join(lname[0:l-1]))
                self.person.add_alternate_name(aka_name)
        elif matches[1] == "SOUR":
            name.add_source_reference(self.handle_source(matches,level+1))
        elif matches[1][0:4] == "NOTE":
            note = self.parse_note(matches,name,level+1,note)
        else:
            self.barf(level+1)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
2003-07-21 05:39:12 +05:30
|
|
|
def parse_marnm(self,person,text):
    """Attach a 'Married Name' alternate name built from *text*.

    A single word is treated as a surname grafted onto the primary
    name; multiple words split into given name(s) + final surname.
    """
    words = text.split()
    if len(words) == 1:
        married = RelLib.Name(person.get_primary_name())
        married.set_surname(words[0])
        married.set_type('Married Name')
        person.add_alternate_name(married)
    elif len(words) > 1:
        married = RelLib.Name()
        married.set_surname(words[-1])
        married.set_first_name(string.join(words[0:-1],' '))
        married.set_type('Married Name')
        person.add_alternate_name(married)
|
2003-07-21 05:39:12 +05:30
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
def parse_header_head(self):
    """Validates that this is a valid GEDCOM file.

    Raises Errors.GedcomError if the first line is not a HEAD record.
    """
    first = string.replace(self.f.readline(), '\r', '')
    if not headRE.search(first):
        raise Errors.GedcomError("%s is not a GEDCOM file" % self.filename)
    self.index = self.index + 1
|
|
|
|
|
|
|
|
def parse_header_source(self):
    """Parse the HEAD.SOUR structure of the GEDCOM header.

    Detects the generating application (to enable quirk handling such
    as broken CONC or FTW mode), the character encoding, and records
    file metadata on self.def_src.  Stops at the next level-0 record.
    """
    genby = ""
    while 1:
        matches = self.get_next()

        if int(matches[0]) == 0:
            self.backup()
            return
        elif matches[1] == "SOUR":
            # Generating application tag: select the per-source quirk
            # table and remember the name for the later VERS record.
            if self.window and self.created_obj.get_text():
                self.update(self.created_obj,matches[2])
            self.gedsource = self.gedmap.get_from_source_tag(matches[2])
            self.broken_conc = self.gedsource.get_conc()
            if matches[2] == "FTW":
                self.is_ftw = 1
            genby = matches[2]
        elif matches[1] == "NAME" and self.window:
            # NOTE(review): without a window, NAME/VERS fall through to
            # barf() below — presumably harmless warning; confirm.
            self.update(self.created_obj,matches[2])
        elif matches[1] == "VERS" and self.window:
            self.def_src.set_data_item('Generated by',"%s %s" %
                                       (genby,matches[2]))
            self.update(self.version_obj,matches[2])
            pass
        elif matches[1] == "FILE":
            # Strip any DOS-style path components as well as POSIX ones.
            filename = os.path.basename(matches[2]).split('\\')[-1]
            self.def_src.set_title(_("Import from %s") % unicode(filename))
        elif matches[1] == "COPR":
            self.def_src.set_publication_info(unicode(matches[2]))
        elif matches[1] in ["CORP","DATA","SUBM","SUBN","LANG"]:
            self.ignore_sub_junk(2)
        elif matches[1] == "DEST":
            # A GRAMPS-generated file names its intended destination
            # program here; use that program's quirk table instead.
            if genby == "GRAMPS":
                self.gedsource = self.gedmap.get_from_source_tag(matches[2])
                self.broken_conc = self.gedsource.get_conc()
        elif matches[1] == "CHAR" and not self.override:
            # Choose the byte->utf8 converter from the declared charset.
            if matches[2] == "UNICODE" or matches[2] == "UTF-8" or matches[2] == "UTF8":
                self.cnv = nocnv
            elif matches[2] == "ANSEL":
                self.cnv = ansel_to_utf8
            else:
                self.cnv = latin_utf8.latin_to_utf8
            self.ignore_sub_junk(2)
            if self.window:
                self.update(self.encoding_obj,matches[2])
            else:
                # NOTE(review): reached only when no progress window
                # exists; pairing of this else reconstructed from line
                # order — confirm against upstream history.
                self.update(self.encoding_obj,_("Overridden"))
        elif matches[1] == "GEDC":
            self.ignore_sub_junk(2)
        elif matches[1] == "_SCHEMA":
            # Family Tree Maker custom tag schema.
            self.parse_ftw_schema(2)
        elif matches[1] == "PLAC":
            self.parse_place_form(2)
        elif matches[1] == "DATE":
            date = self.parse_date(2)
            date.date = matches[2]
            self.def_src.set_data_item('Creation date',unicode(matches[2]))
        elif matches[1] == "NOTE":
            note = matches[2] + self.parse_continue_data(2)
        elif matches[1][0] == "_":
            # Unknown vendor extension: skip quietly.
            self.ignore_sub_junk(2)
        else:
            self.barf(2)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
def parse_ftw_schema(self,level):
    """Parse a Family Tree Maker _SCHEMA block (INDI and FAM maps)."""
    while 1:
        rec = self.get_next()

        if int(rec[0]) < level:
            self.backup()
            return
        if rec[1] == "INDI":
            self.parse_ftw_indi_schema(level+1)
        elif rec[1] == "FAM":
            self.parse_ftw_fam_schema(level+1)
        else:
            self.barf(2)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
def parse_ftw_indi_schema(self,level):
    """Record FTW custom individual tags in the ged2gramps map."""
    while 1:
        rec = self.get_next()

        if int(rec[0]) < level:
            self.backup()
            return
        # Every sub-record names a custom tag; its LABL becomes the
        # GRAMPS-visible label.
        ged2gramps[rec[1]] = self.parse_label(level+1)
|
|
|
|
|
|
|
|
def parse_label(self,level):
    """Return the LABL value of the current schema entry (or None)."""
    while 1:
        rec = self.get_next()

        if int(rec[0]) < level:
            self.backup()
            return
        if rec[1] == "LABL":
            return rec[2]
        self.barf(2)
    # Unreachable; kept for lint symmetry with sibling parsers.
    return None
|
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
def parse_ftw_fam_schema(self,level):
    """Record FTW custom family tags in the ged2fam map."""
    while 1:
        rec = self.get_next()

        if int(rec[0]) < level:
            self.backup()
            return
        # Map each custom family tag to its human-readable label.
        ged2fam[rec[1]] = self.parse_label(level+1)
    # Unreachable; kept for lint symmetry with sibling parsers.
    return None
|
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
def ignore_sub_junk(self,level):
    """Skip every record at or below *level*, backing up at the end."""
    while 1:
        rec = self.get_next()
        if int(rec[0]) < level:
            self.backup()
            return
    # Unreachable; kept for lint symmetry with sibling parsers.
    return
|
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
def ignore_change_data(self,level):
    """Discard a CHAN record if one is next; otherwise back up."""
    rec = self.get_next()
    if rec[1] != "CHAN":
        self.backup()
    else:
        self.ignore_sub_junk(level+1)
|
|
|
|
|
|
|
|
def parse_place_form(self,level):
    """Consume a PLAC.FORM header structure, complaining about
    anything that is not a FORM record."""
    while 1:
        rec = self.get_next()

        if int(rec[0]) < level:
            self.backup()
            return
        if rec[1] != "FORM":
            self.barf(level+1)
|
2004-10-06 09:12:54 +05:30
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
def parse_continue_data(self,level):
    """Collect CONC/CONT continuation lines into a single string.

    CONC concatenates (with a space only for broken producers); CONT
    starts a new line.  Any other record ends the run.
    """
    text = ""
    while 1:
        rec = self.get_next()

        if int(rec[0]) < level:
            self.backup()
            return text
        if rec[1] == "CONC":
            if self.broken_conc:
                text = "%s %s" % (text, rec[2])
            else:
                text = "%s%s" % (text, rec[2])
        elif rec[1] == "CONT":
            text = "%s\n%s" % (text, rec[2])
        else:
            self.backup()
            return text
    # Unreachable; kept for lint symmetry with sibling parsers.
    return None
|
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
def parse_note_continue(self,level):
    """Collect NOTE/CONC/CONT records into one multi-line string.

    Like parse_continue_data, but a nested NOTE starts a new line and
    pulls in its own continuation data.
    """
    text = ""
    while 1:
        rec = self.get_next()

        if int(rec[0]) < level:
            self.backup()
            return text
        if rec[1] == "NOTE":
            text = "%s\n%s%s" % (text, rec[2], self.parse_continue_data(level+1))
        elif rec[1] == "CONC":
            if self.broken_conc:
                text = "%s %s" % (text, rec[2])
            else:
                text = "%s%s" % (text, rec[2])
        elif rec[1] == "CONT":
            text = "%s\n%s" % (text, rec[2])
        else:
            self.backup()
            return text
    # Unreachable; kept for lint symmetry with sibling parsers.
    return None
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
def parse_date(self,level):
    """Return a DateStruct filled from the current DATE sub-records
    (only TIME is recognized)."""
    result = DateStruct()
    while 1:
        rec = self.get_next()

        if int(rec[0]) < level:
            self.backup()
            return result
        if rec[1] == "TIME":
            result.time = rec[2]
        else:
            self.barf(level+1)
    # Unreachable; kept for lint symmetry with sibling parsers.
    return None
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
def extract_date(self,text):
    """Convert a GEDCOM date string into a Date.Date object.

    Handles BET/AND ranges, FROM/TO spans, and escaped calendar tags
    (FRENCH R, JULIAN, HEBREW); anything else is handed to the date
    parser directly.
    """
    # Single GEDCOM-escape -> GRAMPS calendar table; anything else is
    # Gregorian.  Previously this mapping was hand-duplicated in three
    # places below.
    calendar_map = {
        "FRENCH R": Date.CAL_FRENCH,
        "JULIAN":   Date.CAL_JULIAN,
        "HEBREW":   Date.CAL_HEBREW,
    }
    dateobj = Date.Date()
    try:
        match = rangeRegexp.match(text)
        if match:
            (cal1,data1,cal2,data2) = match.groups()
            if cal1 != cal2:
                # Mixed calendars in one range are not representable;
                # the first calendar wins (historic behavior).
                pass

            cal = calendar_map.get(cal1, Date.CAL_GREGORIAN)

            start = self.dp.parse(data1)
            stop = self.dp.parse(data2)
            dateobj.set(Date.QUAL_NONE, Date.MOD_RANGE, cal,
                        start.get_start_date() + stop.get_start_date())
            return dateobj

        match = spanRegexp.match(text)
        if match:
            (cal1,data1,cal2,data2) = match.groups()
            if cal1 != cal2:
                pass

            cal = calendar_map.get(cal1, Date.CAL_GREGORIAN)

            start = self.dp.parse(data1)
            stop = self.dp.parse(data2)
            dateobj.set(Date.QUAL_NONE, Date.MOD_SPAN, cal,
                        start.get_start_date() + stop.get_start_date())
            return dateobj

        match = calRegexp.match(text)
        if match:
            (abt,cal,data) = match.groups()
            dateobj = self.dp.parse("%s %s" % (abt, data))
            if cal in calendar_map:
                dateobj.set_calendar(calendar_map[cal])
            return dateobj
        else:
            return self.dp.parse(text)
    except IOError:
        # NOTE(review): IOError from a date parse looks unlikely, and
        # Date.Date has no obvious set_text — confirm this fallback is
        # ever reached.
        return self.dp.set_text(text)
|
2002-10-20 19:55:16 +05:30
|
|
|
|
|
|
|
def handle_source(self,matches,level):
    """Build and return a RelLib.SourceRef for a SOUR record.

    An inline source (payload not starting with '@') gets a synthetic
    'gsrN' source holding the text as a note; an xref payload links to
    the referenced source and parses its sub-structure.
    """
    source_ref = RelLib.SourceRef()
    payload = matches[2]
    if payload and payload[0] != "@":
        # Inline source text: fabricate a local source record for it.
        self.localref = self.localref + 1
        ref = "gsr%d" % self.localref
        src = self.find_or_create_source(ref)
        source_ref.set_base_handle(src.get_handle())
        src.set_title('Imported Source #%d' % self.localref)
        src.set_note(payload + self.parse_continue_data(level))
        self.ignore_sub_junk(level+1)
    else:
        # Cross-reference: strip the surrounding '@' characters.
        src = self.find_or_create_source(payload[1:-1])
        source_ref.set_base_handle(src.get_handle())
        self.parse_source_reference(source_ref,level)
    return source_ref
|
|
|
|
|
|
|
|
def resolve_refns(self):
    """Re-key imported persons by their REFN values.

    CURRENTLY DISABLED: the unconditional return below short-circuits
    the whole method, leaving everything after it as dead code.  The
    body is kept for reference / possible re-enablement.
    """
    return
    prefix = self.db.iprefix
    index = 0
    new_pmax = self.db.pmap_index
    for pid in self.added.keys():
        index = index + 1
        if self.refn.has_key(pid):
            # This person carried a REFN; derive the preferred key.
            val = self.refn[pid]
            new_key = prefix % val
            new_pmax = max(new_pmax,val)

            person = self.db.get_person_from_handle(pid,self.trans)

            # new ID is not used
            if not self.db.has_person_handle(new_key):
                self.db.remove_person(pid,self.trans)
                person.set_handle(new_key)
                person.set_gramps_id(new_key)
                self.db.add_person(person,self.trans)
            else:
                tp = self.db.get_person_from_handle(new_key,self.trans)
                # same person, just change it
                if person == tp:
                    self.db.remove_person(pid,self.trans)
                    person.set_handle(new_key)
                    person.set_gramps_id(new_key)
                    self.db.add_person(person,self.trans)
                # give up trying to use the refn as a key
                else:
                    pass

    self.db.pmap_index = new_pmax
|
2002-10-20 19:55:16 +05:30
|
|
|
|
2004-12-26 23:53:50 +05:30
|
|
|
def invert_year(self,subdate):
    """Return *subdate* with its year component (index 2) negated;
    the other three fields pass through unchanged."""
    day, month, year, slash = subdate
    return (day, month, -year, slash)
|
|
|
|
|
|
|
|
def parse(self,text):
    """Parse *text* and return a freshly constructed Date object."""
    result = Date.Date()
    self.set_date(result, text)
    return result
|
|
|
|
|
2004-05-24 10:02:19 +05:30
|
|
|
def extract_temple(matches):
    """Return the LDS temple abbreviation for a TEMP record payload.

    Tries the full payload first, then its first whitespace-separated
    word; returns None when no abbreviation is known.
    """
    try:
        if const.lds_temple_to_abrev.has_key(matches[2]):
            return const.lds_temple_to_abrev[matches[2]]
        else:
            values = matches[2].split()
            return const.lds_temple_to_abrev[values[0]]
    except (KeyError, IndexError):
        # Narrowed from a bare except: only an unknown temple name
        # (KeyError) or an empty payload (IndexError) are expected here;
        # anything else should propagate instead of being swallowed.
        return None
|
|
|
|
|
2004-05-04 10:04:48 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
#
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2002-10-20 19:55:16 +05:30
|
|
|
def readData(database,active_person,cb):
    """Prompt the user for a GEDCOM file and import it into *database*.

    Stores *cb* in the module-level callback; shows a stack-trace
    dialog on any import failure.
    """
    global callback
    global file_topa

    callback = cb

    choose = gtk.FileChooserDialog("%s - GRAMPS" % _title_string,
                                   None,
                                   gtk.FILE_CHOOSER_ACTION_OPEN,
                                   (gtk.STOCK_CANCEL,
                                    gtk.RESPONSE_CANCEL,
                                    gtk.STOCK_OPEN,
                                    gtk.RESPONSE_OK))

    # GEDCOM-only filter (both common case spellings of the extension).
    ged_filter = gtk.FileFilter()
    ged_filter.set_name(_('GEDCOM files'))
    ged_filter.add_pattern('*.ged')
    ged_filter.add_pattern('*.GED')
    choose.add_filter(ged_filter)

    # Catch-all filter.
    all_filter = gtk.FileFilter()
    all_filter.set_name(_('All files'))
    all_filter.add_pattern('*')
    choose.add_filter(all_filter)

    response = choose.run()
    if response != gtk.RESPONSE_OK:
        choose.destroy()
        return

    filename = choose.get_filename()
    choose.destroy()
    try:
        importData(database,filename)
    except:
        # Deliberately broad: any import failure is shown to the user.
        import DisplayTrace
        DisplayTrace.DisplayTrace()
|
2004-06-24 08:59:38 +05:30
|
|
|
|
|
|
|
|
|
|
|
# MIME type and file filter used when registering this importer with
# the plugin manager (see register_import at the bottom of the file).
_mime_type = 'application/x-gedcom'
_filter = gtk.FileFilter()
_filter.set_name(_('GEDCOM files'))
_filter.add_mime_type(_mime_type)
|
|
|
|
|
2004-10-23 09:26:48 +05:30
|
|
|
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
#
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
def create_id():
    """Thin module-level wrapper delegating handle creation to Utils."""
    return Utils.create_id()
|
|
|
|
|
2002-10-20 19:55:16 +05:30
|
|
|
#-------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
#
|
|
|
|
#
|
|
|
|
#-------------------------------------------------------------------------
|
2005-01-05 10:32:19 +05:30
|
|
|
from PluginMgr import register_import
|
* src/Plugins.py: Add native_format flag to import plugin registration.
* src/ReadXML.py, src/ReadGedcom.py: Register as native formats
to prevent loading twice on File->Open.
* src/data/gramps.schemas: Add keys for last import and export dirs.
* src/GrampsCfg.py (get_last_import_dir, save_last_import_dir,
get_last_export_dir, save_last_export_dir): Add functions.
* src/Exporter.py (suggest_filename): Try last export and last
import folders before falling back to Home; (save): Save export folder.
* src/Utils.py (get_new_filename): Add optional folder argument.
* src/DbPrompter.py (ExistingDbPrompter.chooser): Only add
importers for non-native formats, the rest is already taken care of;
Try last file, last import, last export, then home folders;
(ImportDbPrompter.chooser): Save import folder; Try last import,
last file, last export, then home folders.
(NewNativeDbPrompter): Try last file, last import, last export folders,
then fall back to home.
svn: r3493
2004-08-24 03:35:55 +05:30
|
|
|
register_import(importData,_filter,_mime_type,1)
|