#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2004 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#

# $Id$

"Import from GEDCOM"

#-------------------------------------------------------------------------
#
# standard python modules
#
#-------------------------------------------------------------------------
import os
import re
import string
import const
import time

#-------------------------------------------------------------------------
#
# GTK/GNOME Modules
#
#-------------------------------------------------------------------------
import gtk
import gtk.glade

#-------------------------------------------------------------------------
#
# GRAMPS modules
#
#-------------------------------------------------------------------------
import Errors
import RelLib
import Julian
import FrenchRepublic
import Hebrew
import Date
from ansel_utf8 import ansel_to_utf8
import latin_utf8
import Utils
from GedcomInfo import *
from QuestionDialog import ErrorDialog
from gettext import gettext as _

#-------------------------------------------------------------------------
#
# constants
#
#-------------------------------------------------------------------------
ANSEL = 1
UNICODE = 2
UPDATE = 25

db = None
callback = None

_title_string = _("GEDCOM")

def nocnv(s):
    return unicode(s)

photo_types = [ "jpeg", "bmp", "pict", "pntg", "tpic", "png", "gif",
                "jpg", "tiff", "pcx" ]

file_systems = {
    'VFAT'    : _('Windows 9x file system'),
    'FAT'     : _('Windows 9x file system'),
    "NTFS"    : _('Windows NT file system'),
    "ISO9660" : _('CD ROM'),
    "SMBFS"   : _('Networked Windows file system')
    }

#-------------------------------------------------------------------------
#
# GEDCOM events to GRAMPS events conversion
#
#-------------------------------------------------------------------------
ged2gramps = {}
for _val in const.personalConstantEvents.keys():
    _key = const.personalConstantEvents[_val]
    if _key != "":
        ged2gramps[_key] = _val

ged2fam = {}
for _val in const.familyConstantEvents.keys():
    _key = const.familyConstantEvents[_val]
    if _key != "":
        ged2fam[_key] = _val

#-------------------------------------------------------------------------
#
# regular expressions
#
#-------------------------------------------------------------------------
intRE = re.compile(r"\s*(\d+)\s*$")
lineRE = re.compile(r"\s*(\d+)\s+(\S+)\s*(.*)$")
headRE = re.compile(r"\s*(\d+)\s+HEAD")
nameRegexp = re.compile(r"/?([^/]*)(/([^/]*)(/([^/]*))?)?")
snameRegexp = re.compile(r"/([^/]*)/([^/]*)")
calRegexp = re.compile(r"\s*(ABT|BEF|AFT)?\s*@#D([^@]+)@\s*(.*)$")
fromtoRegexp = re.compile(r"\s*(FROM|BET)\s+@#D([^@]+)@\s*(.*)\s+(AND|TO)\s+@#D([^@]+)@\s*(.*)$")

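#-------------------------------------------------------------------------
#
# Each line of a GEDCOM file has the form "<level> <tag> [value]", for
# example:
#
#   0 @I1@ INDI
#   1 NAME John /Smith/
#   2 GIVN John
#
# The parser below reads these lines one at a time; each parse_* method
# hands control back (via backup) as soon as it sees a level number
# smaller than the one it is responsible for.
#
#-------------------------------------------------------------------------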
#-------------------------------------------------------------------------
#
#
#
#-------------------------------------------------------------------------
def importData(database, filename, cb=None):

    global callback

    # add some checking here

    glade_file = "%s/gedcomimport.glade" % os.path.dirname(__file__)

    statusTop = gtk.glade.XML(glade_file,"status","gramps")
    statusWindow = statusTop.get_widget("status")

    Utils.set_titles(statusWindow,statusTop.get_widget('title'),
                     _('GEDCOM import status'))

    statusTop.get_widget("close").set_sensitive(0)
    statusTop.signal_autoconnect({
        "destroy_passed_object" : Utils.destroy_passed_object
        })

    try:
        g = GedcomParser(database,filename,statusTop)
    except IOError,msg:
        Utils.destroy_passed_object(statusWindow)
        ErrorDialog(_("%s could not be opened\n") % filename,str(msg))
        return
    except:
        Utils.destroy_passed_object(statusWindow)
        ErrorDialog(_("%s could not be opened\n") % filename)
        return

    try:
        close = g.parse_gedcom_file()
        g.resolve_refns()
    except IOError,msg:
        Utils.destroy_passed_object(statusWindow)
        errmsg = _("%s could not be opened\n") % filename
        ErrorDialog(errmsg,str(msg))
        return
    except Errors.GedcomError, val:
        (m1,m2) = val.messages()
        Utils.destroy_passed_object(statusWindow)
        ErrorDialog(m1,m2)
        return
    except:
        import DisplayTrace
        Utils.destroy_passed_object(statusWindow)
        DisplayTrace.DisplayTrace()
        return

    statusTop.get_widget("close").set_sensitive(1)
    if close:
        statusWindow.destroy()

    if cb:
        statusWindow.destroy()
        cb(1)
    elif callback:
        callback()

#-------------------------------------------------------------------------
#
#
#
#-------------------------------------------------------------------------
class DateStruct:
    def __init__(self):
        self.date = ""
        self.time = ""

#-------------------------------------------------------------------------
#
#
#
#-------------------------------------------------------------------------
class GedcomParser:

    SyntaxError = "Syntax Error"
    BadFile = "Not a GEDCOM file"

    def __init__(self, dbase, file, window):
        self.db = dbase
        self.person = None
        self.pmap = {}
        self.fmap = {}
        self.smap = {}
        self.nmap = {}
        self.share_note = []
        self.refn = {}
        self.added = {}
        self.gedmap = GedcomInfoDB()
        self.gedsource = None
        self.dir_path = os.path.dirname(file)
        self.localref = 0
        self.placemap = {}
        self.broken_conc_list = [ 'FamilyOrigins', 'FTW' ]
        self.broken_conc = 0
        self.is_ftw = 0

        self.f = open(file,"r")
        self.filename = file
        self.index = 0
        self.backoff = 0
        self.cnv = nocnv

        self.geddir = os.path.dirname(os.path.normpath(os.path.abspath(file)))

        self.trans = string.maketrans('','')
        self.delc = self.trans[0:31]
        self.trans2 = self.trans[0:128] + ('?' * 128)

        self.window = window
        if window:
            self.file_obj = window.get_widget("file")
            self.encoding_obj = window.get_widget("encoding")
            self.created_obj = window.get_widget("created")
            self.version_obj = window.get_widget("version")
            self.families_obj = window.get_widget("families")
            self.people_obj = window.get_widget("people")
            self.errors_obj = window.get_widget("errors")
            self.close_done = window.get_widget('close_done')
            self.error_text_obj = window.get_widget("error_text")
            self.info_text_obj = window.get_widget("info_text")

        self.error_count = 0

        map = const.personalConstantAttributes
        self.attrs = map.values()
        self.gedattr = {}
        for val in map.keys():
            self.gedattr[map[val]] = val

        if self.window:
            self.update(self.file_obj,os.path.basename(file))

        self.search_paths = []

        try:
            mypaths = []
            f = open("/proc/mounts","r")

            for line in f.xreadlines():
                paths = string.split(line)
                ftype = paths[2].upper()
                if ftype in file_systems.keys():
                    mypaths.append((paths[1],file_systems[ftype]))
                    self.search_paths.append(paths[1])
            f.close()

            if len(mypaths):
                self.infomsg(_("Windows style path names for images will use the following mount "
                               "points to try to find the images. These paths are based on Windows "
                               "compatible file systems available on this system:\n\n"))
                for p in mypaths:
                    self.infomsg("\t%s : %s\n" % p)

                self.infomsg('\n')
            self.infomsg(_("Images that cannot be found in the specified path in the GEDCOM file "
                           "will be searched for in the same directory in which the GEDCOM file "
                           "exists (%s).\n") % self.geddir)
        except:
            pass

    def errmsg(self,msg):
        if self.window:
            try:
                self.error_text_obj.get_buffer().insert_at_cursor(msg)
            except TypeError:
                self.error_text_obj.get_buffer().insert_at_cursor(msg,len(msg))
        else:
            print msg

    def infomsg(self,msg):
        if self.window:
            try:
                self.info_text_obj.get_buffer().insert_at_cursor(msg)
            except TypeError:
                self.info_text_obj.get_buffer().insert_at_cursor(msg,len(msg))
        else:
            print msg

    def find_file(self,fullname,altpath):
        tries = []
        fullname = string.replace(fullname,'\\','/')
        tries.append(fullname)

        if os.path.isfile(fullname):
            return (1,fullname)
        other = os.path.join(altpath,os.path.basename(fullname))
        tries.append(other)
        if os.path.isfile(other):
            return (1,other)
        if len(fullname) > 3:
            if fullname[1] == ':':
                fullname = fullname[2:]
                for path in self.search_paths:
                    other = os.path.normpath("%s/%s" % (path,fullname))
                    tries.append(other)
                    if os.path.isfile(other):
                        return (1,other)
            return (0,tries)
        else:
            return (0,tries)

    def update(self,field,text):
        field.set_text(text)
        while gtk.events_pending():
            gtk.main_iteration()

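    # get_next() returns the current line as a (level, tag, value) tuple.
    # backup() sets self.backoff so that the next call to get_next()
    # returns the same tuple again; this is how a parse_* method pushes
    # back the line that terminated its own level.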
    def get_next(self):
        if self.backoff == 0:
            next_line = self.f.readline()
            try:
                self.text = string.translate(next_line.strip(),self.trans,self.delc)
            except:
                self.text = next_line.strip()

            try:
                self.text = self.cnv(self.text)
            except:
                self.text = string.translate(self.text,self.trans2)

            self.index += 1
            l = string.split(self.text, None, 2)
            ln = len(l)
            try:
                if ln == 2:
                    self.groups = (int(l[0]),l[1],"")
                else:
                    self.groups = (int(l[0]),l[1],l[2])
            except:
                if self.text == "":
                    msg = _("Warning: line %d was blank, so it was ignored.\n") % self.index
                else:
                    msg = _("Warning: line %d was not understood, so it was ignored.") % self.index
                    msg = "%s\n\t%s\n" % (msg,self.text)
                self.errmsg(msg)
                self.error_count = self.error_count + 1
                self.groups = (999, "XXX", "XXX")
        self.backoff = 0
        return self.groups

    def barf(self,level):
        import traceback
        msg = _("Warning: line %d was not understood, so it was ignored.") % self.index
        self.errmsg(msg)
        msg = "\n\t%s\n" % self.text
        self.errmsg(msg)
        self.error_count = self.error_count + 1
        # self.errmsg(string.join(traceback.format_stack()))
        self.ignore_sub_junk(level)

    def warn(self,msg):
        self.errmsg(msg)
        self.error_count = self.error_count + 1

    def backup(self):
        self.backoff = 1

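    # Note: parse_gedcom_file() rebinds self.trans (created in __init__ as
    # a string.maketrans table) to the database transaction returned by
    # start_transaction(); once that happens, string.translate() in
    # get_next() raises and the plain strip() fallback is used instead.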
    def parse_gedcom_file(self):

        self.trans = self.db.start_transaction()
        t = time.time()
        self.index = 0
        self.fam_count = 0
        self.indi_count = 0
        try:
            self.parse_header()
            self.parse_submitter()
            self.parse_record()
            self.parse_trailer()
        except Errors.GedcomError, err:
            self.errmsg(str(err))

        if self.window:
            self.update(self.families_obj,str(self.fam_count))
            self.update(self.people_obj,str(self.indi_count))

        self.break_note_links()
        t = time.time() - t
        msg = _('Import Complete: %d seconds') % t

        self.db.add_transaction(self.trans,_("GEDCOM import"))

        if self.window:
            self.infomsg("\n%s" % msg)
        else:
            print msg
            print "Families: %d" % self.fam_count
            print "Individuals: %d" % self.indi_count
        return None

    def break_note_links(self):
        for o in self.share_note:
            o.unique_note()

    def parse_trailer(self):
        matches = self.get_next()

        if matches[1] != "TRLR":
            self.barf(0)
        self.f.close()

    def parse_header(self):
        self.parse_header_head()
        self.parse_header_source()

    def parse_submitter(self):
        matches = self.get_next()

        if matches[2] != "SUBM":
            self.backup()
            return
        else:
            self.ignore_sub_junk(1)

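    # Maps a level-0 source record onto a RelLib source, for example:
    #
    #   0 @S1@ SOUR
    #   1 TITL Parish register
    #   1 AUTH John Doe
    #   1 PUBL Oslo, 1901
    #
    # TITL/AUTH/PUBL/ABBR become the title, author, publication info and
    # abbreviation; unrecognized tags are appended to the note text.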
    def parse_source(self,name,level):
        self.source = self.db.find_source(name,self.smap, self.trans)

        note = ""
        while 1:
            matches = self.get_next()
            if int(matches[0]) < level:
                if note:
                    self.source.set_note(note)
                if not self.source.get_title():
                    self.source.set_title("No title - ID %s" % self.source.get_id())
                self.db.commit_source(self.source, self.trans)
                self.backup()
                return
            elif matches[1] == "TITL":
                title = matches[2] + self.parse_continue_data(level+1)
                title = string.replace(title,'\n',' ')
                self.source.set_title(title)
            elif matches[1] == "TAXT" or matches[1] == "PERI": # EasyTree Sierra On-Line
                if self.source.get_title() == "":
                    title = matches[2] + self.parse_continue_data(level+1)
                    title = string.replace(title,'\n',' ')
                    self.source.set_title(title)
            elif matches[1] == "AUTH":
                self.source.set_author(matches[2] + self.parse_continue_data(level+1))
            elif matches[1] == "PUBL":
                self.source.set_publication_info(matches[2] + self.parse_continue_data(level+1))
            elif matches[1] == "OBJE":
                self.ignore_sub_junk(2)
            elif matches[1] == "NOTE":
                note = self.parse_note(matches,self.source,level+1,note)
            elif matches[1] == "TEXT":
                note = self.source.get_note()
                d = self.parse_continue_data(level+1)
                if note:
                    note = "%s\n%s %s%s" % (note,matches[1],matches[2],d)
                else:
                    note = "%s %s%s" % (matches[1],matches[2],d)
            elif matches[1] == "ABBR":
                self.source.set_abbreviation(matches[2] + self.parse_continue_data(level+1))
            else:
                note = self.source.get_note()
                if note:
                    note = "%s\n%s %s" % (note,matches[1],matches[2])
                else:
                    note = "%s %s" % (matches[1],matches[2])

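    # Top-level dispatch: walks the level-0 records of the file and routes
    # FAM, INDI, SOUR and NOTE records to their parsers, skipping SUBM,
    # SUBN, REPO and OBJE records, until the TRLR record is reached.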
    def parse_record(self):
        while 1:
            matches = self.get_next()
            if matches[2] == "FAM":
                if self.fam_count % UPDATE == 0 and self.window:
                    self.update(self.families_obj,str(self.fam_count))
                self.fam_count = self.fam_count + 1
                self.family = self.db.find_family_with_map(matches[1],self.fmap, self.trans)
                self.parse_family()
                if self.addr != None:
                    father = self.family.get_father_id()
                    if father:
                        father.add_address(self.addr)
                        self.db.commit_person(father, self.trans)
                    mother = self.family.get_mother_id()
                    if mother:
                        mother.add_address(self.addr)
                        self.db.commit_person(mother, self.trans)
                    for child in self.family.get_child_id_list():
                        child.add_address(self.addr)
                        self.db.commit_person(child, self.trans)
                self.db.commit_family(self.family, self.trans)
                del self.family
            elif matches[2] == "INDI":
                if self.indi_count % UPDATE == 0 and self.window:
                    self.update(self.people_obj,str(self.indi_count))
                self.indi_count = self.indi_count + 1
                id = matches[1]
                id = id[1:-1]
                self.person = self.find_or_create_person(id)
                self.added[self.person.get_id()] = 1
                self.parse_individual()
                self.db.commit_person(self.person, self.trans)
                del self.person
            elif matches[2] in ["SUBM","SUBN","REPO"]:
                self.ignore_sub_junk(1)
            elif matches[1] in ["SUBM","SUBN","OBJE","_EVENT_DEFN"]:
                self.ignore_sub_junk(1)
            elif matches[2] == "SOUR":
                self.parse_source(matches[1],1)
            elif matches[2][0:4] == "NOTE":
                if self.nmap.has_key(matches[1]):
                    noteobj = self.nmap[matches[1]]
                else:
                    noteobj = RelLib.Note()
                    self.nmap[matches[1]] = noteobj
                text = matches[2][4:]
                # noteobj.append(text + self.parse_continue_data(1))
                noteobj.append(text + self.parse_note_continue(1))
                self.parse_note_data(1)
            elif matches[1] == "TRLR":
                self.backup()
                return
            else:
                self.barf(1)

    def find_or_create_person(self,id):
        if self.pmap.has_key(id):
            person = self.db.find_person_from_id(self.pmap[id],self.trans)
        elif self.db.has_person_id(id):
            person = RelLib.Person()
            self.pmap[id] = self.db.add_person(person,self.trans)
        else:
            person = RelLib.Person(id)
            self.db.add_person_as(person,self.trans)
            self.pmap[id] = id
        return person

    def parse_cause(self,event,level):
        while 1:
            matches = self.get_next()
            if int(matches[0]) < level:
                self.backup()
                return
            elif matches[1] == "SOUR":
                event.add_source_reference(self.handle_source(matches,level+1))
            else:
                self.barf(1)

    def parse_note_data(self,level):
        while 1:
            matches = self.get_next()
            if int(matches[0]) < level:
                self.backup()
                return
            elif matches[1] in ["SOUR","CHAN","REFN"]:
                self.ignore_sub_junk(level+1)
            elif matches[1] == "RIN":
                pass
            else:
                self.barf(level+1)

    def parse_ftw_relations(self,level):
        mrel = "Birth"
        frel = "Birth"

        while 1:
            matches = self.get_next()
            if int(matches[0]) < level:
                self.backup()
                return (mrel,frel)
            # FTW
            elif matches[1] == "_FREL":
                if string.lower(matches[2]) != "natural":
                    frel = string.capitalize(matches[2])
            # FTW
            elif matches[1] == "_MREL":
                if string.lower(matches[2]) != "natural":
                    mrel = matches[2]
            elif matches[1] == "ADOP":
                mrel = "Adopted"
                frel = "Adopted"
            # Legacy
            elif matches[1] == "_STAT":
                mrel = matches[2]
                frel = matches[2]
            # Legacy _PREF
            elif matches[1][0] == "_":
                pass
            else:
                self.barf(level+1)

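    # Fills in self.family from a FAM record, for example:
    #
    #   0 @F1@ FAM
    #   1 HUSB @I1@
    #   1 WIFE @I2@
    #   1 CHIL @I3@
    #   1 MARR
    #   2 DATE 12 JUN 1920
    #
    # HUSB/WIFE/CHIL become the father, mother and child links, and any
    # other recognized tag is turned into a family event.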
    def parse_family(self):
        self.addr = None
        note = ""
        while 1:
            matches = self.get_next()

            if int(matches[0]) == 0:
                self.backup()
                return
            elif matches[1] == "HUSB":
                id = matches[2]
                person = self.find_or_create_person(id[1:-1])
                self.family.set_father_id(person.get_id())
                self.ignore_sub_junk(2)
            elif matches[1] == "WIFE":
                id = matches[2]
                person = self.find_or_create_person(id[1:-1])
                self.family.set_mother_id(person.get_id())
                self.ignore_sub_junk(2)
            elif matches[1] == "SLGS":
                ord = RelLib.LdsOrd()
                self.family.set_lds_sealing(ord)
                self.parse_ord(ord,2)
            elif matches[1] == "ADDR":
                self.addr = RelLib.Address()
                self.addr.set_street(matches[2] + self.parse_continue_data(1))
                self.parse_address(self.addr,2)
            elif matches[1] == "CHIL":
                mrel,frel = self.parse_ftw_relations(2)
                id = matches[2]
                child = self.find_or_create_person(id[1:-1])
                self.family.add_child_id(child.get_id())

                for f in child.get_parent_family_id_list():
                    if f[0] == self.family.get_id():
                        break
                else:
                    if (mrel=="Birth" or mrel=="") and (frel=="Birth" or frel==""):
                        child.set_main_parent_family_id(self.family.get_id())
                    else:
                        if child.get_main_parents_family_id() == self.family.get_id():
                            child.set_main_parent_family_id(None)
                        child.add_parent_family_id(self.family.get_id(),mrel,frel)
                self.db.commit_person(child, self.trans)
            elif matches[1] == "NCHI":
                a = RelLib.Attribute()
                a.set_type("Number of Children")
                a.set_value(matches[2])
                self.family.add_attribute(a)
            elif matches[1] in ["RIN", "SUBM", "REFN","CHAN","SOUR"]:
                self.ignore_sub_junk(2)
            elif matches[1] == "OBJE":
                if matches[2] and matches[2][0] == '@':
                    self.barf(2)
                else:
                    self.parse_family_object(2)
            elif matches[1] == "_COMM":
                note = string.strip(matches[2]) + self.parse_continue_data(1)
                self.family.set_note(note)
                self.ignore_sub_junk(2)
            elif matches[1] == "NOTE":
                note = self.parse_note(matches,self.family,1,note)
            else:
                event = RelLib.Event()
                try:
                    event.set_name(ged2fam[matches[1]])
                except:
                    event.set_name(matches[1])
                if event.get_name() == "Marriage":
                    self.family.set_relationship("Married")
                self.db.add_event(event,self.trans)
                self.family.add_event_id(event.get_id())
                self.parse_family_event(event,2)
                self.db.commit_event(event, self.trans)
                del event

    def parse_note_base(self,matches,obj,level,old_note,task):
        note = old_note
        if matches[2] and matches[2][0] == "@":
            if self.nmap.has_key(matches[2]):
                self.share_note.append(obj)
                obj.set_note_object(self.nmap[matches[2]])
            else:
                noteobj = RelLib.Note()
                self.nmap[matches[2]] = noteobj
                self.share_note.append(obj)
                obj.set_note_object(noteobj)
        else:
            if old_note:
                note = "%s\n%s%s" % (old_note,matches[2],self.parse_continue_data(level))
            else:
                note = matches[2] + self.parse_continue_data(level)
            task(note)
            self.ignore_sub_junk(level+1)
        return note

    def parse_note(self,matches,obj,level,old_note):
        return self.parse_note_base(matches,obj,level,old_note,obj.set_note)

    def parse_comment(self,matches,obj,level,old_note):
        return self.parse_note_base(matches,obj,level,old_note,obj.set_comments)

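    # Fills in self.person from an INDI record. A GEDCOM personal name
    # such as "John /Smith/ Jr." is split by nameRegexp into the given
    # name, the surname between the slashes, and a trailing suffix.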
    def parse_individual(self):
        name_cnt = 0
        note = ""
        while 1:
            matches = self.get_next()

            if int(matches[0]) == 0:
                self.backup()
                return
            elif matches[1] == "NAME":
                name = RelLib.Name()
                m = snameRegexp.match(matches[2])
                if m:
                    n = m.groups()[0]
                    n2 = m.groups()[1]
                    names = (n2,'',n,'','')
                else:
                    try:
                        names = nameRegexp.match(matches[2]).groups()
                    except:
                        names = (matches[2],"","","","")
                if names[0]:
                    name.set_first_name(names[0].strip())
                if names[2]:
                    name.set_surname(names[2].strip())
                if names[4]:
                    name.set_suffix(names[4].strip())
                if name_cnt == 0:
                    self.person.set_primary_name(name)
                else:
                    self.person.add_alternate_name(name)
                name_cnt = name_cnt + 1
                self.parse_name(name,2)
            elif matches[1] in ["ALIA","_ALIA"]:
                aka = RelLib.Name()
                try:
                    names = nameRegexp.match(matches[2]).groups()
                except:
                    names = (matches[2],"","","","")
                if names[0]:
                    aka.set_first_name(names[0])
                if names[2]:
                    aka.set_surname(names[2])
                if names[4]:
                    aka.set_suffix(names[4])
                self.person.add_alternate_name(aka)
            elif matches[1] == "OBJE":
                if matches[2] and matches[2][0] == '@':
                    self.barf(2)
                else:
                    self.parse_person_object(2)
            elif matches[1] in ["NOTE","_COMM"]:
                note = self.parse_note(matches,self.person,1,note)
            elif matches[1] == "SEX":
                if matches[2] == '':
                    self.person.set_gender(RelLib.Person.unknown)
                elif matches[2][0] == "M":
                    self.person.set_gender(RelLib.Person.male)
                else:
                    self.person.set_gender(RelLib.Person.female)
            elif matches[1] in [ "BAPL", "ENDL", "SLGC" ]:
                ord = RelLib.LdsOrd()
                if matches[1] == "BAPL":
                    self.person.set_lds_baptism(ord)
                elif matches[1] == "ENDL":
                    self.person.set_lds_endowment(ord)
                else:
                    self.person.set_lds_sealing(ord)
                self.parse_ord(ord,2)
            elif matches[1] == "FAMS":
                family = self.db.find_family_with_map(matches[2],self.fmap, self.trans)
                self.person.add_family_id(family.get_id())
                if note == "":
                    note = self.parse_optional_note(2)
                else:
                    note = "%s\n\n%s" % (note,self.parse_optional_note(2))
                self.db.commit_family(family, self.trans)
            elif matches[1] == "FAMC":
                type,note = self.parse_famc_type(2)
                family = self.db.find_family_with_map(matches[2],self.fmap, self.trans)

                for f in self.person.get_parent_family_id_list():
                    if f[0] == family.get_id():
                        break
                else:
                    if type == "" or type == "Birth":
                        if self.person.get_main_parents_family_id() == None:
                            self.person.set_main_parent_family_id(family.get_id())
                        else:
                            self.person.add_parent_family_id(family.get_id(),"Unknown","Unknown")
                    else:
                        if self.person.get_main_parents_family_id() == family.get_id():
                            self.person.set_main_parent_family_id(None)
                        self.person.add_parent_family_id(family.get_id(),type,type)
                self.db.commit_family(family, self.trans)
            elif matches[1] == "RESI":
                addr = RelLib.Address()
                self.person.add_address(addr)
                self.parse_residence(addr,2)
            elif matches[1] == "ADDR":
                addr = RelLib.Address()
                addr.set_street(matches[2] + self.parse_continue_data(1))
                self.parse_address(addr,2)
                self.person.add_address(addr)
            elif matches[1] == "PHON":
                addr = RelLib.Address()
                addr.set_street("Unknown")
                addr.set_phone(matches[2])
                self.person.add_address(addr)
            elif matches[1] == "BIRT":
                event = RelLib.Event()
                self.db.add_event(event, self.trans)
                if self.person.get_birth_id():
                    event.set_name("Alternate Birth")
                    self.person.add_event_id(event.get_id())
                else:
                    event.set_name("Birth")
                    self.person.set_birth_id(event.get_id())
                self.parse_person_event(event,2)
                self.db.commit_event(event, self.trans)
            elif matches[1] == "ADOP":
                event = RelLib.Event()
                event.set_name("Adopted")
                self.person.add_event_id(event.get_id())
                self.parse_adopt_event(event,2)
                self.db.add_event(event, self.trans)
            elif matches[1] == "DEAT":
                event = RelLib.Event()
                self.db.add_event(event, self.trans)
                if self.person.get_death_id():
                    event.set_name("Alternate Death")
                    self.person.add_event_id(event.get_id())
                else:
                    event.set_name("Death")
                    self.person.set_death_id(event.get_id())
                self.parse_person_event(event,2)
                self.db.commit_event(event, self.trans)
            elif matches[1] == "EVEN":
                event = RelLib.Event()
                if matches[2]:
                    event.set_description(matches[2])
                self.parse_person_event(event,2)
                n = string.strip(event.get_name())
                if n in self.attrs:
                    attr = RelLib.Attribute()
                    attr.set_type(self.gedattr[n])
                    attr.set_value(event.get_description())
                    self.person.add_attribute(attr)
                else:
                    self.db.add_event(event, self.trans)
                    self.person.add_event_id(event.get_id())
            elif matches[1] == "SOUR":
                source_ref = self.handle_source(matches,2)
                self.person.get_primary_name().add_source_reference(source_ref)
            elif matches[1] == "REFN":
                if intRE.match(matches[2]):
                    try:
                        self.refn[self.person.get_id()] = int(matches[2])
                    except:
                        pass
            elif matches[1] in ["AFN","RFN","_UID"]:
                attr = RelLib.Attribute()
                attr.set_type(matches[1])
                attr.set_value(matches[2])
                self.person.add_attribute(attr)
            elif matches[1] in ["CHAN","ASSO","ANCI","DESI","RIN"]:
                self.ignore_sub_junk(2)
            else:
                event = RelLib.Event()
                n = string.strip(matches[1])
                if ged2gramps.has_key(n):
                    event.set_name(ged2gramps[n])
                elif self.gedattr.has_key(n):
                    attr = RelLib.Attribute()
                    attr.set_type(self.gedattr[n])
                    attr.set_value(event.get_description())
                    self.person.add_attribute(attr)
                    self.parse_person_attr(attr,2)
                    continue
                else:
                    val = self.gedsource.tag2gramps(n)
                    if val:
                        event.set_name(val)
                    else:
                        event.set_name(n)

                self.parse_person_event(event,2)
                if matches[2]:
                    event.set_description(matches[2])
                self.db.add_event(event, self.trans)
                self.person.add_event_id(event.get_id())

    def parse_optional_note(self,level):
        note = ""
        while 1:
            matches = self.get_next()

            if int(matches[0]) < level:
                self.backup()
                return note
            elif matches[1] == "NOTE":
                if not string.strip(matches[2]) or matches[2] and matches[2][0] != "@":
                    note = matches[2] + self.parse_continue_data(level+1)
                    self.parse_note_data(level+1)
                else:
                    self.ignore_sub_junk(level+1)
            else:
                self.barf(level+1)

    def parse_famc_type(self,level):
        type = ""
        note = ""
        while 1:
            matches = self.get_next()

            if int(matches[0]) < level:
                self.backup()
                return (string.capitalize(type),note)
            elif matches[1] == "PEDI":
                type = matches[2]
            elif matches[1] == "SOUR":
                source_ref = self.handle_source(matches,level+1)
                self.person.get_primary_name().add_source_reference(source_ref)
            elif matches[1] == "_PRIMARY":
                pass #type = matches[1]
            elif matches[1] == "NOTE":
                if not string.strip(matches[2]) or matches[2] and matches[2][0] != "@":
                    note = matches[2] + self.parse_continue_data(level+1)
                    self.parse_note_data(level+1)
                else:
                    self.ignore_sub_junk(level+1)
            else:
                self.barf(level+1)

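    # An OBJE record with FORM "url" becomes a RelLib.Url on the person;
    # any other FORM is treated as a local file, located with find_file()
    # and attached as a media reference.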
    def parse_person_object(self,level):
        form = ""
        file = ""
        title = ""
        note = ""
        while 1:
            matches = self.get_next()
            if matches[1] == "FORM":
                form = string.lower(matches[2])
            elif matches[1] == "TITL":
                title = matches[2]
            elif matches[1] == "FILE":
                file = matches[2]
            elif matches[1] == "NOTE":
                note = matches[2] + self.parse_continue_data(level+1)
            elif matches[1][0] == "_":
                self.ignore_sub_junk(level+1)
            elif int(matches[0]) < level:
                self.backup()
                break
            else:
                self.barf(level+1)

        if form == "url":
            url = RelLib.Url()
            url.set_path(file)
            url.set_description(title)
            self.person.add_url(url)
        else:
            (ok,path) = self.find_file(file,self.dir_path)
            if not ok:
                self.warn(_("Warning: could not import %s") % file + "\n")
                self.warn(_("\tThe following paths were tried:\n\t\t"))
                self.warn(string.join(path,"\n\t\t"))
                self.warn('\n')
            else:
                photo = RelLib.MediaObject()
                photo.set_path(path)
                photo.set_description(title)
                photo.set_mime_type(Utils.get_mime_type(path))
                self.db.add_object(photo, self.trans)
                oref = RelLib.MediaRef()
                oref.set_reference_id(photo.get_id())
                self.person.add_media_reference(oref)
                self.db.commit_person(self.person, self.trans)

    def parse_family_object(self,level):
        form = ""
        file = ""
        title = ""
        note = ""
        while 1:
            matches = self.get_next()
            if matches[1] == "FORM":
                form = string.lower(matches[2])
            elif matches[1] == "TITL":
                title = matches[2]
            elif matches[1] == "FILE":
                file = matches[2]
            elif matches[1] == "NOTE":
                note = matches[2] + self.parse_continue_data(level+1)
            elif int(matches[0]) < level:
                self.backup()
                break
            else:
                self.barf(level+1)

        if form:
            (ok,path) = self.find_file(file,self.dir_path)
            if not ok:
                self.warn(_("Warning: could not import %s") % file + "\n")
                self.warn(_("\tThe following paths were tried:\n\t\t"))
                self.warn(string.join(path,"\n\t\t"))
                self.warn('\n')
            else:
                photo = RelLib.MediaObject()
                photo.set_path(path)
                photo.set_description(title)
                photo.set_mime_type(Utils.get_mime_type(path))
                self.db.add_object(photo, self.trans)
                oref = RelLib.MediaRef()
                oref.set_reference_id(photo.get_id())
                self.family.add_media_reference(oref)
                self.db.commit_family(self.family, self.trans)

    def parse_residence(self,address,level):
        note = ""
        while 1:
            matches = self.get_next()

            if int(matches[0]) < level:
                self.backup()
                return
            elif matches[1] == "DATE":
                address.set_date_object(self.extract_date(matches[2]))
            elif matches[1] == "ADDR":
                address.set_street(matches[2] + self.parse_continue_data(level+1))
                self.parse_address(address,level+1)
            elif matches[1] in ["AGE","AGNC","CAUS","STAT","TEMP","OBJE","TYPE","_DATE2"]:
                self.ignore_sub_junk(level+1)
            elif matches[1] == "SOUR":
                address.add_source_reference(self.handle_source(matches,level+1))
            elif matches[1] == "PLAC":
                address.set_street(matches[2])
                self.parse_address(address,level+1)
            elif matches[1] == "PHON":
                address.set_street("Unknown")
                address.set_phone(matches[2])
            elif matches[1] == "NOTE":
                note = self.parse_note(matches,address,level+1,note)
            else:
                self.barf(level+1)

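    # Folds ADDR/ADR1/ADR2 continuation lines into the street field and
    # maps CITY, STAE, POST and CTRY onto the corresponding RelLib.Address
    # fields.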
    def parse_address(self,address,level):
        first = 0
        note = ""
        while 1:
            matches = self.get_next()
            if int(matches[0]) < level:
                if matches[1] == "PHON":
                    address.set_phone(matches[2])
                else:
                    self.backup()
                return
            elif matches[1] in [ "ADDR", "ADR1", "ADR2" ]:
                val = address.get_street()
                data = self.parse_continue_data(level+1)
                if first == 0:
                    val = "%s %s" % (matches[2],data)
                    first = 1
                else:
                    val = "%s,%s %s" % (val,matches[2],data)
                address.set_street(val)
            elif matches[1] == "CITY":
                address.set_city(matches[2])
            elif matches[1] == "STAE":
                address.set_state(matches[2])
            elif matches[1] == "POST":
                address.set_postal_code(matches[2])
            elif matches[1] == "CTRY":
                address.set_country(matches[2])
            else:
                self.barf(level+1)

    def parse_ord(self,ord,level):
        note = ""
        while 1:
            matches = self.get_next()
            if int(matches[0]) < level:
                self.backup()
                break
            elif matches[1] == "TEMP":
                value = extract_temple(matches)
                if value:
                    ord.set_temple(value)
            elif matches[1] == "DATE":
                ord.set_date_object(self.extract_date(matches[2]))
            elif matches[1] == "FAMC":
                ord.set_family_id(self.db.find_family_with_map(matches[2],self.fmap,self.trans))
            elif matches[1] == "PLAC":
                try:
                    val = matches[2]
                    if self.placemap.has_key(val):
                        place_id = self.placemap[val]
                    else:
                        place = RelLib.Place()
                        place.set_title(matches[2])
                        self.db.add_place(place, self.trans)
                        place_id = place.get_id()
                        self.placemap[val] = place_id
                    ord.set_place_id(place_id)
                    self.ignore_sub_junk(level+1)
                except NameError:
                    pass
            elif matches[1] == "SOUR":
                ord.add_source_reference(self.handle_source(matches,level+1))
            elif matches[1] == "NOTE":
                note = self.parse_note(matches,ord,level+1,note)
            elif matches[1] == "STAT":
                if const.lds_status.has_key(matches[2]):
                    ord.set_status(const.lds_status[matches[2]])
            else:
                self.barf(level+1)

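    # CONC lines continue the previous value and CONT lines start a new
    # line; self.broken_conc re-inserts the space that some exporters
    # (FamilyOrigins, FTW) drop when splitting values across CONC lines.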
    def parse_person_event(self,event,level):
        note = ""
        while 1:
            matches = self.get_next()
            if int(matches[0]) < level:
                if note:
                    event.set_note(note)
                self.backup()
                break
            elif matches[1] == "TYPE":
                if event.get_name() == "":
                    if ged2gramps.has_key(matches[2]):
                        name = ged2gramps[matches[2]]
                    else:
                        val = self.gedsource.tag2gramps(matches[2])
                        if val:
                            name = val
                        else:
                            name = matches[2]
                    event.set_name(name)
            elif matches[1] == "DATE":
                event.set_date_object(self.extract_date(matches[2]))
            elif matches[1] == "SOUR":
                event.add_source_reference(self.handle_source(matches,level+1))
            elif matches[1] == "PLAC":
                val = matches[2]
                n = string.strip(event.get_name())
                if self.is_ftw and n in ["Occupation","Degree","SSN"]:
                    event.set_description(val)
                    self.ignore_sub_junk(level+1)
                else:
                    if self.placemap.has_key(val):
                        place_id = self.placemap[val]
                    else:
                        place = RelLib.Place()
                        place.set_title(matches[2])
                        self.db.add_place(place, self.trans)
                        place_id = place.get_id()
                        self.placemap[val] = place_id
                    event.set_place_id(place_id)
                    self.ignore_sub_junk(level+1)
            elif matches[1] == "CAUS":
                info = matches[2] + self.parse_continue_data(level+1)
                event.set_cause(info)
                self.parse_cause(event,level+1)
            elif matches[1] == "NOTE" or matches[1] == 'OFFI':
                info = matches[2] + self.parse_continue_data(level+1)
                if note == "":
                    note = info
                else:
                    note = "%s\n%s" % (note,info)
            elif matches[1] == "CONC":
                d = event.get_description()
                if self.broken_conc:
                    event.set_description("%s %s" % (d, matches[2]))
                else:
                    event.set_description("%s%s" % (d, matches[2]))
            elif matches[1] == "CONT":
                event.set_description("%s\n%s" % (event.get_description(),matches[2]))
            elif matches[1] in ["RELI", "TIME","ADDR","AGE","AGNC","STAT","TEMP","OBJE","_DATE2"]:
                self.ignore_sub_junk(level+1)
            else:
                self.barf(level+1)


    def parse_adopt_event(self,event,level):
        """Parses the sub-records of an ADOP (adoption) event."""
        note = ""
        while 1:
            matches = self.get_next()
            if int(matches[0]) < level:
                if note != "":
                    event.set_note(note)
                self.backup()
                break
            elif matches[1] == "DATE":
                event.set_date_object(self.extract_date(matches[2]))
            elif matches[1] in ["TIME","ADDR","AGE","AGNC","STAT","TEMP","OBJE"]:
                self.ignore_sub_junk(level+1)
            elif matches[1] == "SOUR":
                event.add_source_reference(self.handle_source(matches,level+1))
            elif matches[1] == "FAMC":
                family = self.db.find_family_with_map(matches[2],self.fmap,self.trans)
                mrel,frel = self.parse_adopt_famc(level+1)
                if self.person.get_main_parents_family_id() == family.get_id():
                    self.person.set_main_parent_family_id(None)
                self.person.add_parent_family_id(family.get_id(),mrel,frel)
            elif matches[1] == "PLAC":
                val = matches[2]
                if self.placemap.has_key(val):
                    place_id = self.placemap[val]
                else:
                    place = RelLib.Place()
                    place.set_title(matches[2])
                    self.db.add_place(place, self.trans)
                    place_id = place.get_id()
                    self.placemap[val] = place_id
                event.set_place_id(place_id)
                self.ignore_sub_junk(level+1)
            elif matches[1] == "TYPE":
                # eventually do something intelligent here
                pass
            elif matches[1] == "CAUS":
                info = matches[2] + self.parse_continue_data(level+1)
                event.set_cause(info)
                self.parse_cause(event,level+1)
            elif matches[1] == "NOTE":
                info = matches[2] + self.parse_continue_data(level+1)
                if note == "":
                    note = info
                else:
                    # append to the note collected so far instead of discarding it
                    note = "%s\n%s" % (note,info)
            elif matches[1] == "CONC":
                d = event.get_description()
                if self.broken_conc:
                    event.set_description("%s %s" % (d,matches[2]))
                else:
                    event.set_description("%s%s" % (d,matches[2]))
            elif matches[1] == "CONT":
                d = event.get_description()
                event.set_description("%s\n%s" % (d,matches[2]))
            else:
                self.barf(level+1)

    def parse_adopt_famc(self,level):
        """Parses the FAMC sub-structure of an adoption event and returns
        the (mrel,frel) pair."""
        mrel = "Adopted"
        frel = "Adopted"
        while 1:
            matches = self.get_next()
            if int(matches[0]) < level:
                self.backup()
                return (mrel,frel)
            elif matches[1] == "ADOP":
                if matches[2] == "HUSB":
                    mrel = "Birth"
                elif matches[2] == "WIFE":
                    frel = "Birth"
            else:
                self.barf(level+1)
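
    # Illustrative attribute sub-structure consumed by parse_person_attr below
    # (not part of this file; invented values, GEDCOM 5.5 layout):
    #
    #   1 OCCU Farmer
    #   2 TYPE Occupation
    #   2 PLAC Brattleboro, Windham, Vermont
    #   2 SOUR @S3@
    #   2 NOTE Worked the family farm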

    def parse_person_attr(self,attr,level):
        """Parses the detail records of an individual attribute."""
        note = ""
        while 1:
            matches = self.get_next()
            if int(matches[0]) < level:
                self.backup()
                break
            elif matches[1] == "TYPE":
                if attr.get_type() == "":
                    if ged2gramps.has_key(matches[2]):
                        name = ged2gramps[matches[2]]
                    else:
                        val = self.gedsource.tag2gramps(matches[2])
                        if val:
                            name = val
                        else:
                            name = matches[2]
                    attr.set_name(name)
            elif matches[1] in ["CAUS","DATE","TIME","ADDR","AGE","AGNC","STAT","TEMP","OBJE"]:
                self.ignore_sub_junk(level+1)
            elif matches[1] == "SOUR":
                attr.add_source_reference(self.handle_source(matches,level+1))
            elif matches[1] == "PLAC":
                val = matches[2]
                if attr.get_value() == "":
                    attr.set_value(val)
                self.ignore_sub_junk(level+1)
            elif matches[1] == "DATE":
                # appears to be shadowed by the "DATE" entry in the ignore list
                # above, so this branch is currently never reached
                note = "%s\n\n" % ("Date : %s" % matches[2])
            elif matches[1] == "NOTE":
                info = matches[2] + self.parse_continue_data(level+1)
                if note == "":
                    note = info
                else:
                    note = "%s\n\n%s" % (note,info)
            elif matches[1] == "CONC":
                if self.broken_conc:
                    attr.set_value("%s %s" % (attr.get_value(),matches[2]))
                else:
                    # standard CONC concatenates without an extra space
                    attr.set_value("%s%s" % (attr.get_value(),matches[2]))
            elif matches[1] == "CONT":
                attr.set_value("%s\n%s" % (attr.get_value(),matches[2]))
            else:
                self.barf(level+1)
        if note != "":
            attr.set_note(note)
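
    # Illustrative family event sub-structure consumed by parse_family_event
    # below (not part of this file; invented values, GEDCOM 5.5 layout):
    #
    #   1 MARR
    #   2 DATE 12 JUN 1880
    #   2 PLAC Springfield, Sangamon, Illinois
    #   2 SOUR @S1@
    #   3 PAGE 42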

    def parse_family_event(self,event,level):
        """Parses the detail records of a family event."""
        note = ""
        while 1:
            matches = self.get_next()
            if int(matches[0]) < level:
                if note:
                    event.set_note(note)
                self.backup()
                break
            elif matches[1] == "TYPE":
                if event.get_name() == "" or event.get_name() == 'EVEN':
                    try:
                        event.set_name(ged2fam[matches[2]])
                    except:
                        event.set_name(matches[2])
                else:
                    note = 'Status = %s\n' % matches[2]
            elif matches[1] == "DATE":
                event.set_date_object(self.extract_date(matches[2]))
            elif matches[1] == "CAUS":
                info = matches[2] + self.parse_continue_data(level+1)
                event.set_cause(info)
                self.parse_cause(event,level+1)
            elif matches[1] in ["TIME","AGE","AGNC","ADDR","STAT",
                                "TEMP","HUSB","WIFE","OBJE","_CHUR"]:
                self.ignore_sub_junk(level+1)
            elif matches[1] == "SOUR":
                event.add_source_reference(self.handle_source(matches,level+1))
            elif matches[1] == "PLAC":
                val = matches[2]
                if self.placemap.has_key(val):
                    place_id = self.placemap[val]
                else:
                    place = RelLib.Place()
                    place.set_title(matches[2])
                    self.db.add_place(place, self.trans)
                    place_id = place.get_id()
                    self.placemap[val] = place_id
                event.set_place_id(place_id)
                self.ignore_sub_junk(level+1)
            elif matches[1] == 'OFFI':
                if note == "":
                    note = matches[2]
                else:
                    note = note + "\n" + matches[2]
            elif matches[1] == "NOTE":
                note = self.parse_note(matches,event,level+1,note)
            else:
                self.barf(level+1)
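
    # Illustrative source citation consumed by parse_source_reference below
    # (not part of this file; invented values, GEDCOM 5.5 layout):
    #
    #   2 SOUR @S1@
    #   3 PAGE 42
    #   3 QUAY 2
    #   3 DATA
    #   4 DATE 1 JAN 1900
    #   4 TEXT Transcribed text of the cited entry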

    def parse_source_reference(self,source,level):
        """Reads the data associated with a SOUR reference"""
        note = ""
        while 1:
            matches = self.get_next()

            if int(matches[0]) < level:
                self.backup()
                return
            elif matches[1] == "PAGE":
                source.set_page(matches[2] + self.parse_continue_data(level+1))
            elif matches[1] == "DATA":
                date,text = self.parse_source_data(level+1)
                d = Date.Date()
                d.set(date)
                source.set_date(d)
                source.set_text(text)
            elif matches[1] in ["OBJE","REFN","TEXT"]:
                self.ignore_sub_junk(level+1)
            elif matches[1] == "QUAY":
                val = int(matches[2])
                if val > 1:
                    source.set_confidence_level(val+1)
                else:
                    source.set_confidence_level(val)
            elif matches[1] == "NOTE":
                note = self.parse_comment(matches,source,level+1,note)
            else:
                self.barf(level+1)

    def parse_source_data(self,level):
        """Parses the source data"""
        date = ""
        note = ""
        while 1:
            matches = self.get_next()
            if int(matches[0]) < level:
                self.backup()
                return (date,note)
            elif matches[1] == "DATE":
                date = matches[2]
            elif matches[1] == "TEXT":
                note = matches[2] + self.parse_continue_data(level+1)
            else:
                self.barf(level+1)
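
    # Illustrative personal name structure consumed by parse_name below
    # (not part of this file; invented values, GEDCOM 5.5 layout):
    #
    #   1 NAME John /Smith/ Jr.
    #   2 GIVN John
    #   2 SURN Smith
    #   2 NSFX Jr.
    #   2 NICK Jack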

    def parse_name(self,name,level):
        """Parses the person's name information"""
        note = ""
        while 1:
            matches = self.get_next()
            if int(matches[0]) < level:
                self.backup()
                return
            elif matches[1] in ["ALIA","_ALIA"]:
                aka = RelLib.Name()
                try:
                    names = nameRegexp.match(matches[2]).groups()
                except:
                    names = (matches[2],"","","","")
                if names[0]:
                    aka.set_first_name(names[0])
                if names[2]:
                    aka.set_surname(names[2])
                if names[4]:
                    aka.set_suffix(names[4])
                self.person.add_alternate_name(aka)
            elif matches[1] == "NPFX":
                name.set_title(matches[2])
            elif matches[1] == "GIVN":
                name.set_first_name(matches[2])
            elif matches[1] == "SPFX":
                name.set_surname_prefix(matches[2])
            elif matches[1] == "SURN":
                name.set_surname(matches[2])
            elif matches[1] == "_MARNM":
                self.parse_marnm(self.person,matches[2].strip())
            elif matches[1] == "TITL":
                name.set_suffix(matches[2])
            elif matches[1] == "NSFX":
                if name.get_suffix() == "":
                    name.set_suffix(matches[2])
            elif matches[1] == "NICK":
                self.person.set_nick_name(matches[2])
            elif matches[1] == "_AKA":
                lname = string.split(matches[2])
                l = len(lname)
                if l == 1:
                    self.person.set_nick_name(matches[2])
                else:
                    # use a separate Name object; rebinding 'name' here would
                    # clobber the primary name still being parsed
                    aka = RelLib.Name()
                    aka.set_surname(lname[-1])
                    aka.set_first_name(string.join(lname[0:l-1]))
                    self.person.add_alternate_name(aka)
            elif matches[1] == "SOUR":
                name.add_source_reference(self.handle_source(matches,level+1))
            elif matches[1][0:4] == "NOTE":
                note = self.parse_note(matches,name,level+1,note)
            else:
                self.barf(level+1)

    def parse_marnm(self,person,text):
        """Adds a 'Married Name' alternate name built from a _MARNM value."""
        data = text.split()
        if len(data) == 1:
            name = RelLib.Name(person.get_primary_name())
            name.set_surname(data[0])
            name.set_type('Married Name')
            person.add_alternate_name(name)
        elif len(data) > 1:
            name = RelLib.Name()
            name.set_surname(data[-1])
            name.set_first_name(string.join(data[0:-1],' '))
            name.set_type('Married Name')
            person.add_alternate_name(name)

    def parse_header_head(self):
        """Validates that this is a valid GEDCOM file"""
        line = string.replace(self.f.readline(),'\r','')
        match = headRE.search(line)
        if not match:
            raise Errors.GedcomError("%s is not a GEDCOM file" % self.filename)
        self.index = self.index + 1
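
    # Illustrative GEDCOM header consumed by parse_header_source below
    # (not part of this file; invented values, GEDCOM 5.5 layout):
    #
    #   0 HEAD
    #   1 SOUR FTW
    #   2 VERS 9.0
    #   2 NAME Family Tree Maker for Windows
    #   1 DEST GEDCOM
    #   1 CHAR ANSEL
    #   1 GEDC
    #   2 VERS 5.5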

    def parse_header_source(self):
        """Parses the level 1 records of the GEDCOM header."""
        genby = ""
        while 1:
            matches = self.get_next()

            if int(matches[0]) == 0:
                self.backup()
                return
            elif matches[1] == "SOUR":
                if self.window and self.created_obj.get_text():
                    self.update(self.created_obj,matches[2])
                self.gedsource = self.gedmap.get_from_source_tag(matches[2])
                self.broken_conc = self.gedsource.get_conc()
                if matches[2] == "FTW":
                    self.is_ftw = 1
                genby = matches[2]
            elif matches[1] == "NAME" and self.window:
                self.update(self.created_obj,matches[2])
            elif matches[1] == "VERS" and self.window:
                self.update(self.version_obj,matches[2])
            elif matches[1] in ["CORP","DATA","SUBM","SUBN","COPR","FILE","LANG"]:
                self.ignore_sub_junk(2)
            elif matches[1] == "DEST":
                if genby == "GRAMPS":
                    self.gedsource = self.gedmap.get_from_source_tag(matches[2])
                    self.broken_conc = self.gedsource.get_conc()
            elif matches[1] == "CHAR":
                if matches[2] == "UNICODE" or matches[2] == "UTF-8" or matches[2] == "UTF8":
                    self.cnv = nocnv
                elif matches[2] == "ANSEL":
                    self.cnv = ansel_to_utf8
                else:
                    self.cnv = latin_utf8.latin_to_utf8
                self.ignore_sub_junk(2)
                if self.window:
                    self.update(self.encoding_obj,matches[2])
            elif matches[1] == "GEDC":
                self.ignore_sub_junk(2)
            elif matches[1] == "_SCHEMA":
                self.parse_ftw_schema(2)
            elif matches[1] == "PLAC":
                self.parse_place_form(2)
            elif matches[1] == "DATE":
                date = self.parse_date(2)
                date.date = matches[2]
            elif matches[1] == "NOTE":
                note = matches[2] + self.parse_continue_data(2)
            elif matches[1][0] == "_":
                self.ignore_sub_junk(2)
            else:
                self.barf(2)

    def parse_ftw_schema(self,level):
        while 1:
            matches = self.get_next()

            if int(matches[0]) < level:
                self.backup()
                return
            elif matches[1] == "INDI":
                self.parse_ftw_indi_schema(level+1)
            elif matches[1] == "FAM":
                self.parse_ftw_fam_schema(level+1)
            else:
                self.barf(2)

    def parse_ftw_indi_schema(self,level):
        while 1:
            matches = self.get_next()

            if int(matches[0]) < level:
                self.backup()
                return
            else:
                label = self.parse_label(level+1)
                ged2gramps[matches[1]] = label

    def parse_label(self,level):
        while 1:
            matches = self.get_next()

            if int(matches[0]) < level:
                self.backup()
                return
            elif matches[1] == "LABL":
                return matches[2]
            else:
                self.barf(2)

    def parse_ftw_fam_schema(self,level):
        while 1:
            matches = self.get_next()

            if int(matches[0]) < level:
                self.backup()
                return
            else:
                label = self.parse_label(level+1)
                ged2fam[matches[1]] = label

    def ignore_sub_junk(self,level):
        """Reads and discards all records at the given level or deeper."""
        while 1:
            matches = self.get_next()

            if int(matches[0]) < level:
                self.backup()
                return

    def ignore_change_data(self,level):
        matches = self.get_next()
        if matches[1] == "CHAN":
            self.ignore_sub_junk(level+1)
        else:
            self.backup()

    def parse_place_form(self,level):
        while 1:
            matches = self.get_next()

            if int(matches[0]) < level:
                self.backup()
                return
            elif matches[1] != "FORM":
                self.barf(level+1)
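
    # Illustrative continuation lines handled by parse_continue_data below
    # (not part of this file; invented values, GEDCOM 5.5 layout).  CONC glues
    # text onto the previous line, CONT starts a new line:
    #
    #   1 NOTE This note was split across
    #   2 CONC  several physical lines
    #   2 CONT and this part begins a new line.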

    def parse_continue_data(self,level):
        """Collects CONC/CONT continuation records into a single string."""
        data = ""
        while 1:
            matches = self.get_next()

            if int(matches[0]) < level:
                self.backup()
                return data
            elif matches[1] == "CONC":
                if self.broken_conc:
                    data = "%s %s" % (data,matches[2])
                else:
                    data = "%s%s" % (data,matches[2])
            elif matches[1] == "CONT":
                data = "%s\n%s" % (data,matches[2])
            else:
                self.backup()
                return data

    def parse_note_continue(self,level):
        data = ""
        while 1:
            matches = self.get_next()

            if int(matches[0]) < level:
                self.backup()
                return data
            elif matches[1] == "NOTE":
                data = "%s\n%s%s" % (data,matches[2],self.parse_continue_data(level+1))
            elif matches[1] == "CONC":
                if self.broken_conc:
                    data = "%s %s" % (data,matches[2])
                else:
                    data = "%s%s" % (data,matches[2])
            elif matches[1] == "CONT":
                data = "%s\n%s" % (data,matches[2])
            else:
                self.backup()
                return data

    def parse_date(self,level):
        date = DateStruct()
        while 1:
            matches = self.get_next()

            if int(matches[0]) < level:
                self.backup()
                return date
            elif matches[1] == "TIME":
                date.time = matches[2]
            else:
                self.barf(level+1)
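
    # Illustrative DATE values handled by extract_date below (not part of this
    # file; invented values).  The exact patterns accepted depend on calRegexp
    # and fromtoRegexp defined earlier in this module; the calendar escape
    # syntax is from the GEDCOM 5.5 spec:
    #
    #   2 DATE ABT 12 JAN 1900
    #   2 DATE FROM 1812 TO 1815
    #   2 DATE @#DJULIAN@ 4 MAR 1651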

    def extract_date(self,text):
        """Converts a GEDCOM date value into a Date object, handling
        calendar escapes and FROM/TO ranges."""
        dateobj = Date.Date()
        try:
            match = fromtoRegexp.match(text)
            if match:
                (cal1,data1,cal2,data2) = match.groups()
                if cal1 != cal2:
                    pass

                if cal1 == "FRENCH R":
                    dateobj.set_calendar(FrenchRepublic.FrenchRepublic)
                elif cal1 == "JULIAN":
                    dateobj.set_calendar(Julian.Julian)
                elif cal1 == "HEBREW":
                    dateobj.set_calendar(Hebrew.Hebrew)
                dateobj.get_start_date().set(data1)
                dateobj.get_stop_date().set(data2)
                dateobj.set_range(1)
                return dateobj

            match = calRegexp.match(text)
            if match:
                (abt,cal,data) = match.groups()
                if cal == "FRENCH R":
                    dateobj.set_calendar(FrenchRepublic.FrenchRepublic)
                elif cal == "JULIAN":
                    dateobj.set_calendar(Julian.Julian)
                elif cal == "HEBREW":
                    dateobj.set_calendar(Hebrew.Hebrew)
                dateobj.set(data)
                if abt:
                    dateobj.get_start_date().setMode(abt)
            else:
                dateobj.set(text)
        except:
            dateobj.set_text(text)
        return dateobj
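
    # handle_source below distinguishes the two forms a SOUR record can take;
    # as an illustration only (not part of this file; invented values):
    #
    #   2 SOUR @S14@                          (pointer to a SOUR record)
    #   2 SOUR Letter from Aunt Martha, 1921  (inline text; becomes an
    #                                          'Imported Source #n' entry)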

    def handle_source(self,matches,level):
        """Builds a SourceRef for either an inline source or a source pointer."""
        source_ref = RelLib.SourceRef()
        if matches[2] and matches[2][0] != "@":
            self.localref = self.localref + 1
            ref = "gsr%d" % self.localref
            s = self.db.find_source(ref,self.smap,self.trans)
            source_ref.set_base_id(s.get_id())
            s.set_title('Imported Source #%d' % self.localref)
            s.set_note(matches[2] + self.parse_continue_data(level))
            self.ignore_sub_junk(level+1)
        else:
            source_ref.set_base_id(self.db.find_source(matches[2],self.smap,self.trans).get_id())
            self.parse_source_reference(source_ref,level)
        return source_ref

    def resolve_refns(self):
        prefix = self.db.iprefix
        index = 0
        new_pmax = self.db.pmap_index
        for pid in self.added.keys():
            index = index + 1
            if self.refn.has_key(pid):
                val = self.refn[pid]
                new_key = prefix % val
                new_pmax = max(new_pmax,val)

                person = self.db.find_person_from_id(pid,self.trans)

                # new ID is not used
                if not self.db.has_person_id(new_key):
                    self.db.remove_person_id(pid,self.trans)
                    person.set_id(new_key)
                    self.db.add_person(person,self.trans)
                else:
                    tp = self.db.find_person_from_id(new_key,self.trans)
                    # same person, just change it
                    if person == tp:
                        self.db.remove_person_id(pid,self.trans)
                        person.set_id(new_key)
                        self.db.add_person_as(person,self.trans)
                    # give up trying to use the refn as a key
                    else:
                        pass

        self.db.pmap_index = new_pmax

def extract_temple(matches):
    try:
        if const.lds_temple_to_abrev.has_key(matches[2]):
            return const.lds_temple_to_abrev[matches[2]]
        else:
            values = matches[2].split()
            return const.lds_temple_to_abrev[values[0]]
    except:
        return None

#-------------------------------------------------------------------------
#
#
#
#-------------------------------------------------------------------------
def readData(database,active_person,cb):
    global db
    global callback
    global file_topa

    db = database
    callback = cb

    choose = gtk.FileChooserDialog("%s - GRAMPS" % _title_string,
                                   None,
                                   gtk.FILE_CHOOSER_ACTION_OPEN,
                                   (gtk.STOCK_CANCEL,
                                    gtk.RESPONSE_CANCEL,
                                    gtk.STOCK_OPEN,
                                    gtk.RESPONSE_OK))
    filter = gtk.FileFilter()
    filter.set_name(_('GEDCOM files'))
    filter.add_pattern('*.ged')
    filter.add_pattern('*.GED')
    choose.add_filter(filter)

    filter = gtk.FileFilter()
    filter.set_name(_('All files'))
    filter.add_pattern('*')
    choose.add_filter(filter)

    response = choose.run()
    if response == gtk.RESPONSE_OK:
        filename = choose.get_filename()
        choose.destroy()
        try:
            importData(db,filename)
        except:
            import DisplayTrace
            DisplayTrace.DisplayTrace()
    else:
        choose.destroy()

#-------------------------------------------------------------------------
#
#
#
#-------------------------------------------------------------------------
from Plugins import register_import
register_import(readData,_title_string)