#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N. Allingham
# Copyright (C) 2007      Johan Gonqvist <johan.gronqvist@gmail.com>
# Copyright (C) 2007      Gary Burton <gary.burton@zen.co.uk>
# Copyright (C) 2007-2009 Stephane Charette <stephanecharette@gmail.com>
# Copyright (C) 2008      Brian G. Matherly
# Copyright (C) 2008      Jason M. Simanek <jason@bohemianalps.com>
# Copyright (C) 2008-2009 Rob G. Healey <robhealey1@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#

# $Id: $

"""
Narrative Web Page generator.
"""

#------------------------------------------------------------------------
#
# Suggested pylint usage:
#    --max-line-length=100                      Yes, I know PEP8 suggests 80, but this has longer lines
#    --argument-rgx='[a-z_][a-z0-9_]{1,30}$'    Several identifiers are two characters
#    --variable-rgx='[a-z_][a-z0-9_]{1,30}$'    Several identifiers are two characters
#
#------------------------------------------------------------------------

#------------------------------------------------------------------------
#
# python modules
#
#------------------------------------------------------------------------
from __future__ import with_statement
import os, sys
import re
try:
    from hashlib import md5
except ImportError:
    from md5 import md5
import time, datetime
import locale
import shutil
import codecs
import tarfile
import tempfile
import operator
from TransUtils import sgettext as _
from cStringIO import StringIO
from textwrap import TextWrapper
from unicodedata import normalize

# attempt to import the python exif library?
try:
    import pyexiv2
    pyexiftaglib = True
except ImportError:
    pyexiftaglib = False

from gen.lib.repotype import RepositoryType

#------------------------------------------------------------------------
#
# Set up logging
#
#------------------------------------------------------------------------
import logging
log = logging.getLogger(".WebPage")

#------------------------------------------------------------------------
#
# GRAMPS module
#
#------------------------------------------------------------------------
from gen.lib import UrlType, EventType, Person, date, Date, ChildRefType, \
                    FamilyRelType, NameType, Name
import const
import Sort
from gen.plug import PluginManager
from gen.plug.menu import PersonOption, NumberOption, StringOption, \
                          BooleanOption, EnumeratedListOption, FilterOption, \
                          NoteOption, MediaOption, DestinationOption
from ReportBase import (Report, ReportUtils, MenuReportOptions, CATEGORY_WEB,
                        Bibliography, CSS_FILES)
import Utils
from gui.utils import ProgressMeter
import ThumbNails
import ImgManip
import Mime
from Utils import probably_alive, xml_lang
from QuestionDialog import ErrorDialog, WarningDialog
from BasicUtils import name_displayer as _nd
from DateHandler import displayer as _dd
from DateHandler import parser as _dp
from gen.proxy import PrivateProxyDb, LivingProxyDb
from gen.lib.eventroletype import EventRoleType
from libhtmlconst import _CHARACTER_SETS, _CC, _COPY_OPTIONS

# import HTML Class from
# src/plugins/lib/libhtml.py
from libhtml import Html

# import styled notes from
# src/plugins/lib/libhtmlbackend.py
from libhtmlbackend import HtmlBackend

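# Note on the Html helper (based on how it is used throughout this module):
# Html instances behave as nestable tree nodes.  They can be combined with
# '+' or '+=' and used as context managers, e.g.
#
#     with Html('div', class_='subsection') as section:
#         section += Html('h4', _('Gallery'), inline=True)
#
# which later renders as a <div> element containing an <h4> heading when the
# page tree is written out.
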
#------------------------------------------------------------------------
#
# constants
#
#------------------------------------------------------------------------
# Translatable strings for variables within this plugin
# gettext carries a huge footprint with it.
AHEAD = _('Attributes')
DHEAD = _('Date')
DESCRHEAD = _('Description')
EHEAD = _('Type')
NHEAD = _('Notes')
PHEAD = _('Place')
SHEAD = _('Sources')
THEAD = _('Type')
VHEAD = _('Value')
STREET = _('Street')
CITY = _('City')
PARISH = _('Church Parish')
COUNTY = _('County')
STATE = _('State/ Province')
COUNTRY = _('Country')
POSTAL = _('Postal Code')
PHONE = _('Phone')
LONGITUDE = _('Longitude')
LATITUDE = _('Latitude')
LOCATIONS = _('Alternate Locations')
TMPL = _('Temple')
ST = _('Status')

# define clear blank line for proper styling
fullclear = Html('div', class_='fullclear', inline=True)

# Names for stylesheets
_NARRATIVESCREEN = 'narrative-screen.css'
_NARRATIVEPRINT = 'narrative-print.css'

# variables for alphabet_navigation()
_PERSON, _PLACE = 0, 1

# Web page filename extensions
_WEB_EXT = ['.html', '.htm', '.shtml', '.php', '.php3', '.cgi']

_INCLUDE_LIVING_VALUE = 99 # Arbitrary number
_NAME_COL = 3

_DEFAULT_MAX_IMG_WIDTH = 800   # resize images that are wider than this (settable in options)
_DEFAULT_MAX_IMG_HEIGHT = 600  # resize images that are taller than this (settable in options)
_WIDTH = 160
_HEIGHT = 50
_VGAP = 10
_HGAP = 30
_SHADOW = 5
_XOFFSET = 5

wrapper = TextWrapper()
wrapper.break_long_words = True
wrapper.width = 20

_html_dbl_quotes = re.compile(r'([^"]*) " ([^"]*) " (.*)', re.VERBOSE)
_html_sng_quotes = re.compile(r"([^']*) ' ([^']*) ' (.*)", re.VERBOSE)
_html_replacement = {
    "&" : "&#38;",
    ">" : "&#62;",
    "<" : "&#60;",
    }

# This function defines 'html_escape' for escaping special characters
# for presentation in HTML, based on the replacement list above.
def html_escape(text):
    """Convert the text and replace some characters with a &# variant."""

    # First single characters, no quotes
    text = ''.join([_html_replacement.get(c, c) for c in text])

    # Deal with double quotes.
    while 1:
        m = _html_dbl_quotes.match(text)
        if not m:
            break
        text = m.group(1) + '&#8220;' + m.group(2) + '&#8221;' + m.group(3)
    # Replace remaining double quotes.
    text = text.replace('"', '&#34;')

    # Deal with single quotes.
    text = text.replace("'s ", '&#8217;s ')
    while 1:
        m = _html_sng_quotes.match(text)
        if not m:
            break
        text = m.group(1) + '&#8216;' + m.group(2) + '&#8217;' + m.group(3)
    # Replace remaining single quotes.
    text = text.replace("'", '&#39;')

    return text

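# Example of html_escape (illustrative, assuming the numeric-entity
# replacements defined above):
#
#     html_escape('Fred & "Ginger"') == 'Fred &#38; &#8220;Ginger&#8221;'
#
# i.e. bare ampersands and angle brackets become numeric entities, and a
# quoted phrase is wrapped in typographic quote entities.
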
def name_to_md5(text):
    """This creates an MD5 hex string to be used as filename."""
    return md5(text).hexdigest()

def conf_priv(obj):
    if obj.get_privacy() != 0:
        return ' priv="%d"' % obj.get_privacy()
    else:
        return ''

class BasePage(object):
    """
    This is the base class to write certain HTML pages.
    """

    def __init__(self, report, title, gid=None):
        """
        report - instance of NavWebReport
        title - text for the <title> tag
        gid - Gramps ID
        """

        # class to do conversion of styled notes to html markup
        self._backend = HtmlBackend()

        self.report = report
        self.title_str = title
        self.gid = gid
        self.src_list = {}

        self.page_title = ""

        self.author = Utils.get_researcher().get_name()
        if self.author:
            self.author = self.author.replace(',,,', '')
        self.up = False

        # TODO. All of these attributes are not necessary, because we have
        # also the options in self.options. Besides, we need to check which
        # are still required.
        self.html_dir = report.options['target']
        self.ext = report.options['ext']
        self.noid = report.options['nogid']
        self.linkhome = report.options['linkhome']
        self.create_media = report.options['gallery']

    def get_citation_links(self, source_ref_list):
        self.bibli = Bibliography()

        gid_list = []
        lnk = (self.report.cur_fname, self.page_title, self.gid)

        for sref in source_ref_list:
            handle = sref.get_reference_handle()
            gid_list.append(sref)

            if handle in self.src_list:
                if lnk not in self.src_list[handle]:
                    self.src_list[handle].append(lnk)
            else:
                self.src_list[handle] = [lnk]

        text = ""
        if len(gid_list):
            text = text + " <sup>"
            for ref in gid_list:
                index, key = self.bibli.add_reference(ref)
                id_ = "%d%s" % (index+1, key)
                text = text + '<a href="#sref%s">%s</a>' % (id_, id_)
            text = text + "</sup>"

        # return citation list text to its callers
        return text

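    # Illustrative shape of the string returned by get_citation_links above:
    # with two source references it looks roughly like
    #     ' <sup><a href="#sref1a">1a</a><a href="#sref2a">2a</a></sup>'
    # and the anchors are resolved by the matching name="sref..." targets
    # written by display_source_refs() further down.
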
    def get_note_format(self, note):
        """
        will get the note from the database, and will return either the
        styled text or plain note
        """

        # retrieve the body of the note
        note_text = note.get()

        # styled notes
        htmlnotetext = self.styled_note(note.get_styledtext(),
                                        note.get_format())
        if htmlnotetext:
            text = htmlnotetext
        else:
            text = Html('p', note_text)

        # return text of the note to its callers
        return text

    #################################################
    #
    # Will produce styled notes for NarrativeWeb by using:
    # src/plugins/lib/libhtmlbackend.py
    #
    #################################################

    def styled_note(self, styledtext, format):
        """
        styledtext : assumed a StyledText object to write
        format : = 0 : Flowed, = 1 : Preformatted
        style_name : name of the style to use for default presentation
        """
        text = str(styledtext)

        if not text:
            return ''

        s_tags = styledtext.get_tags()
        #FIXME: following split should be regex to match \n\s*\n instead?
        markuptext = self._backend.add_markup_from_styled(text, s_tags,
                                                          split='\n\n')
        htmllist = Html('div', id='grampsstylednote')
        if format == 1:
            #preformatted, retain whitespace.
            #so use \n\n for paragraph detection
            #FIXME: following split should be regex to match \n\s*\n instead?
            htmllist += Html('pre', indent=None, inline=True)
            for line in markuptext.split('\n\n'):
                htmllist += Html('p')
                for realline in line.split('\n'):
                    htmllist += realline
                    htmllist += Html('br')

        elif format == 0:
            #flowed
            #FIXME: following split should be regex to match \n\s*\n instead?
            for line in markuptext.split('\n\n'):
                htmllist += Html('p')
                htmllist += line

        return htmllist

    def dump_notes(self, notelist):
        """
        dump out a list of notes with very few elements of its own

        @param: notelist -- list of notes
        """

        if not notelist:
            return ' '
        db = self.report.database

        # begin unordered list
        unordered = Html('ul')

        for notehandle in notelist:
            note = db.get_note_from_handle(notehandle)
            unordered += self.get_note_format(note)

        # return unordered note list to its callers
        return unordered

    def get_event_data(self, event, evt_ref):
        db = self.report.database

        # Event/ Type
        evt_name = str(event.get_type())

        if evt_ref.get_role() == EventRoleType.PRIMARY:
            eventtype = u"%(evt_name)s" % locals()
        else:
            evt_role = evt_ref.get_role()
            eventtype = u"%(evt_name)s (%(evt_role)s)" % locals()

        # get place name
        place_handle = event.get_place_handle()
        place = db.get_place_from_handle(place_handle)

        place_hyper = None
        if place:
            place_name = ReportUtils.place_name(db, place_handle)
            place_hyper = self.place_link(place_handle, place_name,
                                          place.gramps_id, True)

        # get event and event_ref notes
        notelist = event.get_note_list()
        notelist.extend(evt_ref.get_note_list() )

        # wrap it all up and return to its callers
        info = [
            ['Type', eventtype],
            ['Date', _dd.display(event.get_date_object() )],
            ['Place', place_hyper],
            ['Description', event.get_description()],
            ['Sources', self.get_citation_links(event.get_source_references() )],
            ['Notes', notelist],
            ['Attributes', event.get_attribute_list()]
            ]
        return info

    def dump_ordinance(self, db, ldsobj, LDSType='Person'):
        """
        will dump the LDS Ordinance information for either
        a person or a family ...

        @param: db -- the database in use
        @param: ldsobj -- either person or family
        """
        objectldsord = ldsobj.lds_ord_list
        if not objectldsord:
            return None
        numberofords = len(objectldsord)

        def create_LDS_header_row(LDSType):
            """ create the header row for this section """

            # begin HTML row
            trow = Html('tr')

            header_row = [
                [THEAD, 'LDSType'],
                [DHEAD, 'LDSDate'],
                [TMPL, 'LDSTemple'],
                [PHEAD, 'LDSPlace'],
                [ST, 'LDSStatus'],
                [_('Sealed to '), 'LDSSealed'],
                [SHEAD, 'LDSSources']
                ]

            # finish the label's missing piece
            header_row[5][0] += _('Parents') if LDSType == 'Person' else _('Spouse')

            for (label, colclass) in header_row:
                trow += Html('th', label, class_='Column%s' % colclass, inline=True)

            # return row back to module
            return trow

        # begin LDS ordinance table and table head
        with Html('table', class_='infolist ldsordlist') as table:
            thead = Html('thead')
            table += thead

            # get LDS ord header row
            thead += create_LDS_header_row(LDSType)

            # start table body
            tbody = Html('tbody')
            table += tbody

            for row in range(1, (numberofords + 1)):

                # get ordinance for this row
                ord = objectldsord[(row - 1)]

                # 0 = column class, 1 = ordinance data
                lds_ord_data = [
                    ['LDSType', ord.type2xml()],
                    ['LDSDate', _dd.display(ord.get_date_object() )],
                    ['LDSTemple', ord.get_temple()],
                    ['LDSPlace', ReportUtils.place_name(db, ord.get_place_handle() )],
                    ['LDSStatus', ord.get_status()],
                    ['LDSSealed', ord.get_family_handle()],
                    ['LDSSources', self.get_citation_links(ord.get_source_references() )],
                    ]

                # begin ordinance rows
                trow = Html('tr')
                tbody += trow

                for col in range(1, (len(lds_ord_data) + 1)):

                    # column class for styling
                    colclass = lds_ord_data[(col - 1)][0]

                    # actual column data
                    value = lds_ord_data[(col - 1)][1]
                    value = value or '&nbsp;'

                    trow += Html('td', value, class_='Column%s' % colclass,
                        inline=True if value == '&nbsp;' else False)

        # return table to its callers
        return table

    def source_link(self, handle, name, gid=None, up=False):

        url = self.report.build_url_fname_html(handle, 'src', up)
        # begin hyperlink
        hyper = Html('a', html_escape(name), href=url, title=html_escape(name))
        if not self.noid and gid:
            hyper += Html('span', '[%s]' % gid, class_='grampsid', inline=True)

        # return hyperlink to its callers
        return hyper

    def dump_source_references(self, db, sourcelist):
        """ Dump a list of source references """

        ordered = Html('ol')
        list = Html('li')
        ordered += list

        source_dict = {}
        # Sort the sources
        for handle in sourcelist:

            # if source is not None, then add it?
            source = db.get_source_from_handle(handle)
            if source is not None:
                key = source.get_title() + str(source.get_gramps_id())
                source_dict[key] = (source, handle)
        keys = sorted(source_dict, key=locale.strxfrm)

        for cindex, key in enumerate(keys):
            (source, handle) = source_dict[key]
            source_title = source.get_title()

            list += self.source_link(handle, source_title, source.gramps_id, True)

        # return ordered list to its callers
        return ordered

    def write_out_addresses(self, obj, spec=False):
        """
        will display an object's addresses, url list, note list,
        and source references.

        param: spec = True  -- repository
                      False -- person
        """

        def write_address_header(spec):
            """ create header row for address """

            trow = Html('tr')
            addr_header = [
                [DHEAD, 'Date'],
                [STREET, 'StreetAddress'],
                [CITY, 'City'],
                [COUNTY, 'County'],
                [STATE, 'State'],
                [COUNTRY, 'Cntry'],
                [POSTAL, 'Postalcode'],
                [PHONE, 'Phone'] ]

            # if spec = True -- an individual's address else repository
            if spec:
                addr_header.append([SHEAD, 'Source'])

            for (label, colclass) in addr_header:
                trow += Html('th', label, class_='Column%s' % colclass, inline=True)

            # return table header row back to module
            return trow

        # begin summaryarea division
        with Html('div', id='summaryarea') as summaryarea:

            # begin address table
            with Html('table', class_='infolist repolist') as table:
                summaryarea += table

                # begin table head
                thead = Html('thead')
                table += thead

                # add header row
                thead += write_address_header(spec)

                # begin table body
                tbody = Html('tbody')
                table += tbody

                # get address list from an object; either repository or person
                for address in obj.get_address_list():

                    trow = Html('tr')
                    tbody += trow

                    addrcollist = [
                        ['Date', _dd.display(address.get_date_object() )],
                        ['Street', address.get_street()],
                        ['City', address.get_city()],
                        ['County', address.get_county()],
                        ['State', address.get_state()],
                        ['Cntry', address.get_country()],
                        ['Postal', address.get_postal_code()],
                        ['Phone', address.get_phone()] ]

                    # get source citation list
                    if spec:
                        addrcollist.append([SHEAD, self.get_citation_links(
                            address.get_source_references())])

                    for (colclass, value) in addrcollist:

                        value = value or '&nbsp;'
                        trow += Html('td', value, class_='Column%s' % colclass, inline=True)

                    # address: note list
                    notelist = self.display_note_list(address.get_note_list())
                    if notelist is not None:
                        summaryarea += notelist

        # return summaryarea division to its callers
        return summaryarea

    def get_copyright_license(self, copyright, up=False):
        """
        will return either the text or image of the copyright license
        """

        text = ''
        if copyright == 0:
            if self.author:
                year = date.Today().get_year()
                text = '&copy; %(year)d %(person)s' % {
                    'person' : self.author,
                    'year' : year}
        elif 0 < copyright <= len(_CC):
            # Note. This is a URL
            fname = '/'.join(["images", "somerights20.gif"])
            url = self.report.build_url_fname(fname, None, up=False)
            text = _CC[copyright] % {'gif_fname' : url}

        # return text or image to its callers
        return text

    def get_name(self, person, maiden_name = None):
        """
        Return person's name, unless maiden_name given, unless married_name
        listed.
        """

        # name_format is the name format that you set in options
        name_format = self.report.options['name_format']

        # Get all of a person's names
        primary_name = person.get_primary_name()
        married_name = None
        names = [primary_name] + person.get_alternate_names()
        for name in names:
            if int(name.get_type()) == NameType.MARRIED:
                married_name = name
                break # use first

        # Now, decide which to use:
        if maiden_name is not None:
            if married_name is not None:
                name = Name(married_name)
            else:
                name = Name(primary_name)
            name.set_surname(maiden_name)
        else:
            name = Name(primary_name)
        name.set_display_as(name_format)
        return _nd.display_name(name)

    def display_attr_list(self, attrobj):
        """
        will display an object's attributes

        @param: attrobj -- object whose attributes will be displayed
        """
        attrlist = attrobj.get_attribute_list()
        if not attrlist:
            return None

        # begin attributes division and section title
        with Html('div', class_='subsection', id='attributes') as section:
            section += Html('h4', _('Attributes'), inline=True)

            # begin section table
            with Html('table', class_='infolist attrlist') as table:
                section += table

                # begin table head
                thead = Html('thead')
                table += thead

                trow = Html('tr')
                thead += trow
                attr_header_row = [
                    (THEAD, 'Type'),
                    (VHEAD, 'Value'),
                    (SHEAD, 'Source'),
                    (NHEAD, 'Notes') ]

                for (label, colclass) in attr_header_row:
                    trow += Html('th', label, class_='Column%s' % colclass, inline=True)

                # begin table body
                tbody = Html('tbody')
                table += tbody

                for attr in attrlist:
                    trow = Html('tr')
                    tbody += trow

                    for (colclass, value) in [
                        ['Type', attr.get_type().xml_str()],
                        ['Value', attr.get_value()],
                        ['Sources', self.get_citation_links(attr.get_source_references() )],
                        ['Notes', self.dump_notes(attr.get_note_list() )] ]:

                        trow += Html('td', value, class_='Column%s' % colclass, inline=True)

        # return section to its caller
        return section

    def write_footer(self):
        """
        Will create and display the footer section of each page...
        """
        db = self.report.database

        # begin footer division
        with Html('div', id='footer') as section:

            footer_note = self.report.options['footernote']
            if footer_note:
                note = db.get_note_from_gramps_id(footer_note)
                note_text = self.get_note_format(note)

                user_footer = Html('div', id='user_footer')
                section += user_footer

                # attach note
                user_footer += note_text

            value = _dd.display(date.Today())
            msg = _('Generated by <a href="%(homepage)s">'
                    'GRAMPS</a> on %(date)s') % {
                    'date': value, 'homepage' : const.URL_HOMEPAGE
                    }

            # optional "link-home" feature; see bug report #2736
            if self.report.options['linkhome']:
                home_person = db.get_default_person()
                if home_person:
                    home_person_url = self.report.build_url_fname_html(home_person.handle, 'ppl', self.up)
                    home_person_name = self.get_name(home_person)
                    msg += _(' Created for <a href="%s">%s</a>') % (
                        home_person_url, home_person_name)

            # creation date
            section += Html('p', msg, id='createdate')

            # get copyright license for all pages
            copy_nr = self.report.copyright

            text = ''
            if copy_nr == 0:
                if self.author:
                    year = date.Today().get_year()
                    text = '&copy; %(year)d %(person)s' % {
                        'person' : self.author,
                        'year' : year}
            elif 0 < copy_nr <= len(_CC):
                # Note. This is a URL
                fname = '/'.join(["images", "somerights20.gif"])
                url = self.report.build_url_fname(fname, None, self.up)
                text = _CC[copy_nr] % {'gif_fname' : url}
            section += Html('p', text, id='copyright')

        # return footer to its caller
        return section

    def write_header(self, title):
        """
        Note. 'title' is used as currentsection in the navigation links and
        as part of the header title.
        """
        db = self.report.database

        # Header constants
        xmllang = xml_lang()
        _META1 = 'name="generator" content="%s %s %s"' % (
            const.PROGRAM_NAME, const.VERSION, const.URL_HOMEPAGE
            )
        _META2 = 'name="author" content="%s"' % self.author

        page, head, body = Html.page('%s - %s' %
                                     (html_escape(self.title_str),
                                      html_escape(title)),
                                     self.report.encoding, xmllang
                                     )

        # create additional meta tags
        meta = (Html('meta', attr = _META1) +
                Html('meta', attr = _META2, indent=False)
                )

        # Link to media reference regions behaviour stylesheet
        fname = '/'.join(["styles", "behaviour.css"])
        url1 = self.report.build_url_fname(fname, None, self.up)

        # Link to _NARRATIVESCREEN stylesheet
        fname = '/'.join(["styles", _NARRATIVESCREEN])
        url3 = self.report.build_url_fname(fname, None, self.up)

        # Link to _NARRATIVEPRINT stylesheet
        fname = '/'.join(["styles", _NARRATIVEPRINT])
        url4 = self.report.build_url_fname(fname, None, self.up)

        # Link to GRAMPS favicon
        fname = '/'.join(['images', 'favicon.ico'])
        url5 = self.report.build_url_image('favicon.ico', 'images', self.up)

        # create stylesheet and favicon links
        links = [Html('link', href=url5, type='image/x-icon', rel='shortcut icon'),
                 Html('link', href=url1, type='text/css', media='screen', rel='stylesheet'),
                 Html('link', href=url3, type='text/css', media='screen', rel='stylesheet'),
                 Html('link', href=url4, type='text/css', media='print', rel='stylesheet')
                 ]

        # add additional meta and link tags
        head += meta
        head += links

        # replace standard body element with custom one
        body.attr = 'id= "NarrativeWeb"'

        # begin header section
        headerdiv = (Html('div', id='header') +
            Html('h1', html_escape(self.title_str), id='SiteTitle', inline=True)
            )
        body += headerdiv

        header_note = self.report.options['headernote']
        if header_note:
            note = db.get_note_from_gramps_id(header_note)
            note_text = self.get_note_format(note)

            user_header = Html('div', id='user_header')
            headerdiv += user_header

            # attach note
            user_header += note_text

        # Begin Navigation Menu
        body += self.display_nav_links(title)

        # return to its caller, page and body
        return page, body

    def display_nav_links(self, currentsection):
        """
        Creates the navigation menu
        """
        db = self.report.database

        # include repositories or not?
        inc_repos = True
        if not self.report.inc_repository or \
            len(db.get_repository_handles()) == 0:
            inc_repos = False

        navs = [
            (self.report.index_fname, _('Home'), self.report.use_home),
            (self.report.intro_fname, _('Introduction'), self.report.use_intro),
            (self.report.surname_fname, _('Surnames'), True),
            ('individuals', _('Individuals'), True),
            ('places', _('Places'), True),
            ('events', _('Events'), self.report.inc_events),
            ('media', _('Media'), self.create_media),
            ('download', _('Download'), self.report.inc_download),
            ('contact', _('Contact'), self.report.use_contact),
            ('sources', _('Sources'), True),
            ('repositories', _('Repositories'), inc_repos),
            ]

        navigation = Html('div', id='navigation')
        ul = Html('ul')

        navs = ((u, n) for u, n, c in navs if c)
        for url_fname, nav_text in navs:

            if not _has_webpage_extension(url_fname):
                url_fname += self.ext

            url = self.report.build_url_fname(url_fname, None, self.up)

            # Define 'currentsection' to correctly set navlink item CSS id
            # 'CurrentSection' for Navigation styling.
            # Use 'self.report.cur_fname' to determine 'CurrentSection' for
            # individual elements for Navigation styling.

            # Figure out if we need <li class="CurrentSection"> or just plain <li>
            cs = False
            if nav_text == currentsection:
                cs = True
            elif nav_text == _('Surnames'):
                if "srn" in self.report.cur_fname:
                    cs = True
                elif _('Surnames') in currentsection:
                    cs = True
            elif nav_text == _('Individuals'):
                if "ppl" in self.report.cur_fname:
                    cs = True
            elif nav_text == _('Sources'):
                if "src" in self.report.cur_fname:
                    cs = True
            elif nav_text == _('Places'):
                if "plc" in self.report.cur_fname:
                    cs = True
            elif nav_text == _('Events'):
                if 'evt' in self.report.cur_fname:
                    cs = True
            elif nav_text == _('Media'):
                if "img" in self.report.cur_fname:
                    cs = True

            cs = cs and 'class="CurrentSection"' or ''
            ul += (Html('li', attr=cs, inline=True) +
                   Html('a', nav_text, href=url)
                   )

        navigation += ul

        # return navigation menu bar to its caller
        return navigation

    def display_first_image_as_thumbnail(self, photolist=None):
        db = self.report.database

        if not photolist or not self.create_media:
            return None

        photo_handle = photolist[0].get_reference_handle()
        photo = db.get_object_from_handle(photo_handle)
        mime_type = photo.get_mime_type()

        # begin snapshot division
        with Html('div', class_='snapshot') as snapshot:

            if mime_type:
                try:
                    lnkref = (self.report.cur_fname, self.page_title, self.gid)
                    self.report.add_lnkref_to_photo(photo, lnkref)
                    real_path, newpath = self.report.prepare_copy_media(photo)

                    # TODO. Check if build_url_fname can be used.
                    newpath = '/'.join(['..']*3 + [newpath])
                    if ( Utils.win ):
                        newpath = newpath.replace('\\','/')

                    # begin hyperlink
                    # description is given only for the purpose of the alt tag in img element
                    snapshot += self.media_link(photo_handle, newpath, '', up=True)

                except (IOError, OSError), msg:
                    WarningDialog(_("Could not add photo to page"), str(msg))
            else:

                # get media description
                descr = photo.get_description()

                # begin hyperlink
                snapshot += self.doc_link(photo_handle, descr, up=True)

                lnk = (self.report.cur_fname, self.page_title, self.gid)
                # FIXME. Is it OK to add to the photo_list of report?
                photo_list = self.report.photo_list
                if photo_handle in photo_list:
                    if lnk not in photo_list[photo_handle]:
                        photo_list[photo_handle].append(lnk)
                else:
                    photo_list[photo_handle] = [lnk]

        # return snapshot division to its callers
        return snapshot

    def display_additional_images_as_gallery(self, photolist=None):

        if not photolist or not self.create_media:
            return None
        db = self.report.database

        # begin individualgallery division
        with Html('div', class_='subsection', id='indivgallery') as section:

            # begin section title
            section += Html('h4', _('Gallery'), inline=True)

            displayed = []
            for mediaref in photolist:

                photo_handle = mediaref.get_reference_handle()
                photo = db.get_object_from_handle(photo_handle)
                if photo_handle in displayed:
                    continue
                mime_type = photo.get_mime_type()

                # get media description
                descr = photo.get_description()

                if mime_type:
                    try:
                        lnkref = (self.report.cur_fname, self.page_title, self.gid)
                        self.report.add_lnkref_to_photo(photo, lnkref)
                        real_path, newpath = self.report.prepare_copy_media(photo)
                        # TODO. Check if build_url_fname can be used.
                        newpath = '/'.join(['..']*3 + [newpath])
                        if ( Utils.win ):
                            newpath = newpath.replace('\\','/')

                        # begin hyperlink
                        section += self.media_link(photo_handle, newpath, descr, True, False)

                    except (IOError, OSError), msg:
                        WarningDialog(_("Could not add photo to page"), str(msg))
                else:
                    try:

                        # begin hyperlink
                        section += self.doc_link(photo_handle, descr, up=True)

                        lnk = (self.report.cur_fname, self.page_title, self.gid)
                        # FIXME. Is it OK to add to the photo_list of report?
                        photo_list = self.report.photo_list
                        if photo_handle in photo_list:
                            if lnk not in photo_list[photo_handle]:
                                photo_list[photo_handle].append(lnk)
                        else:
                            photo_list[photo_handle] = [lnk]
                    except (IOError, OSError), msg:
                        WarningDialog(_("Could not add photo to page"), str(msg))
                displayed.append(photo_handle)

            # add clearline for proper styling
            section += fullclear

        # return indivgallery division to its caller
        return section

    def display_note_list(self, notelist=None):

        if not notelist:
            return None
        db = self.report.database

        # begin narrative division
        with Html('div', class_='subsection', id='narrative') as section:

            for notehandle in notelist:
                note = db.get_note_from_handle(notehandle)

                if note:
                    note_text = self.get_note_format(note)
                    try:
                        note_text = unicode(note_text)
                    except UnicodeDecodeError:
                        note_text = unicode(str(note_text), errors='replace')

                    # add section title
                    section += Html('h4', _('Narrative'), inline=True)

                    # attach note
                    section += note_text

        # return notes to its callers
        return section

    def display_url_list(self, urllist=None):

        if not urllist:
            return None

        # begin web links division
        with Html('div', class_='subsection', id='weblinks') as section:

            # begin web title
            title = Html('h4', _('Weblinks'), inline=True)
            section += title

            # ordered list
            ordered = Html('ol')
            section += ordered

            for url in urllist:
                uri = url.get_path()
                descr = url.get_description()
                if not descr:
                    descr = uri
                if url.get_type() == UrlType.EMAIL and not uri.startswith("mailto:"):
                    ordered += Html('li') + Html('a', descr, href='mailto:%s' % url)

                elif url.get_type() == UrlType.WEB_HOME and not uri.startswith("http://"):
                    ordered += Html('li') + Html('a', descr, href='http://%s' % url)

                elif url.get_type() == UrlType.WEB_FTP and not uri.startswith("ftp://"):
                    ordered += Html('li') + Html('a', descr, href='ftp://%s' % url)
                else:
                    ordered += Html('li') + Html('a', descr, href=url)

        # return web links to its caller
        return section

    def display_source_refs(self, bibli):
        if bibli.get_citation_count() == 0:
            return None
        db = self.report.database

        # source references division and title
        with Html('div', class_='subsection', id='sourcerefs') as section:
            section += Html('h4', _('Source References'), inline=True)

            ordered = Html('ol')
            section += ordered

            cindex = 0
            for citation in bibli.get_citation_list():
                cindex += 1
                # Add this source to the global list of sources to be displayed
                # on each source page.
                lnk = (self.report.cur_fname, self.page_title, self.gid)
                shandle = citation.get_source_handle()
                if shandle in self.src_list:
                    if lnk not in self.src_list[shandle]:
                        self.src_list[shandle].append(lnk)
                else:
                    self.src_list[shandle] = [lnk]

                # Add this source and its references to the page
                source = db.get_source_from_handle(shandle)
                title = source.get_title()
                list = Html('li')
                ordered += list

                hyper = Html('a', name='sref%d' % cindex) + \
                    self.source_link(source.handle, title, source.gramps_id, True)
                list += hyper

                ordered1 = Html('ol')
                list += ordered1

                for key, sref in citation.get_ref_list():

                    tmp = []
                    confidence = Utils.confidence.get(sref.confidence, _('Unknown'))
                    if confidence == _('Normal'):
                        confidence = None

                    source_data = [
                        [DHEAD, _dd.display(sref.date)],
                        [_('Page'), sref.page],
                        [_('Confidence'), confidence]
                        ]
                    for (label, data) in source_data:
                        if data:
                            tmp.append("%s: %s" % (label, data))

                    # get citation note list
                    notelist = sref.get_note_list()
                    for notehandle in notelist:
                        note = db.get_note_from_handle(notehandle)

                        # check if note is styled text or not?
                        note_text = self.get_note_format(note)
                        tmp.append("%s: %s" % (_('Text'), note_text))

                    if len(tmp):
                        list1 = Html('li') + (
                            Html('a', '; '.join(tmp), name='sref%d%s' % (cindex, key))
                            )
                        ordered1 += list1

        # return section to its callers
        return section

    def display_references(self, handlelist, up=False):

        if not handlelist:
            return None

        # begin references division and title
        with Html('div', class_='subsection', id='references') as section:
            section += Html('h4', _('References'), inline=True)

            ordered = Html('ol')
            section += ordered
            sortlist = sorted(handlelist, key=lambda x:locale.strxfrm(x[1]))

            for (path, name, gid) in sortlist:
                list = Html('li')
                ordered += list

                # Note. 'path' already has a filename extension
                url = self.report.build_url_fname(path, None, self.up)
                list += self.person_link(url, name, None, gid)

        # return references division to its caller
        return section

    def person_link(self, url, person, name_style, gid=None, thumbnailUrl=None):
        """
        creates a hyperlink for a person

        name_style = False -- first and suffix only
                   = True  -- name displayed in name_format variable
                   = None  -- person is name
        """

        # see above for explanation
        if name_style:
            person_name = self.get_name(person)
        elif name_style == False:
            person_name = _get_short_name(person.gender, person.primary_name)
        elif name_style == None: # abnormal specialty situation
            person_name = person

        # 1. start building link to image or person
        hyper = Html('a', href=url)

        # 2. insert thumbnail if there is one, otherwise insert class = "noThumb"
        if thumbnailUrl:
            hyper += (Html('span', class_="thumbnail") +
                      Html('img', src= thumbnailUrl, alt = "Image of " + person_name)
                      )
        else:
            hyper.attr += ' class= "noThumb"'

        # 3. insert the person's name
        hyper += person_name

        # 4. insert gramps id if requested and available
        if not self.noid and gid:
            hyper += Html('span', '[%s]' % gid, class_="grampsid", inline=True)

        # return hyperlink to its caller
        return hyper

    def individual_link(self, url, person):
        """
        creates a hyperlink for a partner in IndividualListPage, and SurnameListPage
        with no image class attached to it.

        @param: url = hyperlink to person
        @param: person = person to be hyperlinked
        """

        person_name = self.get_name(person)

        # 1. start building link to image or person
        hyper = Html('a', person_name, href=url, title=html_escape(person_name))

        # return hyperlink to its caller
        return hyper

    # TODO. Check img_url of callers
    def media_link(self, handle, img_url, name, up, usedescr=True):
        url = self.report.build_url_fname_html(handle, 'img', up)

        # begin thumbnail division
        with Html('div', class_='thumbnail') as thumbnail:

            # begin hyperlink
            hyper = (Html('a', href=url, title=name) +
                     Html('img', src=img_url, alt=name) +
                     (Html('p', inline=True) +
                      html_escape(name) if usedescr else '')
                     )
            # add hyperlink and description to thumbnail division
            thumbnail += hyper

        # return thumbnail division to its callers
        return thumbnail

    def doc_link(self, handle, name, up, usedescr=True):
        # TODO. Check extension of handle
        url = self.report.build_url_fname(handle, 'img', up)

        # begin thumbnail division
        thumbnail = Html('div', class_='thumbnail')

        # begin hyperlink
        hyper = Html('a', href=url, title=name)
        url = self.report.build_url_image('document.png', 'images', up)
        hyper += Html('img', src=url, alt=html_escape(name))
        if usedescr:
            descr = Html('p', html_escape(name), inline=True)
        else:
            descr = ''

        # add hyperlink and description to thumbnail division
        thumbnail += (hyper, descr)

        # return thumbnail division to its callers
        return thumbnail

2009-06-30 01:34:00 +05:30
|
|
|
def repository_link(self, handle, name, cindex, gid=None, up=False):
|
|
|
|
|
|
|
|
url = self.report.build_url_fname_html(handle, 'repo', up)
|
|
|
|
# begin hyperlink
|
|
|
|
hyper = Html('a', html_escape(name), href=url, title=name)
|
|
|
|
if not self.noid and gid:
|
|
|
|
hyper += Html('span', '[%s]' % gid, class_='grampsid', inline=True)
|
|
|
|
|
|
|
|
# return hyperlink to its callers
|
|
|
|
return hyper
|
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
def place_link(self, handle, name, gid=None, up=False):
|
2008-03-21 03:54:36 +05:30
|
|
|
url = self.report.build_url_fname_html(handle, 'plc', up)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
hyper = Html('a', html_escape(name), href=url, title=name)
|
2008-03-21 03:54:36 +05:30
|
|
|
if not self.noid and gid:
|
2009-06-11 22:15:30 +05:30
|
|
|
hyper += Html('span', ' [%s] ' % gid, class_='grampsid', inline=True)
|
|
|
|
|
|
|
|
# return hyperlink to its callers
|
|
|
|
return hyper
|
2005-02-10 07:14:05 +05:30
|
|
|
|
2009-08-21 06:07:59 +05:30
|
|
|
def event_link(self, eventtype, handle, gid, up=False):
|
|
|
|
""" createsa hyperlink for an event based on its type """
|
|
|
|
|
|
|
|
url = self.report.build_url_fname_html(handle, 'evt', up)
|
|
|
|
|
|
|
|
hyper = Html('a', html_escape(eventtype), href=url, title=eventtype)
|
|
|
|
if not self.noid and gid:
|
|
|
|
hyper += Html('span', ' [%s] ' % gid, class_='grampsid', inline=True)
|
|
|
|
|
|
|
|
# return hyperlink back to its caller
|
|
|
|
return hyper
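# For reference, the hyperlink returned above renders roughly as
# (hypothetical id; exact markup depends on libhtml):
#   <a href="..." title="Birth">Birth</a><span class="grampsid"> [E0123] </span>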
|
|
|
|
|
2009-08-10 10:22:41 +05:30
|
|
|
# ---------------------------------------------------------------------------------------
|
|
|
|
#
|
|
|
|
# Web Page Formatter and writer
|
|
|
|
#
|
|
|
|
# ---------------------------------------------------------------------------------------
|
|
|
|
|
|
|
|
def mywriter(self, htmlinstance, of):
|
|
|
|
"""
|
|
|
|
Will format, write, and close the file
|
|
|
|
|
|
|
|
of -- open file that is being written to
|
|
|
|
htmlinstance -- web page created with libhtml
|
|
|
|
src/plugins/lib/libhtml.py
|
|
|
|
"""
|
|
|
|
|
|
|
|
htmlinstance.write(lambda line: of.write(line + '\n'))
|
|
|
|
|
|
|
|
# closes the file
|
|
|
|
self.report.close_file(of)
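# Typical call pattern, assembled as a sketch from the page classes below
# (not additional behaviour of this method):
#   of = self.report.create_file("individuals")
#   page, body = self.write_header(_('Individuals'))
#   ...                       # build the page body
#   self.mywriter(page, of)   # format, write and close 'of'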
|
|
|
|
|
2005-02-10 07:14:05 +05:30
|
|
|
class IndividualListPage(BasePage):
|
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
def __init__(self, report, title, person_handle_list):
|
|
|
|
BasePage.__init__(self, report, title)
|
2009-06-16 00:55:43 +05:30
|
|
|
db = report.database
|
|
|
|
|
|
|
|
# handles for this module for use in partner column
|
|
|
|
report_handle_list = person_handle_list
|
|
|
|
|
|
|
|
# plugin variables for this module
|
|
|
|
showbirth = report.options['showbirth']
|
|
|
|
showdeath = report.options['showdeath']
|
|
|
|
showpartner = report.options['showpartner']
|
|
|
|
showparents = report.options['showparents']
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2008-03-18 02:39:16 +05:30
|
|
|
of = self.report.create_file("individuals")
|
2009-06-25 01:33:10 +05:30
|
|
|
indlistpage, body = self.write_header(_('Individuals'))
|
2009-02-03 13:31:31 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# begin Individuals division
|
|
|
|
with Html('div', class_='content', id='Individuals') as section:
|
|
|
|
body += section
|
2005-02-10 07:14:05 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# Individual List description
|
|
|
|
msg = _("This page contains an index of all the individuals in the "
|
|
|
|
"database, sorted by their last names. Selecting the person’s "
|
|
|
|
"name will take you to that person’s individual page.")
|
|
|
|
section += Html('p', msg, id='description')
|
2008-03-06 18:37:37 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# add alphabet navigation after page msg
|
2009-06-21 08:55:28 +05:30
|
|
|
alpha_nav = alphabet_navigation(db, person_handle_list, _PERSON)
|
2009-06-16 00:55:43 +05:30
|
|
|
if alpha_nav is not None:
|
|
|
|
section += alpha_nav
|
2009-03-25 10:27:07 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# begin table and table head
|
|
|
|
with Html('table', class_='infolist IndividualList') as table:
|
|
|
|
section += table
|
|
|
|
thead = Html('thead')
|
|
|
|
table += thead
|
2009-03-25 10:27:07 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
trow = Html('tr')
|
|
|
|
thead += trow
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# Table Header -- Surname and Given name columns
|
2009-06-25 01:33:10 +05:30
|
|
|
tcell1 = Html('th', _('Surname'), class_='ColumnSurname', inline=True)
|
|
|
|
tcell2 = Html('th', _('Name'), class_='ColumnName', inline=True)
|
|
|
|
trow += (tcell1, tcell2)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# table header -- show birth column
|
|
|
|
if showbirth:
|
|
|
|
trow += Html('th', _('Birth'), class_='ColumnBirth', inline=True)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# table header -- show death column
|
|
|
|
if showdeath:
|
|
|
|
trow += Html('th', _('Death'), class_='ColumnDeath', inline=True)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# table header -- show partner column
|
|
|
|
if showpartner:
|
|
|
|
trow += Html('th', _('Partner'), class_='ColumnPartner', inline=True)
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# table header -- show parents column
|
|
|
|
if showparents:
|
|
|
|
trow += Html('th', _('Parents'), class_='ColumnParents', inline=True)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# begin table body
|
|
|
|
tbody = Html('tbody')
|
|
|
|
table += tbody
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# group the person handles for this report by surname
|
|
|
|
person_handle_list = sort_people(db, person_handle_list)
|
|
|
|
|
|
|
|
for (surname, handle_list) in person_handle_list:
|
|
|
|
first = True
|
|
|
|
if surname:
|
|
|
|
letter = normalize('NFKC', surname)[0].upper()
|
2005-08-18 11:28:28 +05:30
|
|
|
else:
|
2009-06-16 00:55:43 +05:30
|
|
|
letter = u' '
|
|
|
|
# See : http://www.gramps-project.org/bugs/view.php?id=2933
|
|
|
|
(lang_country, modifier ) = locale.getlocale()
|
|
|
|
if lang_country == "sv_SE" and ( letter == u'W' or letter == u'V' ):
|
|
|
|
letter = u'V,W'
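# e.g. with a Swedish (sv_SE) locale both 'Wallin' and 'Viklund'
# (illustrative surnames) are indexed under the combined 'V,W' heading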
|
|
|
|
for person_handle in handle_list:
|
|
|
|
person = db.get_person_from_handle(person_handle)
|
|
|
|
|
|
|
|
# surname column
|
|
|
|
trow = Html('tr')
|
|
|
|
tcell = Html('td', class_='ColumnSurname', inline=True)
|
|
|
|
if first:
|
|
|
|
trow.attr = 'class="BeginSurname"'
|
|
|
|
if surname:
|
|
|
|
tcell += Html('a', surname, name=letter, title='Letter %s' % letter,
|
|
|
|
inline=True)
|
|
|
|
else:
|
|
|
|
tcell += ' '
|
|
|
|
else:
|
|
|
|
tcell += ' '
|
|
|
|
tbody += trow
|
|
|
|
trow += tcell
|
2007-07-19 12:15:25 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
first = False
|
|
|
|
# firstname column
|
|
|
|
tcell = Html('td', class_='ColumnName')
|
|
|
|
trow += tcell
|
2009-06-21 08:55:28 +05:30
|
|
|
url = self.report.build_url_fname_html(person.handle, 'ppl')
|
|
|
|
tcell += self.person_link(url, person, False, person.gramps_id)
|
2005-12-06 12:08:09 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# birth column
|
|
|
|
if showbirth:
|
|
|
|
tcell = Html('td', class_='ColumnBirth', inline=True)
|
|
|
|
birth = ReportUtils.get_birth_or_fallback(db, person)
|
|
|
|
if birth:
|
|
|
|
birth_date = _dd.display(birth.get_date_object())
|
|
|
|
if birth.get_type() == EventType.BIRTH:
|
|
|
|
tcell += birth_date
|
|
|
|
else:
|
2009-08-03 01:36:00 +05:30
|
|
|
tcell += Html('em', birth_date)
|
2007-09-28 17:46:12 +05:30
|
|
|
else:
|
2009-06-16 00:55:43 +05:30
|
|
|
tcell += ' '
|
|
|
|
trow += tcell
|
|
|
|
|
|
|
|
# death column
|
|
|
|
if showdeath:
|
|
|
|
tcell = Html('td', class_='ColumnDeath', inline=True)
|
|
|
|
death = ReportUtils.get_death_or_fallback(db, person)
|
|
|
|
if death:
|
|
|
|
death_date = _dd.display(death.get_date_object())
|
|
|
|
if death.get_type() == EventType.DEATH:
|
|
|
|
tcell += death_date
|
|
|
|
else:
|
|
|
|
tcell += Html('em', death_date)
|
2007-09-28 17:46:12 +05:30
|
|
|
else:
|
2009-06-16 00:55:43 +05:30
|
|
|
tcell += ' '
|
|
|
|
trow += tcell
|
|
|
|
|
|
|
|
# partner column
|
|
|
|
if showpartner:
|
|
|
|
tcell = Html('td', class_='ColumnPartner')
|
|
|
|
family_list = person.get_family_handle_list()
|
|
|
|
first_family = True
|
|
|
|
partner_name = None
|
|
|
|
if family_list:
|
|
|
|
for family_handle in family_list:
|
|
|
|
family = db.get_family_from_handle(family_handle)
|
|
|
|
partner_handle = ReportUtils.find_spouse(person, family)
|
|
|
|
if partner_handle:
|
|
|
|
partner = db.get_person_from_handle(partner_handle)
|
|
|
|
partner_name = self.get_name(partner)
|
|
|
|
if not first_family:
|
|
|
|
tcell += ', '
|
|
|
|
if partner_handle in report_handle_list:
|
|
|
|
url = self.report.build_url_fname_html(partner_handle, 'ppl')
|
2009-08-07 09:08:33 +05:30
|
|
|
tcell += self.individual_link(url, partner)
|
2009-06-16 00:55:43 +05:30
|
|
|
else:
|
|
|
|
tcell += partner_name
|
|
|
|
first_family = False
|
2009-06-11 22:15:30 +05:30
|
|
|
else:
|
2009-06-16 00:55:43 +05:30
|
|
|
tcell += ' '
|
|
|
|
else:
|
|
|
|
tcell += ' '
|
|
|
|
trow += tcell
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# parents column
|
|
|
|
if showparents:
|
|
|
|
tcell = Html('td', class_='ColumnParents')
|
|
|
|
parent_handle_list = person.get_parent_family_handle_list()
|
|
|
|
if parent_handle_list:
|
|
|
|
parent_handle = parent_handle_list[0]
|
|
|
|
family = db.get_family_from_handle(parent_handle)
|
|
|
|
father_handle = family.get_father_handle()
|
|
|
|
mother_handle = family.get_mother_handle()
|
|
|
|
father = db.get_person_from_handle(father_handle)
|
|
|
|
mother = db.get_person_from_handle(mother_handle)
|
|
|
|
if father:
|
|
|
|
father_name = self.get_name(father)
|
|
|
|
if mother:
|
|
|
|
mother_name = self.get_name(mother)
|
|
|
|
if mother and father:
|
|
|
|
fathercell = Html('span', father_name, class_='father fatherNmother')
|
|
|
|
mothercell = Html('span', mother_name, class_='mother')
|
|
|
|
tcell += (fathercell, mothercell)
|
|
|
|
elif mother:
|
|
|
|
tcell += Html('span', mother_name, class_='mother')
|
|
|
|
elif father:
|
|
|
|
tcell += Html('span', father_name, class_='father')
|
|
|
|
else:
|
|
|
|
tcell += ' '
|
|
|
|
trow += tcell
|
2008-03-14 03:58:22 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# create clear line for proper styling
|
2009-06-16 00:55:43 +05:30
|
|
|
# create footer section
|
2009-06-25 01:33:10 +05:30
|
|
|
footer = self.write_footer()
|
2009-06-16 00:55:43 +05:30
|
|
|
body += (fullclear, footer)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# send page out for processing
|
2009-06-25 01:33:10 +05:30
|
|
|
# and close the file
|
2009-06-16 00:55:43 +05:30
|
|
|
self.mywriter(indlistpage, of)
|
2005-08-18 11:28:28 +05:30
|
|
|
|
|
|
|
class SurnamePage(BasePage):
|
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
def __init__(self, report, title, surname, person_handle_list, report_handle_list):
|
2008-03-14 03:58:22 +05:30
|
|
|
BasePage.__init__(self, report, title)
|
2009-06-11 22:15:30 +05:30
|
|
|
db = report.database
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2009-06-12 11:34:30 +05:30
|
|
|
# module variables
|
2008-03-21 03:54:36 +05:30
|
|
|
showbirth = report.options['showbirth']
|
|
|
|
showdeath = report.options['showdeath']
|
2009-05-30 14:16:12 +05:30
|
|
|
showpartner = report.options['showpartner']
|
2008-03-21 03:54:36 +05:30
|
|
|
showparents = report.options['showparents']
|
|
|
|
|
2009-06-12 11:34:30 +05:30
|
|
|
of = self.report.create_file(name_to_md5(surname), 'srn')
|
|
|
|
self.up = True
|
|
|
|
surnamepage, body = self.write_header("%s - %s" % (_('Surname'), surname))
|
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# begin SurnameDetail division
|
2009-06-12 11:34:30 +05:30
|
|
|
with Html('div', id='SurnameDetail', class_='content') as surnamedetail:
|
|
|
|
body += surnamedetail
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# section title
|
2009-06-12 11:34:30 +05:30
|
|
|
surnamedetail += Html('h3', html_escape(surname), inline=True)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
msg = _("This page contains an index of all the individuals in the "
|
2009-06-12 11:34:30 +05:30
|
|
|
"database with the surname of %s. Selecting the person’s name "
|
|
|
|
"will take you to that person’s individual page.") % surname
|
|
|
|
surnamedetail += Html('p', msg, id='description')
|
|
|
|
|
|
|
|
# begin surname table and thead
|
|
|
|
with Html('table', class_='infolist surname') as surname_table:
|
|
|
|
surnamedetail += surname_table
|
2009-06-11 22:15:30 +05:30
|
|
|
with Html('thead') as thead:
|
2009-06-12 11:34:30 +05:30
|
|
|
surname_table += thead
|
|
|
|
tabhead = []
|
|
|
|
tabhead.append('Name')
|
|
|
|
if report.options['showbirth']:
|
|
|
|
tabhead.append('Birth')
|
|
|
|
if report.options['showdeath']:
|
|
|
|
tabhead.append('Death')
|
|
|
|
if report.options['showpartner']:
|
|
|
|
tabhead.append('Partner')
|
|
|
|
if report.options['showparents']:
|
|
|
|
tabhead.append('Parents')
|
2009-06-11 22:15:30 +05:30
|
|
|
with Html('tr') as trow:
|
2009-06-12 11:34:30 +05:30
|
|
|
thead += trow
|
|
|
|
|
|
|
|
# now spit out whatever is in table head
|
|
|
|
for column in tabhead:
|
|
|
|
trow += Html('th', _(column), class_='Column%s' % column,
|
|
|
|
inline=True)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# begin table body
|
|
|
|
with Html('tbody') as tbody:
|
2009-06-12 11:34:30 +05:30
|
|
|
surname_table += tbody
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
for person_handle in person_handle_list:
|
|
|
|
|
|
|
|
# firstname column
|
|
|
|
person = db.get_person_from_handle(person_handle)
|
2009-06-12 11:34:30 +05:30
|
|
|
trow = Html('tr')
|
|
|
|
tcell = Html('td', class_='ColumnName')
|
|
|
|
url = self.report.build_url_fname_html(person.handle, 'ppl', True)
|
2009-06-21 08:55:28 +05:30
|
|
|
tcell += self.person_link(url, person, False, person.gramps_id)
|
2009-06-12 11:34:30 +05:30
|
|
|
trow += tcell
|
|
|
|
|
|
|
|
# birth column
|
|
|
|
if showbirth:
|
|
|
|
tcell = Html('td', class_='ColumnBirth', inline=True)
|
|
|
|
birth = ReportUtils.get_birth_or_fallback(db, person)
|
|
|
|
if birth:
|
|
|
|
birth_date = _dd.display(birth.get_date_object())
|
|
|
|
if birth.get_type() == EventType.BIRTH:
|
|
|
|
tcell += birth_date
|
|
|
|
else:
|
|
|
|
tcell += Html('em', birth_date)
|
|
|
|
else:
|
|
|
|
tcell += ' '
|
|
|
|
trow += tcell
|
|
|
|
|
|
|
|
# death column
|
|
|
|
if showdeath:
|
|
|
|
tcell = Html('td', class_='ColumnDeath', inline=True)
|
|
|
|
death = ReportUtils.get_death_or_fallback(db, person)
|
|
|
|
if death:
|
|
|
|
death_date = _dd.display(death.get_date_object())
|
|
|
|
if death.get_type() == EventType.DEATH:
|
|
|
|
tcell += death_date
|
|
|
|
else:
|
|
|
|
tcell += Html('em', death_date)
|
|
|
|
else:
|
|
|
|
tcell += ' '
|
|
|
|
trow += tcell
|
|
|
|
|
|
|
|
# partner column
|
|
|
|
if showpartner:
|
|
|
|
tcell = Html('td', class_='ColumnPartner')
|
|
|
|
family_list = person.get_family_handle_list()
|
|
|
|
first_family = True
|
|
|
|
if family_list:
|
|
|
|
for family_handle in family_list:
|
|
|
|
family = db.get_family_from_handle(family_handle)
|
|
|
|
partner_handle = ReportUtils.find_spouse(person, family)
|
|
|
|
if partner_handle:
|
|
|
|
partner = db.get_person_from_handle(partner_handle)
|
|
|
|
partner_name = self.get_name(partner)
|
|
|
|
if not first_family:
|
|
|
|
tcell += ','
|
|
|
|
if partner_handle in report_handle_list:
|
2009-08-07 09:08:33 +05:30
|
|
|
url = self.report.build_url_fname_html(partner_handle, 'ppl', True)
|
|
|
|
tcell += self.individual_link(url, partner)
|
2009-06-11 22:15:30 +05:30
|
|
|
else:
|
2009-06-12 11:34:30 +05:30
|
|
|
tcell += partner_name
|
2009-06-11 22:15:30 +05:30
|
|
|
else:
|
|
|
|
tcell += ' '
|
2009-06-12 11:34:30 +05:30
|
|
|
else:
|
|
|
|
tcell += ' '
|
|
|
|
trow += tcell
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-12 11:34:30 +05:30
|
|
|
# parents column
|
|
|
|
if report.options['showparents']:
|
|
|
|
tcell = Html('td', class_='ColumnParents')
|
|
|
|
parent_handle_list = person.get_parent_family_handle_list()
|
|
|
|
if parent_handle_list:
|
|
|
|
parent_handle = parent_handle_list[0]
|
|
|
|
family = db.get_family_from_handle(parent_handle)
|
|
|
|
father_id = family.get_father_handle()
|
|
|
|
mother_id = family.get_mother_handle()
|
|
|
|
father = db.get_person_from_handle(father_id)
|
|
|
|
mother = db.get_person_from_handle(mother_id)
|
|
|
|
if father:
|
|
|
|
father_name = self.get_name(father)
|
|
|
|
if mother:
|
|
|
|
mother_name = self.get_name(mother)
|
|
|
|
if mother and father:
|
|
|
|
tcell += Html('span', father_name,
|
|
|
|
class_='father fatherNmother') + (
|
|
|
|
Html('span', mother_name, class_='mother')
|
|
|
|
)
|
|
|
|
elif mother:
|
|
|
|
tcell += Html('span', mother_name, class_='mother')
|
|
|
|
elif father:
|
|
|
|
tcell += Html('span', father_name, class_='father')
|
|
|
|
else:
|
|
|
|
tcell += ' '
|
|
|
|
trow += tcell
|
|
|
|
tbody += trow
|
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# add clearline for proper styling
|
|
|
|
# add footer section
|
2009-06-25 01:33:10 +05:30
|
|
|
footer = self.write_footer()
|
2009-06-11 22:15:30 +05:30
|
|
|
body += (fullclear, footer)
|
|
|
|
|
|
|
|
# send page out for processing
|
2009-06-25 01:33:10 +05:30
|
|
|
# and close the file
|
2009-06-12 11:34:30 +05:30
|
|
|
self.mywriter(surnamepage, of)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
class PlaceListPage(BasePage):
|
|
|
|
|
|
|
|
def __init__(self, report, title, place_handles, src_list):
|
|
|
|
BasePage.__init__(self, report, title)
|
|
|
|
self.src_list = src_list # TODO verify that this is correct
|
2009-06-21 08:55:28 +05:30
|
|
|
db = report.database
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
of = self.report.create_file("places")
|
2009-06-25 01:33:10 +05:30
|
|
|
placelistpage, body = self.write_header(_('Places'))
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# begin places division
|
2009-06-21 08:55:28 +05:30
|
|
|
with Html('div', class_='content', id='Places') as section:
|
|
|
|
body += section
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
msg = _("This page contains an index of all the places in the "
|
|
|
|
"database, sorted by their title. Clicking on a place’s "
|
|
|
|
"title will take you to that place’s page.")
|
|
|
|
section += Html('p', msg, id='description')
|
|
|
|
|
2009-06-25 01:33:10 +05:30
|
|
|
# begin alphabet navigation
|
2009-06-21 08:55:28 +05:30
|
|
|
alpha_nav = alphabet_navigation(db, place_handles, _PLACE)
|
|
|
|
if alpha_nav is not None:
|
|
|
|
section += alpha_nav
|
|
|
|
|
|
|
|
# begin places table and table head
|
|
|
|
with Html('table', class_='infolist placelist') as table:
|
|
|
|
section += table
|
|
|
|
|
|
|
|
# begin table head
|
|
|
|
thead = Html('thead')
|
|
|
|
table += thead
|
|
|
|
|
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('th', _('Letter'), class_='ColumnLetter', inline=True),
|
|
|
|
Html('th', _('Name'), class_='ColumnName', inline=True)
|
2009-06-11 22:15:30 +05:30
|
|
|
)
|
2009-06-21 08:55:28 +05:30
|
|
|
thead += trow
|
2009-02-08 15:12:19 +05:30
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
sort = Sort.Sort(db)
|
2009-06-30 19:35:57 +05:30
|
|
|
handle_list = sorted(place_handles, key=sort.by_place_title_key)
|
2009-06-21 08:55:28 +05:30
|
|
|
last_letter = ''
|
|
|
|
|
|
|
|
# begin table body
|
|
|
|
tbody = Html('tbody')
|
|
|
|
table += tbody
|
|
|
|
|
|
|
|
for handle in handle_list:
|
|
|
|
place = db.get_place_from_handle(handle)
|
|
|
|
place_title = ReportUtils.place_name(db, handle)
|
|
|
|
|
|
|
|
if not place_title:
|
|
|
|
continue
|
|
|
|
|
|
|
|
letter = normalize('NFKC', place_title)[0].upper()
|
|
|
|
# See : http://www.gramps-project.org/bugs/view.php?id=2933
|
|
|
|
(lang_country, modifier ) = locale.getlocale()
|
|
|
|
if lang_country == "sv_SE" and ( letter == u'W' or letter == u'V' ):
|
|
|
|
letter = u'V,W'
|
|
|
|
|
|
|
|
if letter != last_letter:
|
|
|
|
last_letter = letter
|
|
|
|
trow = Html('tr', class_='BeginLetter')
|
|
|
|
tbody += trow
|
|
|
|
tcell = Html('td', class_='ColumnLetter', inline=True) + (
|
|
|
|
Html('a', last_letter, name=last_letter, title="Letter %s" % last_letter)
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
trow = Html('tr')
|
|
|
|
tbody += trow
|
|
|
|
tcell = Html('td', ' ', class_='ColumnLetter', inline=True)
|
|
|
|
trow += tcell
|
|
|
|
|
|
|
|
tcell = Html('td', class_='ColumnName') + \
|
|
|
|
self.place_link(place.handle, place_title, place.gramps_id)
|
|
|
|
trow += tcell
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# add clearline for proper styling
|
2009-06-21 08:55:28 +05:30
|
|
|
# add footer section
|
2009-06-25 01:33:10 +05:30
|
|
|
footer = self.write_footer()
|
2009-06-21 08:55:28 +05:30
|
|
|
body += (fullclear, footer)
|
2008-03-14 03:58:22 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# send page out for processing
|
2009-06-25 01:33:10 +05:30
|
|
|
# and close the file
|
2009-06-21 08:55:28 +05:30
|
|
|
self.mywriter(placelistpage, of)
|
2005-02-13 09:24:47 +05:30
|
|
|
|
2005-07-09 01:54:54 +05:30
|
|
|
class PlacePage(BasePage):
|
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
def __init__(self, report, title, place_handle, src_list, place_list):
|
2009-06-21 08:55:28 +05:30
|
|
|
db = report.database
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
place = db.get_place_from_handle(place_handle)
|
2008-03-14 03:58:22 +05:30
|
|
|
BasePage.__init__(self, report, title, place.gramps_id)
|
|
|
|
self.src_list = src_list # TODO verify that this is correct
|
2008-03-12 03:41:42 +05:30
|
|
|
|
2008-03-21 03:54:36 +05:30
|
|
|
of = self.report.create_file(place.get_handle(), 'plc')
|
2008-03-12 03:41:42 +05:30
|
|
|
self.up = True
|
2009-06-21 08:55:28 +05:30
|
|
|
self.page_title = ReportUtils.place_name(db, place_handle)
|
|
|
|
placepage, body = self.write_header("%s - %s" % (_('Places'), self.page_title))
|
2009-02-03 13:31:31 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# begin PlaceDetail Division
|
2009-07-15 05:23:07 +05:30
|
|
|
with Html('div', class_='content', id='PlaceDetail') as placedetail:
|
|
|
|
body += placedetail
|
2005-07-09 01:54:54 +05:30
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
media_list = place.get_media_list()
|
|
|
|
thumbnail = self.display_first_image_as_thumbnail(media_list)
|
|
|
|
if thumbnail is not None:
|
2009-07-15 05:23:07 +05:30
|
|
|
placedetail += thumbnail
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-07-15 05:23:07 +05:30
|
|
|
# add section title
|
|
|
|
placedetail += Html('h3', html_escape(self.page_title.strip()))
|
2005-07-09 01:54:54 +05:30
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
# begin summaryarea division and places table
|
|
|
|
with Html('div', id='summaryarea') as summaryarea:
|
2009-07-15 05:23:07 +05:30
|
|
|
placedetail += summaryarea
|
2009-06-21 08:55:28 +05:30
|
|
|
|
|
|
|
with Html('table', class_='infolist place') as table:
|
|
|
|
summaryarea += table
|
|
|
|
|
|
|
|
if not self.noid:
|
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', _('GRAMPS ID'), class_='ColumnAttribute', inline=True),
|
|
|
|
Html('td', place.gramps_id, class_='ColumnValue', inline=True)
|
|
|
|
)
|
|
|
|
table += trow
|
|
|
|
|
|
|
|
if place.main_loc:
|
|
|
|
ml = place.main_loc
|
2009-08-11 06:59:53 +05:30
|
|
|
for val in [
|
2009-08-11 12:17:42 +05:30
|
|
|
(LATITUDE, place.lat),
|
|
|
|
(LONGITUDE, place.long),
|
|
|
|
(STREET, ml.street),
|
|
|
|
(CITY, ml.city),
|
|
|
|
(PARISH, ml.parish),
|
|
|
|
(COUNTY, ml.county),
|
|
|
|
(STATE, ml.state),
|
|
|
|
(POSTAL, ml.postal),
|
|
|
|
(COUNTRY, ml.country),
|
|
|
|
(LOCATIONS, place.get_alternate_locations()) ]:
|
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
if val[1]:
|
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', val[0], class_='ColumnAttribute', inline=True),
|
|
|
|
Html('td', val[1], class_='ColumnValue', inline=True)
|
|
|
|
)
|
|
|
|
table += trow
|
|
|
|
|
|
|
|
# place gallery
|
2009-06-30 01:34:00 +05:30
|
|
|
if self.create_media:
|
2009-06-21 08:55:28 +05:30
|
|
|
placegallery = self.display_additional_images_as_gallery(media_list)
|
|
|
|
if placegallery is not None:
|
2009-08-03 14:29:34 +05:30
|
|
|
placedetail += placegallery
|
2009-06-21 08:55:28 +05:30
|
|
|
|
|
|
|
# place notes
|
|
|
|
notelist = self.display_note_list(place.get_note_list())
|
|
|
|
if notelist is not None:
|
2009-07-15 05:23:07 +05:30
|
|
|
placedetail += notelist
|
2009-06-21 08:55:28 +05:30
|
|
|
|
|
|
|
# place urls
|
|
|
|
urllinks = self.display_url_list(place.get_url_list())
|
|
|
|
if urllinks is not None:
|
2009-07-15 05:23:07 +05:30
|
|
|
placedetail += urllinks
|
2009-06-21 08:55:28 +05:30
|
|
|
|
2009-08-11 12:17:42 +05:30
|
|
|
# source references
|
2009-08-12 01:35:56 +05:30
|
|
|
# sourcerefs = self.get_citation_links(place.get_source_references() )
|
|
|
|
# if sourcerefs is not None:
|
|
|
|
# placedetail += sourcerefs
|
2009-08-11 12:17:42 +05:30
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
# place references
|
|
|
|
referenceslist = self.display_references(place_list[place.handle])
|
|
|
|
if referenceslist is not None:
|
2009-07-15 05:23:07 +05:30
|
|
|
placedetail += referenceslist
|
2008-03-14 03:58:22 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# add clearline for proper styling
|
2009-06-21 08:55:28 +05:30
|
|
|
# add footer section
|
2009-06-25 01:33:10 +05:30
|
|
|
footer = self.write_footer()
|
2009-06-21 08:55:28 +05:30
|
|
|
body += (fullclear, footer)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# send page out for processing
|
2009-06-25 01:33:10 +05:30
|
|
|
# and close the file
|
2009-06-21 08:55:28 +05:30
|
|
|
self.mywriter(placepage, of)
|
2005-07-09 01:54:54 +05:30
|
|
|
|
2009-08-21 06:07:59 +05:30
|
|
|
class EventListPage(BasePage):
|
|
|
|
|
2009-08-27 18:51:55 +05:30
|
|
|
def __init__(self, report, title, event_dict):
|
|
|
|
BasePage.__init__(self, report, title)
|
2009-08-21 06:07:59 +05:30
|
|
|
db = report.database
|
|
|
|
|
|
|
|
of = self.report.create_file("events")
|
|
|
|
eventslistpage, body = self.write_header(_('Events'))
|
|
|
|
|
|
|
|
# begin events list division
|
|
|
|
with Html('div', class_='content', id='EventList') as eventlist:
|
|
|
|
body += eventlist
|
|
|
|
|
|
|
|
msg = _("This page contains an index of all the events in the "
|
|
|
|
"database, sorted by their type and person’s surname. "
|
|
|
|
"Clicking on an event’s type will take you to that "
|
|
|
|
"event’s page. Clicking on a place will take you to that "
|
|
|
|
"place’s page. Clicking on a person’s name will take "
|
|
|
|
"you to that person’s page.")
|
|
|
|
eventlist += Html('p', msg, id='description')
|
|
|
|
|
|
|
|
# begin event list table
|
|
|
|
with Html('table', class_='infolist eventlist') as table:
|
|
|
|
eventlist += table
|
|
|
|
|
|
|
|
# begin table head
|
|
|
|
thead = Html('thead')
|
|
|
|
table += thead
|
|
|
|
|
|
|
|
# begin table header row
|
|
|
|
trow = Html('tr')
|
|
|
|
thead += trow
|
|
|
|
|
|
|
|
for (label, colclass) in [
|
2009-08-27 18:51:55 +05:30
|
|
|
(EHEAD, 'Type'),
|
2009-08-21 06:07:59 +05:30
|
|
|
(DHEAD, 'Date'),
|
|
|
|
(DESCRHEAD, 'Description'),
|
|
|
|
(_('Person'), 'Person') ]:
|
|
|
|
|
|
|
|
trow += Html('th', label, class_ = 'Column%s' % colclass, inline = True)
|
|
|
|
|
2009-08-27 18:51:55 +05:30
|
|
|
# send entire events dictionary
|
|
|
|
table += self.write_event_rows(event_dict)
|
2009-08-21 06:07:59 +05:30
|
|
|
|
|
|
|
# and clearline for proper styling
|
|
|
|
# and footer section
|
|
|
|
footer = self.write_footer()
|
|
|
|
body += (fullclear, footer)
|
|
|
|
|
|
|
|
# send page out for processing
|
|
|
|
# and close the file
|
|
|
|
self.mywriter(eventslistpage, of)
|
|
|
|
|
2009-08-27 18:51:55 +05:30
|
|
|
def write_event_rows(self, event_dict):
|
2009-08-21 06:07:59 +05:30
|
|
|
"""
|
2009-08-27 18:51:55 +05:30
|
|
|
display the event row for class EventListPage()
|
2009-08-21 06:07:59 +05:30
|
|
|
"""
|
|
|
|
db = self.report.database
|
|
|
|
|
|
|
|
# begin table body
|
|
|
|
tbody = Html('tbody')
|
|
|
|
|
2009-08-27 18:51:55 +05:30
|
|
|
for (sort_name, person, event_list) in event_dict:
|
2009-08-21 06:07:59 +05:30
|
|
|
|
2009-08-27 18:51:55 +05:30
|
|
|
# get person's hyperlink
|
2009-08-26 11:22:37 +05:30
|
|
|
url = self.report.build_url_fname_html(person.handle, 'ppl', self.up)
|
|
|
|
person_hyper = self.person_link(url, person, True, person.gramps_id)
|
2009-08-21 06:07:59 +05:30
|
|
|
|
2009-08-27 18:51:55 +05:30
|
|
|
first = True
|
|
|
|
for (evt_type, sort_date, event, evt_ref) in event_list:
|
|
|
|
|
|
|
|
# event hyperlink
|
|
|
|
event_hyper = self.event_link(evt_type, evt_ref.ref, event.gramps_id)
|
|
|
|
|
|
|
|
# begin table row
|
|
|
|
trow = Html('tr')
|
|
|
|
tbody += trow
|
|
|
|
|
|
|
|
if first:
|
|
|
|
trow.attr = 'class="BeginName"'
|
|
|
|
|
|
|
|
for (colclass, data) in [
|
|
|
|
['Type', event_hyper],
|
|
|
|
['Date', _dd.display(event.get_date_object() )],
|
|
|
|
['Description', event.get_description()] ]:
|
|
|
|
data = data or ' '
|
|
|
|
|
|
|
|
# conditional statement for inline=True or False
|
|
|
|
samerow = (data == ' ' or colclass == 'Date')
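# inline=True keeps short cells (blank values and dates) on a single
# line of the generated HTML (assumed behaviour of libhtml's inline flag)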
|
2009-08-21 06:07:59 +05:30
|
|
|
|
2009-08-27 18:51:55 +05:30
|
|
|
trow += Html('td', data, class_='Column%s' % colclass, inline=samerow)
|
2009-08-21 06:07:59 +05:30
|
|
|
|
2009-08-27 18:51:55 +05:30
|
|
|
if first:
|
|
|
|
trow += Html('td', person_hyper, class_='ColumnName')
|
|
|
|
else:
|
|
|
|
trow += Html('td', ' ', class_='ColumnName', inline=True)
|
|
|
|
first = False
|
2009-08-21 06:07:59 +05:30
|
|
|
|
2009-08-27 18:51:55 +05:30
|
|
|
# return events table body to its caller
|
2009-08-21 06:07:59 +05:30
|
|
|
return tbody
|
|
|
|
|
|
|
|
class EventPage(BasePage):
|
2009-08-27 18:51:55 +05:30
|
|
|
def __init__(self, report, title, evt_type, event, evt_ref, person):
|
|
|
|
BasePage.__init__(self, report, '%s - %s' % (title, evt_type))
|
2009-08-21 06:07:59 +05:30
|
|
|
db = report.database
|
|
|
|
|
|
|
|
of = self.report.create_file(evt_ref.ref, 'evt')
|
|
|
|
self.up = True
|
|
|
|
eventpage, body = self.write_header(_('Events'))
|
|
|
|
|
|
|
|
# start event page division
|
|
|
|
with Html('div', class_='content', id='EventDetail') as eventdetail:
|
|
|
|
body += eventdetail
|
|
|
|
|
|
|
|
# display page title
|
2009-08-29 14:53:09 +05:30
|
|
|
title = _('%(type)s of %(name)s') % {'type' : evt_type,
|
|
|
|
'name' : self.get_name(person) }
|
|
|
|
eventdetail += Html('h3', title, inline=True)
|
2009-08-21 06:07:59 +05:30
|
|
|
|
|
|
|
# begin event detail table
|
2009-08-27 18:51:55 +05:30
|
|
|
with Html('table', class_='infolist eventlist') as table:
|
2009-08-21 06:07:59 +05:30
|
|
|
eventdetail += table
|
|
|
|
|
2009-08-29 14:36:48 +05:30
|
|
|
tbody = Html('tbody')
|
|
|
|
table += tbody
|
|
|
|
|
2009-08-21 06:07:59 +05:30
|
|
|
# get event data
|
|
|
|
event_row = self.get_event_data(event, evt_ref)
|
|
|
|
|
|
|
|
# the first five entries in the list are used here; the rest are used below
|
|
|
|
for index in xrange(5):
|
|
|
|
label = event_row[index][0]
|
|
|
|
data = event_row[index][1] or None
|
|
|
|
|
|
|
|
if data is not None:
|
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', label, class_= 'ColumnAttribute', inline=True),
|
|
|
|
Html('td', data, class_='ColumnValue', inline=True)
|
|
|
|
)
|
2009-08-29 14:36:48 +05:30
|
|
|
tbody += trow
|
2009-08-21 06:07:59 +05:30
|
|
|
|
2009-08-26 11:22:37 +05:30
|
|
|
url = self.report.build_url_fname_html(person.handle, 'ppl', self.up)
|
2009-08-27 18:51:55 +05:30
|
|
|
person_hyper = self.person_link(url, person, True, person.gramps_id)
|
2009-08-21 06:07:59 +05:30
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', _('Person'), class_='ColumnAttribute', inline=True),
|
2009-08-27 18:51:55 +05:30
|
|
|
Html('td', person_hyper, class_='ColumnValue', inline=True)
|
2009-08-21 06:07:59 +05:30
|
|
|
)
|
2009-08-29 14:36:48 +05:30
|
|
|
tbody += trow
|
2009-08-21 06:07:59 +05:30
|
|
|
|
2009-08-29 14:36:48 +05:30
|
|
|
# get notes section
|
|
|
|
notelist = event_row[5][1]
|
|
|
|
if notelist:
|
|
|
|
eventdetail += self.display_note_list(notelist)
|
2009-08-21 06:07:59 +05:30
|
|
|
|
2009-08-29 14:36:48 +05:30
|
|
|
# get attribute list
|
|
|
|
attrib = event_row[6][1]
|
|
|
|
if attrib:
|
|
|
|
eventdetail += self.display_attr_list(event)
|
2009-08-21 06:07:59 +05:30
|
|
|
|
|
|
|
# add clearline for proper styling
|
|
|
|
# add footer section
|
|
|
|
footer = self.write_footer()
|
|
|
|
body += (fullclear, footer)
|
|
|
|
|
|
|
|
# send page out for processing
|
|
|
|
# and close the page
|
|
|
|
self.mywriter(eventpage, of)
|
|
|
|
|
2005-08-18 11:28:28 +05:30
|
|
|
class MediaPage(BasePage):
|
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
def __init__(self, report, title, handle, src_list, my_media_list, info):
|
2005-08-18 11:28:28 +05:30
|
|
|
(prev, next, page_number, total_pages) = info
|
2008-03-14 03:58:22 +05:30
|
|
|
db = report.database
|
2009-07-23 01:33:07 +05:30
|
|
|
|
2005-08-18 11:28:28 +05:30
|
|
|
photo = db.get_object_from_handle(handle)
|
2008-03-14 03:58:22 +05:30
|
|
|
# TODO. How do we pass my_media_list down for use in BasePage?
|
|
|
|
BasePage.__init__(self, report, title, photo.gramps_id)
|
2008-03-18 02:39:16 +05:30
|
|
|
|
2009-02-04 13:01:15 +05:30
|
|
|
"""
|
|
|
|
*************************************
|
|
|
|
GRAMPS feature #2634 -- attempt to highlight subregions in media
|
|
|
|
objects and link back to the relevant web page.
|
|
|
|
|
|
|
|
This next section of code builds up the "records" we'll need to
|
|
|
|
generate the html/css code to support the subregions
|
|
|
|
*************************************
|
|
|
|
"""
|
|
|
|
|
|
|
|
# get all of the backlinks to this media object; meaning all of
|
|
|
|
# the people, events, places, etc..., that use this image
|
|
|
|
_region_items = set()
|
|
|
|
for (classname, newhandle) in db.find_backlink_handles(handle):
|
|
|
|
|
|
|
|
# for each of the backlinks, get the relevant object from the db
|
|
|
|
# and determine a few important things, such as a text name we
|
|
|
|
# can use, and the URL to a relevant web page
|
|
|
|
_obj = None
|
|
|
|
_name = ""
|
|
|
|
_linkurl = "#"
|
|
|
|
if classname == "Person":
|
|
|
|
_obj = db.get_person_from_handle( newhandle )
|
|
|
|
# what is the shortest possible name we could use for this person?
|
|
|
|
_name = _obj.get_primary_name().get_call_name()
|
|
|
|
if not _name or _name == "":
|
|
|
|
_name = _obj.get_primary_name().get_first_name()
|
|
|
|
_linkurl = report.build_url_fname_html(_obj.handle, 'ppl', True)
|
2009-07-23 01:33:07 +05:30
|
|
|
elif classname == "Event":
|
2009-02-04 13:01:15 +05:30
|
|
|
_obj = db.get_event_from_handle( newhandle )
|
|
|
|
_name = _obj.get_description()
|
|
|
|
|
2009-07-23 01:33:07 +05:30
|
|
|
# nothing relevant was found for this backlink; continue with the next one
|
2009-02-08 10:19:15 +05:30
|
|
|
if _obj is None:
|
|
|
|
continue
|
|
|
|
|
|
|
|
# get a list of all media refs for this object
|
|
|
|
medialist = _obj.get_media_list()
|
|
|
|
|
|
|
|
# go through the media refs looking for one that points to this image
|
|
|
|
for mediaref in medialist:
|
|
|
|
|
|
|
|
# is this mediaref for this image? do we have a rect?
|
|
|
|
if mediaref.ref == handle and mediaref.rect is not None:
|
|
|
|
|
|
|
|
(x1, y1, x2, y2) = mediaref.rect
|
|
|
|
# GRAMPS gives us absolute coordinates,
|
|
|
|
# but we need relative width + height
|
|
|
|
w = x2 - x1
|
|
|
|
h = y2 - y1
|
|
|
|
|
|
|
|
# remember all this information, because we'll need
|
|
|
|
# it later when we output the <li>...</li> tags
|
|
|
|
item = (_name, x1, y1, w, h, _linkurl)
|
|
|
|
_region_items.add(item)
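# e.g. a region covering the upper-left quarter of the image for someone
# named 'John' might be stored as (illustrative values only):
#   ('John', 0, 0, 50, 50, _linkurl)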
|
2009-02-04 13:01:15 +05:30
|
|
|
"""
|
|
|
|
*************************************
|
|
|
|
end of code that looks for and prepares the media object regions
|
|
|
|
*************************************
|
|
|
|
"""
|
|
|
|
|
2008-03-21 03:54:36 +05:30
|
|
|
of = self.report.create_file(handle, 'img')
|
2008-03-18 02:39:16 +05:30
|
|
|
self.up = True
|
2007-11-03 11:13:12 +05:30
|
|
|
|
2007-11-05 12:46:10 +05:30
|
|
|
self.src_list = src_list
|
2008-03-14 03:58:22 +05:30
|
|
|
self.bibli = Bibliography()
|
2007-11-05 12:46:10 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# get media type to be used primarily with 'img' tags
|
2005-12-06 12:08:09 +05:30
|
|
|
mime_type = photo.get_mime_type()
|
2009-06-11 22:15:30 +05:30
|
|
|
mtype = Mime.get_description(mime_type)
|
2007-11-03 11:13:12 +05:30
|
|
|
|
2007-04-09 09:10:11 +05:30
|
|
|
if mime_type:
|
|
|
|
note_only = False
|
2005-12-06 12:08:09 +05:30
|
|
|
newpath = self.copy_source_file(handle, photo)
|
2008-06-16 20:31:46 +05:30
|
|
|
target_exists = newpath is not None
|
2005-08-18 11:28:28 +05:30
|
|
|
else:
|
2007-04-09 09:10:11 +05:30
|
|
|
note_only = True
|
2005-12-06 12:08:09 +05:30
|
|
|
target_exists = False
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2005-12-06 12:08:09 +05:30
|
|
|
self.copy_thumbnail(handle, photo)
|
2005-08-18 11:28:28 +05:30
|
|
|
self.page_title = photo.get_description()
|
2009-06-24 04:04:14 +05:30
|
|
|
mediapage, body = self.write_header("%s - %s" % (_('Media'), self.page_title))
|
2009-02-03 13:31:31 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# begin GalleryDetail division
|
2009-06-24 04:04:14 +05:30
|
|
|
mediadetail = Html('div', class_='content', id='GalleryDetail')
|
|
|
|
body += mediadetail
|
2007-11-03 11:13:12 +05:30
|
|
|
|
2005-08-18 11:28:28 +05:30
|
|
|
# gallery navigation
|
2009-06-11 22:15:30 +05:30
|
|
|
gallerynav = Html('div', id='GalleryNav')
|
2009-06-24 04:04:14 +05:30
|
|
|
mediadetail += gallerynav
|
2005-08-18 11:28:28 +05:30
|
|
|
if prev:
|
2009-06-24 04:04:14 +05:30
|
|
|
gallerynav += self.gallery_nav_link(prev, _('Previous'), True)
|
2009-06-11 22:15:30 +05:30
|
|
|
data = _('<strong id="GalleryCurrent">%(page_number)d</strong> of '
|
|
|
|
'<strong id="GalleryTotal">%(total_pages)d</strong>' ) % {
|
2005-08-18 11:28:28 +05:30
|
|
|
'page_number' : page_number, 'total_pages' : total_pages }
|
2009-06-24 04:04:14 +05:30
|
|
|
gallerynav += Html('span', data, id='GalleryPages')
|
2005-08-18 11:28:28 +05:30
|
|
|
if next:
|
2009-06-24 04:04:14 +05:30
|
|
|
gallerynav += self.gallery_nav_link(next, _('Next'), True)
|
|
|
|
|
|
|
|
# missing media error msg
|
|
|
|
errormsg = _('The file has been moved or deleted.')
|
|
|
|
missingimage = Html('span', errormsg, class_='MissingImage')
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# begin summaryarea division
|
|
|
|
summaryarea = Html('div', id='summaryarea')
|
2009-06-24 04:04:14 +05:30
|
|
|
mediadetail += summaryarea
|
2005-12-06 12:08:09 +05:30
|
|
|
if mime_type:
|
|
|
|
if mime_type.startswith("image/"):
|
2009-02-04 13:01:15 +05:30
|
|
|
if not target_exists:
|
2009-06-24 04:04:14 +05:30
|
|
|
mediadisplay = Html('div', id='GalleryDisplay') + \
|
|
|
|
missingimage
|
|
|
|
summaryarea += mediadisplay
|
2009-02-04 13:01:15 +05:30
|
|
|
else:
|
2009-05-21 04:56:30 +05:30
|
|
|
# Check how big the image is relative to the requested 'initial'
|
|
|
|
# image size. If it's significantly bigger, scale it down to
|
|
|
|
# improve the site's responsiveness. We don't want the user to
|
|
|
|
# have to await a large download unnecessarily. Either way, set
|
|
|
|
# the display image size as requested.
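# Worked example (hypothetical numbers): a 2000x1500 image with
# maxinitialimagewidth=800 and maxinitialimageheight=600 gives
# scale_w = scale_h = 0.4, so scale = 0.4 and the displayed size becomes
# 800x600; since 0.4 < 0.8 a smaller '_init.jpg' copy is generated.
# A maximum of zero drops that constraint via the 'or 1' fallback below.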
|
|
|
|
orig_image_path = Utils.media_path_full(db, photo.get_path())
|
|
|
|
(width, height) = ImgManip.image_size(orig_image_path)
|
|
|
|
max_width = self.report.options['maxinitialimagewidth']
|
|
|
|
max_height = self.report.options['maxinitialimageheight']
|
2009-07-28 17:11:20 +05:30
|
|
|
scale_w = (float(max_width)/width) or 1 # the 'or 1' is so that
|
|
|
|
# a max of zero is ignored
|
|
|
|
|
2009-05-21 04:56:30 +05:30
|
|
|
scale_h = (float(max_height)/height) or 1
|
|
|
|
scale = min(scale_w, scale_h)
|
|
|
|
new_width = int(width*scale)
|
|
|
|
new_height = int(height*scale)
|
|
|
|
if scale < 0.8:
|
|
|
|
# scale factor is significant enough to warrant making a smaller image
|
|
|
|
initial_image_path = '%s_init.jpg' % os.path.splitext(newpath)[0]
|
|
|
|
initial_image_data = ImgManip.resize_to_jpeg_buffer(orig_image_path,
|
|
|
|
new_width, new_height)
|
|
|
|
if self.report.archive:
|
|
|
|
filed, dest = tempfile.mkstemp()
|
|
|
|
os.write(filed, initial_image_data)
|
|
|
|
os.close(filed)
|
|
|
|
self.report.archive.add(dest, initial_image_path)
|
|
|
|
else:
|
|
|
|
filed = open(os.path.join(self.html_dir, initial_image_path), 'wb')
|
|
|
|
filed.write(initial_image_data)
|
|
|
|
filed.close()
|
|
|
|
else:
|
|
|
|
# not worth actually making a smaller image
|
|
|
|
initial_image_path = newpath
|
|
|
|
|
2008-03-21 03:54:36 +05:30
|
|
|
# TODO. Convert disk path to URL.
|
2009-05-21 04:56:30 +05:30
|
|
|
url = self.report.build_url_fname(initial_image_path, None, self.up)
|
|
|
|
if initial_image_path != newpath:
|
2009-06-24 04:04:14 +05:30
|
|
|
scalemsg = Html('p', '(%d x %d).' % (width, height), inline=True)
|
2009-06-11 22:15:30 +05:30
|
|
|
summaryarea += scalemsg
|
2009-06-24 04:04:14 +05:30
|
|
|
mediadisplay = Html('div', style='width:%dpx; height:%dpx;' % (new_width, new_height))
|
|
|
|
summaryarea += mediadisplay
|
2009-02-04 13:01:15 +05:30
|
|
|
|
|
|
|
# Feature #2634; display the mouse-selectable regions.
|
|
|
|
# See the large block at the top of this function where
|
|
|
|
# the various regions are stored in _region_items
|
2009-06-11 22:15:30 +05:30
|
|
|
if len(_region_items):
|
|
|
|
ordered = Html('ol', class_='RegionBox')
|
2009-06-24 04:04:14 +05:30
|
|
|
mediadisplay += ordered
|
2009-02-04 13:01:15 +05:30
|
|
|
while len(_region_items) > 0:
|
|
|
|
(name, x, y, w, h, linkurl) = _region_items.pop()
|
2009-06-11 22:15:30 +05:30
|
|
|
ordered += Html('li', style='left:%d%%; top:%d%%; width:%d%%; height:%d%%;'
|
|
|
|
% (x, y, w, h)) +(
|
|
|
|
Html('a', name, href=linkurl)
|
|
|
|
)
|
2009-02-04 13:01:15 +05:30
|
|
|
|
|
|
|
# display the image
|
2009-05-21 04:56:30 +05:30
|
|
|
if initial_image_path != newpath:
|
2009-06-11 22:15:30 +05:30
|
|
|
url = self.report.build_url_fname(newpath, None, self.up)
|
2009-06-24 04:04:14 +05:30
|
|
|
mediadisplay += Html('a', href=url) + (
|
2009-06-11 22:15:30 +05:30
|
|
|
Html('img', width=new_width, height=new_height, src=url,
|
2009-06-30 01:34:00 +05:30
|
|
|
alt=html_escape(self.page_title))
|
2009-06-11 22:15:30 +05:30
|
|
|
)
|
2005-12-06 12:08:09 +05:30
|
|
|
else:
|
|
|
|
dirname = tempfile.mkdtemp()
|
2008-03-10 01:42:56 +05:30
|
|
|
thmb_path = os.path.join(dirname, "temp.png")
|
2008-03-08 22:10:19 +05:30
|
|
|
if ThumbNails.run_thumbnailer(mime_type,
|
2009-06-24 04:04:14 +05:30
|
|
|
Utils.media_path_full(db,
|
|
|
|
photo.get_path()),
|
|
|
|
thmb_path, 320):
|
2005-12-06 12:08:09 +05:30
|
|
|
try:
|
2008-03-21 03:54:36 +05:30
|
|
|
path = self.report.build_path('preview', photo.handle)
|
2009-02-10 00:28:23 +05:30
|
|
|
npath = os.path.join(path, photo.handle) + '.png'
|
|
|
|
self.report.copy_file(thmb_path, npath)
|
|
|
|
path = npath
|
2005-12-06 12:08:09 +05:30
|
|
|
os.unlink(thmb_path)
|
|
|
|
except IOError:
|
2008-03-10 01:42:56 +05:30
|
|
|
path = os.path.join('images', 'document.png')
|
2005-12-06 12:08:09 +05:30
|
|
|
else:
|
2008-03-10 01:42:56 +05:30
|
|
|
path = os.path.join('images', 'document.png')
|
2005-12-06 12:08:09 +05:30
|
|
|
os.rmdir(dirname)
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2009-06-24 04:04:14 +05:30
|
|
|
mediadisplay = Html('div', id='GalleryDisplay')
|
|
|
|
summaryarea += mediadisplay
|
2005-12-06 12:08:09 +05:30
|
|
|
if target_exists:
|
2008-03-21 03:54:36 +05:30
|
|
|
# TODO. Convert disk path to URL
|
|
|
|
url = self.report.build_url_fname(newpath, None, self.up)
|
2009-06-11 22:15:30 +05:30
|
|
|
hyper = Html('a', href=url)
|
2008-03-21 03:54:36 +05:30
|
|
|
# TODO. url and path are mixed up here
|
|
|
|
# path = convert_disk_path_to_url(path)
|
|
|
|
url = self.report.build_url_fname(path, None, self.up)
|
2009-08-14 12:44:25 +05:30
|
|
|
if hyper:
|
|
|
|
hyper += Html('img', src=url, alt=html_escape(self.page_title))
|
|
|
|
else:
|
|
|
|
hyper = Html('img', src=url, alt=html_escape(self.page_title))
|
2005-12-06 12:08:09 +05:30
|
|
|
if target_exists:
|
2009-06-24 04:04:14 +05:30
|
|
|
mediadisplay += hyper
|
2005-12-06 12:08:09 +05:30
|
|
|
else:
|
2009-06-24 04:04:14 +05:30
|
|
|
mediadisplay += missingimage
|
2005-12-06 12:08:09 +05:30
|
|
|
else:
|
2009-06-24 04:04:14 +05:30
|
|
|
mediadisplay = Html('div', id='GalleryDisplay')
|
|
|
|
summaryarea += mediadisplay
|
2008-03-21 03:54:36 +05:30
|
|
|
url = self.report.build_url_image('document.png', 'images', self.up)
|
2009-06-30 01:34:00 +05:30
|
|
|
mediadisplay += Html('img', src=url, alt=html_escape(self.page_title))
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# media title
|
|
|
|
title = Html('h3', html_escape(self.page_title.strip()), inline=True)
|
|
|
|
summaryarea += title
|
|
|
|
|
|
|
|
# begin media table
|
|
|
|
with Html('table', class_='infolist gallery') as table:
|
2009-06-24 04:04:14 +05:30
|
|
|
summaryarea += table
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-07-17 00:54:03 +05:30
|
|
|
# GRAMPS id
|
2009-06-11 22:15:30 +05:30
|
|
|
if not self.noid:
|
2009-07-17 00:54:03 +05:30
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', _('GRAMPS ID'), class_='ColumnAttribute', inline=True),
|
|
|
|
Html('td', photo.gramps_id, class_='ColumnValue', inline=True)
|
|
|
|
)
|
2009-06-24 04:04:14 +05:30
|
|
|
table += trow
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-07-17 00:54:03 +05:30
|
|
|
# mime type
|
|
|
|
if mime_type:
|
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', _('File Type'), class_='ColumnAttribute', inline=True),
|
2009-07-21 06:29:38 +05:30
|
|
|
Html('td', mime_type, class_='ColumnValue', inline=True)
|
2009-07-17 00:54:03 +05:30
|
|
|
)
|
|
|
|
table += trow
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-07-17 00:54:03 +05:30
|
|
|
# media date
|
2009-06-11 22:15:30 +05:30
|
|
|
date = _dd.display(photo.get_date_object())
|
|
|
|
if date:
|
2009-07-17 00:54:03 +05:30
|
|
|
trow = Html('tr') + (
|
2009-08-11 06:59:53 +05:30
|
|
|
Html('td', DHEAD, class_='ColumnAttribute', inline=True),
|
2009-07-17 00:54:03 +05:30
|
|
|
Html('td', date, class_='ColumnValue', inline=True)
|
|
|
|
)
|
|
|
|
table += trow
|
|
|
|
|
|
|
|
# display image Exif tags/ keys if any?
|
2009-07-28 17:11:20 +05:30
|
|
|
if (pyexiftaglib and mime_type.startswith('image/')):
|
|
|
|
"""
|
|
|
|
# Exif Tags/ Keys
|
|
|
|
#
|
|
|
|
# Determine whether the python exif library is installed on the system.
|
|
|
|
# If it is, use it to determine whether the photo has any Exif data written
|
|
|
|
# inside of it; if not, do not show the Exif section on the media page.
|
|
|
|
"""
|
2009-07-21 06:29:38 +05:30
|
|
|
|
2009-08-11 04:21:44 +05:30
|
|
|
image = pyexiv2.Image('%s' % Utils.media_path_full(db,
|
|
|
|
photo.get_path()))
|
2009-07-23 01:33:07 +05:30
|
|
|
image.readMetadata()
|
2009-07-21 06:29:38 +05:30
|
|
|
|
2009-07-23 01:33:07 +05:30
|
|
|
# exif data does exist
|
|
|
|
if len(image.exifKeys()):
|
2009-07-21 06:29:38 +05:30
|
|
|
|
2009-07-28 17:11:20 +05:30
|
|
|
# add clearline for better page layout
|
2009-07-23 01:33:07 +05:30
|
|
|
mediadetail += fullclear
|
2009-07-17 00:54:03 +05:30
|
|
|
|
2009-07-28 17:11:20 +05:30
|
|
|
# add exif title header
|
|
|
|
mediadetail += Html('h4', _('Image Exif Tags'), inline=True)
|
2009-07-23 01:33:07 +05:30
|
|
|
|
2009-07-28 17:11:20 +05:30
|
|
|
# begin exif table
|
|
|
|
with Html('table', class_='exifdata') as table:
|
|
|
|
mediadetail += table
|
2009-07-23 01:33:07 +05:30
|
|
|
|
2009-07-28 17:11:20 +05:30
|
|
|
for keytag in image.exifKeys():
|
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', keytag, class_='ColumnAttribute', inline=True),
|
|
|
|
Html('td', image[keytag], class_='ColumnValue', inline=True)
|
|
|
|
)
|
|
|
|
table += trow
|
2009-07-23 01:33:07 +05:30
|
|
|
|
|
|
|
#################################################
|
2009-07-17 00:54:03 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# get media notes
|
|
|
|
notes = self.display_note_list(photo.get_note_list())
|
|
|
|
if notes is not None:
|
2009-06-24 04:04:14 +05:30
|
|
|
mediadetail += notes
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# get media attributes
|
2009-08-09 13:25:53 +05:30
|
|
|
attrib = self.display_attr_list(photo)
|
2009-06-11 22:15:30 +05:30
|
|
|
if attrib is not None:
|
2009-06-24 04:04:14 +05:30
|
|
|
mediadetail += attrib
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# get media sources
|
|
|
|
sources = self.display_media_sources(photo)
|
|
|
|
if sources is not None:
|
2009-06-24 04:04:14 +05:30
|
|
|
mediadetail += sources
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# get media references
|
|
|
|
references = self.display_references(my_media_list)
|
|
|
|
if references is not None:
|
2009-06-24 04:04:14 +05:30
|
|
|
mediadetail += references
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# add clearline for proper styling
|
|
|
|
# add footer section
|
2009-06-25 01:33:10 +05:30
|
|
|
footer = self.write_footer()
|
2009-06-24 04:04:14 +05:30
|
|
|
body += (fullclear, footer)
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# send page out for processing
|
2009-06-25 01:33:10 +05:30
|
|
|
# and close the file
|
2009-06-23 05:28:09 +05:30
|
|
|
self.mywriter(mediapage, of)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
def gallery_nav_link(self, handle, name, up=False):
|
2005-02-10 07:14:05 +05:30
|
|
|
|
2008-07-04 01:16:50 +05:30
|
|
|
url = self.report.build_url_fname_html(handle, 'img', up)
|
2009-06-11 22:15:30 +05:30
|
|
|
name = html_escape(name)
|
|
|
|
hyper = Html('a', name, id=name, href=url, title=name, inline=True)
|
|
|
|
|
|
|
|
# return hyperlink to its callers
|
|
|
|
return hyper
|
|
|
|
|
|
|
|
def display_media_sources(self, photo):
|
2008-04-01 01:20:37 +05:30
|
|
|
|
2007-08-23 15:24:37 +05:30
|
|
|
for sref in photo.get_source_references():
|
|
|
|
self.bibli.add_reference(sref)
|
2009-06-11 22:15:30 +05:30
|
|
|
sourcerefs = self.display_source_refs(self.bibli)
|
|
|
|
|
|
|
|
# return source references to its callers
|
|
|
|
return sourcerefs
|
|
|
|
|
2008-03-10 01:42:56 +05:30
|
|
|
def copy_source_file(self, handle, photo):
|
2009-06-16 00:55:43 +05:30
|
|
|
db = self.report.database
|
|
|
|
|
2005-12-06 12:08:09 +05:30
|
|
|
ext = os.path.splitext(photo.get_path())[1]
|
2008-03-21 03:54:36 +05:30
|
|
|
to_dir = self.report.build_path('images', handle)
|
|
|
|
newpath = os.path.join(to_dir, handle) + ext
|
2005-12-06 12:08:09 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
fullpath = Utils.media_path_full(db, photo.get_path())
|
2005-12-06 12:08:09 +05:30
|
|
|
try:
|
2008-03-14 03:58:22 +05:30
|
|
|
if self.report.archive:
|
|
|
|
self.report.archive.add(fullpath, str(newpath))
|
2005-12-06 12:08:09 +05:30
|
|
|
else:
|
2008-03-10 01:42:56 +05:30
|
|
|
to_dir = os.path.join(self.html_dir, to_dir)
|
2005-12-06 12:08:09 +05:30
|
|
|
if not os.path.isdir(to_dir):
|
|
|
|
os.makedirs(to_dir)
|
2008-02-18 19:36:41 +05:30
|
|
|
shutil.copyfile(fullpath,
|
2008-02-24 19:25:55 +05:30
|
|
|
os.path.join(self.html_dir, newpath))
|
2005-12-06 12:08:09 +05:30
|
|
|
return newpath
|
2008-03-10 01:42:56 +05:30
|
|
|
except (IOError, OSError), msg:
|
2007-04-09 09:10:11 +05:30
|
|
|
error = _("Missing media object:") + \
|
2008-03-10 01:42:56 +05:30
|
|
|
"%s (%s)" % (photo.get_description(), photo.get_gramps_id())
|
|
|
|
WarningDialog(error, str(msg))
|
2005-12-06 12:08:09 +05:30
|
|
|
return None
|
|
|
|
|
2008-03-10 01:42:56 +05:30
|
|
|
def copy_thumbnail(self, handle, photo):
|
2009-06-16 00:55:43 +05:30
|
|
|
db = self.report.database
|
|
|
|
|
2008-03-21 03:54:36 +05:30
|
|
|
to_dir = self.report.build_path('thumb', handle)
|
|
|
|
to_path = os.path.join(to_dir, handle) + '.png'
|
2005-12-06 12:08:09 +05:30
|
|
|
if photo.get_mime_type():
|
2008-02-18 19:36:41 +05:30
|
|
|
from_path = ThumbNails.get_thumbnail_path(Utils.media_path_full(
|
2009-06-16 00:55:43 +05:30
|
|
|
db,
|
2008-02-18 19:36:41 +05:30
|
|
|
photo.get_path()),
|
2009-06-16 00:55:43 +05:30
|
|
|
photo.get_mime_type())
|
2005-12-06 12:08:09 +05:30
|
|
|
if not os.path.isfile(from_path):
|
2008-03-10 01:42:56 +05:30
|
|
|
from_path = os.path.join(const.IMAGE_DIR, "document.png")
|
2005-12-06 12:08:09 +05:30
|
|
|
else:
|
2008-03-10 01:42:56 +05:30
|
|
|
from_path = os.path.join(const.IMAGE_DIR, "document.png")
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2008-07-17 17:24:33 +05:30
|
|
|
self.report.copy_file(from_path, to_path)
|
2005-12-06 12:08:09 +05:30
|
|
|
|
2005-08-18 11:28:28 +05:30
|
|
|
class SurnameListPage(BasePage):
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2005-08-18 11:28:28 +05:30
|
|
|
ORDER_BY_NAME = 0
|
|
|
|
ORDER_BY_COUNT = 1
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
def __init__(self, report, title, person_handle_list, order_by=ORDER_BY_NAME, filename="surnames"):
|
|
|
|
BasePage.__init__(self, report, title)
|
2009-06-21 08:55:28 +05:30
|
|
|
db = report.database
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2005-08-18 11:28:28 +05:30
|
|
|
if order_by == self.ORDER_BY_NAME:
|
2008-03-18 02:39:16 +05:30
|
|
|
of = self.report.create_file(filename)
|
2009-07-23 01:33:07 +05:30
|
|
|
surnamelistpage, body = self.write_header(_('Surnames'))
|
2005-08-18 11:28:28 +05:30
|
|
|
else:
|
2008-03-18 02:39:16 +05:30
|
|
|
of = self.report.create_file("surnames_count")
|
2009-07-23 01:33:07 +05:30
|
|
|
surnamelistpage, body = self.write_header(_('Surnames by person count'))
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# begin surnames division
|
2009-07-15 05:23:07 +05:30
|
|
|
with Html('div', class_='content', id='surnames') as surnamelist:
|
|
|
|
body += surnamelist
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# page description
|
|
|
|
msg = _( 'This page contains an index of all the '
|
|
|
|
'surnames in the database. Selecting a link '
|
|
|
|
'will lead to a list of individuals in the '
|
|
|
|
'database with this same surname.')
|
2009-07-15 05:23:07 +05:30
|
|
|
surnamelist += Html('p', msg, id='description')
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# add alphabet navigation after page msg
|
|
|
|
# only if surname list not surname count
|
|
|
|
if order_by == self.ORDER_BY_NAME:
|
2009-06-21 08:55:28 +05:30
|
|
|
alpha_nav = alphabet_navigation(db, person_handle_list, _PERSON)
|
2009-06-11 22:15:30 +05:30
|
|
|
if alpha_nav is not None:
|
2009-07-15 05:23:07 +05:30
|
|
|
surnamelist += alpha_nav
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
if order_by == self.ORDER_BY_COUNT:
|
|
|
|
table_id = 'SortByCount'
|
2009-05-01 14:09:17 +05:30
|
|
|
else:
|
2009-06-11 22:15:30 +05:30
|
|
|
table_id = 'SortByName'
|
2009-07-15 05:23:07 +05:30
|
|
|
|
|
|
|
# begin surnamelist table and table head
|
2009-06-11 22:15:30 +05:30
|
|
|
with Html('table', class_='infolist surnamelist', id=table_id) as table:
|
2009-07-15 05:23:07 +05:30
|
|
|
surnamelist += table
|
|
|
|
|
|
|
|
thead = Html('thead')
|
|
|
|
table += thead
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-07-15 05:23:07 +05:30
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('th', _('Letter'), class_='ColumnLetter', inline=True)
|
|
|
|
)
|
|
|
|
thead += trow
|
|
|
|
|
|
|
|
fname = self.report.surname_fname + self.ext
|
2009-07-23 01:33:07 +05:30
|
|
|
tcell = Html('th', class_='ColumnSurname', inline=True)
|
|
|
|
trow += tcell
|
|
|
|
hyper = Html('a', _('Surname'), href=fname)
|
|
|
|
tcell += hyper
|
|
|
|
|
2009-07-15 05:23:07 +05:30
|
|
|
fname = "surnames_count" + self.ext
|
2009-07-23 01:33:07 +05:30
|
|
|
tcell = Html('th', class_='ColumnQuantity', inline=True)
|
|
|
|
trow += tcell
|
|
|
|
hyper = Html('a', _('Number of People'), href=fname)
|
|
|
|
tcell += hyper
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# begin table body
|
|
|
|
with Html('tbody') as tbody:
|
|
|
|
table += tbody
|
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
person_handle_list = sort_people(db, person_handle_list)
|
2009-06-11 22:15:30 +05:30
|
|
|
if order_by == self.ORDER_BY_COUNT:
|
|
|
|
temp_list = {}
|
|
|
|
for (surname, data_list) in person_handle_list:
|
|
|
|
index_val = "%90d_%s" % (999999999-len(data_list), surname)
|
|
|
|
temp_list[index_val] = (surname, data_list)
|
|
|
|
|
|
|
|
person_handle_list = []
|
|
|
|
for key in sorted(temp_list, key=locale.strxfrm):
|
|
|
|
person_handle_list.append(temp_list[key])
|
|
|
|
|
|
|
|
last_letter = ''
|
|
|
|
last_surname = ''
|
|
|
|
|
|
|
|
for (surname, data_list) in person_handle_list:
|
|
|
|
if len(surname) == 0:
|
|
|
|
continue
|
|
|
|
|
|
|
|
# Get a capital normalized version of the first letter of
|
|
|
|
# the surname
|
|
|
|
if surname:
|
|
|
|
letter = normalize('NFKC', surname)[0].upper()
|
|
|
|
else:
|
|
|
|
letter = u' '
|
|
|
|
# See : http://www.gramps-project.org/bugs/view.php?id=2933
|
|
|
|
(lang_country, modifier ) = locale.getlocale()
|
|
|
|
if lang_country == "sv_SE" and ( letter == u'W' or letter == u'V' ):
|
|
|
|
letter = u'V,W'
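# Swedish collation treats V and W as the same letter, so both are
# grouped under a single "V,W" index heading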
|
|
|
|
|
2009-07-23 01:33:07 +05:30
|
|
|
trow = Html('tr')
|
|
|
|
tbody += trow
|
2009-06-11 22:15:30 +05:30
|
|
|
if letter != last_letter:
|
|
|
|
last_letter = letter
|
2009-07-23 01:33:07 +05:30
|
|
|
trow.attr = ' class="BeginLetter" '
|
|
|
|
|
|
|
|
tcell = Html('td', class_='ColumnLetter', inline=True) + (
|
|
|
|
Html('a', last_letter, name=last_letter)
|
|
|
|
)
|
|
|
|
trow += tcell
|
|
|
|
|
|
|
|
tcell = Html('td', class_='ColumnSurname') + \
|
|
|
|
self.surname_link(name_to_md5(surname), surname)
|
|
|
|
trow += tcell
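# name_to_md5(surname) is assumed to hash the surname so that every
# surname page gets a stable, filesystem-safe file name regardless of
# the characters in the name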
|
2009-06-11 22:15:30 +05:30
|
|
|
elif surname != last_surname:
|
2009-07-23 01:33:07 +05:30
|
|
|
tcell = Html('td', ' ', class_='ColumnLetter', inline=True)
|
2009-06-11 22:15:30 +05:30
|
|
|
trow += tcell
|
2009-07-23 01:33:07 +05:30
|
|
|
tcell = Html('td', class_='ColumnSurname') + \
|
|
|
|
self.surname_link(name_to_md5(surname), surname)
|
|
|
|
trow += tcell
|
|
|
|
last_surname = surname
|
|
|
|
tcell = Html('td', len(data_list), class_='ColumnQuantity', inline=True)
|
|
|
|
trow += tcell
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# create footer section
|
|
|
|
# add clearline for proper styling
|
2009-06-25 01:33:10 +05:30
|
|
|
footer = self.write_footer()
|
2009-06-11 22:15:30 +05:30
|
|
|
body += (fullclear, footer)
|
2005-02-10 07:14:05 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# send page out for processing
|
2009-06-25 01:33:10 +05:30
|
|
|
# and close the file
|
2009-07-23 01:33:07 +05:30
|
|
|
self.mywriter(surnamelistpage, of)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
def surname_link(self, fname, name, opt_val=None, up=False):
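"""
Build a hyperlink to an individual surname page.

fname is the file name stem, name is the surname used as link text and
title, and opt_val, when given, is appended inside the link (e.g. a count).
"""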
|
2008-04-01 01:20:37 +05:30
|
|
|
url = self.report.build_url_fname_html(fname, 'srn', up)
|
2009-06-11 22:15:30 +05:30
|
|
|
hyper = Html('a', name, href=url, title=name)
|
2008-06-16 20:31:46 +05:30
|
|
|
if opt_val is not None:
|
2009-06-11 22:15:30 +05:30
|
|
|
hyper += opt_val
|
|
|
|
|
|
|
|
# return hyperlink to its caller
|
|
|
|
return hyper
|
2008-04-01 01:20:37 +05:30
|
|
|
|
2005-02-10 07:14:05 +05:30
|
|
|
class IntroductionPage(BasePage):
|
2009-06-11 22:15:30 +05:30
|
|
|
"""
|
|
|
|
This class will create the Introduction page ...
|
|
|
|
"""
|
2005-02-10 07:14:05 +05:30
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
def __init__(self, report, title):
|
|
|
|
BasePage.__init__(self, report, title)
|
2009-06-20 05:23:42 +05:30
|
|
|
db = report.database
|
2005-02-10 07:14:05 +05:30
|
|
|
|
2008-03-18 02:39:16 +05:30
|
|
|
of = self.report.create_file(report.intro_fname)
|
2008-04-01 01:20:37 +05:30
|
|
|
# Note. In old NarrativeWeb.py the content_divid depended on filename.
|
2009-06-20 05:23:42 +05:30
|
|
|
intropage, body = self.write_header(_('Introduction'))
|
2009-02-03 13:31:31 +05:30
|
|
|
|
2009-06-20 05:23:42 +05:30
|
|
|
# begin Introduction division
|
|
|
|
with Html('div', class_='content', id='Introduction') as section:
|
|
|
|
body += section
|
2005-02-10 07:14:05 +05:30
|
|
|
|
2009-06-20 05:23:42 +05:30
|
|
|
introimg = report.add_image('introimg')
|
|
|
|
if introimg is not None:
|
|
|
|
section += introimg
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2009-06-20 05:23:42 +05:30
|
|
|
note_id = report.options['intronote']
|
2009-07-15 05:23:07 +05:30
|
|
|
if note_id:
|
|
|
|
note = db.get_note_from_gramps_id(note_id)
|
|
|
|
note_text = self.get_note_format(note)
|
2009-06-20 05:23:42 +05:30
|
|
|
|
2009-07-15 05:23:07 +05:30
|
|
|
# attach note
|
|
|
|
section += note_text
|
2005-02-28 05:13:20 +05:30
|
|
|
|
2009-06-20 05:23:42 +05:30
|
|
|
# add clearline for proper styling
|
2009-06-11 22:15:30 +05:30
|
|
|
# create footer section
|
2009-06-25 01:33:10 +05:30
|
|
|
footer = self.write_footer()
|
2009-06-20 05:23:42 +05:30
|
|
|
body += (fullclear, footer)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# send page out for processing
|
2009-06-25 01:33:10 +05:30
|
|
|
# and close the file
|
2009-06-20 05:23:42 +05:30
|
|
|
self.mywriter(intropage, of)
|
2005-02-10 07:14:05 +05:30
|
|
|
|
|
|
|
class HomePage(BasePage):
|
2009-06-11 22:15:30 +05:30
|
|
|
"""
|
|
|
|
This class will create the Home Page ...
|
|
|
|
"""
|
2005-02-10 07:14:05 +05:30
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
def __init__(self, report, title):
|
|
|
|
BasePage.__init__(self, report, title)
|
2009-06-21 08:55:28 +05:30
|
|
|
db = report.database
|
2005-02-10 07:14:05 +05:30
|
|
|
|
2008-03-18 02:39:16 +05:30
|
|
|
of = self.report.create_file("index")
|
2009-06-21 08:55:28 +05:30
|
|
|
homepage, body = self.write_header(_('Home'))
|
2009-02-03 13:31:31 +05:30
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
# begin home division
|
2009-07-21 06:29:38 +05:30
|
|
|
with Html('div', class_='content', id='Home') as section:
|
|
|
|
body += section
|
2005-02-10 07:14:05 +05:30
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
homeimg = report.add_image('homeimg')
|
|
|
|
if homeimg is not None:
|
2009-07-21 06:29:38 +05:30
|
|
|
section += homeimg
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
note_id = report.options['homenote']
|
|
|
|
if note_id:
|
|
|
|
note = db.get_note_from_gramps_id(note_id)
|
2009-07-15 05:23:07 +05:30
|
|
|
note_text = self.get_note_format(note)
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2009-07-15 05:23:07 +05:30
|
|
|
# attach note
|
2009-07-21 06:29:38 +05:30
|
|
|
section += note_text
|
2009-07-15 05:23:07 +05:30
|
|
|
|
|
|
|
# create clear line for proper styling
|
2009-06-21 08:55:28 +05:30
|
|
|
# create footer section
|
2009-06-25 01:33:10 +05:30
|
|
|
footer = self.write_footer()
|
2009-06-21 08:55:28 +05:30
|
|
|
body += (fullclear, footer)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# send page out for processing
|
2009-06-25 01:33:10 +05:30
|
|
|
# and close the file
|
2009-06-21 08:55:28 +05:30
|
|
|
self.mywriter(homepage, of)
|
2005-02-10 07:14:05 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
class SourceListPage(BasePage):
|
2005-02-10 07:14:05 +05:30
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
def __init__(self, report, title, handle_set):
|
|
|
|
BasePage.__init__(self, report, title)
|
2009-06-21 08:55:28 +05:30
|
|
|
db = report.database
|
2005-02-10 07:14:05 +05:30
|
|
|
|
2008-03-18 02:39:16 +05:30
|
|
|
of = self.report.create_file("sources")
|
2009-06-25 01:33:10 +05:30
|
|
|
sourcelistpage, body = self.write_header(_('Sources'))
|
2009-02-03 13:31:31 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# begin source list division
|
2009-06-16 00:55:43 +05:30
|
|
|
with Html('div', class_='content', id='sources') as section:
|
|
|
|
body += section
|
2005-02-10 07:14:05 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
handle_list = list(handle_set)
|
|
|
|
source_dict = {}
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
# Sort the sources
|
2009-06-16 00:55:43 +05:30
|
|
|
for handle in handle_list:
|
2009-06-21 08:55:28 +05:30
|
|
|
source = db.get_source_from_handle(handle)
|
2009-06-16 00:55:43 +05:30
|
|
|
key = source.get_title() + str(source.get_gramps_id())
|
|
|
|
source_dict[key] = (source, handle)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
keys = sorted(source_dict, key=locale.strxfrm)
|
2005-02-16 11:11:33 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
msg = _("This page contains an index of all the sources in the "
|
|
|
|
"database, sorted by their title. Clicking on a source’s "
|
|
|
|
"title will take you to that source’s page.")
|
|
|
|
section += Html('p', msg, id='description')
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-30 01:34:00 +05:30
|
|
|
# begin sourcelist table and table head
|
2009-06-16 00:55:43 +05:30
|
|
|
with Html('table', class_='infolist sourcelist') as table:
|
|
|
|
section += table
|
2009-08-16 01:58:22 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
thead = Html('thead')
|
|
|
|
table += thead
|
2009-08-12 01:35:56 +05:30
|
|
|
trow = Html('tr')
|
2009-06-16 00:55:43 +05:30
|
|
|
thead += trow
|
2009-08-12 01:35:56 +05:30
|
|
|
|
|
|
|
for (label, colclass) in [
|
2009-08-27 18:51:55 +05:30
|
|
|
(None, 'RowLabel'),
|
2009-08-16 01:58:22 +05:30
|
|
|
(_('Name'), 'Name') ]:
|
2009-08-12 01:35:56 +05:30
|
|
|
|
|
|
|
label = label or ' '
|
|
|
|
trow += Html('th', label, class_='Column%s' % colclass, inline=True)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# begin table body
|
|
|
|
tbody = Html('tbody')
|
|
|
|
table += tbody
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
for index, key in enumerate(keys):
|
|
|
|
(source, handle) = source_dict[key]
|
2009-08-12 01:35:56 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', index+1, class_='ColumnRowLabel', inline=True)
|
|
|
|
)
|
2009-06-17 03:24:06 +05:30
|
|
|
tbody += trow
|
2009-08-07 00:43:16 +05:30
|
|
|
trow += Html('td', class_='ColumnName') + \
|
2009-06-16 00:55:43 +05:30
|
|
|
self.source_link(handle, source.get_title(), source.gramps_id)
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# add clearline for proper styling
|
2009-06-16 00:55:43 +05:30
|
|
|
# add footer section
|
2009-06-25 01:33:10 +05:30
|
|
|
footer = self.write_footer()
|
2009-06-16 00:55:43 +05:30
|
|
|
body += (fullclear, footer)
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# send page out for processing
|
2009-06-25 01:33:10 +05:30
|
|
|
# and close the file
|
2009-06-21 08:55:28 +05:30
|
|
|
self.mywriter(sourcelistpage, of)
|
2005-08-18 11:28:28 +05:30
|
|
|
|
|
|
|
class SourcePage(BasePage):
|
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
def __init__(self, report, title, handle, src_list):
|
2009-06-16 00:55:43 +05:30
|
|
|
db = report.database
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
source = db.get_source_from_handle(handle)
|
2008-03-14 03:58:22 +05:30
|
|
|
BasePage.__init__(self, report, title, source.gramps_id)
|
2008-03-12 03:41:42 +05:30
|
|
|
|
2008-03-21 03:54:36 +05:30
|
|
|
of = self.report.create_file(source.get_handle(), 'src')
|
2008-03-12 03:41:42 +05:30
|
|
|
self.up = True
|
2008-03-18 02:39:16 +05:30
|
|
|
self.page_title = source.get_title()
|
2009-06-16 00:55:43 +05:30
|
|
|
sourcepage, body = self.write_header("%s - %s" % (_('Sources'), self.page_title))
|
2009-02-03 13:31:31 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# begin source detail division
|
2009-06-16 00:55:43 +05:30
|
|
|
with Html('div', class_='content', id='SourceDetail') as section:
|
|
|
|
body += section
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
media_list = source.get_media_list()
|
|
|
|
thumbnail = self.display_first_image_as_thumbnail(media_list)
|
2009-06-21 08:55:28 +05:30
|
|
|
if thumbnail is not None:
|
2009-06-16 00:55:43 +05:30
|
|
|
section += thumbnail
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2009-06-30 01:34:00 +05:30
|
|
|
# add section title
|
2009-06-16 00:55:43 +05:30
|
|
|
section += Html('h3', html_escape(self.page_title.strip()), inline=True)
|
|
|
|
|
2009-08-10 10:22:41 +05:30
|
|
|
# begin sources table
|
|
|
|
with Html('table', class_='infolist source') as table:
|
|
|
|
section += table
|
2009-06-16 00:55:43 +05:30
|
|
|
|
2009-08-10 10:22:41 +05:30
|
|
|
grampsid = None
|
|
|
|
if not self.noid:
|
|
|
|
grampsid = source.gramps_id
|
|
|
|
|
|
|
|
for (label, val) in [(_('GRAMPS ID'), grampsid),
|
|
|
|
(_('Author'), source.author),
|
|
|
|
(_('Publication information'), source.pubinfo),
|
|
|
|
(_('Abbreviation'), source.abbrev)]:
|
|
|
|
if val:
|
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', label, class_='ColumnAttribute'),
|
|
|
|
Html('td', val, class_='ColumnValue')
|
|
|
|
)
|
|
|
|
table += trow
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-08-10 10:22:41 +05:30
|
|
|
# additional media
|
2009-06-16 00:55:43 +05:30
|
|
|
sourcegallery = self.display_additional_images_as_gallery(media_list)
|
2009-06-21 08:55:28 +05:30
|
|
|
if sourcegallery is not None:
|
2009-06-16 00:55:43 +05:30
|
|
|
section += sourcegallery
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# additional notes
|
|
|
|
sourcenotes = self.display_note_list(source.get_note_list())
|
2009-06-21 08:55:28 +05:30
|
|
|
if sourcenotes is not None:
|
2009-06-16 00:55:43 +05:30
|
|
|
section += sourcenotes
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# references
|
|
|
|
source_references = self.display_references(src_list[source.handle])
|
2009-06-21 08:55:28 +05:30
|
|
|
if source_references is not None:
|
2009-06-16 00:55:43 +05:30
|
|
|
section += source_references
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# add clearline for proper styling
|
2009-06-16 00:55:43 +05:30
|
|
|
# add footer section
|
2009-06-25 01:33:10 +05:30
|
|
|
footer = self.write_footer()
|
2009-06-16 00:55:43 +05:30
|
|
|
body += (fullclear, footer)
|
2008-03-14 03:58:22 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# send page out for processing
|
2009-06-25 01:33:10 +05:30
|
|
|
# and close the file
|
2009-06-16 00:55:43 +05:30
|
|
|
self.mywriter(sourcepage, of)
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
class MediaListPage(BasePage):
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2008-05-21 00:54:03 +05:30
|
|
|
def __init__(self, report, title):
|
2008-03-14 03:58:22 +05:30
|
|
|
BasePage.__init__(self, report, title)
|
2009-06-21 08:55:28 +05:30
|
|
|
db = report.database
|
2008-03-14 03:58:22 +05:30
|
|
|
|
2009-07-15 05:23:07 +05:30
|
|
|
of = self.report.create_file("media")
|
|
|
|
medialistpage, body = self.write_header(_('Media'))
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# begin gallery division
|
|
|
|
with Html('div', class_='content', id='Gallery') as section:
|
|
|
|
body += section
|
|
|
|
|
|
|
|
msg = _("This page contains an index of all the media objects "
|
|
|
|
"in the database, sorted by their title. Clicking on "
|
2009-07-15 05:23:07 +05:30
|
|
|
"the title will take you to that media object’s page. "
|
|
|
|
"If you see media size densions above an image, click on the "
|
|
|
|
"image to see the full sized version. ")
|
2009-06-16 00:55:43 +05:30
|
|
|
section += Html('p', msg, id='description')
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# begin gallery table and table head
|
|
|
|
with Html('table', class_='infolist gallerylist') as table:
|
|
|
|
section += table
|
2009-06-23 05:28:09 +05:30
|
|
|
|
|
|
|
# begin table head
|
|
|
|
thead = Html('thead')
|
|
|
|
table += thead
|
|
|
|
|
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('th', ' ', class_='ColumnRowLabel', inline=True),
|
|
|
|
Html('th', _('Name'), class_='ColumnName', inline=True),
|
2009-08-11 06:59:53 +05:30
|
|
|
Html('th', DHEAD, class_='ColumnDate', inline=True)
|
2009-06-23 05:28:09 +05:30
|
|
|
)
|
|
|
|
thead += trow
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# begin table body
|
2009-06-23 05:28:09 +05:30
|
|
|
tbody = Html('tbody')
|
|
|
|
table += tbody
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-23 05:28:09 +05:30
|
|
|
index = 1
|
|
|
|
sort = Sort.Sort(db)
|
2009-06-30 19:35:57 +05:30
|
|
|
mlist = sorted(self.report.photo_list, key=sort.by_media_title_key)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-23 05:28:09 +05:30
|
|
|
for handle in mlist:
|
|
|
|
media = db.get_object_from_handle(handle)
|
|
|
|
date = _dd.display(media.get_date_object())
|
|
|
|
title = media.get_description()
|
|
|
|
if not title:
|
|
|
|
title = "[untitled]"
|
|
|
|
|
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', index, class_='ColumnRowLabel', inline=True),
|
|
|
|
)
|
|
|
|
tbody += trow
|
|
|
|
tcell = Html('td', class_='ColumnName') + \
|
|
|
|
self.media_ref_link(handle, title)
|
|
|
|
trow += tcell
|
|
|
|
trow += Html('td', date, class_='ColumnDate', inline=True)
|
|
|
|
index += 1
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# add footer section
|
|
|
|
# add clearline for proper styling
|
2009-06-25 01:33:10 +05:30
|
|
|
footer = self.write_footer()
|
2009-06-11 22:15:30 +05:30
|
|
|
body += (fullclear, footer)
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# send page out for processing
|
2009-06-25 01:33:10 +05:30
|
|
|
# and close the file
|
2009-06-23 05:28:09 +05:30
|
|
|
self.mywriter(medialistpage, of)
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
def media_ref_link(self, handle, name, up=False):
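"""
Build a hyperlink to a media object's page, using the object's handle for
the URL and the (HTML-escaped) name as both link text and title.
"""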
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# get media url
|
2008-07-04 00:40:05 +05:30
|
|
|
url = self.report.build_url_fname_html(handle, 'img', up)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# get name
|
|
|
|
name = html_escape(name)
|
|
|
|
|
|
|
|
# begin hyper link
|
|
|
|
hyper = Html('a', name, href=url, title=name)
|
|
|
|
|
|
|
|
# return hyperlink to its callers
|
|
|
|
return hyper
|
2008-04-01 01:20:37 +05:30
|
|
|
|
2005-02-10 07:14:05 +05:30
|
|
|
class DownloadPage(BasePage):
|
2009-06-11 22:15:30 +05:30
|
|
|
"""
|
|
|
|
This class will produce the Download Page ...
|
|
|
|
"""
|
2005-02-10 07:14:05 +05:30
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
def __init__(self, report, title):
|
|
|
|
BasePage.__init__(self, report, title)
|
2009-07-15 05:23:07 +05:30
|
|
|
db = report.database
|
2005-02-10 07:14:05 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# do NOT include a Download Page
|
|
|
|
if not self.report.inc_download:
|
2009-06-30 01:34:00 +05:30
|
|
|
return None
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-05-30 14:16:12 +05:30
|
|
|
# menu options for class
|
|
|
|
# download and description #1
|
2009-07-15 05:23:07 +05:30
|
|
|
downloadnote = self.report.downloadnote
|
|
|
|
|
2009-05-30 14:16:12 +05:30
|
|
|
dlfname1 = self.report.dl_fname1
|
|
|
|
dldescr1 = self.report.dl_descr1
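# 'wrapper' is assumed to be the module-level TextWrapper instance set up
# earlier in this file (TextWrapper is imported at the top); it re-wraps
# the free-form description text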
|
|
|
|
dldescr1 = ''.join(wrapper.wrap(dldescr1))
|
|
|
|
|
|
|
|
# download and description #2
|
|
|
|
dlfname2 = self.report.dl_fname2
|
|
|
|
dldescr2 = self.report.dl_descr2
|
|
|
|
dldescr2 = ''.join(wrapper.wrap(dldescr2))
|
|
|
|
|
|
|
|
# download copyright
|
|
|
|
dlcopy = self.report.dl_copy
|
|
|
|
|
|
|
|
# if no filenames at all, return???
|
|
|
|
if not dlfname1 and not dlfname2:
|
|
|
|
return
|
|
|
|
|
2008-03-18 02:39:16 +05:30
|
|
|
of = self.report.create_file("download")
|
2009-06-16 00:55:43 +05:30
|
|
|
downloadpage, body = self.write_header(_('Download'))
|
2009-02-03 13:31:31 +05:30
|
|
|
|
2009-05-30 14:16:12 +05:30
|
|
|
# begin download page and table
|
2009-07-15 05:23:07 +05:30
|
|
|
with Html('div', class_='content', id='Download') as download:
|
|
|
|
body += download
|
|
|
|
|
|
|
|
# download page note
|
|
|
|
if downloadnote:
|
|
|
|
note = db.get_note_from_gramps_id(downloadnote)
|
|
|
|
note_text = self.get_note_format(note)
|
|
|
|
download += note_text
|
|
|
|
|
|
|
|
# add clearline before beginning table
|
|
|
|
download += fullclear
|
2009-05-30 14:16:12 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# begin download table
|
|
|
|
with Html('table', class_='infolist download') as table:
|
2009-07-15 05:23:07 +05:30
|
|
|
download += table
|
2009-05-30 14:16:12 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# table head
|
|
|
|
thead = Html('thead')
|
|
|
|
table += thead
|
|
|
|
trow = Html('tr')
|
2009-07-15 05:23:07 +05:30
|
|
|
thead += trow
|
|
|
|
|
2009-08-11 06:59:53 +05:30
|
|
|
for (label, colclass) in [(_('File Name'), 'Filename'),
|
|
|
|
(DESCRHEAD, 'Description'),
|
2009-07-15 05:23:07 +05:30
|
|
|
(_('License'), 'License'),
|
|
|
|
(_('Last Modified'), 'Modified') ]:
|
2009-08-11 06:59:53 +05:30
|
|
|
trow += Html('th', label, class_='Column%s' % colclass, inline=True)
|
2009-05-30 14:16:12 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# if dlfname1 is not None, show it???
|
|
|
|
if dlfname1:
|
2009-05-30 14:16:12 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# table body
|
|
|
|
tbody = Html('tbody')
|
|
|
|
table += tbody
|
|
|
|
trow = Html('tr', id='Row01')
|
|
|
|
tbody += trow
|
2009-05-30 14:16:12 +05:30
|
|
|
|
2009-07-15 05:23:07 +05:30
|
|
|
# table row 1, column 1 -- File
|
|
|
|
fname = os.path.basename(dlfname1)
|
|
|
|
tcell = Html('td', id='Col03', class_='Filename') + (
|
|
|
|
Html('a', fname, href=dlfname1, alt=dldescr1)
|
|
|
|
)
|
|
|
|
trow += tcell
|
|
|
|
|
|
|
|
# table Row 1, column 2 -- File Description
|
2009-06-16 00:55:43 +05:30
|
|
|
tcell = Html('td', id='Col01', class_='Description',
|
|
|
|
inline=True)
|
|
|
|
if dldescr1:
|
|
|
|
tcell += dldescr1
|
|
|
|
else:
|
|
|
|
tcell += ' '
|
|
|
|
trow += tcell
|
|
|
|
|
2009-07-15 05:23:07 +05:30
|
|
|
# table row 1, column 3 -- Copyright License
|
2009-06-16 00:55:43 +05:30
|
|
|
tcell = Html('td', id='Col02', class_='License')
|
|
|
|
copyright = self.get_copyright_license(dlcopy)
|
|
|
|
if copyright:
|
|
|
|
tcell += copyright
|
|
|
|
else:
|
|
|
|
tcell += ' '
|
|
|
|
trow += tcell
|
2009-05-30 14:16:12 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# table row 1, column 4 -- Last Modified
|
|
|
|
tcell = Html('td', id='Col04', class_='Modified', inline=True)
|
|
|
|
if os.path.exists(dlfname1):
|
|
|
|
modified = os.stat(dlfname1).st_mtime
|
|
|
|
last_mod = datetime.datetime.fromtimestamp(modified)
|
|
|
|
tcell += last_mod
|
|
|
|
else:
|
|
|
|
tcell += ' '
|
|
|
|
trow += tcell
|
2009-05-30 14:16:12 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# if download filename #2, show it???
|
|
|
|
if dlfname2:
|
2009-05-30 14:16:12 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# begin row #2
|
|
|
|
trow = Html('tr', id='Row02')
|
|
|
|
tbody += trow
|
2009-05-30 14:16:12 +05:30
|
|
|
|
2009-07-15 05:23:07 +05:30
|
|
|
# table row 2, column 1 -- File
|
|
|
|
fname = os.path.basename(dlfname2)
|
|
|
|
tcell = Html('td', id='Col03', class_='Filename') + (
|
|
|
|
Html('a', fname, href=dlfname2, alt=dldescr2)
|
|
|
|
)
|
|
|
|
trow += tcell
|
|
|
|
|
|
|
|
# table row 2, column 2 -- Description
|
2009-06-16 00:55:43 +05:30
|
|
|
tcell = Html('td', id='Col01', class_='Description',
|
|
|
|
inline=True)
|
|
|
|
if dldescr2:
|
|
|
|
tcell += dldescr2
|
|
|
|
else:
|
|
|
|
tcell += ' '
|
|
|
|
trow += tcell
|
|
|
|
|
2009-07-15 05:23:07 +05:30
|
|
|
# table row 2, column 3 -- Copyright License
|
2009-06-16 00:55:43 +05:30
|
|
|
tcell = Html('td', id='Col02', class_='License')
|
|
|
|
copyright = self.get_copyright_license(dlcopy)
|
|
|
|
if copyright:
|
|
|
|
tcell += copyright
|
|
|
|
else:
|
|
|
|
tcell += ' '
|
|
|
|
trow += tcell
|
|
|
|
|
|
|
|
# table row 2, column 4 -- Last Modified
|
|
|
|
tcell = Html('td', id='Col04', class_='Modified', inline=True)
|
|
|
|
if os.path.exists(dlfname2):
|
|
|
|
modified = os.stat(dlfname2).st_mtime
|
|
|
|
last_mod = datetime.datetime.fromtimestamp(modified)
|
|
|
|
tcell += last_mod
|
|
|
|
else:
|
|
|
|
tcell += ' '
|
|
|
|
trow += tcell
|
2009-05-30 14:16:12 +05:30
|
|
|
|
|
|
|
# clear line for proper styling
|
2009-06-16 00:55:43 +05:30
|
|
|
# create footer section
|
2009-06-25 01:33:10 +05:30
|
|
|
footer = self.write_footer()
|
2009-06-16 00:55:43 +05:30
|
|
|
body += (fullclear, footer)
|
2009-05-30 14:16:12 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# send page out for processing
|
2009-06-25 01:33:10 +05:30
|
|
|
# and close the file
|
2009-06-16 00:55:43 +05:30
|
|
|
self.mywriter(downloadpage, of)
|
2005-02-10 07:14:05 +05:30
|
|
|
|
|
|
|
class ContactPage(BasePage):
|
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
def __init__(self, report, title):
|
|
|
|
BasePage.__init__(self, report, title)
|
2009-06-21 08:55:28 +05:30
|
|
|
db = report.database
|
2005-02-10 07:14:05 +05:30
|
|
|
|
2008-03-18 02:39:16 +05:30
|
|
|
of = self.report.create_file("contact")
|
2009-06-16 00:55:43 +05:30
|
|
|
contactpage, body = self.write_header(_('Contact'))
|
|
|
|
|
|
|
|
# begin contact division
|
|
|
|
with Html('div', class_='content', id='Contact') as section:
|
|
|
|
body += section
|
|
|
|
|
|
|
|
# begin summaryarea division
|
|
|
|
with Html('div', id='summaryarea') as summaryarea:
|
|
|
|
section += summaryarea
|
|
|
|
|
|
|
|
contactimg = report.add_image('contactimg', 200)
|
2009-06-21 08:55:28 +05:30
|
|
|
if contactimg is not None:
|
2009-06-16 00:55:43 +05:30
|
|
|
summaryarea += contactimg
|
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
# get researcher information
|
2009-06-19 20:53:58 +05:30
|
|
|
r = Utils.get_researcher()
|
2009-06-16 00:55:43 +05:30
|
|
|
|
|
|
|
with Html('div', id='researcher') as researcher:
|
|
|
|
summaryarea += researcher
|
|
|
|
if r.name:
|
|
|
|
r.name = r.name.replace(',,,', '')
|
|
|
|
researcher += Html('h3', r.name, inline=True)
|
|
|
|
if r.addr:
|
2009-06-21 08:55:28 +05:30
|
|
|
researcher += Html('span', r.addr, id='streetaddress')
|
2009-06-16 00:55:43 +05:30
|
|
|
text = "".join([r.city, r.state, r.postal])
|
|
|
|
if text:
|
|
|
|
city = Html('span', r.city, id='city', inline=True)
|
|
|
|
state = Html('span', r.state, id='state', inline=True)
|
|
|
|
postal = Html('span', r.postal, id='postalcode', inline=True)
|
|
|
|
researcher += (city, state, postal)
|
|
|
|
if r.country:
|
|
|
|
researcher += Html('span', r.country, id='country', inline=True)
|
|
|
|
if r.email:
|
|
|
|
researcher += Html('span', id='email') + (
|
|
|
|
Html('a', r.email, href='mailto:%s?subject="from GRAMPS Web Site"'
|
|
|
|
% r.email, inline=True)
|
|
|
|
)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# add clear line for proper styling
|
|
|
|
summaryarea += fullclear
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
note_id = report.options['contactnote']
|
|
|
|
if note_id:
|
2009-06-21 08:55:28 +05:30
|
|
|
note = db.get_note_from_gramps_id(note_id)
|
2009-07-15 05:23:07 +05:30
|
|
|
note_text = self.get_note_format(note)
|
2009-06-20 05:23:42 +05:30
|
|
|
|
2009-07-15 05:23:07 +05:30
|
|
|
# attach note
|
|
|
|
summaryarea += note_text
|
2005-12-06 12:08:09 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# add clearline for proper styling
|
2009-06-16 00:55:43 +05:30
|
|
|
# add footer section
|
2009-06-25 01:33:10 +05:30
|
|
|
footer = self.write_footer()
|
2009-06-16 00:55:43 +05:30
|
|
|
body += (fullclear, footer)
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# send page out for processing
|
2009-06-25 01:33:10 +05:30
|
|
|
# and close the file
|
2009-06-16 00:55:43 +05:30
|
|
|
self.mywriter(contactpage, of)
|
2005-02-10 07:14:05 +05:30
|
|
|
|
|
|
|
class IndividualPage(BasePage):
|
2008-03-10 01:42:56 +05:30
|
|
|
"""
|
|
|
|
This class is used to write HTML for an individual.
|
|
|
|
"""
|
2005-02-01 09:16:29 +05:30
|
|
|
|
|
|
|
gender_map = {
|
2009-04-09 10:04:24 +05:30
|
|
|
Person.MALE : _('male'),
|
|
|
|
Person.FEMALE : _('female'),
|
|
|
|
Person.UNKNOWN : _('unknown'),
|
2005-02-01 09:16:29 +05:30
|
|
|
}
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
def __init__(self, report, title, person, ind_list, place_list, src_list):
|
|
|
|
BasePage.__init__(self, report, title, person.gramps_id)
|
2005-02-01 09:16:29 +05:30
|
|
|
self.person = person
|
|
|
|
self.ind_list = ind_list
|
2008-03-14 03:58:22 +05:30
|
|
|
self.src_list = src_list # Used by get_citation_links()
|
2007-07-23 17:52:03 +05:30
|
|
|
self.bibli = Bibliography()
|
2005-02-16 11:11:33 +05:30
|
|
|
self.place_list = place_list
|
2009-06-21 08:55:28 +05:30
|
|
|
self.sort_name = self.get_name(self.person)
|
|
|
|
self.name = self.get_name(self.person)
|
|
|
|
db = report.database
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2008-03-21 03:54:36 +05:30
|
|
|
of = self.report.create_file(person.handle, 'ppl')
|
2008-03-12 03:41:42 +05:30
|
|
|
self.up = True
|
2009-06-16 00:55:43 +05:30
|
|
|
indivdetpage, body = self.write_header(self.sort_name)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# begin individualdetail division
|
2009-07-15 05:23:07 +05:30
|
|
|
with Html('div', class_='content', id='IndividualDetail') as individualdetail:
|
|
|
|
body += individualdetail
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# display a person's general data
|
|
|
|
thumbnail, name, summary = self.display_ind_general()
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# if there is a thumbnail, add it also?
|
|
|
|
if thumbnail is not None:
|
2009-07-15 05:23:07 +05:30
|
|
|
individualdetail += (thumbnail, name, summary)
|
2009-06-16 00:55:43 +05:30
|
|
|
else:
|
2009-07-15 05:23:07 +05:30
|
|
|
individualdetail += (name, summary)
|
2009-06-16 00:55:43 +05:30
|
|
|
|
|
|
|
# display a person's events
|
|
|
|
sect2 = self.display_ind_events()
|
|
|
|
if sect2 is not None:
|
2009-07-15 05:23:07 +05:30
|
|
|
individualdetail += sect2
|
2009-06-16 00:55:43 +05:30
|
|
|
|
|
|
|
# display attributes
|
2009-08-09 13:25:53 +05:30
|
|
|
sect3 = self.display_attr_list(self.person)
|
2009-06-16 00:55:43 +05:30
|
|
|
if sect3 is not None:
|
2009-07-15 05:23:07 +05:30
|
|
|
individualdetail += sect3
|
2009-06-16 00:55:43 +05:30
|
|
|
|
|
|
|
# display parents
|
|
|
|
sect4 = self.display_ind_parents()
|
|
|
|
if sect4 is not None:
|
2009-07-15 05:23:07 +05:30
|
|
|
individualdetail += sect4
|
2009-06-16 00:55:43 +05:30
|
|
|
|
|
|
|
# display relationships
|
|
|
|
sect5 = self.display_ind_families()
|
|
|
|
if sect5 is not None:
|
2009-07-15 05:23:07 +05:30
|
|
|
individualdetail += sect5
|
2009-06-16 00:55:43 +05:30
|
|
|
|
2009-07-31 17:00:14 +05:30
|
|
|
# display LDS ordinance
|
|
|
|
sect6 = self.display_lds_ordinance(self.person)
|
2009-06-16 00:55:43 +05:30
|
|
|
if sect6 is not None:
|
2009-07-31 17:00:14 +05:30
|
|
|
individualdetail += sect6
|
|
|
|
|
|
|
|
# display address(es)
|
|
|
|
sect7 = self.display_addresses()
|
|
|
|
if sect7 is not None:
|
|
|
|
individualdetail += sect7
|
2009-06-16 00:55:43 +05:30
|
|
|
|
|
|
|
media_list = []
|
|
|
|
photo_list = self.person.get_media_list()
|
|
|
|
if len(photo_list) > 1:
|
|
|
|
media_list = photo_list[1:]
|
|
|
|
for handle in self.person.get_family_handle_list():
|
2009-06-21 08:55:28 +05:30
|
|
|
family = db.get_family_from_handle(handle)
|
2009-06-16 00:55:43 +05:30
|
|
|
media_list += family.get_media_list()
|
|
|
|
for evt_ref in family.get_event_ref_list():
|
2009-06-21 08:55:28 +05:30
|
|
|
event = db.get_event_from_handle(evt_ref.ref)
|
2009-06-16 00:55:43 +05:30
|
|
|
media_list += event.get_media_list()
|
|
|
|
for evt_ref in self.person.get_primary_event_ref_list():
|
2009-06-21 08:55:28 +05:30
|
|
|
event = db.get_event_from_handle(evt_ref.ref)
|
2009-06-16 00:55:43 +05:30
|
|
|
if event:
|
|
|
|
media_list += event.get_media_list()
|
|
|
|
|
|
|
|
# display additional images as gallery
|
2009-07-31 17:00:14 +05:30
|
|
|
sect8 = self.display_additional_images_as_gallery(media_list)
|
2009-06-16 00:55:43 +05:30
|
|
|
if sect8 is not None:
|
2009-07-15 05:23:07 +05:30
|
|
|
individualdetail += sect8
|
2009-06-16 00:55:43 +05:30
|
|
|
|
2009-07-31 17:00:14 +05:30
|
|
|
# display notes
|
|
|
|
sect9 = self.display_note_list(self.person.get_note_list())
|
2009-06-16 00:55:43 +05:30
|
|
|
if sect9 is not None:
|
2009-07-15 05:23:07 +05:30
|
|
|
individualdetail += sect9
|
2009-06-16 00:55:43 +05:30
|
|
|
|
2009-07-31 17:00:14 +05:30
|
|
|
# display web links
|
|
|
|
sect10 = self.display_url_list(self.person.get_url_list())
|
2009-06-16 00:55:43 +05:30
|
|
|
if sect10 is not None:
|
2009-07-15 05:23:07 +05:30
|
|
|
individualdetail += sect10
|
2009-06-16 00:55:43 +05:30
|
|
|
|
2009-07-31 17:00:14 +05:30
|
|
|
# display sources
|
2009-08-21 06:07:59 +05:30
|
|
|
sect11 = self.display_ind_sources(self.person.get_source_references() )
|
2009-06-17 03:24:06 +05:30
|
|
|
if sect11 is not None:
|
2009-07-15 05:23:07 +05:30
|
|
|
individualdetail += sect11
|
2009-06-16 00:55:43 +05:30
|
|
|
|
2009-08-13 07:20:13 +05:30
|
|
|
# display associations
|
|
|
|
assocs = self.person.get_person_ref_list()
|
2009-08-16 01:58:22 +05:30
|
|
|
if assocs:
|
|
|
|
individualdetail += self.display_ind_associations(assocs)
|
2009-08-13 07:20:13 +05:30
|
|
|
|
2009-07-31 17:00:14 +05:30
|
|
|
# display pedigree
|
2009-08-13 07:20:13 +05:30
|
|
|
sect13 = self.display_ind_pedigree()
|
|
|
|
if sect13 is not None:
|
|
|
|
individualdetail += sect13
|
2009-07-31 17:00:14 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
# display ancestor tree
|
|
|
|
if report.options['graph']:
|
2009-08-13 07:20:13 +05:30
|
|
|
sect14 = self.display_tree()
|
|
|
|
if sect14 is not None:
|
|
|
|
individualdetail += sect14
|
2008-03-14 03:58:22 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# add clearline for proper styling
|
2009-06-16 00:55:43 +05:30
|
|
|
# create footer section
|
2009-06-25 01:33:10 +05:30
|
|
|
footer = self.write_footer()
|
2009-06-16 00:55:43 +05:30
|
|
|
body += (fullclear, footer)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# send page out for processing
|
2009-06-25 01:33:10 +05:30
|
|
|
# and close the file
|
2009-06-16 00:55:43 +05:30
|
|
|
self.mywriter(indivdetpage, of)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
def draw_box(self, center, col, person):
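"""
Draw one person box of the ancestor tree graph: an absolutely positioned
<div> (plus its drop shadow) whose vertical position comes from 'center'
and whose horizontal position is derived from the generation column 'col'.
"""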
|
2009-06-16 00:55:43 +05:30
|
|
|
db = self.report.database
|
|
|
|
|
2008-02-10 09:39:09 +05:30
|
|
|
top = center - _HEIGHT/2
|
|
|
|
xoff = _XOFFSET+col*(_WIDTH+_HGAP)
|
2009-02-04 12:37:43 +05:30
|
|
|
sex = person.gender
|
2009-04-09 10:04:24 +05:30
|
|
|
if sex == Person.MALE:
|
2008-10-22 00:27:42 +05:30
|
|
|
divclass = "male"
|
2009-04-09 10:04:24 +05:30
|
|
|
elif sex == Person.FEMALE:
|
2008-10-22 00:27:42 +05:30
|
|
|
divclass = "female"
|
2008-07-16 13:29:29 +05:30
|
|
|
else:
|
2008-10-22 00:27:42 +05:30
|
|
|
divclass = "unknown"
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
boxbg = Html('div', class_="boxbg %s AncCol%s" % (divclass, col),
|
|
|
|
style="top: %dpx; left: %dpx;" % (top, xoff+1)
|
|
|
|
)
|
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
person_name = self.get_name(person)
|
2008-03-10 01:42:56 +05:30
|
|
|
if person.handle in self.ind_list:
|
2008-10-03 16:08:19 +05:30
|
|
|
thumbnailUrl = None
|
2009-06-30 01:34:00 +05:30
|
|
|
if self.create_media and col < 5:
|
2008-10-03 16:08:19 +05:30
|
|
|
photolist = person.get_media_list()
|
|
|
|
if photolist:
|
|
|
|
photo_handle = photolist[0].get_reference_handle()
|
2009-06-16 00:55:43 +05:30
|
|
|
photo = db.get_object_from_handle(photo_handle)
|
2008-10-03 16:08:19 +05:30
|
|
|
mime_type = photo.get_mime_type()
|
|
|
|
if mime_type:
|
|
|
|
(photoUrl, thumbnailUrl) = self.report.prepare_copy_media(photo)
|
|
|
|
thumbnailUrl = '/'.join(['..']*3 + [thumbnailUrl])
|
2009-08-30 02:23:31 +05:30
|
|
|
if ( Utils.win ):
|
|
|
|
thumbnailUrl = thumbnailUrl.replace('\\','/')
|
2008-03-21 03:54:36 +05:30
|
|
|
url = self.report.build_url_fname_html(person.handle, 'ppl', True)
|
2009-06-21 08:55:28 +05:30
|
|
|
boxbg += self.person_link(url, person, name_style=True,
|
|
|
|
thumbnailUrl=thumbnailUrl)
|
2005-12-06 12:08:09 +05:30
|
|
|
else:
|
2009-06-21 08:55:28 +05:30
|
|
|
boxbg += Html('span', person_name, class_="unlinked", inline=True)
|
|
|
|
shadow = Html('div', class_="shadow", inline=True, style="top: %dpx; left: %dpx;"
|
|
|
|
% (top+_SHADOW, xoff+_SHADOW) )
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
return [boxbg, shadow]
|
|
|
|
|
|
|
|
def extend_line(self, y0, x0):
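"""
Draw the short horizontal stub (and its shadow) that leads from the box at
vertical position y0, starting at x0, toward the next generation column.
"""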
|
|
|
|
style = "top: %dpx; left: %dpx; width: %dpx"
|
|
|
|
bv = Html('div', class_="bvline", inline=True,
|
|
|
|
style=style % (y0, x0, _HGAP/2)
|
|
|
|
)
|
|
|
|
gv = Html('div', class_="gvline", inline=True,
|
|
|
|
style=style % (y0+_SHADOW, x0, _HGAP/2+_SHADOW)
|
|
|
|
)
|
|
|
|
return [bv, gv]
|
|
|
|
|
|
|
|
def connect_line(self, y0, y1, col):
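"""
Draw the connector lines (and their shadows) that join a box at vertical
position y0 to its parent's box at y1 in generation column 'col'.
"""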
|
|
|
|
y = min(y0, y1)
|
|
|
|
stylew = "top: %dpx; left: %dpx; width: %dpx;"
|
|
|
|
styleh = "top: %dpx; left: %dpx; height: %dpx;"
|
2008-02-10 09:39:09 +05:30
|
|
|
x0 = _XOFFSET + col * _WIDTH + (col-1)*_HGAP + _HGAP/2
|
2009-06-11 22:15:30 +05:30
|
|
|
bv = Html('div', class_="bvline", inline=True, style=stylew %
|
2008-03-10 01:42:56 +05:30
|
|
|
(y1, x0, _HGAP/2))
|
2009-06-11 22:15:30 +05:30
|
|
|
gv = Html('div', class_="gvline", inline=True, style=stylew %
|
2008-03-10 01:42:56 +05:30
|
|
|
(y1+_SHADOW, x0+_SHADOW, _HGAP/2+_SHADOW))
|
2009-06-11 22:15:30 +05:30
|
|
|
bh = Html('div', class_="bhline", inline=True, style=styleh %
|
2008-03-10 01:42:56 +05:30
|
|
|
(y, x0, abs(y0-y1)))
|
2009-06-11 22:15:30 +05:30
|
|
|
gh = Html('div', class_="gvline", inline=True, style=styleh %
|
2008-03-10 01:42:56 +05:30
|
|
|
(y+_SHADOW, x0+_SHADOW, abs(y0-y1)))
|
2009-06-11 22:15:30 +05:30
|
|
|
return [bv, gv, bh, gh]
|
2005-12-06 12:08:09 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
def draw_connected_box(self, center1, center2, col, handle):
|
2009-06-16 00:55:43 +05:30
|
|
|
db = self.report.database
|
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
box = []
|
2005-12-06 12:08:09 +05:30
|
|
|
if not handle:
|
2009-06-11 22:15:30 +05:30
|
|
|
return box
|
2009-06-16 00:55:43 +05:30
|
|
|
person = db.get_person_from_handle(handle)
|
2009-06-11 22:15:30 +05:30
|
|
|
box = self.draw_box(center2, col, person)
|
|
|
|
box += self.connect_line(center1, center2, col)
|
|
|
|
return box
|
|
|
|
|
|
|
|
def display_tree(self):
|
|
|
|
tree = []
|
2006-05-14 11:21:46 +05:30
|
|
|
if not self.person.get_main_parents_family_handle():
|
2009-06-11 22:15:30 +05:30
|
|
|
return None
|
2005-12-06 12:08:09 +05:30
|
|
|
|
2008-03-18 02:39:16 +05:30
|
|
|
generations = self.report.options['graphgens']
|
2008-03-10 01:42:56 +05:30
|
|
|
max_in_col = 1 << (generations-1)
|
2008-02-10 09:39:09 +05:30
|
|
|
max_size = _HEIGHT*max_in_col + _VGAP*(max_in_col+1)
|
2006-05-14 11:21:46 +05:30
|
|
|
center = int(max_size/2)
|
2008-03-06 18:37:37 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
with Html('div', id="tree", class_="subsection") as tree:
|
|
|
|
tree += Html('h4', _('Ancestors'), inline=True)
|
|
|
|
with Html('div', id="treeContainer",
|
|
|
|
style="width:%dpx; height:%dpx;" %
|
|
|
|
(_XOFFSET+(generations)*_WIDTH+(generations-1)*_HGAP,
|
|
|
|
max_size)
|
|
|
|
) as container:
|
|
|
|
tree += container
|
|
|
|
container += self.draw_tree(1, generations, max_size,
|
|
|
|
0, center, self.person.handle)
|
|
|
|
return tree
|
|
|
|
|
|
|
|
def draw_tree(self, gen_nr, maxgen, max_size, old_center, new_center, phandle):
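"""
Recursively draw the ancestor tree: place the box for phandle at
new_center (connecting it to its child at old_center for generations
beyond the first), then recurse into the father and mother with a
vertical offset that halves at each generation, until maxgen is reached.
"""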
|
2009-06-16 00:55:43 +05:30
|
|
|
db = self.report.database
|
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
tree = []
|
2008-03-15 02:37:35 +05:30
|
|
|
if gen_nr > maxgen:
|
2009-06-11 22:15:30 +05:30
|
|
|
return tree
|
2008-03-15 02:37:35 +05:30
|
|
|
gen_offset = int(max_size / pow(2, gen_nr+1))
|
2009-06-16 00:55:43 +05:30
|
|
|
person = db.get_person_from_handle(phandle)
|
2006-05-14 11:21:46 +05:30
|
|
|
if not person:
|
2009-06-11 22:15:30 +05:30
|
|
|
return tree
|
2005-12-06 12:08:09 +05:30
|
|
|
|
2008-03-15 02:37:35 +05:30
|
|
|
if gen_nr == 1:
|
2009-06-11 22:15:30 +05:30
|
|
|
tree = self.draw_box(new_center, 0, person)
|
2006-05-14 11:21:46 +05:30
|
|
|
else:
|
2009-06-11 22:15:30 +05:30
|
|
|
tree = self.draw_connected_box(old_center, new_center, gen_nr-1, phandle)
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2008-03-15 02:37:35 +05:30
|
|
|
if gen_nr == maxgen:
|
2009-06-11 22:15:30 +05:30
|
|
|
return tree
|
2005-12-06 12:08:09 +05:30
|
|
|
|
2006-05-14 11:21:46 +05:30
|
|
|
family_handle = person.get_main_parents_family_handle()
|
|
|
|
if family_handle:
|
2008-03-15 02:37:35 +05:30
|
|
|
line_offset = _XOFFSET + gen_nr*_WIDTH + (gen_nr-1)*_HGAP
|
2009-06-11 22:15:30 +05:30
|
|
|
tree += self.extend_line(new_center, line_offset)
|
2005-12-06 12:08:09 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
family = db.get_family_from_handle(family_handle)
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2006-05-14 11:21:46 +05:30
|
|
|
f_center = new_center-gen_offset
|
|
|
|
f_handle = family.get_father_handle()
|
2009-06-11 22:15:30 +05:30
|
|
|
tree += self.draw_tree(gen_nr+1, maxgen, max_size,
|
|
|
|
new_center, f_center, f_handle)
|
2005-12-06 12:08:09 +05:30
|
|
|
|
2006-05-14 11:21:46 +05:30
|
|
|
m_center = new_center+gen_offset
|
|
|
|
m_handle = family.get_mother_handle()
|
2009-06-11 22:15:30 +05:30
|
|
|
tree += self.draw_tree(gen_nr+1, maxgen, max_size,
|
|
|
|
new_center, m_center, m_handle)
|
|
|
|
return tree
|
|
|
|
|
2009-08-21 06:07:59 +05:30
|
|
|
def display_ind_sources(self, sourcelist):
|
2009-08-19 03:10:36 +05:30
|
|
|
"""
|
|
|
|
will create the "Source References" section for a person
|
|
|
|
"""
|
2005-12-06 12:08:09 +05:30
|
|
|
|
2009-08-22 16:13:17 +05:30
|
|
|
for sref in sourcelist:
|
2007-07-23 17:52:03 +05:30
|
|
|
self.bibli.add_reference(sref)
|
2009-06-11 22:15:30 +05:30
|
|
|
sourcerefs = self.display_source_refs(self.bibli)
|
|
|
|
|
|
|
|
# return to its caller
|
|
|
|
return sourcerefs
|
|
|
|
|
2009-08-13 07:20:13 +05:30
|
|
|
def display_ind_associations(self, assoclist):
|
|
|
|
"""
|
|
|
|
display an individual's associations
|
|
|
|
"""
|
|
|
|
|
|
|
|
# begin Associations division
|
|
|
|
with Html('div', class_='subsection', id='Associations') as section:
|
|
|
|
section += Html('h4', _('Associations'), inline=True)
|
|
|
|
|
|
|
|
with Html('table', class_='infolist assoclist') as table:
|
|
|
|
section += table
|
|
|
|
|
|
|
|
thead = Html('thead')
|
|
|
|
table += thead
|
|
|
|
|
|
|
|
trow = Html('tr')
|
|
|
|
thead += trow
|
|
|
|
|
|
|
|
assoc_row = [
|
|
|
|
(_('Relationship'), 'Relationship'),
|
|
|
|
(SHEAD, 'Sources'),
|
|
|
|
(NHEAD, 'Notes') ]
|
|
|
|
|
|
|
|
for (label, colclass) in assoc_row:
|
|
|
|
trow += Html('th', label, class_='Column%s' % colclass, inline=True)
|
|
|
|
|
|
|
|
tbody = Html('tbody')
|
|
|
|
table += tbody
|
|
|
|
|
|
|
|
for person_ref in assoclist:
|
|
|
|
|
|
|
|
trow = Html('tr')
|
|
|
|
tbody += trow
|
|
|
|
|
|
|
|
index = 0
|
|
|
|
for data in [
|
|
|
|
[person_ref.get_relation()],
|
|
|
|
[self.get_citation_links(person_ref.get_source_references())],
|
|
|
|
[self.dump_notes(person_ref.get_note_list())] ]:
|
|
|
|
|
|
|
|
# get colclass from assoc_row
|
|
|
|
colclass = assoc_row[index][1]
|
|
|
|
|
|
|
|
trow += Html('td', data, class_='Column%s' % colclass, inline=True)
|
|
|
|
index += 1
|
|
|
|
|
|
|
|
# return section to its callers
|
|
|
|
return section
|
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
def display_ind_pedigree(self):
|
|
|
|
"""
|
|
|
|
Display an individual's pedigree
|
|
|
|
"""
|
2009-06-16 00:55:43 +05:30
|
|
|
db = self.report.database
|
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# Define helper functions
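# children_ped fills an ordered list with this person and his/her siblings
# in the order they appear in the parents' family, delegating to child_ped
# when it reaches the person whose page this is; child_ped adds that
# "thisperson" entry and nests the person's own families (built by
# pedigree_family) beneath it.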
|
|
|
|
|
|
|
|
def children_ped(ol):
|
|
|
|
if family:
|
|
|
|
for child_ref in family.get_child_ref_list():
|
|
|
|
child_handle = child_ref.ref
|
|
|
|
if child_handle == self.person.handle:
|
|
|
|
child_ped(ol)
|
|
|
|
else:
|
2009-06-16 00:55:43 +05:30
|
|
|
child = db.get_person_from_handle(child_handle)
|
2009-06-11 22:15:30 +05:30
|
|
|
ol += Html('li') + self.pedigree_person(child)
|
|
|
|
else:
|
|
|
|
child_ped(ol)
|
|
|
|
return ol
|
|
|
|
|
|
|
|
def child_ped(ol):
|
2009-06-21 08:55:28 +05:30
|
|
|
ol += Html('li', class_="thisperson", inline=True) + self.name
|
2009-06-11 22:15:30 +05:30
|
|
|
family = self.pedigree_family()
|
|
|
|
if family:
|
|
|
|
ol += Html('ol', class_="spouselist") + family
|
|
|
|
return ol
|
|
|
|
|
|
|
|
# End of helper functions
|
2005-02-10 07:14:05 +05:30
|
|
|
|
2005-02-04 09:24:48 +05:30
|
|
|
parent_handle_list = self.person.get_parent_family_handle_list()
|
|
|
|
if parent_handle_list:
|
2006-04-23 08:28:53 +05:30
|
|
|
parent_handle = parent_handle_list[0]
|
2009-06-16 00:55:43 +05:30
|
|
|
family = db.get_family_from_handle(parent_handle)
|
2009-06-11 22:15:30 +05:30
|
|
|
father_handle = family.get_father_handle()
|
|
|
|
mother_handle = family.get_mother_handle()
|
2009-06-16 00:55:43 +05:30
|
|
|
mother = db.get_person_from_handle(mother_handle)
|
|
|
|
father = db.get_person_from_handle(father_handle)
|
2005-02-04 09:24:48 +05:30
|
|
|
else:
|
|
|
|
family = None
|
|
|
|
father = None
|
|
|
|
mother = None
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
with Html('div', id="pedigree", class_="subsection") as ped:
|
|
|
|
ped += Html('h4', _('Pedigree'), inline=True)
|
|
|
|
with Html('ol', class_="pedigreegen") as pedol:
|
|
|
|
ped += pedol
|
|
|
|
if father and mother:
|
|
|
|
pedfa = Html('li') + self.pedigree_person(father)
|
|
|
|
pedol += pedfa
|
|
|
|
with Html('ol') as pedma:
|
|
|
|
pedfa += pedma
|
|
|
|
pedma += (Html('li', class_="spouse") +
|
|
|
|
self.pedigree_person(mother) +
|
|
|
|
children_ped(Html('ol'))
|
|
|
|
)
|
|
|
|
elif father:
|
|
|
|
pedol += (Html('li') + self.pedigree_person(father) +
|
|
|
|
children_ped(Html('ol'))
|
|
|
|
)
|
|
|
|
elif mother:
|
|
|
|
pedol += (Html('li') + self.pedigree_person(mother) +
|
|
|
|
children_ped(Html('ol'))
|
|
|
|
)
|
2005-02-04 09:24:48 +05:30
|
|
|
else:
|
2009-06-11 22:15:30 +05:30
|
|
|
pedol += children_ped(Html('ol'))
|
|
|
|
return ped
|
|
|
|
|
|
|
|
def display_ind_general(self):
|
|
|
|
"""
|
|
|
|
display an individual's general information...
|
|
|
|
"""
|
2009-06-16 00:55:43 +05:30
|
|
|
db = self.report.database
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2005-08-18 11:28:28 +05:30
|
|
|
self.page_title = self.sort_name
|
2009-06-11 22:15:30 +05:30
|
|
|
thumbnail = self.display_first_image_as_thumbnail(self.person.get_media_list())
|
2005-02-01 09:16:29 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
sect_name = Html('h3', self.sort_name.strip(), inline=True)
|
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
# begin summaryarea division
|
|
|
|
with Html('div', id='summaryarea') as summaryarea:
|
|
|
|
|
|
|
|
# begin general details table
|
|
|
|
with Html('table', class_='infolist') as table:
|
|
|
|
summaryarea += table
|
|
|
|
|
|
|
|
primary_name = self.person.get_primary_name()
|
|
|
|
all_names = [primary_name] + self.person.get_alternate_names()
|
|
|
|
|
|
|
|
# Names [and their sources]
|
|
|
|
for name in all_names:
|
|
|
|
pname = _nd.display_name(name)
|
|
|
|
pname += self.get_citation_links( name.get_source_references() )
|
|
|
|
|
|
|
|
# if we have just a firstname, then the name is preceded by ", "
|
|
|
|
# which doesn't exactly look very nice printed on the web page
|
|
|
|
if pname[:2] == ', ':
|
|
|
|
pname = pname[2:]
|
|
|
|
|
|
|
|
type_ = str( name.get_type() )
|
|
|
|
trow = Html('tr') + (
|
2009-06-30 01:34:00 +05:30
|
|
|
Html('td', type_, class_='ColumnAttribute', inline=True)
|
2009-06-21 08:55:28 +05:30
|
|
|
)
|
|
|
|
table += trow
|
|
|
|
tcell = Html('td', pname, class_='ColumnValue', inline=True)
|
|
|
|
trow += tcell
|
|
|
|
|
|
|
|
# display any notes associated with this name
|
|
|
|
notelist = name.get_note_list()
|
|
|
|
if len(notelist):
|
|
|
|
unordered = Html('ul')
|
2009-07-15 05:23:07 +05:30
|
|
|
tcell += unordered
|
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
for notehandle in notelist:
|
|
|
|
note = db.get_note_from_handle(notehandle)
|
|
|
|
if note:
|
2009-07-15 05:23:07 +05:30
|
|
|
note_text = self.get_note_format(note)
|
2009-06-20 05:23:42 +05:30
|
|
|
|
2009-07-15 05:23:07 +05:30
|
|
|
# attach note
|
|
|
|
unordered += note_text
|
2009-06-21 08:55:28 +05:30
|
|
|
|
|
|
|
# display call names
|
|
|
|
first_name = primary_name.get_first_name()
|
|
|
|
for name in all_names:
|
|
|
|
call_name = name.get_call_name()
|
|
|
|
if call_name and call_name != first_name:
|
|
|
|
call_name += self.get_citation_links(
|
|
|
|
name.get_source_references() )
|
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', _('Common Name'), class_='ColumnAttribute',
|
|
|
|
inline=True),
|
|
|
|
Html('td', call_name, class_='ColumnValue', inline=True)
|
|
|
|
)
|
|
|
|
table += trow
|
|
|
|
|
|
|
|
# display the nickname attribute
|
|
|
|
nick_name = self.person.get_nick_name()
|
|
|
|
if nick_name and nick_name != first_name:
|
|
|
|
nick_name += self.get_citation_links(
|
|
|
|
self.person.get_source_references() )
|
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', _('Nick Name'), class_='ColumnAttribute',
|
|
|
|
inline=True),
|
|
|
|
Html('td', nick_name, class_='ColumnValue', inline=True)
|
|
|
|
)
|
|
|
|
table += trow
|
|
|
|
|
|
|
|
# GRAMPS ID
|
|
|
|
if not self.noid:
|
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', _('GRAMPS ID'), class_='ColumnAttribute',
|
|
|
|
inline=True),
|
|
|
|
Html('td', self.person.gramps_id, class_='ColumnValue',
|
|
|
|
inline=True)
|
|
|
|
)
|
|
|
|
table += trow
|
2009-01-29 07:43:48 +05:30
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
# Gender
|
|
|
|
gender = self.gender_map[self.person.gender]
|
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', _('Gender'), class_='ColumnAttribute', inline=True),
|
|
|
|
Html('td', gender, class_='ColumnValue', inline=True)
|
|
|
|
)
|
|
|
|
table += trow
|
|
|
|
|
|
|
|
# Age At Death???
|
|
|
|
birth_ref = self.person.get_birth_ref()
|
|
|
|
birth_date = None
|
|
|
|
if birth_ref:
|
|
|
|
birth_event = db.get_event_from_handle(birth_ref.ref)
|
|
|
|
birth_date = birth_event.get_date_object()
|
|
|
|
|
|
|
|
if birth_date is not None:
|
|
|
|
alive = probably_alive(self.person, db, date.Today())
|
|
|
|
death_ref = self.person.get_death_ref()
|
|
|
|
death_date = None
|
|
|
|
if death_ref:
|
|
|
|
death_event = db.get_event_from_handle(death_ref.ref)
|
|
|
|
death_date = death_event.get_date_object()
|
|
|
|
|
|
|
|
if not alive and death_date is not None:
|
|
|
|
nyears = death_date - birth_date
|
|
|
|
nyears.format(precision=3)
|
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', _('Age at Death'), class_='ColumnAttribute',
|
|
|
|
inline=True),
|
|
|
|
Html('td', nyears, class_='ColumnValue', inline=True)
|
|
|
|
)
|
|
|
|
table += trow
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# return all three pieces to its caller
|
|
|
|
# do NOT combine before returning to class IndividualPage
|
|
|
|
return thumbnail, sect_name, summaryarea
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2009-08-11 18:19:27 +05:30
|
|
|
def attribs_or_not(self, db, evt_ref_list):
|
|
|
|
""" determine if there are attributes for these events """
|
|
|
|
|
|
|
|
for event_ref in evt_ref_list:
|
|
|
|
event = db.get_event_from_handle(event_ref.ref)
|
|
|
|
|
|
|
|
attrlist = event.get_attribute_list()
|
|
|
|
attrlist.extend(event_ref.get_attribute_list() )
|
|
|
|
if attrlist:
|
|
|
|
return True
|
|
|
|
|
|
|
|
# return True or False back to its caller
|
|
|
|
return False
|
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
def display_ind_events(self):
|
|
|
|
"""
|
|
|
|
will create the events table
|
|
|
|
"""
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
evt_ref_list = self.person.get_event_ref_list()
|
2008-05-21 00:54:03 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
if not evt_ref_list:
|
|
|
|
return None
|
2009-06-16 00:55:43 +05:30
|
|
|
db = self.report.database
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-08-11 12:17:42 +05:30
|
|
|
# begin events division and section title
|
2009-06-11 22:15:30 +05:30
|
|
|
with Html('div', class_='subsection', id='events') as section:
|
|
|
|
section += Html('h4', _('Events'), inline=True)
|
2009-08-11 12:17:42 +05:30
|
|
|
|
2009-08-11 18:19:27 +05:30
|
|
|
# determine if there are attributes or not?
|
|
|
|
attribtable = self.attribs_or_not(db, evt_ref_list)
|
2009-08-11 12:17:42 +05:30
|
|
|
|
2009-08-11 18:19:27 +05:30
|
|
|
# attributes: yes!
|
|
|
|
if attribtable:
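# when any event carries attributes, each event is written into its own
# table so that its attribute sub-table can follow it directly; otherwise
# (the else branch below) all events share a single table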
|
2009-08-11 12:17:42 +05:30
|
|
|
|
|
|
|
for event_ref in evt_ref_list:
|
2009-08-11 18:19:27 +05:30
|
|
|
|
|
|
|
# begin events table
|
|
|
|
with Html('table', class_='infolist eventtable') as table:
|
|
|
|
section += table
|
|
|
|
|
|
|
|
# begin table head
|
|
|
|
thead = Html('thead')
|
|
|
|
table += thead
|
|
|
|
thead += self.display_event_header()
|
|
|
|
|
|
|
|
tbody = Html('tbody')
|
|
|
|
table += tbody
|
|
|
|
|
|
|
|
# ordered list
|
|
|
|
ordered = Html('ol')
|
|
|
|
tbody += ordered
|
|
|
|
|
|
|
|
event = db.get_event_from_handle(event_ref.ref)
|
|
|
|
if event:
|
|
|
|
ordered += self.display_event_row(event, event_ref)
|
|
|
|
|
|
|
|
# attributes list
|
|
|
|
attrlist = event.get_attribute_list()
|
|
|
|
attrlist.extend(event_ref.get_attribute_list() )
|
|
|
|
if attrlist:
|
|
|
|
ordered += self.dump_attributes(attrlist)
|
|
|
|
|
|
|
|
# no attributes for these events
|
|
|
|
else:
|
|
|
|
|
|
|
|
# begin events table
|
|
|
|
with Html('table', class_='infolist eventtable') as table:
|
|
|
|
section += table
|
|
|
|
|
|
|
|
# begin table head
|
|
|
|
thead = Html('thead')
|
|
|
|
table += thead
|
|
|
|
thead += self.display_event_header()
|
|
|
|
|
|
|
|
tbody = Html('tbody')
|
|
|
|
table += tbody
|
|
|
|
|
|
|
|
# ordered list
|
|
|
|
ordered = Html('ol')
|
|
|
|
tbody += ordered
|
|
|
|
|
|
|
|
for event_ref in evt_ref_list:
|
|
|
|
|
|
|
|
event = db.get_event_from_handle(event_ref.ref)
|
|
|
|
if event:
|
|
|
|
ordered += self.display_event_row(event, event_ref)
|
2009-08-11 12:17:42 +05:30
|
|
|
|
|
|
|
# return section to its caller
|
2009-06-11 22:15:30 +05:30
|
|
|
return section
|
|
|
|
|
2009-08-11 18:19:27 +05:30
|
|
|
def dump_attributes(self, attrlist):
|
|
|
|
"""
|
|
|
|
dump event attributes list
|
|
|
|
|
|
|
|
@param: attrlist -- list of attributes for event or event_ref
|
|
|
|
"""
|
|
|
|
|
|
|
|
# begin section table
|
|
|
|
with Html('table', class_='infolist attrlist') as table:
|
|
|
|
|
|
|
|
# begin table head
|
|
|
|
thead = Html('thead')
|
|
|
|
table += thead
|
|
|
|
|
|
|
|
trow = Html('tr')
|
|
|
|
thead += trow
|
|
|
|
|
2009-08-19 03:10:36 +05:30
|
|
|
attr_header_row = [
|
2009-08-11 18:19:27 +05:30
|
|
|
(AHEAD, 'Title'),
|
|
|
|
(THEAD, 'Type'),
|
|
|
|
(VHEAD, 'Value'),
|
2009-08-19 03:10:36 +05:30
|
|
|
(SHEAD, 'Sources'),
|
|
|
|
(NHEAD, 'Notes') ]
|
2009-08-11 18:19:27 +05:30
|
|
|
|
2009-08-19 03:10:36 +05:30
|
|
|
for (label, colclass) in attr_header_row:
|
2009-08-11 18:19:27 +05:30
|
|
|
trow += Html('th', label, class_='Column%s' % colclass, inline=True)
|
|
|
|
|
|
|
|
# begin table body
|
|
|
|
tbody = Html('tbody')
|
|
|
|
table += tbody
|
|
|
|
|
|
|
|
for attr in attrlist:
|
|
|
|
trow = Html('tr')
|
|
|
|
tbody += trow
|
|
|
|
|
|
|
|
attr_data_row = [
|
2009-08-19 03:10:36 +05:30
|
|
|
(None),
|
|
|
|
(str(attr.get_type()) ),
|
|
|
|
(attr.get_value()),
|
|
|
|
(self.get_citation_links(attr.get_source_references()) ),
|
|
|
|
(self.dump_notes(attr.get_note_list()) ) ]
|
2009-08-11 18:19:27 +05:30
|
|
|
|
2009-08-19 03:10:36 +05:30
|
|
|
index = 0
|
|
|
|
for value in attr_data_row:
|
|
|
|
colclass = attr_header_row[index][1]
|
2009-08-11 18:19:27 +05:30
|
|
|
|
|
|
|
value = value or ' '
|
|
|
|
trow += Html('td', value, class_='Column%s' % colclass, inline=True)
|
2009-08-19 03:10:36 +05:30
|
|
|
index += 1
|
2009-08-11 18:19:27 +05:30
|
|
|
|
|
|
|
# return table to its callers
|
|
|
|
return table
|
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
def display_addresses(self):
|
|
|
|
"""
|
|
|
|
display a person's addresses ...
|
|
|
|
"""
|
2008-05-21 00:54:03 +05:30
|
|
|
|
2007-01-18 10:01:08 +05:30
|
|
|
alist = self.person.get_address_list()
|
2008-03-10 01:42:56 +05:30
|
|
|
if not alist:
|
2009-06-11 22:15:30 +05:30
|
|
|
return None
|
|
|
|
|
|
|
|
# begin addresses division and title
|
2009-06-21 08:55:28 +05:30
|
|
|
with Html('div', class_='subsection', id='Addresses') as section:
|
|
|
|
section += Html('h4', _('Addresses'), inline=True)
|
|
|
|
|
2009-07-15 05:23:07 +05:30
|
|
|
# write out addresses()
|
2009-08-11 18:19:27 +05:30
|
|
|
section += self.write_out_addresses(self.person, spec=True)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# return address division to its caller
|
2009-06-21 08:55:28 +05:30
|
|
|
return section
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-08-03 11:08:11 +05:30
|
|
|
def display_lds_ordinance(self, person):
|
2009-07-31 17:00:14 +05:30
|
|
|
"""
|
|
|
|
display LDS information for a person or family
|
|
|
|
"""
|
|
|
|
|
2009-08-03 11:08:11 +05:30
|
|
|
ldsordlist = person.lds_ord_list
|
2009-07-31 17:00:14 +05:30
|
|
|
if not ldsordlist:
|
|
|
|
return None
|
|
|
|
db = self.report.database
|
|
|
|
|
|
|
|
# begin LDS Ordinance division and section title
|
|
|
|
with Html('div', class_='subsection', id='LDSOrdinance') as section:
|
|
|
|
section += Html('h4', _('Latter-Day Saints (LDS) Ordinance'), inline=True)
|
|
|
|
|
2009-08-03 11:08:11 +05:30
|
|
|
# dump individual LDS ordinance list
|
|
|
|
section += self.dump_ordinance(db, person)
|
2009-07-31 17:00:14 +05:30
|
|
|
|
|
|
|
# return section to its caller
|
|
|
|
return section
|
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
def display_child_link(self, child_handle):
|
|
|
|
"""
|
|
|
|
display child link ...
|
|
|
|
"""
|
2009-06-16 00:55:43 +05:30
|
|
|
db = self.report.database
|
2005-02-01 09:16:29 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
child = db.get_person_from_handle(child_handle)
|
2009-06-11 22:15:30 +05:30
|
|
|
gid = child.gramps_id
|
2009-06-21 08:55:28 +05:30
|
|
|
list_item = Html('li')
|
2008-03-10 01:42:56 +05:30
|
|
|
if child_handle in self.ind_list:
|
2008-03-21 03:54:36 +05:30
|
|
|
url = self.report.build_url_fname_html(child_handle, 'ppl', True)
|
2009-06-21 08:55:28 +05:30
|
|
|
list_item += self.person_link(url, child, True, gid)
|
2009-08-19 03:10:36 +05:30
|
|
|
|
2005-08-18 11:28:28 +05:30
|
|
|
else:
|
2009-08-19 03:10:36 +05:30
|
|
|
list_item += self.get_name(child)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# return the list item to its caller
|
|
|
|
return list_item
|
|
|
|
|
|
|
|
def display_parent(self, handle, title, rel):
|
|
|
|
"""
|
|
|
|
This will display a parent ...
|
|
|
|
"""
|
2009-06-16 00:55:43 +05:30
|
|
|
db = self.report.database
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
person = db.get_person_from_handle(handle)
|
2009-06-21 08:55:28 +05:30
|
|
|
tcell1 = Html('td', title, class_='ColumnAttribute', inline=True)
|
|
|
|
tcell2 = Html('td', class_='ColumnValue')
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2008-03-12 02:49:36 +05:30
|
|
|
gid = person.gramps_id
|
2008-03-10 01:42:56 +05:30
|
|
|
if handle in self.ind_list:
|
2008-03-21 03:54:36 +05:30
|
|
|
url = self.report.build_url_fname_html(handle, 'ppl', True)
|
2009-06-21 08:55:28 +05:30
|
|
|
tcell2 += self.person_link(url, person, True, gid)
|
2005-08-18 11:28:28 +05:30
|
|
|
else:
|
2009-06-21 08:55:28 +05:30
|
|
|
person_name = self.get_name(person)
|
|
|
|
tcell2 += person_name
|
2009-04-09 10:04:24 +05:30
|
|
|
if rel and rel != ChildRefType(ChildRefType.BIRTH):
|
2009-06-21 08:55:28 +05:30
|
|
|
tcell2 += ' (%s)' % str(rel)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# return table columns to its caller
|
2009-06-21 08:55:28 +05:30
|
|
|
return tcell1, tcell2
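# Callers unpack the two cells and attach them to a row of their own, as in
# display_ind_parents() below:
#   tabcol1, tabcol2 = self.display_parent(father_handle, _('Father'), frel)
#   trow += (tabcol1, tabcol2)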
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
def display_ind_parents(self):
|
|
|
|
"""
|
|
|
|
Display a person's parents
|
|
|
|
"""
|
2005-02-01 09:16:29 +05:30
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
birthorder = self.report.options['birthorder']
|
2005-02-04 09:24:48 +05:30
|
|
|
parent_list = self.person.get_parent_family_handle_list()
|
2005-02-01 09:16:29 +05:30
|
|
|
|
2005-08-18 11:28:28 +05:30
|
|
|
if not parent_list:
|
2009-06-16 00:55:43 +05:30
|
|
|
return None
|
2009-06-21 08:55:28 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
db = self.report.database
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# begin parents division
|
2009-06-23 05:28:09 +05:30
|
|
|
with Html('div', class_='subsection', id='parents') as section:
|
|
|
|
section += Html('h4', _('Parents'), inline=True)
|
2005-02-01 09:16:29 +05:30
|
|
|
|
2009-06-23 05:28:09 +05:30
|
|
|
# begin parents table
|
|
|
|
with Html('table', class_='infolist') as table:
|
|
|
|
section += table
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-23 05:28:09 +05:30
|
|
|
first = True
|
|
|
|
if parent_list:
|
|
|
|
for family_handle in parent_list:
|
|
|
|
family = db.get_family_from_handle(family_handle)
|
2007-07-19 12:15:25 +05:30
|
|
|
|
2009-06-23 05:28:09 +05:30
|
|
|
# Get the mother and father relationships
|
|
|
|
frel = None
|
|
|
|
mrel = None
|
|
|
|
sibling = set()
|
|
|
|
|
|
|
|
child_handle = self.person.get_handle()
|
|
|
|
child_ref_list = family.get_child_ref_list()
|
|
|
|
for child_ref in child_ref_list:
|
|
|
|
if child_ref.ref == child_handle:
|
|
|
|
frel = child_ref.get_father_relation()
|
|
|
|
mrel = child_ref.get_mother_relation()
|
|
|
|
break
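# frel/mrel now hold this person's relationship to the father and mother of
# this family (for example ChildRefType.BIRTH or ChildRefType.ADOPTED);
# display_parent() shows the relationship in parentheses whenever it is
# something other than a plain birth relationship.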
|
|
|
|
|
|
|
|
if not first:
|
|
|
|
trow = Html('tr') +(
|
|
|
|
Html('td', ' ', colspan=2, inline=True)
|
|
|
|
)
|
|
|
|
table += trow
|
|
|
|
else:
|
|
|
|
first = False
|
|
|
|
|
|
|
|
father_handle = family.get_father_handle()
|
|
|
|
if father_handle:
|
|
|
|
trow = Html('tr')
|
|
|
|
table += trow
|
|
|
|
|
|
|
|
tabcol1, tabcol2 = self.display_parent(father_handle, _('Father'), frel)
|
|
|
|
trow += (tabcol1, tabcol2)
|
|
|
|
mother_handle = family.get_mother_handle()
|
|
|
|
if mother_handle:
|
|
|
|
trow = Html('tr')
|
|
|
|
table += trow
|
|
|
|
tabcol1, tabcol2 = self.display_parent(mother_handle, _('Mother'), mrel)
|
|
|
|
trow += (tabcol1, tabcol2)
|
|
|
|
|
|
|
|
first = False
|
|
|
|
if len(child_ref_list) > 1:
|
|
|
|
childlist = [child_ref.ref for child_ref in child_ref_list]
|
|
|
|
for child_handle in childlist:
|
|
|
|
sibling.add(child_handle) # remember that we've already "seen" this child
|
|
|
|
|
|
|
|
# now that we have all natural siblings, display them...
|
|
|
|
if len(sibling):
|
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', _('Siblings'), class_='ColumnAttribute', inline=True)
|
|
|
|
)
|
|
|
|
table += trow
|
|
|
|
tcell = Html('td', class_='ColumnValue')
|
|
|
|
trow += tcell
|
|
|
|
ordered = Html('ol')
|
|
|
|
tcell += ordered
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-23 05:28:09 +05:30
|
|
|
if birthorder:
|
|
|
|
kids = []
|
|
|
|
kids = sorted(add_birthdate(db, sibling))
|
2009-06-21 08:55:28 +05:30
|
|
|
|
2009-06-23 05:28:09 +05:30
|
|
|
for birth_date, child_handle in kids:
|
|
|
|
if child_handle != self.person.handle:
|
|
|
|
ordered += self.display_child_link(child_handle)
|
|
|
|
|
|
|
|
else:
|
|
|
|
|
|
|
|
for child_handle in sibling:
|
|
|
|
if child_handle != self.person.handle:
|
|
|
|
ordered += self.display_child_link(child_handle)
|
|
|
|
|
|
|
|
# Also try to identify half-siblings
|
|
|
|
half_siblings = set()
|
|
|
|
|
|
|
|
# if we have a known father...
|
|
|
|
showallsiblings = self.report.options['showhalfsiblings']
|
|
|
|
if father_handle and showallsiblings:
|
|
|
|
# 1) get all of the families in which this father is involved
|
|
|
|
# 2) get all of the children from those families
|
|
|
|
# 3) if the children are not already listed as siblings...
|
|
|
|
# 4) then remember those children since we're going to list them
|
|
|
|
father = db.get_person_from_handle(father_handle)
|
|
|
|
for family_handle in father.get_family_handle_list():
|
2009-06-16 00:55:43 +05:30
|
|
|
family = db.get_family_from_handle(family_handle)
|
2009-06-23 05:28:09 +05:30
|
|
|
for half_child_ref in family.get_child_ref_list():
|
|
|
|
half_child_handle = half_child_ref.ref
|
|
|
|
if half_child_handle not in sibling:
|
|
|
|
if half_child_handle != self.person.handle:
|
|
|
|
# we have a new step/half sibling
|
|
|
|
half_siblings.add(half_child_handle)
|
|
|
|
|
|
|
|
# do the same thing with the mother (see "father" just above):
|
|
|
|
if mother_handle and showallsiblings:
|
|
|
|
mother = db.get_person_from_handle(mother_handle)
|
|
|
|
for family_handle in mother.get_family_handle_list():
|
2009-06-16 00:55:43 +05:30
|
|
|
family = db.get_family_from_handle(family_handle)
|
2009-06-23 05:28:09 +05:30
|
|
|
for half_child_ref in family.get_child_ref_list():
|
|
|
|
half_child_handle = half_child_ref.ref
|
|
|
|
if half_child_handle not in sibling:
|
|
|
|
if half_child_handle != self.person.handle:
|
|
|
|
# we have a new half sibling
|
|
|
|
half_siblings.add(half_child_handle)
|
|
|
|
|
|
|
|
# now that we have all half-siblings, display them...
|
|
|
|
if len(half_siblings):
|
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', _('Half Siblings'), class_='ColumnAttribute', inline=True),
|
|
|
|
)
|
|
|
|
table += trow
|
|
|
|
tcell = Html('td', class_='ColumnValue')
|
|
|
|
trow += tcell
|
|
|
|
ordered = Html('ol')
|
|
|
|
tcell += ordered
|
2009-02-22 05:44:08 +05:30
|
|
|
|
2009-06-23 05:28:09 +05:30
|
|
|
if birthorder:
|
|
|
|
kids = []
|
|
|
|
kids = sorted(add_birthdate(db, half_siblings))
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-23 05:28:09 +05:30
|
|
|
for birth_date, child_handle in kids:
|
|
|
|
ordered += self.display_child_link(child_handle)
|
2008-07-06 14:10:47 +05:30
|
|
|
|
2009-06-23 05:28:09 +05:30
|
|
|
else:
|
|
|
|
|
|
|
|
for child_handle in half_siblings:
|
|
|
|
ordered += self.display_child_link(child_handle)
|
|
|
|
|
|
|
|
# get step-siblings
|
|
|
|
if showallsiblings:
|
|
|
|
step_siblings = set()
|
|
|
|
|
|
|
|
# to find the step-siblings, we need to identify
|
|
|
|
# all of the families that can be linked back to
|
|
|
|
# the current person, and then extract the children
|
|
|
|
# from those families
|
|
|
|
all_family_handles = set()
|
|
|
|
all_parent_handles = set()
|
|
|
|
tmp_parent_handles = set()
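# The loop below is a simple worklist traversal: parents still to be
# examined wait in tmp_parent_handles, parents already examined move to
# all_parent_handles, and every family either kind of parent belongs to is
# collected in all_family_handles.  A remarried father, for instance, pulls
# his other family (and that family's other parent) into the sets as well.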
|
|
|
|
|
|
|
|
# first we queue up the parents we know about
|
|
|
|
if mother_handle:
|
|
|
|
tmp_parent_handles.add(mother_handle)
|
|
|
|
if father_handle:
|
|
|
|
tmp_parent_handles.add(father_handle)
|
|
|
|
|
|
|
|
while len(tmp_parent_handles) > 0:
|
|
|
|
# pop the next parent from the set
|
|
|
|
parent_handle = tmp_parent_handles.pop()
|
|
|
|
|
|
|
|
# add this parent to our official list
|
|
|
|
all_parent_handles.add(parent_handle)
|
|
|
|
|
|
|
|
# get all families with this parent
|
|
|
|
parent = db.get_person_from_handle(parent_handle)
|
|
|
|
for family_handle in parent.get_family_handle_list():
|
|
|
|
|
|
|
|
all_family_handles.add(family_handle)
|
|
|
|
|
|
|
|
# we already have 1 parent from this family
|
|
|
|
# (see "parent" above) so now see if we need
|
|
|
|
# to queue up the other parent
|
|
|
|
family = db.get_family_from_handle(family_handle)
|
|
|
|
tmp_mother_handle = family.get_mother_handle()
|
|
|
|
if tmp_mother_handle and \
|
|
|
|
tmp_mother_handle != parent_handle and \
|
|
|
|
tmp_mother_handle not in tmp_parent_handles and \
|
|
|
|
tmp_mother_handle not in all_parent_handles:
|
|
|
|
tmp_parent_handles.add(tmp_mother_handle)
|
|
|
|
tmp_father_handle = family.get_father_handle()
|
|
|
|
if tmp_father_handle and \
|
|
|
|
tmp_father_handle != parent_handle and \
|
|
|
|
tmp_father_handle not in tmp_parent_handles and \
|
|
|
|
tmp_father_handle not in all_parent_handles:
|
|
|
|
tmp_parent_handles.add(tmp_father_handle)
|
|
|
|
|
|
|
|
# once we get here, we have all of the families
|
|
|
|
# that could result in step-siblings; note that
|
|
|
|
# we can only have step-siblings if the number
|
|
|
|
# of families involved is > 1
|
|
|
|
|
|
|
|
if len(all_family_handles) > 1:
|
|
|
|
while len(all_family_handles) > 0:
|
|
|
|
# pop the next family from the set
|
|
|
|
family_handle = all_family_handles.pop()
|
|
|
|
# look in this family for children we haven't yet seen
|
|
|
|
family = db.get_family_from_handle(family_handle)
|
|
|
|
for step_child_ref in family.get_child_ref_list():
|
|
|
|
step_child_handle = step_child_ref.ref
|
|
|
|
if step_child_handle not in sibling and \
|
|
|
|
step_child_handle not in half_siblings and \
|
|
|
|
step_child_handle != self.person.handle:
|
|
|
|
# we have a new step sibling
|
|
|
|
step_siblings.add(step_child_handle)
|
|
|
|
|
2009-06-30 01:34:00 +05:30
|
|
|
# now that we have all step-siblings, display them...
|
|
|
|
if len(step_siblings):
|
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', _('Step Siblings'), class_='ColumnAttribute', inline=True)
|
|
|
|
)
|
|
|
|
table += trow
|
|
|
|
tcell = Html('td', class_='ColumnValue')
|
|
|
|
trow += tcell
|
|
|
|
ordered = Html('ol')
|
|
|
|
tcell += ordered
|
2009-06-23 05:28:09 +05:30
|
|
|
|
2009-06-30 01:34:00 +05:30
|
|
|
if birthorder:
|
|
|
|
kids = []
|
|
|
|
kids = sorted(add_birthdate(db, step_siblings))
|
2009-06-23 05:28:09 +05:30
|
|
|
|
2009-06-30 01:34:00 +05:30
|
|
|
for birth_date, child_handle in kids:
|
|
|
|
ordered += self.display_child_link(child_handle)
|
2009-06-23 05:28:09 +05:30
|
|
|
|
2009-06-30 01:34:00 +05:30
|
|
|
else:
|
2009-06-23 05:28:09 +05:30
|
|
|
|
2009-06-30 01:34:00 +05:30
|
|
|
for child_handle in step_siblings:
|
|
|
|
ordered += self.display_child_link(child_handle)
|
2009-06-23 05:28:09 +05:30
|
|
|
|
|
|
|
# return parents division to its caller
|
|
|
|
return section
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-16 00:55:43 +05:30
|
|
|
def display_ind_families(self):
|
2009-06-11 22:15:30 +05:30
|
|
|
"""
|
|
|
|
Displays a person's relationships ...
|
|
|
|
"""
|
2005-08-18 11:28:28 +05:30
|
|
|
|
|
|
|
family_list = self.person.get_family_handle_list()
|
|
|
|
if not family_list:
|
2009-06-11 22:15:30 +05:30
|
|
|
return None
|
2009-06-21 08:55:28 +05:30
|
|
|
db = self.report.database
|
2009-07-28 17:11:20 +05:30
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
# begin families division and section title
|
2009-06-16 00:55:43 +05:30
|
|
|
with Html('div', class_='subsection', id='families') as section:
|
2009-06-21 08:55:28 +05:30
|
|
|
section += Html('h4', _('Families'), inline=True)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
# begin families table
|
2009-06-16 00:55:43 +05:30
|
|
|
with Html('table', class_='infolist') as table:
|
2009-06-21 08:55:28 +05:30
|
|
|
section += table
|
2009-06-16 00:55:43 +05:30
|
|
|
|
|
|
|
for family_handle in family_list:
|
|
|
|
family = db.get_family_from_handle(family_handle)
|
|
|
|
self.display_partner(family, table)
|
|
|
|
childlist = family.get_child_ref_list()
|
|
|
|
if childlist:
|
2009-06-23 05:28:09 +05:30
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', ' ', class_='ColumnType', inline=True),
|
|
|
|
Html('td', _('Children'), class_='ColumnAttribute', inline=True)
|
|
|
|
)
|
|
|
|
table += trow
|
|
|
|
tcell = Html('td', class_='ColumnValue')
|
|
|
|
trow += tcell
|
2009-06-21 08:55:28 +05:30
|
|
|
ordered = Html('ol')
|
2009-06-23 05:28:09 +05:30
|
|
|
tcell += ordered
|
2009-06-21 08:55:28 +05:30
|
|
|
childlist = [child_ref.ref for child_ref in childlist]
|
|
|
|
|
|
|
|
if self.report.options['birthorder']:
|
2009-06-23 05:28:09 +05:30
|
|
|
kids = []
|
|
|
|
kids = sorted(add_birthdate(db, childlist))
|
2009-06-21 08:55:28 +05:30
|
|
|
|
2009-06-23 05:28:09 +05:30
|
|
|
for birth_date, child_handle in kids:
|
|
|
|
ordered += self.display_child_link(child_handle)
|
|
|
|
else:
|
|
|
|
|
|
|
|
for child_handle in childlist:
|
|
|
|
ordered += self.display_child_link(child_handle)
|
2009-06-21 08:55:28 +05:30
|
|
|
|
2009-08-03 01:36:00 +05:30
|
|
|
# family LDS ordinance list
|
2009-08-03 10:51:00 +05:30
|
|
|
famldslist = family.lds_ord_list
|
|
|
|
if famldslist:
|
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', ' ', class_='ColumnType', inline=True),
|
|
|
|
Html('td', ' ', class_='ColumnAttribute', inline=True),
|
2009-08-11 18:19:27 +05:30
|
|
|
Html('td', self.dump_ordinance(db, family, 'Family'),
|
|
|
|
class_='ColumnValue')
|
2009-08-03 10:51:00 +05:30
|
|
|
)
|
|
|
|
table += trow
|
2009-08-03 01:36:00 +05:30
|
|
|
|
|
|
|
# get family attributes
|
|
|
|
attrlist = family.get_attribute_list()
|
|
|
|
if attrlist:
|
|
|
|
trow = Html('tr') + (
|
2009-08-03 10:51:00 +05:30
|
|
|
Html('td', ' ', class_='ColumnType', inline=True),
|
2009-08-03 01:36:00 +05:30
|
|
|
Html('td', ' ', class_='ColumnAttribute', inline=True),
|
|
|
|
Html('td', _('Attributes'), class_='ColumnAttribute', inline=True)
|
|
|
|
)
|
|
|
|
table += trow
|
|
|
|
|
|
|
|
for attr in attrlist:
|
|
|
|
attrType = str(attr.get_type())
|
|
|
|
if attrType:
|
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', ' ', class_='ColumnValue', inline=True),
|
|
|
|
Html('td', attrType, class_='ColumnValue', inline=True),
|
|
|
|
Html('td', attr.get_value(), class_='ColumnValue', inline=True)
|
|
|
|
)
|
|
|
|
table += trow
|
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
# return section to its caller
|
2009-06-16 00:55:43 +05:30
|
|
|
return section
|
2009-06-11 22:15:30 +05:30
|
|
|
|
2009-07-15 05:23:07 +05:30
|
|
|
def display_partner(self, family, table):
|
2009-06-11 22:15:30 +05:30
|
|
|
"""
|
|
|
|
display an individual's partner
|
|
|
|
"""
|
|
|
|
|
|
|
|
gender = self.person.gender
|
2005-02-01 09:16:29 +05:30
|
|
|
reltype = family.get_relationship()
|
2009-06-16 00:55:43 +05:30
|
|
|
db = self.report.database
|
2005-02-01 09:16:29 +05:30
|
|
|
|
2009-04-09 10:04:24 +05:30
|
|
|
if reltype == FamilyRelType.MARRIED:
|
|
|
|
if gender == Person.FEMALE:
|
2005-02-01 09:16:29 +05:30
|
|
|
relstr = _("Husband")
|
2009-04-09 10:04:24 +05:30
|
|
|
elif gender == Person.MALE:
|
2005-02-01 09:16:29 +05:30
|
|
|
relstr = _("Wife")
|
|
|
|
else:
|
|
|
|
relstr = _("Partner")
|
|
|
|
else:
|
|
|
|
relstr = _("Partner")
|
|
|
|
|
2009-05-30 14:16:12 +05:30
|
|
|
partner_handle = ReportUtils.find_spouse(self.person, family)
|
|
|
|
if partner_handle:
|
2009-06-16 00:55:43 +05:30
|
|
|
partner = db.get_person_from_handle(partner_handle)
|
2009-06-21 08:55:28 +05:30
|
|
|
partner_name = self.get_name(partner)
|
2005-02-01 09:16:29 +05:30
|
|
|
else:
|
2009-06-21 08:55:28 +05:30
|
|
|
partner_name = _("unknown")
|
2009-07-15 05:23:07 +05:30
|
|
|
|
|
|
|
# family relationship type
|
2006-04-23 08:28:53 +05:30
|
|
|
rtype = str(family.get_relationship())
|
2009-07-15 05:23:07 +05:30
|
|
|
trow = Html('tr', class_='BeginFamily') + (
|
|
|
|
Html('td', rtype, class_='ColumnType', inline=True),
|
|
|
|
Html('td', relstr, class_='ColumnAttribute', inline=True)
|
|
|
|
)
|
2009-07-28 17:11:20 +05:30
|
|
|
table += trow
|
2009-07-15 05:23:07 +05:30
|
|
|
tcell = Html('td', class_='ColumnValue')
|
|
|
|
trow += tcell
|
|
|
|
|
2009-07-29 13:28:22 +05:30
|
|
|
# display partner's name
|
2009-05-30 14:16:12 +05:30
|
|
|
if partner_handle:
|
|
|
|
if partner_handle in self.ind_list:
|
2009-06-11 22:15:30 +05:30
|
|
|
url = self.report.build_url_fname_html(partner_handle, 'ppl', True)
|
2009-07-28 17:11:20 +05:30
|
|
|
tcell += self.person_link(url, partner, True, partner.gramps_id)
|
2005-08-18 11:28:28 +05:30
|
|
|
else:
|
2009-07-15 05:23:07 +05:30
|
|
|
tcell += partner_name
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2009-08-03 10:51:00 +05:30
|
|
|
# display family events, such as marriage and divorce events
|
2009-07-15 05:23:07 +05:30
|
|
|
family_events = family.get_event_ref_list()
|
2009-08-03 10:51:00 +05:30
|
|
|
if family_events:
|
2009-07-31 17:00:14 +05:30
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', ' ', class_='ColumnType', inline=True),
|
2009-08-03 03:59:54 +05:30
|
|
|
Html('td', ' ', class_='ColumnAttribute', inline=True),
|
2009-08-03 10:51:00 +05:30
|
|
|
Html('td', self.format_event(family_events), class_='ColumnValue')
|
2009-07-31 17:00:14 +05:30
|
|
|
)
|
|
|
|
table += trow
|
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# return table to its caller
|
2009-07-15 05:23:07 +05:30
|
|
|
return table
|
2008-03-08 17:00:59 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
def pedigree_person(self, person):
|
|
|
|
"""
|
|
|
|
will produce a hyperlink for a pedigree person ...
|
|
|
|
"""
|
|
|
|
|
|
|
|
person_name = self.get_name(person)
|
2008-03-14 02:01:33 +05:30
|
|
|
if person.handle in self.ind_list:
|
2008-03-21 03:54:36 +05:30
|
|
|
url = self.report.build_url_fname_html(person.handle, 'ppl', True)
|
2009-06-21 08:55:28 +05:30
|
|
|
hyper = self.person_link(url, person, name_style=True)
|
2005-08-18 11:28:28 +05:30
|
|
|
else:
|
2009-06-11 22:15:30 +05:30
|
|
|
hyper = person_name
|
|
|
|
|
|
|
|
# return hyperlink to its callers
|
|
|
|
# can be an actual hyperlink or just a person's name
|
|
|
|
return hyper
|
2005-02-04 09:24:48 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
def pedigree_family(self):
|
|
|
|
"""
|
|
|
|
Returns a family pedigree
|
|
|
|
"""
|
2009-06-16 00:55:43 +05:30
|
|
|
db = self.report.database
|
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
ped = []
|
2005-02-04 09:24:48 +05:30
|
|
|
for family_handle in self.person.get_family_handle_list():
|
2009-06-16 00:55:43 +05:30
|
|
|
rel_family = db.get_family_from_handle(family_handle)
|
2008-03-10 01:42:56 +05:30
|
|
|
spouse_handle = ReportUtils.find_spouse(self.person, rel_family)
|
2005-02-04 09:24:48 +05:30
|
|
|
if spouse_handle:
|
2009-06-16 00:55:43 +05:30
|
|
|
spouse = db.get_person_from_handle(spouse_handle)
|
2009-06-11 22:15:30 +05:30
|
|
|
pedsp = (Html('li', class_='spouse') +
|
|
|
|
self.pedigree_person(spouse)
|
|
|
|
)
|
|
|
|
ped += [pedsp]
|
|
|
|
else:
|
|
|
|
pedsp = ped
|
2007-08-12 08:12:22 +05:30
|
|
|
childlist = rel_family.get_child_ref_list()
|
2005-02-04 09:24:48 +05:30
|
|
|
if childlist:
|
2009-06-11 22:15:30 +05:30
|
|
|
with Html('ol') as childol:
|
|
|
|
pedsp += [childol]
|
|
|
|
for child_ref in childlist:
|
2009-06-16 00:55:43 +05:30
|
|
|
child = db.get_person_from_handle(child_ref.ref)
|
2009-06-11 22:15:30 +05:30
|
|
|
childol += (Html('li') +
|
|
|
|
self.pedigree_person(child)
|
|
|
|
)
|
|
|
|
return ped
|
|
|
|
|
|
|
|
def display_event_header(self):
|
|
|
|
"""
|
|
|
|
builds the event header row for display_event_row() and
|
|
|
|
format_event()
|
|
|
|
"""
|
2005-02-01 09:16:29 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# begin table header row
|
2009-06-21 08:55:28 +05:30
|
|
|
trow = Html('tr')
|
2008-03-21 03:54:36 +05:30
|
|
|
|
2009-08-11 18:19:27 +05:30
|
|
|
for (label, colclass) in [
|
2009-08-27 18:51:55 +05:30
|
|
|
(EHEAD, 'Type'),
|
2009-08-11 18:19:27 +05:30
|
|
|
(DHEAD, 'Date'),
|
|
|
|
(PHEAD, 'Place'),
|
|
|
|
(DESCRHEAD, 'Description'),
|
2009-08-21 06:07:59 +05:30
|
|
|
(SHEAD, 'Sources'),
|
2009-08-27 18:51:55 +05:30
|
|
|
(NHEAD, 'Notes') ]:
|
2009-08-11 18:19:27 +05:30
|
|
|
|
2009-08-11 04:21:44 +05:30
|
|
|
trow += Html('th', label, class_='Column%s' % colclass, inline=True)
|
2007-07-19 12:15:25 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# return header row to its caller
|
2009-06-21 08:55:28 +05:30
|
|
|
return trow
|
2007-10-21 02:56:43 +05:30
|
|
|
|
2009-08-21 06:07:59 +05:30
|
|
|
def display_event_row(self, event, event_ref):
|
|
|
|
"""
|
|
|
|
display the event row
|
|
|
|
"""
|
|
|
|
db = self.report.database
|
|
|
|
|
|
|
|
lnk = (self.report.cur_fname, self.page_title, self.gid)
|
|
|
|
descr = event.get_description()
|
|
|
|
place_handle = event.get_place_handle()
|
|
|
|
if place_handle:
|
|
|
|
if place_handle in self.place_list:
|
|
|
|
if lnk not in self.place_list[place_handle]:
|
|
|
|
self.place_list[place_handle].append(lnk)
|
|
|
|
else:
|
|
|
|
self.place_list[place_handle] = [lnk]
|
|
|
|
|
|
|
|
place = self.place_link(place_handle,
|
|
|
|
ReportUtils.place_name(db, place_handle), up=True)
|
|
|
|
else:
|
|
|
|
place = ''
|
|
|
|
|
|
|
|
# Event/ Type
|
|
|
|
evt_name = str(event.get_type())
|
|
|
|
|
|
|
|
if event_ref.get_role() == EventRoleType.PRIMARY:
|
|
|
|
eventtype = u"%(evt_name)s" % locals()
|
|
|
|
else:
|
|
|
|
evt_role = event_ref.get_role()
|
|
|
|
eventtype = u"%(evt_name)s (%(evt_role)s)" % locals()
|
|
|
|
eventtype = eventtype or ' '
|
|
|
|
|
|
|
|
# Place
|
|
|
|
place_handle = event.get_place_handle()
|
|
|
|
if place_handle:
|
|
|
|
|
|
|
|
lnk = (self.report.cur_fname, self.page_title, self.gid)
|
|
|
|
if place_handle in self.place_list:
|
|
|
|
if lnk not in self.place_list[place_handle]:
|
|
|
|
self.place_list[place_handle].append(lnk)
|
|
|
|
else:
|
|
|
|
self.place_list[place_handle] = [lnk]
|
|
|
|
|
|
|
|
place = self.place_link(place_handle,
|
|
|
|
ReportUtils.place_name(db, place_handle), up=True)
|
|
|
|
else:
|
|
|
|
place = None
|
|
|
|
place = place or ' '
|
|
|
|
|
|
|
|
# get event and event_ref notes
|
|
|
|
notelist = event.get_note_list()
|
|
|
|
notelist.extend(event_ref.get_note_list() )
|
|
|
|
|
|
|
|
# begin event table row
|
|
|
|
trow = Html('tr')
|
|
|
|
|
|
|
|
for (colclass, data) in [
|
|
|
|
['EventType', eventtype],
|
|
|
|
['Date', _dd.display(event.get_date_object() )],
|
|
|
|
['Place', place],
|
|
|
|
['Description', event.get_description()],
|
|
|
|
['Source', self.get_citation_links(event.get_source_references() )],
|
|
|
|
['Notes', self.dump_notes(notelist)] ]:
|
|
|
|
|
|
|
|
data = data or ' '
|
|
|
|
trow += Html('td', data, class_='Column%s' % colclass, inline=True)
|
|
|
|
|
|
|
|
# return events table row to its callers
|
|
|
|
return trow
|
|
|
|
|
2009-07-15 05:23:07 +05:30
|
|
|
def format_event(self, eventlist):
|
2009-06-16 00:55:43 +05:30
|
|
|
db = self.report.database
|
|
|
|
|
2009-07-15 05:23:07 +05:30
|
|
|
# begin eventlist table and table header
|
2009-06-11 22:15:30 +05:30
|
|
|
with Html('table', class_='infolist eventtable') as table:
|
2009-07-15 05:23:07 +05:30
|
|
|
thead = Html('thead')
|
|
|
|
table += thead
|
|
|
|
|
|
|
|
# attach event header row
|
|
|
|
thead += self.display_event_header()
|
|
|
|
|
|
|
|
# begin table body
|
|
|
|
tbody = Html('tbody')
|
|
|
|
table += tbody
|
|
|
|
|
2009-08-11 18:19:27 +05:30
|
|
|
# ordered list
|
|
|
|
ordered = Html('ol')
|
|
|
|
tbody += ordered
|
|
|
|
|
2009-07-15 05:23:07 +05:30
|
|
|
for event_ref in eventlist:
|
|
|
|
|
|
|
|
event = db.get_event_from_handle(event_ref.ref)
|
2009-08-11 12:17:42 +05:30
|
|
|
evtType = str(event.get_type() )
|
2009-07-15 05:23:07 +05:30
|
|
|
|
|
|
|
# add event body row
|
2009-08-11 18:19:27 +05:30
|
|
|
ordered += self.display_event_row(event, event_ref )
|
|
|
|
|
|
|
|
# attributes list
|
|
|
|
attrlist = event.get_attribute_list()
|
|
|
|
attrlist.extend(event_ref.get_attribute_list() )
|
|
|
|
if attrlist:
|
|
|
|
ordered += self.dump_attributes(attrlist)
|
2009-07-15 05:23:07 +05:30
|
|
|
|
|
|
|
# return table to its callers
|
2009-06-11 22:15:30 +05:30
|
|
|
return table
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2009-06-30 01:34:00 +05:30
|
|
|
class RepositoryListPage(BasePage):
|
|
|
|
"""
|
|
|
|
Will create the repository list page
|
|
|
|
"""
|
|
|
|
|
|
|
|
def __init__(self, report, title, repos_dict, keys):
|
|
|
|
BasePage.__init__(self, report, title)
|
|
|
|
|
|
|
|
db = report.database
|
|
|
|
of = self.report.create_file('repositories')
|
|
|
|
repolistpage, body = self.write_header(_('Repositories'))
|
|
|
|
|
|
|
|
# begin RepositoryList division
|
2009-07-15 05:23:07 +05:30
|
|
|
with Html('div', class_='content', id='RepositoryList') as repositorylist:
|
|
|
|
body += repositorylist
|
2009-06-30 01:34:00 +05:30
|
|
|
|
|
|
|
msg = _("This page contains an index of all the repositories in the "
|
2009-07-15 05:23:07 +05:30
|
|
|
"database, sorted by their title. Clicking on a repositories’s "
|
|
|
|
"title will take you to that repositories’s page.")
|
|
|
|
repositorylist += Html('p', msg, id='description')
|
2009-06-30 01:34:00 +05:30
|
|
|
|
|
|
|
# begin repositories table and table head
|
|
|
|
with Html('table', class_='infolist repolist') as table:
|
2009-07-15 05:23:07 +05:30
|
|
|
repositorylist += table
|
2009-06-30 01:34:00 +05:30
|
|
|
|
|
|
|
thead = Html('thead')
|
|
|
|
table += thead
|
2009-07-15 05:23:07 +05:30
|
|
|
|
2009-06-30 01:34:00 +05:30
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('th', ' ', class_='ColumnRowLabel', inline=True),
|
|
|
|
Html('th', _('Type'), class_='ColumnType', inline=True),
|
|
|
|
Html('th', _('Name'), class_='ColumnName', inline=True)
|
|
|
|
)
|
|
|
|
thead += trow
|
|
|
|
|
|
|
|
# begin table body
|
|
|
|
tbody = Html('tbody')
|
|
|
|
table += tbody
|
|
|
|
|
|
|
|
index = 0
|
|
|
|
for index, key in enumerate(keys):
|
|
|
|
(repo, handle) = repos_dict[key]
|
|
|
|
|
|
|
|
trow = Html('tr')
|
|
|
|
tbody += trow
|
|
|
|
|
|
|
|
# index number
|
|
|
|
tcell = Html('td', index+1, class_='ColumnRowLabel', inline=True)
|
|
|
|
trow += tcell
|
|
|
|
|
|
|
|
# repository type
|
|
|
|
rtype = repo.type.xml_str()
|
2009-08-29 10:41:23 +05:30
|
|
|
for xtype in RepositoryType._DATAMAP:
|
|
|
|
if rtype == xtype[2]:
|
|
|
|
rtype = xtype[1]
|
|
|
|
break
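# Note: each row of RepositoryType._DATAMAP appears to be a
# (value, user-visible string, xml string) tuple, so matching on the xml
# string and keeping element [1] swaps the stored value for the
# user-visible repository type.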
|
|
|
|
|
2009-06-30 01:34:00 +05:30
|
|
|
if rtype:
|
2009-07-15 05:23:07 +05:30
|
|
|
tcell = Html('td', rtype, class_='ColumnType', inline=True)
|
2009-06-30 01:34:00 +05:30
|
|
|
trow += tcell
|
|
|
|
|
|
|
|
# repository name and hyperlink
|
|
|
|
repo_title = html_escape(repo.name)
|
|
|
|
if repo_title:
|
|
|
|
tcell = Html('td', class_='ColumnName') + \
|
|
|
|
self.repository_link(handle, repo_title, repo.gramps_id)
|
|
|
|
trow += tcell
|
|
|
|
|
|
|
|
# add clearline for proper styling
|
|
|
|
# add footer section
|
|
|
|
footer = self.write_footer()
|
|
|
|
body += (fullclear, footer)
|
|
|
|
|
|
|
|
# send page out for processing
|
|
|
|
# and close the file
|
|
|
|
self.mywriter(repolistpage, of)
|
|
|
|
|
|
|
|
class RepositoryPage(BasePage):
|
|
|
|
"""
|
|
|
|
will create the individual Repository Pages
|
|
|
|
"""
|
|
|
|
|
|
|
|
def __init__(self, report, title, repo, handle):
|
|
|
|
BasePage.__init__(self, report, title)
|
|
|
|
db = report.database
|
|
|
|
|
|
|
|
of = self.report.create_file(handle, 'repo')
|
|
|
|
self.up = True
|
2009-07-15 05:23:07 +05:30
|
|
|
repositorypage, body = self.write_header(_('Repositories'))
|
2009-06-30 01:34:00 +05:30
|
|
|
|
|
|
|
# begin RepositoryDetail division and page title
|
2009-07-15 05:23:07 +05:30
|
|
|
with Html('div', class_='content', id='RepositoryDetail') as repositorydetail:
|
|
|
|
body += repositorydetail
|
2009-06-30 01:34:00 +05:30
|
|
|
|
2009-07-15 05:23:07 +05:30
|
|
|
# repository name
|
|
|
|
repositorydetail += Html('h3', repo.name, inline=True)
|
|
|
|
|
|
|
|
# begin repository table
|
|
|
|
with Html('table', class_='infolist repolist') as table:
|
|
|
|
repositorydetail += table
|
|
|
|
|
|
|
|
# repository type
|
|
|
|
trow = Html('tr') + (
|
2009-08-13 07:20:13 +05:30
|
|
|
Html('td', _('Type'), class_='ColumnType', inline=True),
|
|
|
|
Html('td', str(repo.type), class_='ColumnAttribute', inline=True)
|
2009-07-15 05:23:07 +05:30
|
|
|
)
|
|
|
|
table += trow
|
|
|
|
|
2009-08-13 07:20:13 +05:30
|
|
|
if not self.noid:
|
|
|
|
# repo gramps id
|
|
|
|
trow = Html('tr') + (
|
|
|
|
Html('td', _('GRAMPS ID'), class_='ColumnType', inline=True),
|
|
|
|
Html('td', repo.gramps_id, class_='ColumnAttribute', inline=True)
|
|
|
|
)
|
|
|
|
table += trow
|
2009-07-15 05:23:07 +05:30
|
|
|
|
2009-08-13 07:20:13 +05:30
|
|
|
# repository: address(es)
|
2009-07-28 17:11:20 +05:30
|
|
|
addresses = self.write_out_addresses(repo)
|
2009-07-15 05:23:07 +05:30
|
|
|
if addresses is not None:
|
|
|
|
repositorydetail += addresses
|
|
|
|
|
|
|
|
# repository: urllist
|
|
|
|
urllist = self.display_url_list(repo.get_url_list())
|
|
|
|
if urllist is not None:
|
|
|
|
repositorydetail += urllist
|
|
|
|
|
|
|
|
# repository: note list
|
|
|
|
notelist = self.display_note_list(repo.get_note_list())
|
|
|
|
if notelist is not None:
|
|
|
|
repositorydetail += notelist
|
2009-06-30 01:34:00 +05:30
|
|
|
|
|
|
|
# add clearline for proper styling
|
|
|
|
# add footer section
|
|
|
|
footer = self.write_footer()
|
|
|
|
body += (fullclear, footer)
|
|
|
|
|
|
|
|
# send page out for processing
|
|
|
|
# and close the file
|
|
|
|
self.mywriter(repositorypage, of)
|
|
|
|
|
2008-02-10 09:39:09 +05:30
|
|
|
class NavWebReport(Report):
|
2008-03-14 03:58:22 +05:30
|
|
|
|
2008-02-20 10:22:10 +05:30
|
|
|
def __init__(self, database, options):
|
2005-02-01 09:16:29 +05:30
|
|
|
"""
|
2008-02-24 19:25:55 +05:30
|
|
|
Create WebReport object that produces the report.
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2005-02-01 09:16:29 +05:30
|
|
|
The arguments are:
|
|
|
|
|
|
|
|
database - the GRAMPS database instance
|
2008-02-10 09:39:09 +05:30
|
|
|
options - instance of the Options class for this report
|
2005-02-01 09:16:29 +05:30
|
|
|
"""
|
2008-02-20 10:22:10 +05:30
|
|
|
Report.__init__(self, database, options)
|
2008-02-10 09:39:09 +05:30
|
|
|
menu = options.menu
|
2008-03-14 03:58:22 +05:30
|
|
|
self.options = {}
|
2008-02-10 09:39:09 +05:30
|
|
|
|
|
|
|
for optname in menu.get_all_option_names():
|
|
|
|
menuopt = menu.get_option_by_name(optname)
|
2008-03-14 03:58:22 +05:30
|
|
|
self.options[optname] = menuopt.get_value()
|
2007-08-12 08:12:22 +05:30
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
if not self.options['incpriv']:
|
2007-08-12 08:12:22 +05:30
|
|
|
self.database = PrivateProxyDb(database)
|
|
|
|
else:
|
|
|
|
self.database = database
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
livinginfo = self.options['living']
|
|
|
|
yearsafterdeath = self.options['yearsafterdeath']
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2008-06-05 09:15:14 +05:30
|
|
|
if livinginfo != _INCLUDE_LIVING_VALUE:
|
2007-08-27 01:49:18 +05:30
|
|
|
self.database = LivingProxyDb(self.database,
|
2008-06-05 09:15:14 +05:30
|
|
|
livinginfo,
|
2007-08-27 01:49:18 +05:30
|
|
|
None,
|
|
|
|
yearsafterdeath)
|
2005-02-01 09:16:29 +05:30
|
|
|
|
2008-02-10 09:39:09 +05:30
|
|
|
filters_option = menu.get_option_by_name('filter')
|
|
|
|
self.filter = filters_option.get_filter()
|
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
self.copyright = self.options['cright']
|
2008-03-18 02:39:16 +05:30
|
|
|
self.target_path = self.options['target']
|
|
|
|
self.ext = self.options['ext']
|
2008-03-14 03:58:22 +05:30
|
|
|
self.css = self.options['css']
|
2009-06-30 01:34:00 +05:30
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
self.title = self.options['title']
|
|
|
|
self.inc_gallery = self.options['gallery']
|
2008-03-18 02:39:16 +05:30
|
|
|
self.inc_contact = self.options['contactnote'] or \
|
|
|
|
self.options['contactimg']
|
2009-05-30 14:16:12 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# name format option
|
|
|
|
self.name_format = self.options['name_format']
|
|
|
|
|
2009-08-21 06:07:59 +05:30
|
|
|
# create event pages or not?
|
|
|
|
self.inc_events = self.options['inc_events']
|
|
|
|
|
2009-06-30 01:34:00 +05:30
|
|
|
# include repository page or not?
|
|
|
|
self.inc_repository = self.options['inc_repository']
|
|
|
|
|
2009-05-30 14:16:12 +05:30
|
|
|
# Download Options Tab
|
2008-03-14 03:58:22 +05:30
|
|
|
self.inc_download = self.options['incdownload']
|
2009-07-15 05:23:07 +05:30
|
|
|
self.downloadnote = self.options['downloadnote']
|
2009-05-30 14:16:12 +05:30
|
|
|
self.dl_fname1 = self.options['down_fname1']
|
|
|
|
self.dl_descr1 = self.options['dl_descr1']
|
|
|
|
self.dl_fname2 = self.options['down_fname2']
|
|
|
|
self.dl_descr2 = self.options['dl_descr2']
|
|
|
|
self.dl_copy = self.options['dl_cright']
|
|
|
|
|
2009-06-30 01:34:00 +05:30
|
|
|
self.encoding = self.options['encoding']
|
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
self.use_archive = self.options['archive']
|
2008-03-18 02:39:16 +05:30
|
|
|
self.use_intro = self.options['intronote'] or \
|
|
|
|
self.options['introimg']
|
|
|
|
self.use_home = self.options['homenote'] or \
|
|
|
|
self.options['homeimg']
|
|
|
|
self.use_contact = self.options['contactnote'] or \
|
|
|
|
self.options['contactimg']
|
2009-02-03 13:31:31 +05:30
|
|
|
|
2009-02-22 05:44:08 +05:30
|
|
|
# include the gender graphics or not?
|
2009-01-28 07:09:08 +05:30
|
|
|
self.graph = self.options['graph']
|
2008-03-18 02:39:16 +05:30
|
|
|
|
2009-06-21 08:55:28 +05:30
|
|
|
# whether to display children in birth order or in entry order
|
|
|
|
self.birthorder = self.options['birthorder']
|
|
|
|
|
2008-03-18 02:39:16 +05:30
|
|
|
if self.use_home:
|
|
|
|
self.index_fname = "index"
|
|
|
|
self.surname_fname = "surnames"
|
|
|
|
self.intro_fname = "introduction"
|
|
|
|
elif self.use_intro:
|
|
|
|
self.index_fname = None
|
|
|
|
self.surname_fname = "surnames"
|
|
|
|
self.intro_fname = "index"
|
|
|
|
else:
|
|
|
|
self.index_fname = None
|
|
|
|
self.surname_fname = "index"
|
|
|
|
self.intro_fname = None
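# In short, whichever of the home, introduction or surname-list pages comes
# first in that order of preference is written out as "index", so the
# generated site always has an index page to land on.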
|
2008-03-14 03:58:22 +05:30
|
|
|
|
|
|
|
self.archive = None
|
2009-02-03 13:31:31 +05:30
|
|
|
self.cur_fname = None # Internal use. The name of the output file,
|
|
|
|
# to be used for the tar archive.
|
2008-03-21 03:54:36 +05:30
|
|
|
self.string_io = None
|
2008-03-14 03:58:22 +05:30
|
|
|
if self.use_archive:
|
|
|
|
self.html_dir = None
|
|
|
|
else:
|
|
|
|
self.html_dir = self.target_path
|
|
|
|
self.warn_dir = True # Only give warning once.
|
2008-03-15 02:37:35 +05:30
|
|
|
self.photo_list = {}
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2005-02-01 09:16:29 +05:30
|
|
|
def write_report(self):
|
2005-08-18 11:28:28 +05:30
|
|
|
if not self.use_archive:
|
|
|
|
dir_name = self.target_path
|
2008-06-16 20:31:46 +05:30
|
|
|
if dir_name is None:
|
2005-08-18 11:28:28 +05:30
|
|
|
dir_name = os.getcwd()
|
|
|
|
elif not os.path.isdir(dir_name):
|
|
|
|
parent_dir = os.path.dirname(dir_name)
|
|
|
|
if not os.path.isdir(parent_dir):
|
|
|
|
ErrorDialog(_("Neither %s nor %s are directories") % \
|
2008-03-10 01:42:56 +05:30
|
|
|
(dir_name, parent_dir))
|
2005-02-01 09:16:29 +05:30
|
|
|
return
|
2005-08-18 11:28:28 +05:30
|
|
|
else:
|
|
|
|
try:
|
|
|
|
os.mkdir(dir_name)
|
|
|
|
except IOError, value:
|
|
|
|
ErrorDialog(_("Could not create the directory: %s") % \
|
|
|
|
dir_name + "\n" + value[1])
|
|
|
|
return
|
|
|
|
except:
|
|
|
|
ErrorDialog(_("Could not create the directory: %s") % \
|
|
|
|
dir_name)
|
|
|
|
return
|
2005-02-01 09:16:29 +05:30
|
|
|
|
|
|
|
try:
|
2005-08-18 11:28:28 +05:30
|
|
|
image_dir_name = os.path.join(dir_name, 'images')
|
|
|
|
if not os.path.isdir(image_dir_name):
|
|
|
|
os.mkdir(image_dir_name)
|
|
|
|
|
|
|
|
image_dir_name = os.path.join(dir_name, 'thumb')
|
|
|
|
if not os.path.isdir(image_dir_name):
|
|
|
|
os.mkdir(image_dir_name)
|
2005-02-01 09:16:29 +05:30
|
|
|
except IOError, value:
|
|
|
|
ErrorDialog(_("Could not create the directory: %s") % \
|
2005-02-13 09:24:47 +05:30
|
|
|
image_dir_name + "\n" + value[1])
|
2005-02-01 09:16:29 +05:30
|
|
|
return
|
|
|
|
except:
|
|
|
|
ErrorDialog(_("Could not create the directory: %s") % \
|
2005-02-13 09:24:47 +05:30
|
|
|
image_dir_name)
|
2005-02-01 09:16:29 +05:30
|
|
|
return
|
2005-12-06 12:08:09 +05:30
|
|
|
else:
|
|
|
|
if os.path.isdir(self.target_path):
|
|
|
|
ErrorDialog(_('Invalid file name'),
|
|
|
|
_('The archive file must be a file, not a directory'))
|
|
|
|
return
|
|
|
|
try:
|
2008-03-14 03:58:22 +05:30
|
|
|
self.archive = tarfile.open(self.target_path, "w:gz")
|
2008-03-10 01:42:56 +05:30
|
|
|
except (OSError, IOError), value:
|
2005-12-06 12:08:09 +05:30
|
|
|
ErrorDialog(_("Could not create %s") % self.target_path,
|
|
|
|
value)
|
|
|
|
return
|
|
|
|
|
2009-06-19 20:53:58 +05:30
|
|
|
self.progress = ProgressMeter(_("Narrated Web Site Report"), '')
|
2005-12-06 12:08:09 +05:30
|
|
|
|
|
|
|
# Build the person list
|
2007-08-27 01:49:18 +05:30
|
|
|
ind_list = self.build_person_list()
|
2005-12-06 12:08:09 +05:30
|
|
|
|
2009-01-28 07:09:08 +05:30
|
|
|
# copy all of the necessary files
|
|
|
|
self.copy_narrated_files()
|
2008-03-06 18:37:37 +05:30
|
|
|
|
2005-12-06 12:08:09 +05:30
|
|
|
place_list = {}
|
|
|
|
source_list = {}
|
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
self.base_pages()
|
|
|
|
self.person_pages(ind_list, place_list, source_list)
|
|
|
|
self.surname_pages(ind_list)
|
2009-08-21 06:07:59 +05:30
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
self.place_pages(place_list, source_list)
|
2009-08-21 06:07:59 +05:30
|
|
|
|
|
|
|
if self.inc_events:
|
|
|
|
self.event_pages(ind_list)
|
|
|
|
|
2008-08-15 02:33:50 +05:30
|
|
|
self.source_pages(source_list)
|
2005-12-06 12:08:09 +05:30
|
|
|
if self.inc_gallery:
|
2008-03-14 03:58:22 +05:30
|
|
|
self.gallery_pages(source_list)
|
2009-06-30 01:34:00 +05:30
|
|
|
|
2008-08-15 02:33:50 +05:30
|
|
|
# Build source pages a second time to pick up sources referenced
|
|
|
|
# by galleries
|
2008-03-15 05:20:54 +05:30
|
|
|
self.source_pages(source_list)
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2009-06-30 01:34:00 +05:30
|
|
|
# repository pages
|
2009-07-15 05:23:07 +05:30
|
|
|
repolist = self.database.get_repository_handles()
|
|
|
|
if len(repolist):
|
|
|
|
self.repository_pages(repolist)
|
2009-06-30 01:34:00 +05:30
|
|
|
|
2009-07-15 05:23:07 +05:30
|
|
|
# if an archive is being used, close it
|
2008-03-14 03:58:22 +05:30
|
|
|
if self.archive:
|
|
|
|
self.archive.close()
|
2005-12-06 12:08:09 +05:30
|
|
|
self.progress.close()
|
2005-02-28 05:13:20 +05:30
|
|
|
|
2005-12-06 12:08:09 +05:30
|
|
|
def build_person_list(self):
|
|
|
|
"""
|
|
|
|
Builds the person list. Gets all the handles from the database
|
2008-03-14 03:58:22 +05:30
|
|
|
and then applies the chosen filter.
|
2005-12-06 12:08:09 +05:30
|
|
|
"""
|
|
|
|
|
|
|
|
# gets the person list and applies the requested filter
|
2009-07-04 01:53:41 +05:30
|
|
|
|
|
|
|
ind_list = self.database.iter_person_handles()
|
|
|
|
self.progress.set_pass(_('Applying Filter...'), self.database.get_number_of_people())
|
2008-12-16 03:12:58 +05:30
|
|
|
ind_list = self.filter.apply(self.database, ind_list, self.progress)
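# The chosen filter is applied to the full handle list here; the progress
# meter is passed along so this pass can be tracked while filtering.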
|
2009-01-27 03:52:01 +05:30
|
|
|
|
2007-08-27 01:49:18 +05:30
|
|
|
return ind_list
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2009-01-28 07:09:08 +05:30
|
|
|
def copy_narrated_files(self):
|
2005-12-06 12:08:09 +05:30
|
|
|
"""
|
2009-01-29 07:43:48 +05:30
|
|
|
Copy all of the CSS and image files for the Narrated Web report.
|
2005-12-06 12:08:09 +05:30
|
|
|
"""
|
2008-03-14 03:58:22 +05:30
|
|
|
|
2009-02-04 13:01:15 +05:30
|
|
|
# copy behaviour stylesheet
|
|
|
|
fname = os.path.join(const.DATA_DIR, "behaviour.css")
|
|
|
|
self.copy_file(fname, "behaviour.css", "styles")
|
|
|
|
|
2009-01-28 07:09:08 +05:30
|
|
|
# copy screen stylesheet
|
2009-02-11 02:21:39 +05:30
|
|
|
if self.css:
|
|
|
|
fname = os.path.join(const.DATA_DIR, self.css)
|
2009-02-26 13:55:45 +05:30
|
|
|
self.copy_file(fname, _NARRATIVESCREEN, "styles")
|
2009-01-28 07:09:08 +05:30
|
|
|
|
|
|
|
# copy printer stylesheet
|
2008-07-17 17:24:33 +05:30
|
|
|
fname = os.path.join(const.DATA_DIR, "Web_Print-Default.css")
|
2009-02-26 13:55:45 +05:30
|
|
|
self.copy_file(fname, _NARRATIVEPRINT, "styles")
|
2009-01-28 07:09:08 +05:30
|
|
|
|
|
|
|
imgs = []
|
|
|
|
|
2009-03-26 16:47:57 +05:30
|
|
|
# Mainz stylesheet graphics
|
|
|
|
# will only be used if Mainz is selected as the stylesheet
|
|
|
|
Mainz_images = ["Web_Mainz_Bkgd.png", "Web_Mainz_Header.png",
|
|
|
|
"Web_Mainz_Mid.png", "Web_Mainz_MidLight.png"]
|
|
|
|
|
2009-01-28 07:09:08 +05:30
|
|
|
# Copy Mainz Style Images
|
|
|
|
if self.css == "Web_Mainz.css":
|
2009-03-26 16:47:57 +05:30
|
|
|
imgs += Mainz_images
|
2009-01-28 07:09:08 +05:30
|
|
|
|
|
|
|
# Copy the Creative Commons icon if the Creative Commons
|
|
|
|
# license is requested
|
|
|
|
if 0 < self.copyright < len(_CC):
|
|
|
|
imgs += ["somerights20.gif"]
|
|
|
|
|
|
|
|
# include GRAMPS favicon
|
|
|
|
imgs += ["favicon.ico"]
|
|
|
|
|
2009-02-04 13:01:15 +05:30
|
|
|
# we need the blank image gif needed by behaviour.css
|
|
|
|
imgs += ["blank.gif"]
|
|
|
|
|
2009-01-28 07:09:08 +05:30
|
|
|
# copy the Ancestor Tree graphics if needed
|
|
|
|
if self.graph:
|
|
|
|
imgs += ["Web_Gender_Female.png",
|
|
|
|
"Web_Gender_FemaleFFF.png",
|
|
|
|
"Web_Gender_Male.png",
|
|
|
|
"Web_Gender_MaleFFF.png"]
|
|
|
|
|
|
|
|
for f in imgs:
|
|
|
|
from_path = os.path.join(const.IMAGE_DIR, f)
|
|
|
|
self.copy_file(from_path, f, "images")
|
2005-12-06 12:08:09 +05:30
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
def person_pages(self, ind_list, place_list, source_list):
|
2005-12-06 12:08:09 +05:30
|
|
|
|
2008-03-10 01:42:56 +05:30
|
|
|
self.progress.set_pass(_('Creating individual pages'), len(ind_list) + 1)
|
2007-07-19 12:15:25 +05:30
|
|
|
self.progress.step() # otherwise the progress indicator sits at 100%
|
|
|
|
# for a short while from the last step we did,
|
|
|
|
# which was to apply the privacy filter
|
2005-12-06 12:08:09 +05:30
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
IndividualListPage(self, self.title, ind_list)
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2005-02-01 09:16:29 +05:30
|
|
|
for person_handle in ind_list:
|
2005-12-06 12:08:09 +05:30
|
|
|
self.progress.step()
|
2005-02-01 09:16:29 +05:30
|
|
|
person = self.database.get_person_from_handle(person_handle)
|
2008-03-14 03:58:22 +05:30
|
|
|
IndividualPage(self, self.title, person, ind_list, place_list, source_list)
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
def surname_pages(self, ind_list):
|
2005-12-06 12:08:09 +05:30
|
|
|
"""
|
|
|
|
Generates the surname related pages from list of individual
|
|
|
|
people.
|
|
|
|
"""
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2008-03-10 01:42:56 +05:30
|
|
|
local_list = sort_people(self.database, ind_list)
|
2009-01-27 03:52:01 +05:30
|
|
|
|
2008-03-10 01:42:56 +05:30
|
|
|
self.progress.set_pass(_("Creating surname pages"), len(local_list))
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2008-03-18 02:39:16 +05:30
|
|
|
SurnameListPage(self, self.title, ind_list, SurnameListPage.ORDER_BY_NAME, self.surname_fname)
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
SurnameListPage(self, self.title, ind_list, SurnameListPage.ORDER_BY_COUNT, "surnames_count")
|
2005-02-13 09:24:47 +05:30
|
|
|
|
2008-02-24 19:25:55 +05:30
|
|
|
for (surname, handle_list) in local_list:
|
2009-06-11 22:15:30 +05:30
|
|
|
SurnamePage(self, self.title, surname, handle_list, ind_list)
|
2005-12-06 12:08:09 +05:30
|
|
|
self.progress.step()
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
def source_pages(self, source_list):
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2008-03-10 01:42:56 +05:30
|
|
|
self.progress.set_pass(_("Creating source pages"), len(source_list))
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
SourceListPage(self, self.title, source_list.keys())
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2008-05-21 00:54:03 +05:30
|
|
|
for key in source_list:
|
2008-03-14 03:58:22 +05:30
|
|
|
SourcePage(self, self.title, key, source_list)
|
2005-12-06 12:08:09 +05:30
|
|
|
self.progress.step()
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2009-08-21 06:07:59 +05:30
|
|
|
def get_event_type(self, event, event_ref):
|
|
|
|
""" return the type of an event """
|
|
|
|
|
|
|
|
# Event/ Type
|
|
|
|
evt_name = str(event.get_type())
|
|
|
|
|
|
|
|
if event_ref.get_role() == EventRoleType.PRIMARY:
|
|
|
|
evt_type = u"%(evt_name)s" % locals()
|
|
|
|
else:
|
|
|
|
evt_role = event_ref.get_role()
|
|
|
|
evt_type = u"%(evt_name)s (%(evt_role)s)" % locals()
|
|
|
|
evt_type = evt_type or ' '
|
|
|
|
|
|
|
|
# return event type to its callers
|
|
|
|
return evt_type
|
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
def place_pages(self, place_list, source_list):
|
2005-12-06 12:08:09 +05:30
|
|
|
|
2008-03-10 01:42:56 +05:30
|
|
|
self.progress.set_pass(_("Creating place pages"), len(place_list))
|
2005-12-06 12:08:09 +05:30
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
PlaceListPage(self, self.title, place_list, source_list)
|
2005-12-06 12:08:09 +05:30
|
|
|
|
2009-08-15 20:24:17 +05:30
|
|
|
for place in place_list:
|
2008-03-14 03:58:22 +05:30
|
|
|
PlacePage(self, self.title, place, source_list, place_list)
|
2005-12-06 12:08:09 +05:30
|
|
|
self.progress.step()
|
|
|
|
|
2009-08-21 06:07:59 +05:30
|
|
|
def event_pages(self, ind_list):
|
|
|
|
"""
|
2009-08-27 18:51:55 +05:30
|
|
|
Produce a dump of all the events, sorted by the person's surname and event type,
|
|
|
|
and then by date if needed.
|
2009-08-21 06:07:59 +05:30
|
|
|
"""
|
|
|
|
db = self.database
|
2009-08-27 18:51:55 +05:30
|
|
|
|
|
|
|
# a list of [sort_name, person, event_list] entries, one per person
|
|
|
|
event_dict = []
|
2009-08-21 06:07:59 +05:30
|
|
|
|
|
|
|
for person_handle in ind_list:
|
|
|
|
person = db.get_person_from_handle(person_handle)
|
|
|
|
|
2009-08-27 18:51:55 +05:30
|
|
|
# begin events list for each person
|
|
|
|
event_list = []
|
|
|
|
|
2009-08-21 06:07:59 +05:30
|
|
|
# get sort name for sorting later
|
|
|
|
last_name = person.get_primary_name().get_surname()
|
|
|
|
first_name = person.get_primary_name().get_first_name()
|
|
|
|
sort_name = ', '.join([last_name, first_name])
|
|
|
|
|
|
|
|
for family_handle in person.get_family_handle_list():
|
|
|
|
family = db.get_family_from_handle(family_handle)
|
|
|
|
|
|
|
|
for evt_ref in family.get_event_ref_list():
|
|
|
|
event = db.get_event_from_handle(evt_ref.ref)
|
|
|
|
|
|
|
|
# get event type
|
|
|
|
evt_type = self.get_event_type(event, evt_ref)
|
|
|
|
|
2009-08-27 18:51:55 +05:30
|
|
|
# get sort date as year/month/day or 0000/00/00
|
2009-08-21 06:07:59 +05:30
|
|
|
event_date = event.get_date_object()
|
2009-08-27 18:51:55 +05:30
|
|
|
year = event_date.get_year() or 0
|
|
|
|
month = event_date.get_month() or 0
|
|
|
|
day = event_date.get_day() or 0
|
|
|
|
sort_date = '%04d/%02d/%02d' % (year, month, day)
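# e.g. an event dated 14 May 1879 sorts as '1879/05/14', while a
# completely unknown date sorts first as '0000/00/00'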
|
2009-08-21 06:07:59 +05:30
|
|
|
|
2009-08-27 18:51:55 +05:30
|
|
|
# add event data
|
|
|
|
event_list.append([evt_type, sort_date, event, evt_ref])
|
2009-08-21 06:07:59 +05:30
|
|
|
|
|
|
|
for evt_ref in person.get_primary_event_ref_list():
|
|
|
|
event = db.get_event_from_handle(evt_ref.ref)
|
|
|
|
|
|
|
|
# get event type
|
|
|
|
evt_type = self.get_event_type(event, evt_ref)
|
|
|
|
|
2009-08-27 18:51:55 +05:30
|
|
|
# get sort date as year/month/day or 0000/00/00
|
2009-08-21 06:07:59 +05:30
|
|
|
event_date = event.get_date_object()
|
2009-08-27 18:51:55 +05:30
|
|
|
year = event_date.get_year() or 0
|
|
|
|
month = event_date.get_month() or 0
|
|
|
|
day = event_date.get_day() or 0
|
|
|
|
sort_date = '%04d/%02d/%02d' % (year, month, day)
|
2009-08-21 06:07:59 +05:30
|
|
|
|
2009-08-27 18:51:55 +05:30
|
|
|
# add event data
|
|
|
|
event_list.append([evt_type, sort_date, event, evt_ref])
|
2009-08-21 06:07:59 +05:30
|
|
|
|
2009-08-27 18:51:55 +05:30
|
|
|
# sort the list of an individual's events
|
|
|
|
event_list.sort()
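# because each entry is [evt_type, sort_date, event, evt_ref], a plain
# sort() groups this person's events by type and orders each group
# chronologically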
|
2009-08-21 06:07:59 +05:30
|
|
|
|
2009-08-27 18:51:55 +05:30
|
|
|
# combine person and their events together
|
|
|
|
event_dict.append([sort_name, person, event_list])
|
|
|
|
|
|
|
|
# sort the per-person event entries by sort_name
|
|
|
|
event_dict.sort()
|
|
|
|
|
|
|
|
# set progress meter pass
|
|
|
|
self.progress.set_pass(_('Creating event pages'), len(event_dict))
|
2009-08-21 06:07:59 +05:30
|
|
|
|
|
|
|
# send all data to the events list page
|
2009-08-27 18:51:55 +05:30
|
|
|
EventListPage(self, self.title, event_dict)
|
2009-08-21 06:07:59 +05:30
|
|
|
|
2009-08-27 18:51:55 +05:30
|
|
|
for (sort_name, person, event_list) in event_dict:
|
|
|
|
|
|
|
|
for evt_type, sort_date, event, evt_ref in event_list:
|
|
|
|
self.progress.step()
|
2009-08-21 06:07:59 +05:30
|
|
|
|
2009-08-27 18:51:55 +05:30
|
|
|
# create individual event page
|
|
|
|
EventPage(self, self.title, evt_type, event, evt_ref, person)
|
2009-08-21 06:07:59 +05:30
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
def gallery_pages(self, source_list):
|
2007-04-09 01:49:14 +05:30
|
|
|
import gc
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
self.progress.set_pass(_("Creating media pages"), len(self.photo_list))
|
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
MediaListPage(self, self.title)
|
2005-08-18 11:28:28 +05:30
|
|
|
|
|
|
|
prev = None
|
2005-12-06 12:08:09 +05:30
|
|
|
total = len(self.photo_list)
|
2007-10-12 17:46:49 +05:30
|
|
|
sort = Sort.Sort(self.database)
|
2009-06-30 19:35:57 +05:30
|
|
|
photo_keys = sorted(self.photo_list, key=sort.by_media_title_key)
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2008-07-04 00:40:05 +05:30
|
|
|
index = 1
|
2005-08-18 11:28:28 +05:30
|
|
|
for photo_handle in photo_keys:
|
2007-04-09 01:49:14 +05:30
|
|
|
gc.collect() # Reduce memory usage when there are many images.
|
2005-08-18 11:28:28 +05:30
|
|
|
if index == total:
|
|
|
|
next = None
|
|
|
|
else:
|
|
|
|
next = photo_keys[index]
|
2008-03-14 03:58:22 +05:30
|
|
|
# Note: self.photo_list[photo_handle] is passed here, not the whole self.photo_list
|
|
|
|
MediaPage(self, self.title, photo_handle, source_list, self.photo_list[photo_handle],
|
2005-12-06 12:08:09 +05:30
|
|
|
(prev, next, index, total))
|
|
|
|
self.progress.step()
|
2005-08-18 11:28:28 +05:30
|
|
|
prev = photo_handle
|
|
|
|
index += 1
|
|
|
|
|
2008-03-14 03:58:22 +05:30
|
|
|
def base_pages(self):
|
2005-12-06 12:08:09 +05:30
|
|
|
|
|
|
|
if self.use_home:
|
2008-03-14 03:58:22 +05:30
|
|
|
HomePage(self, self.title)
|
2005-12-06 12:08:09 +05:30
|
|
|
|
|
|
|
if self.inc_contact:
|
2008-03-14 03:58:22 +05:30
|
|
|
ContactPage(self, self.title)
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2005-12-06 12:08:09 +05:30
|
|
|
if self.inc_download:
|
2008-03-14 03:58:22 +05:30
|
|
|
DownloadPage(self, self.title)
|
2008-03-08 22:10:19 +05:30
|
|
|
|
2005-12-06 12:08:09 +05:30
|
|
|
if self.use_intro:
|
2008-03-14 03:58:22 +05:30
|
|
|
IntroductionPage(self, self.title)
|
2005-08-18 11:28:28 +05:30
|
|
|
|
2009-06-30 01:34:00 +05:30
|
|
|
def repository_pages(self, repolist):
|
2009-07-15 05:23:07 +05:30
|
|
|
"""
|
|
|
|
will create RepositoryPage() and RepositoryListPage()
|
|
|
|
"""
|
2009-06-30 01:34:00 +05:30
|
|
|
|
|
|
|
db = self.database
|
|
|
|
repos_dict = {}
|
|
|
|
|
|
|
|
# Sort the repositories
|
|
|
|
for handle in repolist:
|
|
|
|
repo = db.get_repository_from_handle(handle)
|
|
|
|
key = repo.name + str(repo.get_gramps_id())
|
|
|
|
repos_dict[key] = (repo, handle)
|
|
|
|
|
|
|
|
keys = sorted(repos_dict, key=locale.strxfrm)
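# Illustrative (hypothetical) example: a repository named 'County Archive'
# with GRAMPS ID 'R0002' is keyed as 'County ArchiveR0002'; appending the ID
# keeps repositories with identical names from overwriting each other in
# repos_dict, while locale.strxfrm() sorts the keys per the user's locale.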
|
|
|
|
|
|
|
|
# set progress bar pass for Repositories
|
|
|
|
self.progress.set_pass(_('Creating repository pages'), len(repos_dict))
|
|
|
|
|
|
|
|
# RepositoryListPage Class
|
|
|
|
RepositoryListPage(self, self.title, repos_dict, keys)
|
|
|
|
|
|
|
|
index = 0
|
|
|
|
for index, key in enumerate(keys):
|
|
|
|
(repo, handle) = repos_dict[key]
|
|
|
|
|
|
|
|
# RepositoryPage Class
|
|
|
|
RepositoryPage(self, self.title, repo, handle)
|
|
|
|
|
|
|
|
self.progress.step()
|
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
def add_image(self, option_name, height=0):
|
2008-03-21 03:54:36 +05:30
|
|
|
pic_id = self.options[option_name]
|
|
|
|
if pic_id:
|
2009-06-11 22:15:30 +05:30
|
|
|
obj = self.database.get_object_from_gramps_id(pic_id)
|
2008-03-21 03:54:36 +05:30
|
|
|
mime_type = obj.get_mime_type()
|
|
|
|
if mime_type and mime_type.startswith("image"):
|
|
|
|
try:
|
2008-07-17 17:24:33 +05:30
|
|
|
newpath, thumb_path = self.prepare_copy_media(obj)
|
2009-06-11 22:15:30 +05:30
|
|
|
self.copy_file(Utils.media_path_full(self.database, obj.get_path()),
|
2008-03-21 03:54:36 +05:30
|
|
|
newpath)
|
2009-06-11 22:15:30 +05:30
|
|
|
|
|
|
|
# begin image
|
|
|
|
image = Html('img')
|
|
|
|
img_attr = ''
|
2008-03-21 03:54:36 +05:30
|
|
|
if height:
|
2009-06-11 22:15:30 +05:30
|
|
|
img_attr += ' height="%d" ' % height
|
|
|
|
img_attr += ' src="%s" alt="%s" ' % (newpath, obj.get_description())
|
|
|
|
|
|
|
|
# add image attributes to image
|
|
|
|
image.attr = img_attr
|
|
|
|
|
|
|
|
# return an image
|
|
|
|
return image
|
|
|
|
|
2008-03-21 03:54:36 +05:30
|
|
|
except (IOError, OSError), msg:
|
|
|
|
WarningDialog(_("Could not add photo to page"), str(msg))
|
2008-03-18 02:39:16 +05:30
|
|
|
|
2009-06-11 22:15:30 +05:30
|
|
|
# no image to return
|
|
|
|
return None
|
|
|
|
|
2008-03-21 03:54:36 +05:30
|
|
|
def build_subdirs(self, subdir, fname, up=False):
|
|
|
|
"""
|
|
|
|
If subdir is given, then two extra levels of subdirectory are inserted
|
|
|
|
between 'subdir' and the filename. The reason is to prevent directories with
|
|
|
|
too many entries.
|
2008-03-18 02:39:16 +05:30
|
|
|
|
2008-03-21 03:54:36 +05:30
|
|
|
For example, this may return "8/1/aec934857df74d36618"
|
2008-03-18 02:39:16 +05:30
|
|
|
"""
|
2008-03-21 03:54:36 +05:30
|
|
|
subdirs = []
|
|
|
|
if subdir:
|
|
|
|
subdirs.append(subdir)
|
|
|
|
subdirs.append(fname[-1].lower())
|
|
|
|
subdirs.append(fname[-2].lower())
|
|
|
|
if up:
|
|
|
|
subdirs = ['..']*3 + subdirs
|
|
|
|
return subdirs
|
|
|
|
|
2009-08-30 02:23:31 +05:30
|
|
|
|
2008-03-21 03:54:36 +05:30
|
|
|
def build_path(self, subdir, fname, up=False):
|
|
|
|
"""
|
|
|
|
Return the name of the subdirectory.
|
|
|
|
|
|
|
|
Notice that we DO use os.path.join() here.
|
|
|
|
"""
|
|
|
|
return os.path.join(*self.build_subdirs(subdir, fname, up))
|
|
|
|
|
|
|
|
def build_url_image(self, fname, subdir=None, up=False):
|
|
|
|
subdirs = []
|
|
|
|
if subdir:
|
|
|
|
subdirs.append(subdir)
|
|
|
|
if up:
|
|
|
|
subdirs = ['..']*3 + subdirs
|
2009-08-30 02:23:31 +05:30
|
|
|
nname = '/'.join(subdirs + [fname])
|
|
|
|
if ( Utils.win ):
|
|
|
|
nname = nname.replace('\\','/')
|
|
|
|
return nname
|
2008-03-21 03:54:36 +05:30
|
|
|
|
|
|
|
def build_url_fname_html(self, fname, subdir=None, up=False):
|
|
|
|
return self.build_url_fname(fname, subdir, up) + self.ext
|
|
|
|
|
|
|
|
def build_url_fname(self, fname, subdir=None, up=False):
|
|
|
|
"""
|
|
|
|
Create part of the URL given the filename and optionally the subdirectory.
|
|
|
|
If the subdirectory is given, then two extra levels of subdirectory are inserted
|
|
|
|
between 'subdir' and the filename. The reason is to prevent directories with
|
|
|
|
too many entries.
|
|
|
|
If 'up' is True, then "../../../" is inserted in front of the result.
|
|
|
|
|
|
|
|
The extension is added to the filename as well.
|
|
|
|
|
|
|
|
Notice that we do NOT use os.path.join() because we're creating a URL.
|
|
|
|
Imagine we run gramps on Windows (heaven forbid); we don't want to
|
|
|
|
see backslashes in the URL.
|
2008-03-18 02:39:16 +05:30
|
|
|
"""
|
2009-08-30 02:23:31 +05:30
|
|
|
if ( Utils.win ):
|
|
|
|
fname = fname.replace('\\','/')
|
2008-03-21 03:54:36 +05:30
|
|
|
subdirs = self.build_subdirs(subdir, fname, up)
|
|
|
|
return '/'.join(subdirs + [fname])
|
|
|
|
|
|
|
|
    def create_file(self, fname, subdir=None):
        if subdir:
            subdir = self.build_path(subdir, fname)
            self.cur_fname = os.path.join(subdir, fname) + self.ext
        else:
            self.cur_fname = fname + self.ext
        if self.archive:
            self.string_io = StringIO()
            of = codecs.EncodedFile(self.string_io, 'utf-8',
                                    self.encoding, 'xmlcharrefreplace')
        else:
            if subdir:
                subdir = os.path.join(self.html_dir, subdir)
                if not os.path.isdir(subdir):
                    os.makedirs(subdir)
            fname = os.path.join(self.html_dir, self.cur_fname)
            of = codecs.EncodedFile(open(fname, "w"), 'utf-8',
                                    self.encoding, 'xmlcharrefreplace')
        return of

    def close_file(self, of):
        if self.archive:
            tarinfo = tarfile.TarInfo(self.cur_fname)
            tarinfo.size = len(self.string_io.getvalue())
            tarinfo.mtime = time.time()
            if os.sys.platform != "win32":
                tarinfo.uid = os.getuid()
                tarinfo.gid = os.getgid()
            self.string_io.seek(0)
            self.archive.addfile(tarinfo, self.string_io)
            self.string_io = None
            of.close()
        else:
            of.close()
        self.cur_fname = None

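    # Illustrative sketch only (hypothetical names): with the 'archive' option
    # enabled, a page is buffered in a StringIO and added to the .tar.gz as a
    # member; otherwise it is written straight into the destination directory.
    #
    #     of = report.create_file("surnames")   # StringIO-backed when archiving
    #     of.write(page_text)
    #     report.close_file(of)                 # becomes the "surnames.html" member
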
    def add_lnkref_to_photo(self, photo, lnkref):
        handle = photo.get_handle()
        # FIXME. Is it OK to add to the photo_list of report?
        photo_list = self.photo_list
        if handle in photo_list:
            if lnkref not in photo_list[handle]:
                photo_list[handle].append(lnkref)
        else:
            photo_list[handle] = [lnkref]

    def prepare_copy_media(self, photo):
        handle = photo.get_handle()
        ext = os.path.splitext(photo.get_path())[1]
        real_path = os.path.join(self.build_path('images', handle), handle + ext)
        thumb_path = os.path.join(self.build_path('thumb', handle), handle + '.png')
        return real_path, thumb_path

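    # Illustrative sketch only: for a hypothetical photo whose handle is
    # "aec934857df74d36618" and whose source file ends in ".jpg", this returns
    # ('images/8/1/aec934857df74d36618.jpg', 'thumb/8/1/aec934857df74d36618.png')
    # on a POSIX system.
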
    def copy_file(self, from_fname, to_fname, to_dir=''):
        """
        Copy a file from a source to a (report) destination.
        If to_dir is not present and if the target is not an archive,
        then the destination directory will be created.

        Normally 'to_fname' will be just a filename, without directory path.

        'to_dir' is the relative path name in the destination root. It will
        be prepended before 'to_fname'.
        """
        if self.archive:
            dest = os.path.join(to_dir, to_fname)
            self.archive.add(from_fname, dest)
        else:
            dest = os.path.join(self.html_dir, to_dir, to_fname)

            destdir = os.path.dirname(dest)
            if not os.path.isdir(destdir):
                os.makedirs(destdir)

            if from_fname != dest:
                shutil.copyfile(from_fname, dest)
            elif self.warn_dir:
                WarningDialog(
                    _("Possible destination error") + "\n" +
                    _("You appear to have set your target directory "
                      "to a directory used for data storage. This "
                      "could create problems with file management. "
                      "It is recommended that you consider using "
                      "a different directory to store your generated "
                      "web pages."))
                self.warn_dir = False

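    # Illustrative sketch only (hypothetical paths): copying a media file into
    # the report either adds it to the archive or writes it under the target
    # directory, creating 'images/8/1/' on demand.
    #
    #     report.copy_file('/home/user/photos/gramps.jpg',
    #                      'aec934857df74d36618.jpg', 'images/8/1')
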
class NavWebOptions(MenuReportOptions):
    """
    Defines options and provides handling interface.
    """

    def __init__(self, name, dbase):
        self.__db = dbase
        self.__archive = None
        self.__target = None
        self.__pid = None
        self.__filter = None
        self.__graph = None
        self.__graphgens = None
        self.__living = None
        self.__yearsafterdeath = None
        MenuReportOptions.__init__(self, name, dbase)

    def add_menu_options(self, menu):
        """
        Add options to the menu for the web site.
        """
        self.__add_report_options(menu)
        self.__add_page_generation_options(menu)
        self.__add_privacy_options(menu)
        self.__add_download_options(menu)
        self.__add_advanced_options(menu)

    def __add_report_options(self, menu):
        """
        Options on the "Report Options" tab.
        """
        category_name = _("Report Options")

        self.__archive = BooleanOption(_('Store web pages in .tar.gz archive'),
                                       False)
        self.__archive.set_help(_('Whether to store the web pages in an '
                                  'archive file'))
        menu.add_option(category_name, 'archive', self.__archive)
        self.__archive.connect('value-changed', self.__archive_changed)

        self.__target = DestinationOption(_("Destination"),
                                          os.path.join(const.USER_HOME, "NAVWEB"))
        self.__target.set_help( _("The destination directory for the web "
                                  "files"))
        menu.add_option(category_name, "target", self.__target)

        self.__archive_changed()

        title = StringOption(_("Web site title"), _('My Family Tree'))
        title.set_help(_("The title of the web site"))
        menu.add_option(category_name, "title", title)

        self.__filter = FilterOption(_("Filter"), 0)
        self.__filter.set_help(
            _("Select filter to restrict people that appear on web site"))
        menu.add_option(category_name, "filter", self.__filter)
        self.__filter.connect('value-changed', self.__filter_changed)

        self.__pid = PersonOption(_("Filter Person"))
        self.__pid.set_help(_("The center person for the filter"))
        menu.add_option(category_name, "pid", self.__pid)
        self.__pid.connect('value-changed', self.__update_filters)

        self.__update_filters()

        # We must figure out the value of the first option before we can
        # create the EnumeratedListOption
        fmt_list = _nd.get_name_format()
        name_format = EnumeratedListOption(_("Name format"), fmt_list[0][0])
        for num, name, fmt_str, act in fmt_list:
            name_format.add_item(num, name)
        name_format.set_help(_("Select the format to display names"))
        menu.add_option(category_name, "name_format", name_format)

        ext = EnumeratedListOption(_("File extension"), ".html" )
        for etype in _WEB_EXT:
            ext.add_item(etype, etype)
        ext.set_help( _("The extension to be used for the web files"))
        menu.add_option(category_name, "ext", ext)

        cright = EnumeratedListOption(_('Copyright'), 0 )
        for index, copt in enumerate(_COPY_OPTIONS):
            cright.add_item(index, copt)
        cright.set_help( _("The copyright to be used for the web files"))
        menu.add_option(category_name, "cright", cright)

        css = EnumeratedListOption(_('StyleSheet'), CSS_FILES[0][1])
        for style in CSS_FILES:
            css.add_item(style[1], style[0])
        css.set_help( _('The stylesheet to be used for the web page'))
        menu.add_option(category_name, "css", css)

        self.__graph = BooleanOption(_("Include ancestor graph"), True)
        self.__graph.set_help(_('Whether to include an ancestor graph '
                                'on each individual page'))
        menu.add_option(category_name, 'graph', self.__graph)
        self.__graph.connect('value-changed', self.__graph_changed)

        self.__graphgens = EnumeratedListOption(_('Graph generations'), 4)
        self.__graphgens.add_item(2, "2")
        self.__graphgens.add_item(3, "3")
        self.__graphgens.add_item(4, "4")
        self.__graphgens.add_item(5, "5")
        self.__graphgens.set_help( _("The number of generations to include in "
                                     "the ancestor graph"))
        menu.add_option(category_name, "graphgens", self.__graphgens)

        self.__graph_changed()

    def __add_page_generation_options(self, menu):
        """
        Options on the "Page Generation" tab.
        """
        category_name = _("Page Generation")

        homenote = NoteOption(_('Home page note'))
        homenote.set_help( _("A note to be used on the home page"))
        menu.add_option(category_name, "homenote", homenote)

        homeimg = MediaOption(_('Home page image'))
        homeimg.set_help( _("An image to be used on the home page"))
        menu.add_option(category_name, "homeimg", homeimg)

        intronote = NoteOption(_('Introduction note'))
        intronote.set_help( _("A note to be used as the introduction"))
        menu.add_option(category_name, "intronote", intronote)

        introimg = MediaOption(_('Introduction image'))
        introimg.set_help( _("An image to be used as the introduction"))
        menu.add_option(category_name, "introimg", introimg)

        contactnote = NoteOption(_("Publisher contact note"))
        contactnote.set_help( _("A note to be used as the publisher contact"))
        menu.add_option(category_name, "contactnote", contactnote)

        contactimg = MediaOption(_("Publisher contact image"))
        contactimg.set_help( _("An image to be used as the publisher contact"))
        menu.add_option(category_name, "contactimg", contactimg)

        headernote = NoteOption(_('HTML user header'))
        headernote.set_help( _("A note to be used as the page header"))
        menu.add_option(category_name, "headernote", headernote)

        footernote = NoteOption(_('HTML user footer'))
        footernote.set_help( _("A note to be used as the page footer"))
        menu.add_option(category_name, "footernote", footernote)

        self.__gallery = BooleanOption(_("Include images and media objects"), True)
        self.__gallery.set_help(_('Whether to include a gallery of media objects'))
        menu.add_option(category_name, 'gallery', self.__gallery)
        self.__gallery.connect('value-changed', self.__gallery_changed)

        self.__maxinitialimagewidth = NumberOption(_("Max width of initial image"),
                                                   _DEFAULT_MAX_IMG_WIDTH, 0, 2000)
        self.__maxinitialimagewidth.set_help(_("This allows you to set the maximum width "
                                               "of the image shown on the media page. Set to 0 for no limit."))
        menu.add_option(category_name, 'maxinitialimagewidth', self.__maxinitialimagewidth)

        self.__maxinitialimageheight = NumberOption(_("Max height of initial image"),
                                                    _DEFAULT_MAX_IMG_HEIGHT, 0, 2000)
        self.__maxinitialimageheight.set_help(_("This allows you to set the maximum height "
                                                "of the image shown on the media page. Set to 0 for no limit."))
        menu.add_option(category_name, 'maxinitialimageheight', self.__maxinitialimageheight)

        self.__gallery_changed()

        nogid = BooleanOption(_('Suppress GRAMPS ID'), False)
        nogid.set_help(_('Whether to include the Gramps ID of objects'))
        menu.add_option(category_name, 'nogid', nogid)

    def __add_privacy_options(self, menu):
        """
        Options on the "Privacy" tab.
        """
        category_name = _("Privacy")

        incpriv = BooleanOption(_("Include records marked private"), False)
        incpriv.set_help(_('Whether to include private objects'))
        menu.add_option(category_name, 'incpriv', incpriv)

        self.__living = EnumeratedListOption(_("Living People"),
                                             _INCLUDE_LIVING_VALUE )
        self.__living.add_item(LivingProxyDb.MODE_EXCLUDE_ALL,
                               _("Exclude"))
        self.__living.add_item(LivingProxyDb.MODE_INCLUDE_LAST_NAME_ONLY,
                               _("Include Last Name Only"))
        self.__living.add_item(LivingProxyDb.MODE_INCLUDE_FULL_NAME_ONLY,
                               _("Include Full Name Only"))
        self.__living.add_item(_INCLUDE_LIVING_VALUE,
                               _("Include"))
        self.__living.set_help(_("How to handle living people"))
        menu.add_option(category_name, "living", self.__living)
        self.__living.connect('value-changed', self.__living_changed)

        self.__yearsafterdeath = NumberOption(_("Years from death to consider "
                                                "living"), 30, 0, 100)
        self.__yearsafterdeath.set_help(_("This allows you to restrict "
                                          "information on people who have not "
                                          "been dead for very long"))
        menu.add_option(category_name, 'yearsafterdeath',
                        self.__yearsafterdeath)

        self.__living_changed()

    def __add_download_options(self, menu):
        """
        Options on the "Download" tab.
        """
        category_name = _("Download")

        self.__incdownload = BooleanOption(_("Include download page"), False)
        self.__incdownload.set_help(_('Whether to include a database download option'))
        menu.add_option(category_name, 'incdownload', self.__incdownload)
        self.__incdownload.connect('value-changed', self.__download_changed)

        self.__downloadnote = NoteOption(_('Download page note'))
        self.__downloadnote.set_help( _("A note to be used on the download page"))
        menu.add_option(category_name, "downloadnote", self.__downloadnote)

        self.__down_fname1 = DestinationOption(_("Download Filename"),
                                               os.path.join(const.USER_HOME, ""))
        self.__down_fname1.set_help(_("File to be used for downloading of database"))
        menu.add_option(category_name, "down_fname1", self.__down_fname1)

        self.__dl_descr1 = StringOption(_("Description for download"), _('Smith Family Tree'))
        self.__dl_descr1.set_help(_('Give a description for this file.'))
        menu.add_option(category_name, 'dl_descr1', self.__dl_descr1)

        self.__down_fname2 = DestinationOption(_("Download Filename"),
                                               os.path.join(const.USER_HOME, ""))
        self.__down_fname2.set_help(_("File to be used for downloading of database"))
        menu.add_option(category_name, "down_fname2", self.__down_fname2)

        self.__dl_descr2 = StringOption(_("Description for download"), _('Johnson Family Tree'))
        self.__dl_descr2.set_help(_('Give a description for this file.'))
        menu.add_option(category_name, 'dl_descr2', self.__dl_descr2)

        self.__dl_cright = EnumeratedListOption(_('Download Copyright License'), 0 )
        for index, copt in enumerate(_COPY_OPTIONS):
            self.__dl_cright.add_item(index, copt)
        self.__dl_cright.set_help( _("The copyright to be used for this download file"))
        menu.add_option(category_name, "dl_cright", self.__dl_cright)

        self.__download_changed()

    def __add_advanced_options(self, menu):
        """
        Options on the "Advanced" tab.
        """
        category_name = _("Advanced")

        encoding = EnumeratedListOption(_('Character set encoding'), _CHARACTER_SETS[0][1] )
        for eopt in _CHARACTER_SETS:
            encoding.add_item(eopt[1], eopt[0])
        encoding.set_help( _("The encoding to be used for the web files"))
        menu.add_option(category_name, "encoding", encoding)

        linkhome = BooleanOption(_('Include link to home person on every '
                                   'page'), False)
        linkhome.set_help(_('Whether to include a link to the home person'))
        menu.add_option(category_name, 'linkhome', linkhome)

        showbirth = BooleanOption(_("Include a column for birth dates on the "
                                    "index pages"), True)
        showbirth.set_help(_('Whether to include a birth column'))
        menu.add_option(category_name, 'showbirth', showbirth)

        showdeath = BooleanOption(_("Include a column for death dates on the "
                                    "index pages"), False)
        showdeath.set_help(_('Whether to include a death column'))
        menu.add_option(category_name, 'showdeath', showdeath)

        showpartner = BooleanOption(_("Include a column for partners on the "
                                      "index pages"), False)
        showpartner.set_help(_('Whether to include a partners column'))
        menu.add_option(category_name, 'showpartner', showpartner)

        showparents = BooleanOption(_("Include a column for parents on the "
                                      "index pages"), False)
        showparents.set_help(_('Whether to include a parents column'))
        menu.add_option(category_name, 'showparents', showparents)

        showallsiblings = BooleanOption(_("Include half and/or "
                                          "step-siblings on the individual pages"), False)
        showallsiblings.set_help(_("Whether to include half and/or "
                                   "step-siblings with the parents and siblings"))
        menu.add_option(category_name, 'showhalfsiblings', showallsiblings)

        birthorder = BooleanOption(_('Sort all children in birth order'), False)
        birthorder.set_help(_('Whether to display children in birth order '
                              'or in entry order'))
        menu.add_option(category_name, 'birthorder', birthorder)

        inc_events = BooleanOption(_('Include event pages'), False)
        inc_events.set_help(_('Whether to add a complete events list and relevant pages'))
        menu.add_option(category_name, 'inc_events', inc_events)

        inc_repository = BooleanOption(_('Include Repository Pages'), False)
        inc_repository.set_help(_('Whether to include the Repository Pages'))
        menu.add_option(category_name, 'inc_repository', inc_repository)

    def __archive_changed(self):
        """
        Update the change of storage: archive or directory
        """
        if self.__archive.get_value() == True:
            self.__target.set_extension(".tar.gz")
            self.__target.set_directory_entry(False)
        else:
            self.__target.set_directory_entry(True)

    def __update_filters(self):
        """
        Update the filter list based on the selected person
        """
        gid = self.__pid.get_value()
        person = self.__db.get_person_from_gramps_id(gid)
        filter_list = ReportUtils.get_person_filters(person, False)
        self.__filter.set_filters(filter_list)

    def __filter_changed(self):
        """
        Handle filter change. If the filter is not specific to a person,
        disable the person option
        """
        filter_value = self.__filter.get_value()
        if filter_value in [1, 2, 3, 4]:
            # Filters 1, 2, 3 and 4 rely on the center person
            self.__pid.set_available(True)
        else:
            # The rest don't
            self.__pid.set_available(False)

    def __graph_changed(self):
        """
        Handle enabling or disabling the ancestor graph
        """
        self.__graphgens.set_available(self.__graph.get_value())

    def __gallery_changed(self):
        """
        Handle a change in the gallery option: enable or disable the
        image size options accordingly
        """
        if self.__gallery.get_value() == False:
            self.__maxinitialimagewidth.set_available(False)
            self.__maxinitialimageheight.set_available(False)
        else:
            self.__maxinitialimagewidth.set_available(True)
            self.__maxinitialimageheight.set_available(True)

    def __living_changed(self):
        """
        Handle a change in the living option
        """
        if self.__living.get_value() == _INCLUDE_LIVING_VALUE:
            self.__yearsafterdeath.set_available(False)
        else:
            self.__yearsafterdeath.set_available(True)

    def __download_changed(self):
        """
        Handle a change in the include download page option: enable or
        disable the other download options accordingly
        """
        if self.__incdownload.get_value():
            self.__downloadnote.set_available(True)
            self.__down_fname1.set_available(True)
            self.__dl_descr1.set_available(True)
            self.__down_fname2.set_available(True)
            self.__dl_descr2.set_available(True)
            self.__dl_cright.set_available(True)
        else:
            self.__downloadnote.set_available(False)
            self.__down_fname1.set_available(False)
            self.__dl_descr1.set_available(False)
            self.__down_fname2.set_available(False)
            self.__dl_descr2.set_available(False)
            self.__dl_cright.set_available(False)

# FIXME. Why do we need our own sorting? Why not use Sort.Sort?

def sort_people(db, handle_list):
    sname_sub = {}
    sortnames = {}

    for person_handle in handle_list:
        person = db.get_person_from_handle(person_handle)
        primary_name = person.get_primary_name()

        if primary_name.group_as:
            surname = primary_name.group_as
        else:
            surname = db.get_name_group_mapping(primary_name.surname)

        sortnames[person_handle] = _nd.sort_string(primary_name)

        if surname in sname_sub:
            sname_sub[surname].append(person_handle)
        else:
            sname_sub[surname] = [person_handle]

    sorted_lists = []
    temp_list = sorted(sname_sub, key=locale.strxfrm)

    for name in temp_list:
        slist = sorted(((sortnames[x], x) for x in sname_sub[name]),
                       key=lambda x:locale.strxfrm(x[0]))
        entries = [x[1] for x in slist]
        sorted_lists.append((name, entries))

    return sorted_lists

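# Illustrative sketch only (hypothetical handles): for three people whose
# primary names are "Jones, Mary", "Smith, Anna" and "Smith, John",
# sort_people() groups the handles by surname and sorts within each group,
# returning something like
#     [('Jones', [mary_handle]), ('Smith', [anna_handle, john_handle])]
# with the exact order depending on the active locale's collation.
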
# Modified _get_regular_surname from WebCal.py to get prefix, first name, and suffix
def _get_short_name(gender, name):
    """ Return the given name with the surname prefix prepended; for anyone
        other than a female the suffix is appended as well """
    short_name = name.get_first_name()
    prefix = name.get_surname_prefix()
    if prefix:
        short_name = prefix + " " + short_name
    if gender == Person.FEMALE:
        return short_name
    else:
        suffix = name.get_suffix()
        if suffix:
            short_name = short_name + ", " + suffix
        return short_name

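# Illustrative sketch only (hypothetical Name object): for a man whose first
# name is "John", surname prefix "van" and suffix "Jr.", _get_short_name()
# returns "van John, Jr."; for a woman with the same name parts it returns
# "van John", because the suffix is skipped.
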
def get_person_keyname(db, handle):
    """ Return the surname used to index the person with this handle """
    person = db.get_person_from_handle(handle)
    return person.get_primary_name().surname

def get_place_keyname(db, handle):
    """ Return the place name used to index the place with this handle """
    return ReportUtils.place_name(db, handle)

def get_first_letters(db, handle_list, key):
    """ Return the first letter of each key name; key is _PERSON or _PLACE """
    first_letters = []

    for handle in handle_list:
        if key == _PERSON:
            keyname = get_person_keyname(db, handle)
        else:
            keyname = get_place_keyname(db, handle)

        if keyname:
            c = normalize('NFKC', keyname)[0].upper()
            # See : http://www.gramps-project.org/bugs/view.php?id=2933
            (lang_country, modifier ) = locale.getlocale()
            if lang_country == "sv_SE" and ( c == u'W' or c == u'V' ):
                first_letters.append(u'V')
            else:
                first_letters.append(c)

    return first_letters

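# Illustrative sketch only (hypothetical handles): in a Swedish locale
# ("sv_SE"), names such as "Wallin" and "Vik" both contribute u'V' to the
# returned list, because Swedish collation files 'W' together with 'V';
# in other locales they would yield u'W' and u'V' respectively.
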
def _has_webpage_extension(url):
    """
    Determine whether the filename ends with one of the known web page extensions

    url = filename to be checked
    """
    for ext in _WEB_EXT:
        if url.endswith(ext):
            return True
    return False

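# Illustrative sketch only: assuming _WEB_EXT holds the extensions offered in
# the "File extension" option (".html", ".htm", ...), then
#     _has_webpage_extension("index.html")  -> True
#     _has_webpage_extension("photo.jpg")   -> False
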
def alphabet_navigation(db, handle_list, key):
    """
    Will create the alphabetical navigation bar...

    handle_list -- a list of people's or Places' handles
    key -- _PERSON or _PLACE
    """
    sorted_set = {}

    # The comment below from the glibc locale sv_SE in
    # localedata/locales/sv_SE :
    #
    # % The letter w is normally not present in the Swedish alphabet. It
    # % exists in some names in Swedish and foreign words, but is accounted
    # % for as a variant of 'v'.  Words and names with 'w' are in Swedish
    # % ordered alphabetically among the words and names with 'v'. If two
    # % words or names are only to be distinguished by 'v' or % 'w', 'v' is
    # % placed before 'w'.
    #
    # See : http://www.gramps-project.org/bugs/view.php?id=2933
    #
    (lang_country, modifier ) = locale.getlocale()
    for ltr in get_first_letters(db, handle_list, key):
        if ltr in sorted_set:
            sorted_set[ltr] += 1
        else:
            sorted_set[ltr] = 1

    # sort the accumulated letters, ignoring how often each one occurred
    sorted_alpha_index = sorted((l for l in sorted_set if l != ','),
                                key=locale.strxfrm)

    # if no letters, return None back to its callers
    if not sorted_alpha_index:
        return None

    # begin alphabet division
    with Html('div', id='alphabet') as section:

        # set up table
        with Html('table', class_='infolist alphabet') as table:
            section += table

            num_ltrs = len(sorted_alpha_index)
            nrows = (num_ltrs / 35) + 1
            index = 0
            for rows in xrange(nrows):
                trow = Html('tr')
                table += trow
                unordered = Html('ul')
                trow += unordered
                cols = 0
                while (cols <= 35 and index < num_ltrs):
                    ltr = sorted_alpha_index[index]
                    title_str = _('Surnames') if key == 0 else _('Places')
                    if lang_country == "sv_SE" and ltr == u'V':
                        title_str += _(' starting with %s') % "V,W"
                        unordered += (Html('li', class_='letters', inline=True) +
                                      Html('a', "V,W", href="#V,W", title=title_str)
                                     )
                    else:
                        title_str += _(' starting with %s') % ltr
                        unordered += Html('li', class_='letters', inline=True) + (
                                     Html('a', ltr, href='#%s' % ltr, title=title_str)
                                     )
                    cols += 1
                    index += 1

    # return alphabet navigation to its callers
    return section

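# Illustrative sketch only: for surname initials A, B and V the returned
# section renders (via the Html class) roughly as
#     <div id="alphabet"><table class="infolist alphabet"><tr><ul>
#       <li class="letters"><a href="#A" title="Surnames starting with A">A</a></li>
#       ...
#     </ul></tr></table></div>
# The exact markup depends on the Html class used by this report.
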
def add_birthdate(db, childlist):
    """
    Pair each child handle with a birth date so the list can be sorted
    into birth order; children without a usable date sort last.
    """
    sorted_children = []
    for child_handle in childlist:
        child = db.get_person_from_handle(child_handle)

        birth_date = None
        birth_ref = child.get_birth_ref()
        if birth_ref:
            birth_event = db.get_event_from_handle(birth_ref.ref)
            birth_date = birth_event.get_date_object()
        if birth_date is not None:
            year = birth_date.get_year()
            if not year:
                year = 2199
            month = birth_date.get_month()
            if not month:
                month = 12
            day = birth_date.get_day()
            if not day:
                day = 31
        else:
            year, month, day = 2199, 12, 31

        # build the birth date; if there is none, use a far-future placeholder
        birth_date = Date(year, month, day)
        sorted_children.append((birth_date, child_handle))

    # return the list of child handles and their birthdates
    return sorted_children

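# Illustrative sketch only (hypothetical handles): a child born on 1901-05-12
# is paired with Date(1901, 5, 12), while a child with no recorded birth event
# gets the placeholder Date(2199, 12, 31), so callers sorting the returned
# pairs will list undated children last.
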
# -------------------------------------------
#
# Register Plugin
#
# -------------------------------------------
pmgr = PluginManager.get_instance()
pmgr.register_report(
    name = 'navwebpage',
    category = CATEGORY_WEB,
    report_class = NavWebReport,
    options_class = NavWebOptions,
    modes = PluginManager.REPORT_MODE_GUI | PluginManager.REPORT_MODE_CLI,
    translated_name = _("Narrated Web Site"),
    status = _("Stable"),
    author_name = "Donald N. Allingham",
    author_email = "don@gramps-project.org",
    description = _("Produces web (HTML) pages for individuals, or a set of "
                    "individuals"),
    )