Use built-in map function to simplify some list comprehensions

svn: r16636
This commit is contained in:
Gerald Britton 2011-02-16 20:06:40 +00:00
parent 66d5991362
commit a19e65073c
11 changed files with 79 additions and 39 deletions

View File

@ -585,7 +585,7 @@ class DateParser(object):
match = self._calny_iso.match(text)
if match:
cal = self.calendar_to_int[match.group(2).lower()]
newyear = tuple([int(s) for s in match.group(3).split("-")])
newyear = tuple(map(int, match.group(3).split("-")))
text = match.group(1) + match.group(4)
return (text, cal, newyear)
@ -602,7 +602,7 @@ class DateParser(object):
else:
match = self._ny_iso.match(text)
if match:
newyear = tuple([int(s) for s in match.group(2).split("-")])
newyear = tuple(map(int, match.group(2).split("-")))
text = match.group(1) + match.group(3)
return (text, newyear)

View File

@ -779,7 +779,7 @@ class DbBsddbRead(DbReadBase, Callback):
"""
Return the defined names that have been assigned to a default grouping.
"""
return [unicode(k) for k in self.name_group.keys()]
return map(unicode, self.name_group.keys())
def has_name_group_key(self, name):
"""

View File

@ -86,10 +86,9 @@ def gramps_upgrade_15(self):
tags = [tag_handle]
else:
tags = []
address_list = [convert_address(addr) for addr in address_list]
address_list = map(convert_address, address_list)
new_primary_name = convert_name_15(primary_name)
new_alternate_names = [convert_name_15(altname) for altname in
alternate_names]
new_alternate_names = map(convert_name_15, alternate_names)
new_person = (junk_handle, # 0
gramps_id, # 1
gender, # 2
@ -193,7 +192,7 @@ def gramps_upgrade_15(self):
new_place = list(place)
if new_place[5] is not None:
new_place[5] = convert_location(new_place[5])
new_place[6] = [convert_location(loc) for loc in new_place[6]]
new_place[6] = map(convert_location, new_place[6])
new_place = new_place[:11] + new_place[12:]
new_place = tuple(new_place)
with BSDDBTxn(self.env, self.place_map) as txn:
@ -221,7 +220,7 @@ def gramps_upgrade_15(self):
repository = self.repository_map[handle]
new_repository = list(repository)
new_repository = new_repository[:7] + new_repository[8:]
new_repository[5] = [convert_address(addr) for addr in new_repository[5]]
new_repository[5] = map(convert_address, new_repository[5])
new_repository = tuple(new_repository)
with BSDDBTxn(self.env, self.repository_map) as txn:
txn.put(str(handle), new_repository)

View File

@ -1080,7 +1080,7 @@ class Date(object):
code = Date.NEWYEAR_SEP1
elif "-" in string:
try:
code = tuple([int(n) for n in string.split("-")])
code = tuple(map(int, string.split("-")))
except:
code = 0
else:

View File

@ -68,7 +68,7 @@ class Place(SourceBase, NoteBase, MediaBase, UrlBase, PrimaryObject):
self.lat = source.lat
self.title = source.title
self.main_loc = Location(source.main_loc)
self.alt_loc = [Location(loc) for loc in source.alt_loc]
self.alt_loc = map(Location, source.alt_loc)
else:
self.long = ""
self.lat = ""

View File

@ -161,12 +161,10 @@ def valid_plugin_version(plugin_version_string):
if not isinstance(plugin_version_string, basestring): return False
dots = plugin_version_string.count(".")
if dots == 1:
plugin_version = tuple([int(n) for n in
plugin_version_string.split(".", 1)])
plugin_version = tuple(map(int, plugin_version_string.split(".", 1)))
return plugin_version == VERSION_TUPLE[:2]
elif dots == 2:
plugin_version = tuple([int(n) for n in
plugin_version_string.split(".", 2)])
plugin_version = tuple(map(int, plugin_version_string.split(".", 2)))
return (plugin_version[:2] == VERSION_TUPLE[:2] and
plugin_version <= VERSION_TUPLE)
return False

View File

@ -84,9 +84,8 @@ def version_str_to_tup(sversion, positions):
(1, 2)
"""
try:
tup = tuple(([int(n) for n in
sversion.split(".", sversion.count("."))] +
[0] * positions)[0:positions])
tup = tuple(map(int, sversion.split(".")))[0:positions]
tup += (0,) * (positions - len(tup))
except:
tup = (0,) * positions
return tup

View File

@ -539,7 +539,7 @@ class EditRule(ManagedWindow.ManagedWindow):
taglist = taglist + [tag.get_name() for tag in dbstate.db.iter_tags()]
t = MyList(taglist, taglist)
elif v == _('Confidence level:'):
t = MyList([str(i) for i in range(5)],
t = MyList(map(str, range(5)),
[Utils.confidence[i] for i in range(5)])
else:
t = MyEntry()

View File

@ -270,7 +270,9 @@ class PSDrawDoc(BaseDoc, DrawDoc):
self.file.write('[] 0 setdash\n')
else:
dash_style = stype.get_dash_style(stype.get_line_style())
self.file.write('[%s] 0 setdash\n' % (" ".join([str(d) for d in dash_style])))
self.file.write('[%s] 0 setdash\n' % (
" ".join(map(str, dash_style)))
)
point = path[0]
x1 = point[0] + self.paper.get_left_margin()
@ -311,7 +313,9 @@ class PSDrawDoc(BaseDoc, DrawDoc):
self.file.write('[] 0 setdash\n')
else:
dash_style = stype.get_dash_style(stype.get_line_style())
self.file.write('[%s] 0 setdash\n' % (" ".join([str(d) for d in dash_style])))
self.file.write('[%s] 0 setdash\n' % (
" ".join(map(str, dash_style)))
)
self.file.write(
'2 setlinecap\n' +

View File

@ -158,7 +158,9 @@ class SvgDrawDoc(BaseDoc, DrawDoc):
line_out += 'x2="%4.2fcm" y2="%4.2fcm" ' % (x2, y2)
line_out += 'style="stroke:#%02x%02x%02x; ' % s.get_color()
if s.get_line_style() != SOLID:
line_out += 'stroke-dasharray: %s; ' % (",".join([str(d) for d in s.get_dash_style()]))
line_out += 'stroke-dasharray: %s; ' % (
",".join(map(str, s.get_dash_style()))
)
line_out += 'stroke-width:%.2fpt;"/>\n' % s.get_line_width()
self.f.write(line_out)
@ -170,7 +172,9 @@ class SvgDrawDoc(BaseDoc, DrawDoc):
line_out = '<polygon fill="#%02x%02x%02x"' % stype.get_fill_color()
line_out += ' style="stroke:#%02x%02x%02x; ' % stype.get_color()
if stype.get_line_style() != SOLID:
line_out += 'stroke-dasharray: %s; ' % (",".join([str(d) for d in stype.get_dash_style()]))
line_out += 'stroke-dasharray: %s; ' % (
",".join(map(str, stype.get_dash_style()))
)
line_out += ' stroke-width:%.2fpt;"' % stype.get_line_width()
line_out += ' points="%.2f,%.2f' % units((point[0]+self.paper.get_left_margin(),
point[1]+self.paper.get_top_margin()))
@ -208,7 +212,9 @@ class SvgDrawDoc(BaseDoc, DrawDoc):
line_out += 'style="fill:#%02x%02x%02x; ' % box_style.get_fill_color()
line_out += 'stroke:#%02x%02x%02x; ' % box_style.get_color()
if box_style.get_line_style() != SOLID:
line_out += 'stroke-dasharray: %s; ' % (",".join([str(d) for d in box_style.get_dash_style()]))
line_out += 'stroke-dasharray: %s; ' % (
",".join(map(str, box_style.get_dash_style()))
)
line_out += 'stroke-width:%f;"/>\n' % box_style.get_line_width()
self.f.write(line_out)

View File

@ -66,6 +66,9 @@ def run(database, document, filter_name, *args, **kwargs):
sdb = SimpleAccess(database)
sdoc = SimpleDoc(document)
stab = SimpleTable(sdb)
if (filter_name == 'all'):
sdoc.title(_("Summary counts of current selection"))
sdoc.paragraph("")
@ -99,6 +102,7 @@ def run(database, document, filter_name, *args, **kwargs):
sdoc.paragraph("")
stab.write(sdoc)
return
# display the title
if filter_name in fname_map:
sdoc.title(_("Filtering on %s") % fname_map[filter_name]) # listed above
@ -106,119 +110,136 @@ def run(database, document, filter_name, *args, **kwargs):
sdoc.title(_("Filtering on %s") % _(filter_name))
sdoc.paragraph("")
matches = 0
if (filter_name == 'Inverse Person'):
sdb.dbase = database.db
stab.columns(_("Person"), _("Gramps ID"), _("Birth Date"))
proxy_handles = dict([(handle,1) for handle in
database.iter_person_handles()])
proxy_handles = set(database.iter_person_handles())
for person in database.db.iter_people():
if person.handle not in proxy_handles:
stab.row(person, person.gramps_id,
sdb.birth_or_fallback(person))
matches += 1
elif (filter_name == 'Inverse Family'):
sdb.dbase = database.db
stab.columns(_("Family"), _("Gramps ID"))
proxy_handles = dict([(handle,1) for handle in
database.iter_family_handles()])
proxy_handles = set(database.iter_family_handles())
for family in database.db.iter_families():
if family.handle not in proxy_handles:
stab.row(family, family.gramps_id)
matches += 1
elif (filter_name == 'Inverse Event'):
sdb.dbase = database.db
stab.columns(_("Event"), _("Gramps ID"))
proxy_handles = dict([(handle,1) for handle in
database.iter_event_handles()])
proxy_handles = set(database.iter_event_handles())
for event in database.db.iter_events():
if event.handle not in proxy_handles:
stab.row(event, event.gramps_id)
matches += 1
elif (filter_name == 'Inverse Place'):
sdb.dbase = database.db
stab.columns(_("Place"), _("Gramps ID"))
proxy_handles = dict([(handle,1) for handle in
database.iter_place_handles()])
proxy_handles = set(database.iter_place_handles())
for place in database.db.iter_places():
if place.handle not in proxy_handles:
stab.row(place, place.gramps_id)
matches += 1
elif (filter_name == 'Inverse Source'):
sdb.dbase = database.db
stab.columns(_("Source"), _("Gramps ID"))
proxy_handles = dict([(handle,1) for handle in
database.iter_source_handles()])
proxy_handles = set(database.iter_source_handles())
for source in database.db.iter_sources():
if source.handle not in proxy_handles:
stab.row(source, source.gramps_id)
matches += 1
elif (filter_name == 'Inverse Repository'):
sdb.dbase = database.db
stab.columns(_("Repository"), _("Gramps ID"))
proxy_handles = dict([(handle,1) for handle in
database.iter_repository_handles()])
proxy_handles = set(database.iter_repository_handles())
for repository in database.db.iter_repositories():
if repository.handle not in proxy_handles:
stab.row(repository, repository.gramps_id)
matches += 1
elif (filter_name == 'Inverse MediaObject'):
sdb.dbase = database.db
stab.columns(_("Media"), _("Gramps ID"))
proxy_handles = dict([(handle,1) for handle in
database.iter_media_object_handles()])
proxy_handles = set(database.iter_media_object_handles())
for media in database.db.iter_media_objects():
if media.handle not in proxy_handles:
stab.row(media, media.gramps_id)
matches += 1
elif (filter_name == 'Inverse Note'):
sdb.dbase = database.db
stab.columns(_("Note"), _("Gramps ID"))
proxy_handles = dict([(handle,1) for handle in
database.iter_note_handles()])
proxy_handles = set(database.iter_note_handles())
for note in database.db.iter_notes():
if note.handle not in proxy_handles:
stab.row(note, note.gramps_id)
matches += 1
elif (filter_name in ['all people', 'Person']):
stab.columns(_("Person"), _("Gramps ID"), _("Birth Date"))
for person in database.iter_people():
stab.row(person, person.gramps_id, sdb.birth_or_fallback(person))
matches += 1
elif (filter_name in ['all families', 'Family']):
stab.columns(_("Family"), _("Gramps ID"))
for family in database.iter_families():
stab.row(family, family.gramps_id)
matches += 1
elif (filter_name in ['all events', 'Event']):
stab.columns(_("Event"), _("Gramps ID"))
for obj in database.iter_events():
stab.row(obj, obj.gramps_id)
matches += 1
elif (filter_name in ['all places', 'Place']):
stab.columns(_("Place"), _("Gramps ID"))
for obj in database.iter_places():
stab.row(obj, obj.gramps_id)
matches += 1
elif (filter_name in ['all sources', 'Source']):
stab.columns(_("Source"), _("Gramps ID"))
for obj in database.iter_sources():
stab.row(obj, obj.gramps_id)
matches += 1
elif (filter_name in ['all repositories', 'Repository']):
stab.columns(_("Repository"), _("Gramps ID"))
for obj in database.iter_repositories():
stab.row(obj, obj.gramps_id)
matches += 1
elif (filter_name in ['all media', 'MediaObject']):
stab.columns(_("Media"), _("Gramps ID"))
for obj in database.iter_media_objects():
stab.row(obj, obj.gramps_id)
matches += 1
elif (filter_name in ['all notes', 'Note']):
stab.columns(_("Note"), _("Gramps ID"))
for obj in database.iter_notes():
stab.row(obj, obj.gramps_id)
matches += 1
elif (filter_name == 'males'):
stab.columns(_("Person"), _("Birth Date"), _("Name type"))
for person in database.iter_people():
@ -226,6 +247,7 @@ def run(database, document, filter_name, *args, **kwargs):
stab.row(person, sdb.birth_or_fallback(person),
str(person.get_primary_name().get_type()))
matches += 1
elif (filter_name == 'females'):
stab.columns(_("Person"), _("Birth Date"), _("Name type"))
for person in database.iter_people():
@ -233,6 +255,7 @@ def run(database, document, filter_name, *args, **kwargs):
stab.row(person, sdb.birth_or_fallback(person),
str(person.get_primary_name().get_type()))
matches += 1
elif (filter_name == 'people with unknown gender'):
stab.columns(_("Person"), _("Birth Date"), _("Name type"))
for person in database.iter_people():
@ -240,6 +263,7 @@ def run(database, document, filter_name, *args, **kwargs):
stab.row(person, sdb.birth_or_fallback(person),
str(person.get_primary_name().get_type()))
matches += 1
elif (filter_name == 'people with incomplete names'):
stab.columns(_("Person"), _("Birth Date"), _("Name type"))
for person in database.iter_people():
@ -248,6 +272,7 @@ def run(database, document, filter_name, *args, **kwargs):
stab.row(person, sdb.birth_or_fallback(person),
str(person.get_primary_name().get_type()))
matches += 1
elif (filter_name == 'people with missing birth dates'):
stab.columns(_("Person"), _("Type"))
for person in database.iter_people():
@ -260,6 +285,7 @@ def run(database, document, filter_name, *args, **kwargs):
else:
stab.row(person, _("missing birth event"))
matches += 1
elif (filter_name == 'disconnected people'):
stab.columns(_("Person"), _("Birth Date"), _("Name type"))
for person in database.iter_people():
@ -268,6 +294,7 @@ def run(database, document, filter_name, *args, **kwargs):
stab.row(person, sdb.birth_or_fallback(person),
str(person.get_primary_name().get_type()))
matches += 1
elif (filter_name == 'unique surnames'):
namelist = defaultdict(int)
for person in database.iter_people():
@ -284,6 +311,7 @@ def run(database, document, filter_name, *args, **kwargs):
database,
document,
name))
elif (filter_name == 'people with media'):
stab.columns(_("Person"), _("Media count"))
for person in database.iter_people():
@ -291,6 +319,7 @@ def run(database, document, filter_name, *args, **kwargs):
if length > 0:
stab.row(person, length)
matches += 1
elif (filter_name == 'media references'):
stab.columns(_("Person"), _("Reference"))
for person in database.iter_people():
@ -298,12 +327,14 @@ def run(database, document, filter_name, *args, **kwargs):
for item in medialist:
stab.row(person, _("media"))
matches += 1
elif (filter_name == 'unique media'):
stab.columns(_("Unique Media"))
for photo in database.iter_media_objects():
fullname = media_path_full(database, photo.get_path())
stab.row(fullname)
matches += 1
elif (filter_name == 'missing media'):
stab.columns(_("Missing Media"))
for photo in database.iter_media_objects():
@ -313,6 +344,7 @@ def run(database, document, filter_name, *args, **kwargs):
except:
stab.row(fullname)
matches += 1
elif (filter_name == 'media by size'):
stab.columns(_("Media"), _("Size in bytes"))
for photo in database.iter_media_objects():
@ -323,6 +355,7 @@ def run(database, document, filter_name, *args, **kwargs):
matches += 1
except:
pass
elif (filter_name == 'list of people'):
stab.columns(_("Person"), _("Birth Date"), _("Name type"))
handles = kwargs["handles"]
@ -331,6 +364,7 @@ def run(database, document, filter_name, *args, **kwargs):
stab.row(person, sdb.birth_or_fallback(person),
str(person.get_primary_name().get_type()))
matches += 1
else:
raise AttributeError, ("invalid filter name: '%s'" % filter_name)
sdoc.paragraph(ngettext("Filter matched %d record."