Merge pull request #113 from sam-m888/useWithOpen
Prefer with to open files
This commit is contained in: commit b526a41af1
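The rationale for the change is the standard context-manager idiom: a file opened in a with block is closed automatically when the block exits, even if an exception is raised inside it, so the explicit close() calls disappear and file handles can no longer be leaked on an error path. A minimal sketch of the before/after pattern (the file name here is hypothetical, not taken from the diff):

    # before: close() must be called explicitly and is skipped if an
    # exception is raised between open() and close()
    file = open('example/name.txt', 'r', encoding='utf8')
    name = file.readline().strip()
    file.close()

    # after: the context manager closes the file in every case
    with open('example/name.txt', 'r', encoding='utf8') as file:
        name = file.readline().strip()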
@@ -234,9 +234,8 @@ class CLIDbManager(object):
                 except:
                     version = (0, 0, 0)
                 if os.path.isfile(path_name):
-                    file = open(path_name, 'r', encoding='utf8')
-                    name = file.readline().strip()
-                    file.close()
+                    with open(path_name, 'r', encoding='utf8') as file:
+                        name = file.readline().strip()

                     (tval, last) = time_val(dirpath)
                     (enable, stock_id) = self.icon_values(dirpath, self.active,
@@ -293,9 +292,8 @@ class CLIDbManager(object):
         name_list = [ name[0] for name in self.current_names ]
         title = find_next_db_name(name_list)

-        name_file = open(path_name, "w", encoding='utf8')
-        name_file.write(title)
-        name_file.close()
+        with open(path_name, "w", encoding='utf8') as name_file:
+            name_file.write(title)

         if create_db:
             # write the version number into metadata
@@ -409,9 +407,8 @@ class CLIDbManager(object):
             dirpath = os.path.join(dbdir, dpath)
             path_name = os.path.join(dirpath, NAME_FILE)
             if os.path.isfile(path_name):
-                file = open(path_name, 'r', encoding='utf8')
-                name = file.readline().strip()
-                file.close()
+                with open(path_name, 'r', encoding='utf8') as file:
+                    name = file.readline().strip()
                 if re.match("^" + dbname + "$", name):
                     match_list.append((name, dirpath))
         if len(match_list) == 0:
@@ -438,12 +435,10 @@ class CLIDbManager(object):
         Returns old_name, new_name if success, None, None if no success
         """
         try:
-            name_file = open(filepath, "r", encoding='utf8')
-            old_text=name_file.read()
-            name_file.close()
-            name_file = open(filepath, "w", encoding='utf8')
-            name_file.write(new_text)
-            name_file.close()
+            with open(filepath, "r", encoding='utf8') as name_file:
+                old_text=name_file.read()
+            with open(filepath, "w", encoding='utf8') as name_file:
+                name_file.write(new_text)
         except (OSError, IOError) as msg:
             CLIDbManager.ERROR(_("Could not rename Family Tree"),
                                str(msg))
@@ -543,11 +538,10 @@ def find_locker_name(dirpath):
     """
     try:
         fname = os.path.join(dirpath, "lock")
-        ifile = open(fname, 'r', encoding='utf8')
-        username = ifile.read().strip()
-        # feature request 2356: avoid genitive form
-        last = _("Locked by %s") % username
-        ifile.close()
+        with open(fname, 'r', encoding='utf8') as ifile:
+            username = ifile.read().strip()
+            # feature request 2356: avoid genitive form
+            last = _("Locked by %s") % username
    except (OSError, IOError, UnicodeDecodeError):
        last = _("Unknown")
    return last
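Note that in hunks like find_locker_name() above, the surrounding try/except is kept: a with block guarantees the file is closed, but it does not catch the OSError/IOError that open() or read() may raise. A small sketch of that combination, assuming a hypothetical lock-file path:

    def read_locker(fname='example/lock'):
        # hypothetical helper for illustration; not part of the patch
        try:
            with open(fname, 'r', encoding='utf8') as ifile:
                return ifile.read().strip()
        except (OSError, IOError, UnicodeDecodeError):
            # open() or read() failed; if the file was opened, it is closed already
            return None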
@@ -203,26 +203,23 @@ class DbState(Callback):
             dirpath = os.path.join(dbdir, dpath)
             path_name = os.path.join(dirpath, "name.txt")
             if os.path.isfile(path_name):
-                file = open(path_name, 'r', encoding='utf8')
-                name = file.readline().strip()
-                file.close()
+                with open(path_name, 'r', encoding='utf8') as file:
+                    name = file.readline().strip()
                 if dbname == name:
                     locked = False
                     locked_by = None
                     backend = None
                     fname = os.path.join(dirpath, "database.txt")
                     if os.path.isfile(fname):
-                        ifile = open(fname, 'r', encoding='utf8')
-                        backend = ifile.read().strip()
-                        ifile.close()
+                        with open(fname, 'r', encoding='utf8') as ifile:
+                            backend = ifile.read().strip()
                     else:
                         backend = "bsddb"
                     try:
                         fname = os.path.join(dirpath, "lock")
-                        ifile = open(fname, 'r', encoding='utf8')
-                        locked_by = ifile.read().strip()
-                        locked = True
-                        ifile.close()
+                        with open(fname, 'r', encoding='utf8') as ifile:
+                            locked_by = ifile.read().strip()
+                            locked = True
                     except (OSError, IOError):
                         pass
                     return (dirpath, locked, locked_by, backend)
@@ -103,9 +103,8 @@ class FilterList(object):
             if os.path.isfile(self.file):
                 parser = make_parser()
                 parser.setContentHandler(FilterParser(self))
-                the_file = open(self.file, 'r', encoding='utf8')
-                parser.parse(the_file)
-                the_file.close()
+                with open(self.file, 'r', encoding='utf8') as the_file:
+                    parser.parse(the_file)
         except (IOError, OSError):
             print("IO/OSError in _filterlist.py")
         except SAXParseException:
@@ -603,9 +603,8 @@ class GVDotDoc(GVDocBase):
         if self._filename[-3:] != ".gv":
             self._filename += ".gv"

-        dotfile = open(self._filename, "wb")
-        dotfile.write(self._dot.getvalue())
-        dotfile.close()
+        with open(self._filename, "wb") as dotfile:
+            dotfile.write(self._dot.getvalue())

 #-------------------------------------------------------------------------------
 #
@@ -146,31 +146,30 @@ class StyleSheetList(object):
         """
         Saves the current StyleSheet definitions to the associated file.
         """
-        xml_file = open(self.__file, "w")
-        xml_file.write('<?xml version="1.0" encoding="utf-8"?>\n')
-        xml_file.write('<stylelist>\n')
+        with open(self.__file, "w") as xml_file:
+            xml_file.write('<?xml version="1.0" encoding="utf-8"?>\n')
+            xml_file.write('<stylelist>\n')

-        for name in sorted(self.map.keys()): # enable diff of archived copies
-            if name == "default":
-                continue
-            sheet = self.map[name]
-            xml_file.write('<sheet name="%s">\n' % escxml(name))
+            for name in sorted(self.map.keys()): # enable diff of archived copies
+                if name == "default":
+                    continue
+                sheet = self.map[name]
+                xml_file.write('<sheet name="%s">\n' % escxml(name))

-            for p_name in sheet.get_paragraph_style_names():
-                self.write_paragraph_style(xml_file, sheet, p_name)
+                for p_name in sheet.get_paragraph_style_names():
+                    self.write_paragraph_style(xml_file, sheet, p_name)

-            for t_name in sheet.get_table_style_names():
-                self.write_table_style(xml_file, sheet, t_name)
+                for t_name in sheet.get_table_style_names():
+                    self.write_table_style(xml_file, sheet, t_name)

-            for c_name in sheet.get_cell_style_names():
-                self.write_cell_style(xml_file, sheet, c_name)
+                for c_name in sheet.get_cell_style_names():
+                    self.write_cell_style(xml_file, sheet, c_name)

-            for g_name in sheet.get_draw_style_names():
-                self.write_graphics_style(xml_file, sheet, g_name)
+                for g_name in sheet.get_draw_style_names():
+                    self.write_graphics_style(xml_file, sheet, g_name)

-            xml_file.write('</sheet>\n')
-        xml_file.write('</stylelist>\n')
-        xml_file.close()
+                xml_file.write('</sheet>\n')
+            xml_file.write('</stylelist>\n')

     def write_paragraph_style(self, xml_file, sheet, p_name):

@@ -275,9 +274,8 @@ class StyleSheetList(object):
             if os.path.isfile(self.__file):
                 parser = make_parser()
                 parser.setContentHandler(SheetParser(self))
-                the_file = open(self.__file)
-                parser.parse(the_file)
-                the_file.close()
+                with open(self.__file) as the_file:
+                    parser.parse(the_file)
         except (IOError, OSError, SAXParseException):
             pass

@@ -458,68 +458,67 @@ class BookList(object):
         """
         Saves the current BookList to the associated file.
         """
-        f = open(self.file, "w")
-        f.write("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n")
-        f.write('<booklist>\n')
-        for name in sorted(self.bookmap): # enable a diff of archived copies
-            book = self.get_book(name)
-            dbname = book.get_dbname()
-            f.write(' <book name="%s" database="%s">\n' % (name, dbname) )
-            for item in book.get_item_list():
-                f.write(' <item name="%s" trans_name="%s">\n' %
-                        (item.get_name(), item.get_translated_name() ) )
-                options = item.option_class.handler.options_dict
-                for option_name in sorted(options.keys()): # enable a diff
-                    option_value = options[option_name]
-                    if isinstance(option_value, (list, tuple)):
-                        f.write(' <option name="%s" value="" '
-                                'length="%d">\n' % (
-                                escape(option_name),
-                                len(options[option_name]) ) )
-                        for list_index in range(len(option_value)):
-                            option_type = type_name(option_value[list_index])
-                            value = escape(str(option_value[list_index]))
-                            value = value.replace('"', '&quot;')
-                            f.write(' <listitem number="%d" type="%s" '
-                                    'value="%s"/>\n' % (
-                                    list_index,
-                                    option_type,
-                                    value ) )
-                        f.write(' </option>\n')
-                    else:
-                        option_type = type_name(option_value)
-                        value = escape(str(option_value))
-                        value = value.replace('"', '&quot;')
-                        f.write(' <option name="%s" type="%s" '
-                                'value="%s"/>\n' % (
-                                escape(option_name),
-                                option_type,
-                                value) )
+        with open(self.file, "w") as f:
+            f.write("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n")
+            f.write('<booklist>\n')
+            for name in sorted(self.bookmap): # enable a diff of archived copies
+                book = self.get_book(name)
+                dbname = book.get_dbname()
+                f.write(' <book name="%s" database="%s">\n' % (name, dbname) )
+                for item in book.get_item_list():
+                    f.write(' <item name="%s" trans_name="%s">\n' %
+                            (item.get_name(), item.get_translated_name() ) )
+                    options = item.option_class.handler.options_dict
+                    for option_name in sorted(options.keys()): # enable a diff
+                        option_value = options[option_name]
+                        if isinstance(option_value, (list, tuple)):
+                            f.write(' <option name="%s" value="" '
+                                    'length="%d">\n' % (
+                                    escape(option_name),
+                                    len(options[option_name]) ) )
+                            for list_index in range(len(option_value)):
+                                option_type = type_name(option_value[list_index])
+                                value = escape(str(option_value[list_index]))
+                                value = value.replace('"', '&quot;')
+                                f.write(' <listitem number="%d" type="%s" '
+                                        'value="%s"/>\n' % (
+                                        list_index,
+                                        option_type,
+                                        value ) )
+                            f.write(' </option>\n')
+                        else:
+                            option_type = type_name(option_value)
+                            value = escape(str(option_value))
+                            value = value.replace('"', '&quot;')
+                            f.write(' <option name="%s" type="%s" '
+                                    'value="%s"/>\n' % (
+                                    escape(option_name),
+                                    option_type,
+                                    value) )

-                f.write(' <style name="%s"/>\n' % item.get_style_name() )
-                f.write(' </item>\n')
-            if book.get_paper_name():
-                f.write(' <paper name="%s"/>\n' % book.get_paper_name() )
-            if book.get_orientation() is not None: # 0 is legal
-                f.write(' <orientation value="%s"/>\n' %
-                        book.get_orientation() )
-            if book.get_paper_metric() is not None: # 0 is legal
-                f.write(' <metric value="%s"/>\n' % book.get_paper_metric() )
-            if book.get_custom_paper_size():
-                size = book.get_custom_paper_size()
-                f.write(' <size value="%f %f"/>\n' % (size[0], size[1]) )
-            if book.get_margins():
-                for pos in range(len(book.get_margins())):
-                    f.write(' <margin number="%s" value="%f"/>\n' %
-                            (pos, book.get_margin(pos)) )
-            if book.get_format_name():
-                f.write(' <format name="%s"/>\n' % book.get_format_name() )
-            if book.get_output():
-                f.write(' <output name="%s"/>\n' % book.get_output() )
-            f.write(' </book>\n')
+                    f.write(' <style name="%s"/>\n' % item.get_style_name() )
+                    f.write(' </item>\n')
+                if book.get_paper_name():
+                    f.write(' <paper name="%s"/>\n' % book.get_paper_name() )
+                if book.get_orientation() is not None: # 0 is legal
+                    f.write(' <orientation value="%s"/>\n' %
+                            book.get_orientation() )
+                if book.get_paper_metric() is not None: # 0 is legal
+                    f.write(' <metric value="%s"/>\n' % book.get_paper_metric() )
+                if book.get_custom_paper_size():
+                    size = book.get_custom_paper_size()
+                    f.write(' <size value="%f %f"/>\n' % (size[0], size[1]) )
+                if book.get_margins():
+                    for pos in range(len(book.get_margins())):
+                        f.write(' <margin number="%s" value="%f"/>\n' %
+                                (pos, book.get_margin(pos)) )
+                if book.get_format_name():
+                    f.write(' <format name="%s"/>\n' % book.get_format_name() )
+                if book.get_output():
+                    f.write(' <output name="%s"/>\n' % book.get_output() )
+                f.write(' </book>\n')

-        f.write('</booklist>\n')
-        f.close()
+            f.write('</booklist>\n')

     def parse(self):
         """
@@ -504,9 +504,8 @@ class OptionListCollection(_options.OptionListCollection):
             if os.path.isfile(self.filename):
                 p = make_parser()
                 p.setContentHandler(OptionParser(self))
-                the_file = open(self.filename, encoding="utf-8")
-                p.parse(the_file)
-                the_file.close()
+                with open(self.filename, encoding="utf-8") as the_file:
+                    p.parse(the_file)
         except (IOError, OSError, SAXParseException):
             pass

@@ -1000,9 +999,8 @@ class DocOptionListCollection(_options.OptionListCollection):
             if os.path.isfile(self.filename):
                 p = make_parser()
                 p.setContentHandler(DocOptionParser(self))
-                the_file = open(self.filename, encoding="utf-8")
-                p.parse(the_file)
-                the_file.close()
+                with open(self.filename, encoding="utf-8") as the_file:
+                    p.parse(the_file)
         except (IOError, OSError, SAXParseException):
             pass

@@ -279,9 +279,8 @@ def resize_to_jpeg_buffer(source, size, crop=None):
     scaled = img.scale_simple(int(size[0]), int(size[1]), GdkPixbuf.InterpType.BILINEAR)
     os.close(filed)
     scaled.savev(dest, "jpeg", "", "")
-    ofile = open(dest, mode='rb')
-    data = ofile.read()
-    ofile.close()
+    with open(dest, mode='rb') as ofile:
+        data = ofile.read()
     try:
         os.unlink(dest)
     except:
@@ -285,10 +285,8 @@ class HtmlDoc(BaseDoc, TextDoc):
         Copy support files to the datadir that needs to hold them
         """
         #css of textdoc styles
-        tdfile = open(os.path.join(self._backend.datadirfull(),
-                                   _TEXTDOCSCREEN), 'w')
-        tdfile.write(self.style_declaration)
-        tdfile.close()
+        with open(os.path.join(self._backend.datadirfull(), _TEXTDOCSCREEN), 'w') as tdfile:
+            tdfile.write(self.style_declaration)
         #css file
         if self.css_filename:
             #we do an extra check in case file does not exist, eg cli call
@@ -30,12 +30,11 @@ from test import test_util
 test_util.path_append_parent()

 def get_potfile(filename):
-    fp = open(filename, "r")
-    retvals = []
-    for line in fp:
-        if line and line[0] != "#":
-            retvals.append(line.strip())
-    fp.close()
+    with open(filename, "r") as fp:
+        retvals = []
+        for line in fp:
+            if line and line[0] != "#":
+                retvals.append(line.strip())
     return retvals

 # POTFILES.skip
@@ -55,9 +54,8 @@ class TestPOT(unittest.TestCase):
             realpath = (dir + "/" + file)
             pathfile = realpath[3:]
             if os.path.exists(realpath):
-                fp = open(realpath, "r")
-                lines = fp.read()
-                fp.close()
+                with open(realpath, "r") as fp:
+                    lines = fp.read()
                 found = False
                 for search in searches:
                     if search in lines:
@@ -88,9 +86,8 @@ class TestMake(unittest.TestCase):
            if pathfile[3:] in excluded_files:
                self.assertTrue(True, "exclude '%s'" % pathfile)
            elif os.path.exists(makefile):
-                fp = open(makefile, "r")
-                lines = fp.read()
-                fp.close()
+                with open(makefile, "r") as fp:
+                    lines = fp.read()
                self.assertTrue(filename in lines, "'%s' not in %s/Makefile.in" %
                                (filename, path))
            else:
@@ -107,9 +104,8 @@ class TestGetText(unittest.TestCase):
     def helper(self, pofile, searches):
         if not os.path.exists("../../" + pofile):
             self.assertTrue(False, "'%s' is in POTFILES.in and does not exist" % pofile)
-        fp = open("../../" + pofile, "r")
-        lines = fp.read()
-        fp.close()
+        with open("../../" + pofile, "r") as fp:
+            lines = fp.read()
         found = False
         for search in searches:
             found = (search in lines) or found
po/update_po.py
@@ -37,11 +37,11 @@ Examples:
     python update_po.py -p

     Generates a new template/catalog (gramps.pot).
-
+
     python update_po.py -m de.po

     Merges 'de.po' file with 'gramps.pot'.
-
+
     python update_po.py -k de.po

     Checks 'de.po' file, tests to compile and generates a textual resume.
@@ -55,7 +55,7 @@ from argparse import ArgumentParser

 # Windows OS

-if sys.platform == 'win32':
+if sys.platform == 'win32':
     # GetText Win 32 obtained from http://gnuwin32.sourceforge.net/packages/gettext.htm
     # ....\gettext\bin\msgmerge.exe needs to be on the path
     msgmergeCmd = os.path.join('C:', 'Program Files(x86)', 'gettext', 'bin', 'msgmerge.exe')
@@ -96,67 +96,64 @@ def tests():
         print ("\n====='msgmerge'=(merge our translation)================\n")
         os.system('''%(program)s -V''' % {'program': msgmergeCmd})
     except:
-        print ('Please, install %(program)s for updating your translation'
+        print ('Please, install %(program)s for updating your translation'
               % {'program': msgmergeCmd})

     try:
         print ("\n==='msgfmt'=(format our translation for installation)==\n")
         os.system('''%(program)s -V''' % {'program': msgfmtCmd})
     except:
-        print ('Please, install %(program)s for checking your translation'
+        print ('Please, install %(program)s for checking your translation'
               % {'program': msgfmtCmd})

     try:
         print ("\n===='msgattrib'==(list groups of messages)=============\n")
         os.system('''%(program)s -V''' % {'program': msgattribCmd})
     except:
-        print ('Please, install %(program)s for listing groups of messages'
+        print ('Please, install %(program)s for listing groups of messages'
               % {'program': msgattribCmd})

     try:
         print("\n===='xgettext' =(generate a new template)==============\n")
         os.system('''%(program)s -V''' % {'program': xgettextCmd})
     except:
-        print ('Please, install %(program)s for generating a new template'
+        print ('Please, install %(program)s for generating a new template'
               % {'program': xgettextCmd})

     try:
         print("\n=================='python'=============================\n")
         os.system('''%(program)s -V''' % {'program': pythonCmd})
     except:
         print ('Please, install python')

 def TipsParse(filename, mark):
     """
     Experimental alternative to 'intltool-extract' for 'tips.xml'.
     """

     from xml.etree import ElementTree

     tree = ElementTree.parse(filename)
     root = tree.getroot()

     '''
     <?xml version="1.0" encoding="UTF-8"?>
     <tips>
       <_tip number="1">
         <b>Working with Dates</b>
             <br/>
-        A range of dates can be given by using the format "between
-        January 4, 2000 and March 20, 2003". You can also indicate
-        the level of confidence in a date and even choose between seven
+        A range of dates can be given by using the format "between
+        January 4, 2000 and March 20, 2003". You can also indicate
+        the level of confidence in a date and even choose between seven
         different calendars. Try the button next to the date field in the
         Events Editor.
       </_tip>

-    char *s = N_("<b>Working with Dates</b><br/>A range of dates can be
+    char *s = N_("<b>Working with Dates</b><br/>A range of dates can be
     given by using the format "between January 4, 2000 and March 20,
-    2003". You can also indicate the level of confidence in a date
-    and even choose between seven different calendars. Try the button
+    2003". You can also indicate the level of confidence in a date
+    and even choose between seven different calendars. Try the button
     next to the date field in the Events Editor.");

     gramps.pot:
     msgid ""
     "<b>Working with Dates</b><br/>A range of dates can be given by using the "
@@ -165,34 +162,32 @@ def TipsParse(filename, mark):
     "different calendars. Try the button next to the date field in the Events "
     "Editor."
     '''

-    tips = open('../data/tips.xml.in.h', 'w')
-    marklist = root.iter(mark)
-    for key in marklist:
-        tip = ElementTree.tostring(key, encoding="UTF-8", method="xml")
-        if sys.version_info[0] < 3:
-            tip = tip.replace("<?xml version='1.0' encoding='UTF-8'?>", "")
-            tip = tip.replace('\n<_tip number="%(number)s">' % key.attrib, "")
-        else: # python3 support
-            tip = tip.decode("utf-8")
-            tip = tip.replace('<_tip number="%(number)s">' % key.attrib, "")
-        tip = tip.replace("<br />", "<br/>")
-        #tip = tip.replace("\n</_tip>\n", "</_tip>\n") # special case tip 7
-        #tip = tip.replace("\n<b>", "<b>") # special case tip 18
-        tip = tip.replace("</_tip>\n\n", "")
-        tip = tip.replace('"', '&quot;')
-        tips.write('char *s = N_("%s");\n' % tip)
-    tips.close()
+    with open('../data/tips.xml.in.h', 'w') as tips:
+        marklist = root.iter(mark)
+        for key in marklist:
+            tip = ElementTree.tostring(key, encoding="UTF-8", method="xml")
+            if sys.version_info[0] < 3:
+                tip = tip.replace("<?xml version='1.0' encoding='UTF-8'?>", "")
+                tip = tip.replace('\n<_tip number="%(number)s">' % key.attrib, "")
+            else: # python3 support
+                tip = tip.decode("utf-8")
+                tip = tip.replace('<_tip number="%(number)s">' % key.attrib, "")
+            tip = tip.replace("<br />", "<br/>")
+            #tip = tip.replace("\n</_tip>\n", "</_tip>\n") # special case tip 7
+            #tip = tip.replace("\n<b>", "<b>") # special case tip 18
+            tip = tip.replace("</_tip>\n\n", "")
+            tip = tip.replace('"', '&quot;')
+            tips.write('char *s = N_("%s");\n' % tip)
     print ('Wrote ../data/tips.xml.in.h')
     root.clear()

 def HolidaysParse(filename, mark):
     """
     Experimental alternative to 'intltool-extract' for 'holidays.xml'.
     """

     from xml.etree import ElementTree

     tree = ElementTree.parse(filename)
     root = tree.getroot()
     ellist = root.iter()
@@ -204,26 +199,24 @@ def HolidaysParse(filename, mark):
     ..
     <country _name="Jewish Holidays">
       <date _name="Yom Kippur" value="> passover(y)" offset="172"/>

     char *s = N_("Bulgaria");
     char *s = N_("Jewish Holidays");
     char *s = N_("Yom Kippur");

     gramps.pot:
     msgid "Bulgaria"
     msgid "Jewish Holidays"
     msgid "Yom Kippur"
     '''

-    holidays = open('../data/holidays.xml.in.h', 'w')
-    for key in ellist:
-        if key.attrib.get(mark):
-            line = key.attrib
-            string = line.items
-            # mapping via the line dict (_name is the key)
-            name = 'char *s = N_("%(_name)s");\n' % line
-            holidays.write(name)
-    holidays.close()
+    with open('../data/holidays.xml.in.h', 'w') as holidays:
+        for key in ellist:
+            if key.attrib.get(mark):
+                line = key.attrib
+                string = line.items
+                # mapping via the line dict (_name is the key)
+                name = 'char *s = N_("%(_name)s");\n' % line
+                holidays.write(name)
     print ('Wrote ../data/holidays.xml.in.h')
     root.clear()
@@ -232,12 +225,11 @@ def XmlParse(filename, mark):
     """
     Experimental alternative to 'intltool-extract' for 'file.xml.in'.
     """

     from xml.etree import ElementTree

     tree = ElementTree.parse(filename)
     root = tree.getroot()

     '''
     <?xml version="1.0" encoding="UTF-8"?>

@@ -252,47 +244,44 @@ def XmlParse(filename, mark):
       <glob pattern="*.gedcom"/>
       <glob pattern="*.GED"/>
       <glob pattern="*.GEDCOM"/>

     msgid "Gramps database"
     msgid "GEDCOM"

     <_p> Gramps is a free software project and community.
     We strive to produce a genealogy program that is both intuitive for hobbyists
     and feature-complete for professional genealogists.
     </p>
     '''

-    head = open(filename + '.h', 'w')
-
-    for key in root.iter():
-        if key.tag == '{http://www.freedesktop.org/standards/shared-mime-info}%s' % mark:
-            comment = 'char *s = N_("%s");\n' % key.text
-            head.write(comment)
-
-    if root.tag == 'application':
-        for key in root.iter():
-            if key.tag == mark:
-                comment = 'char *s = N_("%s");\n' % key.text
-                head.write(comment)
-
-    head.close()
+    with open(filename + '.h', 'w') as head:
+        for key in root.iter():
+            if key.tag == '{http://www.freedesktop.org/standards/shared-mime-info}%s' % mark:
+                comment = 'char *s = N_("%s");\n' % key.text
+                head.write(comment)
+
+        if root.tag == 'application':
+            for key in root.iter():
+                if key.tag == mark:
+                    comment = 'char *s = N_("%s");\n' % key.text
+                    head.write(comment)

     print ('Wrote %s' % filename)
     root.clear()

 def DesktopParse(filename):
     """
     Experimental alternative to 'intltool-extract' for 'gramps.desktop'.
     """

     '''
     [Desktop Entry]
     _Name=Gramps
     _GenericName=Genealogy System
     _X-GNOME-FullName=Gramps Genealogy System
-    _Comment=Manage genealogical information,
+    _Comment=Manage genealogical information,
              perform genealogical research and analysis

     msgid "Gramps"
     msgid "Genealogy System"
     msgid "Gramps Genealogy System"
@@ -300,29 +289,25 @@ def DesktopParse(filename):
     "Manage genealogical information,
     perform genealogical research and analysis"
     '''

-    desktop = open('../data/gramps.desktop.in.h', 'w')
-
-    f = open(filename)
-    lines = [file.strip() for file in f]
-    f.close()
-
-    for line in lines:
-        if line[0] == '_':
-            for i in range(len(line)):
-                if line[i] == '=':
-                    val = 'char *s = N_("%s");\n' % line[i+1:len(line)]
-                    desktop.write(val)
-
-    desktop.close()
+    with open('../data/gramps.desktop.in.h', 'w') as desktop:
+
+        with open(filename) as f:
+            lines = [file.strip() for file in f]
+
+        for line in lines:
+            if line[0] == '_':
+                for i in range(len(line)):
+                    if line[i] == '=':
+                        val = 'char *s = N_("%s");\n' % line[i+1:len(line)]
+                        desktop.write(val)

     print ('Wrote ../data/gramps.desktop.in.h')

 def KeyParse(filename, mark):
     """
     Experimental alternative to 'intltool-extract' for 'gramps.keys'.
     """

     '''
     application/x-gramps-xml:
     _description=Gramps XML database
@@ -338,48 +323,44 @@ def KeyParse(filename, mark):
     application/x-gedcom:
     _description=GEDCOM
     default_action_type=application

     msgid "Gramps XML database"
     msgid "GEDCOM"
     '''

-    key = open('../data/gramps.keys.in.h', 'w')
-
-    f = open(filename)
-    lines = [file for file in f]
-    f.close()
-
-    temp = []
-
-    for line in lines:
-        for i in range(len(line)):
-            if line[i:i+12] == mark:
-                temp.append(line.strip())
-
-    for t in temp:
-        for i in range(len(t)):
-            if t[i] == '=':
-                val = 'char *s = N_("%s");\n' % t[i+1:len(t)]
-                key.write(val)
-
-    key.close()
+    with open('../data/gramps.keys.in.h', 'w') as key:
+
+        with open(filename) as f:
+            lines = [file for file in f]
+
+        temp = []
+
+        for line in lines:
+            for i in range(len(line)):
+                if line[i:i+12] == mark:
+                    temp.append(line.strip())
+
+        for t in temp:
+            for i in range(len(t)):
+                if t[i] == '=':
+                    val = 'char *s = N_("%s");\n' % t[i+1:len(t)]
+                    key.write(val)

     print ('Wrote ../data/gramps.keys.in.h')

 def main():
     """
     The utility for handling translation stuff.
     What is need by Gramps, nothing more.
     """

-    parser = ArgumentParser(
+    parser = ArgumentParser(
                          description='This program generates a new template and '
-                                     'also provides some common features.',
+                                     'also provides some common features.',
                          )
     parser.add_argument("-t", "--test",
             action="store_true", dest="test", default=True,
             help="test if 'python' and 'gettext' are properly installed")

     parser.add_argument("-x", "--xml",
             action="store_true", dest="xml", default=False,
             help="extract messages from xml based file formats")
@@ -392,90 +373,88 @@ def main():
     parser.add_argument("-p", "--pot",
             action="store_true", dest="catalog", default=False,
             help="create a new catalog")

     update = parser.add_argument_group('Update', 'Maintenance around translations')

     # need at least one argument (sv.po, de.po, etc ...)

-    # lang.po files maintenance
+    # lang.po files maintenance
     update.add_argument("-m", dest="merge",
             choices=LANG,
             help="merge lang.po files with last catalog")

     update.add_argument("-k", dest="check",
             choices=LANG,
             help="check lang.po files")

-    # testing stage
+    # testing stage
     trans = parser.add_argument_group('Translation', 'Display content of translations file')

     # need one argument (eg, de.po)

-    trans.add_argument("-u", dest="untranslated",
+    trans.add_argument("-u", dest="untranslated",
             choices=[file for file in os.listdir('.') if file.endswith('.po')],
             help="list untranslated messages")
     trans.add_argument("-f", dest="fuzzy",
             choices=[file for file in os.listdir('.') if file.endswith('.po')],
             help="list fuzzy messages")

     args = parser.parse_args()
     namespace, extra = parser.parse_known_args()

     if args.test:
         tests()

     if args.xml:
         extract_xml()

     if args.glade:
         create_filesfile()
         extract_glade()
         if os.path.isfile('tmpfiles'):
             os.unlink('tmpfiles')

     if args.catalog:
         retrieve()

     if args.clean:
         clean()

     if args.merge:
         #retrieve() windows os?
         if sys.argv[2:] == ['all']:
             sys.argv[2:] = LANG
         merge(sys.argv[2:])

     if args.check:
         #retrieve() windows os?
         if sys.argv[2:] == ['all']:
             sys.argv[2:] = LANG
         check(sys.argv[2:])

     if args.untranslated:
         untranslated(sys.argv[2:])

     if args.fuzzy:
         fuzzy(sys.argv[2:])

 def create_filesfile():
     """
     Create a file with all files that we should translate.
-    These are all python files not in POTFILES.skip added with those in
+    These are all python files not in POTFILES.skip added with those in
     POTFILES.in
     """
     dir = os.getcwd()
     topdir = os.path.normpath(os.path.join(dir, '..', 'gramps'))
     lentopdir = len(topdir)
-    f = open('POTFILES.in')
-    infiles = dict(['../' + file.strip(), None] for file in f if file.strip()
-                   and not file[0]=='#')
-    f.close()
-    f = open('POTFILES.skip')
-    notinfiles = dict(['../' + file.strip(), None] for file in f if file
-                      and not file[0]=='#')
-    f.close()
+    with open('POTFILES.in') as f:
+        infiles = dict(['../' + file.strip(), None] for file in f if file.strip()
+                       and not file[0]=='#')
+
+    with open('POTFILES.skip') as f:
+        notinfiles = dict(['../' + file.strip(), None] for file in f if file
+                          and not file[0]=='#')

     for (dirpath, dirnames, filenames) in os.walk(topdir):
         root, subdir = os.path.split(dirpath)
         if subdir.startswith("."):
@@ -490,7 +469,7 @@ def create_filesfile():
         # if the directory does not exist or is a link, do nothing
         if not os.path.isdir(dirpath) or os.path.islink(dirpath):
             continue
-
+
         for filename in os.listdir(dirpath):
             name = os.path.split(filename)[1]
             if name.endswith('.py') or name.endswith('.glade'):
@@ -499,11 +478,10 @@ def create_filesfile():
                 if full_filename[lentopdir:] in notinfiles:
                     infiles['../gramps' + full_filename[lentopdir:]] = None
     #now we write out all the files in form ../gramps/filename
-    f = open('tmpfiles', 'w')
-    for file in sorted(infiles.keys()):
-        f.write(file)
-        f.write('\n')
-    f.close()
+    with open('tmpfiles', 'w') as f:
+        for file in sorted(infiles.keys()):
+            f.write(file)
+            f.write('\n')

 def listing(name, extensionlist):
     """
@@ -511,26 +489,22 @@ def listing(name, extensionlist):
     Parsing from a textual file (gramps) is faster and easy for maintenance.
     Like POTFILES.in and POTFILES.skip
     """

-    f = open('tmpfiles')
-    files = [file.strip() for file in f if file and not file[0]=='#']
-    f.close()
-
-    temp = open(name, 'w')
-
-    for entry in files:
-        for ext in extensionlist:
-            if entry.endswith(ext):
-                temp.write(entry)
-                temp.write('\n')
-                break
-
-    temp.close()
+    with open('tmpfiles') as f:
+        files = [file.strip() for file in f if file and not file[0]=='#']
+
+    with open(name, 'w') as temp:
+        for entry in files:
+            for ext in extensionlist:
+                if entry.endswith(ext):
+                    temp.write(entry)
+                    temp.write('\n')
+                    break

 def headers():
     """
     Look at existing C file format headers.
-    Generated by 'intltool-extract' but want to get rid of this
+    Generated by 'intltool-extract' but want to get rid of this
     dependency (perl, just a set of tools).
     """
     headers = []
@@ -550,7 +524,7 @@ def headers():
     headers.append('''../data/gramps.appdata.xml.in.h''')
     if os.path.isfile('''gtklist.h'''):
         headers.append('''gtklist.h''')
-
+
     return headers

 def extract_xml():
@@ -558,21 +532,20 @@ def extract_xml():
     Extract translation strings from XML based, keys, mime and desktop
     files. Own XML files parsing and custom translation marks.
     """

     HolidaysParse('../data/holidays.xml.in', '_name')
     TipsParse('../data/tips.xml.in', '_tip')
     XmlParse('../data/gramps.xml.in', '_comment')
     XmlParse('../data/gramps.appdata.xml.in', '_p')
     DesktopParse('../data/gramps.desktop.in')
     KeyParse('../data/gramps.keys.in', '_description')

 def create_template():
     """
     Create a new file for template, if it does not exist.
     """
-    template = open('gramps.pot', 'w')
-    template.close()
+    with open('gramps.pot', 'w') as template:
+        pass

 def extract_glade():
     """
     Extract messages from a temp file with all .glade
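The create_template() hunk above shows the edge case of a file that is opened only to be created: opening 'gramps.pot' in 'w' mode creates the file (or truncates an existing one), so the empty with block replaces the former open()/close() pair. A standalone sketch of the same idiom, with a hypothetical path:

    import os

    def touch_empty(path='example.pot'):
        # hypothetical helper for illustration; 'w' mode creates or truncates
        # the file, and the with block closes it again immediately
        with open(path, 'w'):
            pass
        return os.path.exists(path)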
@@ -592,9 +565,9 @@ def extract_gtkbuilder():
     https://savannah.gnu.org/bugs/index.php?29216
     See bug reports #6595, #5621
     """
-
+
     from xml.etree import ElementTree
-
+
     '''
     <?xml version="1.0" encoding="UTF-8"?>
     <interface>
@@ -617,26 +590,24 @@ def extract_gtkbuilder():
         </row>
       </data>
     </object>

     msgid "All rules must apply"
     msgid "At least one rule must apply"
     msgid "Exactly one rule must apply"
     '''

     files = ['../gramps/plugins/importer/importgedcom.glade', '../gramps/gui/glade/rule.glade']
-    temp = open('gtklist.h', 'w')
-
-    for filename in files:
-        tree = ElementTree.parse(filename)
-        root = tree.getroot()
-        for line in root.iter():
-            att = line.attrib
-            if att == {'id': '0', 'translatable': 'yes'}:
-                col = 'char *s = N_("%s");\n' % line.text
-                temp.write(col)
-        root.clear()
-
-    temp.close()
+    with open('gtklist.h', 'w') as temp:
+        for filename in files:
+            tree = ElementTree.parse(filename)
+            root = tree.getroot()
+            for line in root.iter():
+                att = line.attrib
+                if att == {'id': '0', 'translatable': 'yes'}:
+                    col = 'char *s = N_("%s");\n' % line.text
+                    temp.write(col)
+            root.clear()

     print ('Wrote gtklist.h')

 def retrieve():
@@ -644,14 +615,14 @@ def retrieve():
     Extract messages from all files used by Gramps (python, glade, xml)
     """
     extract_xml()
-
+
     extract_gtkbuilder()
-
+
     create_template()
-
+
     create_filesfile()
     listing('python.txt', ['.py', '.py.in'])
-
+
     # additional keywords must always be kept in sync with those in genpot.sh
     os.system('''%(xgettext)s -F -c -j --directory=./ -d gramps '''
               '''-L Python -o gramps.pot --files-from=python.txt '''
@@ -659,14 +630,14 @@ def retrieve():
               '''--keyword=_T_ --keyword=trans_text '''
              '''--keyword=sgettext --from-code=UTF-8''' % {'xgettext': xgettextCmd}
             )
-
+
     extract_glade()
-
+
     # C format header (.h extension)
     for h in headers():
         print ('xgettext for %s' % h)
         os.system('''%(xgettext)s -F --add-comments -j -o gramps.pot '''
-                  '''--keyword=N_ --from-code=UTF-8 %(head)s'''
+                  '''--keyword=N_ --from-code=UTF-8 %(head)s'''
                   % {'xgettext': xgettextCmd, 'head': h}
                  )
     clean()
@@ -696,10 +667,9 @@ def merge(args):
     """
     Merge messages with 'gramps.pot'
     """
-
     for arg in args:
         if arg == 'all':
-            continue
+            continue
         print ('Merge %(lang)s with current template' % {'lang': arg})
         os.system('''%(msgmerge)s %(lang)s gramps.pot -o updated_%(lang)s''' \
                   % {'msgmerge': msgmergeCmd, 'lang': arg})
@@ -709,7 +679,6 @@ def check(args):
     """
     Check the translation file
     """
-
     for arg in args:
         if arg == 'all':
             continue
@@ -724,14 +693,12 @@ def untranslated(arg):
     """
     List untranslated messages
     """
-
     os.system('''%(msgattrib)s --untranslated %(lang.po)s''' % {'msgattrib': msgattribCmd, 'lang.po': arg[0]})

 def fuzzy(arg):
     """
     List fuzzy messages
     """
-
     os.system('''%(msgattrib)s --only-fuzzy --no-obsolete %(lang.po)s''' % {'msgattrib': msgattribCmd, 'lang.po': arg[0]})

 if __name__ == "__main__":