Warning, /sdk/pology/bin/posummit is written in an unsupported language. File is not indexed.
0001 #!/usr/bin/env python3
0002 # -*- coding: UTF-8 -*-
0003
0004 import copy
0005 from difflib import SequenceMatcher
0006 import filecmp
0007 import hashlib
0008 import locale
0009 import os
0010 import re
0011 import shutil
0012 import sys
0013 import time
0014 from functools import reduce
0015
# Make the pology package importable when running from the source tree;
# in an installed layout this helper module does not exist.
try:
    import fallback_import_paths
except ImportError:
    # Only a missing module is expected here; any other failure inside
    # the helper should propagate instead of being silently swallowed
    # (the original bare `except:` hid such errors).
    pass
0020
0021 from pology import version, _, n_, t_, PologyError
0022 from pology.ascript import collect_ascription_associations
0023 from pology.ascript import collect_ascription_history
0024 from pology.ascript import make_ascription_selector
0025 from pology.catalog import Catalog
0026 from pology.header import Header, format_datetime
0027 from pology.message import Message, MessageUnsafe
0028 from pology.colors import ColorOptionParser
0029 from pology.fsops import str_to_unicode, unicode_to_str
0030 from pology.fsops import mkdirpath, assert_system, collect_system
0031 from pology.fsops import getucwd, join_ncwd
0032 from pology.fsops import collect_paths_cmdline, build_path_selector
0033 from pology.fsops import exit_on_exception
0034 from pology.merge import merge_pofile
0035 from pology.monitored import Monpair, Monlist
0036 from pology.msgreport import report_on_msg
0037 from pology.report import report, error, warning, format_item_list
0038 from pology.report import init_file_progress
0039 from pology.stdcmdopt import add_cmdopt_incexc, add_cmdopt_filesfrom
0040 from pology.vcs import make_vcs
0041 from pology.wrap import select_field_wrapping
0042
0043
0044 SUMMIT_ID = "+" # must not start with word-character (\w)
0045
0046
def main ():
    """
    Command-line entry point for posummit.

    Parses options, locates the summit configuration file (either in a
    parent directory or as the first free argument), reads the project
    definition, derives full project data, and then runs the requested
    operation modes (gather/scatter/merge/deps) in order.
    """

    locale.setlocale(locale.LC_ALL, "")

    # Setup options and parse the command line.
    usage = _("@info command usage",
        "\n"
        "  %(cmd)s [OPTIONS] CFGFILE LANG OPMODE [PARTIAL...]\n"
        "    (if there is no '%(cfgfile)s' file in a parent directory)\n"
        "  %(cmd)s [OPTIONS] OPMODE [PARTIAL...]\n"
        "    (if there is a '%(cfgfile)s' file in a parent directory)",
        cmd="%prog", cfgfile="summit-config")
    desc = _("@info command description",
        "Translate PO files spread across different branches "
        "in a unified fashion.")
    ver = _("@info command version",
        "%(cmd)s (Pology) %(version)s\n"
        "Copyright © 2007, 2008, 2009, 2010 "
        "Chusslove Illich (Часлав Илић) <%(email)s>",
        cmd="%prog", version=version(), email="caslav.ilic@gmx.net")

    opars = ColorOptionParser(usage=usage, description=desc, version=ver)
    opars.add_option(
        "-a", "--asc-filter",
        action="store", dest="asc_filter", default=None,
        help=_("@info command line option description",
               "Apply a non-default ascription filter on scatter."))
    opars.add_option(
        "--create",
        action="store_true", dest="create", default=False,
        help=_("@info command line option description",
               "Allow creation of new summit catalogs."))
    opars.add_option(
        "--force",
        action="store_true", dest="force", default=False,
        help=_("@info command line option description",
               "Force some operations that are normally not advised."))
    opars.add_option(
        "-q", "--quiet",
        action="store_true", dest="quiet", default=False,
        help=_("@info command line option description",
               "Output less detailed progress info."))
    opars.add_option(
        "-v", "--verbose",
        action="store_true", dest="verbose", default=False,
        help=_("@info command line option description",
               "Output more detailed progress info"))
    add_cmdopt_filesfrom(opars)
    add_cmdopt_incexc(opars)

    options, free_args = opars.parse_args(str_to_unicode(sys.argv[1:]))

    # Look for the config file through parent directories.
    parent = getucwd()
    cfgpath = None
    while True:
        for cfgname in ("summit-config",):
            cfgpath1 = os.path.join(parent, cfgname)
            if os.path.isfile(cfgpath1):
                cfgpath = cfgpath1
                break
        if cfgpath:
            break
        pparent = parent
        parent = os.path.dirname(parent)
        if parent == pparent:
            # Reached the filesystem root without finding the file.
            break

    # If config file not found, expect it and language as arguments.
    if not cfgpath:
        if len(free_args) < 1:
            error(_("@info",
                    "Summit configuration file neither found "
                    "as '%(cfgfile)s' in parent directories, "
                    "nor given in command line.",
                    cfgfile="summit-config"))
        cfgpath = free_args.pop(0)
        if not os.path.isfile(cfgpath):
            error(_("@info",
                    "Summit configuration file '%(file)s' does not exist.",
                    file=cfgpath))

        if len(free_args) < 1:
            error(_("@info",
                    "Language code not given."))
        lang = free_args.pop(0)
    else:
        lang = None
        # ...will be read from config file.

    if len(free_args) < 1:
        error(_("@info", "Operation mode not given."))
    # Several modes may be given comma-separated; deduplicate but keep order.
    opmodes = free_args.pop(0).split(",")
    opmodes_uniq = []
    for opmode in opmodes:
        if opmode not in opmodes_uniq:
            if opmode not in ("gather", "scatter", "merge", "deps"):
                error(_("@info",
                        "Unknown operation mode '%(mode)s'.",
                        mode=opmode))
            opmodes_uniq.append(opmode)
    opmodes = opmodes_uniq

    # NOTE: a legacy "import psyco" speedup block was removed here;
    # psyco is a Python-2-only JIT and can never import on Python 3.

    # Read project definition.
    project = Project(lang, opmodes, options)
    project.include(cfgpath)

    # In summit-over-templates mode, determine if templates are dynamic.
    project.templates_dynamic = ( project.over_templates
                                  and not project.summit.get("topdir_templates"))

    # If config file was found in parent directories,
    # it should have defined the language itself.
    # Otherwise, its language is set to language given in command line.
    if not lang:
        if not project.lang:
            error(_("@info",
                    "Language code not set in configuration file."))
        lang = project.lang
    else:
        project.lang = lang

    # In summit-over-templates mode, derive special project data
    # for implicitly gathering templates on merge.
    if project.templates_dynamic and "merge" in project.opmodes:
        project.toptions = copy.copy(options)
        project.toptions.quiet = True
        project.tproject = Project(project.templates_lang, ["gather"],
                                   project.toptions)
        project.tproject.include(cfgpath)
        project.tproject.templates_dynamic = False
        project.tproject.summit_version_control = "none"
        project.tproject.summit_wrap = False # performance
        project.tproject.summit_fine_wrap = False # performance
        tpd = project.tproject.summit.get("topdir_templates")
        if tpd is None:
            # FIXME: Portability.
            tpd = "/tmp/summit-templates-%d" % os.getpid()
        project.tproject.summit["topdir"] = tpd
        for tb in project.tproject.branches:
            tbpd = tb.get("topdir_templates")
            if tbpd is not None:
                tb["topdir"] = tbpd
        project.tproject.lang = project.templates_lang
        project.tproject = derive_project_data(project.tproject,
                                               project.toptions,
                                               project.summit["topdir"])
        project.summit["topdir_templates"] = tpd

    # Explicit gathering in summit-over-templates mode
    # may be useful to check if gathering works.
    # Make some adjustments for this to go smoothly.
    if ( project.templates_dynamic and "gather" in project.opmodes
         and project.lang == project.templates_lang
    ):
        options.create = True
        project.summit["topdir"] = project.summit["topdir_templates"]
        project.summit_version_control = "none"

    # Derive project data.
    project = derive_project_data(project, options)

    # Collect partial processing specs and inclusion-exclusion test.
    specargs, ffself = collect_paths_cmdline(rawpaths=free_args,
                                             filesfrom=options.files_from,
                                             getsel=True, abort=True)
    options.partspecs, options.partbids = collect_partspecs(project, specargs)
    if not options.files_from:
        # If there was no from-file input and no partial processing specs
        # were collected, indicate operation on the whole summit.
        if not options.partspecs:
            options.partspecs = None
        if not options.partbids:
            options.partbids = None
    cmdself = build_path_selector(incnames=options.include_names,
                                  incpaths=options.include_paths,
                                  excnames=options.exclude_names,
                                  excpaths=options.exclude_paths)
    # A catalog is selected only if both command-line and files-from
    # selectors accept it.
    options.selcatf = lambda x: cmdself(x) and ffself(x)

    # Invoke the appropriate operations on collected bundles.
    for opmode in opmodes:
        if options.verbose:
            report(_("@info:progress",
                     "-----> Processing mode: %(mode)s",
                     mode=opmode))
        if opmode == "gather":
            summit_gather(project, options)
        elif opmode == "scatter":
            summit_scatter(project, options)
        elif opmode == "merge":
            summit_merge(project, options)
        elif opmode == "deps":
            summit_deps(project, options)
0248
0249
class Project (object):
    """
    Summit project configuration.

    All recognized configuration fields are pre-declared in the
    constructor. While a configuration file is being included the
    instance is "locked": assigning any attribute not already declared
    aborts with an error, which catches typos in configuration files.
    """

    def __init__ (self, lang, opmodes, options):
        """
        Create a project shell with default field values.

        lang -- language code, or None to be read from the config file
        opmodes -- list of operation modes to be run
        options -- parsed command-line options object
        """

        # Populate the instance dictionary directly, bypassing
        # __setattr__ (which would reject fields while locked).
        self.__dict__.update({
            "lang" : lang,
            "opmodes" : opmodes,
            "options" : options,

            "summit" : {},
            "branches" : [],
            "mappings" : [],
            "subdir_mappings" : [],
            "subdir_precedence" : [],

            "over_templates" : False,
            "templates_lang" : "templates",

            "summit_wrap" : False,
            "summit_fine_wrap" : True,
            "summit_fuzzy_merging" : True,
            "branches_wrap" : True,
            "branches_fine_wrap" : True,
            "branches_fuzzy_merging" : True,

            "version_control" : "",
            "summit_version_control" : "",
            "branches_version_control" : "",

            "hook_on_scatter_msgstr" : [],
            "hook_on_scatter_msg" : [],
            "hook_on_scatter_cat" : [],
            "hook_on_scatter_file" : [],
            "hook_on_scatter_branch": [],
            "hook_on_gather_msg" : [],
            "hook_on_gather_msg_branch" : [],
            "hook_on_gather_cat" : [],
            "hook_on_gather_cat_branch" : [],
            "hook_on_gather_file" : [],
            "hook_on_gather_file_branch" : [],
            "hook_on_merge_msg" : [],
            "hook_on_merge_head" : [],
            "hook_on_merge_cat" : [],
            "hook_on_merge_file" : [],

            "header_propagate_fields" : [],
            "header_skip_fields_on_scatter" : [],

            "vivify_on_merge" : False,
            "vivify_w_translator" : "Simulacrum",
            "vivify_w_langteam" : "Nevernessian",
            "vivify_w_language" : "",
            "vivify_w_charset" : "UTF-8",
            "vivify_w_plurals" : "",

            "compendium_on_merge" : "",
            "compendium_fuzzy_exact" : False,
            "compendium_min_words_exact" : 0,

            "merge_min_adjsim_fuzzy" : 0.0,
            "merge_rebase_fuzzy" : False,

            "scatter_min_completeness" : 0.0,
            "scatter_acc_completeness" : 0.0,

            "ascription_filters" : [],
            "ascription_history_filter" : None,
        })
        # "locked" must be set directly too, before any normal
        # assignment goes through __setattr__ below.
        self.__dict__["locked"] = False

        # Stack of config files currently being included,
        # used for relative path resolution and cycle detection.
        self.inclusion_trail = []


    def __setattr__ (self, att, val):
        """Assign a field, rejecting unknown fields while locked."""

        # TODO: Do extensive checks.
        if self.locked and att not in self.__dict__:
            error(_("@info",
                    "Unknown summit configuration field '%(field)s'.",
                    field=att))
        self.__dict__[att] = val


    def relpath (self, path):
        """
        Resolve a path relative to the directory of the configuration
        file currently being included; absolute paths pass unchanged.
        """

        rootdir = os.path.dirname(self.inclusion_trail[-1])
        if not os.path.isabs(path):
            path = join_ncwd(rootdir, path)

        return path


    # FIXME: Temporary for backward compatibility, remove at some point.
    def resolve_path_rooted (self, path):
        """Deprecated alias for relpath()."""

        return self.relpath(path)


    def include (self, path):
        """
        Execute a configuration file, with this project exposed as 'S'.

        Detects circular inclusion; locks the instance for the
        duration so that typo'd fields are caught.
        """

        path = os.path.abspath(path)
        if path in self.inclusion_trail:
            error(_("@info",
                    "Circular inclusion of '%(file)s' attempted "
                    "in summit configuration.",
                    file=path))
        self.inclusion_trail.append(path)
        self.locked = True
        # NOTE: the config file is executed as arbitrary Python code;
        # only trusted configuration must ever be included.
        with open(path) as cfgfile:
            code = cfgfile.read()
        exec(code, {"S" : self})
        self.locked = False
        self.inclusion_trail.pop()
0363
0364
def derive_project_data (project, options, nwgrefpath=None):
    """
    Expand a raw Project (as read from configuration) into full
    operational data: summit/branch objects, catalog collections,
    direct and inverse branch<->summit mappings, VCS and wrapping
    policies, and catalogs to be created on scatter/merge.

    project -- the included Project instance (modified in place)
    options -- parsed command-line options
    nwgrefpath -- optional path of reference catalogs for which summit
        templates may be created on gather without warnings
        (used for dynamic-templates merge; see main())

    Returns the same project object, fully derived.
    """

    p = project # shortcut

    # Create summit object from summit dictionary.
    class Summit: pass
    s = Summit()
    sd = p.summit
    s.id = SUMMIT_ID
    s.by_lang = False
    s.topdir = sd.pop("topdir", None)
    s.topdir_templates = sd.pop("topdir_templates", None)
    # Assert that there are no misnamed keys in the dictionary.
    # (pop() above removed all known keys, so anything left is unknown.)
    if sd:
        error(_("@info",
                "Unknown keys in summit configuration: %(keylist)s.",
                keylist=format_item_list(list(sd.keys()))))
    # Assert that all necessary fields in summit specification exist.
    if s.topdir is None:
        error(_("@info",
                "Top directory not set in summit configuration."))
    s.split_path = None # needed only on some checks later
    p.summit = s

    # Create branch objects from branch dictionaries.
    class Branch: pass
    branches = []
    for bd in p.branches:
        b = Branch()
        branches.append(b)

        b.id = bd.pop("id", None)
        b.topdir = bd.pop("topdir", None)
        b.topdir_templates = bd.pop("topdir_templates", None)
        # If operation is performed on templates and branch template directory
        # is defined, override plain branch directory with it.
        if p.lang == p.templates_lang and b.topdir_templates is not None:
            b.topdir = b.topdir_templates
        b.by_lang = bd.pop("by_lang", False)
        # by_lang=True means "use the project language" as subdirectory name.
        if b.by_lang and isinstance(b.by_lang, bool):
            b.by_lang = project.lang
        # If separate templates directory is not defined in by-language mode,
        # set it to same as catalogs directory.
        if b.by_lang and b.topdir_templates is None:
            b.topdir_templates = b.topdir
        b.scatter_create_filter = bd.pop("scatter_create_filter", None)
        b.skip_version_control = bd.pop("skip_version_control", False)
        # FIXME: merge_locally retained for backward compatibility,
        # replace at some point with b.merge = bd.pop("merge", False).
        b.merge = bd.pop("merge", None)
        if b.merge is None:
            b.merge = bd.pop("merge_locally", False)
        b.split_path, b.join_path = bd.pop("transform_path", (None, None))
        b.insert_nosim = bd.pop("insert_nosim", False)

        # Assemble include-exclude functions.
        includes = bd.pop("includes", [])
        excludes = bd.pop("excludes", [])

        def regex_to_func (rxstr):
            try:
                rx = re.compile(rxstr, re.U)
            except:
                error(_("@info",
                        "Invalid regular expression '%(regex)s' "
                        "in include-exclude specification "
                        "of branch '%(branch)s'.",
                        branch=b.id, regex=rxstr))
            return lambda x: bool(rx.search(x))

        def chain_tests (tests):
            # Each test may be a regex string or an arbitrary callable;
            # the chained function is an OR over all of them.
            testfs = []
            for test in tests:
                if isinstance(test, str):
                    testfs.append(regex_to_func(test))
                elif callable(test):
                    testfs.append(test)
                else:
                    error(_("@info",
                            "Invalid test type '%(type)s' "
                            "in include-exclude specification "
                            "of branch '%(branch)s'.",
                            branch=b.id, type=type(test)))
            return lambda x: reduce(lambda s, y: s or y(x), testfs, False)

        if includes:
            includef = chain_tests(includes)
        if excludes:
            excludef = chain_tests(excludes)
        if includes and excludes:
            b.ignored = lambda x: not includef(x) or excludef(x)
        elif includes:
            b.ignored = lambda x: not includef(x)
        elif excludes:
            b.ignored = lambda x: excludef(x)
        else:
            b.ignored = lambda x: False

        # Assert that there are no misnamed keys in the dictionary.
        if bd:
            error(_("@info",
                    "Unknown keys in specification of branch '%(branch)s': "
                    "%(keylist)s.",
                    branch=b.id, keylist=format_item_list(list(bd.keys()))))
    p.branches = branches

    # Assert that all necessary fields in branch specifications exist.
    p.branch_ids = []
    for branch in p.branches:
        if branch.id is None:
            error(_("@info",
                    "Branch with undefined ID."))
        if branch.id in p.branch_ids:
            error(_("@info",
                    "Non-unique branch ID '%(branch)s'.",
                    branch=branch.id))
        p.branch_ids.append(branch.id)
        if branch.topdir is None:
            error(_("@info",
                    "Top directory not set for branch '%(branch)s'.",
                    branch=branch.id))

    # Dictionary of branches by branch id.
    p.bdict = dict([(x.id, x) for x in p.branches])

    # Create version control operators if given.
    # Specific settings take precedence over the common version_control.
    p.summit_vcs = None
    p.branches_vcs = None
    if p.summit_version_control:
        p.summit_vcs = make_vcs(p.summit_version_control.lower())
    if p.branches_version_control:
        p.branches_vcs = make_vcs(p.branches_version_control.lower())
    if p.version_control:
        if p.summit_vcs is None:
            p.summit_vcs = make_vcs(p.version_control.lower())
        if p.branches_vcs is None:
            p.branches_vcs = make_vcs(p.version_control.lower())

    # Decide wrapping policies.
    # select_field_wrapping() expects a command-line-options-like object,
    # so fake one with just the fields it reads.
    class D: pass
    dummyopt = D()
    dummyopt.do_wrap = p.summit_wrap
    dummyopt.do_fine_wrap = p.summit_fine_wrap
    p.summit_wrapping = select_field_wrapping(cmlopt=dummyopt)
    dummyopt.do_wrap = p.branches_wrap
    dummyopt.do_fine_wrap = p.branches_fine_wrap
    p.branches_wrapping = select_field_wrapping(cmlopt=dummyopt)

    # Decide the extension of catalogs.
    if p.over_templates and p.lang == p.templates_lang:
        catext = ".pot"
    else:
        catext = ".po"

    # Collect catalogs from branches.
    # Catalog collections map name -> list of (path, subdir) entries.
    p.catalogs = {}
    for b in p.branches:
        p.catalogs[b.id] = collect_catalogs(b.topdir, catext,
                                            b.by_lang, b.ignored, b.split_path,
                                            project, options)
    # ...and from the summit.
    p.catalogs[SUMMIT_ID] = collect_catalogs(p.summit.topdir, catext,
                                             None, None, None,
                                             project, options)
    if ( p.lang == p.templates_lang and "gather" in p.opmodes
         and nwgrefpath is not None
    ):
        # Also add summit templates which do not actually exist,
        # but are going to be created on gather without warnings,
        # by reflecting the catalogs found in the given path.
        refcats = collect_catalogs(nwgrefpath, ".po",
                                   None, None, None, project, options)
        for name, spec in refcats.items():
            if name not in p.catalogs[SUMMIT_ID]:
                path, subdir = spec[0] # all summit catalogs unique
                tpath = join_ncwd(p.summit.topdir, subdir, name + ".pot")
                p.catalogs[SUMMIT_ID][name] = [(tpath, subdir)]

    # Resolve ascription filter.
    # With no -a option the first defined filter is taken.
    project.ascription_filter = None
    for afname, afspec in project.ascription_filters:
        if options.asc_filter is None or afname == options.asc_filter:
            if isinstance(afspec, str):
                afcall = make_ascription_selector([afspec])
            elif isinstance(afspec, (tuple, list)):
                afcall = make_ascription_selector(afspec)
            elif callable(afspec):
                afcall = afspec
            else:
                error(_("@info",
                        "Unknown type of definition for "
                        "ascription filter '%(filt)s'.",
                        filt=afname))
            project.ascription_filter = afcall
            break
    if options.asc_filter is not None and project.ascription_filter is None:
        error(_("@info",
                "Summit configuration does not define "
                "ascription filter '%(filt)s'.",
                filt=options.asc_filter))

    # Link summit and ascription catalogs.
    if project.ascription_filter:
        tmp0 = [(x, y[0][0]) for x, y in list(p.catalogs[SUMMIT_ID].items())]
        tmp1 = [x[0] for x in tmp0]
        tmp2 = collect_ascription_associations([x[1] for x in tmp0])
        tmp3 = list(zip([tmp2[0][0]] * len(tmp1), [x[1] for x in tmp2[0][1]]))
        # Maps summit catalog name -> (ascription config, ascription path).
        p.aconfs_acatpaths = dict(list(zip(tmp1, tmp3)))

    # Assure that summit catalogs are unique.
    for name, spec in list(p.catalogs[SUMMIT_ID].items()):
        if len(spec) > 1:
            fstr = "\n".join([x[0] for x in spec])
            error(_("@info",
                    "Non-unique summit catalog '%(name)s', found as:\n"
                    "%(filelist)s",
                    name=name, filelist=fstr))

    # At scatter in summit-over-static-templates mode, add to the collection
    # of branch catalogs any that should be newly created.
    p.add_on_scatter = {}
    if ( p.over_templates and p.lang != p.templates_lang
         and "scatter" in p.opmodes):

        # Go through all mappings and collect branch names mapped to
        # summit catalogs per branch id and summit name, and vice versa.
        mapped_summit_names = {}
        mapped_branch_names = {}
        for mapping in p.mappings:
            branch_id = mapping[0]
            branch_name = mapping[1]
            summit_names = mapping[2:]
            if not branch_id in mapped_summit_names:
                mapped_summit_names[branch_id] = {}
            if not branch_id in mapped_branch_names:
                mapped_branch_names[branch_id] = {}
            for summit_name in summit_names:
                if not summit_name in mapped_summit_names[branch_id]:
                    mapped_summit_names[branch_id][summit_name] = []
                mapped_summit_names[branch_id][summit_name].append(branch_name)
                if not branch_name in mapped_branch_names[branch_id]:
                    mapped_branch_names[branch_id][branch_name] = []
                mapped_branch_names[branch_id][branch_name].append(summit_name)

        # Go through all branches.
        bt_cache = {}
        for branch in p.branches:
            # Skip this branch if no templates.
            if not branch.topdir_templates:
                continue

            # Collect all templates for this branch.
            # Cached per template directory, as branches may share it.
            branch_templates = bt_cache.get(branch.topdir_templates)
            if branch_templates is None:
                branch_templates = collect_catalogs(branch.topdir_templates,
                                                    ".pot", branch.by_lang,
                                                    branch.ignored,
                                                    branch.split_path,
                                                    project, options)
                bt_cache[branch.topdir_templates] = branch_templates

            # Go through all summit catalogs.
            for summit_name in p.catalogs[SUMMIT_ID]:

                # Collect names of any catalogs in this branch mapped to
                # the current summit catalog.
                branch_names = []
                if ( branch.id in mapped_summit_names
                     and summit_name in mapped_summit_names[branch.id]):
                    branch_names = mapped_summit_names[branch.id][summit_name]
                # Unconditionally add summit name as one possible branch name,
                # since otherwise a mapped branch catalog could shadow
                # a direct branch catalog.
                # NOTE(review): when the mapping exists, branch_names aliases
                # the list inside mapped_summit_names, so this append mutates
                # shared state across branches; downstream duplicate checks
                # appear to mask the effect — confirm before refactoring.
                branch_names.append(summit_name)

                # For each collected branch name, check if there are some
                # branch templates for which the corresponding branch path
                # does not exit and (in case of explicit mapping) whether
                # all summit catalogs needed for scattering are available.
                # If this is the case, set missing paths for scattering.
                for branch_name in branch_names:

                    if ( branch_name in branch_templates
                         and all([x in p.catalogs[SUMMIT_ID]
                                  for x in mapped_branch_names.get(branch.id, {})
                                                              .get(branch_name, [])])
                    ):
                        # Assemble all branch catalog entries.
                        for template in branch_templates[branch_name]:
                            # Compose the branch catalog subdir and path.
                            subdir = template[1]
                            if branch.join_path:
                                subpath = branch.join_path(branch_name, subdir,
                                                           branch.by_lang)
                            elif branch.by_lang:
                                subpath = os.path.join(subdir, branch_name,
                                                       branch.by_lang + ".po")
                            else:
                                subpath = os.path.join(subdir,
                                                       branch_name + ".po")
                            path = join_ncwd(branch.topdir, subpath)

                            # Skip this catalog if excluded from creation on
                            # scatter, by filter on catalog name and subdir
                            # (False -> excluded).
                            scf = branch.scatter_create_filter
                            if scf and not scf(branch_name, subdir):
                                continue

                            # If not there already, add this path
                            # to branch catalog entry,
                            # and record later initialization from template.
                            brcats = p.catalogs[branch.id].get(branch_name)
                            if brcats is None:
                                brcats = []
                                p.catalogs[branch.id][branch_name] = brcats
                            if (path, subdir) not in brcats:
                                brcats.append((path, subdir))
                                p.add_on_scatter[path] = template[0]

    # In summit-over-dynamic-templates mode,
    # automatic vivification of summit catalogs must be active.
    if p.templates_dynamic:
        p.vivify_on_merge = True

    # At merge in summit-over-templates mode,
    # if automatic vivification of summit catalogs requested,
    # add to the collection of summit catalogs any that should be created.
    p.add_on_merge = {}
    if ( p.over_templates and p.lang != p.templates_lang
         and "merge" in p.opmodes and (p.vivify_on_merge or options.create)
    ):
        # Collect all summit templates.
        if not p.templates_dynamic:
            summit_templates = collect_catalogs(p.summit.topdir_templates,
                                                ".pot", None, None, None,
                                                project, options)
        else:
            summit_templates = p.tproject.catalogs[SUMMIT_ID]

        # Go through all summit templates, recording missing summit catalogs.
        for name, spec in summit_templates.items():
            tpath, tsubdir = spec[0] # all summit catalogs unique
            if name not in p.catalogs[SUMMIT_ID]:
                # Compose the summit catalog path.
                spath = join_ncwd(p.summit.topdir, tsubdir, name + ".po")
                # Add this file to summit catalog entries.
                p.catalogs[SUMMIT_ID][name] = [(spath, tsubdir)]
                # Record later initialization from template.
                p.add_on_merge[spath] = tpath

    # Convenient dictionary views of mappings.
    # - direct: branch_id->branch_name->summit_name
    # - part inverse: branch_id->summit_name->branch_name
    # - full inverse: summit_name->branch_id->branch_name
    p.direct_map = {}
    p.part_inverse_map = {}
    p.full_inverse_map = {}

    # Initialize mappings by branch before the main loop for direct mappings,
    # because an explicit mapping may name a branch before it was processed
    # in the main loop.
    for branch_id in p.branch_ids:
        p.direct_map[branch_id] = {}
        for branch_name in p.catalogs[branch_id]:
            p.direct_map[branch_id][branch_name] = []

    # Add direct mappings.
    # - explicit
    for mapping in p.mappings:
        branch_id, branch_name = mapping[:2]
        if ( "gather" in p.opmodes
             and ( branch_id not in p.catalogs
                   or branch_name not in p.catalogs[branch_id])
        ):
            warning(_("@info",
                      "No branch catalog corresponding to mapping %(mapping)s "
                      "set by the summit configuration.",
                      mapping=("('%s', '%s', ...)" % (branch_id, branch_name))))
            continue
        summit_names = mapping[2:]
        p.direct_map[branch_id][branch_name] = summit_names
    # - implicit
    # A branch catalog with no explicit mapping maps to the summit
    # catalog of the same name.
    for branch_id in p.branch_ids:
        for branch_name in p.catalogs[branch_id]:
            if p.direct_map[branch_id][branch_name] == []:
                p.direct_map[branch_id][branch_name].append(branch_name)

    # Convert subdir mappings into dictionary by branch ID and subdir.
    p.subdir_map = {}
    for bid, bsubdir, ssubdir in p.subdir_mappings:
        p.subdir_map[(bid, bsubdir)] = ssubdir

    # Collect missing summit catalogs.
    needed_additions = []
    for branch_id in p.branch_ids:
        for branch_name in p.catalogs[branch_id]:
            summit_names = p.direct_map[branch_id][branch_name]
            for summit_name in summit_names:
                if summit_name not in p.catalogs[SUMMIT_ID]:
                    # Compose the path for the missing summit catalog.
                    # Default the subdir to that of the current branch,
                    # as it is the primary branch for this catalog.
                    # Or use explicit subdir mapping if given.
                    branch_path, branch_subdir = \
                        p.catalogs[branch_id][branch_name][0]
                    dmkey = (branch_id, branch_subdir)
                    summit_subdir = p.subdir_map.get(dmkey) or branch_subdir
                    summit_path = join_ncwd(p.summit.topdir, summit_subdir,
                                            summit_name + catext)
                    if "gather" in p.opmodes:
                        if options.create:
                            # Add summit catalog into list of existing catalogs;
                            # it will be created for real on gather.
                            p.catalogs[SUMMIT_ID][summit_name] = [
                                (summit_path, summit_subdir)]
                        else:
                            needed_additions.append((branch_path, summit_path))
                    elif "scatter" in p.opmodes:
                        needed_additions.append((branch_path, summit_path))

    # Initialize inverse mappings.
    # - part inverse:
    for branch_id in p.branch_ids:
        p.part_inverse_map[branch_id] = {}
        for summit_name in p.catalogs[SUMMIT_ID]:
            p.part_inverse_map[branch_id][summit_name] = []
    # - full inverse:
    for summit_name in p.catalogs[SUMMIT_ID]:
        p.full_inverse_map[summit_name] = {}
        for branch_id in p.branch_ids:
            p.full_inverse_map[summit_name][branch_id] = []

    # Add existing inverse mappings.
    for branch_id in p.branch_ids:
        for branch_name in sorted(p.catalogs[branch_id]):
            for summit_name in p.direct_map[branch_id][branch_name]:
                if summit_name in p.full_inverse_map:
                    # - part inverse:
                    pinv = p.part_inverse_map[branch_id][summit_name]
                    if branch_name not in pinv:
                        pinv.append(branch_name)
                    # - full inverse:
                    finv = p.full_inverse_map[summit_name][branch_id]
                    if branch_name not in finv:
                        finv.append(branch_name)

    # Collect superfluous summit catalogs.
    # (Those with no source branch catalog at all.)
    needed_removals = []
    for summit_name in p.catalogs[SUMMIT_ID]:
        src_branch_ids = []
        for branch_id in project.branch_ids:
            if project.full_inverse_map[summit_name][branch_id]:
                src_branch_ids.append(branch_id)
        if not src_branch_ids:
            if "gather" in p.opmodes:
                if not options.create:
                    summit_path = p.catalogs[SUMMIT_ID][summit_name][0][0]
                    needed_removals.append(summit_path)

    # Create function to assign precedence to a subdirectory.
    # Lower return value means higher precedence; unknown subdirs
    # come last.
    p.subdir_precedence = [os.path.normpath(sd) for sd in p.subdir_precedence]
    def calc_subdir_precedence (subdir):
        for i, test_subdir in enumerate(p.subdir_precedence):
            ltsd = len(test_subdir)
            if ( subdir.startswith(test_subdir)
                 and subdir[ltsd:ltsd + 1] in ("", os.path.sep)
            ):
                return i
        return len(p.subdir_precedence)
    p.calc_subdir_precedence = calc_subdir_precedence

    # Collect summit catalogs that should be moved.
    # A summit catalog should live in one of the highest-precedence
    # subdirs among those of its source branch catalogs.
    needed_moves = []
    for summit_name in p.catalogs[SUMMIT_ID]:
        branch_subdirs = []
        for branch_id in p.full_inverse_map[summit_name]:
            for branch_name in p.full_inverse_map[summit_name][branch_id]:
                branch_subdirs_1 = []
                for bpath, bsubdir in p.catalogs[branch_id][branch_name]:
                    dmkey = (branch_id, bsubdir)
                    branch_subdirs_1.append(p.subdir_map.get(dmkey) or bsubdir)
                branch_subdirs.extend(branch_subdirs_1)
        if branch_subdirs:
            branch_subdirs = list(set(branch_subdirs))
            subdir_precs = list(map(p.calc_subdir_precedence, branch_subdirs))
            precs_subdirs = sorted(zip(subdir_precs, branch_subdirs))
            branch_subdirs_sel = [sd for pr, sd in precs_subdirs
                                  if pr == precs_subdirs[0][0]]
            summit_subdir = p.catalogs[SUMMIT_ID][summit_name][0][1]
            if summit_subdir not in branch_subdirs_sel:
                summit_path = p.catalogs[SUMMIT_ID][summit_name][0][0]
                dpaths = []
                for bsubdir in branch_subdirs_sel:
                    dpath = join_ncwd(p.summit.topdir, bsubdir,
                                      summit_name + catext)
                    dpaths.append(dpath)
                if "gather" in p.opmodes:
                    if not options.create:
                        needed_moves.append((summit_path, dpaths))

    # If catalog creation is not allowed,
    # complain about needed additions, removals, and moves.
    if needed_additions or needed_removals or needed_moves:
        if needed_additions:
            fmtlist = "\n".join("%s --> %s" % x
                                for x in sorted(needed_additions))
            warning(_("@info",
                      "Some branch catalogs have no "
                      "associated summit catalog "
                      "(expected summit path given):\n"
                      "%(filelist)s",
                      filelist=fmtlist))
        if needed_removals:
            fmtlist = "\n".join(sorted(needed_removals))
            warning(_("@info",
                      "Some summit catalogs have no "
                      "associated branch catalogs:\n"
                      "%(filelist)s",
                      filelist=fmtlist))
        if needed_moves:
            fmtlist = "\n".join("%s --| %s" % (x, " | ".join(y))
                                for x, y in sorted(needed_moves))
            warning(_("@info",
                      "Some summit catalogs should be "
                      "moved to another subdirectory:\n"
                      "%(filelist)s",
                      filelist=fmtlist))
        if "gather" in p.opmodes:
            error(_("@info",
                    "Halting because catalog creation is not allowed "
                    "(consider issuing %(opt)s option).", opt="--create"))

    # Fill in defaults for missing fields in hook specs.
    for attr in p.__dict__:
        if attr.startswith("hook_"):
            p.__dict__[attr] = hook_fill_defaults(p.__dict__[attr])

    return p
0903
0904
def split_path_in_project (project, path):
    """
    Resolve a filesystem path into its positions within the summit project.

    The path is matched against the top directory of the summit and of each
    branch; for every tree containing it, a (branch id, subdirectory,
    catalog name) triple is recorded. Catalog name of None means a whole
    subdirectory was selected, and if the subdirectory is None too,
    the whole branch was selected. Execution is aborted with an error
    if a non-PO file is given, or if no configured tree covers the path.
    """

    if os.path.isfile(path):
        if not path.endswith((".po", ".pot")):
            error(_("@info",
                    "Non-PO file '%(file)s' given as catalog.",
                    file=path))

    splits = []
    for b in [project.summit] + project.branches:
        broot = os.path.abspath(b.topdir)
        apath = os.path.abspath(path)
        if apath.startswith(broot + os.path.sep) or apath == broot:
            # Path relative to this tree's top directory.
            subpath = apath[len(broot + os.path.sep):]
            # Split the path into catalog name and subdirectory.
            if os.path.isfile(apath):
                if b.split_path:
                    catname, subdir = b.split_path(subpath)
                else:
                    subdir = os.path.dirname(subpath)
                    basename = os.path.basename(subpath)
                    catname = basename[:basename.rfind(".")]
                    if b.by_lang:
                        # If this is by-language mode,
                        # catalog path can be split only if of proper language,
                        # and subdirectory and catalog name should backtrack.
                        if catname != b.by_lang:
                            continue
                        catname = os.path.basename(subdir)
                        subdir = os.path.dirname(subdir)
            elif os.path.isdir(apath):
                if b.split_path:
                    catname = None
                    # Feed a dummy file name through the splitter
                    # just to obtain the subdirectory part.
                    dummy_subpath = os.path.join(subpath, "__dummy__.po")
                    subdir = b.split_path(dummy_subpath)[1]
                else:
                    subdir = subpath
                    catname = None
                    if b.by_lang:
                        # If this is a leaf directory in by-language mode,
                        # then actually a catalog has been selected,
                        # and subdirectory and catalog name should backtrack.
                        # NOTE(review): subdir is relative to the branch root
                        # here, so this isfile() check resolves against the
                        # current working directory — verify intended.
                        apath2 = os.path.join(subdir, b.by_lang + ".po")
                        if os.path.isfile(apath2):
                            catname = os.path.basename(subdir)
                            subdir = os.path.dirname(subdir)
            # Collect the splitting.
            # Catalog name being None means that a subdirectory is selected,
            # and if subdirectory too is None, the whole branch is selected.
            if not catname and not subdir:
                subdir = None
            splits.append((b.id, subdir, catname))
    if not splits:
        error(_("@info",
                "Path '%(path)s' is not covered by the summit configuration.",
                path=path))

    return splits
0963
0964
def collect_partspecs (project, specargs):
    """
    Convert raw partial-operation arguments into operation targets.

    Arguments which are existing paths are first resolved against the
    project trees; all others are taken as literal "branch:name" targets.
    Returns (partspecs, partbids): a mapping of branch id (or summit id)
    to its list of catalog/subdirectory specifications, and the list of
    explicitly mentioned branch ids.
    """

    partbids = []
    partspecs = {}
    for specarg in specargs:
        # An existing path is converted into one operation target
        # per project tree which covers it.
        if os.path.exists(specarg):
            optargets = []
            for bid, breldir, catname in split_path_in_project(project,
                                                               specarg):
                if catname:
                    optargets.append("%s:%s" % (bid, catname))
                elif breldir:
                    optargets.append("%s:%s%s" % (bid, breldir, os.path.sep))
                else:
                    optargets.append("%s:" % bid)
        else:
            optargets = [specarg]

        for optarget in optargets:
            if ":" in optarget:
                bid, fdname = optarget.split(":", 1)
                if bid not in project.branch_ids and bid != SUMMIT_ID:
                    error(_("@info",
                            "Branch '%(branch)s' is not defined "
                            "in the summit configuration.",
                            branch=bid))
            else:
                # No branch qualifier given.
                bid = None
                fdname = optarget
            if bid and bid not in partbids:
                partbids.append(bid)
            if fdname:
                partspecs.setdefault(bid or SUMMIT_ID, []).append(fdname)

    return partspecs, partbids
1007
1008
# Fill in defaults for missing fields in hook specs.
def hook_fill_defaults (specs):
    """
    Normalize hook specifications into (call, branch_rx, name_rx) triples.

    Missing branch and catalog name regexes default to empty strings.
    """

    padded = []
    for spec in specs:
        branch_rx = spec[1] if len(spec) > 1 else r""
        name_rx = spec[2] if len(spec) > 2 else r""
        padded.append((spec[0], branch_rx, name_rx))

    return padded
1022
1023
# Each catalog is represented by a dictionary entry: the key is the catalog
# name, the value is the list of tuples of file path and subdirectory
# relative to top (list in case there are several same-named catalogs in
# different subdirectories).
def collect_catalogs (topdir, catext, by_lang, ignored, split_path,
                      project, options):
    """
    Collect all catalogs with the given extension under a top directory.

    Returns a dictionary mapping catalog name to a list of
    (file path, subdirectory) tuples, each list sorted by file path.
    In by-language mode, the catalog name is taken from the containing
    directory rather than from the file name.
    """

    catalogs = {}
    topdir = os.path.normpath(topdir)
    for root, dirs, files in os.walk(topdir):
        for fname in files:
            if not fname.endswith(catext):
                continue
            catname = ""
            if not by_lang:
                fpath = os.path.abspath(os.path.join(root, fname))
                if split_path:
                    catname, spath = split_path(fpath[len(topdir) + 1:])
                else:
                    catname = fname[0:fname.rfind(".")]
                    spath = root[len(topdir) + 1:]
            elif fname == by_lang + ".po" or catext == ".pot":
                fpath = os.path.abspath(os.path.join(root, fname))
                if split_path:
                    catname, spath = split_path(fpath[len(topdir) + 1:])
                else:
                    # File name is the fixed language code here,
                    # so the catalog name comes from the directory.
                    catname = os.path.basename(root)
                    spath = os.path.dirname(root)[len(topdir) + 1:]

            if catname and (not ignored or not ignored(fpath)):
                catalogs.setdefault(catname, []).append(
                    (join_ncwd(fpath), os.path.normpath(spath)))

    for catpaths in catalogs.values():
        catpaths.sort(key=lambda x: x[0])

    return catalogs
1064
1065
def summit_gather (project, options):
    """
    Gather branch catalogs into summit catalogs.

    Aborts in summit-over-static-templates mode unless forced (gathering
    would clobber catalogs that are maintained from templates), and only
    warns when gathering templates themselves in dynamic-templates mode.
    """

    if ( project.over_templates and project.lang != project.templates_lang
        and not options.force):
        error(_("@info",
                "Gathering catalogs is normally not allowed "
                "in summit-over-static-templates mode. "
                "If this is the initial creation of summit catalogs, "
                "or externally injected branch catalogs need to be gathered, "
                "run with options %(opts)s.",
                opts="--create --force"))
    elif ( project.templates_dynamic
           and project.lang == project.templates_lang and not options.force):
        # Fixed typo in user-visible message: "supress" -> "suppress".
        warning(_("@info",
                  "Gathering templates is superfluous in "
                  "summit-over-templates mode. "
                  "If this is done to check whether gathering works, "
                  "to suppress this message run with option %(opt)s.",
                  opt="--force"))

    # Collect names of summit catalogs to gather.
    summit_names = select_summit_names(project, options)

    # Setup progress indicator.
    upprog = lambda x=None: x
    if not options.verbose:
        catpaths = [project.catalogs[SUMMIT_ID][x][0][0] for x in summit_names]
        upprog = init_file_progress(catpaths,
                                    addfmt=t_("@info:progress",
                                              "Gathering: %(file)s"))

    # Gather all selected catalogs.
    for name in summit_names:
        catpath = project.catalogs[SUMMIT_ID][name][0][0]
        if options.verbose:
            report(_("@info:progress",
                     "Gathering %(file)s...",
                     file=catpath))
        # Bind the current path as a default argument, so the callback
        # does not depend on late binding of the loop variable.
        upprogc = lambda cp=catpath: upprog(cp)
        summit_gather_single(name, project, options, update_progress=upprogc)
    upprog()
1107
1108
def summit_scatter (project, options):
    """
    Scatter translations from summit catalogs into branch catalogs.

    For every selected branch catalog, the summit catalogs which supply
    messages to it are collected into a scatter specification; after all
    catalogs of a branch, a dummy specification triggers the post-scatter
    hooks for that branch.
    """

    if project.over_templates and project.lang == project.templates_lang:
        error(_("@info",
                "Scattering not possible on '%(lang)s' "
                "in summit-over-templates mode.",
                lang=project.templates_lang))

    scatter_specs = []

    # Select branches to scatter to.
    if not options.partbids or SUMMIT_ID in options.partbids:
        branch_ids = project.branch_ids
    else:
        branch_ids = options.partbids

    # Collect catalogs to scatter through all selected branches.
    for branch_id in branch_ids:

        branch_catalogs = select_branch_catalogs(branch_id, project, options)

        for branch_name, branch_path, branch_subdir in branch_catalogs:

            # Collect names of all the summit catalogs which this branch
            # catalog supplies messages to.
            summit_names = project.direct_map[branch_id][branch_name]

            # Collect paths of selected summit catalogs.
            summit_paths = []
            for summit_name in summit_names:
                if not summit_name in project.catalogs[SUMMIT_ID]:
                    # Warning pertinent to this situation will have
                    # been issued earlier, so just skip it here.
                    #warning(_("@info",
                              #"Missing summit catalog "
                              #"for branch catalog '%(file)s'.",
                              #file=branch_path))
                    continue
                summit_paths.append(
                    project.catalogs[SUMMIT_ID][summit_name][0][0])

            # There may be no summit catalogs for this branch catalog.
            # The warning about this condition has been issued earlier,
            # just skip the branch catalog here.
            if summit_paths:
                scatter_specs.append((branch_id, branch_name, branch_subdir,
                                      branch_path, summit_paths))

        # Dummy entry to indicate branch switch.
        scatter_specs.append((branch_id, None, None, None, None))

    # Setup progress indicator.
    upprog = lambda x=None: x
    if not options.verbose:
        # Dummy entries have no catalog path (index 3); skip them.
        catpaths = [x[3] for x in scatter_specs if x[1]]
        upprog = init_file_progress(catpaths,
                                    addfmt=t_("@info:progress",
                                              "Scattering: %(file)s"))

    # Scatter to branch catalogs.
    for scatter_spec in scatter_specs:
        branch_id, catpath = scatter_spec[0], scatter_spec[3]
        if catpath is not None:
            if options.verbose:
                report(_("@info:progress",
                         "Scattering %(file)s...",
                         file=catpath))
            upprogc = lambda: upprog(catpath)
            summit_scatter_single(*(scatter_spec + (project, options, upprogc)))
        else:
            # Apply post-scatter hooks.
            if options.verbose:
                report(_("@info:progress",
                         "Applying post-hook to branch %(branch)s...",
                         branch=branch_id))
            exec_hook_branch(branch_id, project.hook_on_scatter_branch)
    upprog()
1186
1187
def summit_merge (project, options):
    """
    Merge summit and branch catalogs with their template catalogs.

    Summit catalogs are merged against summit templates (collected from
    the static template tree, or taken from the dynamically gathered
    template project), and branch catalogs against branch templates for
    branches with local merging enabled. In summit-over-dynamic-templates
    mode, the temporary template tree is removed afterwards.
    """

    if project.over_templates and project.lang == project.templates_lang:
        error(_("@info",
                "Merging not possible on '%(lang)s' in "
                "summit-over-templates mode.",
                lang=project.templates_lang))

    merge_specs = []

    # Select branches to merge.
    if not options.partbids:
        branch_ids = project.branch_ids + [SUMMIT_ID]
    else:
        branch_ids = options.partbids

    # Setup merging in summit.
    if SUMMIT_ID in branch_ids and project.summit.topdir_templates:
        branch_ids.remove(SUMMIT_ID)

        # Collect names of summit catalogs to merge.
        summit_names = select_summit_names(project, options)

        # Collect template catalogs to use.
        if not project.templates_dynamic:
            template_catalogs = collect_catalogs(project.summit.topdir_templates,
                                                 ".pot", None, None, None,
                                                 project, options)
        else:
            template_catalogs = project.tproject.catalogs[SUMMIT_ID]

        # Collect data for summit catalogs to merge.
        for name in summit_names:
            summit_path, summit_subdir = project.catalogs[SUMMIT_ID][name][0]
            if name not in template_catalogs:
                warning(_("@info",
                          "No template for summit catalog '%(file)s'.",
                          file=summit_path))
                continue
            template_path = template_catalogs[name][0][0]
            merge_specs.append((SUMMIT_ID, name, summit_subdir,
                                summit_path, template_path,
                                project.summit_wrapping,
                                project.summit_fuzzy_merging))

    # Setup merging in branches.
    for branch_id in branch_ids:
        branch = project.bdict[branch_id]

        # Skip branch if local merging not desired, or no templates defined.
        if (not branch.merge or branch.topdir_templates is None):
            continue

        # Collect branch catalogs to merge.
        branch_catalogs = select_branch_catalogs(branch_id, project, options)

        # Collect template catalogs to use.
        template_catalogs = collect_catalogs(branch.topdir_templates, ".pot",
                                             branch.by_lang, branch.ignored,
                                             branch.split_path,
                                             project, options)

        # Collect data for branch catalogs to merge.
        for name, branch_path, branch_subdir in branch_catalogs:
            if not os.path.isfile(branch_path):
                # Catalog has been selected due to another operation mode,
                # which can create catalogs from scratch.
                continue
            if not name in template_catalogs:
                warning(_("@info",
                          "No template for branch catalog '%(file)s'.",
                          file=branch_path))
                continue
            # Require a template in the same subdirectory as the catalog.
            exact = False
            for template_path, template_subdir in template_catalogs[name]:
                if template_subdir == branch_subdir:
                    exact = True
                    break
            if not exact:
                warning(_("@info",
                          "No exact template for branch catalog '%(file)s'.",
                          file=branch_path))
                continue
            merge_specs.append((branch_id, name, branch_subdir,
                                branch_path, template_path,
                                project.branches_wrapping,
                                project.branches_fuzzy_merging))

    # Setup progress indicator.
    upprog = lambda x=None: x
    if not options.verbose:
        catpaths = [x[3] for x in merge_specs]
        upprog = init_file_progress(catpaths,
                                    addfmt=t_("@info:progress",
                                              "Merging: %(file)s"))

    # Merge catalogs.
    for merge_spec in merge_specs:
        catpath = merge_spec[3]
        if options.verbose:
            report(_("@info:progress",
                     "Merging %(file)s...",
                     file=catpath))
        upprogc = lambda: upprog(catpath)
        summit_merge_single(*(merge_spec + (project, options, upprogc)))
    upprog()

    # Remove template tree in summit-over-dynamic-templates mode.
    if project.templates_dynamic:
        shutil.rmtree(project.tproject.summit.topdir)
1298
1299
def summit_deps (project, options):
    """
    Report, one line per selected summit catalog, the branch catalogs
    which supply messages to it.
    """

    pscats = project.catalogs[SUMMIT_ID]

    # Report dependencies for all selected catalogs.
    for summit_name in select_summit_names(project, options):
        # The summit catalog may not exist yet if there are missing
        # summit catalogs to current branch catalogs (gather not done).
        if summit_name not in pscats:
            continue
        summit_path = pscats[summit_name][0][0]
        branch_paths = [
            bpath
            for branch_id in project.branch_ids
            for branch_name in project.full_inverse_map[summit_name][branch_id]
            for bpath, bsubdir in project.catalogs[branch_id][branch_name]
        ]
        fmtbpaths = " ".join(branch_paths)
        if options.verbose:
            actype = _("@item:intext action performed on a catalog",
                       "depends")
            report(": (%s) %s %s" % (actype, summit_path, fmtbpaths))
        else:
            report(": %s %s" % (summit_path, fmtbpaths))
1324
1325
def select_branch_catalogs (branch_id, project, options):
    """
    Select catalogs of one branch to be covered by the operation.

    Without partial specifications, all branch catalogs passing the
    path selection filter are taken. Otherwise, catalogs are selected
    by direct (branch-side) and inverse (summit-side) specifications;
    direct selection also pulls in all branch catalogs contributing to
    the same summit catalogs as the directly selected ones.
    Returns a list of (name, path, subdir) tuples, deduplicated and
    sorted by path.
    """

    # Shortcuts.
    pbcats = project.catalogs[branch_id]

    # Select either all catalogs in this branch,
    # or those mentioned in the command line.
    if not options.partspecs:
        branch_catalogs = []
        for name, spec in list(pbcats.items()):
            for path, subdir in spec:
                if options.selcatf(path):
                    branch_catalogs.append((name, path, subdir))
    else:
        # Select branch catalogs by command line specification.
        branch_catalogs = []

        # Process direct specifications (branch->summit).
        if branch_id in options.partspecs:
            for part_spec in options.partspecs[branch_id]:
                # If the catalog specification has path separators,
                # then it selects a complete subdir in the branch.
                branch_catalogs_l = []
                if part_spec.find(os.sep) >= 0:
                    sel_subdir = os.path.normpath(part_spec)
                    one_found = False
                    for name, spec in list(pbcats.items()):
                        for path, subdir in spec:
                            if sel_subdir == subdir:
                                one_found = True
                                if options.selcatf(path):
                                    branch_catalogs_l.append(
                                        (name, path, subdir))
                    if not one_found:
                        error(_("@info",
                                "No catalogs in subdirectory '%(dir)s' "
                                "of branch '%(branch)s'.",
                                dir=sel_subdir, branch=branch_id))
                else:
                    # Otherwise, specific catalog is selected.
                    sel_name = part_spec
                    one_found = False
                    for name, spec in list(pbcats.items()):
                        if sel_name == name:
                            for path, subdir in spec:
                                one_found = True
                                if options.selcatf(path):
                                    branch_catalogs_l.append(
                                        (name, path, subdir))
                            break
                    if not one_found:
                        error(_("@info",
                                "No catalog named '%(name)s' "
                                "in branch '%(branch)s'.",
                                name=sel_name, branch=branch_id))

                # Also select all branch catalogs which contribute to same
                # summit catalogs as the already selected ones.
                branch_catalogs_l2 = []
                dmap = project.direct_map[branch_id]
                pimap = project.part_inverse_map[branch_id]
                for branch_name, d1, d2 in branch_catalogs_l:
                    if branch_name in dmap:
                        for summit_name in dmap[branch_name]:
                            if summit_name in pimap:
                                for name in pimap[summit_name]:
                                    for path, subdir in pbcats[name]:
                                        if options.selcatf(path):
                                            branch_catalogs_l2.append(
                                                (name, path, subdir))

                branch_catalogs.extend(branch_catalogs_l)
                branch_catalogs.extend(branch_catalogs_l2)

        # Process inverse specifications (summit->branch).
        if SUMMIT_ID in options.partspecs:
            for part_spec in options.partspecs[SUMMIT_ID]:
                if part_spec.find(os.sep) >= 0:
                    # Complete subdir.
                    sel_subdir = os.path.normpath(part_spec)
                    cats = []
                    for name, spec in list(project.catalogs[SUMMIT_ID].items()):
                        path, subdir = spec[0] # all summit catalogs unique
                        if sel_subdir == subdir:
                            bnames = project.full_inverse_map[name][branch_id]
                            for bname in bnames:
                                if bname in pbcats:
                                    for bpath, bsubdir in pbcats[bname]:
                                        if options.selcatf(bpath):
                                            cats.append((bname, bpath, bsubdir))
                    branch_catalogs.extend(cats)
                else:
                    # Specific catalog.
                    sel_name = part_spec
                    if not sel_name in project.catalogs[SUMMIT_ID]:
                        error(_("@info",
                                "No summit catalog named '%(name)s'.",
                                name=sel_name))
                    bnames = project.full_inverse_map[sel_name][branch_id]
                    for bname in bnames:
                        if bname in pbcats:
                            for bpath, bsubdir in pbcats[bname]:
                                if options.selcatf(bpath):
                                    branch_catalogs.append(
                                        (bname, bpath, bsubdir))

    # Same catalogs may have been selected multiple times, remove.
    branch_catalogs = list(set(branch_catalogs))

    # Sort by path.
    branch_catalogs.sort(key=lambda x: x[1])
    # ...sorting is not only for looks, but to establish priority of
    # supplying comments to summit messages.

    return branch_catalogs
1441
1442
def select_summit_names (project, options):
    """
    Select names of summit catalogs to be covered by the operation.

    Names are taken either from all summit catalogs (no partial
    specifications), explicitly by summit-side specifications (catalog
    name or subdirectory), or implicitly through branch-side
    specifications via the direct map. The result is deduplicated,
    sorted by summit path and then by subdirectory precedence.
    """

    # Collect all summit catalogs selected explicitly or implicitly.
    summit_names = []
    if options.partspecs is None:
        for name, spec in list(project.catalogs[SUMMIT_ID].items()):
            path, subdir = spec[0] # summit catalogs are unique
            if options.selcatf(path):
                summit_names.append(name)
    else:
        for branch_id in options.partspecs:
            for part_spec in options.partspecs[branch_id]:

                if branch_id == SUMMIT_ID: # explicit by summit reference
                    if part_spec.find(os.sep) >= 0: # whole subdir
                        sel_subdir = os.path.normpath(part_spec)
                        one_found = False
                        for name, spec in list(project.catalogs[SUMMIT_ID].items()):
                            path, subdir = spec[0] # summit catalogs are unique
                            if sel_subdir == subdir:
                                one_found = True
                                if options.selcatf(path):
                                    summit_names.append(name)
                        if not one_found:
                            error(_("@info",
                                    "No summit directory named '%(name)s'.",
                                    name=sel_subdir))
                    else: # single name
                        sel_name = part_spec
                        spec = project.catalogs[SUMMIT_ID].get(sel_name)
                        if not spec:
                            error(_("@info",
                                    "No summit catalog named '%(name)s'.",
                                    name=sel_name))
                        path, subdir = spec[0] # summit catalogs are unique
                        if options.selcatf(path):
                            summit_names.append(sel_name)

                else: # implicit by branch reference
                    if part_spec.find(os.sep) >= 0: # whole subdir
                        sel_subdir = os.path.normpath(part_spec)
                        one_found = False
                        for name, spec in list(project.catalogs[branch_id].items()):
                            for path, subdir in spec:
                                if sel_subdir == subdir:
                                    one_found = True
                                    if options.selcatf(path):
                                        summit_names.extend(
                                            project.direct_map[branch_id][name])
                                    break
                        if not one_found:
                            error(_("@info",
                                    "No directory named '%(name)s' "
                                    "in branch '%(branch)s'.",
                                    name=sel_subdir, branch=branch_id))
                    else: # single name
                        sel_name = part_spec
                        spec = project.catalogs[branch_id].get(sel_name)
                        if not spec:
                            error(_("@info",
                                    "No catalog named '%(name)s' "
                                    "in branch '%(branch)s'.",
                                    name=sel_name, branch=branch_id))
                        for path, subdir in spec:
                            if options.selcatf(path):
                                summit_names.extend(
                                    project.direct_map[branch_id][sel_name])
                            break

    # Make names unique and sort by path.
    summit_names = list(set(summit_names))
    summit_names.sort(key=lambda x: project.catalogs[SUMMIT_ID].get(x, [[""]])[0][0])

    # Additionaly sort by subdirectory precedence.
    # This is necessary so that catalogs can be properly moved when gathering,
    # in case a higher precedence subdirectory was not created before.
    # Default "~" means that catalogs with no paths will be sorted at end.
    summit_names.sort(key=lambda x: project.calc_subdir_precedence(
        project.catalogs[SUMMIT_ID].get(x, [["", "~"]])[0][1]))

    return summit_names
1524
1525
def summit_gather_single (summit_name, project, options,
                          phony=False, pre_summit_names=(), memo_store=None,
                          update_progress=(lambda: None)):
    """
    Gather one summit catalog from all branch catalogs mapped to it.

    If phony is true, the catalog is only built in memory (as reference
    for gathering other catalogs in split-mappings) and not synced to
    disk. pre_summit_names are summit names already being gathered up
    the recursion chain, used to avoid cycling through split-mappings.
    memo_store, when given, caches gathered catalogs keyed by catalog
    name and pre-summit names. update_progress is a no-argument callback
    ticking the progress indicator.

    Returns the gathered summit catalog object, or None in phony mode
    when no usable branch catalogs correspond to it.
    """

    if memo_store is not None:
        memo_key = (summit_name, tuple(sorted(pre_summit_names)))
        if memo_key in memo_store: # value can be None
            return memo_store.get(memo_key)

    update_progress()

    summit_path = project.catalogs[SUMMIT_ID][summit_name][0][0]
    summit_subdir = project.catalogs[SUMMIT_ID][summit_name][0][1]

    update_from_old = ( os.path.exists(summit_path)
                       and not project.templates_dynamic)

    # Do not overwrite the old summit catalog here if it exists,
    # as it will be needed for comparison later.
    monitored = update_from_old
    summit_cat = Catalog("", monitored=monitored,
                         wrapping=project.summit_wrapping, create=True)
    summit_cat.filename = summit_path

    # Collect branches in which this summit catalog has corresponding
    # branch catalogs, in order of branch priority.
    src_branch_ids = []
    for branch_id in project.branch_ids:
        if project.full_inverse_map[summit_name][branch_id]:
            src_branch_ids.append(branch_id)

    # If there are no branch catalogs,
    # then the current summit catalog is to be removed.
    if not src_branch_ids:
        if phony: # cannot happen
            error(_("@info",
                    "Phony gather on summit catalog which is to be removed."))

        # Remove by version control, if any.
        if project.summit_vcs:
            if not project.summit_vcs.remove(summit_path):
                warning(_("@info",
                          "Cannot remove '%(path)s' from version control.",
                          path=summit_path))
        # If not removed by version control, plainly delete.
        if os.path.isfile(summit_path):
            os.unlink(summit_path)
            if os.path.isfile(summit_path):
                warning(_("@info",
                          "Cannot remove '%(path)s' from disk.",
                          path=summit_path))

        if not os.path.isfile(summit_path):
            if options.verbose:
                actype = _("@item:intext action performed on a catalog",
                           "gathered-removed")
                report("-    (%s) %s" % (actype, summit_path))
            elif not options.quiet:
                report("-    %s" % summit_path)

        # Skip the rest, nothing to gather.
        if memo_store is not None:
            memo_store[memo_key] = summit_cat
        return summit_cat

    # Open all corresponding branch catalogs.
    # For each branch catalog, also phony-gather any dependent summit
    # catalogs. Phony means not to take into account branch catalogs which
    # map to current summit catalog if it is higher in their queue than
    # the phony-gathered one, and not to sync phony-gathered catalog;
    # this is needed in order that any new messages get inserted
    # uniquely and deterministically in case of split-mappings.
    bcat_pscats = {}
    if phony or memo_store is not None:
        sub_memo_store = memo_store
    else:
        sub_memo_store = {}
    for branch_id in src_branch_ids:

        branch = project.bdict[branch_id]
        # Resolve the per-branch no-similarity-matching setting, which
        # may be a list of name regexes, a callable, or a plain flag.
        if isinstance(branch.insert_nosim, (list, tuple)):
            apply_insert_nosim = lambda sn, sd: (
                any(re.search(rs, sn) for rs in branch.insert_nosim))
        elif callable(branch.insert_nosim):
            apply_insert_nosim = lambda sn, sd: branch.insert_nosim(sn, sd)
        else:
            apply_insert_nosim = lambda sn, sd: bool(branch.insert_nosim)

        bcat_pscats[branch_id] = []
        for branch_name in project.full_inverse_map[summit_name][branch_id]:

            # In phony-gather, do not use branch catalogs with split-mappings
            # which map to one of the summit catalogs among previous.
            phony_skip = False
            for dep_summit_name in project.direct_map[branch_id][branch_name]:
                if dep_summit_name in pre_summit_names:
                    phony_skip = True
                    break
            if phony_skip:
                continue

            # Gather and open dependent summit catalogs.
            dep_summit_cats = []
            sub_pre_summit_names = list(pre_summit_names)
            for dep_summit_name in project.direct_map[branch_id][branch_name]:
                if dep_summit_name == summit_name:
                    sub_pre_summit_names.append(summit_name)
                    continue
                dep_summit_cat = summit_gather_single(dep_summit_name,
                                                      project, options,
                                                      True,
                                                      sub_pre_summit_names,
                                                      sub_memo_store,
                                                      update_progress)
                if dep_summit_cat is not None:
                    dep_summit_cats.append(dep_summit_cat)

            # Open all branch catalogs of this name, ordered by path,
            # link them to the same dependent summit catalogs.
            for path, subdir in project.catalogs[branch_id][branch_name]:
                update_progress()

                # Apply hooks to branch catalog file, creating temporaries.
                tmp_path = None
                if project.hook_on_gather_file_branch:
                    # Temporary path should be such as to not modify the
                    # catalog name (e.g. appending ".mod" could make ".po"
                    # a part of the name).
                    tmp_path = path + "~mod"
                    shutil.copyfile(path, tmp_path)
                    exec_hook_file(branch_id, branch_name, subdir, tmp_path,
                                   project.hook_on_gather_file_branch)

                branch_cat = Catalog(tmp_path or path, monitored=False)
                if tmp_path: # as soon as catalog is opened, no longer needed
                    os.unlink(tmp_path)

                # Apply hooks to branch catalog.
                if project.hook_on_gather_cat_branch:
                    exec_hook_cat(branch_id, branch_name, subdir, branch_cat,
                                  project.hook_on_gather_cat_branch)
                    branch_cat.sync_map()

                # Apply hooks to all branch catalog messages here,
                # as they may modify message keys.
                if project.hook_on_gather_msg_branch:
                    for msg in branch_cat:
                        update_progress()
                        exec_hook_msg(branch_id, branch_name, subdir,
                                      msg, branch_cat,
                                      project.hook_on_gather_msg_branch)
                    branch_cat.sync_map()

                insert_nosim = apply_insert_nosim(branch_name, subdir)

                bcat_pscats[branch_id].append((branch_cat, dep_summit_cats,
                                               insert_nosim))

    # On phony gather, in case of split mappings,
    # it may happen that there are no corresponding branch catalogs.
    if phony and not any(bcat_pscats.values()):
        if memo_store is not None:
            memo_store[memo_key] = None
        return None

    # Select primary branch catalog.
    prim_branch_cat = None
    for branch_id in src_branch_ids:
        if bcat_pscats[branch_id]:
            prim_branch_cat = bcat_pscats[branch_id][0][0]
            break
    assert prim_branch_cat is not None

    # Gather messages through branch catalogs.
    for branch_id in src_branch_ids:
        for branch_cat, dep_summit_cats, insert_nosim in bcat_pscats[branch_id]:
            is_primary = branch_cat is prim_branch_cat
            summit_gather_single_bcat(branch_id, branch_cat, is_primary,
                                      summit_cat, monitored, dep_summit_cats,
                                      insert_nosim,
                                      project, options, update_progress)

    # Gather the summit header according to primary branch.
    summit_gather_single_header(summit_cat, prim_branch_cat, project, options)

    # Apply hooks to the summit messages.
    if project.hook_on_gather_msg:
        for msg in summit_cat:
            exec_hook_msg(SUMMIT_ID, summit_cat.name, summit_subdir,
                          msg, summit_cat, project.hook_on_gather_msg)

    # Apply hooks to the summit catalog.
    exec_hook_cat(SUMMIT_ID, summit_cat.name, summit_subdir, summit_cat,
                  project.hook_on_gather_cat)

    # If phony-gather, stop here and return summit catalog for reference.
    if phony:
        if memo_store is not None:
            memo_store[memo_key] = summit_cat
        return summit_cat

    # If the old summit catalog exists, compare with the new.
    # If there were any modified entries, or their order changed,
    # replace the old with the new summit catalog.
    # Copy over unmodified entries from the old catalog,
    # to avoid line reformatting.
    if update_from_old:
        old_cat = Catalog(summit_path, monitored=monitored,
                          wrapping=project.summit_wrapping)
        summit_created = False
        replace = False
        # Compare headers without some insignificant fields.
        if cmpnorm_hdr(summit_cat.header) == cmpnorm_hdr(old_cat.header):
            summit_cat.header = old_cat.header
        else:
            replace = True
        # Compare messages and their positions.
        for pos in range(len(summit_cat)):
            update_progress()
            old_pos = old_cat.find(summit_cat[pos])
            if pos != old_pos:
                replace = True
            if old_pos >= 0:
                if summit_cat[pos] == old_cat[old_pos]:
                    summit_cat[pos] = old_cat[old_pos]
                else:
                    replace = True
        # Compare lengths.
        if len(summit_cat) != len(old_cat):
            replace = True
    else:
        summit_created = True
        replace = True

    # Check if the catalog needs to be moved to another subdirectory.
    branch_subdirs = []
    for branch_id in project.full_inverse_map[summit_name]:
        for branch_name in project.full_inverse_map[summit_name][branch_id]:
            branch_subdirs_1 = []
            for bpath, bsubdir in project.catalogs[branch_id][branch_name]:
                bsubdir = project.subdir_map.get((branch_id, bsubdir), bsubdir)
                branch_subdirs_1.append(bsubdir)
            branch_subdirs_1.sort()
            branch_subdirs.extend(branch_subdirs_1)
    new_summit_path = summit_path
    if branch_subdirs:
        branch_subdirs = list(set(branch_subdirs))
        subdir_precs = list(map(project.calc_subdir_precedence, branch_subdirs))
        precs_subdirs = sorted(zip(subdir_precs, branch_subdirs))
        branch_subdirs_sel = [sd for pr, sd in precs_subdirs
                              if pr == precs_subdirs[0][0]]
        if summit_subdir not in branch_subdirs_sel:
            catext = summit_path[summit_path.rfind("."):]
            new_summit_path = join_ncwd(project.summit.topdir,
                                        branch_subdirs_sel[0],
                                        summit_name + catext)

    if replace or summit_cat.filename != new_summit_path:
        added = False
        moved = False

        if replace:
            # Set template creation date for the summit catalog
            # to the current date.
            # Do not try to trust branch template creation dates,
            # e.g. by copying the latest one.
            summit_cat.header.set_field("POT-Creation-Date", format_datetime(),
                                        before="PO-Revision-Date",
                                        reorder=True)

            # Sync to disk.
            summit_cat.sync()

            # Apply hooks to summit catalog file.
            exec_hook_file(SUMMIT_ID, summit_cat.name, summit_subdir,
                           summit_cat.filename, project.hook_on_gather_file)

            if summit_created:
                added = True
            # Add to version control.
            if ( project.summit_vcs
                and not project.summit_vcs.is_versioned(summit_cat.filename)
            ):
                if not project.summit_vcs.add(summit_cat.filename):
                    warning(_("@info",
                              "Cannot add '%(file)s' to version control.",
                              file=summit_cat.filename))
                else:
                    added = True

        if summit_cat.filename != new_summit_path:
            if project.summit_vcs:
                if not project.summit_vcs.move(summit_cat.filename,
                                               new_summit_path):
                    warning(_("@info",
                              "Cannot move '%(srcfile)s' to '%(dstfile)s'.",
                              srcfile=summit_cat.filename,
                              dstfile=new_summit_path))
                else:
                    summit_cat.filename = new_summit_path
                    moved = True

        # Report what was done, with the branch catalogs involved.
        branch_paths = []
        for branch_id in src_branch_ids:
            for branch_cat, dep_summit_cats, insert_nosim in bcat_pscats[branch_id]:
                branch_paths.append(branch_cat.filename)
        paths_str = " ".join(branch_paths)
        if options.verbose:
            if added:
                actype = _("@item:intext action performed on a catalog",
                           "gathered-added")
                report(">+   (%s) %s  %s"
                       % (actype, summit_cat.filename, paths_str))
            elif moved:
                actype = _("@item:intext action performed on a catalog",
                           "gathered-moved")
                report(">|   (%s) %s  %s"
                       % (actype, summit_cat.filename, paths_str))
            else:
                actype = _("@item:intext action performed on a catalog",
                           "gathered")
                report(">    (%s) %s  %s"
                       % (actype, summit_cat.filename, paths_str))
        elif not options.quiet:
            if added:
                report(">+   %s  %s" % (summit_cat.filename, paths_str))
            elif moved:
                report(">|   %s  %s" % (summit_cat.filename, paths_str))
            else:
                report(">    %s  %s" % (summit_cat.filename, paths_str))

    if memo_store is not None:
        memo_store[memo_key] = summit_cat
    return summit_cat
1860
1861
def cmpnorm_hdr (hdr):
    """
    Return a copy of the header normalized for comparison,
    with volatile fields (currently only POT-Creation-Date) removed.
    """
    normalized = Header(hdr)
    volatile_fields = (
        "POT-Creation-Date",
    )
    for field_name in volatile_fields:
        normalized.remove_field(field_name)
    return normalized
1870
1871
def extkey_msg (msg):
    """
    Return a copy of the message with its msgctxt padded by a digest
    derived from msgid_plural, producing an "extended key" that keeps
    messages of different plurality distinct in the summit.
    """
    # NOTE: If computation of context pad is modified,
    # padded messages in existing summit catalogs will get fuzzy
    # on next merge with newly gathered templates.

    padded = MessageUnsafe(msg)
    if padded.msgid_plural is not None:
        digest = hashlib.md5()
        digest.update(padded.msgid_plural.encode("UTF-8"))
        ctxtpad = digest.hexdigest()
    else:
        # Something that looks like a hex digest but slightly shorter,
        # so that it does not match any real digest.
        ctxtpad = "abcd1234efgh5665hgfe4321dcba"
    padded.auto_comment.append("%s msgctxt-pad %s"
                               % (_summit_tag_kwprop, ctxtpad))
    if padded.msgctxt is None:
        padded.msgctxt = "%s" % ctxtpad
    else:
        padded.msgctxt = "%s|%s" % (padded.msgctxt, ctxtpad)

    return padded
1895
1896
def summit_gather_single_bcat (branch_id, branch_cat, is_primary,
                               summit_cat, monitored, dep_summit_cats,
                               insert_nosim,
                               project, options, update_progress):
    """
    Gather messages from one branch catalog into the summit catalog.

    Messages already present in the summit are merged in place, while
    new messages are first collected and then inserted near existing
    summit messages coming from the same (possibly renamed) source
    files, positioned by similarity of msgctxt/msgid unless
    insert_nosim is set. Obsolete branch messages and messages present
    in any of dep_summit_cats are skipped. The branch catalog itself
    may be modified when messages have to be switched to extended keys.
    update_progress is called once per examined message.
    """

    # New summit messages must be monitored only when the summit
    # catalog itself is monitored.
    MessageType = (Message if monitored else MessageUnsafe)

    # Go through messages in the branch catalog, merging them with
    # existing summit messages, or collecting for later insertion.
    # Do not insert new messages immediately, as source references may be
    # updated by merging, which reflects on heuristic insertion.
    # Ignore messages present in dependent summit catalogs.
    msgs_to_merge = []
    msgs_to_insert = []
    xkpairs = []
    for msg in branch_cat:
        update_progress()

        # Do not gather obsolete messages.
        if msg.obsolete:
            continue

        # Normalizations when gathering templates,
        # in case extraction tool needs to have its sanity checked,
        # or certain language files stand in for true templates.
        if project.lang == project.templates_lang:
            msg.manual_comment[:] = []
            msg.unfuzzy()
            if msg.msgid_plural is None:
                msg.msgstr[:] = [""]
            else:
                msg.msgstr[:] = ["", ""]

        # Construct branch message with extended key.
        xkmsg = extkey_msg(msg)

        # Do not gather messages belonging to depending summit catalogs.
        in_dep = False
        for dep_summit_cat in dep_summit_cats:
            if msg in dep_summit_cat or xkmsg in dep_summit_cat:
                in_dep = True
                break
        if in_dep:
            continue

        # If the summit message for the original branch message exists,
        # but their extended keys do not match,
        # switch to branch message with extended key.
        summit_msg = summit_cat.get(msg)
        if summit_msg and extkey_msg(summit_msg).key != xkmsg.key:
            xkpairs.append((msg, xkmsg))
            msg = xkmsg
            summit_msg = summit_cat.get(msg)

        # Collect the branch message for merging or insertion.
        if summit_msg is not None:
            msgs_to_merge.append((msg, summit_msg))
        else:
            msgs_to_insert.append(msg)

    # If some messages had to have extended keys, update branch catalog.
    if xkpairs:
        for msg, xkmsg in xkpairs:
            branch_cat.remove_on_sync(msg)
            branch_cat.add_last(xkmsg)
        branch_cat.sync_map()

    # Merge messages already in the summit catalog.
    if msgs_to_merge:
        for msg, summit_msg in msgs_to_merge:
            # Merge the message.
            gather_merge_msg(summit_msg, msg)
            # Update automatic comments.
            summit_override_auto(summit_msg, msg, branch_id, is_primary)
            # Equip any new summit tags to the merged message.
            summit_set_tags(summit_msg, branch_id, project)

    # Insert messages not already in the summit catalog.
    if msgs_to_insert:
        # Pair messages to insert from branch with summit messages
        # having common source files.
        # If summit is empty, this is primary branch catalog, so make
        # only one dummy pair to preserve original ordering of messages.
        summit_msgs_by_src_dict = dict(summit_cat.messages_by_source())
        if summit_msgs_by_src_dict:
            msgs_by_src = branch_cat.messages_by_source()
        else:
            msgs_by_src = [("", branch_cat)]

        # Collect possible source file synonyms to those in the summit catalog.
        fnsyn = branch_cat.detect_renamed_sources(summit_cat)

        # Prepare messages for insertion into summit.
        summit_msg_by_msg = {}
        for msg in msgs_to_insert:
            update_progress()
            summit_msg = MessageType(msg)
            summit_set_tags(summit_msg, branch_id, project)
            summit_msg_by_msg[msg] = summit_msg

        # Insert branch messages into summit source by source.
        for src, msgs in msgs_by_src:

            # Assemble collection of summit messages from same source file.
            summit_msgs = []
            for osrc in [src] + fnsyn.get(src, []):
                summit_msgs.extend(summit_msgs_by_src_dict.get(osrc, []))

            # If existing summit messages from same source found,
            # insert branch messages around those summit messages.
            # Otherwise, just append them at the end.
            if summit_msgs:

                # Assemble groups of messages by same msgid and same msgctxt,
                # for insertion by similarity.
                if not insert_nosim:
                    smsgs_by_msgid = {}
                    smsgs_by_msgctxt = {}
                    for smsg in summit_msgs:
                        if smsg.msgid not in smsgs_by_msgid:
                            smsgs_by_msgid[smsg.msgid] = []
                        smsgs_by_msgid[smsg.msgid].append(smsg)
                        if smsg.msgctxt is not None:
                            if smsg.msgctxt not in smsgs_by_msgctxt:
                                smsgs_by_msgctxt[smsg.msgctxt] = []
                            smsgs_by_msgctxt[smsg.msgctxt].append(smsg)

                insertions = []
                for msg in msgs:
                    update_progress()
                    new_summit_msg = summit_msg_by_msg.get(msg)
                    if new_summit_msg is None:
                        continue

                    # Existing summit message to where (after or before)
                    # current message is to be inserted.
                    summit_msg_ref = None
                    before = False

                    # Try to insert message by similarity.
                    # Similarity is checked by groups,
                    # such that for each group there is a message part
                    # which is compared for similarity.
                    if not insert_nosim:
                        for summit_msgs_group, matt, forceins in (
                            (smsgs_by_msgid.get(msg.msgid), "msgctxt", True),
                            (smsgs_by_msgctxt.get(msg.msgctxt), "msgid", True),
                            (summit_msgs, "key", False),
                        ):
                            if not summit_msgs_group:
                                continue

                            # Shortcut: if only one summit message in the group
                            # and insertion forced, insert after it.
                            if len(summit_msgs_group) == 1 and forceins:
                                summit_msg_ref = summit_msgs_group[-1]
                                break

                            # Does the message have the part to be matched?
                            mval = msg.get(matt)
                            if mval is None:
                                continue

                            # Find existing message with the most similar
                            # matching attribute.
                            seqm = SequenceMatcher(None, mval, "")
                            maxr = 0.0
                            for summit_msg in summit_msgs_group:
                                smval = summit_msg.get(matt)
                                if smval is None:
                                    continue
                                seqm.set_seq2(smval)
                                r = seqm.ratio()
                                if maxr <= r:
                                    maxr = r
                                    maxr_summit_msg = summit_msg

                            # If similar enough message has been found,
                            # set insertion position after it.
                            # Otherwise, insert after last summit message
                            # in the group if insertion forced.
                            if maxr > 0.6:
                                summit_msg_ref = maxr_summit_msg
                                break
                            elif forceins:
                                summit_msg_ref = summit_msgs_group[-1]
                                break

                    # If no similar existing message, set position before
                    # the summit message with first greater source reference
                    # line number, if any such.
                    if summit_msg_ref is None and src:
                        for summit_msg in summit_msgs:
                            if msg.source[0][1] < summit_msg.source[0][1]:
                                summit_msg_ref = summit_msg
                                before = True
                                break

                    # If not insertion by source references, insert last.
                    if summit_msg_ref is None:
                        summit_msg_ref = summit_msgs[-1]

                    # Record insertion.
                    pos = summit_cat.find(summit_msg_ref)
                    if not before:
                        pos += 1
                    insertions.append((new_summit_msg, pos))

                # Insert ordered messages into catalog.
                summit_cat.add_more(insertions)

            else:
                for msg in msgs:
                    update_progress()
                    new_summit_msg = summit_msg_by_msg.get(msg)
                    if new_summit_msg is not None:
                        summit_cat.add_last(new_summit_msg)
2114
2115
def gather_merge_msg (summit_msg, msg):
    """
    Merge a branch message into its corresponding summit message
    on gathering, according to translation-state combination:
    equal states take over comments/msgstr from the branch; a
    translated branch message unfuzzies a fuzzy summit message;
    a translated or fuzzy branch message fills an untranslated one.
    Aborts with an error on key or plurality mismatch.
    """

    if summit_msg.key != msg.key:
        error(_("@info",
                "Cannot gather messages with different keys."))
    if (summit_msg.msgid_plural is None) != (msg.msgid_plural is None):
        error(_("@info",
                "Cannot gather messages with different plurality."))

    # Same translation state on both sides: branch content wins,
    # but manual comments are kept if the summit already has some.
    if (   (summit_msg.translated and msg.translated)
        or (summit_msg.fuzzy and msg.fuzzy)
        or (summit_msg.untranslated and msg.untranslated)
    ):
        if not summit_msg.manual_comment:
            summit_msg.manual_comment = Monlist(msg.manual_comment)
        if msg.msgid_plural is not None:
            summit_msg.msgid_plural = msg.msgid_plural
        summit_msg.msgstr = Monlist(msg.msgstr)

    # Fuzzy summit, translated branch: take over the translation,
    # unfuzzying only when pluralities line up.
    elif summit_msg.fuzzy and msg.translated:
        summit_msg.manual_comment = Monlist(msg.manual_comment)
        if summit_msg.msgid_plural is None or msg.msgid_plural is not None:
            if msg.msgid_plural is not None:
                summit_msg.msgid_plural = msg.msgid_plural
            summit_msg.msgstr = Monlist(msg.msgstr)
            if summit_msg.msgid_plural == msg.msgid_plural:
                summit_msg.unfuzzy()

    # Untranslated summit, translated or fuzzy branch: take over the
    # translation and, for fuzzy branch messages, the previous-field set.
    elif summit_msg.untranslated and (msg.translated or msg.fuzzy):
        summit_msg.manual_comment = Monlist(msg.manual_comment)
        if summit_msg.msgid_plural is None or msg.msgid_plural is not None:
            if msg.fuzzy:
                summit_msg.msgctxt_previous = msg.msgctxt_previous
                summit_msg.msgid_previous = msg.msgid_previous
                summit_msg.msgid_plural_previous = msg.msgid_plural_previous
            if msg.msgid_plural is not None:
                summit_msg.msgid_plural = msg.msgid_plural
            summit_msg.msgstr = Monlist(msg.msgstr)
            summit_msg.fuzzy = msg.fuzzy
2155
2156
def summit_gather_single_header (summit_cat, prim_branch_cat,
                                 project, options):
    """
    Refresh the summit catalog header from the primary branch catalog:
    comment-like parts and standard fields are mirrored, and
    non-standard fields listed in project.header_propagate_fields are
    copied over when they differ.
    """
    shdr = summit_cat.header
    bhdr = prim_branch_cat.header

    # Take over the comment-like header parts verbatim.
    shdr.title = bhdr.title
    shdr.copyright = bhdr.copyright
    shdr.license = bhdr.license
    shdr.author = bhdr.author
    shdr.comment = bhdr.comment

    # Mirror each standard field: set it when present in the branch
    # header, drop it otherwise.
    for fname in [x[0] for x in Header().field]:
        fvalue = bhdr.get_field_value(fname)
        if fvalue is None:
            shdr.remove_field(fname)
        else:
            shdr.set_field(fname, fvalue)

    # Collect requested non-standard fields from branch and summit.
    bfields = []
    for fname in project.header_propagate_fields:
        bfields.extend(bhdr.select_fields(fname))
    cfields = []
    for fname in project.header_propagate_fields:
        cfields.extend(shdr.select_fields(fname))
    # Replace the old set with the new one only when they differ.
    if bfields != cfields:
        for cfield in cfields:
            shdr.field.remove(cfield)
        for bfield in bfields:
            shdr.field.append(bfield)
2190
2191
# Cache of ascription-filter verdicts, keyed first by ascription catalog
# path and then by summit message key; shared across calls to
# summit_scatter_single() so each message is checked at most once.
_asc_check_cache = {}
2193
def summit_scatter_single (branch_id, branch_name, branch_subdir,
                           branch_path, summit_paths,
                           project, options, update_progress):
    """
    Scatter translations from the given summit catalogs into one
    branch catalog.

    Branch messages are looked up in the summit catalogs (extended-key
    form first) and filled with translated summit content, subject to
    the project's ascription filter and minimum-completeness settings.
    A branch catalog newly created from a template is only written out
    when translated enough; successful writes are followed by scatter
    file hooks and optional version-control addition.
    update_progress is called once per processed message.
    """

    update_progress()

    # See if the branch catalog is to be newly created from the template.
    new_from_template = False
    branch_path_mod = branch_path
    if branch_path in project.add_on_scatter:
        new_from_template = True
        # Initialize new catalog with messages directly from the template.
        # Later the catalog file name will be switched to branch path,
        # if the catalog satisfies criteria to be created on scatter.
        branch_path_mod = project.add_on_scatter[branch_path]

    # Open the branch catalog and all summit catalogs.
    try:
        branch_cat = Catalog(branch_path_mod, wrapping=project.branches_wrapping)
    except PologyError as e:
        warning(_("@info",
                  "Cannot open the branch catalog '%(file)s' "
                  "to scatter to. The error was:\n"
                  "%(msg)s",
                  file=branch_path_mod, msg=str_to_unicode(str(e))))
        return
    summit_cats = []
    for summit_path in summit_paths:
        try:
            # NOTE: Must be opened monitored to have compatible types
            # when copying message parts to branch message.
            summit_cat = Catalog(summit_path)
        except PologyError as e:
            warning(_("@info",
                      "Cannot open the summit catalog '%(file)s' "
                      "to scatter from. The error was:\n"
                      "%(msg)s",
                      file=summit_path, msg=str_to_unicode(str(e))))
            return
        summit_cats.append(summit_cat)

    # Collect and link ascription catalogs to summit catalogs.
    # (Do not open them here, but only later when a check is not cached.)
    if project.ascription_filter:
        aconfs_acats = {}
        for summit_cat in summit_cats:
            aconf, acatpath = project.aconfs_acatpaths[summit_cat.name]
            aconfs_acats[summit_cat.name] = (aconf, None, acatpath)
            if acatpath not in _asc_check_cache:
                _asc_check_cache[acatpath] = {}

    # Pair branch messages with summit messages.
    msgs_total = 0
    msgs_translated = 0
    msg_links = []
    asc_stopped = 0
    for branch_msg in branch_cat:
        update_progress()

        # Skip obsolete messages.
        if branch_msg.obsolete:
            continue
        msgs_total += 1

        # If there is a hook on branch messages on gather,
        # it must be used here to prepare branch message for lookup
        # in summit catalog, as the hook may modify the key.
        branch_msg_lkp = branch_msg
        if project.hook_on_gather_msg_branch:
            branch_msg_lkp = MessageUnsafe(branch_msg)
            exec_hook_msg(branch_id, branch_name, branch_subdir,
                          branch_msg_lkp, branch_cat,
                          project.hook_on_gather_msg_branch)

        # Construct branch message for lookup with extended key.
        branch_xkmsg_lkp = extkey_msg(branch_msg_lkp)

        # Find first summit catalog which has this message translated.
        summit_msg = None
        for summit_cat in summit_cats:
            # Branch message with extended key must be looked up first.
            for bmsg_lkp in [branch_xkmsg_lkp, branch_msg_lkp]:
                if bmsg_lkp in summit_cat:
                    summit_msg = summit_cat[bmsg_lkp]
                    if summit_msg.obsolete:
                        summit_msg = None
                    else:
                        break
            if summit_msg is not None:
                break

        if summit_msg is None:
            report_on_msg(_("@info:progress",
                            "Message not in the summit."),
                          branch_msg, branch_cat)
            continue

        if (    project.ascription_filter and not options.force
            and do_scatter(summit_msg, branch_msg)
        ):
            # Run the ascription filter at most once per message,
            # caching the verdict per ascription catalog path.
            aconf, acat, acatpath = aconfs_acats[summit_cat.name]
            if summit_msg.key not in _asc_check_cache[acatpath]:
                if acat is None:
                    # Lazily open the ascription catalog on first
                    # non-cached check.
                    acat = Catalog(acatpath, monitored=False, create=True)
                    aconfs_acats[summit_cat.name] = (aconf, acat, acatpath)
                hfilter = project.ascription_history_filter
                ahist = collect_ascription_history(summit_msg, acat, aconf,
                                                   nomrg=True, hfilter=hfilter)
                afilter = project.ascription_filter
                res = afilter(summit_msg, summit_cat, ahist, aconf)
                _asc_check_cache[acatpath][summit_msg.key] = res
            if not _asc_check_cache[acatpath][summit_msg.key]:
                asc_stopped += 1
                continue

        if summit_msg.translated:
            msgs_translated += 1
        msg_links.append((branch_msg, summit_msg, summit_cat))

    if asc_stopped > 0:
        warning(n_("@info:progress",
                   "%(file)s: %(num)d message stopped by ascription filter.",
                   "%(file)s: %(num)d messages stopped by ascription filter.",
                   file=branch_path, num=asc_stopped))

    # If completeness less than minimal acceptable, remove all translations.
    if msgs_total > 0:
        completeness_ratio = float(msgs_translated) / msgs_total
    else:
        completeness_ratio = 1.0
    if (    completeness_ratio < project.scatter_acc_completeness
        and not options.force
    ):
        for branch_msg in branch_cat:
            if branch_msg.obsolete:
                branch_cat.remove_on_sync(branch_msg)
            else:
                clear_msg(branch_msg)

    # If complete enough, scatter from summit to branch messages.
    else:
        scattered_branch_msgs = set()
        for branch_msg, summit_msg, summit_cat in msg_links:
            update_progress()

            if do_scatter(summit_msg, branch_msg):
                exec_hook_msg(branch_id, branch_name, branch_subdir,
                              summit_msg, summit_cat,
                              project.hook_on_scatter_msg)

                # NOTE: Same plurality and equal msgid_plural fields
                # between summit and branch message are enforced,
                # so only assert this for robustness.
                if summit_msg.msgid_plural != branch_msg.msgid_plural:
                    error(_("@info",
                            "Cannot scatter messages with "
                            "different plurality."))

                for i in range(len(summit_msg.msgstr)):
                    piped_msgstr = exec_hook_msgstr(
                        branch_id, branch_name, branch_subdir,
                        summit_msg.msgstr[i], summit_msg, summit_cat,
                        project.hook_on_scatter_msgstr)
                    if i < len(branch_msg.msgstr):
                        branch_msg.msgstr[i] = piped_msgstr
                    else:
                        branch_msg.msgstr.append(piped_msgstr)
                branch_msg.unfuzzy()
                branch_msg.manual_comment = summit_msg.manual_comment
                scattered_branch_msgs.add(branch_msg)

        # Fuzzy all active messages which were not scattered,
        # in order to avoid stale translations in branches.
        for branch_msg in branch_cat:
            if branch_msg.active and branch_msg not in scattered_branch_msgs:
                branch_msg.fuzzy = True
                branch_msg.msgctxt_previous = branch_msg.msgctxt
                branch_msg.msgid_previous = branch_msg.msgid
                branch_msg.msgid_plural_previous = branch_msg.msgid_plural

    # Update branch header based on primary summit catalog.
    # Copy over all header parts from summit to branch,
    # except for those copied from template on merging.
    hdr = branch_cat.header
    shdr = summit_cats[0].header
    # Fields to keep due to being copied over on merging.
    keep_fields = [
        "Report-Msgid-Bugs-To",
        "POT-Creation-Date",
    ]
    # Fields to keep if no branch message was modified.
    if not branch_cat.modcount and branch_cat.header.initialized:
        keep_fields.extend([
            "PO-Revision-Date",
            "Last-Translator",
        ])
    # Fields to keep due to explicitly being told to.
    keep_fields.extend(project.header_skip_fields_on_scatter)
    # Update comments.
    hdr.title = shdr.title
    hdr.copyright = shdr.copyright
    hdr.license = shdr.license
    hdr.author = shdr.author
    hdr.comment = shdr.comment
    # Update fields only if normalized lists of fields do not match.
    if normhf(hdr.field, keep_fields) != normhf(shdr.field, keep_fields):
        # Collect branch fields to be preserved.
        preserved_fs = []
        for fnam in keep_fields:
            selected_fs = branch_cat.header.select_fields(fnam)
            preserved_fs.append(selected_fs[0] if selected_fs else (fnam, None))
        # Overwrite branch with summit header fields.
        hdr.field = shdr.field
        # Put back the preserved branch fields.
        for fnam, fval in preserved_fs:
            if fval is not None:
                hdr.set_field(fnam, fval)
            else:
                hdr.remove_field(fnam)

    # Apply hooks to the branch catalog.
    exec_hook_cat(branch_id, branch_name, branch_subdir, branch_cat,
                  project.hook_on_scatter_cat)

    # If the branch catalog has been newly created,
    # see if it is translated enough to be really written out.
    skip_write = False
    if new_from_template and not options.force:
        ntrans = 0
        for msg in branch_cat:
            if msg.translated:
                ntrans += 1
        if len(branch_cat) > 0:
            skip_write = (  float(ntrans) / len(branch_cat) + 1e-6
                          < project.scatter_min_completeness)
        else:
            skip_write = False

    if new_from_template and not skip_write:
        # Create any needed subdirectories and set destination branch path.
        mkdirpath(os.path.dirname(branch_path))
        branch_cat.filename = branch_path

    # Commit changes to the branch catalog.
    if not skip_write and (branch_cat.sync() or options.force):

        # Apply hooks to branch catalog file.
        exec_hook_file(branch_id, branch_name, branch_subdir,
                       branch_cat.filename, project.hook_on_scatter_file)

        # Add to version control.
        if (    project.branches_vcs
            and not project.bdict[branch_id].skip_version_control
        ):
            if not project.branches_vcs.add(branch_cat.filename):
                warning(_("@info",
                          "Cannot add '%(file)s' to version control.",
                          file=branch_cat.filename))

        paths_str = " ".join(summit_paths)
        if options.verbose:
            if new_from_template:
                actype = _("@item:intext action performed on a catalog",
                           "scattered-added")
                report("<+ (%s) %s %s"
                       % (actype, branch_cat.filename, paths_str))
            else:
                actype = _("@item:intext action performed on a catalog",
                           "scattered")
                report("< (%s) %s %s"
                       % (actype, branch_cat.filename, paths_str))
        elif not options.quiet:
            if new_from_template:
                report("<+ %s %s" % (branch_cat.filename, paths_str))
            else:
                report("< %s %s" % (branch_cat.filename, paths_str))
2470
2471
def do_scatter (smsg, bmsg):
    """
    Decide whether the summit message should be scattered onto the
    branch message; only translated summit messages are scattered
    (bmsg is currently not consulted).
    """
    scatter = smsg.translated
    return scatter
2475
2476
def hook_applicable (branch_check, branch_id, name_check, name, subdir):
    """
    Decide whether a hook applies to the given branch and catalog.

    Each of branch_check and name_check may be None (no restriction),
    a callable -- called with branch_id, resp. (name, subdir) -- or a
    regular expression string matched by re.search against branch_id,
    resp. name. Returns True only if both checks pass.
    """
    if branch_check is not None:
        # callable() is the idiomatic test (was hasattr(x, "__call__")).
        if callable(branch_check):
            if not branch_check(branch_id):
                return False
        else:
            if not re.search(branch_check, branch_id):
                return False

    if name_check is not None:
        if callable(name_check):
            if not name_check(name, subdir):
                return False
        else:
            # Note: subdir is intentionally not regex-matched,
            # only the catalog name.
            if not re.search(name_check, name):
                return False

    return True
2496
2497
2498 # Pipe msgstr through hook calls,
2499 # for which branch id and catalog name match hook specification.
# Pipe msgstr through hook calls,
# for which branch id and catalog name match hook specification.
def exec_hook_msgstr (branch_id, branch_name, branch_subdir,
                      msgstr, msg, cat, hooks):
    """
    Return msgstr piped through all applicable hooks; a hook's return
    value replaces the running msgstr only when it is a string.
    """
    current = msgstr
    for call, branch_ch, name_ch in hooks:
        if not hook_applicable(branch_ch, branch_id, name_ch,
                               branch_name, branch_subdir):
            continue
        candidate = call(current, msg, cat)
        if isinstance(candidate, str):
            current = candidate
    return current
2512
2513
2514 # Pipe message through hook calls,
2515 # for which branch id and catalog name match hook specification.
# Pipe message through hook calls,
# for which branch id and catalog name match hook specification.
def exec_hook_msg (branch_id, branch_name, branch_subdir, msg, cat, hooks):
    """
    Apply every applicable hook to the message, in order.
    """
    for call, branch_ch, name_ch in hooks:
        applies = hook_applicable(branch_ch, branch_id, name_ch,
                                  branch_name, branch_subdir)
        if applies:
            call(msg, cat)
2523
2524
2525 # Pipe header through hook calls,
2526 # for which branch id and catalog name match hook specification.
# Pipe header through hook calls,
# for which branch id and catalog name match hook specification.
def exec_hook_head (branch_id, branch_name, branch_subdir, hdr, cat, hooks):
    """
    Apply every applicable hook to the catalog header, in order.
    """
    for call, branch_ch, name_ch in hooks:
        applies = hook_applicable(branch_ch, branch_id, name_ch,
                                  branch_name, branch_subdir)
        if applies:
            call(hdr, cat)
2534
2535
2536 # Pipe catalog through hook calls,
2537 # for which branch id and catalog name match hook specification.
# Pipe catalog through hook calls,
# for which branch id and catalog name match hook specification.
def exec_hook_cat (branch_id, branch_name, branch_subdir, cat, hooks):
    """
    Apply every applicable hook to the catalog, in order.
    """
    for call, branch_ch, name_ch in hooks:
        applies = hook_applicable(branch_ch, branch_id, name_ch,
                                  branch_name, branch_subdir)
        if applies:
            call(cat)
2545
2546
2547 # Pipe catalog file through hook calls,
2548 # for which branch id and catalog name match hook specification.
# Pipe catalog file through hook calls,
# for which branch id and catalog name match hook specification.
def exec_hook_file (branch_id, branch_name, branch_subdir, filepath, hooks):
    """
    Apply every applicable file hook to the file at filepath.
    The file is backed up first; if any hook returns non-zero status,
    the remaining hooks are skipped and the backup is restored,
    otherwise the backup is removed.
    """
    # Make temporary backup of the file.
    # FIXME: Portable construction of temporary file.
    bckppath = "/tmp/backup%s-%s" % (os.getpid(), os.path.basename(filepath))
    shutil.copyfile(filepath, bckppath)

    # Apply all hooks to the file, but stop if one returns non-zero status.
    failed = False
    for call, branch_ch, name_ch in hooks:
        if not hook_applicable(branch_ch, branch_id, name_ch,
                               branch_name, branch_subdir):
            continue
        if call(filepath) != 0:
            failed = True
            break

    if failed:
        # A hook failed: restore the original file from the backup.
        shutil.move(bckppath, filepath)
    else:
        os.unlink(bckppath)
2570
2571
2572 # Pipe branch through hook calls,
2573 # for which branch id and matches hook specification.
# Pipe branch through hook calls,
# for which branch id matches hook specification.
def exec_hook_branch (branch_id, hooks):
    """
    Apply every applicable branch hook, stopping at the first hook
    that returns non-zero status. Returns None.
    """
    # The original kept a 'failed' flag that was never read or
    # returned; the early break alone expresses the intent.
    for call, branch_ch, d1 in hooks:
        if hook_applicable(branch_ch, branch_id, None, None, None):
            if call(branch_id) != 0:
                break
2583
2584
def find_summit_comment (msg, summit_tag):
    """
    Return the index of the first auto comment of the message that
    starts with summit_tag, or -1 if there is no such comment.
    """
    # enumerate() replaces the original manual index counter.
    for i, c in enumerate(msg.auto_comment):
        if c.startswith(summit_tag):
            return i
    return -1
2593
2594
def get_summit_comment (msg, summit_tag, default=""):
    """
    Return the text of the message's summit comment with the given tag,
    stripped of the tag and surrounding whitespace, or default when
    there is no such comment.
    """
    pos = find_summit_comment(msg, summit_tag)
    if pos < 0:
        return default
    return msg.auto_comment[pos][len(summit_tag):].strip()
2602
2603
def set_summit_comment (msg, summit_tag, text):
    """
    Set the message's summit comment with the given tag to the given
    text, replacing an existing tagged comment or appending a new one.
    """
    ctext = str(summit_tag + " " + text.strip())
    pos = find_summit_comment(msg, summit_tag)
    if pos < 0:
        msg.auto_comment.append(ctext)
    else:
        msg.auto_comment[pos] = ctext
2612
2613
# Auto-comment tags marking summit-maintained comments.
# Tag for the comment listing branch IDs a message appears in
# (see summit_set_tags()).
_summit_tag_branchid = "+>"
# Tag for keyword properties, e.g. the msgctxt-pad written by
# extkey_msg().
_summit_tag_kwprop = "+:"
# All known summit tags, used to split summit from non-summit
# auto comments.
_summit_tags = (
    _summit_tag_branchid,
    _summit_tag_kwprop,
)
2620
def summit_set_tags (msg, branch_id, project):
    """
    Record branch_id in the message's branch-ID summit comment.
    The comment is rewritten unconditionally, so its formatting is
    normalized even when the branch ID is already listed.
    """
    # Add branch ID.
    listed = get_summit_comment(msg, _summit_tag_branchid, "").split()
    if branch_id not in listed:
        listed.append(branch_id)
    set_summit_comment(msg, _summit_tag_branchid, " ".join(listed))
2628
2629
def summit_override_auto (summit_msg, branch_msg, branch_id, is_primary):
    """
    Override automatic data (flags, source references, auto comments)
    of the summit message with those of the branch message, but only
    when the branch is primary for this message; summit-tagged auto
    comments of the summit message are preserved.
    """
    # Copy auto/source/flag comments only if this is the primary branch
    # for the current message.
    if not is_primary:
        return

    # Equalize flags, except the fuzzy flag.
    for fl in branch_msg.flag:
        if fl != "fuzzy":
            summit_msg.flag.add(fl)
    # Iterate over a snapshot: elements are removed from
    # summit_msg.flag during the loop (the original iterated the
    # live collection while mutating it).
    for fl in list(summit_msg.flag):
        if fl != "fuzzy" and fl not in branch_msg.flag:
            summit_msg.flag.remove(fl)

    # Equalize source references.
    # FIXME: Once there is a way to reliably tell the root directory
    # of source references, add missing and remove obsolete source
    # references instead.
    summit_msg.source = Monlist(list(map(Monpair, branch_msg.source)))

    # Split auto comments of the current summit message into
    # summit and non-summit tagged comments.
    # Also of the branch message, in case it has summit-alike comments.
    summit_nscmnts, summit_scmnts = split_summit_comments(summit_msg)
    branch_nscmnts, branch_scmnts = split_summit_comments(branch_msg)

    # Override auto comments only if different overally
    # (which needs not be, due to double fresh/old insertion)
    # and non-summit auto comments of the current summit message
    # are different to the branch message auto comments.
    if (    summit_msg.auto_comment != branch_msg.auto_comment
        and summit_nscmnts != branch_nscmnts
    ):
        summit_msg.auto_comment = Monlist(branch_msg.auto_comment)
        summit_msg.auto_comment.extend(summit_scmnts)
2665
2666
def split_summit_comments (msg):
    """
    Partition the message's auto comments into non-summit and summit
    comments; a comment counts as a summit comment when its first word
    is one of the known summit tags.
    Returns (non_summit_comments, summit_comments).
    """
    plain = []
    tagged = []
    for comment in msg.auto_comment:
        words = comment.split()
        is_tagged = bool(words) and words[0] in _summit_tags
        (tagged if is_tagged else plain).append(comment)

    return plain, tagged
2679
2680
2681 def summit_merge_single (branch_id, catalog_name, catalog_subdir,
2682 catalog_path, template_path,
2683 wrapping, fuzzy_merging,
2684 project, options, update_progress):
2685
2686 update_progress()
2687
2688 # Gather the summit template in summit-over-dynamic-templates mode.
2689 if project.templates_dynamic and branch_id == SUMMIT_ID:
2690 summit_gather_single(catalog_name, project.tproject, project.toptions,
2691 update_progress=update_progress)
2692
2693 # FIXME: Portable construction of temporary file.
2694 tmp_path = os.path.join("/tmp", ( os.path.basename(catalog_path)
2695 + "~merged-%d" % os.getpid()))
2696
2697 # Whether to create pristine catalog from template.
2698 vivified = catalog_path in project.add_on_merge
2699
2700 # Skip calling msgmerge if template creation dates exist and are equal.
2701 do_msgmerge = True
2702 if not vivified and not project.templates_dynamic and not options.force:
2703 hdr = Catalog(catalog_path, monitored=False, headonly=True).header
2704 thdr = Catalog(template_path, monitored=False, headonly=True).header
2705 pcd = hdr.get_field_value("POT-Creation-Date")
2706 tpcd = thdr.get_field_value("POT-Creation-Date")
2707 do_msgmerge = (not pcd or not tpcd or pcd != tpcd)
2708
2709 header_prop_fields = project.header_propagate_fields
2710
2711 # Should merged catalog be opened, and in what mode?
2712 do_open = False
2713 headonly = False
2714 monitored = False
2715 otherwrap = set(wrapping).difference(["basic"])
2716 if otherwrap or project.hook_on_merge_msg or project.hook_on_merge_cat:
2717 do_open = True
2718 elif header_prop_fields or project.hook_on_merge_head or vivified:
2719 do_open = True
2720 headonly = True
2721 if ( header_prop_fields or vivified
2722 or project.hook_on_merge_head or project.hook_on_merge_msg
2723 or project.hook_on_merge_cat
2724 ):
2725 monitored = True
2726
2727 # Should template catalog be opened too?
2728 do_open_template = False
2729 if header_prop_fields or vivified:
2730 do_open_template = True
2731
2732 cat = None
2733 if do_msgmerge:
2734 # Create the temporary merged catalog.
2735 minasfz, refuzzy = 0.0, False
2736 cmppaths, fuzzex, minwnex = [], False, 0
2737 if branch_id == SUMMIT_ID:
2738 minasfz = project.merge_min_adjsim_fuzzy
2739 refuzzy = project.merge_rebase_fuzzy
2740 if project.compendium_on_merge:
2741 cmppaths.append(project.compendium_on_merge)
2742 fuzzex = project.compendium_fuzzy_exact
2743 minwnex = project.compendium_min_words_exact
2744 catalog_path_mod = catalog_path
2745 if vivified:
2746 if cmppaths:
2747 catalog_path_mod = "/dev/null"
2748 else:
2749 catalog_path_mod = tmp_path
2750 shutil.copyfile(template_path, tmp_path)
2751
2752 getcat = do_open and not headonly
2753 ignpotdate = project.templates_dynamic
2754 cat = merge_pofile(catalog_path_mod, template_path, outpath=tmp_path,
2755 wrapping=wrapping, fuzzymatch=fuzzy_merging,
2756 minasfz=minasfz, refuzzy=refuzzy,
2757 cmppaths=cmppaths, fuzzex=fuzzex, minwnex=minwnex,
2758 getcat=getcat, monitored=monitored,
2759 ignpotdate=ignpotdate,
2760 quiet=True, abort=False)
2761 if cat is None:
2762 warning(_("@info",
2763 "Catalog '%(file1)s' not merged with "
2764 "template '%(file2)s' due to errors on merging.",
2765 file1=catalog_path_mod, file2=template_path))
2766 return
2767 elif not getcat:
2768 # Catalog not requested, so the return value is True
2769 # indicating that the merge succedded.
2770 cat = None
2771
2772 else:
2773 # Copy current to temporary catalog, to be processed by hooks, etc.
2774 shutil.copyfile(catalog_path, tmp_path)
2775
2776 # Save good time by opening the merged catalog only if necessary,
2777 # and only as much as necessary.
2778
2779 # Open catalogs as necessary.
2780 if do_open:
2781 update_progress()
2782 if cat is None:
2783 cat = Catalog(tmp_path, monitored=monitored, wrapping=wrapping,
2784 headonly=headonly)
2785 if do_open_template:
2786 tcat = Catalog(template_path, monitored=False, headonly=True)
2787
2788 # Initialize header if the catalog has been vivified from template.
2789 if vivified:
2790 hdr = cat.header
2791 hdr.title = Monlist()
2792 hdr.copyright = ""
2793 hdr.license = ""
2794 hdr.author = Monlist()
2795 hdr.comment = Monlist()
2796 # Get the project ID from template;
2797 # if it gives default value, use catalog name instead.
2798 projid = tcat.header.get_field_value("Project-Id-Version")
2799 if not projid or "PACKAGE" in projid:
2800 projid = catalog_name
2801 hdr.set_field("Project-Id-Version", str(projid))
2802 rdate = time.strftime("%Y-%m-%d %H:%M%z")
2803 hdr.set_field("PO-Revision-Date", str(rdate))
2804 hdr.set_field("Last-Translator", str(project.vivify_w_translator))
2805 hdr.set_field("Language-Team", str(project.vivify_w_langteam))
2806 if project.vivify_w_language:
2807 hdr.set_field("Language", str(project.vivify_w_language),
2808 after="Language-Team", reorder=True)
2809 hdr.set_field("Content-Type",
2810 "text/plain; charset=%s" % project.vivify_w_charset)
2811 hdr.set_field("Content-Transfer-Encoding", "8bit")
2812 if project.vivify_w_plurals:
2813 hdr.set_field("Plural-Forms", str(project.vivify_w_plurals))
2814 else:
2815 hdr.remove_field("Plural-Forms")
2816
2817 # Propagate requested header fields.
2818 if header_prop_fields:
2819 # Preserve order of the fields when collecting.
2820 fields = []
2821 for field in cat.header.field:
2822 if field[0] in header_prop_fields:
2823 fields.append(field)
2824 tfields = []
2825 for tfield in tcat.header.field:
2826 if tfield[0] in header_prop_fields:
2827 tfields.append(tfield)
2828 # Replace the field sequence if not equal to that of the template.
2829 if fields != tfields:
2830 for field in fields:
2831 cat.header.field.remove(field)
2832 for tfield in tfields:
2833 cat.header.field.append(tfield)
2834
2835 # Set original instead of temporary file path -- hooks may expect it.
2836 if cat is not None:
2837 cat.filename = catalog_path
2838
2839 # Execute header hooks.
2840 if project.hook_on_merge_head:
2841 exec_hook_head(branch_id, catalog_name, catalog_subdir,
2842 cat.header, cat, project.hook_on_merge_head)
2843
2844 # Execute message hooks.
2845 if project.hook_on_merge_msg:
2846 for msg in cat:
2847 exec_hook_msg(branch_id, catalog_name, catalog_subdir,
2848 msg, cat, project.hook_on_merge_msg)
2849
2850 # Execute catalog hooks.
2851 if project.hook_on_merge_cat:
2852 exec_hook_cat(branch_id, catalog_name, catalog_subdir,
2853 cat, project.hook_on_merge_cat)
2854
2855 # Synchronize merged catalog if it has been opened.
2856 if cat is not None:
2857 cat.filename = tmp_path # not to overwrite original file
2858 cat.sync(force=otherwrap)
2859
2860 # Execute file hooks.
2861 if project.hook_on_merge_file:
2862 cat_name = os.path.basename(tmp_path)
2863 cat_name = cat_name[:cat_name.rfind(".po")]
2864 exec_hook_file(branch_id, cat_name, catalog_subdir, tmp_path,
2865 project.hook_on_merge_file)
2866
2867 # If there is any difference between merged and old catalog.
2868 if vivified or not filecmp.cmp(catalog_path, tmp_path):
2869 # Assert correctness of the merged catalog and move over the old.
2870 assert_system("msgfmt -c -o/dev/null %s " % tmp_path)
2871 added = False
2872 if vivified:
2873 added = True
2874 mkdirpath(os.path.dirname(catalog_path))
2875 shutil.move(tmp_path, catalog_path)
2876
2877 # Add to version control if not already added.
2878 vcs = project.summit_vcs if SUMMIT_ID else project.branches_vcs
2879 if ( vcs
2880 and ( branch_id == SUMMIT_ID or
2881 not project.bdict[branch_id].skip_version_control)
2882 and not vcs.is_versioned(catalog_path)
2883 ):
2884 if not vcs.add(catalog_path):
2885 warning(_("@info",
2886 "Cannot add '%(file)s' to version control.",
2887 file=catalog_path))
2888
2889 if options.verbose:
2890 if added:
2891 actype = _("@item:intext action performed on a catalog",
2892 "merged-added")
2893 report(".+ (%s) %s" % (actype, catalog_path))
2894 else:
2895 actype = _("@item:intext action performed on a catalog",
2896 "merged")
2897 report(". (%s) %s" % (actype, catalog_path))
2898 elif not options.quiet:
2899 if added:
2900 report(".+ %s" % catalog_path)
2901 else:
2902 report(". %s" % catalog_path)
2903
2904 # Remove the temporary merged catalog.
2905 if os.path.exists(tmp_path):
2906 os.remove(tmp_path)
2907
2908
2909 # Put header fields in canonical form, for equality checking.
2910 # Returns ordered list of (field name, field value).
def normhf (fields, excluded=()):
    """
    Put header fields in canonical form, for equality checking.

    Fields are reduced to plain (name, value) pairs and sorted, so two
    headers can be compared regardless of field order.

    @param fields: iterable of (field name, field value) pairs
    @param excluded: collection of field names to leave out of the result
        (immutable tuple default, avoiding the mutable-default-argument
        pitfall of the earlier C{excluded=[]})
    @returns: sorted list of (field name, field value) pairs not excluded
    """

    # Drop excluded fields, then sort for order-insensitive comparison.
    return sorted((fnam, fval) for fnam, fval in fields
                  if fnam not in excluded)
2921
2922
2923 # Remove all translator-related elements from the message.
def clear_msg (msg):
    """
    Strip all translator-supplied elements from a message.

    The message is unfuzzied, every translation string is blanked,
    and all manual (translator) comments are removed.

    @param msg: the message to clear (modified in place)
    @returns: the same message, for convenience of chaining
    """

    msg.unfuzzy()
    nstr = len(msg.msgstr)
    msg.msgstr[:] = nstr * [""]
    msg.manual_comment[:] = []

    return msg
2931
2932
if __name__ == '__main__':
    # Run main() under pology.fsops.exit_on_exception, which reports
    # uncaught errors in a user-friendly way instead of a raw traceback.
    exit_on_exception(main)