Devuan deployment of britney2
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
 
 
 

694 lines
34 KiB

  1. from itertools import chain
  2. from urllib.parse import quote
  3. import apt_pkg
  4. from britney2 import DependencyType
  5. from britney2.excuse import Excuse
  6. from britney2.policies import PolicyVerdict
  7. from britney2.utils import (invalidate_excuses, find_smooth_updateable_binaries, compute_item_name,
  8. get_dependency_solvers,
  9. )
  10. class ExcuseFinder(object):
  11. def __init__(self, options, suite_info, all_binaries, pkg_universe, policy_engine, mi_factory, hints):
  12. self.options = options
  13. self.suite_info = suite_info
  14. self.all_binaries = all_binaries
  15. self.pkg_universe = pkg_universe
  16. self._policy_engine = policy_engine
  17. self._migration_item_factory = mi_factory
  18. self.hints = hints
  19. self.excuses = {}
  20. def _excuse_unsat_deps(self, pkg, src, arch, source_suite, excuse, get_dependency_solvers=get_dependency_solvers):
  21. """Find unsatisfied dependencies for a binary package
  22. This method analyzes the dependencies of the binary package specified
  23. by the parameter `pkg', built from the source package `src', for the
  24. architecture `arch' within the suite `suite'. If the dependency can't
  25. be satisfied in testing and/or unstable, it updates the excuse passed
  26. as parameter.
  27. """
  28. # retrieve the binary package from the specified suite and arch
  29. target_suite = self.suite_info.target_suite
  30. binaries_s_a = source_suite.binaries[arch]
  31. provides_s_a = source_suite.provides_table[arch]
  32. binaries_t_a = target_suite.binaries[arch]
  33. provides_t_a = target_suite.provides_table[arch]
  34. binary_u = binaries_s_a[pkg]
  35. source_s = source_suite.sources[binary_u.source]
  36. if (binary_u.source_version != source_s.version):
  37. # we don't want cruft to block packages, so if this is cruft, we
  38. # can simply ignore it; if the cruft would migrate to testing, the
  39. # installability check will catch missing deps
  40. return True
  41. # local copies for better performance
  42. parse_depends = apt_pkg.parse_depends
  43. # analyze the dependency fields (if present)
  44. deps = binary_u.depends
  45. if not deps:
  46. return True
  47. is_all_ok = True
  48. # for every dependency block (formed as conjunction of disjunction)
  49. for block, block_txt in zip(parse_depends(deps, False), deps.split(',')):
  50. # if the block is satisfied in testing, then skip the block
  51. packages = get_dependency_solvers(block, binaries_t_a, provides_t_a)
  52. if packages:
  53. for p in packages:
  54. if p.pkg_id.package_name not in binaries_s_a:
  55. continue
  56. excuse.add_sane_dep(p.source)
  57. continue
  58. # check if the block can be satisfied in the source suite, and list the solving packages
  59. packages = get_dependency_solvers(block, binaries_s_a, provides_s_a)
  60. packages = sorted(p.source for p in packages)
  61. # if the dependency can be satisfied by the same source package, skip the block:
  62. # obviously both binary packages will enter testing together
  63. if src in packages:
  64. continue
  65. # if no package can satisfy the dependency, add this information to the excuse
  66. if not packages:
  67. # still list this dep as unsatifiable, even if it is arch:all
  68. # on a non-nobreakall arch, because the autopkgtest policy
  69. # uses this to determine of the autopkgtest can run.
  70. # TODO this should probably be handled in a smarter way
  71. excuse.add_unsatisfiable_on_arch(arch)
  72. if binary_u.architecture != 'all' or arch in self.options.nobreakall_arches:
  73. if arch not in self.options.break_arches:
  74. # when the result of this function is changed to
  75. # actually block items, this should be changed to
  76. # add_verdict_info
  77. excuse.addinfo("%s/%s unsatisfiable Depends: %s" % (pkg, arch, block_txt.strip()))
  78. excuse.add_unsatisfiable_dep(block_txt.strip(), arch)
  79. excuse.addreason("depends")
  80. # TODO this should only be considered a failure if it
  81. # is a regression wrt testing
  82. is_all_ok = False
  83. continue
  84. # for the solving packages, update the excuse to add the dependencies
  85. if arch not in self.options.break_arches:
  86. sources_t = target_suite.sources
  87. sources_s = source_suite.sources
  88. for p in packages:
  89. item_name = compute_item_name(sources_t, sources_s, p, arch)
  90. excuse.add_dependency(DependencyType.DEPENDS, item_name, arch)
  91. else:
  92. for p in packages:
  93. excuse.add_break_dep(p, arch)
  94. return is_all_ok
  95. def _should_remove_source(self, item):
  96. """Check if a source package should be removed from testing
  97. This method checks if a source package should be removed from the
  98. target suite; this happens if the source package is not
  99. present in the primary source suite anymore.
  100. It returns True if the package can be removed, False otherwise.
  101. In the former case, a new excuse is appended to the object
  102. attribute excuses.
  103. """
  104. if hasattr(self.options, 'partial_source'):
  105. return False
  106. # if the source package is available in unstable, then do nothing
  107. source_suite = self.suite_info.primary_source_suite
  108. pkg = item.package
  109. if pkg in source_suite.sources:
  110. return False
  111. # otherwise, add a new excuse for its removal
  112. src = item.suite.sources[pkg]
  113. excuse = Excuse(item.name)
  114. excuse.addinfo("Package not in %s, will try to remove" % source_suite.name)
  115. excuse.set_vers(src.version, None)
  116. src.maintainer and excuse.set_maint(src.maintainer)
  117. src.section and excuse.set_section(src.section)
  118. # if the package is blocked, skip it
  119. for hint in self.hints.search('block', package=pkg, removal=True):
  120. excuse.policy_verdict = PolicyVerdict.REJECTED_PERMANENTLY
  121. excuse.add_verdict_info(
  122. excuse.policy_verdict,
  123. "Not touching package, as requested by %s "
  124. "(contact debian-release if update is needed)" % hint.user)
  125. excuse.addreason("block")
  126. self.excuses[excuse.name] = excuse
  127. return False
  128. excuse.policy_verdict = PolicyVerdict.PASS
  129. self.excuses[excuse.name] = excuse
  130. return True
  131. def _should_upgrade_srcarch(self, item):
  132. """Check if a set of binary packages should be upgraded
  133. This method checks if the binary packages produced by the source
  134. package on the given architecture should be upgraded; this can
  135. happen also if the migration is a binary-NMU for the given arch.
  136. It returns False if the given packages don't need to be upgraded,
  137. True otherwise. In the former case, a new excuse is appended to
  138. the object attribute excuses.
  139. """
  140. # retrieve the source packages for testing and suite
  141. target_suite = self.suite_info.target_suite
  142. source_suite = item.suite
  143. src = item.package
  144. arch = item.architecture
  145. source_t = target_suite.sources[src]
  146. source_u = source_suite.sources[src]
  147. excuse = Excuse(item.name)
  148. excuse.set_vers(source_t.version, source_t.version)
  149. source_u.maintainer and excuse.set_maint(source_u.maintainer)
  150. source_u.section and excuse.set_section(source_u.section)
  151. # if there is a `remove' hint and the requested version is the same as the
  152. # version in testing, then stop here and return False
  153. # (as a side effect, a removal may generate such excuses for both the source
  154. # package and its binary packages on each architecture)
  155. for hint in self.hints.search('remove', package=src, version=source_t.version):
  156. excuse.add_hint(hint)
  157. excuse.policy_verdict = PolicyVerdict.REJECTED_PERMANENTLY
  158. excuse.add_verdict_info(excuse.policy_verdict, "Removal request by %s" % (hint.user))
  159. excuse.add_verdict_info(excuse.policy_verdict, "Trying to remove package, not update it")
  160. self.excuses[excuse.name] = excuse
  161. return False
  162. # the starting point is that there is nothing wrong and nothing worth doing
  163. anywrongver = False
  164. anyworthdoing = False
  165. packages_t_a = target_suite.binaries[arch]
  166. packages_s_a = source_suite.binaries[arch]
  167. wrong_verdict = PolicyVerdict.REJECTED_PERMANENTLY
  168. # for every binary package produced by this source in unstable for this architecture
  169. for pkg_id in sorted(x for x in source_u.binaries if x.architecture == arch):
  170. pkg_name = pkg_id.package_name
  171. # retrieve the testing (if present) and unstable corresponding binary packages
  172. binary_t = packages_t_a[pkg_name] if pkg_name in packages_t_a else None
  173. binary_u = packages_s_a[pkg_name]
  174. # this is the source version for the new binary package
  175. pkgsv = binary_u.source_version
  176. # if the new binary package is architecture-independent, then skip it
  177. if binary_u.architecture == 'all':
  178. if pkg_id not in source_t.binaries:
  179. # only add a note if the arch:all does not match the expected version
  180. excuse.add_detailed_info("Ignoring %s %s (from %s) as it is arch: all" % (pkg_name, binary_u.version, pkgsv))
  181. continue
  182. # if the new binary package is not from the same source as the testing one, then skip it
  183. # this implies that this binary migration is part of a source migration
  184. if source_u.version == pkgsv and source_t.version != pkgsv:
  185. anywrongver = True
  186. excuse.add_verdict_info(
  187. wrong_verdict,
  188. "From wrong source: %s %s (%s not %s)" %
  189. (pkg_name, binary_u.version, pkgsv, source_t.version))
  190. continue
  191. # cruft in unstable
  192. if source_u.version != pkgsv and source_t.version != pkgsv:
  193. if self.options.ignore_cruft:
  194. excuse.add_detailed_info("Old cruft: %s %s (but ignoring cruft, so nevermind)" % (pkg_name, pkgsv))
  195. else:
  196. anywrongver = True
  197. excuse.add_verdict_info(wrong_verdict, "Old cruft: %s %s" % (pkg_name, pkgsv))
  198. continue
  199. # if the source package has been updated in unstable and this is a binary migration, skip it
  200. # (the binaries are now out-of-date)
  201. if source_t.version == pkgsv and source_t.version != source_u.version:
  202. anywrongver = True
  203. excuse.add_verdict_info(
  204. wrong_verdict,
  205. "From wrong source: %s %s (%s not %s)" %
  206. (pkg_name, binary_u.version, pkgsv, source_u.version))
  207. continue
  208. # find unsatisfied dependencies for the new binary package
  209. self._excuse_unsat_deps(pkg_name, src, arch, source_suite, excuse)
  210. # if the binary is not present in testing, then it is a new binary;
  211. # in this case, there is something worth doing
  212. if not binary_t:
  213. excuse.add_detailed_info("New binary: %s (%s)" % (pkg_name, binary_u.version))
  214. anyworthdoing = True
  215. continue
  216. # at this point, the binary package is present in testing, so we can compare
  217. # the versions of the packages ...
  218. vcompare = apt_pkg.version_compare(binary_t.version, binary_u.version)
  219. # ... if updating would mean downgrading, then stop here: there is something wrong
  220. if vcompare > 0:
  221. anywrongver = True
  222. excuse.add_verdict_info(
  223. wrong_verdict,
  224. "Not downgrading: %s (%s to %s)" % (pkg_name, binary_t.version, binary_u.version))
  225. break
  226. # ... if updating would mean upgrading, then there is something worth doing
  227. elif vcompare < 0:
  228. excuse.add_detailed_info("Updated binary: %s (%s to %s)" % (pkg_name, binary_t.version, binary_u.version))
  229. anyworthdoing = True
  230. srcv = source_u.version
  231. same_source = source_t.version == srcv
  232. primary_source_suite = self.suite_info.primary_source_suite
  233. is_primary_source = source_suite == primary_source_suite
  234. # if there is nothing wrong and there is something worth doing or the source
  235. # package is not fake, then check what packages should be removed
  236. if not anywrongver and (anyworthdoing or not source_u.is_fakesrc):
  237. # we want to remove binaries that are no longer produced by the
  238. # new source, but there are some special cases:
  239. # - if this is binary-only (same_source) and not from the primary
  240. # source, we don't do any removals:
  241. # binNMUs in *pu on some architectures would otherwise result in
  242. # the removal of binaries on other architectures
  243. # - for the primary source, smooth binaries in the target suite
  244. # are not considered for removal
  245. if not same_source or is_primary_source:
  246. smoothbins = set()
  247. if is_primary_source:
  248. binaries_t = target_suite.binaries
  249. possible_smooth_updates = [p for p in source_t.binaries if p.architecture == arch]
  250. smoothbins = find_smooth_updateable_binaries(possible_smooth_updates,
  251. source_u,
  252. self.pkg_universe,
  253. target_suite,
  254. binaries_t,
  255. source_suite.binaries,
  256. frozenset(),
  257. self.options.smooth_updates)
  258. # for every binary package produced by this source in testing for this architecture
  259. for pkg_id in sorted(x for x in source_t.binaries if x.architecture == arch):
  260. pkg = pkg_id.package_name
  261. # if the package is architecture-independent, then ignore it
  262. tpkg_data = packages_t_a[pkg]
  263. if tpkg_data.architecture == 'all':
  264. if pkg_id not in source_u.binaries:
  265. # only add a note if the arch:all does not match the expected version
  266. excuse.add_detailed_info("Ignoring removal of %s as it is arch: all" % (pkg))
  267. continue
  268. # if the package is not produced by the new source package, then remove it from testing
  269. if pkg not in packages_s_a:
  270. excuse.add_detailed_info("Removed binary: %s %s" % (pkg, tpkg_data.version))
  271. # the removed binary is only interesting if this is a binary-only migration,
  272. # as otherwise the updated source will already cause the binary packages
  273. # to be updated
  274. if same_source and pkg_id not in smoothbins:
  275. # Special-case, if the binary is a candidate for a smooth update, we do not consider
  276. # it "interesting" on its own. This case happens quite often with smooth updatable
  277. # packages, where the old binary "survives" a full run because it still has
  278. # reverse dependencies.
  279. anyworthdoing = True
  280. if not anyworthdoing:
  281. # nothing worth doing, we don't add an excuse to the list, we just return false
  282. return False
  283. # there is something worth doing
  284. # we assume that this package will be ok, if not invalidated below
  285. excuse.policy_verdict = PolicyVerdict.PASS
  286. # if there is something something wrong, reject this package
  287. if anywrongver:
  288. excuse.policy_verdict = wrong_verdict
  289. self._policy_engine.apply_srcarch_policies(item, arch, source_t, source_u, excuse)
  290. self.excuses[excuse.name] = excuse
  291. return excuse.is_valid
  292. def _should_upgrade_src(self, item):
  293. """Check if source package should be upgraded
  294. This method checks if a source package should be upgraded. The analysis
  295. is performed for the source package specified by the `src' parameter,
  296. for the distribution `source_suite'.
  297. It returns False if the given package doesn't need to be upgraded,
  298. True otherwise. In the former case, a new excuse is appended to
  299. the object attribute excuses.
  300. """
  301. src = item.package
  302. source_suite = item.suite
  303. suite_name = source_suite.name
  304. source_u = source_suite.sources[src]
  305. if source_u.is_fakesrc:
  306. # it is a fake package created to satisfy Britney implementation details; silently ignore it
  307. return False
  308. target_suite = self.suite_info.target_suite
  309. # retrieve the source packages for testing (if available) and suite
  310. if src in target_suite.sources:
  311. source_t = target_suite.sources[src]
  312. # if testing and unstable have the same version, then this is a candidate for binary-NMUs only
  313. if apt_pkg.version_compare(source_t.version, source_u.version) == 0:
  314. return False
  315. else:
  316. source_t = None
  317. excuse = Excuse(item.name)
  318. excuse.set_vers(source_t and source_t.version or None, source_u.version)
  319. source_u.maintainer and excuse.set_maint(source_u.maintainer)
  320. source_u.section and excuse.set_section(source_u.section)
  321. # if the version in unstable is older, then stop here with a warning in the excuse and return False
  322. if source_t and apt_pkg.version_compare(source_u.version, source_t.version) < 0:
  323. excuse.policy_verdict = PolicyVerdict.REJECTED_PERMANENTLY
  324. excuse.add_verdict_info(
  325. excuse.policy_verdict,
  326. "ALERT: %s is newer in the target suite (%s %s)" % (src, source_t.version, source_u.version))
  327. self.excuses[excuse.name] = excuse
  328. excuse.addreason("newerintesting")
  329. return False
  330. # the starting point is that we will update the candidate
  331. excuse.policy_verdict = PolicyVerdict.PASS
  332. # if there is a `remove' hint and the requested version is the same as the
  333. # version in testing, then stop here and return False
  334. for hint in self.hints.search('remove', package=src):
  335. if source_t and source_t.version == hint.version or \
  336. source_u.version == hint.version:
  337. excuse.add_hint(hint)
  338. excuse.policy_verdict = PolicyVerdict.REJECTED_PERMANENTLY
  339. excuse.add_verdict_info(excuse.policy_verdict, "Removal request by %s" % (hint.user))
  340. excuse.add_verdict_info(excuse.policy_verdict, "Trying to remove package, not update it")
  341. break
  342. all_binaries = self.all_binaries
  343. for pkg_id in sorted(source_u.binaries):
  344. is_valid = self._excuse_unsat_deps(pkg_id.package_name, src, pkg_id.architecture, source_suite, excuse)
  345. if is_valid:
  346. continue
  347. # TODO actually reject items that are not valid based on the
  348. # result of _excuse_unsat_deps. However:
  349. # - the calculation from _excuse_unsat_deps isn't correct when
  350. # multiple source suites are required:
  351. # * bin in unstable needs bin from (t)pu
  352. # * bin in (t)pu needs bin from unstable
  353. # - when a binary is already uninstallable in testing, a newer
  354. # version of that binary is allowed to migrate, even if it is
  355. # uninstallable
  356. # at this point, we check the status of the builds on all the supported architectures
  357. # to catch the out-of-date ones
  358. archs_to_consider = list(self.options.architectures)
  359. archs_to_consider.append('all')
  360. for arch in archs_to_consider:
  361. oodbins = {}
  362. uptodatebins = False
  363. # for every binary package produced by this source in the suite for this architecture
  364. if arch == 'all':
  365. consider_binaries = source_u.binaries
  366. else:
  367. # Will also include arch:all for the given architecture (they are filtered out
  368. # below)
  369. consider_binaries = sorted(x for x in source_u.binaries if x.architecture == arch)
  370. for pkg_id in consider_binaries:
  371. pkg = pkg_id.package_name
  372. # retrieve the binary package and its source version
  373. binary_u = all_binaries[pkg_id]
  374. pkgsv = binary_u.source_version
  375. # arch:all packages are treated separately from arch:arch
  376. if binary_u.architecture != arch:
  377. continue
  378. # if it wasn't built by the same source, it is out-of-date
  379. # if there is at least one binary on this arch which is
  380. # up-to-date, there is a build on this arch
  381. if source_u.version != pkgsv:
  382. if pkgsv not in oodbins:
  383. oodbins[pkgsv] = set()
  384. oodbins[pkgsv].add(pkg)
  385. excuse.add_old_binary(pkg, pkgsv)
  386. continue
  387. else:
  388. uptodatebins = True
  389. # if there are out-of-date packages, warn about them in the excuse and set excuse.is_valid
  390. # to False to block the update; if the architecture where the package is out-of-date is
  391. # in the `outofsync_arches' list, then do not block the update
  392. if oodbins:
  393. oodtxt = ""
  394. for v in sorted(oodbins):
  395. if oodtxt:
  396. oodtxt = oodtxt + "; "
  397. oodtxt = oodtxt + "%s (from <a href=\"https://buildd.debian.org/status/logs.php?" \
  398. "arch=%s&pkg=%s&ver=%s\" target=\"_blank\">%s</a>)" % \
  399. (", ".join(sorted(oodbins[v])), quote(arch), quote(src), quote(v), v)
  400. if uptodatebins:
  401. text = "old binaries left on <a href=\"https://buildd.debian.org/status/logs.php?" \
  402. "arch=%s&pkg=%s&ver=%s\" target=\"_blank\">%s</a>: %s" % \
  403. (quote(arch), quote(src), quote(source_u.version), arch, oodtxt)
  404. else:
  405. text = "missing build on <a href=\"https://buildd.debian.org/status/logs.php?" \
  406. "arch=%s&pkg=%s&ver=%s\" target=\"_blank\">%s</a>" % \
  407. (quote(arch), quote(src), quote(source_u.version), arch)
  408. if arch in self.options.outofsync_arches:
  409. text = text + " (but %s isn't keeping up, so nevermind)" % (arch)
  410. if not uptodatebins:
  411. excuse.missing_build_on_ood_arch(arch)
  412. else:
  413. if uptodatebins:
  414. if self.options.ignore_cruft:
  415. text = text + " (but ignoring cruft, so nevermind)"
  416. excuse.add_detailed_info(text)
  417. else:
  418. excuse.policy_verdict = PolicyVerdict.REJECTED_PERMANENTLY
  419. excuse.addreason("cruft")
  420. excuse.add_verdict_info(excuse.policy_verdict, text)
  421. else:
  422. excuse.policy_verdict = PolicyVerdict.REJECTED_CANNOT_DETERMINE_IF_PERMANENT
  423. excuse.missing_build_on_arch(arch)
  424. excuse.addreason("missingbuild")
  425. excuse.add_verdict_info(excuse.policy_verdict, text)
  426. excuse.add_detailed_info("old binaries on %s: %s" % (arch, oodtxt))
  427. # if the source package has no binaries, set is_valid to False to block the update
  428. if not source_u.binaries:
  429. excuse.policy_verdict = PolicyVerdict.REJECTED_PERMANENTLY
  430. excuse.add_verdict_info(excuse.policy_verdict, "%s has no binaries on any arch" % src)
  431. excuse.addreason("no-binaries")
  432. self._policy_engine.apply_src_policies(item, source_t, source_u, excuse)
  433. if source_suite.suite_class.is_additional_source and source_t:
  434. # o-o-d(ish) checks for (t-)p-u
  435. # This only makes sense if the package is actually in testing.
  436. for arch in self.options.architectures:
  437. # if the package in testing has no binaries on this
  438. # architecture, it can't be out-of-date
  439. if not any(x for x in source_t.binaries
  440. if x.architecture == arch and all_binaries[x].architecture != 'all'):
  441. continue
  442. # if the (t-)p-u package has produced any binaries on
  443. # this architecture then we assume it's ok. this allows for
  444. # uploads to (t-)p-u which intentionally drop binary
  445. # packages
  446. if any(x for x in source_suite.binaries[arch].values()
  447. if x.source == src and x.source_version == source_u.version and x.architecture != 'all'):
  448. continue
  449. # TODO: Find a way to avoid hardcoding pu/stable relation.
  450. if suite_name == 'pu':
  451. base = 'stable'
  452. else:
  453. base = target_suite.name
  454. text = "Not yet built on "\
  455. "<a href=\"https://buildd.debian.org/status/logs.php?"\
  456. "arch=%s&pkg=%s&ver=%s&suite=%s\" target=\"_blank\">%s</a> "\
  457. "(relative to target suite)" % \
  458. (quote(arch), quote(src), quote(source_u.version), base, arch)
  459. if arch in self.options.outofsync_arches:
  460. text = text + " (but %s isn't keeping up, so never mind)" % (arch)
  461. excuse.missing_build_on_ood_arch(arch)
  462. excuse.addinfo(text)
  463. else:
  464. excuse.policy_verdict = PolicyVerdict.REJECTED_CANNOT_DETERMINE_IF_PERMANENT
  465. excuse.missing_build_on_arch(arch)
  466. excuse.addreason("missingbuild")
  467. excuse.add_verdict_info(excuse.policy_verdict, text)
  468. # check if there is a `force' hint for this package, which allows it to go in even if it is not updateable
  469. forces = self.hints.search('force', package=src, version=source_u.version)
  470. if forces:
  471. # force() updates the final verdict for us
  472. changed_state = excuse.force()
  473. if changed_state:
  474. excuse.addinfo("Should ignore, but forced by %s" % (forces[0].user))
  475. self.excuses[excuse.name] = excuse
  476. return excuse.is_valid
  477. def _compute_excuses_and_initial_actionable_items(self):
  478. # list of local methods and variables (for better performance)
  479. excuses = self.excuses
  480. suite_info = self.suite_info
  481. pri_source_suite = suite_info.primary_source_suite
  482. architectures = self.options.architectures
  483. should_remove_source = self._should_remove_source
  484. should_upgrade_srcarch = self._should_upgrade_srcarch
  485. should_upgrade_src = self._should_upgrade_src
  486. mi_factory = self._migration_item_factory
  487. sources_ps = pri_source_suite.sources
  488. sources_t = suite_info.target_suite.sources
  489. # this set will contain the packages which are valid candidates;
  490. # if a package is going to be removed, it will have a "-" prefix
  491. actionable_items = set()
  492. actionable_items_add = actionable_items.add # Every . in a loop slows it down
  493. # for every source package in testing, check if it should be removed
  494. for pkg in sources_t:
  495. if pkg not in sources_ps:
  496. item = mi_factory.parse_item("-" + pkg, versioned=False, auto_correct=False)
  497. if should_remove_source(item):
  498. actionable_items_add(item.name)
  499. # for every source package in the source suites, check if it should be upgraded
  500. for suite in chain((pri_source_suite, *suite_info.additional_source_suites)):
  501. sources_s = suite.sources
  502. item_suffix = "_%s" % suite.excuses_suffix if suite.excuses_suffix else ''
  503. for pkg in sources_s:
  504. src_s_data = sources_s[pkg]
  505. if src_s_data.is_fakesrc:
  506. continue
  507. src_t_data = sources_t.get(pkg)
  508. if src_t_data is None or apt_pkg.version_compare(src_s_data.version, src_t_data.version) != 0:
  509. item = mi_factory.parse_item("%s%s" % (pkg, item_suffix), versioned=False, auto_correct=False)
  510. # check if the source package should be upgraded
  511. if should_upgrade_src(item):
  512. actionable_items_add(item.name)
  513. else:
  514. # package has same version in source and target suite; check if any of the
  515. # binaries have changed on the various architectures
  516. for arch in architectures:
  517. item = mi_factory.parse_item("%s/%s%s" % (pkg, arch, item_suffix),
  518. versioned=False, auto_correct=False)
  519. if should_upgrade_srcarch(item):
  520. actionable_items_add(item.name)
  521. # process the `remove' hints, if the given package is not yet in actionable_items
  522. for hint in self.hints['remove']:
  523. src = hint.package
  524. if src not in sources_t or src in actionable_items or ("-" + src in actionable_items):
  525. continue
  526. # check if the version specified in the hint is the same as the considered package
  527. tsrcv = sources_t[src].version
  528. if tsrcv != hint.version:
  529. continue
  530. # add the removal of the package to actionable_items and build a new excuse
  531. excuse = Excuse("-%s" % (src))
  532. excuse.set_vers(tsrcv, None)
  533. excuse.addinfo("Removal request by %s" % (hint.user))
  534. # if the removal of the package is blocked, skip it
  535. blocked = False
  536. for blockhint in self.hints.search('block', package=src, removal=True):
  537. excuse.policy_verdict = PolicyVerdict.REJECTED_PERMANENTLY
  538. excuse.add_verdict_info(
  539. excuse.policy_verdict,
  540. "Not removing package, due to block hint by %s "
  541. "(contact debian-release if update is needed)" % blockhint.user)
  542. excuse.addreason("block")
  543. blocked = True
  544. if blocked:
  545. excuses[excuse.name] = excuse
  546. continue
  547. actionable_items_add("-%s" % (src))
  548. excuse.addinfo("Package is broken, will try to remove")
  549. excuse.add_hint(hint)
  550. # Using "PASS" here as "Created by a hint" != "accepted due to hint". In a future
  551. # where there might be policy checks on removals, it would make sense to distinguish
  552. # those two states. Not sure that future will ever be.
  553. excuse.policy_verdict = PolicyVerdict.PASS
  554. excuses[excuse.name] = excuse
  555. return actionable_items
  556. def find_actionable_excuses(self):
  557. excuses = self.excuses
  558. actionable_items = self._compute_excuses_and_initial_actionable_items()
  559. # extract the not considered packages, which are in the excuses but not in upgrade_me
  560. unconsidered = {ename for ename in excuses if ename not in actionable_items}
  561. # invalidate impossible excuses
  562. for e in excuses.values():
  563. # parts[0] == package name
  564. # parts[1] == optional architecture
  565. parts = e.name.split('/')
  566. for d in sorted(e.all_deps):
  567. for deptype in e.all_deps[d]:
  568. ok = False
  569. # source -> source dependency; both packages must have
  570. # valid excuses
  571. if d in actionable_items or d in unconsidered:
  572. ok = True
  573. # if the excuse is for a binNMU, also consider d/$arch as a
  574. # valid excuse
  575. elif len(parts) == 2:
  576. bd = '%s/%s' % (d, parts[1])
  577. if bd in actionable_items or bd in unconsidered:
  578. ok = True
  579. # if the excuse is for a source package, check each of the
  580. # architectures on which the excuse lists a dependency on d,
  581. # and consider the excuse valid if it is possible on each
  582. # architecture
  583. else:
  584. arch_ok = True
  585. for arch in e.all_deps[d][deptype]:
  586. bd = '%s/%s' % (d, arch)
  587. if bd not in actionable_items and bd not in unconsidered:
  588. arch_ok = False
  589. break
  590. if arch_ok:
  591. ok = True
  592. if not ok:
  593. # TODO this should actually invalidate the excuse
  594. # would that be correct in all cases?
  595. # - arch all on non-nobreakall arch?
  596. # - pkg in testing already uninstallable?
  597. e.addinfo("Impossible %s: %s -> %s" % (deptype, e.name, d))
  598. e.addreason(deptype.get_reason())
  599. invalidate_excuses(excuses, actionable_items, unconsidered)
  600. mi_factory = self._migration_item_factory
  601. actionable_items = {mi_factory.parse_item(x, versioned=False, auto_correct=False) for x in actionable_items}
  602. return excuses, actionable_items