#!/usr/bin/python3 -u
# -*- coding: utf-8 -*-
# Copyright (C) 2001-2008 Anthony Towns <[email protected]>
# Andreas Barth <[email protected]>
# Fabio Tranchitella <[email protected]>
# Copyright (C) 2010-2013 Adam D. Barratt <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
"""
= Introduction =
This is the Debian testing updater script, also known as "Britney".
Packages are usually installed into the `testing' distribution after
they have undergone some degree of testing in unstable. The goal of
this software is to do this task in a smart way, allowing testing
to always be fully installable and close to being a release candidate.
Britney's source code is split between two different but related tasks:
the first one is the generation of the update excuses, while the
second tries to update testing with the valid candidates; first
each package alone, then larger and even larger sets of packages
together. Each try is accepted if testing is not more uninstallable
after the update than before.
= Data Loading =
In order to analyze the entire Debian distribution, Britney needs to
load the whole archive into memory: this means more than 10,000 packages
across twelve architectures, as well as the dependency interconnections
between them. For this reason, the memory requirements for running this
software are quite high, and at least 1 gigabyte of RAM should be available.
Britney loads the source packages from the `Sources' file and the binary
packages from the `Packages_${arch}' files, where ${arch} is substituted
with the supported architectures. While loading the data, the software
analyzes the dependencies and builds a directed weighted graph in memory
with all the interconnections between the packages (see Britney.read_sources
and Britney.read_binaries).
Other than source and binary packages, Britney loads the following data:
* BugsV, which contains the list of release-critical bugs for a given
version of a source or binary package (see RCBugPolicy.read_bugs).
* Dates, which contains the date of the upload of a given version
of a source package (see Britney.read_dates).
* Urgencies, which contains the urgency of the upload of a given
version of a source package (see AgePolicy._read_urgencies).
* Hints, which contains lists of commands which modify the standard behaviour
of Britney (see Britney.read_hints).
* Blocks, which contains user-supplied blocks read from Launchpad bugs
(see LPBlockBugPolicy).
* ExcuseBugs, which contains user-supplied update-excuse entries read
from Launchpad bugs (see LPExcuseBugsPolicy).
For a more detailed explanation of the format of these files, please read
the documentation of the related methods. Their exact meaning is instead
explained in the chapter "Excuses Generation".
= Excuses =
An excuse is a detailed explanation of why a package can or cannot
be updated in the testing distribution from a newer package in
another distribution (for example, unstable). The excuses are mainly
written to an HTML file which is published over HTTP, so that
maintainers can read it (manually or automatically) to find out why
their packages have or have not been updated.
== Excuses generation ==
These are the steps (with references to method names) that Britney
follows to generate the update excuses.
* If a source package is available in testing but it is not
present in unstable and no binary packages in unstable are
built from it, then it is marked for removal.
* Every source package in unstable and testing-proposed-updates,
if already present in testing, is checked for binary-NMUs, new
or dropped binary packages in all the supported architectures
(see Britney.should_upgrade_srcarch). The steps to detect if an
upgrade is needed are:
1. If there is a `remove' hint for the source package, the package
is ignored: it will be removed and not updated.
2. For every binary package built from the new source, it checks
for unsatisfied dependencies, new binary packages and updated
binary packages (binNMU), excluding the architecture-independent
ones, and packages not built from the same source.
3. For every binary package built from the old source, it checks
if it is still built from the new source; if this is not true
and the package is not architecture-independent, the script
removes it from testing.
4. Finally, if there is something worth doing (e.g. a new or updated
binary package) and nothing wrong, it marks the source package
as "Valid candidate", or as "Not considered" if something wrong
prevented the update.
* Every source package in unstable and testing-proposed-updates is
checked for upgrade (see Britney.should_upgrade_src). The steps
to detect if an upgrade is needed are:
1. If the source package in testing is more recent, the new one
is ignored.
2. If the source package doesn't exist (is fake), which means that
a binary package refers to it but it is not present in the
`Sources' file, the new one is ignored.
3. If the package doesn't exist in testing, the urgency of the
upload is ignored and set to the default (currently `low').
4. If there is a `remove' hint for the source package, the package
is ignored: it will be removed and not updated.
5. If there is a `block' hint for the source package without an
`unblock' hint or a `block-all source' hint, the package is ignored.
6. If there is a `block-udeb' hint for the source package, it will
have the same effect as `block', but may only be cancelled by
a subsequent `unblock-udeb' hint.
7. If the suite is unstable, the update can go ahead only if the
upload happened more than the minimum days specified by the
urgency of the upload; if this is not true, the package is
ignored as `too-young'. Note that the urgency is sticky, meaning
that the highest urgency uploaded since the previous testing
transition is taken into account.
8. If the suite is unstable, all the architecture-dependent binary
packages and the architecture-independent ones for the `nobreakall'
architectures have to be built from the source we are considering.
If this is not true, then these are called `out-of-date'
architectures and the package is ignored.
9. The source package must have at least one binary package, otherwise
it is ignored.
10. If the suite is unstable, the new source package must have no
release critical bugs which do not also apply to the testing
one. If this is not true, the package is ignored as `buggy'.
11. If there is a `force' hint for the source package, then it is
updated even if it is marked as ignored from the previous steps.
12. If the suite is {testing-,}proposed-updates, the source package can
be updated only if there is an explicit approval for it. Unless
a `force' hint exists, the new package must also be available
on all of the architectures for which it has binary packages in
testing.
13. If the package has not been marked as ignored by the previous
steps, mark it as "Valid candidate"; otherwise mark it as "Not considered".
* The list of `remove' hints is processed: if the requested source
package is not already being updated or removed and the version
actually in testing is the same as specified in the `remove' hint,
it is marked for removal.
* The excuses are sorted by the number of days from the last upload
(days-old) and by name.
* A list of unconsidered excuses (for which the package is not upgraded)
is built. Using this list, all of the excuses depending on them are
marked as invalid ("impossible dependencies").
* The excuses are written to an HTML file.
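As a purely hypothetical illustration of the steps above: a package
foo/1.2-3 uploaded to unstable with urgency medium becomes a "Valid
candidate" once it has aged the minimum number of days for that
urgency, is built everywhere it needs to be, and introduces no new
release critical bugs; if any check fails, the excuse instead shows
it as "Not considered" together with the failing reasons.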
"""
import contextlib
import logging
import optparse
import os
import sys
import time
from collections import defaultdict
from functools import reduce
from itertools import chain
from operator import attrgetter
import apt_pkg
from britney2 import SourcePackage, BinaryPackageId, BinaryPackage
from britney2.excusefinder import ExcuseFinder
from britney2.hints import HintParser
from britney2.inputs.suiteloader import DebMirrorLikeSuiteContentLoader, MissingRequiredConfigurationError
from britney2.installability.builder import build_installability_tester
from britney2.installability.solver import InstallabilitySolver
from britney2.migration import MigrationManager
from britney2.migrationitem import MigrationItemFactory
from britney2.policies.policy import (AgePolicy,
RCBugPolicy,
PiupartsPolicy,
DependsPolicy,
BuildDependsPolicy,
PolicyEngine,
BlockPolicy,
BuiltUsingPolicy,
BuiltOnBuilddPolicy,
ImplicitDependencyPolicy,
LinuxPolicy,
LPBlockBugPolicy,
)
from britney2.policies.autopkgtest import AutopkgtestPolicy
from britney2.policies.sourceppa import SourcePPAPolicy
from britney2.policies.sruadtregression import SRUADTRegressionPolicy
from britney2.policies.email import EmailPolicy
from britney2.policies.lpexcusebugs import LPExcuseBugsPolicy
from britney2.utils import (log_and_format_old_libraries,
read_nuninst, write_nuninst, write_heidi,
format_and_log_uninst, newly_uninst,
write_excuses, write_heidi_delta,
old_libraries, is_nuninst_asgood_generous,
clone_nuninst, compile_nuninst, parse_provides,
MigrationConstraintException,
)
__author__ = 'Fabio Tranchitella and the Debian Release Team'
__version__ = '2.0'
# "temporarily" raise recursion limit for the auto hinter
sys.setrecursionlimit(2000)
class Britney(object):
"""Britney, the Debian testing updater script
This is the script that updates the testing distribution. It is executed
each day after the installation of the updated packages. It generates the
`Packages' files for the testing distribution, but it does so in an
intelligent manner; it tries to avoid any inconsistency and to use only
non-buggy packages.
For more documentation on this script, please read the Developers Reference.
"""
HINTS_HELPERS = ("easy", "hint", "remove", "block", "block-udeb", "unblock", "unblock-udeb", "approve",
"remark", "ignore-piuparts", "ignore-rc-bugs", "force-skiptest", "force-badtest")
HINTS_STANDARD = ("urgent", "age-days") + HINTS_HELPERS
# ALL = {"force", "force-hint", "block-all"} | HINTS_STANDARD | registered policy hints (not covered above)
HINTS_ALL = ('ALL')
def __init__(self):
"""Class constructor
This method initializes and populates the data lists, which contain all
the information needed by the other methods of the class.
"""
# setup logging - provide the "short level name" (i.e. INFO -> I) that
# we used to use prior to using the logging module.
old_factory = logging.getLogRecordFactory()
short_level_mapping = {
'CRITICAL': 'F',
'INFO': 'I',
'WARNING': 'W',
'ERROR': 'E',
'DEBUG': 'N',
}
def record_factory(*args, **kwargs): # pragma: no cover
record = old_factory(*args, **kwargs)
try:
record.shortlevelname = short_level_mapping[record.levelname]
except KeyError:
record.shortlevelname = record.levelname
return record
logging.setLogRecordFactory(record_factory)
logging.basicConfig(format='{shortlevelname}: [{asctime}] - {message}',
style='{',
datefmt="%Y-%m-%dT%H:%M:%S%z",
stream=sys.stdout,
)
self.logger = logging.getLogger()
# Logger for "upgrade_output"; the file handler will be attached later when
# we are ready to open the file.
self.output_logger = logging.getLogger('britney2.output.upgrade_output')
self.output_logger.setLevel(logging.INFO)
# initialize the apt_pkg back-end
apt_pkg.init()
apt_pkg.init_config()
# parse the command line arguments
self._policy_engine = PolicyEngine()
self.suite_info = None # Initialized during __parse_arguments
self.__parse_arguments()
self.all_selected = []
self.excuses = {}
self.upgrade_me = []
if self.options.nuninst_cache:
self.logger.info("Not building the list of non-installable packages, as requested")
if self.options.print_uninst:
nuninst = read_nuninst(self.options.noninst_status,
self.options.architectures)
print('* summary')
print('\n'.join('%4d %s' % (len(nuninst[x]), x) for x in self.options.architectures))
return
try:
constraints_file = os.path.join(self.options.static_input_dir, 'constraints')
faux_packages = os.path.join(self.options.static_input_dir, 'faux-packages')
except AttributeError:
self.logger.info("The static_input_dir option is not set")
constraints_file = None
faux_packages = None
if faux_packages is not None and os.path.exists(faux_packages):
self.logger.info("Loading faux packages from %s", faux_packages)
self._load_faux_packages(faux_packages)
elif faux_packages is not None:
self.logger.info("No Faux packages as %s does not exist", faux_packages)
if constraints_file is not None and os.path.exists(constraints_file):
self.logger.info("Loading constraints from %s", constraints_file)
self.constraints = self._load_constraints(constraints_file)
else:
if constraints_file is not None:
self.logger.info("No constraints as %s does not exist", constraints_file)
self.constraints = {
'keep-installable': [],
}
self.logger.info("Compiling Installability tester")
self.pkg_universe, self._inst_tester = build_installability_tester(self.suite_info, self.options.architectures)
target_suite = self.suite_info.target_suite
target_suite.inst_tester = self._inst_tester
self.allow_uninst = {}
for arch in self.options.architectures:
self.allow_uninst[arch] = set()
self._migration_item_factory = MigrationItemFactory(self.suite_info)
self._hint_parser = HintParser(self._migration_item_factory)
self._migration_manager = MigrationManager(self.options, self.suite_info, self.all_binaries, self.pkg_universe,
self.constraints, self.allow_uninst, self._migration_item_factory,
self.hints)
if not self.options.nuninst_cache:
self.logger.info("Building the list of non-installable packages for the full archive")
self._inst_tester.compute_installability()
nuninst = compile_nuninst(target_suite,
self.options.architectures,
self.options.nobreakall_arches)
self.nuninst_orig = nuninst
for arch in self.options.architectures:
self.logger.info("> Found %d non-installable packages", len(nuninst[arch]))
if self.options.print_uninst:
self.nuninst_arch_report(nuninst, arch)
if self.options.print_uninst:
print('* summary')
print('\n'.join(map(lambda x: '%4d %s' % (len(nuninst[x]), x), self.options.architectures)))
return
else:
write_nuninst(self.options.noninst_status, nuninst)
stats = self._inst_tester.compute_stats()
self.logger.info("> Installability tester statistics (per architecture)")
for arch in self.options.architectures:
arch_stat = stats[arch]
self.logger.info("> %s", arch)
for stat in arch_stat.stat_summary():
self.logger.info("> - %s", stat)
else:
self.logger.info("Loading uninstallability counters from cache")
self.nuninst_orig = read_nuninst(self.options.noninst_status,
self.options.architectures)
# nuninst_orig may get updated during the upgrade process
self.nuninst_orig_save = clone_nuninst(self.nuninst_orig, architectures=self.options.architectures)
self._policy_engine.register_policy_hints(self._hint_parser)
try:
self.read_hints(self.options.hintsdir)
except AttributeError:
self.read_hints(os.path.join(self.suite_info['unstable'].path, 'Hints'))
self._policy_engine.initialise(self, self.hints)
def __parse_arguments(self):
"""Parse the command line arguments
This method parses and initializes the command line arguments.
While doing so, it preprocesses some of the options, converting them
into a form suitable for the other methods of the class.
"""
# initialize the parser
parser = optparse.OptionParser(version="%prog")
parser.add_option("-v", "", action="count", dest="verbose", help="enable verbose output")
parser.add_option("-c", "--config", action="store", dest="config", default="/etc/britney.conf",
help="path for the configuration file")
parser.add_option("", "--architectures", action="store", dest="architectures", default=None,
help="override architectures from configuration file")
parser.add_option("", "--actions", action="store", dest="actions", default=None,
help="override the list of actions to be performed")
parser.add_option("", "--hints", action="store", dest="hints", default=None,
help="additional hints, separated by semicolons")
parser.add_option("", "--hint-tester", action="store_true", dest="hint_tester", default=None,
help="provide a command line interface to test hints")
parser.add_option("", "--dry-run", action="store_true", dest="dry_run", default=False,
help="disable all outputs to the testing directory")
parser.add_option("", "--nuninst-cache", action="store_true", dest="nuninst_cache", default=False,
help="do not build the non-installability status, use the cache from file")
parser.add_option("", "--print-uninst", action="store_true", dest="print_uninst", default=False,
help="just print a summary of uninstallable packages")
parser.add_option("", "--compute-migrations", action="store_true", dest="compute_migrations", default=True,
help="Compute which packages can migrate (the default)")
parser.add_option("", "--no-compute-migrations", action="store_false", dest="compute_migrations",
help="Do not compute which packages can migrate.")
parser.add_option("", "--series", action="store", dest="series", default='',
help="set distribution series name")
parser.add_option("", "--distribution", action="store", dest="distribution", default="Debian",
help="set distribution name")
(self.options, self.args) = parser.parse_args()
if self.options.verbose:
self.logger.setLevel(logging.INFO)
else:
self.logger.setLevel(logging.WARNING)
# TODO: Define a more obvious toggle for debug information
try: # pragma: no cover
if int(os.environ.get('BRITNEY_DEBUG', '0')):
self.logger.setLevel(logging.DEBUG)
except ValueError: # pragma: no cover
pass
# integrity checks
# if the configuration file exists, then read it and set the additional options
if not os.path.isfile(self.options.config): # pragma: no cover
self.logger.error("Unable to read the configuration file (%s), exiting!", self.options.config)
sys.exit(1)
# minimum days for unstable-testing transition and the list of hints
# are handled as an ad-hoc case
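# Illustrative (hypothetical) configuration entries handled by this
# ad-hoc parsing; HINTS_ values may name other HINTS_* groups, e.g. ALL:
#   MINDAYS_LOW = 10
#   HINTS_FREEZE = block block-all block-udeb
#   HINTS_RELEASE = ALL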
MINDAYS = {}
self.HINTS = {'command-line': self.HINTS_ALL}
with open(self.options.config, encoding='utf-8') as config:
for line in config:
if '=' in line and not line.strip().startswith('#'):
k, v = line.split('=', 1)
k = k.strip()
v = v.strip()
if self.options.series is not None:
v = v.replace("%(SERIES)", self.options.series)
if k.startswith("MINDAYS_"):
MINDAYS[k.split("_")[1].lower()] = int(v)
elif k.startswith("HINTS_"):
self.HINTS[k.split("_")[1].lower()] = \
reduce(lambda x, y: x+y, [
hasattr(self, "HINTS_" + i) and
getattr(self, "HINTS_" + i) or
(i,) for i in v.split()])
elif not hasattr(self.options, k.lower()) or \
not getattr(self.options, k.lower()):
setattr(self.options, k.lower(), v)
if hasattr(self.options, 'components'): # pragma: no cover
self.logger.error("The COMPONENTS configuration has been removed.")
self.logger.error("Britney will read the value from the Release file automatically")
sys.exit(1)
suite_loader = DebMirrorLikeSuiteContentLoader(self.options)
try:
self.suite_info = suite_loader.load_suites()
except MissingRequiredConfigurationError as e: # pragma: no cover
self.logger.error("Could not load the suite content due to missing configuration: %s", str(e))
sys.exit(1)
self.all_binaries = suite_loader.all_binaries()
self.options.components = suite_loader.components
self.options.architectures = suite_loader.architectures
self.options.nobreakall_arches = suite_loader.nobreakall_arches
self.options.outofsync_arches = suite_loader.outofsync_arches
self.options.break_arches = suite_loader.break_arches
self.options.new_arches = suite_loader.new_arches
if self.options.series == '':
self.options.series = self.suite_info.target_suite.name
if hasattr(self.options, 'heidi_output') and not hasattr(self.options, "heidi_delta_output"):
self.options.heidi_delta_output = self.options.heidi_output + "Delta"
self.options.smooth_updates = self.options.smooth_updates.split()
if not hasattr(self.options, 'ignore_cruft') or \
self.options.ignore_cruft == "0":
self.options.ignore_cruft = False
if not hasattr(self.options, 'check_consistency_level'):
self.options.check_consistency_level = 2
else:
self.options.check_consistency_level = int(self.options.check_consistency_level)
if not hasattr(self.options, 'adt_retry_url_mech'):
self.options.adt_retry_url_mech = ''
self.options.has_arch_all_buildds = getattr(self.options, 'has_arch_all_buildds', 'yes') == 'yes'
self._policy_engine.add_policy(DependsPolicy(self.options, self.suite_info))
self._policy_engine.add_policy(RCBugPolicy(self.options, self.suite_info))
if getattr(self.options, 'piuparts_enable', 'yes') == 'yes':
self._policy_engine.add_policy(PiupartsPolicy(self.options, self.suite_info))
add_autopkgtest_policy = getattr(self.options, 'adt_enable', 'no')
if add_autopkgtest_policy in ('yes', 'dry-run'):
self._policy_engine.add_policy(AutopkgtestPolicy(self.options, self.suite_info, dry_run=add_autopkgtest_policy == 'dry-run'))
self._policy_engine.add_policy(AgePolicy(self.options, self.suite_info, MINDAYS))
# XXX this policy results in asymmetric enforcement of
# build-dependencies in the release pocket (nothing blocks
# propagation of a new package which will regress build-dep
# satisfaction of another package already in the release pocket, this
# only shows up via the NBS report); and this is a subset of what we
# already get from the rebuild tests, which the reality is we don't
# have capacity to enforce for the whole archive. - vorlon
# self._policy_engine.add_policy(BuildDependsPolicy(self.options, self.suite_info))
self._policy_engine.add_policy(BlockPolicy(self.options, self.suite_info))
# XXX re-enable once https://bugs.launchpad.net/launchpad/+bug/1868558 is fixed
# self._policy_engine.add_policy(BuiltUsingPolicy(self.options, self.suite_info))
# This policy is objectively terrible, it reduces britney runtime
# at the cost of giving only partial information about blocking
# packages during large transitions instead of giving full detail in
# update_output that the team can work through in parallel, thus
# dragging out complicated transitions. vorlon 20240214
if getattr(self.options, 'implicit_deps', 'yes') == 'yes':
self._policy_engine.add_policy(ImplicitDependencyPolicy(self.options, self.suite_info))
if getattr(self.options, 'check_buildd', 'no') == 'yes':
self._policy_engine.add_policy(BuiltOnBuilddPolicy(self.options, self.suite_info))
self._policy_engine.add_policy(LPBlockBugPolicy(self.options, self.suite_info))
self._policy_engine.add_policy(LPExcuseBugsPolicy(self.options, self.suite_info))
self._policy_engine.add_policy(SourcePPAPolicy(self.options, self.suite_info))
self._policy_engine.add_policy(LinuxPolicy(self.options, self.suite_info))
add_sruregression_policy = getattr(self.options, 'sruregressionemail_enable', 'no')
if add_sruregression_policy in ('yes', 'dry-run'):
self._policy_engine.add_policy(SRUADTRegressionPolicy(self.options,
self.suite_info,
dry_run=add_sruregression_policy == 'dry-run'))
add_email_policy = getattr(self.options, 'email_enable', 'no')
if add_email_policy in ('yes', 'dry-run'):
self._policy_engine.add_policy(EmailPolicy(self.options,
self.suite_info,
dry_run=add_email_policy == 'dry-run'))
@property
def hints(self):
return self._hint_parser.hints
def _load_faux_packages(self, faux_packages_file):
"""Loads fake packages
In rare cases, it is useful to create a "fake" package that can be used to satisfy
dependencies. This is usually needed for packages that are not shipped directly
on this mirror but are a prerequisite for using it (e.g. some vendors provide
non-distributable "setup" packages, and contrib/non-free packages depend on these).
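A hypothetical example of a faux-packages paragraph (deb822 format;
only Package is mandatory, the other fields shown here carry the
defaults used by the parser below, and Architecture falls back to all
configured architectures when omitted):
Package: vendor-setup
Version: 1.0-1
Architecture: amd64 i386
Component: non-free
Provides: vendor-keyring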
:param faux_packages_file: Path to the file containing the fake package definitions
"""
tag_file = apt_pkg.TagFile(faux_packages_file)
get_field = tag_file.section.get
step = tag_file.step
no = 0
pri_source_suite = self.suite_info.primary_source_suite
target_suite = self.suite_info.target_suite
while step():
no += 1
pkg_name = get_field('Package', None)
if pkg_name is None: # pragma: no cover
raise ValueError("Missing Package field in paragraph %d (file %s)" % (no, faux_packages_file))
pkg_name = sys.intern(pkg_name)
version = sys.intern(get_field('Version', '1.0-1'))
provides_raw = get_field('Provides')
archs_raw = get_field('Architecture', None)
component = get_field('Component', 'non-free')
if archs_raw:
archs = archs_raw.split()
else:
archs = self.options.architectures
faux_section = 'faux'
if component != 'main':
faux_section = "%s/faux" % component
src_data = SourcePackage(pkg_name,
version,
sys.intern(faux_section),
set(),
None,
True,
None,
None,
[],
[],
)
target_suite.sources[pkg_name] = src_data
pri_source_suite.sources[pkg_name] = src_data
for arch in archs:
pkg_id = BinaryPackageId(pkg_name, version, arch)
if provides_raw:
provides = parse_provides(provides_raw, pkg_id=pkg_id, logger=self.logger)
else:
provides = []
bin_data = BinaryPackage(version,
faux_section,
pkg_name,
version,
arch,
get_field('Multi-Arch'),
None,
None,
provides,
False,
pkg_id,
[],
)
src_data.binaries.add(pkg_id)
target_suite.binaries[arch][pkg_name] = bin_data
pri_source_suite.binaries[arch][pkg_name] = bin_data
# register provided packages with the target suite provides table
for provided_pkg, provided_version, _ in bin_data.provides:
target_suite.provides_table[arch][provided_pkg].add((pkg_name, provided_version))
self.all_binaries[pkg_id] = bin_data
def _load_constraints(self, constraints_file):
"""Loads configurable constraints
The constraints file can contain extra rules that Britney should attempt
to satisfy. An example is "keep package X in testing and ensure it is
installable".
:param constraints_file: Path to the file containing the constraints
"""
tag_file = apt_pkg.TagFile(constraints_file)
get_field = tag_file.section.get
step = tag_file.step
no = 0
faux_version = sys.intern('1')
faux_section = sys.intern('faux')
keep_installable = []
constraints = {
'keep-installable': keep_installable
}
pri_source_suite = self.suite_info.primary_source_suite
target_suite = self.suite_info.target_suite
while step():
no += 1
pkg_name = get_field('Fake-Package-Name', None)
if pkg_name is None: # pragma: no cover
raise ValueError("Missing Fake-Package-Name field in paragraph %d (file %s)" % (no, constraints_file))
pkg_name = sys.intern(pkg_name)
def mandatory_field(x):
v = get_field(x, None)
if v is None: # pragma: no cover
raise ValueError("Missing %s field for %s (file %s)" % (x, pkg_name, constraints_file))
return v
constraint = mandatory_field('Constraint')
if constraint not in {'present-and-installable'}: # pragma: no cover
raise ValueError("Unsupported constraint %s for %s (file %s)" % (constraint, pkg_name, constraints_file))
self.logger.info(" - constraint %s", pkg_name)
pkg_list = [x.strip() for x in mandatory_field('Package-List').split("\n")
if x.strip() != '' and not x.strip().startswith("#")]
src_data = SourcePackage(pkg_name,
faux_version,
faux_section,
set(),
None,
True,
None,
None,
[],
[],
)
target_suite.sources[pkg_name] = src_data
pri_source_suite.sources[pkg_name] = src_data
keep_installable.append(pkg_name)
for arch in self.options.architectures:
deps = []
for pkg_spec in pkg_list:
s = pkg_spec.split(None, 1)
if len(s) == 1:
deps.append(s[0])
else:
pkg, arch_res = s
if not (arch_res.startswith('[') and arch_res.endswith(']')): # pragma: no cover
raise ValueError("Invalid arch-restriction on %s - should be [arch1 arch2] (for %s file %s)"
% (pkg, pkg_name, constraints_file))
arch_res = arch_res[1:-1].split()
if not arch_res: # pragma: no cover
msg = "Empty arch-restriction for %s: Uses comma or negation (for %s file %s)"
raise ValueError(msg % (pkg, pkg_name, constraints_file))
for a in arch_res:
if a == arch:
deps.append(pkg)
elif ',' in a or '!' in a: # pragma: no cover
msg = "Invalid arch-restriction for %s: Uses comma or negation (for %s file %s)"
raise ValueError(msg % (pkg, pkg_name, constraints_file))
pkg_id = BinaryPackageId(pkg_name, faux_version, arch)
bin_data = BinaryPackage(faux_version,
faux_section,
pkg_name,
faux_version,
arch,
'no',
', '.join(deps),
None,
[],
False,
pkg_id,
[],
)
src_data.binaries.add(pkg_id)
target_suite.binaries[arch][pkg_name] = bin_data
pri_source_suite.binaries[arch][pkg_name] = bin_data
self.all_binaries[pkg_id] = bin_data
return constraints
# Data reading/writing methods
# ----------------------------
def read_hints(self, hintsdir):
"""Read the hint commands from the specified directory
The hint commands are read from the files contained in the directory
specified by the `hintsdir' parameter.
The names of the files must match the names of the users authorized
to provide the hints.
The file contains rows with the format:
<command> <package-name>[/<version>]
The method returns a dictionary where the key is the command, and
the value is the list of affected packages.
"""
for who in self.HINTS.keys():
if who == 'command-line':
lines = self.options.hints and self.options.hints.split(';') or ()
filename = '<cmd-line>'
self._hint_parser.parse_hints(who, self.HINTS[who], filename, lines)
else:
filename = os.path.join(hintsdir, who)
if not os.path.isfile(filename):
self.logger.error("Cannot read hints list from %s, no such file!", filename)
continue
self.logger.info("Loading hints list from %s", filename)
with open(filename, encoding='utf-8') as f:
self._hint_parser.parse_hints(who, self.HINTS[who], filename, f)
hints = self._hint_parser.hints
for x in ["block", "block-all", "block-udeb", "unblock", "unblock-udeb", "force", "urgent", "remove", "age-days"]:
z = defaultdict(dict)
for hint in hints[x]:
package = hint.package
architecture = hint.architecture
key = (hint, hint.user)
if package in z and architecture in z[package] and z[package][architecture] != key:
hint2 = z[package][architecture][0]
if x in ['unblock', 'unblock-udeb']:
if apt_pkg.version_compare(hint2.version, hint.version) < 0:
# This hint is for a newer version, so discard the old one
self.logger.warning("Overriding %s[%s] = ('%s', '%s', '%s') with ('%s', '%s', '%s')",
x, package, hint2.version, hint2.architecture,
hint2.user, hint.version, hint.architecture, hint.user)
hint2.set_active(False)
else:
# This hint is for an older version, so ignore it in favour of the new one
self.logger.warning("Ignoring %s[%s] = ('%s', '%s', '%s'), ('%s', '%s', '%s') is higher or equal",
x, package, hint.version, hint.architecture, hint.user,
hint2.version, hint2.architecture, hint2.user)
hint.set_active(False)
else:
self.logger.warning("Overriding %s[%s] = ('%s', '%s') with ('%s', '%s')",
x, package, hint2.user, hint2, hint.user, hint)
hint2.set_active(False)
z[package][architecture] = key
for hint in hints['allow-uninst']:
if hint.architecture == 'source':
for arch in self.options.architectures:
self.allow_uninst[arch].add(hint.package)
else:
self.allow_uninst[hint.architecture].add(hint.package)
# Sanity check the hints hash
if len(hints["block"]) == 0 and len(hints["block-udeb"]) == 0:
self.logger.warning("WARNING: No block hints at all, not even udeb ones!")
def write_excuses(self):
"""Produce and write the update excuses
This method handles the update excuses generation: the packages are
looked at to determine whether they are valid candidates. For the details
of this procedure, please refer to the module docstring.
"""
self.logger.info("Update Excuses generation started")
mi_factory = self._migration_item_factory
excusefinder = ExcuseFinder(self.options, self.suite_info, self.all_binaries,
self.pkg_universe, self._policy_engine, mi_factory, self.hints)
excuses, upgrade_me = excusefinder.find_actionable_excuses()
self.excuses = excuses
# sort the list of candidates
self.upgrade_me = sorted(upgrade_me)
old_lib_removals = old_libraries(mi_factory, self.suite_info, self.options.outofsync_arches)
self.upgrade_me.extend(old_lib_removals)
self.output_logger.info("List of old libraries added to upgrade_me (%d):", len(old_lib_removals))
log_and_format_old_libraries(self.output_logger, old_lib_removals)
# write excuses to the output file
if not self.options.dry_run:
self.logger.info("> Writing Excuses to %s", self.options.excuses_output)
os.makedirs(os.path.dirname(self.options.excuses_output), exist_ok=True)
write_excuses(excuses, self.options.excuses_output,
output_format="legacy-html")
if hasattr(self.options, 'excuses_yaml_output'):
self.logger.info("> Writing YAML Excuses to %s", self.options.excuses_yaml_output)
write_excuses(excuses, self.options.excuses_yaml_output,
output_format="yaml")
self.logger.info("Update Excuses generation completed")
# Upgrade run
# -----------
def eval_nuninst(self, nuninst, original=None):
"""Return a string which represents the uninstallability counters
This method builds the string from the uninstallability statistics in
`nuninst`, falling back to the counts in `original` (if given) for
architectures missing from `nuninst`.
An example of the output string is:
1+2: i-0:a-0:a-0:h-0:i-1:m-0:m-0:p-0:a-0:m-0:s-2:s-0
where the first number counts broken packages on non-break
architectures, the second counts broken packages on break
architectures, and the remainder is a per-architecture breakdown
(the first letter of each architecture followed by its count).
"""
res = []
total = 0
totalbreak = 0
for arch in self.options.architectures:
if arch in nuninst:
n = len(nuninst[arch])
elif original and arch in original:
n = len(original[arch])
else:
continue
if arch in self.options.break_arches:
totalbreak = totalbreak + n
else:
total = total + n
res.append("%s-%d" % (arch[0], n))
return "%d+%d: %s" % (total, totalbreak, ":".join(res))
def iter_packages(self, packages, selected, nuninst=None):
"""Iter on the list of actions and apply them one-by-one
This method applies the changes from `packages` to testing, checking the uninstallability
counters for every action performed. If the action does not improve them, it is reverted.
The method returns the new uninstallability counters and the remaining actions if the
final result is successful, otherwise (None, []).
"""
group_info = {}
rescheduled_packages = packages
maybe_rescheduled_packages = []
output_logger = self.output_logger
solver = InstallabilitySolver(self.pkg_universe, self._inst_tester)
mm = self._migration_manager
target_suite = self.suite_info.target_suite
for y in sorted(packages, key=attrgetter('uvname')):
try:
_, updates, rms, _ = mm.compute_groups(y)
result = (y, frozenset(updates), frozenset(rms))
group_info[y] = result
except MigrationConstraintException as e:
rescheduled_packages.remove(y)
output_logger.info("not adding package to list: %s", (y.package))
output_logger.info(" got exception: %s" % (repr(e)))
if nuninst:
nuninst_orig = nuninst
else:
nuninst_orig = self.nuninst_orig
nuninst_last_accepted = nuninst_orig
output_logger.info("recur: [] %s %d/0", ",".join(x.uvname for x in selected), len(packages))
while rescheduled_packages:
groups = {group_info[x] for x in rescheduled_packages}
worklist = solver.solve_groups(groups)
rescheduled_packages = []
worklist.reverse()
while worklist:
comp = worklist.pop()
comp_name = ' '.join(item.uvname for item in comp)
output_logger.info("trying: %s" % comp_name)
with mm.start_transaction() as transaction:
accepted = False
try:
accepted, nuninst_after, failed_arch, new_cruft = mm.migrate_items_to_target_suite(
comp,
nuninst_last_accepted
)
if accepted:
selected.extend(comp)
transaction.commit()
output_logger.info("accepted: %s", comp_name)
output_logger.info(" ori: %s", self.eval_nuninst(nuninst_orig))
output_logger.info(" pre: %s", self.eval_nuninst(nuninst_last_accepted))
output_logger.info(" now: %s", self.eval_nuninst(nuninst_after))
if new_cruft:
output_logger.info(
" added new cruft items to list: %s",
" ".join(x.uvname for x in new_cruft))
if len(selected) <= 20:
output_logger.info(" all: %s", " ".join(x.uvname for x in selected))
else:
output_logger.info(" most: (%d) .. %s",
len(selected),
" ".join(x.uvname for x in selected[-20:]))
if self.options.check_consistency_level >= 3:
target_suite.check_suite_source_pkg_consistency('iter_packages after commit')
nuninst_last_accepted = nuninst_after
for cruft_item in new_cruft:
try:
_, updates, rms, _ = mm.compute_groups(cruft_item)
result = (cruft_item, frozenset(updates), frozenset(rms))
group_info[cruft_item] = result
worklist.append([cruft_item])
except MigrationConstraintException as e:
output_logger.info(
" got exception adding cruft item %s to list: %s",
cruft_item.uvname, repr(e))
rescheduled_packages.extend(maybe_rescheduled_packages)
maybe_rescheduled_packages.clear()
else:
transaction.rollback()
broken = sorted(b for b in nuninst_after[failed_arch]
if b not in nuninst_last_accepted[failed_arch])
compare_nuninst = None
if any(item for item in comp if item.architecture != 'source'):
compare_nuninst = nuninst_last_accepted
# NB: try_migration already reverted this for us, so just print the results and move on
output_logger.info("skipped: %s (%d, %d, %d)",
comp_name,
len(rescheduled_packages),
len(maybe_rescheduled_packages),
len(worklist)
)
output_logger.info(" got: %s", self.eval_nuninst(nuninst_after, compare_nuninst))
output_logger.info(" * %s: %s", failed_arch, ", ".join(broken))
if self.options.check_consistency_level >= 3:
target_suite.check_suite_source_pkg_consistency('iter_package after rollback (not accepted)')
except MigrationConstraintException as e:
transaction.rollback()
output_logger.info("skipped: %s (%d, %d, %d)",
comp_name,
len(rescheduled_packages),