resolve.py 67 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531153215331534153515361537153815391540154115421543154415451546154715481549155015511552155315541555155615571558155915601561156215631564156515661567156815691570157115721573157415751576157715781579158015811582158315841585158615871588158915901591159215931594159515961597159815991600160116021603160416051606160716081609161016111612161316141615161616171618161916201621162216231624162516261627162816291630163116321633163416351636163716381639164016411642164316441645164616471648164916501651165216531654165516561657165816591660166116621663166416651666
  1. # Copyright (C) 2012 Anaconda, Inc
  2. # SPDX-License-Identifier: BSD-3-Clause
  3. from __future__ import annotations
  4. import copy
  5. import itertools
  6. from collections import defaultdict, deque
  7. from functools import lru_cache
  8. from logging import DEBUG, getLogger
  9. from tqdm import tqdm
  10. from conda.common.iterators import groupby_to_dict as groupby
  11. from ._vendor.frozendict import FrozenOrderedDict as frozendict
  12. from .auxlib.decorators import memoizemethod
  13. from .base.constants import MAX_CHANNEL_PRIORITY, ChannelPriority, SatSolverChoice
  14. from .base.context import context
  15. from .common.compat import on_win
  16. from .common.io import dashlist, time_recorder
  17. from .common.logic import (
  18. TRUE,
  19. Clauses,
  20. PycoSatSolver,
  21. PyCryptoSatSolver,
  22. PySatSolver,
  23. minimal_unsatisfiable_subset,
  24. )
  25. from .common.toposort import toposort
  26. from .exceptions import (
  27. CondaDependencyError,
  28. InvalidSpec,
  29. ResolvePackageNotFound,
  30. UnsatisfiableError,
  31. )
  32. from .models.channel import Channel, MultiChannel
  33. from .models.enums import NoarchType, PackageType
  34. from .models.match_spec import MatchSpec
  35. from .models.records import PackageRecord
  36. from .models.version import VersionOrder
  37. log = getLogger(__name__)
  38. stdoutlog = getLogger("conda.stdoutlog")
  39. # used in conda build
  40. Unsatisfiable = UnsatisfiableError
  41. ResolvePackageNotFound = ResolvePackageNotFound
# Registry mapping each configured SAT solver choice to its Clauses backend
# class; consulted by _get_sat_solver_cls below when picking a working solver.
_sat_solvers = {
    SatSolverChoice.PYCOSAT: PycoSatSolver,
    SatSolverChoice.PYCRYPTOSAT: PyCryptoSatSolver,
    SatSolverChoice.PYSAT: PySatSolver,
}
  47. @lru_cache(maxsize=None)
  48. def _get_sat_solver_cls(sat_solver_choice=SatSolverChoice.PYCOSAT):
  49. def try_out_solver(sat_solver):
  50. c = Clauses(sat_solver=sat_solver)
  51. required = {c.new_var(), c.new_var()}
  52. c.Require(c.And, *required)
  53. solution = set(c.sat())
  54. if not required.issubset(solution):
  55. raise RuntimeError(f"Wrong SAT solution: {solution}. Required: {required}")
  56. sat_solver = _sat_solvers[sat_solver_choice]
  57. try:
  58. try_out_solver(sat_solver)
  59. except Exception as e:
  60. log.warning(
  61. "Could not run SAT solver through interface '%s'.", sat_solver_choice
  62. )
  63. log.debug("SAT interface error due to: %s", e, exc_info=True)
  64. else:
  65. log.debug("Using SAT solver interface '%s'.", sat_solver_choice)
  66. return sat_solver
  67. for sat_solver in _sat_solvers.values():
  68. try:
  69. try_out_solver(sat_solver)
  70. except Exception as e:
  71. log.debug(
  72. "Attempted SAT interface '%s' but unavailable due to: %s",
  73. sat_solver_choice,
  74. e,
  75. )
  76. else:
  77. log.debug("Falling back to SAT solver interface '%s'.", sat_solver_choice)
  78. return sat_solver
  79. raise CondaDependencyError(
  80. "Cannot run solver. No functioning SAT implementations available."
  81. )
  82. def exactness_and_number_of_deps(resolve_obj, ms):
  83. """Sorting key to emphasize packages that have more strict
  84. requirements. More strict means the reduced index can be reduced
  85. more, so we want to consider these more constrained deps earlier in
  86. reducing the index.
  87. """
  88. if ms.strictness == 3:
  89. prec = resolve_obj.find_matches(ms)
  90. value = 3
  91. if prec:
  92. for dep in prec[0].depends:
  93. value += MatchSpec(dep).strictness
  94. else:
  95. value = ms.strictness
  96. return value
  97. class Resolve:
    def __init__(self, index, processed=False, channels=()):
        """Build solver state from a package index.

        Args:
            index: mapping whose keys and values are package records; values
                are grouped by name, keys are scanned for virtual packages.
            processed: accepted for call compatibility; not referenced in
                this method.
            channels: channels used to compute the priority map.
        """
        self.index = index
        self.channels = channels
        self._channel_priorities_map = (
            self._make_channel_priorities(channels) if channels else {}
        )
        self._channel_priority = context.channel_priority
        self._solver_ignore_timestamps = context.solver_ignore_timestamps

        # Group all records by package name.
        groups = groupby(lambda x: x.name, index.values())
        trackers = defaultdict(list)

        for name in groups:
            # If any record for this name is unmanageable, restrict the
            # whole group to just the unmanageable records.
            unmanageable_precs = [prec for prec in groups[name] if prec.is_unmanageable]
            if unmanageable_precs:
                log.debug("restricting to unmanageable packages: %s", name)
                groups[name] = unmanageable_precs
            # Index records under every feature they track.
            tf_precs = (prec for prec in groups[name] if prec.track_features)
            for prec in tf_precs:
                for feature_name in prec.track_features:
                    trackers[feature_name].append(prec)

        self.groups = groups  # dict[package_name, list[PackageRecord]]
        self.trackers = trackers  # dict[track_feature, set[PackageRecord]]
        self._cached_find_matches = {}  # dict[MatchSpec, set[PackageRecord]]
        self.ms_depends_ = {}  # dict[PackageRecord, list[MatchSpec]]
        self._reduced_index_cache = {}
        self._pool_cache = {}
        self._strict_channel_cache = {}

        # Virtual system packages present in the index (package_type
        # VIRTUAL_SYSTEM, e.g. "__glibc"-style records).
        self._system_precs = {
            _
            for _ in index
            if (
                hasattr(_, "package_type")
                and _.package_type == PackageType.VIRTUAL_SYSTEM
            )
        }

        # sorting these in reverse order is effectively prioritizing
        # constraint behavior from newer packages. It is applying broadening
        # reduction based on the latest packages, which may reduce the space
        # more, because more modern packages utilize constraints in more sane
        # ways (for example, using run_exports in conda-build 3)
        for name, group in self.groups.items():
            self.groups[name] = sorted(group, key=self.version_key, reverse=True)
  139. def __hash__(self):
  140. return (
  141. super().__hash__()
  142. ^ hash(frozenset(self.channels))
  143. ^ hash(frozendict(self._channel_priorities_map))
  144. ^ hash(self._channel_priority)
  145. ^ hash(self._solver_ignore_timestamps)
  146. ^ hash(frozendict((k, tuple(v)) for k, v in self.groups.items()))
  147. ^ hash(frozendict((k, tuple(v)) for k, v in self.trackers.items()))
  148. ^ hash(frozendict((k, tuple(v)) for k, v in self.ms_depends_.items()))
  149. )
  150. def default_filter(self, features=None, filter=None):
  151. # TODO: fix this import; this is bad
  152. from .core.subdir_data import make_feature_record
  153. if filter is None:
  154. filter = {}
  155. else:
  156. filter.clear()
  157. filter.update(
  158. {make_feature_record(fstr): False for fstr in self.trackers.keys()}
  159. )
  160. if features:
  161. filter.update({make_feature_record(fstr): True for fstr in features})
  162. return filter
    def valid(self, spec_or_prec, filter, optional=True):
        """Tests if a package, MatchSpec, or a list of both has satisfiable
        dependencies, assuming cyclic dependencies are always valid.

        Args:
            spec_or_prec: a package record, a MatchSpec, or an iterable of these.
            filter: a dictionary of (fkey,valid) pairs, used to consider a subset
                of dependencies, and to eliminate repeated searches.
            optional: if True (default), do not enforce optional specifications
                when considering validity. If False, enforce them.

        Returns:
            True if the full set of dependencies can be satisfied; False otherwise.
            If filter is supplied and update is True, it will be updated with the
            search results.
        """

        def v_(spec):
            # Dispatch on type: MatchSpec vs. package record.
            return v_ms_(spec) if isinstance(spec, MatchSpec) else v_fkey_(spec)

        def v_ms_(ms):
            # A spec is valid if we honor optionality and it is optional, or
            # at least one record matching it is itself valid.
            return (
                optional
                and ms.optional
                or any(v_fkey_(fkey) for fkey in self.find_matches(ms))
            )

        def v_fkey_(prec):
            # `filter` doubles as a memo table and a cycle guard: the record
            # is pre-seeded True before recursing, so cycles count as valid.
            val = filter.get(prec)
            if val is None:
                filter[prec] = True
                try:
                    depends = self.ms_depends(prec)
                except InvalidSpec:
                    # Unparseable dependency specs make the record invalid.
                    val = filter[prec] = False
                else:
                    val = filter[prec] = all(v_ms_(ms) for ms in depends)
            return val

        result = v_(spec_or_prec)
        return result
    def valid2(self, spec_or_prec, filter_out, optional=True):
        """Like :meth:`valid`, but records *why* a package was filtered out.

        Args:
            spec_or_prec: a MatchSpec or a package record.
            filter_out: dict of (prec, reason) pairs; False means "kept",
                a truthy string is the reason for exclusion.
            optional: if True (default), optional specs are considered valid
                without inspecting their matches.

        Returns:
            True if the spec/record is considered valid, False otherwise.
        """

        def is_valid(_spec_or_prec):
            if isinstance(_spec_or_prec, MatchSpec):
                return is_valid_spec(_spec_or_prec)
            else:
                return is_valid_prec(_spec_or_prec)

        @memoizemethod
        def is_valid_spec(_spec):
            # Valid if optionality is honored and the spec is optional, or
            # any matching record is valid.
            return (
                optional
                and _spec.optional
                or any(is_valid_prec(_prec) for _prec in self.find_matches(_spec))
            )

        def is_valid_prec(prec):
            # `filter_out` doubles as memo table and cycle guard: the record
            # is pre-seeded False ("kept") before recursing, so cyclic
            # dependencies count as valid.
            val = filter_out.get(prec)
            if val is None:
                filter_out[prec] = False
                try:
                    has_valid_deps = all(
                        is_valid_spec(ms) for ms in self.ms_depends(prec)
                    )
                except InvalidSpec:
                    val = filter_out[prec] = "invalid dep specs"
                else:
                    # False means the record is kept; otherwise store the
                    # human-readable reason for filtering it out.
                    val = filter_out[prec] = (
                        False if has_valid_deps else "invalid depends specs"
                    )
            # A falsy entry (False) means valid; any reason string means not.
            return not val
        return is_valid(spec_or_prec)
    def invalid_chains(self, spec, filter, optional=True):
        """Constructs a set of 'dependency chains' for invalid specs.

        A dependency chain is a tuple of MatchSpec objects, starting with
        the requested spec, proceeding down the dependency tree, ending at
        a specification that cannot be satisfied.

        Args:
            spec: a package key or MatchSpec
            filter: a dictionary of (prec, valid) pairs to be used when
                testing for package validity.

        Returns:
            A tuple of tuples, empty if the MatchSpec is valid.
        """

        def chains_(spec, names):
            # Cycle guard: each package name is visited at most once.
            if spec.name in names:
                return
            names.add(spec.name)
            if self.valid(spec, filter, optional):
                return
            precs = self.find_matches(spec)
            found = False
            conflict_deps = set()
            for prec in precs:
                for m2 in self.ms_depends(prec):
                    for x in chains_(m2, names):
                        found = True
                        yield (spec,) + x
                    else:
                        # NOTE(review): a for/else `else` runs whenever the
                        # loop is not exited via `break` -- i.e. always here --
                        # so every dependency is added to conflict_deps, not
                        # only those that produced no chains.  Confirm intent.
                        conflict_deps.add(m2)
            if not found:
                # No deeper chain found: report the unioned conflicting deps,
                # grouped by name, as the chain terminus.
                conflict_groups = groupby(lambda x: x.name, conflict_deps)
                for group in conflict_groups.values():
                    yield (spec,) + MatchSpec.union(group)
        return chains_(spec, set())
  260. def verify_specs(self, specs):
  261. """Perform a quick verification that specs and dependencies are reasonable.
  262. Args:
  263. specs: An iterable of strings or MatchSpec objects to be tested.
  264. Returns:
  265. Nothing, but if there is a conflict, an error is thrown.
  266. Note that this does not attempt to resolve circular dependencies.
  267. """
  268. non_tf_specs = []
  269. bad_deps = []
  270. feature_names = set()
  271. for ms in specs:
  272. _feature_names = ms.get_exact_value("track_features")
  273. if _feature_names:
  274. feature_names.update(_feature_names)
  275. else:
  276. non_tf_specs.append(ms)
  277. bad_deps.extend(
  278. (spec,)
  279. for spec in non_tf_specs
  280. if (not spec.optional and not self.find_matches(spec))
  281. )
  282. if bad_deps:
  283. raise ResolvePackageNotFound(bad_deps)
  284. return tuple(non_tf_specs), feature_names
    def _classify_bad_deps(
        self, bad_deps, specs_to_add, history_specs, strict_channel_priority
    ):
        """Bucket conflicting dependency chains by the kind of conflict.

        Args:
            bad_deps: iterable of dependency chains (tuples of MatchSpecs).
            specs_to_add: specs the user asked to add; may be None.
            history_specs: specs recorded in environment history; may be None.
            strict_channel_priority: accepted for call compatibility; not
                referenced in this method.

        Returns:
            dict with keys 'python', 'request_conflict_with_history',
            'direct' and 'virtual_package'; each value is a collection of
            (chain, descriptive-string) pairs.
        """
        classes = {
            "python": set(),
            "request_conflict_with_history": set(),
            "direct": set(),
            "virtual_package": set(),
        }
        specs_to_add = {MatchSpec(_) for _ in specs_to_add or []}
        history_specs = {MatchSpec(_) for _ in history_specs or []}
        for chain in bad_deps:
            # sometimes chains come in as strings
            if (
                len(chain) > 1
                and chain[-1].name == "python"
                and not any(_.name == "python" for _ in specs_to_add)
                and any(_[0] for _ in bad_deps if _[0].name == "python")
            ):
                # Chain ends at python, python was not requested directly,
                # and some other chain is rooted at a python spec: classify
                # as a python-pin conflict when the two specs share no match.
                python_first_specs = [_[0] for _ in bad_deps if _[0].name == "python"]
                if python_first_specs:
                    python_spec = python_first_specs[0]
                    if not (
                        set(self.find_matches(python_spec))
                        & set(self.find_matches(chain[-1]))
                    ):
                        classes["python"].add(
                            (
                                tuple([chain[0], chain[-1]]),
                                str(MatchSpec(python_spec, target=None)),
                            )
                        )
            elif chain[-1].name.startswith("__"):
                # Chain ends at a virtual package; report its installed
                # version, or "not available" when absent from the system.
                version = [_ for _ in self._system_precs if _.name == chain[-1].name]
                virtual_package_version = (
                    version[0].version if version else "not available"
                )
                classes["virtual_package"].add((tuple(chain), virtual_package_version))
            elif chain[0] in specs_to_add:
                # Conflict rooted at a spec the user just requested: check
                # whether it collides with something pinned by history.
                match = False
                for spec in history_specs:
                    if spec.name == chain[-1].name:
                        classes["request_conflict_with_history"].add(
                            (tuple(chain), str(MatchSpec(spec, target=None)))
                        )
                        match = True
                if not match:
                    classes["direct"].add(
                        (tuple(chain), str(MatchSpec(chain[0], target=None)))
                    )
            else:
                # Keep multi-step chains, or single-step ones whose root also
                # roots some other reported chain.
                if len(chain) > 1 or any(
                    len(c) >= 1 and c[0] == chain[0] for c in bad_deps
                ):
                    classes["direct"].add(
                        (tuple(chain), str(MatchSpec(chain[0], target=None)))
                    )
        if classes["python"]:
            # filter out plain single-entry python conflicts. The python section explains these.
            classes["direct"] = [
                _
                for _ in classes["direct"]
                if _[1].startswith("python ") or len(_[0]) > 1
            ]
        return classes
  350. def find_matches_with_strict(self, ms, strict_channel_priority):
  351. matches = self.find_matches(ms)
  352. if not strict_channel_priority:
  353. return matches
  354. sole_source_channel_name = self._get_strict_channel(ms.name)
  355. return tuple(f for f in matches if f.channel.name == sole_source_channel_name)
  356. def find_conflicts(self, specs, specs_to_add=None, history_specs=None):
  357. if context.unsatisfiable_hints:
  358. if not context.json:
  359. print(
  360. "\nFound conflicts! Looking for incompatible packages.\n"
  361. "This can take several minutes. Press CTRL-C to abort."
  362. )
  363. bad_deps = self.build_conflict_map(specs, specs_to_add, history_specs)
  364. else:
  365. bad_deps = {}
  366. strict_channel_priority = context.channel_priority == ChannelPriority.STRICT
  367. raise UnsatisfiableError(bad_deps, strict=strict_channel_priority)
  368. def breadth_first_search_for_dep_graph(
  369. self, root_spec, target_name, dep_graph, num_targets=1
  370. ):
  371. """Return shorted path from root_spec to target_name"""
  372. queue = []
  373. queue.append([root_spec])
  374. visited = []
  375. target_paths = []
  376. while queue:
  377. path = queue.pop(0)
  378. node = path[-1]
  379. if node in visited:
  380. continue
  381. visited.append(node)
  382. if node.name == target_name:
  383. if len(target_paths) == 0:
  384. target_paths.append(path)
  385. if len(target_paths[-1]) == len(path):
  386. last_spec = MatchSpec.union((path[-1], target_paths[-1][-1]))[0]
  387. target_paths[-1][-1] = last_spec
  388. else:
  389. target_paths.append(path)
  390. found_all_targets = len(target_paths) == num_targets and any(
  391. len(_) != len(path) for _ in queue
  392. )
  393. if len(queue) == 0 or found_all_targets:
  394. return target_paths
  395. sub_graph = dep_graph
  396. for p in path[0:-1]:
  397. sub_graph = sub_graph[p]
  398. children = [_ for _ in sub_graph.get(node, {})]
  399. if children is None:
  400. continue
  401. for adj in children:
  402. if len(target_paths) < num_targets:
  403. new_path = list(path)
  404. new_path.append(adj)
  405. queue.append(new_path)
  406. return target_paths
  407. def build_graph_of_deps(self, spec):
  408. dep_graph = {spec: {}}
  409. all_deps = set()
  410. queue = [[spec]]
  411. while queue:
  412. path = queue.pop(0)
  413. sub_graph = dep_graph
  414. for p in path:
  415. sub_graph = sub_graph[p]
  416. parent_node = path[-1]
  417. matches = self.find_matches(parent_node)
  418. for mat in matches:
  419. if len(mat.depends) > 0:
  420. for i in mat.depends:
  421. new_node = MatchSpec(i)
  422. sub_graph.update({new_node: {}})
  423. all_deps.add(new_node)
  424. new_path = list(path)
  425. new_path.append(new_node)
  426. if len(new_path) <= context.unsatisfiable_hints_check_depth:
  427. queue.append(new_path)
  428. return dep_graph, all_deps
    def build_conflict_map(self, specs, specs_to_add=None, history_specs=None):
        """Perform a deeper analysis on conflicting specifications, by attempting
        to find the common dependencies that might be the cause of conflicts.

        Args:
            specs: An iterable of strings or MatchSpec objects to be tested.
                It is assumed that the specs conflict.

        Returns:
            bad_deps: A list of lists of bad deps

        Strategy:
            If we're here, we know that the specs conflict. This could be because:
            - One spec conflicts with another; e.g.
                  ['numpy 1.5*', 'numpy >=1.6']
            - One spec conflicts with a dependency of another; e.g.
                  ['numpy 1.5*', 'scipy 0.12.0b1']
            - Each spec depends on *the same package* but in a different way; e.g.,
                  ['A', 'B'] where A depends on numpy 1.5, and B on numpy 1.6.
            Technically, all three of these cases can be boiled down to the last
            one if we treat the spec itself as one of the "dependencies". There
            might be more complex reasons for a conflict, but this code only
            considers the ones above.

            The purpose of this code, then, is to identify packages (like numpy
            above) that all of the specs depend on *but in different ways*. We
            then identify the dependency chains that lead to those packages.
        """
        # if only a single package matches the spec use the packages depends
        # rather than the spec itself
        strict_channel_priority = context.channel_priority == ChannelPriority.STRICT
        specs = set(specs) | (specs_to_add or set())
        # Remove virtual packages
        specs = {spec for spec in specs if not spec.name.startswith("__")}
        if len(specs) == 1:
            matches = self.find_matches(next(iter(specs)))
            if len(matches) == 1:
                specs = set(self.ms_depends(matches[0]))
        # Virtual system packages participate as roots too.
        specs.update({_.to_match_spec() for _ in self._system_precs})
        # Warm the per-spec pool caches before graph construction.
        for spec in specs:
            self._get_package_pool((spec,))

        dep_graph = {}
        # dep_list maps each dependency name -> list of root specs that pull
        # it in (with repetition, one entry per occurrence).
        dep_list = {}
        with tqdm(
            total=len(specs),
            desc="Building graph of deps",
            leave=False,
            disable=context.json,
        ) as t:
            for spec in specs:
                t.set_description(f"Examining {spec}")
                t.update()
                dep_graph_for_spec, all_deps_for_spec = self.build_graph_of_deps(spec)
                dep_graph.update(dep_graph_for_spec)
                if dep_list.get(spec.name):
                    dep_list[spec.name].append(spec)
                else:
                    dep_list[spec.name] = [spec]
                for dep in all_deps_for_spec:
                    if dep_list.get(dep.name):
                        dep_list[dep.name].append(spec)
                    else:
                        dep_list[dep.name] = [spec]

        chains = []
        # Map frozenset-of-root-specs -> list of dependency names they all
        # share (the likely conflict points).
        conflicting_pkgs_pkgs = {}
        for k, v in dep_list.items():
            set_v = frozenset(v)
            # Packages probably conflicts if many specs depend on it
            if len(set_v) > 1:
                if conflicting_pkgs_pkgs.get(set_v) is None:
                    conflicting_pkgs_pkgs[set_v] = [k]
                else:
                    conflicting_pkgs_pkgs[set_v].append(k)
            # Conflict if required virtual package is not present
            elif k.startswith("__") and any(s for s in set_v if s.name != k):
                conflicting_pkgs_pkgs[set_v] = [k]

        with tqdm(
            total=len(specs),
            desc="Determining conflicts",
            leave=False,
            disable=context.json,
        ) as t:
            for roots, nodes in conflicting_pkgs_pkgs.items():
                t.set_description(
                    "Examining conflict for {}".format(" ".join(_.name for _ in roots))
                )
                t.update()
                lroots = [_ for _ in roots]
                current_shortest_chain = []
                shortest_node = None
                # Names that are simultaneously a requested root and an
                # unsatisfiable node.
                requested_spec_unsat = frozenset(nodes).intersection(
                    {_.name for _ in roots}
                )
                if requested_spec_unsat:
                    chains.append([_ for _ in roots if _.name in requested_spec_unsat])
                    shortest_node = chains[-1][0]
                    for root in roots:
                        # NOTE(review): compares against chains[0][0], which
                        # may belong to a previous outer iteration -- confirm
                        # whether chains[-1][0] was intended.
                        if root != chains[0][0]:
                            search_node = shortest_node.name
                            num_occurances = dep_list[search_node].count(root)
                            c = self.breadth_first_search_for_dep_graph(
                                root, search_node, dep_graph, num_occurances
                            )
                            chains.extend(c)
                else:
                    # Search from the first root to every conflicting node,
                    # remembering the node with the shortest chain.
                    for node in nodes:
                        num_occurances = dep_list[node].count(lroots[0])
                        chain = self.breadth_first_search_for_dep_graph(
                            lroots[0], node, dep_graph, num_occurances
                        )
                        chains.extend(chain)
                        if len(current_shortest_chain) == 0 or len(chain) < len(
                            current_shortest_chain
                        ):
                            current_shortest_chain = chain
                            shortest_node = node
                    # Then connect every other root to that nearest node.
                    for root in lroots[1:]:
                        num_occurances = dep_list[shortest_node].count(root)
                        c = self.breadth_first_search_for_dep_graph(
                            root, shortest_node, dep_graph, num_occurances
                        )
                        chains.extend(c)

        bad_deps = self._classify_bad_deps(
            chains, specs_to_add, history_specs, strict_channel_priority
        )
        return bad_deps
  551. def _get_strict_channel(self, package_name):
  552. channel_name = None
  553. try:
  554. channel_name = self._strict_channel_cache[package_name]
  555. except KeyError:
  556. if package_name in self.groups:
  557. all_channel_names = {
  558. prec.channel.name for prec in self.groups[package_name]
  559. }
  560. by_cp = {
  561. self._channel_priorities_map.get(cn, 1): cn
  562. for cn in all_channel_names
  563. }
  564. highest_priority = sorted(by_cp)[
  565. 0
  566. ] # highest priority is the lowest number
  567. channel_name = self._strict_channel_cache[package_name] = by_cp[
  568. highest_priority
  569. ]
  570. return channel_name
    @memoizemethod
    def _broader(self, ms, specs_by_name):
        """Prevent introduction of matchspecs that broaden our selection of choices."""
        # No existing specs for this name means there is nothing to broaden.
        if not specs_by_name:
            return False
        # `ms` is broader when it is looser (lower strictness) than the
        # first already-collected spec for this name.
        return ms.strictness < specs_by_name[0].strictness
  577. def _get_package_pool(self, specs):
  578. specs = frozenset(specs)
  579. if specs in self._pool_cache:
  580. pool = self._pool_cache[specs]
  581. else:
  582. pool = self.get_reduced_index(specs)
  583. grouped_pool = groupby(lambda x: x.name, pool)
  584. pool = {k: set(v) for k, v in grouped_pool.items()}
  585. self._pool_cache[specs] = pool
  586. return pool
  587. @time_recorder(module_name=__name__)
  588. def get_reduced_index(
  589. self, explicit_specs, sort_by_exactness=True, exit_on_conflict=False
  590. ):
  591. # TODO: fix this import; this is bad
  592. from .core.subdir_data import make_feature_record
  593. strict_channel_priority = context.channel_priority == ChannelPriority.STRICT
  594. cache_key = strict_channel_priority, tuple(explicit_specs)
  595. if cache_key in self._reduced_index_cache:
  596. return self._reduced_index_cache[cache_key]
  597. if log.isEnabledFor(DEBUG):
  598. log.debug(
  599. "Retrieving packages for: %s",
  600. dashlist(sorted(str(s) for s in explicit_specs)),
  601. )
  602. explicit_specs, features = self.verify_specs(explicit_specs)
  603. filter_out = {
  604. prec: False if val else "feature not enabled"
  605. for prec, val in self.default_filter(features).items()
  606. }
  607. snames = set()
  608. top_level_spec = None
  609. cp_filter_applied = set() # values are package names
  610. if sort_by_exactness:
  611. # prioritize specs that are more exact. Exact specs will evaluate to 3,
  612. # constrained specs will evaluate to 2, and name only will be 1
  613. explicit_specs = sorted(
  614. list(explicit_specs),
  615. key=lambda x: (exactness_and_number_of_deps(self, x), x.dist_str()),
  616. reverse=True,
  617. )
  618. # tuple because it needs to be hashable
  619. explicit_specs = tuple(explicit_specs)
  620. explicit_spec_package_pool = {}
  621. for s in explicit_specs:
  622. explicit_spec_package_pool[s.name] = explicit_spec_package_pool.get(
  623. s.name, set()
  624. ) | set(self.find_matches(s))
  625. def filter_group(_specs):
  626. # all _specs should be for the same package name
  627. name = next(iter(_specs)).name
  628. group = self.groups.get(name, ())
  629. # implement strict channel priority
  630. if group and strict_channel_priority and name not in cp_filter_applied:
  631. sole_source_channel_name = self._get_strict_channel(name)
  632. for prec in group:
  633. if prec.channel.name != sole_source_channel_name:
  634. filter_out[prec] = "removed due to strict channel priority"
  635. cp_filter_applied.add(name)
  636. # Prune packages that don't match any of the patterns,
  637. # have unsatisfiable dependencies, or conflict with the explicit specs
  638. nold = nnew = 0
  639. for prec in group:
  640. if not filter_out.setdefault(prec, False):
  641. nold += 1
  642. if (not self.match_any(_specs, prec)) or (
  643. explicit_spec_package_pool.get(name)
  644. and prec not in explicit_spec_package_pool[name]
  645. ):
  646. filter_out[prec] = (
  647. "incompatible with required spec %s" % top_level_spec
  648. )
  649. continue
  650. unsatisfiable_dep_specs = set()
  651. for ms in self.ms_depends(prec):
  652. if not ms.optional and not any(
  653. rec
  654. for rec in self.find_matches(ms)
  655. if not filter_out.get(rec, False)
  656. ):
  657. unsatisfiable_dep_specs.add(ms)
  658. if unsatisfiable_dep_specs:
  659. filter_out[prec] = "unsatisfiable dependencies %s" % " ".join(
  660. str(s) for s in unsatisfiable_dep_specs
  661. )
  662. continue
  663. filter_out[prec] = False
  664. nnew += 1
  665. reduced = nnew < nold
  666. if reduced:
  667. log.debug("%s: pruned from %d -> %d" % (name, nold, nnew))
  668. if any(ms.optional for ms in _specs):
  669. return reduced
  670. elif nnew == 0:
  671. # Indicates that a conflict was found; we can exit early
  672. return None
  673. # Perform the same filtering steps on any dependencies shared across
  674. # *all* packages in the group. Even if just one of the packages does
  675. # not have a particular dependency, it must be ignored in this pass.
  676. # Otherwise, we might do more filtering than we should---and it is
  677. # better to have extra packages here than missing ones.
  678. if reduced or name not in snames:
  679. snames.add(name)
  680. _dep_specs = groupby(
  681. lambda s: s.name,
  682. (
  683. dep_spec
  684. for prec in group
  685. if not filter_out.get(prec, False)
  686. for dep_spec in self.ms_depends(prec)
  687. if not dep_spec.optional
  688. ),
  689. )
  690. _dep_specs.pop("*", None) # discard track_features specs
  691. for deps_name, deps in sorted(
  692. _dep_specs.items(), key=lambda x: any(_.optional for _ in x[1])
  693. ):
  694. if len(deps) >= nnew:
  695. res = filter_group(set(deps))
  696. if res:
  697. reduced = True
  698. elif res is None:
  699. # Indicates that a conflict was found; we can exit early
  700. return None
  701. return reduced
  702. # Iterate on pruning until no progress is made. We've implemented
  703. # what amounts to "double-elimination" here; packages get one additional
  704. # chance after their first "False" reduction. This catches more instances
  705. # where one package's filter affects another. But we don't have to be
  706. # perfect about this, so performance matters.
  707. pruned_to_zero = set()
  708. for _ in range(2):
  709. snames.clear()
  710. slist = deque(explicit_specs)
  711. while slist:
  712. s = slist.popleft()
  713. if filter_group([s]):
  714. slist.append(s)
  715. else:
  716. pruned_to_zero.add(s)
  717. if pruned_to_zero and exit_on_conflict:
  718. return {}
  719. # Determine all valid packages in the dependency graph
  720. reduced_index2 = {
  721. prec: prec for prec in (make_feature_record(fstr) for fstr in features)
  722. }
  723. specs_by_name_seed = {}
  724. for s in explicit_specs:
  725. specs_by_name_seed[s.name] = specs_by_name_seed.get(s.name, []) + [s]
  726. for explicit_spec in explicit_specs:
  727. add_these_precs2 = tuple(
  728. prec
  729. for prec in self.find_matches(explicit_spec)
  730. if prec not in reduced_index2 and self.valid2(prec, filter_out)
  731. )
  732. if strict_channel_priority and add_these_precs2:
  733. strict_channel_name = self._get_strict_channel(add_these_precs2[0].name)
  734. add_these_precs2 = tuple(
  735. prec
  736. for prec in add_these_precs2
  737. if prec.channel.name == strict_channel_name
  738. )
  739. reduced_index2.update((prec, prec) for prec in add_these_precs2)
  740. for pkg in add_these_precs2:
  741. # what we have seen is only relevant within the context of a single package
  742. # that is picked up because of an explicit spec. We don't want the
  743. # broadening check to apply across packages at the explicit level; only
  744. # at the level of deps below that explicit package.
  745. seen_specs = set()
  746. specs_by_name = copy.deepcopy(specs_by_name_seed)
  747. dep_specs = set(self.ms_depends(pkg))
  748. for dep in dep_specs:
  749. specs = specs_by_name.get(dep.name, [])
  750. if dep not in specs and (
  751. not specs or dep.strictness >= specs[0].strictness
  752. ):
  753. specs.insert(0, dep)
  754. specs_by_name[dep.name] = specs
  755. while dep_specs:
  756. # used for debugging
  757. # size_index = len(reduced_index2)
  758. # specs_added = []
  759. ms = dep_specs.pop()
  760. seen_specs.add(ms)
  761. for dep_pkg in (
  762. _ for _ in self.find_matches(ms) if _ not in reduced_index2
  763. ):
  764. if not self.valid2(dep_pkg, filter_out):
  765. continue
  766. # expand the reduced index if not using strict channel priority,
  767. # or if using it and this package is in the appropriate channel
  768. if not strict_channel_priority or (
  769. self._get_strict_channel(dep_pkg.name)
  770. == dep_pkg.channel.name
  771. ):
  772. reduced_index2[dep_pkg] = dep_pkg
  773. # recurse to deps of this dep
  774. new_specs = set(self.ms_depends(dep_pkg)) - seen_specs
  775. for new_ms in new_specs:
  776. # We do not pull packages into the reduced index due
  777. # to a track_features dependency. Remember, a feature
  778. # specifies a "soft" dependency: it must be in the
  779. # environment, but it is not _pulled_ in. The SAT
  780. # logic doesn't do a perfect job of capturing this
  781. # behavior, but keeping these packags out of the
  782. # reduced index helps. Of course, if _another_
  783. # package pulls it in by dependency, that's fine.
  784. if (
  785. "track_features" not in new_ms
  786. and not self._broader(
  787. new_ms,
  788. tuple(specs_by_name.get(new_ms.name, ())),
  789. )
  790. ):
  791. dep_specs.add(new_ms)
  792. # if new_ms not in dep_specs:
  793. # specs_added.append(new_ms)
  794. else:
  795. seen_specs.add(new_ms)
  796. # debugging info - see what specs are bringing in the largest blobs
  797. # if size_index != len(reduced_index2):
  798. # print("MS {} added {} pkgs to index".format(ms,
  799. # len(reduced_index2) - size_index))
  800. # if specs_added:
  801. # print("MS {} added {} specs to further examination".format(ms,
  802. # specs_added))
  803. reduced_index2 = frozendict(reduced_index2)
  804. self._reduced_index_cache[cache_key] = reduced_index2
  805. return reduced_index2
  806. def match_any(self, mss, prec):
  807. return any(ms.match(prec) for ms in mss)
  808. def find_matches(self, spec: MatchSpec) -> tuple[PackageRecord]:
  809. res = self._cached_find_matches.get(spec, None)
  810. if res is not None:
  811. return res
  812. spec_name = spec.get_exact_value("name")
  813. if spec_name:
  814. candidate_precs = self.groups.get(spec_name, ())
  815. elif spec.get_exact_value("track_features"):
  816. feature_names = spec.get_exact_value("track_features")
  817. candidate_precs = itertools.chain.from_iterable(
  818. self.trackers.get(feature_name, ()) for feature_name in feature_names
  819. )
  820. else:
  821. candidate_precs = self.index.values()
  822. res = tuple(p for p in candidate_precs if spec.match(p))
  823. self._cached_find_matches[spec] = res
  824. return res
  825. def ms_depends(self, prec: PackageRecord) -> list[MatchSpec]:
  826. deps = self.ms_depends_.get(prec)
  827. if deps is None:
  828. deps = [MatchSpec(d) for d in prec.combined_depends]
  829. deps.extend(MatchSpec(track_features=feat) for feat in prec.features)
  830. self.ms_depends_[prec] = deps
  831. return deps
  832. def version_key(self, prec, vtype=None):
  833. channel = prec.channel
  834. channel_priority = self._channel_priorities_map.get(
  835. channel.name, 1
  836. ) # TODO: ask @mcg1969 why the default value is 1 here # NOQA
  837. valid = 1 if channel_priority < MAX_CHANNEL_PRIORITY else 0
  838. version_comparator = VersionOrder(prec.get("version", ""))
  839. build_number = prec.get("build_number", 0)
  840. build_string = prec.get("build")
  841. noarch = -int(prec.subdir == "noarch")
  842. if self._channel_priority != ChannelPriority.DISABLED:
  843. vkey = [valid, -channel_priority, version_comparator, build_number, noarch]
  844. else:
  845. vkey = [valid, version_comparator, -channel_priority, build_number, noarch]
  846. if self._solver_ignore_timestamps:
  847. vkey.append(build_string)
  848. else:
  849. vkey.extend((prec.get("timestamp", 0), build_string))
  850. return vkey
  851. @staticmethod
  852. def _make_channel_priorities(channels):
  853. priorities_map = {}
  854. for priority_counter, chn in enumerate(
  855. itertools.chain.from_iterable(
  856. (Channel(cc) for cc in c._channels)
  857. if isinstance(c, MultiChannel)
  858. else (c,)
  859. for c in (Channel(c) for c in channels)
  860. )
  861. ):
  862. channel_name = chn.name
  863. if channel_name in priorities_map:
  864. continue
  865. priorities_map[channel_name] = min(
  866. priority_counter, MAX_CHANNEL_PRIORITY - 1
  867. )
  868. return priorities_map
  869. def get_pkgs(self, ms, emptyok=False): # pragma: no cover
  870. # legacy method for conda-build
  871. ms = MatchSpec(ms)
  872. precs = self.find_matches(ms)
  873. if not precs and not emptyok:
  874. raise ResolvePackageNotFound([(ms,)])
  875. return sorted(precs, key=self.version_key)
  876. @staticmethod
  877. def to_sat_name(val):
  878. # val can be a PackageRecord or MatchSpec
  879. if isinstance(val, PackageRecord):
  880. return val.dist_str()
  881. elif isinstance(val, MatchSpec):
  882. return "@s@" + str(val) + ("?" if val.optional else "")
  883. else:
  884. raise NotImplementedError()
  885. @staticmethod
  886. def to_feature_metric_id(prec_dist_str, feat):
  887. return f"@fm@{prec_dist_str}@{feat}"
    def push_MatchSpec(self, C, spec):
        """Register *spec* as a named SAT variable on clause set *C*.

        Returns the SAT variable name.  If the spec was pushed before, the
        existing variable is reused.  Otherwise the variable is defined as
        the disjunction of the SAT variables of the matching records — or,
        when every record in the relevant group matches, reuses/derives the
        whole-group variable instead.

        NOTE(review): relies on a MatchSpec internal (``_is_single``);
        keep in sync with MatchSpec.
        """
        spec = MatchSpec(spec)
        sat_name = self.to_sat_name(spec)
        m = C.from_name(sat_name)
        if m is not None:
            # the spec has already been pushed onto the clauses stack
            return sat_name
        simple = spec._is_single()
        nm = spec.get_exact_value("name")
        # normalized, non-empty track_features values from the spec
        tf = frozenset(
            _tf
            for _tf in (f.strip() for f in spec.get_exact_value("track_features") or ())
            if _tf
        )
        if nm:
            # candidates come from the name group
            tgroup = libs = self.groups.get(nm, [])
        elif tf:
            # candidates come from the feature tracker group
            assert len(tf) == 1
            k = next(iter(tf))
            tgroup = libs = self.trackers.get(k, [])
        else:
            # no name and no feature: scan everything; never "simple"
            tgroup = libs = self.index.keys()
            simple = False
        if not simple:
            # narrow the candidate group to the records the spec matches
            libs = [fkey for fkey in tgroup if spec.match(fkey)]
        if len(libs) == len(tgroup):
            # the spec matches the entire candidate group
            if spec.optional:
                m = TRUE
            elif not simple:
                # equivalent to the plain group spec; reuse its variable
                ms2 = MatchSpec(track_features=tf) if tf else MatchSpec(nm)
                m = C.from_name(self.push_MatchSpec(C, ms2))
        if m is None:
            # define the variable as "any matching record is installed";
            # optional specs are also satisfied when the group is absent
            sat_names = [self.to_sat_name(prec) for prec in libs]
            if spec.optional:
                ms2 = MatchSpec(track_features=tf) if tf else MatchSpec(nm)
                sat_names.append("!" + self.to_sat_name(ms2))
            m = C.Any(sat_names)
        C.name_var(m, sat_name)
        return sat_name
    @time_recorder(module_name=__name__)
    def gen_clauses(self):
        """Translate the package index into a SAT clause set.

        Creates one boolean variable per package record and one per package
        name group, requires exactly one record per selected group, and adds
        implication clauses so that an installed record pulls in each of its
        non-virtual dependencies.  Returns the populated Clauses object.
        """
        C = Clauses(sat_solver=_get_sat_solver_cls(context.sat_solver))
        for name, group in self.groups.items():
            group = [self.to_sat_name(prec) for prec in group]
            # Create one variable for each package
            for sat_name in group:
                C.new_var(sat_name)
            # Create one variable for the group
            m = C.new_var(self.to_sat_name(MatchSpec(name)))
            # Exactly one of the package variables, OR
            # the negation of the group variable, is true
            C.Require(C.ExactlyOne, group + [C.Not(m)])
        # If a package is installed, its dependencies must be as well
        for prec in self.index.values():
            nkey = C.Not(self.to_sat_name(prec))
            for ms in self.ms_depends(prec):
                # Virtual packages can't be installed, we ignore them
                if not ms.name.startswith("__"):
                    C.Require(C.Or, nkey, self.push_MatchSpec(C, ms))
        if log.isEnabledFor(DEBUG):
            log.debug(
                "gen_clauses returning with clause count: %d", C.get_clause_count()
            )
        return C
  952. def generate_spec_constraints(self, C, specs):
  953. result = [(self.push_MatchSpec(C, ms),) for ms in specs]
  954. if log.isEnabledFor(DEBUG):
  955. log.debug(
  956. "generate_spec_constraints returning with clause count: %d",
  957. C.get_clause_count(),
  958. )
  959. return result
  960. def generate_feature_count(self, C):
  961. result = {
  962. self.push_MatchSpec(C, MatchSpec(track_features=name)): 1
  963. for name in self.trackers.keys()
  964. }
  965. if log.isEnabledFor(DEBUG):
  966. log.debug(
  967. "generate_feature_count returning with clause count: %d",
  968. C.get_clause_count(),
  969. )
  970. return result
  971. def generate_update_count(self, C, specs):
  972. return {
  973. "!" + ms.target: 1 for ms in specs if ms.target and C.from_name(ms.target)
  974. }
    def generate_feature_metric(self, C):
        """Build a minimization objective penalizing "featureless" records.

        Returns a dict[varname, coeff] suitable for ``C.minimize()``.
        """
        eq = {}  # a C.minimize() objective: dict[varname, coeff]
        # Given a pair (prec, feature), assign a "1" score IF:
        # - The prec is installed
        # - The prec does NOT require the feature
        # - At least one package in the group DOES require the feature
        # - A package that tracks the feature is installed
        for name, group in self.groups.items():
            prec_feats = {self.to_sat_name(prec): set(prec.features) for prec in group}
            # features used anywhere in the group that are also tracked
            active_feats = set.union(*prec_feats.values()).intersection(self.trackers)
            for feat in active_feats:
                clause_id_for_feature = self.push_MatchSpec(
                    C, MatchSpec(track_features=feat)
                )
                for prec_sat_name, features in prec_feats.items():
                    if feat not in features:
                        # score this record when it is installed alongside a
                        # tracker of the feature it lacks
                        feature_metric_id = self.to_feature_metric_id(
                            prec_sat_name, feat
                        )
                        C.name_var(
                            C.And(prec_sat_name, clause_id_for_feature),
                            feature_metric_id,
                        )
                        eq[feature_metric_id] = 1
        return eq
  1000. def generate_removal_count(self, C, specs):
  1001. return {"!" + self.push_MatchSpec(C, ms.name): 1 for ms in specs}
  1002. def generate_install_count(self, C, specs):
  1003. return {self.push_MatchSpec(C, ms.name): 1 for ms in specs if ms.optional}
  1004. def generate_package_count(self, C, missing):
  1005. return {self.push_MatchSpec(C, nm): 1 for nm in missing}
    def generate_version_metrics(self, C, specs, include0=False):
        """Build channel/version/build/noarch/timestamp minimization objectives.

        For every name group touched by *specs*, walks the group's records in
        version-key order and assigns increasing penalty coefficients as the
        channel, version, build number, noarch-ness or timestamp degrades
        relative to the best record.  Zero coefficients are emitted only when
        *include0* is True.  Returns ``(eqc, eqv, eqb, eqa, eqt)``.
        """
        # each of these are weights saying how well packages match the specs
        # format for each: a C.minimize() objective: dict[varname, coeff]
        eqc = {}  # channel
        eqv = {}  # version
        eqb = {}  # build number
        eqa = {}  # arch/noarch
        eqt = {}  # timestamp

        sdict = {}  # dict[package_name, PackageRecord]

        for s in specs:
            s = MatchSpec(s)  # needed for testing
            sdict.setdefault(s.name, [])
            # # TODO: this block is important! can't leave it commented out
            # rec = sdict.setdefault(s.name, [])
            # if s.target:
            #     dist = Dist(s.target)
            #     if dist in self.index:
            #         if self.index[dist].get('priority', 0) < MAX_CHANNEL_PRIORITY:
            #             rec.append(dist)

        for name, targets in sdict.items():
            pkgs = [(self.version_key(p), p) for p in self.groups.get(name, [])]
            pkey = None
            # keep in mind that pkgs is already sorted according to version_key (a tuple,
            # so composite sort key). Later entries in the list are, by definition,
            # greater in some way, so simply comparing with != suffices.
            for version_key, prec in pkgs:
                if targets and any(prec == t for t in targets):
                    continue
                if pkey is None:
                    ic = iv = ib = it = ia = 0
                # valid package, channel priority
                elif pkey[0] != version_key[0] or pkey[1] != version_key[1]:
                    ic += 1
                    iv = ib = it = ia = 0
                # version
                elif pkey[2] != version_key[2]:
                    iv += 1
                    ib = it = ia = 0
                # build number
                elif pkey[3] != version_key[3]:
                    ib += 1
                    it = ia = 0
                # arch/noarch
                elif pkey[4] != version_key[4]:
                    ia += 1
                    it = 0
                elif not self._solver_ignore_timestamps and pkey[5] != version_key[5]:
                    it += 1

                prec_sat_name = self.to_sat_name(prec)
                if ic or include0:
                    eqc[prec_sat_name] = ic
                if iv or include0:
                    eqv[prec_sat_name] = iv
                if ib or include0:
                    eqb[prec_sat_name] = ib
                if ia or include0:
                    eqa[prec_sat_name] = ia
                if it or include0:
                    eqt[prec_sat_name] = it
                pkey = version_key

        return eqc, eqv, eqb, eqa, eqt
  1067. def dependency_sort(
  1068. self,
  1069. must_have: dict[str, PackageRecord],
  1070. ) -> list[PackageRecord]:
  1071. assert isinstance(must_have, dict)
  1072. digraph = {} # dict[str, set[dependent_package_names]]
  1073. for package_name, prec in must_have.items():
  1074. if prec in self.index:
  1075. digraph[package_name] = {ms.name for ms in self.ms_depends(prec)}
  1076. # There are currently at least three special cases to be aware of.
  1077. # 1. The `toposort()` function, called below, contains special case code to remove
  1078. # any circular dependency between python and pip.
  1079. # 2. conda/plan.py has special case code for menuinst
  1080. # Always link/unlink menuinst first/last on windows in case a subsequent
  1081. # package tries to import it to create/remove a shortcut
  1082. # 3. On windows, python noarch packages need an implicit dependency on conda added, if
  1083. # conda is in the list of packages for the environment. Python noarch packages
  1084. # that have entry points use conda's own conda.exe python entry point binary. If conda
  1085. # is going to be updated during an operation, the unlink / link order matters.
  1086. # See issue #6057.
  1087. if on_win and "conda" in digraph:
  1088. for package_name, dist in must_have.items():
  1089. record = self.index.get(prec)
  1090. if hasattr(record, "noarch") and record.noarch == NoarchType.python:
  1091. digraph[package_name].add("conda")
  1092. sorted_keys = toposort(digraph)
  1093. must_have = must_have.copy()
  1094. # Take all of the items in the sorted keys
  1095. # Don't fail if the key does not exist
  1096. result = [must_have.pop(key) for key in sorted_keys if key in must_have]
  1097. # Take any key that were not sorted
  1098. result.extend(must_have.values())
  1099. return result
  1100. def environment_is_consistent(self, installed):
  1101. log.debug("Checking if the current environment is consistent")
  1102. if not installed:
  1103. return None, []
  1104. sat_name_map = {} # dict[sat_name, PackageRecord]
  1105. specs = []
  1106. for prec in installed:
  1107. sat_name_map[self.to_sat_name(prec)] = prec
  1108. specs.append(MatchSpec(f"{prec.name} {prec.version} {prec.build}"))
  1109. r2 = Resolve({prec: prec for prec in installed}, True, channels=self.channels)
  1110. C = r2.gen_clauses()
  1111. constraints = r2.generate_spec_constraints(C, specs)
  1112. solution = C.sat(constraints)
  1113. return bool(solution)
    def get_conflicting_specs(self, specs, explicit_specs):
        """Return the subset of *specs* that cannot be jointly satisfied.

        Returns an empty tuple when all specs are satisfiable together, a
        minimal unsatisfiable subset (as a tuple) when unsatisfiable and
        ``context.unsatisfiable_hints`` is enabled, or None when
        unsatisfiable and hints are disabled.
        """
        if not specs:
            return ()

        all_specs = set(specs) | set(explicit_specs)
        reduced_index = self.get_reduced_index(all_specs)

        # Check if satisfiable
        def mysat(specs, add_if=False):
            # closes over whichever (r2, C) pair is currently bound below
            constraints = r2.generate_spec_constraints(C, specs)
            return C.sat(constraints, add_if)

        if reduced_index:
            r2 = Resolve(reduced_index, True, channels=self.channels)
            C = r2.gen_clauses()
            solution = mysat(all_specs, True)
        else:
            # an empty reduced index means a conflict was already detected
            solution = None

        if solution:
            final_unsat_specs = ()
        elif context.unsatisfiable_hints:
            # rebind (r2, C) to the full index so mysat searches everything
            r2 = Resolve(self.index, True, channels=self.channels)
            C = r2.gen_clauses()
            # This first result is just a single unsatisfiable core. There may be several.
            final_unsat_specs = tuple(
                minimal_unsatisfiable_subset(
                    specs, sat=mysat, explicit_specs=explicit_specs
                )
            )
        else:
            final_unsat_specs = None
        return final_unsat_specs
    def bad_installed(self, installed, new_specs):
        """Identify installed packages involved in an inconsistency.

        Returns ``(limit, xtra)``: *limit* is a set of package names the
        solver should be restricted to (None when the environment is already
        consistent), and *xtra* is the list of installed records to preserve
        unchanged (None when nothing needs preserving).
        """
        log.debug("Checking if the current environment is consistent")
        if not installed:
            return None, []
        sat_name_map = {}  # dict[sat_name, PackageRecord]
        specs = []
        for prec in installed:
            sat_name_map[self.to_sat_name(prec)] = prec
            specs.append(MatchSpec(f"{prec.name} {prec.version} {prec.build}"))
        new_index = {prec: prec for prec in sat_name_map.values()}
        name_map = {p.name: p for p in new_index}
        if "python" in name_map and "pip" not in name_map:
            python_prec = new_index[name_map["python"]]
            if "pip" in python_prec.depends:
                # strip pip dependency from python if not installed in environment
                new_deps = [d for d in python_prec.depends if d != "pip"]
                python_prec.depends = new_deps
        r2 = Resolve(new_index, True, channels=self.channels)
        C = r2.gen_clauses()
        constraints = r2.generate_spec_constraints(C, specs)
        solution = C.sat(constraints)
        limit = xtra = None
        if not solution or xtra:

            def get_(name, snames):
                # transitively collect *name* plus the names of everything any
                # record in its group can depend on
                if name not in snames:
                    snames.add(name)
                    for fn in self.groups.get(name, []):
                        for ms in self.ms_depends(fn):
                            get_(ms.name, snames)

            # New addition: find the largest set of installed packages that
            # are consistent with each other, and include those in the
            # list of packages to maintain consistency with
            snames = set()
            eq_optional_c = r2.generate_removal_count(C, specs)
            solution, _ = C.minimize(eq_optional_c, C.sat())
            snames.update(
                sat_name_map[sat_name]["name"]
                for sat_name in (C.from_index(s) for s in solution)
                if sat_name and sat_name[0] != "!" and "@" not in sat_name
            )
            # Existing behavior: keep all specs and their dependencies
            for spec in new_specs:
                get_(MatchSpec(spec).name, snames)
            if len(snames) < len(sat_name_map):
                limit = snames
                xtra = [
                    rec
                    for sat_name, rec in sat_name_map.items()
                    if rec["name"] not in snames
                ]
                log.debug(
                    "Limiting solver to the following packages: %s", ", ".join(limit)
                )
        if xtra:
            log.debug("Packages to be preserved: %s", xtra)
        return limit, xtra
  1199. def restore_bad(self, pkgs, preserve):
  1200. if preserve:
  1201. sdict = {prec.name: prec for prec in pkgs}
  1202. pkgs.extend(p for p in preserve if p.name not in sdict)
    def install_specs(self, specs, installed, update_deps=True):
        """Augment *specs* with constraints derived from *installed* records.

        Each installed record not already named by *specs* (and not excluded
        by ``bad_installed``'s limit) contributes either a name-only spec
        targeting the installed build (update_deps=True) or an exact
        name/version/build/channel pin (update_deps=False).

        Returns ``(specs, preserve)``: the extended spec tuple and the list
        of installed records to preserve unchanged.
        """
        specs = list(map(MatchSpec, specs))
        snames = {s.name for s in specs}
        log.debug("Checking satisfiability of current install")
        limit, preserve = self.bad_installed(installed, specs)
        for prec in installed:
            if prec not in self.index:
                continue
            name, version, build = prec.name, prec.version, prec.build
            schannel = prec.channel.canonical_name
            # skip packages already requested, or outside the solver limit
            if name in snames or limit is not None and name not in limit:
                continue
            # If update_deps=True, set the target package in MatchSpec so that
            # the solver can minimize the version change. If update_deps=False,
            # fix the version and build so that no change is possible.
            if update_deps:
                # TODO: fix target here
                spec = MatchSpec(name=name, target=prec.dist_str())
            else:
                spec = MatchSpec(
                    name=name, version=version, build=build, channel=schannel
                )
            specs.insert(0, spec)
        return tuple(specs), preserve
  1227. def install(self, specs, installed=None, update_deps=True, returnall=False):
  1228. specs, preserve = self.install_specs(specs, installed or [], update_deps)
  1229. pkgs = []
  1230. if specs:
  1231. pkgs = self.solve(specs, returnall=returnall, _remove=False)
  1232. self.restore_bad(pkgs, preserve)
  1233. return pkgs
    def remove_specs(self, specs, installed):
        """Convert removal requests into solver specs.

        Returns ``(nspecs, preserve)``: optional specs that force the named
        packages out (plus keep-if-possible specs for the remaining installed
        packages), and the installed records to preserve untouched.
        """
        nspecs = []
        # There's an imperfect thing happening here. "specs" nominally contains
        # a list of package names or track_feature values to be removed. But
        # because of add_defaults_to_specs it may also contain version constraints
        # like "python 2.7*", which are *not* asking for python to be removed.
        # We need to separate these two kinds of specs here.
        for s in map(MatchSpec, specs):
            # Since '@' is an illegal version number, this ensures that all of
            # these matches will never match an actual package. Combined with
            # optional=True, this has the effect of forcing their removal.
            if s._is_single():
                nspecs.append(MatchSpec(s, version="@", optional=True))
            else:
                nspecs.append(MatchSpec(s, optional=True))
        snames = {s.name for s in nspecs if s.name}
        limit, _ = self.bad_installed(installed, nspecs)
        preserve = []
        for prec in installed:
            nm, ver = prec.name, prec.version
            if nm in snames:
                # explicitly requested for removal; no keep spec
                continue
            elif limit is not None:
                # environment inconsistent: freeze everything else as-is
                preserve.append(prec)
            else:
                # TODO: fix target here
                nspecs.append(
                    MatchSpec(
                        name=nm,
                        version=">=" + ver if ver else None,
                        optional=True,
                        target=prec.dist_str(),
                    )
                )
        return nspecs, preserve
  1269. def remove(self, specs, installed):
  1270. specs, preserve = self.remove_specs(specs, installed)
  1271. pkgs = self.solve(specs, _remove=True)
  1272. self.restore_bad(pkgs, preserve)
  1273. return pkgs
  1274. @time_recorder(module_name=__name__)
  1275. def solve(
  1276. self,
  1277. specs: list,
  1278. returnall: bool = False,
  1279. _remove=False,
  1280. specs_to_add=None,
  1281. history_specs=None,
  1282. should_retry_solve=False,
  1283. ) -> list[PackageRecord]:
  1284. if specs and not isinstance(specs[0], MatchSpec):
  1285. specs = tuple(MatchSpec(_) for _ in specs)
  1286. specs = set(specs)
  1287. if log.isEnabledFor(DEBUG):
  1288. dlist = dashlist(
  1289. str("%i: %s target=%s optional=%s" % (i, s, s.target, s.optional))
  1290. for i, s in enumerate(specs)
  1291. )
  1292. log.debug("Solving for: %s", dlist)
  1293. if not specs:
  1294. return ()
  1295. # Find the compliant packages
  1296. log.debug("Solve: Getting reduced index of compliant packages")
  1297. len0 = len(specs)
  1298. reduced_index = self.get_reduced_index(
  1299. specs, exit_on_conflict=not context.unsatisfiable_hints
  1300. )
  1301. if not reduced_index:
  1302. # something is intrinsically unsatisfiable - either not found or
  1303. # not the right version
  1304. not_found_packages = set()
  1305. wrong_version_packages = set()
  1306. for s in specs:
  1307. if not self.find_matches(s):
  1308. if s.name in self.groups:
  1309. wrong_version_packages.add(s)
  1310. else:
  1311. not_found_packages.add(s)
  1312. if not_found_packages:
  1313. raise ResolvePackageNotFound(not_found_packages)
  1314. elif wrong_version_packages:
  1315. raise UnsatisfiableError(
  1316. [[d] for d in wrong_version_packages], chains=False
  1317. )
  1318. if should_retry_solve:
  1319. # We don't want to call find_conflicts until our last try.
  1320. # This jumps back out to conda/cli/install.py, where the
  1321. # retries happen
  1322. raise UnsatisfiableError({})
  1323. else:
  1324. self.find_conflicts(specs, specs_to_add, history_specs)
  1325. # Check if satisfiable
  1326. log.debug("Solve: determining satisfiability")
  1327. def mysat(specs, add_if=False):
  1328. constraints = r2.generate_spec_constraints(C, specs)
  1329. return C.sat(constraints, add_if)
  1330. # Return a solution of packages
  1331. def clean(sol):
  1332. return [
  1333. q
  1334. for q in (C.from_index(s) for s in sol)
  1335. if q and q[0] != "!" and "@" not in q
  1336. ]
  1337. def is_converged(solution):
  1338. """Determine if the SAT problem has converged to a single solution.
  1339. This is determined by testing for a SAT solution with the current
  1340. clause set and a clause in which at least one of the packages in
  1341. the current solution is excluded. If a solution exists the problem
  1342. has not converged as multiple solutions still exist.
  1343. """
  1344. psolution = clean(solution)
  1345. nclause = tuple(C.Not(C.from_name(q)) for q in psolution)
  1346. if C.sat((nclause,), includeIf=False) is None:
  1347. return True
  1348. return False
  1349. r2 = Resolve(reduced_index, True, channels=self.channels)
  1350. C = r2.gen_clauses()
  1351. solution = mysat(specs, True)
  1352. if not solution:
  1353. if should_retry_solve:
  1354. # we don't want to call find_conflicts until our last try
  1355. raise UnsatisfiableError({})
  1356. else:
  1357. self.find_conflicts(specs, specs_to_add, history_specs)
  1358. speco = [] # optional packages
  1359. specr = [] # requested packages
  1360. speca = [] # all other packages
  1361. specm = set(r2.groups) # missing from specs
  1362. for k, s in enumerate(specs):
  1363. if s.name in specm:
  1364. specm.remove(s.name)
  1365. if not s.optional:
  1366. (speca if s.target or k >= len0 else specr).append(s)
  1367. elif any(r2.find_matches(s)):
  1368. s = MatchSpec(s.name, optional=True, target=s.target)
  1369. speco.append(s)
  1370. speca.append(s)
  1371. speca.extend(MatchSpec(s) for s in specm)
  1372. if log.isEnabledFor(DEBUG):
  1373. log.debug("Requested specs: %s", dashlist(sorted(str(s) for s in specr)))
  1374. log.debug("Optional specs: %s", dashlist(sorted(str(s) for s in speco)))
  1375. log.debug("All other specs: %s", dashlist(sorted(str(s) for s in speca)))
  1376. log.debug("missing specs: %s", dashlist(sorted(str(s) for s in specm)))
  1377. # Removed packages: minimize count
  1378. log.debug("Solve: minimize removed packages")
  1379. if _remove:
  1380. eq_optional_c = r2.generate_removal_count(C, speco)
  1381. solution, obj7 = C.minimize(eq_optional_c, solution)
  1382. log.debug("Package removal metric: %d", obj7)
  1383. # Requested packages: maximize versions
  1384. log.debug("Solve: maximize versions of requested packages")
  1385. eq_req_c, eq_req_v, eq_req_b, eq_req_a, eq_req_t = r2.generate_version_metrics(
  1386. C, specr
  1387. )
  1388. solution, obj3a = C.minimize(eq_req_c, solution)
  1389. solution, obj3 = C.minimize(eq_req_v, solution)
  1390. log.debug("Initial package channel/version metric: %d/%d", obj3a, obj3)
  1391. # Track features: minimize feature count
  1392. log.debug("Solve: minimize track_feature count")
  1393. eq_feature_count = r2.generate_feature_count(C)
  1394. solution, obj1 = C.minimize(eq_feature_count, solution)
  1395. log.debug("Track feature count: %d", obj1)
  1396. # Featured packages: minimize number of featureless packages
  1397. # installed when a featured alternative is feasible.
  1398. # For example, package name foo exists with two built packages. One with
  1399. # 'track_features: 'feat1', and one with 'track_features': 'feat2'.
  1400. # The previous "Track features" minimization pass has chosen 'feat1' for the
  1401. # environment, but not 'feat2'. In this case, the 'feat2' version of foo is
  1402. # considered "featureless."
  1403. eq_feature_metric = r2.generate_feature_metric(C)
  1404. solution, obj2 = C.minimize(eq_feature_metric, solution)
  1405. log.debug("Package misfeature count: %d", obj2)
  1406. # Requested packages: maximize builds
  1407. log.debug("Solve: maximize build numbers of requested packages")
  1408. solution, obj4 = C.minimize(eq_req_b, solution)
  1409. log.debug("Initial package build metric: %d", obj4)
  1410. # prefer arch packages where available for requested specs
  1411. log.debug("Solve: prefer arch over noarch for requested packages")
  1412. solution, noarch_obj = C.minimize(eq_req_a, solution)
  1413. log.debug("Noarch metric: %d", noarch_obj)
  1414. # Optional installations: minimize count
  1415. if not _remove:
  1416. log.debug("Solve: minimize number of optional installations")
  1417. eq_optional_install = r2.generate_install_count(C, speco)
  1418. solution, obj49 = C.minimize(eq_optional_install, solution)
  1419. log.debug("Optional package install metric: %d", obj49)
  1420. # Dependencies: minimize the number of packages that need upgrading
  1421. log.debug("Solve: minimize number of necessary upgrades")
  1422. eq_u = r2.generate_update_count(C, speca)
  1423. solution, obj50 = C.minimize(eq_u, solution)
  1424. log.debug("Dependency update count: %d", obj50)
  1425. # Remaining packages: maximize versions, then builds
  1426. log.debug(
  1427. "Solve: maximize versions and builds of indirect dependencies. "
  1428. "Prefer arch over noarch where equivalent."
  1429. )
  1430. eq_c, eq_v, eq_b, eq_a, eq_t = r2.generate_version_metrics(C, speca)
  1431. solution, obj5a = C.minimize(eq_c, solution)
  1432. solution, obj5 = C.minimize(eq_v, solution)
  1433. solution, obj6 = C.minimize(eq_b, solution)
  1434. solution, obj6a = C.minimize(eq_a, solution)
  1435. log.debug(
  1436. "Additional package channel/version/build/noarch metrics: %d/%d/%d/%d",
  1437. obj5a,
  1438. obj5,
  1439. obj6,
  1440. obj6a,
  1441. )
  1442. # Prune unnecessary packages
  1443. log.debug("Solve: prune unnecessary packages")
  1444. eq_c = r2.generate_package_count(C, specm)
  1445. solution, obj7 = C.minimize(eq_c, solution, trymax=True)
  1446. log.debug("Weak dependency count: %d", obj7)
  1447. if not is_converged(solution):
  1448. # Maximize timestamps
  1449. eq_t.update(eq_req_t)
  1450. solution, obj6t = C.minimize(eq_t, solution)
  1451. log.debug("Timestamp metric: %d", obj6t)
  1452. log.debug("Looking for alternate solutions")
  1453. nsol = 1
  1454. psolutions = []
  1455. psolution = clean(solution)
  1456. psolutions.append(psolution)
  1457. while True:
  1458. nclause = tuple(C.Not(C.from_name(q)) for q in psolution)
  1459. solution = C.sat((nclause,), True)
  1460. if solution is None:
  1461. break
  1462. nsol += 1
  1463. if nsol > 10:
  1464. log.debug("Too many solutions; terminating")
  1465. break
  1466. psolution = clean(solution)
  1467. psolutions.append(psolution)
  1468. if nsol > 1:
  1469. psols2 = list(map(set, psolutions))
  1470. common = set.intersection(*psols2)
  1471. diffs = [sorted(set(sol) - common) for sol in psols2]
  1472. if not context.json:
  1473. stdoutlog.info(
  1474. "\nWarning: %s possible package resolutions "
  1475. "(only showing differing packages):%s%s"
  1476. % (
  1477. ">10" if nsol > 10 else nsol,
  1478. dashlist(", ".join(diff) for diff in diffs),
  1479. "\n ... and others" if nsol > 10 else "",
  1480. )
  1481. )
  1482. # def stripfeat(sol):
  1483. # return sol.split('[')[0]
  1484. new_index = {self.to_sat_name(prec): prec for prec in self.index.values()}
  1485. if returnall:
  1486. if len(psolutions) > 1:
  1487. raise RuntimeError()
  1488. # TODO: clean up this mess
  1489. # return [sorted(Dist(stripfeat(dname)) for dname in psol) for psol in psolutions]
  1490. # return [sorted((new_index[sat_name] for sat_name in psol), key=lambda x: x.name)
  1491. # for psol in psolutions]
  1492. # return sorted(Dist(stripfeat(dname)) for dname in psolutions[0])
  1493. return sorted(
  1494. (new_index[sat_name] for sat_name in psolutions[0]), key=lambda x: x.name
  1495. )