plan.py

# Copyright (C) 2012 Anaconda, Inc
# SPDX-License-Identifier: BSD-3-Clause
"""
Handle the planning of installs and their execution.

NOTE:
    conda.install uses canonical package names in its interface functions,
    whereas conda.resolve uses package filenames, as those are used as index
    keys. We try to keep fixes to this "impedance mismatch" local to this
    module.
"""
import sys
from collections import defaultdict
from logging import getLogger

try:
    from boltons.setutils import IndexedSet
except ImportError:  # pragma: no cover
    from ._vendor.boltons.setutils import IndexedSet

from .base.constants import DEFAULTS_CHANNEL_NAME, UNKNOWN_CHANNEL
from .base.context import context, stack_context_default
from .common.io import dashlist, env_vars, time_recorder
from .common.iterators import groupby_to_dict as groupby
from .core.index import LAST_CHANNEL_URLS, _supplement_index_with_prefix
from .core.link import PrefixSetup, UnlinkLinkTransaction
from .core.solve import diff_for_unlink_link_precs
from .exceptions import CondaIndexError, PackagesNotFoundError
from .history import History
from .instructions import FETCH, LINK, SYMLINK_CONDA, UNLINK
from .models.channel import Channel, prioritize_channels
from .models.dist import Dist
from .models.enums import LinkType
from .models.match_spec import ChannelMatch
from .models.prefix_graph import PrefixGraph
from .models.records import PackageRecord
from .models.version import normalized_version
from .resolve import MatchSpec
from .utils import human_bytes

log = getLogger(__name__)

# TODO: Remove conda/plan.py. This module should be almost completely deprecated now.


def print_dists(dists_extras):
    fmt = " %-27s|%17s"
    print(fmt % ("package", "build"))
    print(fmt % ("-" * 27, "-" * 17))
    for prec, extra in dists_extras:
        line = fmt % (prec.name + "-" + prec.version, prec.build)
        if extra:
            line += extra
        print(line)


def display_actions(
    actions, index, show_channel_urls=None, specs_to_remove=(), specs_to_add=()
):
    prefix = actions.get("PREFIX")
    builder = ["", "## Package Plan ##\n"]
    if prefix:
        builder.append(" environment location: %s" % prefix)
        builder.append("")
    if specs_to_remove:
        builder.append(
            " removed specs: %s"
            % dashlist(sorted(str(s) for s in specs_to_remove), indent=4)
        )
        builder.append("")
    if specs_to_add:
        builder.append(
            " added / updated specs: %s"
            % dashlist(sorted(str(s) for s in specs_to_add), indent=4)
        )
        builder.append("")
    print("\n".join(builder))

    if show_channel_urls is None:
        show_channel_urls = context.show_channel_urls

    def channel_str(rec):
        if rec.get("schannel"):
            return rec["schannel"]
        if rec.get("url"):
            return Channel(rec["url"]).canonical_name
        if rec.get("channel"):
            return Channel(rec["channel"]).canonical_name
        return UNKNOWN_CHANNEL

    def channel_filt(s):
        if show_channel_urls is False:
            return ""
        if show_channel_urls is None and s == DEFAULTS_CHANNEL_NAME:
            return ""
        return s

    if actions.get(FETCH):
        print("\nThe following packages will be downloaded:\n")

        disp_lst = []
        for prec in actions[FETCH]:
            assert isinstance(prec, PackageRecord)
            extra = "%15s" % human_bytes(prec["size"])
            schannel = channel_filt(prec.channel.canonical_name)
            if schannel:
                extra += " " + schannel
            disp_lst.append((prec, extra))
        print_dists(disp_lst)

        if index and len(actions[FETCH]) > 1:
            num_bytes = sum(prec["size"] for prec in actions[FETCH])
            print(" " * 4 + "-" * 60)
            print(" " * 43 + "Total: %14s" % human_bytes(num_bytes))

    # package -> [oldver-oldbuild, newver-newbuild]
    packages = defaultdict(lambda: list(("", "")))
    features = defaultdict(lambda: list(("", "")))
    channels = defaultdict(lambda: list(("", "")))
    records = defaultdict(lambda: list((None, None)))
    linktypes = {}

    for prec in actions.get(LINK, []):
        assert isinstance(prec, PackageRecord)
        pkg = prec["name"]
        channels[pkg][1] = channel_str(prec)
        packages[pkg][1] = prec["version"] + "-" + prec["build"]
        records[pkg][1] = prec
        # TODO: this is a lie; may have to give this report after UnlinkLinkTransaction.verify()
        linktypes[pkg] = LinkType.hardlink
        features[pkg][1] = ",".join(prec.get("features") or ())
    for prec in actions.get(UNLINK, []):
        assert isinstance(prec, PackageRecord)
        pkg = prec["name"]
        channels[pkg][0] = channel_str(prec)
        packages[pkg][0] = prec["version"] + "-" + prec["build"]
        records[pkg][0] = prec
        features[pkg][0] = ",".join(prec.get("features") or ())

    new = {p for p in packages if not packages[p][0]}
    removed = {p for p in packages if not packages[p][1]}
    # New packages are actually listed in the left-hand column,
    # so let's move them over there
    for pkg in new:
        for var in (packages, features, channels, records):
            var[pkg] = var[pkg][::-1]

    updated = set()
    downgraded = set()
    channeled = set()
    oldfmt = {}
    newfmt = {}
    empty = True
    if packages:
        empty = False
        maxpkg = max(len(p) for p in packages) + 1
        maxoldver = max(len(p[0]) for p in packages.values())
        maxnewver = max(len(p[1]) for p in packages.values())
        maxoldfeatures = max(len(p[0]) for p in features.values())
        maxnewfeatures = max(len(p[1]) for p in features.values())
        maxoldchannels = max(len(channel_filt(p[0])) for p in channels.values())
        maxnewchannels = max(len(channel_filt(p[1])) for p in channels.values())
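    # The loop below classifies each changed package as updated, downgraded, or
    # superseded by a higher-priority channel, based on its version, build number,
    # and channel priority.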
    for pkg in packages:
        # That's right. I'm using old-style string formatting to generate a
        # string with new-style string formatting.
        oldfmt[pkg] = f"{{pkg:<{maxpkg}}} {{vers[0]:<{maxoldver}}}"
        if maxoldchannels:
            oldfmt[pkg] += " {channels[0]:<%s}" % maxoldchannels
        if features[pkg][0]:
            oldfmt[pkg] += " [{features[0]:<%s}]" % maxoldfeatures

        lt = LinkType(linktypes.get(pkg, LinkType.hardlink))
        lt = "" if lt == LinkType.hardlink else (" (%s)" % lt)
        if pkg in removed or pkg in new:
            oldfmt[pkg] += lt
            continue

        newfmt[pkg] = "{vers[1]:<%s}" % maxnewver
        if maxnewchannels:
            newfmt[pkg] += " {channels[1]:<%s}" % maxnewchannels
        if features[pkg][1]:
            newfmt[pkg] += " [{features[1]:<%s}]" % maxnewfeatures
        newfmt[pkg] += lt

        P0 = records[pkg][0]
        P1 = records[pkg][1]
        pri0 = P0.get("priority")
        pri1 = P1.get("priority")
        if pri0 is None or pri1 is None:
            pri0 = pri1 = 1
        try:
            if str(P1.version) == "custom":
                newver = str(P0.version) != "custom"
                oldver = not newver
            else:
                # <= here means that unchanged packages will be put in updated
                N0 = normalized_version(P0.version)
                N1 = normalized_version(P1.version)
                newver = N0 < N1
                oldver = N0 > N1
        except TypeError:
            newver = P0.version < P1.version
            oldver = P0.version > P1.version
        oldbld = P0.build_number > P1.build_number
        newbld = P0.build_number < P1.build_number
        if (
            context.channel_priority
            and pri1 < pri0
            and (oldver or not newver and not newbld)
        ):
            channeled.add(pkg)
        elif newver:
            updated.add(pkg)
        elif pri1 < pri0 and (oldver or not newver and oldbld):
            channeled.add(pkg)
        elif oldver:
            downgraded.add(pkg)
        elif not oldbld:
            updated.add(pkg)
        else:
            downgraded.add(pkg)

    arrow = " --> "
    lead = " " * 4

    def format(s, pkg):
        chans = [channel_filt(c) for c in channels[pkg]]
        return lead + s.format(
            pkg=pkg + ":", vers=packages[pkg], channels=chans, features=features[pkg]
        )

    if new:
        print("\nThe following NEW packages will be INSTALLED:\n")
        for pkg in sorted(new):
            # New packages have been moved to the "old" column for display
            print(format(oldfmt[pkg], pkg))

    if removed:
        print("\nThe following packages will be REMOVED:\n")
        for pkg in sorted(removed):
            print(format(oldfmt[pkg], pkg))

    if updated:
        print("\nThe following packages will be UPDATED:\n")
        for pkg in sorted(updated):
            print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    if channeled:
        print(
            "\nThe following packages will be SUPERSEDED by a higher-priority channel:\n"
        )
        for pkg in sorted(channeled):
            print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    if downgraded:
        print("\nThe following packages will be DOWNGRADED:\n")
        for pkg in sorted(downgraded):
            print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    if empty and actions.get(SYMLINK_CONDA):
        print("\nThe following empty environments will be CREATED:\n")
        print(actions["PREFIX"])

    print()


def add_unlink(actions, dist):
    assert isinstance(dist, Dist)
    if UNLINK not in actions:
        actions[UNLINK] = []
    actions[UNLINK].append(dist)


# -------------------------------------------------------------------
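# add_defaults_to_specs() is now a no-op; it appears to be retained only so that
# existing callers (notably conda-build) can keep importing it.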
def add_defaults_to_specs(r, linked, specs, update=False, prefix=None):
    return


def _get_best_prec_match(precs):
    assert precs
    for chn in context.channels:
        channel_matcher = ChannelMatch(chn)
        prec_matches = tuple(
            prec for prec in precs if channel_matcher.match(prec.channel.name)
        )
        if prec_matches:
            break
    else:
        prec_matches = precs
        log.warn("Multiple packages found:%s", dashlist(prec_matches))
    return prec_matches[0]


def revert_actions(prefix, revision=-1, index=None):
    # TODO: If revision raise a revision error, should always go back to a safe revision
    h = History(prefix)
    # TODO: need a History method to get user-requested specs for revision number
    #       Doing a revert right now messes up user-requested spec history.
    #       Either need to wipe out history after ``revision``, or add the correct
    #       history information to the new entry about to be created.
    # TODO: This is wrong!!!!!!!!!!
    user_requested_specs = h.get_requested_specs_map().values()
    try:
        target_state = {
            MatchSpec.from_dist_str(dist_str) for dist_str in h.get_state(revision)
        }
    except IndexError:
        raise CondaIndexError("no such revision: %d" % revision)

    _supplement_index_with_prefix(index, prefix)

    not_found_in_index_specs = set()
    link_precs = set()
    for spec in target_state:
        precs = tuple(prec for prec in index.values() if spec.match(prec))
        if not precs:
            not_found_in_index_specs.add(spec)
        elif len(precs) > 1:
            link_precs.add(_get_best_prec_match(precs))
        else:
            link_precs.add(precs[0])

    if not_found_in_index_specs:
        raise PackagesNotFoundError(not_found_in_index_specs)

    final_precs = IndexedSet(PrefixGraph(link_precs).graph)  # toposort
    unlink_precs, link_precs = diff_for_unlink_link_precs(prefix, final_precs)
    stp = PrefixSetup(prefix, unlink_precs, link_precs, (), user_requested_specs, ())
    txn = UnlinkLinkTransaction(stp)
    return txn
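# Minimal usage sketch for revert_actions() above (illustrative only; assumes a
# package index built elsewhere and that the caller executes the transaction):
#     txn = revert_actions(prefix, revision=2, index=index)
#     txn.execute()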
# ---------------------------- Backwards compat for conda-build --------------------------


@time_recorder("execute_actions")
def execute_actions(actions, index, verbose=False):  # pragma: no cover
    plan = _plan_from_actions(actions, index)
    execute_instructions(plan, index, verbose)
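# _plan_from_actions() translates the legacy "actions" dict used above (keys such as
# FETCH, EXTRACT, UNLINK, LINK, plus PREFIX and an optional "op_order") into the flat
# list of (instruction, arg) tuples consumed by execute_instructions().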
def _plan_from_actions(actions, index):  # pragma: no cover
    from .instructions import ACTION_CODES, PREFIX, PRINT, PROGRESS, PROGRESS_COMMANDS

    if "op_order" in actions and actions["op_order"]:
        op_order = actions["op_order"]
    else:
        op_order = ACTION_CODES

    assert PREFIX in actions and actions[PREFIX]
    prefix = actions[PREFIX]
    plan = [("PREFIX", "%s" % prefix)]

    unlink_link_transaction = actions.get("UNLINKLINKTRANSACTION")
    if unlink_link_transaction:
        raise RuntimeError()
        # progressive_fetch_extract = actions.get('PROGRESSIVEFETCHEXTRACT')
        # if progressive_fetch_extract:
        #     plan.append((PROGRESSIVEFETCHEXTRACT, progressive_fetch_extract))
        # plan.append((UNLINKLINKTRANSACTION, unlink_link_transaction))
        # return plan

    axn = actions.get("ACTION") or None
    specs = actions.get("SPECS", [])

    log.debug(f"Adding plans for operations: {op_order}")
    for op in op_order:
        if op not in actions:
            log.trace(f"action {op} not in actions")
            continue
        if not actions[op]:
            log.trace(f"action {op} has None value")
            continue
        if "_" not in op:
            plan.append((PRINT, "%sing packages ..." % op.capitalize()))
        elif op.startswith("RM_"):
            plan.append(
                (PRINT, "Pruning %s packages from the cache ..." % op[3:].lower())
            )
        if op in PROGRESS_COMMANDS:
            plan.append((PROGRESS, "%d" % len(actions[op])))
        for arg in actions[op]:
            log.debug(f"appending value {arg} for action {op}")
            plan.append((op, arg))

    plan = _inject_UNLINKLINKTRANSACTION(plan, index, prefix, axn, specs)

    return plan


def _inject_UNLINKLINKTRANSACTION(plan, index, prefix, axn, specs):  # pragma: no cover
    from os.path import isdir

    from .core.link import PrefixSetup, UnlinkLinkTransaction
    from .core.package_cache_data import ProgressiveFetchExtract
    from .instructions import (
        LINK,
        PROGRESSIVEFETCHEXTRACT,
        UNLINK,
        UNLINKLINKTRANSACTION,
    )
    from .models.dist import Dist

    # this is only used for conda-build at this point
    first_unlink_link_idx = next(
        (q for q, p in enumerate(plan) if p[0] in (UNLINK, LINK)), -1
    )
    if first_unlink_link_idx >= 0:
        grouped_instructions = groupby(lambda x: x[0], plan)
        unlink_dists = tuple(Dist(d[1]) for d in grouped_instructions.get(UNLINK, ()))
        link_dists = tuple(Dist(d[1]) for d in grouped_instructions.get(LINK, ()))
        unlink_dists, link_dists = _handle_menuinst(unlink_dists, link_dists)

        if isdir(prefix):
            unlink_precs = tuple(index[d] for d in unlink_dists)
        else:
            # there's nothing to unlink in an environment that doesn't exist
            # this is a hack for what appears to be a logic error in conda-build
            # caught in tests/test_subpackages.py::test_subpackage_recipes[python_test_dep]
            unlink_precs = ()
        link_precs = tuple(index[d] for d in link_dists)

        pfe = ProgressiveFetchExtract(link_precs)
        pfe.prepare()

        stp = PrefixSetup(prefix, unlink_precs, link_precs, (), specs, ())
        plan.insert(
            first_unlink_link_idx, (UNLINKLINKTRANSACTION, UnlinkLinkTransaction(stp))
        )
        plan.insert(first_unlink_link_idx, (PROGRESSIVEFETCHEXTRACT, pfe))
    elif axn in ("INSTALL", "CREATE"):
        plan.insert(0, (UNLINKLINKTRANSACTION, (prefix, (), (), (), specs)))

    return plan


def _handle_menuinst(unlink_dists, link_dists):  # pragma: no cover
    from .common.compat import on_win

    if not on_win:
        return unlink_dists, link_dists

    # Always link/unlink menuinst first/last on windows in case a subsequent
    # package tries to import it to create/remove a shortcut

    # unlink
    menuinst_idx = next(
        (q for q, d in enumerate(unlink_dists) if d.name == "menuinst"), None
    )
    if menuinst_idx is not None:
        unlink_dists = (
            *unlink_dists[:menuinst_idx],
            *unlink_dists[menuinst_idx + 1 :],
            *unlink_dists[menuinst_idx : menuinst_idx + 1],
        )

    # link
    menuinst_idx = next(
        (q for q, d in enumerate(link_dists) if d.name == "menuinst"), None
    )
    if menuinst_idx is not None:
        link_dists = (
            *link_dists[menuinst_idx : menuinst_idx + 1],
            *link_dists[:menuinst_idx],
            *link_dists[menuinst_idx + 1 :],
        )

    return unlink_dists, link_dists
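# install_actions() runs the configured solver backend for the given specs and converts
# the resulting transaction back into a legacy actions dict of Dist objects; it is kept
# for conda-build, which still calls this old API.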
@time_recorder("install_actions")
def install_actions(
    prefix,
    index,
    specs,
    force=False,
    only_names=None,
    always_copy=False,
    pinned=True,
    update_deps=True,
    prune=False,
    channel_priority_map=None,
    is_update=False,
    minimal_hint=False,
):  # pragma: no cover
    # this is for conda-build
    with env_vars(
        {
            "CONDA_ALLOW_NON_CHANNEL_URLS": "true",
            "CONDA_SOLVER_IGNORE_TIMESTAMPS": "false",
        },
        stack_callback=stack_context_default,
    ):
        from os.path import basename

        from .models.channel import Channel
        from .models.dist import Dist

        if channel_priority_map:
            channel_names = IndexedSet(
                Channel(url).canonical_name for url in channel_priority_map
            )
            channels = IndexedSet(Channel(cn) for cn in channel_names)
            subdirs = IndexedSet(basename(url) for url in channel_priority_map)
        else:
            # a hack for when conda-build calls this function without giving channel_priority_map
            if LAST_CHANNEL_URLS:
                channel_priority_map = prioritize_channels(LAST_CHANNEL_URLS)
                channels = IndexedSet(Channel(url) for url in channel_priority_map)
                subdirs = (
                    IndexedSet(
                        subdir for subdir in (c.subdir for c in channels) if subdir
                    )
                    or context.subdirs
                )
            else:
                channels = subdirs = None

        specs = tuple(MatchSpec(spec) for spec in specs)

        from .core.prefix_data import PrefixData

        PrefixData._cache_.clear()

        solver_backend = context.plugin_manager.get_cached_solver_backend()
        solver = solver_backend(prefix, channels, subdirs, specs_to_add=specs)
        if index:
            solver._index = {prec: prec for prec in index.values()}
        txn = solver.solve_for_transaction(prune=prune, ignore_pinned=not pinned)
        prefix_setup = txn.prefix_setups[prefix]
        actions = get_blank_actions(prefix)
        actions["UNLINK"].extend(Dist(prec) for prec in prefix_setup.unlink_precs)
        actions["LINK"].extend(Dist(prec) for prec in prefix_setup.link_precs)
        return actions


def get_blank_actions(prefix):  # pragma: no cover
    from collections import defaultdict

    from .instructions import (
        CHECK_EXTRACT,
        CHECK_FETCH,
        EXTRACT,
        FETCH,
        LINK,
        PREFIX,
        RM_EXTRACTED,
        RM_FETCHED,
        SYMLINK_CONDA,
        UNLINK,
    )

    actions = defaultdict(list)
    actions[PREFIX] = prefix
    actions["op_order"] = (
        CHECK_FETCH,
        RM_FETCHED,
        FETCH,
        CHECK_EXTRACT,
        RM_EXTRACTED,
        EXTRACT,
        UNLINK,
        LINK,
        SYMLINK_CONDA,
    )
    return actions


@time_recorder("execute_plan")
def execute_plan(old_plan, index=None, verbose=False):  # pragma: no cover
    """Deprecated: This should use `conda.instructions.execute_instructions` instead."""
    plan = _update_old_plan(old_plan)
    execute_instructions(plan, index, verbose)


def execute_instructions(
    plan, index=None, verbose=False, _commands=None
):  # pragma: no cover
    """Execute the instructions in the plan.

    :param plan: A list of (instruction, arg) tuples
    :param index: The meta-data index
    :param verbose: verbose output
    :param _commands: (For testing only) dict mapping an instruction to an executable;
        if None, the default commands will be used
    """
    from .base.context import context
    from .instructions import PROGRESS_COMMANDS, commands
    from .models.dist import Dist

    if _commands is None:
        _commands = commands

    log.debug("executing plan %s", plan)

    state = {"i": None, "prefix": context.root_prefix, "index": index}

    for instruction, arg in plan:
        log.debug(" %s(%r)", instruction, arg)

        if state["i"] is not None and instruction in PROGRESS_COMMANDS:
            state["i"] += 1
            getLogger("progress.update").info((Dist(arg).dist_name, state["i"] - 1))
        cmd = _commands[instruction]

        if callable(cmd):
            cmd(state, arg)

        if (
            state["i"] is not None
            and instruction in PROGRESS_COMMANDS
            and state["maxval"] == state["i"]
        ):
            state["i"] = None
            getLogger("progress.stop").info(None)
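# _update_old_plan() parses the legacy text-based plan format, where each non-comment
# line has the form "INSTRUCTION arg" (e.g., illustratively, "FETCH <dist string>"),
# into the (instruction, arg) tuples expected by execute_instructions().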
def _update_old_plan(old_plan):  # pragma: no cover
    """
    Update an old plan object to work with
    `conda.instructions.execute_instructions`
    """
    plan = []
    for line in old_plan:
        if line.startswith("#"):
            continue
        if " " not in line:
            from .exceptions import ArgumentError

            raise ArgumentError(
                "The instruction '%s' takes at least one argument" % line
            )

        instruction, arg = line.split(" ", 1)
        plan.append((instruction, arg))
    return plan


if __name__ == "__main__":
    # for testing new revert_actions() only
    from pprint import pprint

    pprint(dict(revert_actions(sys.prefix, int(sys.argv[1]))))