wheelfile.py
  1. from __future__ import annotations
  2. import csv
  3. import hashlib
  4. import os.path
  5. import re
  6. import stat
  7. import time
  8. from collections import OrderedDict
  9. from io import StringIO, TextIOWrapper
  10. from zipfile import ZIP_DEFLATED, ZipFile, ZipInfo
  11. from wheel.cli import WheelError
  12. from wheel.util import log, urlsafe_b64decode, urlsafe_b64encode
# Non-greedy matching of an optional build number may be too clever (more
# invalid wheel filenames will match). Separate regex for .dist-info?
# Named groups: name/ver (combined as namever), optional build tag (must
# start with a digit per PEP 427), and the pyver-abi-plat compatibility tags.
WHEEL_INFO_RE = re.compile(
    r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]+?))(-(?P<build>\d[^\s-]*))?
-(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>\S+)\.whl$""",
    re.VERBOSE,
)

# Earliest timestamp representable in the ZIP format's DOS date fields.
MINIMUM_TIMESTAMP = 315532800  # 1980-01-01 00:00:00 UTC
  21. def get_zipinfo_datetime(timestamp=None):
  22. # Some applications need reproducible .whl files, but they can't do this without
  23. # forcing the timestamp of the individual ZipInfo objects. See issue #143.
  24. timestamp = int(os.environ.get("SOURCE_DATE_EPOCH", timestamp or time.time()))
  25. timestamp = max(timestamp, MINIMUM_TIMESTAMP)
  26. return time.gmtime(timestamp)[0:6]
  27. class WheelFile(ZipFile):
  28. """A ZipFile derivative class that also reads SHA-256 hashes from
  29. .dist-info/RECORD and checks any read files against those.
  30. """
  31. _default_algorithm = hashlib.sha256
  32. def __init__(self, file, mode="r", compression=ZIP_DEFLATED):
  33. basename = os.path.basename(file)
  34. self.parsed_filename = WHEEL_INFO_RE.match(basename)
  35. if not basename.endswith(".whl") or self.parsed_filename is None:
  36. raise WheelError(f"Bad wheel filename {basename!r}")
  37. ZipFile.__init__(self, file, mode, compression=compression, allowZip64=True)
  38. self.dist_info_path = "{}.dist-info".format(
  39. self.parsed_filename.group("namever")
  40. )
  41. self.record_path = self.dist_info_path + "/RECORD"
  42. self._file_hashes = OrderedDict()
  43. self._file_sizes = {}
  44. if mode == "r":
  45. # Ignore RECORD and any embedded wheel signatures
  46. self._file_hashes[self.record_path] = None, None
  47. self._file_hashes[self.record_path + ".jws"] = None, None
  48. self._file_hashes[self.record_path + ".p7s"] = None, None
  49. # Fill in the expected hashes by reading them from RECORD
  50. try:
  51. record = self.open(self.record_path)
  52. except KeyError:
  53. raise WheelError(f"Missing {self.record_path} file")
  54. with record:
  55. for line in csv.reader(
  56. TextIOWrapper(record, newline="", encoding="utf-8")
  57. ):
  58. path, hash_sum, size = line
  59. if not hash_sum:
  60. continue
  61. algorithm, hash_sum = hash_sum.split("=")
  62. try:
  63. hashlib.new(algorithm)
  64. except ValueError:
  65. raise WheelError(f"Unsupported hash algorithm: {algorithm}")
  66. if algorithm.lower() in {"md5", "sha1"}:
  67. raise WheelError(
  68. "Weak hash algorithm ({}) is not permitted by PEP "
  69. "427".format(algorithm)
  70. )
  71. self._file_hashes[path] = (
  72. algorithm,
  73. urlsafe_b64decode(hash_sum.encode("ascii")),
  74. )
  75. def open(self, name_or_info, mode="r", pwd=None):
  76. def _update_crc(newdata):
  77. eof = ef._eof
  78. update_crc_orig(newdata)
  79. running_hash.update(newdata)
  80. if eof and running_hash.digest() != expected_hash:
  81. raise WheelError(f"Hash mismatch for file '{ef_name}'")
  82. ef_name = (
  83. name_or_info.filename if isinstance(name_or_info, ZipInfo) else name_or_info
  84. )
  85. if (
  86. mode == "r"
  87. and not ef_name.endswith("/")
  88. and ef_name not in self._file_hashes
  89. ):
  90. raise WheelError(f"No hash found for file '{ef_name}'")
  91. ef = ZipFile.open(self, name_or_info, mode, pwd)
  92. if mode == "r" and not ef_name.endswith("/"):
  93. algorithm, expected_hash = self._file_hashes[ef_name]
  94. if expected_hash is not None:
  95. # Monkey patch the _update_crc method to also check for the hash from
  96. # RECORD
  97. running_hash = hashlib.new(algorithm)
  98. update_crc_orig, ef._update_crc = ef._update_crc, _update_crc
  99. return ef
  100. def write_files(self, base_dir):
  101. log.info(f"creating '{self.filename}' and adding '{base_dir}' to it")
  102. deferred = []
  103. for root, dirnames, filenames in os.walk(base_dir):
  104. # Sort the directory names so that `os.walk` will walk them in a
  105. # defined order on the next iteration.
  106. dirnames.sort()
  107. for name in sorted(filenames):
  108. path = os.path.normpath(os.path.join(root, name))
  109. if os.path.isfile(path):
  110. arcname = os.path.relpath(path, base_dir).replace(os.path.sep, "/")
  111. if arcname == self.record_path:
  112. pass
  113. elif root.endswith(".dist-info"):
  114. deferred.append((path, arcname))
  115. else:
  116. self.write(path, arcname)
  117. deferred.sort()
  118. for path, arcname in deferred:
  119. self.write(path, arcname)
  120. def write(self, filename, arcname=None, compress_type=None):
  121. with open(filename, "rb") as f:
  122. st = os.fstat(f.fileno())
  123. data = f.read()
  124. zinfo = ZipInfo(
  125. arcname or filename, date_time=get_zipinfo_datetime(st.st_mtime)
  126. )
  127. zinfo.external_attr = (stat.S_IMODE(st.st_mode) | stat.S_IFMT(st.st_mode)) << 16
  128. zinfo.compress_type = compress_type or self.compression
  129. self.writestr(zinfo, data, compress_type)
  130. def writestr(self, zinfo_or_arcname, data, compress_type=None):
  131. if isinstance(data, str):
  132. data = data.encode("utf-8")
  133. ZipFile.writestr(self, zinfo_or_arcname, data, compress_type)
  134. fname = (
  135. zinfo_or_arcname.filename
  136. if isinstance(zinfo_or_arcname, ZipInfo)
  137. else zinfo_or_arcname
  138. )
  139. log.info(f"adding '{fname}'")
  140. if fname != self.record_path:
  141. hash_ = self._default_algorithm(data)
  142. self._file_hashes[fname] = (
  143. hash_.name,
  144. urlsafe_b64encode(hash_.digest()).decode("ascii"),
  145. )
  146. self._file_sizes[fname] = len(data)
  147. def close(self):
  148. # Write RECORD
  149. if self.fp is not None and self.mode == "w" and self._file_hashes:
  150. data = StringIO()
  151. writer = csv.writer(data, delimiter=",", quotechar='"', lineterminator="\n")
  152. writer.writerows(
  153. (
  154. (fname, algorithm + "=" + hash_, self._file_sizes[fname])
  155. for fname, (algorithm, hash_) in self._file_hashes.items()
  156. )
  157. )
  158. writer.writerow((format(self.record_path), "", ""))
  159. zinfo = ZipInfo(self.record_path, date_time=get_zipinfo_datetime())
  160. zinfo.compress_type = self.compression
  161. zinfo.external_attr = 0o664 << 16
  162. self.writestr(zinfo, data.getvalue())
  163. ZipFile.close(self)