  1. """
  2. Read and write ZIP files.
  3. XXX references to utf-8 need further investigation.
  4. """
import binascii
import importlib.util
import io
import itertools
import os
import posixpath
import shutil
import stat
import struct
import sys
import threading
import time
import contextlib

try:
    import zlib # We may need its compression method
    crc32 = zlib.crc32
except ImportError:
    zlib = None
    crc32 = binascii.crc32

try:
    import bz2 # We may need its compression method
except ImportError:
    bz2 = None

try:
    import lzma # We may need its compression method
except ImportError:
    lzma = None

__all__ = ["BadZipFile", "BadZipfile", "error",
           "ZIP_STORED", "ZIP_DEFLATED", "ZIP_BZIP2", "ZIP_LZMA",
           "is_zipfile", "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile",
           "Path"]


class BadZipFile(Exception):
    pass


class LargeZipFile(Exception):
    """
    Raised when writing a zipfile, the zipfile requires ZIP64 extensions
    and those extensions are disabled.
    """

error = BadZipfile = BadZipFile      # Pre-3.2 compatibility names


ZIP64_LIMIT = (1 << 31) - 1
ZIP_FILECOUNT_LIMIT = (1 << 16) - 1
ZIP_MAX_COMMENT = (1 << 16) - 1

# constants for Zip file compression methods
ZIP_STORED = 0
ZIP_DEFLATED = 8
ZIP_BZIP2 = 12
ZIP_LZMA = 14
# Other ZIP compression methods not supported

DEFAULT_VERSION = 20
ZIP64_VERSION = 45
BZIP2_VERSION = 46
LZMA_VERSION = 63
# we recognize (but not necessarily support) all features up to that version
MAX_EXTRACT_VERSION = 63

# Below are some formats and associated data for reading/writing headers using
# the struct module.  The names and structures of headers/records are those used
# in the PKWARE description of the ZIP file format:
#     http://www.pkware.com/documents/casestudies/APPNOTE.TXT
# (URL valid as of January 2008)

# The "end of central directory" structure, magic number, size, and indices
# (section V.I in the format document)
structEndArchive = b"<4s4H2LH"
stringEndArchive = b"PK\005\006"
sizeEndCentDir = struct.calcsize(structEndArchive)

_ECD_SIGNATURE = 0
_ECD_DISK_NUMBER = 1
_ECD_DISK_START = 2
_ECD_ENTRIES_THIS_DISK = 3
_ECD_ENTRIES_TOTAL = 4
_ECD_SIZE = 5
_ECD_OFFSET = 6
_ECD_COMMENT_SIZE = 7
# These last two indices are not part of the structure as defined in the
# spec, but they are used internally by this module as a convenience
_ECD_COMMENT = 8
_ECD_LOCATION = 9

# The "central directory" structure, magic number, size, and indices
# of entries in the structure (section V.F in the format document)
structCentralDir = "<4s4B4HL2L5H2L"
stringCentralDir = b"PK\001\002"
sizeCentralDir = struct.calcsize(structCentralDir)

# indexes of entries in the central directory structure
_CD_SIGNATURE = 0
_CD_CREATE_VERSION = 1
_CD_CREATE_SYSTEM = 2
_CD_EXTRACT_VERSION = 3
_CD_EXTRACT_SYSTEM = 4
_CD_FLAG_BITS = 5
_CD_COMPRESS_TYPE = 6
_CD_TIME = 7
_CD_DATE = 8
_CD_CRC = 9
_CD_COMPRESSED_SIZE = 10
_CD_UNCOMPRESSED_SIZE = 11
_CD_FILENAME_LENGTH = 12
_CD_EXTRA_FIELD_LENGTH = 13
_CD_COMMENT_LENGTH = 14
_CD_DISK_NUMBER_START = 15
_CD_INTERNAL_FILE_ATTRIBUTES = 16
_CD_EXTERNAL_FILE_ATTRIBUTES = 17
_CD_LOCAL_HEADER_OFFSET = 18

# The "local file header" structure, magic number, size, and indices
# (section V.A in the format document)
structFileHeader = "<4s2B4HL2L2H"
stringFileHeader = b"PK\003\004"
sizeFileHeader = struct.calcsize(structFileHeader)

_FH_SIGNATURE = 0
_FH_EXTRACT_VERSION = 1
_FH_EXTRACT_SYSTEM = 2
_FH_GENERAL_PURPOSE_FLAG_BITS = 3
_FH_COMPRESSION_METHOD = 4
_FH_LAST_MOD_TIME = 5
_FH_LAST_MOD_DATE = 6
_FH_CRC = 7
_FH_COMPRESSED_SIZE = 8
_FH_UNCOMPRESSED_SIZE = 9
_FH_FILENAME_LENGTH = 10
_FH_EXTRA_FIELD_LENGTH = 11

# The "Zip64 end of central directory locator" structure, magic number, and size
structEndArchive64Locator = "<4sLQL"
stringEndArchive64Locator = b"PK\x06\x07"
sizeEndCentDir64Locator = struct.calcsize(structEndArchive64Locator)

# The "Zip64 end of central directory" record, magic number, size, and indices
# (section V.G in the format document)
structEndArchive64 = "<4sQ2H2L4Q"
stringEndArchive64 = b"PK\x06\x06"
sizeEndCentDir64 = struct.calcsize(structEndArchive64)

_CD64_SIGNATURE = 0
_CD64_DIRECTORY_RECSIZE = 1
_CD64_CREATE_VERSION = 2
_CD64_EXTRACT_VERSION = 3
_CD64_DISK_NUMBER = 4
_CD64_DISK_NUMBER_START = 5
_CD64_NUMBER_ENTRIES_THIS_DISK = 6
_CD64_NUMBER_ENTRIES_TOTAL = 7
_CD64_DIRECTORY_SIZE = 8
_CD64_OFFSET_START_CENTDIR = 9

_DD_SIGNATURE = 0x08074b50

_EXTRA_FIELD_STRUCT = struct.Struct('<HH')


def _strip_extra(extra, xids):
    # Remove Extra Fields with specified IDs.
    unpack = _EXTRA_FIELD_STRUCT.unpack
    modified = False
    buffer = []
    start = i = 0
    while i + 4 <= len(extra):
        xid, xlen = unpack(extra[i : i + 4])
        j = i + 4 + xlen
        if xid in xids:
            if i != start:
                buffer.append(extra[start : i])
            start = j
            modified = True
        i = j
    if not modified:
        return extra
    return b''.join(buffer)


def _check_zipfile(fp):
    try:
        if _EndRecData(fp):
            return True         # file has correct magic number
    except OSError:
        pass
    return False


def is_zipfile(filename):
    """Quickly see if a file is a ZIP file by checking the magic number.

    The filename argument may be a file or file-like object too.
    """
    result = False
    try:
        if hasattr(filename, "read"):
            result = _check_zipfile(fp=filename)
        else:
            with open(filename, "rb") as fp:
                result = _check_zipfile(fp)
    except OSError:
        pass
    return result
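# A hedged sketch of is_zipfile() (the paths below are hypothetical):
#
#   is_zipfile('archive.zip')             # True for a readable ZIP archive
#   is_zipfile('notes.txt')               # False
#   with open('archive.zip', 'rb') as f:
#       is_zipfile(f)                     # file-like objects work as well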
def _EndRecData64(fpin, offset, endrec):
    """
    Read the ZIP64 end-of-archive records and use that to update endrec
    """
    try:
        fpin.seek(offset - sizeEndCentDir64Locator, 2)
    except OSError:
        # If the seek fails, the file is not large enough to contain a ZIP64
        # end-of-archive record, so just return the end record we were given.
        return endrec

    data = fpin.read(sizeEndCentDir64Locator)
    if len(data) != sizeEndCentDir64Locator:
        return endrec
    sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data)
    if sig != stringEndArchive64Locator:
        return endrec

    if diskno != 0 or disks > 1:
        raise BadZipFile("zipfiles that span multiple disks are not supported")

    # Assume no 'zip64 extensible data'
    fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2)
    data = fpin.read(sizeEndCentDir64)
    if len(data) != sizeEndCentDir64:
        return endrec
    sig, sz, create_version, read_version, disk_num, disk_dir, \
        dircount, dircount2, dirsize, diroffset = \
        struct.unpack(structEndArchive64, data)
    if sig != stringEndArchive64:
        return endrec

    # Update the original endrec using data from the ZIP64 record
    endrec[_ECD_SIGNATURE] = sig
    endrec[_ECD_DISK_NUMBER] = disk_num
    endrec[_ECD_DISK_START] = disk_dir
    endrec[_ECD_ENTRIES_THIS_DISK] = dircount
    endrec[_ECD_ENTRIES_TOTAL] = dircount2
    endrec[_ECD_SIZE] = dirsize
    endrec[_ECD_OFFSET] = diroffset
    return endrec


def _EndRecData(fpin):
    """Return data from the "End of Central Directory" record, or None.

    The data is a list of the nine items in the ZIP "End of central dir"
    record followed by a tenth item, the file seek offset of this record."""

    # Determine file size
    fpin.seek(0, 2)
    filesize = fpin.tell()

    # Check to see if this is ZIP file with no archive comment (the
    # "end of central directory" structure should be the last item in the
    # file if this is the case).
    try:
        fpin.seek(-sizeEndCentDir, 2)
    except OSError:
        return None
    data = fpin.read()
    if (len(data) == sizeEndCentDir and
        data[0:4] == stringEndArchive and
        data[-2:] == b"\000\000"):
        # the signature is correct and there's no comment, unpack structure
        endrec = struct.unpack(structEndArchive, data)
        endrec = list(endrec)

        # Append a blank comment and record start offset
        endrec.append(b"")
        endrec.append(filesize - sizeEndCentDir)

        # Try to read the "Zip64 end of central directory" structure
        return _EndRecData64(fpin, -sizeEndCentDir, endrec)

    # Either this is not a ZIP file, or it is a ZIP file with an archive
    # comment.  Search the end of the file for the "end of central directory"
    # record signature. The comment is the last item in the ZIP file and may be
    # up to 64K long.  It is assumed that the "end of central directory" magic
    # number does not appear in the comment.
    maxCommentStart = max(filesize - (1 << 16) - sizeEndCentDir, 0)
    fpin.seek(maxCommentStart, 0)
    data = fpin.read()
    start = data.rfind(stringEndArchive)
    if start >= 0:
        # found the magic number; attempt to unpack and interpret
        recData = data[start:start+sizeEndCentDir]
        if len(recData) != sizeEndCentDir:
            # Zip file is corrupted.
            return None
        endrec = list(struct.unpack(structEndArchive, recData))
        commentSize = endrec[_ECD_COMMENT_SIZE]  # as claimed by the zip file
        comment = data[start+sizeEndCentDir:start+sizeEndCentDir+commentSize]
        endrec.append(comment)
        endrec.append(maxCommentStart + start)

        # Try to read the "Zip64 end of central directory" structure
        return _EndRecData64(fpin, maxCommentStart + start - filesize,
                             endrec)

    # Unable to find a valid end of central directory structure
    return None


class ZipInfo (object):
    """Class with attributes describing each file in the ZIP archive."""

    __slots__ = (
        'orig_filename',
        'filename',
        'date_time',
        'compress_type',
        '_compresslevel',
        'comment',
        'extra',
        'create_system',
        'create_version',
        'extract_version',
        'reserved',
        'flag_bits',
        'volume',
        'internal_attr',
        'external_attr',
        'header_offset',
        'CRC',
        'compress_size',
        'file_size',
        '_raw_time',
    )

    def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)):
        self.orig_filename = filename   # Original file name in archive

        # Terminate the file name at the first null byte.  Null bytes in file
        # names are used as tricks by viruses in archives.
        null_byte = filename.find(chr(0))
        if null_byte >= 0:
            filename = filename[0:null_byte]
        # This is used to ensure paths in generated ZIP files always use
        # forward slashes as the directory separator, as required by the
        # ZIP format specification.
        if os.sep != "/" and os.sep in filename:
            filename = filename.replace(os.sep, "/")

        self.filename = filename        # Normalized file name
        self.date_time = date_time      # year, month, day, hour, min, sec

        if date_time[0] < 1980:
            raise ValueError('ZIP does not support timestamps before 1980')

        # Standard values:
        self.compress_type = ZIP_STORED # Type of compression for the file
        self._compresslevel = None      # Level for the compressor
        self.comment = b""              # Comment for each file
        self.extra = b""                # ZIP extra data
        if sys.platform == 'win32':
            self.create_system = 0          # System which created ZIP archive
        else:
            # Assume everything else is unix-y
            self.create_system = 3          # System which created ZIP archive
        self.create_version = DEFAULT_VERSION  # Version which created ZIP archive
        self.extract_version = DEFAULT_VERSION # Version needed to extract archive
        self.reserved = 0               # Must be zero
        self.flag_bits = 0              # ZIP flag bits
        self.volume = 0                 # Volume number of file header
        self.internal_attr = 0          # Internal attributes
        self.external_attr = 0          # External file attributes
        self.compress_size = 0          # Size of the compressed file
        self.file_size = 0              # Size of the uncompressed file
        # Other attributes are set by class ZipFile:
        # header_offset         Byte offset to the file header
        # CRC                   CRC-32 of the uncompressed file

    def __repr__(self):
        result = ['<%s filename=%r' % (self.__class__.__name__, self.filename)]
        if self.compress_type != ZIP_STORED:
            result.append(' compress_type=%s' %
                          compressor_names.get(self.compress_type,
                                               self.compress_type))
        hi = self.external_attr >> 16
        lo = self.external_attr & 0xFFFF
        if hi:
            result.append(' filemode=%r' % stat.filemode(hi))
        if lo:
            result.append(' external_attr=%#x' % lo)
        isdir = self.is_dir()
        if not isdir or self.file_size:
            result.append(' file_size=%r' % self.file_size)
        if ((not isdir or self.compress_size) and
            (self.compress_type != ZIP_STORED or
             self.file_size != self.compress_size)):
            result.append(' compress_size=%r' % self.compress_size)
        result.append('>')
        return ''.join(result)

    def FileHeader(self, zip64=None):
        """Return the per-file header as a bytes object."""
        dt = self.date_time
        dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
        dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
        if self.flag_bits & 0x08:
            # Set these to zero because we write them after the file data
            CRC = compress_size = file_size = 0
        else:
            CRC = self.CRC
            compress_size = self.compress_size
            file_size = self.file_size

        extra = self.extra

        min_version = 0
        if zip64 is None:
            zip64 = file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT
        if zip64:
            fmt = '<HHQQ'
            extra = extra + struct.pack(fmt,
                                        1, struct.calcsize(fmt)-4, file_size, compress_size)
        if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT:
            if not zip64:
                raise LargeZipFile("Filesize would require ZIP64 extensions")
            # File is larger than what fits into a 4 byte integer,
            # fall back to the ZIP64 extension
            file_size = 0xffffffff
            compress_size = 0xffffffff
            min_version = ZIP64_VERSION

        if self.compress_type == ZIP_BZIP2:
            min_version = max(BZIP2_VERSION, min_version)
        elif self.compress_type == ZIP_LZMA:
            min_version = max(LZMA_VERSION, min_version)

        self.extract_version = max(min_version, self.extract_version)
        self.create_version = max(min_version, self.create_version)
        filename, flag_bits = self._encodeFilenameFlags()
        header = struct.pack(structFileHeader, stringFileHeader,
                             self.extract_version, self.reserved, flag_bits,
                             self.compress_type, dostime, dosdate, CRC,
                             compress_size, file_size,
                             len(filename), len(extra))
        return header + filename + extra

    def _encodeFilenameFlags(self):
        try:
            return self.filename.encode('ascii'), self.flag_bits
        except UnicodeEncodeError:
            return self.filename.encode('utf-8'), self.flag_bits | 0x800

    def _decodeExtra(self):
        # Try to decode the extra field.
        extra = self.extra
        unpack = struct.unpack
        while len(extra) >= 4:
            tp, ln = unpack('<HH', extra[:4])
            if ln+4 > len(extra):
                raise BadZipFile("Corrupt extra field %04x (size=%d)" % (tp, ln))
            if tp == 0x0001:
                data = extra[4:ln+4]
                # ZIP64 extension (large files and/or large archives)
                try:
                    if self.file_size in (0xFFFF_FFFF_FFFF_FFFF, 0xFFFF_FFFF):
                        field = "File size"
                        self.file_size, = unpack('<Q', data[:8])
                        data = data[8:]
                    if self.compress_size == 0xFFFF_FFFF:
                        field = "Compress size"
                        self.compress_size, = unpack('<Q', data[:8])
                        data = data[8:]
                    if self.header_offset == 0xFFFF_FFFF:
                        field = "Header offset"
                        self.header_offset, = unpack('<Q', data[:8])
                except struct.error:
                    raise BadZipFile(f"Corrupt zip64 extra field. "
                                     f"{field} not found.") from None

            extra = extra[ln+4:]

    @classmethod
    def from_file(cls, filename, arcname=None, *, strict_timestamps=True):
        """Construct an appropriate ZipInfo for a file on the filesystem.

        filename should be the path to a file or directory on the filesystem.

        arcname is the name which it will have within the archive (by default,
        this will be the same as filename, but without a drive letter and with
        leading path separators removed).
        """
        if isinstance(filename, os.PathLike):
            filename = os.fspath(filename)
        st = os.stat(filename)
        isdir = stat.S_ISDIR(st.st_mode)
        mtime = time.localtime(st.st_mtime)
        date_time = mtime[0:6]
        if not strict_timestamps and date_time[0] < 1980:
            date_time = (1980, 1, 1, 0, 0, 0)
        elif not strict_timestamps and date_time[0] > 2107:
            date_time = (2107, 12, 31, 23, 59, 59)
        # Create ZipInfo instance to store file information
        if arcname is None:
            arcname = filename
        arcname = os.path.normpath(os.path.splitdrive(arcname)[1])
        while arcname[0] in (os.sep, os.altsep):
            arcname = arcname[1:]
        if isdir:
            arcname += '/'
        zinfo = cls(arcname, date_time)
        zinfo.external_attr = (st.st_mode & 0xFFFF) << 16  # Unix attributes
        if isdir:
            zinfo.file_size = 0
            zinfo.external_attr |= 0x10  # MS-DOS directory flag
        else:
            zinfo.file_size = st.st_size

        return zinfo
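    # A hedged sketch of from_file() (the paths below are hypothetical):
    #
    #   zi = ZipInfo.from_file('data/report.csv', arcname='report.csv')
    #   zi.compress_type = ZIP_DEFLATED   # from_file() leaves ZIP_STORED set
    #   # The ZipInfo can then be passed to ZipFile.open(zi, 'w') or
    #   # ZipFile.writestr(zi, data) to control the metadata that is stored.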
    def is_dir(self):
        """Return True if this archive member is a directory."""
        return self.filename[-1] == '/'


# ZIP encryption uses the CRC32 one-byte primitive for scrambling some
# internal keys. We noticed that a direct implementation is faster than
# relying on binascii.crc32().

_crctable = None
def _gen_crc(crc):
    for j in range(8):
        if crc & 1:
            crc = (crc >> 1) ^ 0xEDB88320
        else:
            crc >>= 1
    return crc

# ZIP supports a password-based form of encryption. Even though known
# plaintext attacks have been found against it, it is still useful
# to be able to get data out of such a file.
#
# Usage:
#     zd = _ZipDecrypter(mypwd)
#     plain_bytes = zd(cypher_bytes)

def _ZipDecrypter(pwd):
    key0 = 305419896
    key1 = 591751049
    key2 = 878082192

    global _crctable
    if _crctable is None:
        _crctable = list(map(_gen_crc, range(256)))
    crctable = _crctable

    def crc32(ch, crc):
        """Compute the CRC32 primitive on one byte."""
        return (crc >> 8) ^ crctable[(crc ^ ch) & 0xFF]

    def update_keys(c):
        nonlocal key0, key1, key2
        key0 = crc32(c, key0)
        key1 = (key1 + (key0 & 0xFF)) & 0xFFFFFFFF
        key1 = (key1 * 134775813 + 1) & 0xFFFFFFFF
        key2 = crc32(key1 >> 24, key2)

    for p in pwd:
        update_keys(p)

    def decrypter(data):
        """Decrypt a bytes object."""
        result = bytearray()
        append = result.append
        for c in data:
            k = key2 | 2
            c ^= ((k * (k^1)) >> 8) & 0xFF
            update_keys(c)
            append(c)
        return bytes(result)

    return decrypter
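# At the ZipFile level the decrypter above is driven by the 'pwd' argument;
# a hedged sketch (archive name and password are hypothetical):
#
#   with ZipFile('secret.zip') as zf:
#       zf.setpassword(b'my-password')           # default for all members
#       data = zf.read('member.txt')
#       # or per call: zf.read('member.txt', pwd=b'my-password')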
class LZMACompressor:

    def __init__(self):
        self._comp = None

    def _init(self):
        props = lzma._encode_filter_properties({'id': lzma.FILTER_LZMA1})
        self._comp = lzma.LZMACompressor(lzma.FORMAT_RAW, filters=[
            lzma._decode_filter_properties(lzma.FILTER_LZMA1, props)
        ])
        return struct.pack('<BBH', 9, 4, len(props)) + props

    def compress(self, data):
        if self._comp is None:
            return self._init() + self._comp.compress(data)
        return self._comp.compress(data)

    def flush(self):
        if self._comp is None:
            return self._init() + self._comp.flush()
        return self._comp.flush()


class LZMADecompressor:

    def __init__(self):
        self._decomp = None
        self._unconsumed = b''
        self.eof = False

    def decompress(self, data):
        if self._decomp is None:
            self._unconsumed += data
            if len(self._unconsumed) <= 4:
                return b''
            psize, = struct.unpack('<H', self._unconsumed[2:4])
            if len(self._unconsumed) <= 4 + psize:
                return b''

            self._decomp = lzma.LZMADecompressor(lzma.FORMAT_RAW, filters=[
                lzma._decode_filter_properties(lzma.FILTER_LZMA1,
                                               self._unconsumed[4:4 + psize])
            ])
            data = self._unconsumed[4 + psize:]
            del self._unconsumed

        result = self._decomp.decompress(data)
        self.eof = self._decomp.eof
        return result


compressor_names = {
    0: 'store',
    1: 'shrink',
    2: 'reduce',
    3: 'reduce',
    4: 'reduce',
    5: 'reduce',
    6: 'implode',
    7: 'tokenize',
    8: 'deflate',
    9: 'deflate64',
    10: 'implode',
    12: 'bzip2',
    14: 'lzma',
    18: 'terse',
    19: 'lz77',
    97: 'wavpack',
    98: 'ppmd',
}

def _check_compression(compression):
    if compression == ZIP_STORED:
        pass
    elif compression == ZIP_DEFLATED:
        if not zlib:
            raise RuntimeError(
                "Compression requires the (missing) zlib module")
    elif compression == ZIP_BZIP2:
        if not bz2:
            raise RuntimeError(
                "Compression requires the (missing) bz2 module")
    elif compression == ZIP_LZMA:
        if not lzma:
            raise RuntimeError(
                "Compression requires the (missing) lzma module")
    else:
        raise NotImplementedError("That compression method is not supported")


def _get_compressor(compress_type, compresslevel=None):
    if compress_type == ZIP_DEFLATED:
        if compresslevel is not None:
            return zlib.compressobj(compresslevel, zlib.DEFLATED, -15)
        return zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15)
    elif compress_type == ZIP_BZIP2:
        if compresslevel is not None:
            return bz2.BZ2Compressor(compresslevel)
        return bz2.BZ2Compressor()
    # compresslevel is ignored for ZIP_LZMA
    elif compress_type == ZIP_LZMA:
        return LZMACompressor()
    else:
        return None


def _get_decompressor(compress_type):
    _check_compression(compress_type)
    if compress_type == ZIP_STORED:
        return None
    elif compress_type == ZIP_DEFLATED:
        return zlib.decompressobj(-15)
    elif compress_type == ZIP_BZIP2:
        return bz2.BZ2Decompressor()
    elif compress_type == ZIP_LZMA:
        return LZMADecompressor()
    else:
        descr = compressor_names.get(compress_type)
        if descr:
            raise NotImplementedError("compression type %d (%s)" % (compress_type, descr))
        else:
            raise NotImplementedError("compression type %d" % (compress_type,))


class _SharedFile:
    def __init__(self, file, pos, close, lock, writing):
        self._file = file
        self._pos = pos
        self._close = close
        self._lock = lock
        self._writing = writing
        self.seekable = file.seekable

    def tell(self):
        return self._pos

    def seek(self, offset, whence=0):
        with self._lock:
            if self._writing():
                raise ValueError("Can't reposition in the ZIP file while "
                                 "there is an open writing handle on it. "
                                 "Close the writing handle before trying to read.")
            self._file.seek(offset, whence)
            self._pos = self._file.tell()
            return self._pos

    def read(self, n=-1):
        with self._lock:
            if self._writing():
                raise ValueError("Can't read from the ZIP file while there "
                                 "is an open writing handle on it. "
                                 "Close the writing handle before trying to read.")
            self._file.seek(self._pos)
            data = self._file.read(n)
            self._pos = self._file.tell()
            return data

    def close(self):
        if self._file is not None:
            fileobj = self._file
            self._file = None
            self._close(fileobj)

# Provide the tell method for unseekable stream
class _Tellable:
    def __init__(self, fp):
        self.fp = fp
        self.offset = 0

    def write(self, data):
        n = self.fp.write(data)
        self.offset += n
        return n

    def tell(self):
        return self.offset

    def flush(self):
        self.fp.flush()

    def close(self):
        self.fp.close()


class ZipExtFile(io.BufferedIOBase):
    """File-like object for reading an archive member.
       Is returned by ZipFile.open().
    """

    # Max size supported by decompressor.
    MAX_N = 1 << 31 - 1

    # Read from compressed files in 4k blocks.
    MIN_READ_SIZE = 4096

    # Chunk size to read during seek
    MAX_SEEK_READ = 1 << 24

    def __init__(self, fileobj, mode, zipinfo, pwd=None,
                 close_fileobj=False):
        self._fileobj = fileobj
        self._pwd = pwd
        self._close_fileobj = close_fileobj

        self._compress_type = zipinfo.compress_type
        self._compress_left = zipinfo.compress_size
        self._left = zipinfo.file_size

        self._decompressor = _get_decompressor(self._compress_type)

        self._eof = False
        self._readbuffer = b''
        self._offset = 0

        self.newlines = None

        self.mode = mode
        self.name = zipinfo.filename

        if hasattr(zipinfo, 'CRC'):
            self._expected_crc = zipinfo.CRC
            self._running_crc = crc32(b'')
        else:
            self._expected_crc = None

        self._seekable = False
        try:
            if fileobj.seekable():
                self._orig_compress_start = fileobj.tell()
                self._orig_compress_size = zipinfo.compress_size
                self._orig_file_size = zipinfo.file_size
                self._orig_start_crc = self._running_crc
                self._seekable = True
        except AttributeError:
            pass

        self._decrypter = None
        if pwd:
            if zipinfo.flag_bits & 0x8:
                # compare against the file type from extended local headers
                check_byte = (zipinfo._raw_time >> 8) & 0xff
            else:
                # compare against the CRC otherwise
                check_byte = (zipinfo.CRC >> 24) & 0xff
            h = self._init_decrypter()
            if h != check_byte:
                raise RuntimeError("Bad password for file %r" % zipinfo.orig_filename)

    def _init_decrypter(self):
        self._decrypter = _ZipDecrypter(self._pwd)
        # The first 12 bytes in the cypher stream is an encryption header
        #  used to strengthen the algorithm. The first 11 bytes are
        #  completely random, while the 12th contains the MSB of the CRC,
        #  or the MSB of the file time depending on the header type
        #  and is used to check the correctness of the password.
        header = self._fileobj.read(12)
        self._compress_left -= 12
        return self._decrypter(header)[11]

    def __repr__(self):
        result = ['<%s.%s' % (self.__class__.__module__,
                              self.__class__.__qualname__)]
        if not self.closed:
            result.append(' name=%r mode=%r' % (self.name, self.mode))
            if self._compress_type != ZIP_STORED:
                result.append(' compress_type=%s' %
                              compressor_names.get(self._compress_type,
                                                   self._compress_type))
        else:
            result.append(' [closed]')
        result.append('>')
        return ''.join(result)

    def readline(self, limit=-1):
        """Read and return a line from the stream.

        If limit is specified, at most limit bytes will be read.
        """

        if limit < 0:
            # Shortcut common case - newline found in buffer.
            i = self._readbuffer.find(b'\n', self._offset) + 1
            if i > 0:
                line = self._readbuffer[self._offset: i]
                self._offset = i
                return line

        return io.BufferedIOBase.readline(self, limit)

    def peek(self, n=1):
        """Returns buffered bytes without advancing the position."""
        if n > len(self._readbuffer) - self._offset:
            chunk = self.read(n)
            if len(chunk) > self._offset:
                self._readbuffer = chunk + self._readbuffer[self._offset:]
                self._offset = 0
            else:
                self._offset -= len(chunk)

        # Return up to 512 bytes to reduce allocation overhead for tight loops.
        return self._readbuffer[self._offset: self._offset + 512]

    def readable(self):
        if self.closed:
            raise ValueError("I/O operation on closed file.")
        return True

    def read(self, n=-1):
        """Read and return up to n bytes.
        If the argument is omitted, None, or negative, data is read and returned until EOF is reached.
        """
        if self.closed:
            raise ValueError("read from closed file.")
        if n is None or n < 0:
            buf = self._readbuffer[self._offset:]
            self._readbuffer = b''
            self._offset = 0
            while not self._eof:
                buf += self._read1(self.MAX_N)
            return buf

        end = n + self._offset
        if end < len(self._readbuffer):
            buf = self._readbuffer[self._offset:end]
            self._offset = end
            return buf

        n = end - len(self._readbuffer)
        buf = self._readbuffer[self._offset:]
        self._readbuffer = b''
        self._offset = 0
        while n > 0 and not self._eof:
            data = self._read1(n)
            if n < len(data):
                self._readbuffer = data
                self._offset = n
                buf += data[:n]
                break
            buf += data
            n -= len(data)
        return buf

    def _update_crc(self, newdata):
        # Update the CRC using the given data.
        if self._expected_crc is None:
            # No need to compute the CRC if we don't have a reference value
            return
        self._running_crc = crc32(newdata, self._running_crc)
        # Check the CRC if we're at the end of the file
        if self._eof and self._running_crc != self._expected_crc:
            raise BadZipFile("Bad CRC-32 for file %r" % self.name)

    def read1(self, n):
        """Read up to n bytes with at most one read() system call."""

        if n is None or n < 0:
            buf = self._readbuffer[self._offset:]
            self._readbuffer = b''
            self._offset = 0
            while not self._eof:
                data = self._read1(self.MAX_N)
                if data:
                    buf += data
                    break
            return buf

        end = n + self._offset
        if end < len(self._readbuffer):
            buf = self._readbuffer[self._offset:end]
            self._offset = end
            return buf

        n = end - len(self._readbuffer)
        buf = self._readbuffer[self._offset:]
        self._readbuffer = b''
        self._offset = 0
        if n > 0:
            while not self._eof:
                data = self._read1(n)
                if n < len(data):
                    self._readbuffer = data
                    self._offset = n
                    buf += data[:n]
                    break
                if data:
                    buf += data
                    break
        return buf

    def _read1(self, n):
        # Read up to n compressed bytes with at most one read() system call,
        # decrypt and decompress them.
        if self._eof or n <= 0:
            return b''

        # Read from file.
        if self._compress_type == ZIP_DEFLATED:
            ## Handle unconsumed data.
            data = self._decompressor.unconsumed_tail
            if n > len(data):
                data += self._read2(n - len(data))
        else:
            data = self._read2(n)

        if self._compress_type == ZIP_STORED:
            self._eof = self._compress_left <= 0
        elif self._compress_type == ZIP_DEFLATED:
            n = max(n, self.MIN_READ_SIZE)
            data = self._decompressor.decompress(data, n)
            self._eof = (self._decompressor.eof or
                         self._compress_left <= 0 and
                         not self._decompressor.unconsumed_tail)
            if self._eof:
                data += self._decompressor.flush()
        else:
            data = self._decompressor.decompress(data)
            self._eof = self._decompressor.eof or self._compress_left <= 0

        data = data[:self._left]
        self._left -= len(data)
        if self._left <= 0:
            self._eof = True
        self._update_crc(data)
        return data

    def _read2(self, n):
        if self._compress_left <= 0:
            return b''

        n = max(n, self.MIN_READ_SIZE)
        n = min(n, self._compress_left)

        data = self._fileobj.read(n)
        self._compress_left -= len(data)
        if not data:
            raise EOFError

        if self._decrypter is not None:
            data = self._decrypter(data)
        return data

    def close(self):
        try:
            if self._close_fileobj:
                self._fileobj.close()
        finally:
            super().close()

    def seekable(self):
        if self.closed:
            raise ValueError("I/O operation on closed file.")
        return self._seekable

    def seek(self, offset, whence=0):
        if self.closed:
            raise ValueError("seek on closed file.")
        if not self._seekable:
            raise io.UnsupportedOperation("underlying stream is not seekable")
        curr_pos = self.tell()
        if whence == 0: # Seek from start of file
            new_pos = offset
        elif whence == 1: # Seek from current position
            new_pos = curr_pos + offset
        elif whence == 2: # Seek from EOF
            new_pos = self._orig_file_size + offset
        else:
            raise ValueError("whence must be os.SEEK_SET (0), "
                             "os.SEEK_CUR (1), or os.SEEK_END (2)")

        if new_pos > self._orig_file_size:
            new_pos = self._orig_file_size

        if new_pos < 0:
            new_pos = 0

        read_offset = new_pos - curr_pos
        buff_offset = read_offset + self._offset

        if buff_offset >= 0 and buff_offset < len(self._readbuffer):
            # Just move the _offset index if the new position is in the _readbuffer
            self._offset = buff_offset
            read_offset = 0
        elif read_offset < 0:
            # Position is before the current position. Reset the ZipExtFile
            self._fileobj.seek(self._orig_compress_start)
            self._running_crc = self._orig_start_crc
            self._compress_left = self._orig_compress_size
            self._left = self._orig_file_size
            self._readbuffer = b''
            self._offset = 0
            self._decompressor = _get_decompressor(self._compress_type)
            self._eof = False
            read_offset = new_pos
            if self._decrypter is not None:
                self._init_decrypter()

        while read_offset > 0:
            read_len = min(self.MAX_SEEK_READ, read_offset)
            self.read(read_len)
            read_offset -= read_len

        return self.tell()

    def tell(self):
        if self.closed:
            raise ValueError("tell on closed file.")
        if not self._seekable:
            raise io.UnsupportedOperation("underlying stream is not seekable")
        filepos = self._orig_file_size - self._left - len(self._readbuffer) + self._offset
        return filepos
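# A hedged reading sketch; ZipFile.open() returns a ZipExtFile
# (archive and member names are hypothetical):
#
#   with ZipFile('archive.zip') as zf:
#       with zf.open('logs/app.log') as f:   # mode 'r' is the default
#           first = f.readline()             # line-oriented reads work
#           f.seek(0)                        # seekable when the underlying file is
#           everything = f.read()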
class _ZipWriteFile(io.BufferedIOBase):
    def __init__(self, zf, zinfo, zip64):
        self._zinfo = zinfo
        self._zip64 = zip64
        self._zipfile = zf
        self._compressor = _get_compressor(zinfo.compress_type,
                                           zinfo._compresslevel)
        self._file_size = 0
        self._compress_size = 0
        self._crc = 0

    @property
    def _fileobj(self):
        return self._zipfile.fp

    def writable(self):
        return True

    def write(self, data):
        if self.closed:
            raise ValueError('I/O operation on closed file.')

        # Accept any data that supports the buffer protocol
        if isinstance(data, (bytes, bytearray)):
            nbytes = len(data)
        else:
            data = memoryview(data)
            nbytes = data.nbytes
        self._file_size += nbytes

        self._crc = crc32(data, self._crc)
        if self._compressor:
            data = self._compressor.compress(data)
            self._compress_size += len(data)
        self._fileobj.write(data)
        return nbytes

    def close(self):
        if self.closed:
            return
        try:
            super().close()
            # Flush any data from the compressor, and update header info
            if self._compressor:
                buf = self._compressor.flush()
                self._compress_size += len(buf)
                self._fileobj.write(buf)
                self._zinfo.compress_size = self._compress_size
            else:
                self._zinfo.compress_size = self._file_size
            self._zinfo.CRC = self._crc
            self._zinfo.file_size = self._file_size

            # Write updated header info
            if self._zinfo.flag_bits & 0x08:
                # Write CRC and file sizes after the file data
                fmt = '<LLQQ' if self._zip64 else '<LLLL'
                self._fileobj.write(struct.pack(fmt, _DD_SIGNATURE, self._zinfo.CRC,
                                                self._zinfo.compress_size, self._zinfo.file_size))
                self._zipfile.start_dir = self._fileobj.tell()
            else:
                if not self._zip64:
                    if self._file_size > ZIP64_LIMIT:
                        raise RuntimeError(
                            'File size unexpectedly exceeded ZIP64 limit')
                    if self._compress_size > ZIP64_LIMIT:
                        raise RuntimeError(
                            'Compressed size unexpectedly exceeded ZIP64 limit')
                # Seek backwards and write file header (which will now include
                # correct CRC and file sizes)

                # Preserve current position in file
                self._zipfile.start_dir = self._fileobj.tell()
                self._fileobj.seek(self._zinfo.header_offset)
                self._fileobj.write(self._zinfo.FileHeader(self._zip64))
                self._fileobj.seek(self._zipfile.start_dir)

            # Successfully written: Add file to our caches
            self._zipfile.filelist.append(self._zinfo)
            self._zipfile.NameToInfo[self._zinfo.filename] = self._zinfo
        finally:
            self._zipfile._writing = False
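# A hedged writing sketch; ZipFile.open(name, 'w') returns a _ZipWriteFile and
# the sizes and CRC are filled in when the handle is closed
# (the archive name, member name, and produce_chunks() are hypothetical):
#
#   with ZipFile('archive.zip', 'w') as zf:
#       with zf.open('big.bin', 'w', force_zip64=True) as w:
#           for chunk in produce_chunks():
#               w.write(chunk)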
class ZipFile:
    """ Class with methods to open, read, write, close, list zip files.

    z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=True,
                compresslevel=None)

    file: Either the path to the file, or a file-like object.
          If it is a path, the file will be opened and closed by ZipFile.
    mode: The mode can be either read 'r', write 'w', exclusive create 'x',
          or append 'a'.
    compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib),
                 ZIP_BZIP2 (requires bz2) or ZIP_LZMA (requires lzma).
    allowZip64: if True ZipFile will create files with ZIP64 extensions when
                needed, otherwise it will raise an exception when this would
                be necessary.
    compresslevel: None (default for the given compression type) or an integer
                   specifying the level to pass to the compressor.
                   When using ZIP_STORED or ZIP_LZMA this keyword has no effect.
                   When using ZIP_DEFLATED integers 0 through 9 are accepted.
                   When using ZIP_BZIP2 integers 1 through 9 are accepted.
    """
    fp = None                   # Set here since __del__ checks it
    _windows_illegal_name_trans_table = None

    def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=True,
                 compresslevel=None, *, strict_timestamps=True):
        """Open the ZIP file with mode read 'r', write 'w', exclusive create 'x',
        or append 'a'."""
        if mode not in ('r', 'w', 'x', 'a'):
            raise ValueError("ZipFile requires mode 'r', 'w', 'x', or 'a'")

        _check_compression(compression)

        self._allowZip64 = allowZip64
        self._didModify = False
        self.debug = 0  # Level of printing: 0 through 3
        self.NameToInfo = {}    # Find file info given name
        self.filelist = []      # List of ZipInfo instances for archive
        self.compression = compression  # Method of compression
        self.compresslevel = compresslevel
        self.mode = mode
        self.pwd = None
        self._comment = b''
        self._strict_timestamps = strict_timestamps

        # Check if we were passed a file-like object
        if isinstance(file, os.PathLike):
            file = os.fspath(file)
        if isinstance(file, str):
            # No, it's a filename
            self._filePassed = 0
            self.filename = file
            modeDict = {'r' : 'rb', 'w': 'w+b', 'x': 'x+b', 'a' : 'r+b',
                        'r+b': 'w+b', 'w+b': 'wb', 'x+b': 'xb'}
            filemode = modeDict[mode]
            while True:
                try:
                    self.fp = io.open(file, filemode)
                except OSError:
                    if filemode in modeDict:
                        filemode = modeDict[filemode]
                        continue
                    raise
                break
        else:
            self._filePassed = 1
            self.fp = file
            self.filename = getattr(file, 'name', None)
        self._fileRefCnt = 1
        self._lock = threading.RLock()
        self._seekable = True
        self._writing = False

        try:
            if mode == 'r':
                self._RealGetContents()
            elif mode in ('w', 'x'):
                # set the modified flag so central directory gets written
                # even if no files are added to the archive
                self._didModify = True
                try:
                    self.start_dir = self.fp.tell()
                except (AttributeError, OSError):
                    self.fp = _Tellable(self.fp)
                    self.start_dir = 0
                    self._seekable = False
                else:
                    # Some file-like objects can provide tell() but not seek()
                    try:
                        self.fp.seek(self.start_dir)
                    except (AttributeError, OSError):
                        self._seekable = False
            elif mode == 'a':
                try:
                    # See if file is a zip file
                    self._RealGetContents()
                    # seek to start of directory and overwrite
                    self.fp.seek(self.start_dir)
                except BadZipFile:
                    # file is not a zip file, just append
                    self.fp.seek(0, 2)

                    # set the modified flag so central directory gets written
                    # even if no files are added to the archive
                    self._didModify = True
                    self.start_dir = self.fp.tell()
            else:
                raise ValueError("Mode must be 'r', 'w', 'x', or 'a'")
        except:
            fp = self.fp
            self.fp = None
            self._fpclose(fp)
            raise

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def __repr__(self):
        result = ['<%s.%s' % (self.__class__.__module__,
                              self.__class__.__qualname__)]
        if self.fp is not None:
            if self._filePassed:
                result.append(' file=%r' % self.fp)
            elif self.filename is not None:
                result.append(' filename=%r' % self.filename)
            result.append(' mode=%r' % self.mode)
        else:
            result.append(' [closed]')
        result.append('>')
        return ''.join(result)

    def _RealGetContents(self):
        """Read in the table of contents for the ZIP file."""
        fp = self.fp
        try:
            endrec = _EndRecData(fp)
        except OSError:
            raise BadZipFile("File is not a zip file")
        if not endrec:
            raise BadZipFile("File is not a zip file")
        if self.debug > 1:
            print(endrec)
        size_cd = endrec[_ECD_SIZE]             # bytes in central directory
        offset_cd = endrec[_ECD_OFFSET]         # offset of central directory
        self._comment = endrec[_ECD_COMMENT]    # archive comment

        # "concat" is zero, unless zip was concatenated to another file
        concat = endrec[_ECD_LOCATION] - size_cd - offset_cd
        if endrec[_ECD_SIGNATURE] == stringEndArchive64:
            # If Zip64 extension structures are present, account for them
            concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator)

        if self.debug > 2:
            inferred = concat + offset_cd
            print("given, inferred, offset", offset_cd, inferred, concat)
        # self.start_dir:  Position of start of central directory
        self.start_dir = offset_cd + concat
        fp.seek(self.start_dir, 0)
        data = fp.read(size_cd)
        fp = io.BytesIO(data)
        total = 0
        while total < size_cd:
            centdir = fp.read(sizeCentralDir)
            if len(centdir) != sizeCentralDir:
                raise BadZipFile("Truncated central directory")
            centdir = struct.unpack(structCentralDir, centdir)
            if centdir[_CD_SIGNATURE] != stringCentralDir:
                raise BadZipFile("Bad magic number for central directory")
            if self.debug > 2:
                print(centdir)
            filename = fp.read(centdir[_CD_FILENAME_LENGTH])
            flags = centdir[5]
            if flags & 0x800:
                # UTF-8 file names extension
                filename = filename.decode('utf-8')
            else:
                # Historical ZIP filename encoding
                filename = filename.decode('cp437')
            # Create ZipInfo instance to store file information
            x = ZipInfo(filename)
            x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])
            x.comment = fp.read(centdir[_CD_COMMENT_LENGTH])
            x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET]
            (x.create_version, x.create_system, x.extract_version, x.reserved,
             x.flag_bits, x.compress_type, t, d,
             x.CRC, x.compress_size, x.file_size) = centdir[1:12]
            if x.extract_version > MAX_EXTRACT_VERSION:
                raise NotImplementedError("zip file version %.1f" %
                                          (x.extract_version / 10))
            x.volume, x.internal_attr, x.external_attr = centdir[15:18]
            # Convert date/time code to (year, month, day, hour, min, sec)
            x._raw_time = t
            x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F,
                            t>>11, (t>>5)&0x3F, (t&0x1F) * 2 )

            x._decodeExtra()
            x.header_offset = x.header_offset + concat
            self.filelist.append(x)
            self.NameToInfo[x.filename] = x

            # update total bytes read from central directory
            total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH]
                     + centdir[_CD_EXTRA_FIELD_LENGTH]
                     + centdir[_CD_COMMENT_LENGTH])

        if self.debug > 2:
            print("total", total)

    def namelist(self):
        """Return a list of file names in the archive."""
        return [data.filename for data in self.filelist]

    def infolist(self):
        """Return a list of class ZipInfo instances for files in the
        archive."""
        return self.filelist

    def printdir(self, file=None):
        """Print a table of contents for the zip file."""
        print("%-46s %19s %12s" % ("File Name", "Modified    ", "Size"),
              file=file)
        for zinfo in self.filelist:
            date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6]
            print("%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size),
                  file=file)

    def testzip(self):
        """Read all the files and check the CRC."""
        chunk_size = 2 ** 20
        for zinfo in self.filelist:
            try:
                # Read by chunks, to avoid an OverflowError or a
                # MemoryError with very large embedded files.
                with self.open(zinfo.filename, "r") as f:
                    while f.read(chunk_size):     # Check CRC-32
                        pass
            except BadZipFile:
                return zinfo.filename
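    # A hedged check sketch for testzip() (the archive path is hypothetical):
    #
    #   with ZipFile('archive.zip') as zf:
    #       bad = zf.testzip()      # first corrupt member name, or None
    #       if bad is not None:
    #           print('corrupt member:', bad)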
  1238. def getinfo(self, name):
  1239. """Return the instance of ZipInfo given 'name'."""
  1240. info = self.NameToInfo.get(name)
  1241. if info is None:
  1242. raise KeyError(
  1243. 'There is no item named %r in the archive' % name)
  1244. return info
  1245. def setpassword(self, pwd):
  1246. """Set default password for encrypted files."""
  1247. if pwd and not isinstance(pwd, bytes):
  1248. raise TypeError("pwd: expected bytes, got %s" % type(pwd).__name__)
  1249. if pwd:
  1250. self.pwd = pwd
  1251. else:
  1252. self.pwd = None

    @property
    def comment(self):
        """The comment text associated with the ZIP file."""
        return self._comment

    @comment.setter
    def comment(self, comment):
        if not isinstance(comment, bytes):
            raise TypeError("comment: expected bytes, got %s" % type(comment).__name__)
        # check for valid comment length
        if len(comment) > ZIP_MAX_COMMENT:
            import warnings
            warnings.warn('Archive comment is too long; truncating to %d bytes'
                          % ZIP_MAX_COMMENT, stacklevel=2)
            comment = comment[:ZIP_MAX_COMMENT]
        self._comment = comment
        self._didModify = True

    def read(self, name, pwd=None):
        """Return file bytes for name."""
        with self.open(name, "r", pwd) as fp:
            return fp.read()
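
    # Illustrative sketch: read() is a convenience wrapper around open();
    # the archive and member names below are placeholders.
    #
    #   with ZipFile('archive.zip') as zf:
    #       raw = zf.read('docs/readme.txt')    # returns bytes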

    def open(self, name, mode="r", pwd=None, *, force_zip64=False):
        """Return file-like object for 'name'.

        name is a string for the file name within the ZIP file, or a ZipInfo
        object.

        mode should be 'r' to read a file already in the ZIP file, or 'w' to
        write to a file newly added to the archive.

        pwd is the password to decrypt files (only used for reading).

        When writing, if the file size is not known in advance but may exceed
        2 GiB, pass force_zip64 to use the ZIP64 format, which can handle large
        files.  If the size is known in advance, it is best to pass a ZipInfo
        instance for name, with zinfo.file_size set.
        """
        if mode not in {"r", "w"}:
            raise ValueError('open() requires mode "r" or "w"')
        if pwd and not isinstance(pwd, bytes):
            raise TypeError("pwd: expected bytes, got %s" % type(pwd).__name__)
        if pwd and (mode == "w"):
            raise ValueError("pwd is only supported for reading files")
        if not self.fp:
            raise ValueError(
                "Attempt to use ZIP archive that was already closed")

        # Make sure we have an info object
        if isinstance(name, ZipInfo):
            # 'name' is already an info object
            zinfo = name
        elif mode == 'w':
            zinfo = ZipInfo(name)
            zinfo.compress_type = self.compression
            zinfo._compresslevel = self.compresslevel
        else:
            # Get info object for name
            zinfo = self.getinfo(name)

        if mode == 'w':
            return self._open_to_write(zinfo, force_zip64=force_zip64)

        if self._writing:
            raise ValueError("Can't read from the ZIP file while there "
                             "is an open writing handle on it. "
                             "Close the writing handle before trying to read.")

        # Open for reading:
        self._fileRefCnt += 1
        zef_file = _SharedFile(self.fp, zinfo.header_offset,
                               self._fpclose, self._lock, lambda: self._writing)
        try:
            # Skip the file header:
            fheader = zef_file.read(sizeFileHeader)
            if len(fheader) != sizeFileHeader:
                raise BadZipFile("Truncated file header")
            fheader = struct.unpack(structFileHeader, fheader)
            if fheader[_FH_SIGNATURE] != stringFileHeader:
                raise BadZipFile("Bad magic number for file header")

            fname = zef_file.read(fheader[_FH_FILENAME_LENGTH])
            if fheader[_FH_EXTRA_FIELD_LENGTH]:
                zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])

            if zinfo.flag_bits & 0x20:
                # Zip 2.7: compressed patched data
                raise NotImplementedError("compressed patched data (flag bit 5)")

            if zinfo.flag_bits & 0x40:
                # strong encryption
                raise NotImplementedError("strong encryption (flag bit 6)")

            if fheader[_FH_GENERAL_PURPOSE_FLAG_BITS] & 0x800:
                # UTF-8 filename
                fname_str = fname.decode("utf-8")
            else:
                fname_str = fname.decode("cp437")

            if fname_str != zinfo.orig_filename:
                raise BadZipFile(
                    'File name in directory %r and header %r differ.'
                    % (zinfo.orig_filename, fname))

            # check for encrypted flag & handle password
            is_encrypted = zinfo.flag_bits & 0x1
            if is_encrypted:
                if not pwd:
                    pwd = self.pwd
                if not pwd:
                    raise RuntimeError("File %r is encrypted, password "
                                       "required for extraction" % name)
            else:
                pwd = None

            return ZipExtFile(zef_file, mode, zinfo, pwd, True)
        except:
            zef_file.close()
            raise
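
    # Illustrative sketch (placeholder names): open() returns a file-like
    # object, so members can be streamed in mode 'r' or written in mode 'w'
    # without holding the whole payload in memory.
    #
    #   with ZipFile('archive.zip', 'a') as zf:
    #       with zf.open('incoming/log.txt', 'w') as dst:
    #           dst.write(b'line one\n')
    #       with zf.open('incoming/log.txt') as src:
    #           for line in src:
    #               handle(line)                # 'handle' is hypothetical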

    def _open_to_write(self, zinfo, force_zip64=False):
        if force_zip64 and not self._allowZip64:
            raise ValueError(
                "force_zip64 is True, but allowZip64 was False when opening "
                "the ZIP file."
            )
        if self._writing:
            raise ValueError("Can't write to the ZIP file while there is "
                             "another write handle open on it. "
                             "Close the first handle before opening another.")

        # Size and CRC are overwritten with correct data after processing the file
        zinfo.compress_size = 0
        zinfo.CRC = 0
        zinfo.flag_bits = 0x00
        if zinfo.compress_type == ZIP_LZMA:
            # Compressed data includes an end-of-stream (EOS) marker
            zinfo.flag_bits |= 0x02
        if not self._seekable:
            zinfo.flag_bits |= 0x08

        if not zinfo.external_attr:
            zinfo.external_attr = 0o600 << 16  # permissions: ?rw-------

        # Compressed size can be larger than uncompressed size
        zip64 = self._allowZip64 and \
                (force_zip64 or zinfo.file_size * 1.05 > ZIP64_LIMIT)

        if self._seekable:
            self.fp.seek(self.start_dir)
        zinfo.header_offset = self.fp.tell()

        self._writecheck(zinfo)
        self._didModify = True

        self.fp.write(zinfo.FileHeader(zip64))

        self._writing = True
        return _ZipWriteFile(self, zinfo, zip64)

    def extract(self, member, path=None, pwd=None):
        """Extract a member from the archive to the current working directory,
           using its full name. Its file information is extracted as accurately
           as possible. `member' may be a filename or a ZipInfo object. You can
           specify a different directory using `path'.
        """
        if path is None:
            path = os.getcwd()
        else:
            path = os.fspath(path)

        return self._extract_member(member, path, pwd)

    def extractall(self, path=None, members=None, pwd=None):
        """Extract all members from the archive to the current working
           directory. `path' specifies a different directory to extract to.
           `members' is optional and must be a subset of the list returned
           by namelist().
        """
        if members is None:
            members = self.namelist()

        if path is None:
            path = os.getcwd()
        else:
            path = os.fspath(path)

        for zipinfo in members:
            self._extract_member(zipinfo, path, pwd)
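
    # Illustrative sketch: extractall() accepts an optional subset of names
    # from namelist(); the archive name and output directory are placeholders.
    #
    #   with ZipFile('archive.zip') as zf:
    #       wanted = [n for n in zf.namelist() if n.endswith('.txt')]
    #       zf.extractall(path='out_dir', members=wanted)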

    @classmethod
    def _sanitize_windows_name(cls, arcname, pathsep):
        """Replace bad characters and remove trailing dots from parts."""
        table = cls._windows_illegal_name_trans_table
        if not table:
            illegal = ':<>|"?*'
            table = str.maketrans(illegal, '_' * len(illegal))
            cls._windows_illegal_name_trans_table = table
        arcname = arcname.translate(table)
        # remove trailing dots
        arcname = (x.rstrip('.') for x in arcname.split(pathsep))
        # rejoin, removing empty parts.
        arcname = pathsep.join(x for x in arcname if x)
        return arcname

    def _extract_member(self, member, targetpath, pwd):
        """Extract the ZipInfo object 'member' to a physical
           file on the path targetpath.
        """
        if not isinstance(member, ZipInfo):
            member = self.getinfo(member)

        # build the destination pathname, replacing
        # forward slashes to platform specific separators.
        arcname = member.filename.replace('/', os.path.sep)

        if os.path.altsep:
            arcname = arcname.replace(os.path.altsep, os.path.sep)
        # interpret absolute pathname as relative, remove drive letter or
        # UNC path, redundant separators, "." and ".." components.
        arcname = os.path.splitdrive(arcname)[1]
        invalid_path_parts = ('', os.path.curdir, os.path.pardir)
        arcname = os.path.sep.join(x for x in arcname.split(os.path.sep)
                                   if x not in invalid_path_parts)
        if os.path.sep == '\\':
            # filter illegal characters on Windows
            arcname = self._sanitize_windows_name(arcname, os.path.sep)

        targetpath = os.path.join(targetpath, arcname)
        targetpath = os.path.normpath(targetpath)

        # Create all upper directories if necessary.
        upperdirs = os.path.dirname(targetpath)
        if upperdirs and not os.path.exists(upperdirs):
            os.makedirs(upperdirs)

        if member.is_dir():
            if not os.path.isdir(targetpath):
                os.mkdir(targetpath)
            return targetpath

        with self.open(member, pwd=pwd) as source, \
             open(targetpath, "wb") as target:
            shutil.copyfileobj(source, target)

        return targetpath

    def _writecheck(self, zinfo):
        """Check for errors before writing a file to the archive."""
        if zinfo.filename in self.NameToInfo:
            import warnings
            warnings.warn('Duplicate name: %r' % zinfo.filename, stacklevel=3)
        if self.mode not in ('w', 'x', 'a'):
            raise ValueError("write() requires mode 'w', 'x', or 'a'")
        if not self.fp:
            raise ValueError(
                "Attempt to write ZIP archive that was already closed")
        _check_compression(zinfo.compress_type)
        if not self._allowZip64:
            requires_zip64 = None
            if len(self.filelist) >= ZIP_FILECOUNT_LIMIT:
                requires_zip64 = "Files count"
            elif zinfo.file_size > ZIP64_LIMIT:
                requires_zip64 = "Filesize"
            elif zinfo.header_offset > ZIP64_LIMIT:
                requires_zip64 = "Zipfile size"
            if requires_zip64:
                raise LargeZipFile(requires_zip64 +
                                   " would require ZIP64 extensions")

    def write(self, filename, arcname=None,
              compress_type=None, compresslevel=None):
        """Put the bytes from filename into the archive under the name
        arcname."""
        if not self.fp:
            raise ValueError(
                "Attempt to write to ZIP archive that was already closed")
        if self._writing:
            raise ValueError(
                "Can't write to ZIP archive while an open writing handle exists"
            )

        zinfo = ZipInfo.from_file(filename, arcname,
                                  strict_timestamps=self._strict_timestamps)

        if zinfo.is_dir():
            zinfo.compress_size = 0
            zinfo.CRC = 0
        else:
            if compress_type is not None:
                zinfo.compress_type = compress_type
            else:
                zinfo.compress_type = self.compression

            if compresslevel is not None:
                zinfo._compresslevel = compresslevel
            else:
                zinfo._compresslevel = self.compresslevel

        if zinfo.is_dir():
            with self._lock:
                if self._seekable:
                    self.fp.seek(self.start_dir)
                zinfo.header_offset = self.fp.tell()  # Start of header bytes
                if zinfo.compress_type == ZIP_LZMA:
                    # Compressed data includes an end-of-stream (EOS) marker
                    zinfo.flag_bits |= 0x02

                self._writecheck(zinfo)
                self._didModify = True

                self.filelist.append(zinfo)
                self.NameToInfo[zinfo.filename] = zinfo
                self.fp.write(zinfo.FileHeader(False))
                self.start_dir = self.fp.tell()
        else:
            with open(filename, "rb") as src, self.open(zinfo, 'w') as dest:
                shutil.copyfileobj(src, dest, 1024*8)
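
    # Illustrative sketch (placeholder paths): write() copies a file from disk
    # into the archive, optionally overriding the archive name and the
    # per-member compression settings.
    #
    #   with ZipFile('backup.zip', 'w') as zf:
    #       zf.write('data/report.csv', arcname='report.csv',
    #                compress_type=ZIP_DEFLATED, compresslevel=9)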

    def writestr(self, zinfo_or_arcname, data,
                 compress_type=None, compresslevel=None):
        """Write a file into the archive.  The contents is 'data', which
        may be either a 'str' or a 'bytes' instance; if it is a 'str',
        it is encoded as UTF-8 first.
        'zinfo_or_arcname' is either a ZipInfo instance or
        the name of the file in the archive."""
        if isinstance(data, str):
            data = data.encode("utf-8")
        if not isinstance(zinfo_or_arcname, ZipInfo):
            zinfo = ZipInfo(filename=zinfo_or_arcname,
                            date_time=time.localtime(time.time())[:6])
            zinfo.compress_type = self.compression
            zinfo._compresslevel = self.compresslevel
            if zinfo.filename[-1] == '/':
                zinfo.external_attr = 0o40775 << 16   # drwxrwxr-x
                zinfo.external_attr |= 0x10           # MS-DOS directory flag
            else:
                zinfo.external_attr = 0o600 << 16     # ?rw-------
        else:
            zinfo = zinfo_or_arcname

        if not self.fp:
            raise ValueError(
                "Attempt to write to ZIP archive that was already closed")
        if self._writing:
            raise ValueError(
                "Can't write to ZIP archive while an open writing handle exists."
            )

        if compress_type is not None:
            zinfo.compress_type = compress_type

        if compresslevel is not None:
            zinfo._compresslevel = compresslevel

        zinfo.file_size = len(data)            # Uncompressed size
        with self._lock:
            with self.open(zinfo, mode='w') as dest:
                dest.write(data)
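
    # Illustrative sketch: writestr() accepts str (encoded as UTF-8) or bytes,
    # so in-memory data can be added without a temporary file. The archive and
    # member names are placeholders.
    #
    #   with ZipFile('generated.zip', 'w') as zf:
    #       zf.writestr('meta/info.json', '{"ok": true}')
    #       zf.writestr('raw.bin', b'\x00\x01\x02')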

    def __del__(self):
        """Call the "close()" method in case the user forgot."""
        self.close()

    def close(self):
        """Close the file, and for mode 'w', 'x' and 'a' write the ending
        records."""
        if self.fp is None:
            return

        if self._writing:
            raise ValueError("Can't close the ZIP file while there is "
                             "an open writing handle on it. "
                             "Close the writing handle before closing the zip.")

        try:
            if self.mode in ('w', 'x', 'a') and self._didModify:  # write ending records
                with self._lock:
                    if self._seekable:
                        self.fp.seek(self.start_dir)
                    self._write_end_record()
        finally:
            fp = self.fp
            self.fp = None
            self._fpclose(fp)

    def _write_end_record(self):
        for zinfo in self.filelist:         # write central directory
            dt = zinfo.date_time
            dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
            dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
            extra = []
            if zinfo.file_size > ZIP64_LIMIT \
               or zinfo.compress_size > ZIP64_LIMIT:
                extra.append(zinfo.file_size)
                extra.append(zinfo.compress_size)
                file_size = 0xffffffff
                compress_size = 0xffffffff
            else:
                file_size = zinfo.file_size
                compress_size = zinfo.compress_size

            if zinfo.header_offset > ZIP64_LIMIT:
                extra.append(zinfo.header_offset)
                header_offset = 0xffffffff
            else:
                header_offset = zinfo.header_offset

            extra_data = zinfo.extra
            min_version = 0
            if extra:
                # Append a ZIP64 field to the extra's
                extra_data = _strip_extra(extra_data, (1,))
                extra_data = struct.pack(
                    '<HH' + 'Q'*len(extra),
                    1, 8*len(extra), *extra) + extra_data

                min_version = ZIP64_VERSION

            if zinfo.compress_type == ZIP_BZIP2:
                min_version = max(BZIP2_VERSION, min_version)
            elif zinfo.compress_type == ZIP_LZMA:
                min_version = max(LZMA_VERSION, min_version)

            extract_version = max(min_version, zinfo.extract_version)
            create_version = max(min_version, zinfo.create_version)
            filename, flag_bits = zinfo._encodeFilenameFlags()
            centdir = struct.pack(structCentralDir,
                                  stringCentralDir, create_version,
                                  zinfo.create_system, extract_version, zinfo.reserved,
                                  flag_bits, zinfo.compress_type, dostime, dosdate,
                                  zinfo.CRC, compress_size, file_size,
                                  len(filename), len(extra_data), len(zinfo.comment),
                                  0, zinfo.internal_attr, zinfo.external_attr,
                                  header_offset)
            self.fp.write(centdir)
            self.fp.write(filename)
            self.fp.write(extra_data)
            self.fp.write(zinfo.comment)

        pos2 = self.fp.tell()
        # Write end-of-zip-archive record
        centDirCount = len(self.filelist)
        centDirSize = pos2 - self.start_dir
        centDirOffset = self.start_dir
        requires_zip64 = None
        if centDirCount > ZIP_FILECOUNT_LIMIT:
            requires_zip64 = "Files count"
        elif centDirOffset > ZIP64_LIMIT:
            requires_zip64 = "Central directory offset"
        elif centDirSize > ZIP64_LIMIT:
            requires_zip64 = "Central directory size"
        if requires_zip64:
            # Need to write the ZIP64 end-of-archive records
            if not self._allowZip64:
                raise LargeZipFile(requires_zip64 +
                                   " would require ZIP64 extensions")
            zip64endrec = struct.pack(
                structEndArchive64, stringEndArchive64,
                44, 45, 45, 0, 0, centDirCount, centDirCount,
                centDirSize, centDirOffset)
            self.fp.write(zip64endrec)

            zip64locrec = struct.pack(
                structEndArchive64Locator,
                stringEndArchive64Locator, 0, pos2, 1)
            self.fp.write(zip64locrec)
            centDirCount = min(centDirCount, 0xFFFF)
            centDirSize = min(centDirSize, 0xFFFFFFFF)
            centDirOffset = min(centDirOffset, 0xFFFFFFFF)

        endrec = struct.pack(structEndArchive, stringEndArchive,
                             0, 0, centDirCount, centDirCount,
                             centDirSize, centDirOffset, len(self._comment))
        self.fp.write(endrec)
        self.fp.write(self._comment)
        if self.mode == "a":
            self.fp.truncate()
        self.fp.flush()

    def _fpclose(self, fp):
        assert self._fileRefCnt > 0
        self._fileRefCnt -= 1
        if not self._fileRefCnt and not self._filePassed:
            fp.close()


class PyZipFile(ZipFile):
    """Class to create ZIP archives with Python library files and packages."""

    def __init__(self, file, mode="r", compression=ZIP_STORED,
                 allowZip64=True, optimize=-1):
        ZipFile.__init__(self, file, mode=mode, compression=compression,
                         allowZip64=allowZip64)
        self._optimize = optimize

    def writepy(self, pathname, basename="", filterfunc=None):
        """Add all files from "pathname" to the ZIP archive.

        If pathname is a package directory, search the directory and
        all package subdirectories recursively for all *.py and enter
        the modules into the archive.  If pathname is a plain
        directory, listdir *.py and enter all modules.  Else, pathname
        must be a Python *.py file and the module will be put into the
        archive.  Added modules are always module.pyc.
        This method will compile the module.py into module.pyc if
        necessary.
        If filterfunc(pathname) is given, it is called for every path that
        would be added; when it returns a false value, the file or directory
        is skipped.
        """
        pathname = os.fspath(pathname)
        if filterfunc and not filterfunc(pathname):
            if self.debug:
                label = 'path' if os.path.isdir(pathname) else 'file'
                print('%s %r skipped by filterfunc' % (label, pathname))
            return
        dir, name = os.path.split(pathname)
        if os.path.isdir(pathname):
            initname = os.path.join(pathname, "__init__.py")
            if os.path.isfile(initname):
                # This is a package directory, add it
                if basename:
                    basename = "%s/%s" % (basename, name)
                else:
                    basename = name
                if self.debug:
                    print("Adding package in", pathname, "as", basename)
                fname, arcname = self._get_codename(initname[0:-3], basename)
                if self.debug:
                    print("Adding", arcname)
                self.write(fname, arcname)
                dirlist = sorted(os.listdir(pathname))
                dirlist.remove("__init__.py")
                # Add all *.py files and package subdirectories
                for filename in dirlist:
                    path = os.path.join(pathname, filename)
                    root, ext = os.path.splitext(filename)
                    if os.path.isdir(path):
                        if os.path.isfile(os.path.join(path, "__init__.py")):
                            # This is a package directory, add it
                            self.writepy(path, basename,
                                         filterfunc=filterfunc)  # Recursive call
                    elif ext == ".py":
                        if filterfunc and not filterfunc(path):
                            if self.debug:
                                print('file %r skipped by filterfunc' % path)
                            continue
                        fname, arcname = self._get_codename(path[0:-3],
                                                            basename)
                        if self.debug:
                            print("Adding", arcname)
                        self.write(fname, arcname)
            else:
                # This is NOT a package directory, add its files at top level
                if self.debug:
                    print("Adding files from directory", pathname)
                for filename in sorted(os.listdir(pathname)):
                    path = os.path.join(pathname, filename)
                    root, ext = os.path.splitext(filename)
                    if ext == ".py":
                        if filterfunc and not filterfunc(path):
                            if self.debug:
                                print('file %r skipped by filterfunc' % path)
                            continue
                        fname, arcname = self._get_codename(path[0:-3],
                                                            basename)
                        if self.debug:
                            print("Adding", arcname)
                        self.write(fname, arcname)
        else:
            if pathname[-3:] != ".py":
                raise RuntimeError(
                    'Files added with writepy() must end with ".py"')
            fname, arcname = self._get_codename(pathname[0:-3], basename)
            if self.debug:
                print("Adding file", arcname)
            self.write(fname, arcname)
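
    # Illustrative sketch (placeholder paths): writepy() adds compiled modules;
    # a filterfunc can be used to skip tests or other unwanted paths.
    #
    #   with PyZipFile('mylib.zip', 'w', optimize=2) as pzf:
    #       pzf.writepy('mypackage',
    #                   filterfunc=lambda p: not p.endswith('test_module.py'))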

    def _get_codename(self, pathname, basename):
        """Return (filename, archivename) for the path.

        Given a module name path, return the correct file path and
        archive name, compiling if necessary.  For example, given
        /python/lib/string, return (/python/lib/string.pyc, string).
        """
        def _compile(file, optimize=-1):
            import py_compile
            if self.debug:
                print("Compiling", file)
            try:
                py_compile.compile(file, doraise=True, optimize=optimize)
            except py_compile.PyCompileError as err:
                print(err.msg)
                return False
            return True

        file_py = pathname + ".py"
        file_pyc = pathname + ".pyc"
        pycache_opt0 = importlib.util.cache_from_source(file_py, optimization='')
        pycache_opt1 = importlib.util.cache_from_source(file_py, optimization=1)
        pycache_opt2 = importlib.util.cache_from_source(file_py, optimization=2)
        if self._optimize == -1:
            # legacy mode: use whatever file is present
            if (os.path.isfile(file_pyc) and
                  os.stat(file_pyc).st_mtime >= os.stat(file_py).st_mtime):
                # Use .pyc file.
                arcname = fname = file_pyc
            elif (os.path.isfile(pycache_opt0) and
                  os.stat(pycache_opt0).st_mtime >= os.stat(file_py).st_mtime):
                # Use the __pycache__/*.pyc file, but write it to the legacy pyc
                # file name in the archive.
                fname = pycache_opt0
                arcname = file_pyc
            elif (os.path.isfile(pycache_opt1) and
                  os.stat(pycache_opt1).st_mtime >= os.stat(file_py).st_mtime):
                # Use the __pycache__/*.pyc file, but write it to the legacy pyc
                # file name in the archive.
                fname = pycache_opt1
                arcname = file_pyc
            elif (os.path.isfile(pycache_opt2) and
                  os.stat(pycache_opt2).st_mtime >= os.stat(file_py).st_mtime):
                # Use the __pycache__/*.pyc file, but write it to the legacy pyc
                # file name in the archive.
                fname = pycache_opt2
                arcname = file_pyc
            else:
                # Compile py into PEP 3147 pyc file.
                if _compile(file_py):
                    if sys.flags.optimize == 0:
                        fname = pycache_opt0
                    elif sys.flags.optimize == 1:
                        fname = pycache_opt1
                    else:
                        fname = pycache_opt2
                    arcname = file_pyc
                else:
                    fname = arcname = file_py
        else:
            # new mode: use given optimization level
            if self._optimize == 0:
                fname = pycache_opt0
                arcname = file_pyc
            else:
                arcname = file_pyc
                if self._optimize == 1:
                    fname = pycache_opt1
                elif self._optimize == 2:
                    fname = pycache_opt2
                else:
                    msg = "invalid value for 'optimize': {!r}".format(self._optimize)
                    raise ValueError(msg)
            if not (os.path.isfile(fname) and
                    os.stat(fname).st_mtime >= os.stat(file_py).st_mtime):
                if not _compile(file_py, optimize=self._optimize):
                    fname = arcname = file_py
        archivename = os.path.split(arcname)[1]
        if basename:
            archivename = "%s/%s" % (basename, archivename)
        return (fname, archivename)


def _parents(path):
    """
    Given a path with elements separated by
    posixpath.sep, generate all parents of that path.

    >>> list(_parents('b/d'))
    ['b']
    >>> list(_parents('/b/d/'))
    ['/b']
    >>> list(_parents('b/d/f/'))
    ['b/d', 'b']
    >>> list(_parents('b'))
    []
    >>> list(_parents(''))
    []
    """
    return itertools.islice(_ancestry(path), 1, None)


def _ancestry(path):
    """
    Given a path with elements separated by
    posixpath.sep, generate all elements of that path

    >>> list(_ancestry('b/d'))
    ['b/d', 'b']
    >>> list(_ancestry('/b/d/'))
    ['/b/d', '/b']
    >>> list(_ancestry('b/d/f/'))
    ['b/d/f', 'b/d', 'b']
    >>> list(_ancestry('b'))
    ['b']
    >>> list(_ancestry(''))
    []
    """
    path = path.rstrip(posixpath.sep)
    while path and path != posixpath.sep:
        yield path
        path, tail = posixpath.split(path)


_dedupe = dict.fromkeys
"""Deduplicate an iterable in original order"""


def _difference(minuend, subtrahend):
    """
    Return items in minuend not in subtrahend, retaining order
    with O(1) lookup.
    """
    return itertools.filterfalse(set(subtrahend).__contains__, minuend)


class CompleteDirs(ZipFile):
    """
    A ZipFile subclass that ensures that implied directories
    are always included in the namelist.
    """

    @staticmethod
    def _implied_dirs(names):
        parents = itertools.chain.from_iterable(map(_parents, names))
        as_dirs = (p + posixpath.sep for p in parents)
        return _dedupe(_difference(as_dirs, names))

    def namelist(self):
        names = super(CompleteDirs, self).namelist()
        return names + list(self._implied_dirs(names))

    def _name_set(self):
        return set(self.namelist())

    def resolve_dir(self, name):
        """
        If the name represents a directory, return that name
        as a directory (with the trailing slash).
        """
        names = self._name_set()
        dirname = name + '/'
        dir_match = name not in names and dirname in names
        return dirname if dir_match else name

    @classmethod
    def make(cls, source):
        """
        Given a source (filename or zipfile), return an
        appropriate CompleteDirs subclass.
        """
        if isinstance(source, CompleteDirs):
            return source

        if not isinstance(source, ZipFile):
            return cls(source)

        # Only allow for FastLookup when supplied zipfile is read-only
        if 'r' not in source.mode:
            cls = CompleteDirs

        res = cls.__new__(cls)
        vars(res).update(vars(source))
        return res


class FastLookup(CompleteDirs):
    """
    ZipFile subclass to ensure implicit
    dirs exist and are resolved rapidly.
    """

    def namelist(self):
        with contextlib.suppress(AttributeError):
            return self.__names
        self.__names = super(FastLookup, self).namelist()
        return self.__names

    def _name_set(self):
        with contextlib.suppress(AttributeError):
            return self.__lookup
        self.__lookup = super(FastLookup, self)._name_set()
        return self.__lookup


class Path:
    """
    A pathlib-compatible interface for zip files.

    Consider a zip file with this structure::

        .
        ├── a.txt
        └── b
            ├── c.txt
            └── d
                └── e.txt

    >>> data = io.BytesIO()
    >>> zf = ZipFile(data, 'w')
    >>> zf.writestr('a.txt', 'content of a')
    >>> zf.writestr('b/c.txt', 'content of c')
    >>> zf.writestr('b/d/e.txt', 'content of e')
    >>> zf.filename = 'abcde.zip'

    Path accepts the zipfile object itself or a filename

    >>> root = Path(zf)

    From there, several path operations are available.

    Directory iteration (including the zip file itself):

    >>> a, b = root.iterdir()
    >>> a
    Path('abcde.zip', 'a.txt')
    >>> b
    Path('abcde.zip', 'b/')

    name property:

    >>> b.name
    'b'

    join with divide operator:

    >>> c = b / 'c.txt'
    >>> c
    Path('abcde.zip', 'b/c.txt')
    >>> c.name
    'c.txt'

    Read text:

    >>> c.read_text()
    'content of c'

    existence:

    >>> c.exists()
    True
    >>> (b / 'missing.txt').exists()
    False

    Coercion to string:

    >>> str(c)
    'abcde.zip/b/c.txt'
    """

    __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"

    def __init__(self, root, at=""):
        self.root = FastLookup.make(root)
        self.at = at

    def open(self, mode='r', *args, **kwargs):
        """
        Open this entry as text or binary following the semantics
        of ``pathlib.Path.open()`` by passing arguments through
        to io.TextIOWrapper().
        """
        pwd = kwargs.pop('pwd', None)
        zip_mode = mode[0]
        stream = self.root.open(self.at, zip_mode, pwd=pwd)
        if 'b' in mode:
            if args or kwargs:
                raise ValueError("encoding args invalid for binary operation")
            return stream
        return io.TextIOWrapper(stream, *args, **kwargs)

    @property
    def name(self):
        return posixpath.basename(self.at.rstrip("/"))

    def read_text(self, *args, **kwargs):
        with self.open('r', *args, **kwargs) as strm:
            return strm.read()

    def read_bytes(self):
        with self.open('rb') as strm:
            return strm.read()

    def _is_child(self, path):
        return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")

    def _next(self, at):
        return Path(self.root, at)

    def is_dir(self):
        return not self.at or self.at.endswith("/")

    def is_file(self):
        return not self.is_dir()

    def exists(self):
        return self.at in self.root._name_set()

    def iterdir(self):
        if not self.is_dir():
            raise ValueError("Can't listdir a file")
        subs = map(self._next, self.root.namelist())
        return filter(self._is_child, subs)

    def __str__(self):
        return posixpath.join(self.root.filename, self.at)

    def __repr__(self):
        return self.__repr.format(self=self)

    def joinpath(self, add):
        next = posixpath.join(self.at, add)
        return self._next(self.root.resolve_dir(next))

    __truediv__ = joinpath

    @property
    def parent(self):
        parent_at = posixpath.dirname(self.at.rstrip('/'))
        if parent_at:
            parent_at += '/'
        return self._next(parent_at)
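
    # Illustrative sketch: navigating an archive with Path; 'abcde.zip'
    # mirrors the placeholder used in the class docstring above.
    #
    #   root = Path('abcde.zip')
    #   for entry in (root / 'b').iterdir():
    #       if entry.is_file():
    #           print(entry.name, len(entry.read_bytes()))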


def main(args=None):
    import argparse
    description = 'A simple command-line interface for zipfile module.'
    parser = argparse.ArgumentParser(description=description)
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('-l', '--list', metavar='<zipfile>',
                       help='Show listing of a zipfile')
    group.add_argument('-e', '--extract', nargs=2,
                       metavar=('<zipfile>', '<output_dir>'),
                       help='Extract zipfile into target dir')
    group.add_argument('-c', '--create', nargs='+',
                       metavar=('<name>', '<file>'),
                       help='Create zipfile from sources')
    group.add_argument('-t', '--test', metavar='<zipfile>',
                       help='Test if a zipfile is valid')
    args = parser.parse_args(args)

    if args.test is not None:
        src = args.test
        with ZipFile(src, 'r') as zf:
            badfile = zf.testzip()
        if badfile:
            print("The following enclosed file is corrupted: {!r}".format(badfile))
        print("Done testing")

    elif args.list is not None:
        src = args.list
        with ZipFile(src, 'r') as zf:
            zf.printdir()

    elif args.extract is not None:
        src, curdir = args.extract
        with ZipFile(src, 'r') as zf:
            zf.extractall(curdir)

    elif args.create is not None:
        zip_name = args.create.pop(0)
        files = args.create

        def addToZip(zf, path, zippath):
            if os.path.isfile(path):
                zf.write(path, zippath, ZIP_DEFLATED)
            elif os.path.isdir(path):
                if zippath:
                    zf.write(path, zippath)
                for nm in sorted(os.listdir(path)):
                    addToZip(zf,
                             os.path.join(path, nm), os.path.join(zippath, nm))
            # else: ignore

        with ZipFile(zip_name, 'w') as zf:
            for path in files:
                zippath = os.path.basename(path)
                if not zippath:
                    zippath = os.path.basename(os.path.dirname(path))
                if zippath in ('', os.curdir, os.pardir):
                    zippath = ''
                addToZip(zf, path, zippath)


if __name__ == "__main__":
    main()
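
# Illustrative command lines for the CLI defined in main() above
# ('archive.zip', 'out_dir' and the listed files are placeholders):
#
#   python -m zipfile -l archive.zip
#   python -m zipfile -t archive.zip
#   python -m zipfile -e archive.zip out_dir
#   python -m zipfile -c archive.zip file1.txt dir/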