patches-mtime-cache.py

#!/usr/bin/env python
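"""Cache and restore the mtimes of files modified by patches.

"generate" records a sha256 hash plus the atime/mtime of every patched file;
"apply" restores those timestamps for files whose contents are unchanged, so
re-applying identical patches doesn't needlessly bump mtimes; "set" forces a
single fixed mtime onto every patched file.
"""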
from __future__ import print_function

import argparse
import hashlib
import json
import os
import posixpath
import sys
import traceback

from lib.patches import patch_from_dir
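

# Yield the repo-relative path of every file modified by the configured
# patches, parsed from the "+++ b/<path>" headers of each patch directory.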
def patched_file_paths(patches_config):
    for patch_dir, repo in patches_config.items():
        for line in patch_from_dir(patch_dir).split("\n"):
            if line.startswith("+++"):
                yield posixpath.join(repo, line[6:])
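

# Record each patched file's sha256 and its current atime/mtime so the
# timestamps can be restored later if the contents are still the same.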
def generate_cache(patches_config):
    mtime_cache = {}

    for file_path in patched_file_paths(patches_config):
        if file_path in mtime_cache:
            # File may be patched multiple times, we don't need to
            # rehash it since we are looking at the final result
            continue

        if not os.path.exists(file_path):
            print("Skipping non-existent file:", file_path)
            continue

        with open(file_path, "rb") as f:
            mtime_cache[file_path] = {
                "sha256": hashlib.sha256(f.read()).hexdigest(),
                "atime": os.path.getatime(file_path),
                "mtime": os.path.getmtime(file_path),
            }

    return mtime_cache
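

# Restore the cached atime/mtime of each file whose current contents still
# hash to the cached sha256, i.e. whose patched result is unchanged.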
def apply_mtimes(mtime_cache):
    updates = []

    for file_path, metadata in mtime_cache.items():
        if not os.path.exists(file_path):
            print("Skipping non-existent file:", file_path)
            continue

        with open(file_path, "rb") as f:
            if hashlib.sha256(f.read()).hexdigest() == metadata["sha256"]:
                updates.append(
                    [file_path, metadata["atime"], metadata["mtime"]]
                )

    # We can't atomically set the times for all files at once, but by waiting
    # to update until we've checked all the files we at least have less chance
    # of only updating some files due to an error on one of the files
    for [file_path, atime, mtime] in updates:
        os.utime(file_path, (atime, mtime))
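

# Force the atime and mtime of every patched file to the given timestamp.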
def set_mtimes(patches_config, mtime):
    mtime_cache = {}

    for file_path in patched_file_paths(patches_config):
        if file_path in mtime_cache:
            continue

        if not os.path.exists(file_path):
            print("Skipping non-existent file:", file_path)
            continue

        mtime_cache[file_path] = mtime

    for file_path in mtime_cache:
        os.utime(file_path, (mtime_cache[file_path], mtime_cache[file_path]))
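

# Example invocations (illustrative only; the cache-file and patches-config
# paths below are placeholders, not paths this script requires):
#   python patches-mtime-cache.py generate \
#       --cache-file mtime-cache.json --patches-config patches/config.json
#   python patches-mtime-cache.py apply --cache-file mtime-cache.json
#   python patches-mtime-cache.py set --mtime 1234567890 \
#       --patches-config patches/config.json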
def main():
    parser = argparse.ArgumentParser(
        description="Make mtime cache for patched files"
    )
    subparsers = parser.add_subparsers(
        dest="operation", help="sub-command help"
    )

    apply_subparser = subparsers.add_parser(
        "apply", help="apply the mtimes from the cache"
    )
    apply_subparser.add_argument(
        "--cache-file", required=True, help="mtime cache file"
    )
    apply_subparser.add_argument(
        "--preserve-cache",
        action="store_true",
        help="don't delete cache after applying",
    )

    generate_subparser = subparsers.add_parser(
        "generate", help="generate the mtime cache"
    )
    generate_subparser.add_argument(
        "--cache-file", required=True, help="mtime cache file"
    )

    set_subparser = subparsers.add_parser(
        "set", help="set all mtimes to a specific date"
    )
    set_subparser.add_argument(
        "--mtime",
        type=int,
        required=True,
        help="mtime to use for all patched files",
    )

    for subparser in [generate_subparser, set_subparser]:
        subparser.add_argument(
            "--patches-config",
            type=argparse.FileType("r"),
            required=True,
            help="patches' config in the JSON format",
        )

    args = parser.parse_args()

    if args.operation == "generate":
        try:
            # Cache file may exist from a previously aborted sync. Reuse it.
            with open(args.cache_file, mode="r") as f:
                json.load(f)  # Make sure it's not an empty file
            print("Using existing mtime cache for patches")
            return 0
        except Exception:
            pass

        try:
            with open(args.cache_file, mode="w") as f:
                mtime_cache = generate_cache(json.load(args.patches_config))
                json.dump(mtime_cache, f, indent=2)
        except Exception:
            print(
                "ERROR: failed to generate mtime cache for patches",
                file=sys.stderr,
            )
            traceback.print_exc(file=sys.stderr)

        return 0
    elif args.operation == "apply":
        if not os.path.exists(args.cache_file):
            print("ERROR: --cache-file does not exist", file=sys.stderr)
            return 0  # Cache file may not exist, fail more gracefully

        try:
            with open(args.cache_file, mode="r") as f:
                apply_mtimes(json.load(f))

            if not args.preserve_cache:
                os.remove(args.cache_file)
        except Exception:
            print(
                "ERROR: failed to apply mtime cache for patches",
                file=sys.stderr,
            )
            traceback.print_exc(file=sys.stderr)

        return 0
    elif args.operation == "set":
        # Python 2/3 compatibility
        try:
            user_input = raw_input
        except NameError:
            user_input = input

        answer = user_input(
            "WARNING: Manually setting mtimes could mess up your build. "
            "If you're sure, type yes: "
        )
        if answer.lower() != "yes":
            print("Aborting")
            return 0

        set_mtimes(json.load(args.patches_config), args.mtime)

        return 0


if __name__ == "__main__":
    sys.exit(main())