# patches-mtime-cache.py
  1. #!/usr/bin/env python3
  2. from __future__ import print_function
  3. import argparse
  4. import hashlib
  5. import json
  6. import os
  7. import posixpath
  8. import sys
  9. import traceback
  10. from lib.patches import patch_from_dir
  11. def patched_file_paths(patches_config):
  12. for target in patches_config:
  13. patch_dir = target.get('patch_dir')
  14. repo = target.get('repo')
  15. for line in patch_from_dir(patch_dir).split("\n"):
  16. if line.startswith("+++"):
  17. yield posixpath.join(repo, line[6:])
  18. def generate_cache(patches_config):
  19. mtime_cache = {}
  20. for file_path in patched_file_paths(patches_config):
  21. if file_path in mtime_cache:
  22. # File may be patched multiple times, we don't need to
  23. # rehash it since we are looking at the final result
  24. continue
  25. if not os.path.exists(file_path):
  26. print("Skipping non-existent file:", file_path)
  27. continue
  28. with open(file_path, "rb") as f:
  29. mtime_cache[file_path] = {
  30. "sha256": hashlib.sha256(f.read()).hexdigest(),
  31. "atime": os.path.getatime(file_path),
  32. "mtime": os.path.getmtime(file_path),
  33. }
  34. return mtime_cache
  35. def apply_mtimes(mtime_cache):
  36. updates = []
  37. for file_path, metadata in mtime_cache.items():
  38. if not os.path.exists(file_path):
  39. print("Skipping non-existent file:", file_path)
  40. continue
  41. with open(file_path, "rb") as f:
  42. if hashlib.sha256(f.read()).hexdigest() == metadata["sha256"]:
  43. updates.append(
  44. [file_path, metadata["atime"], metadata["mtime"]]
  45. )
  46. # We can't atomically set the times for all files at once, but by waiting
  47. # to update until we've checked all the files we at least have less chance
  48. # of only updating some files due to an error on one of the files
  49. for [file_path, atime, mtime] in updates:
  50. os.utime(file_path, (atime, mtime))
  51. def set_mtimes(patches_config, mtime):
  52. mtime_cache = {}
  53. for file_path in patched_file_paths(patches_config):
  54. if file_path in mtime_cache:
  55. continue
  56. if not os.path.exists(file_path):
  57. print("Skipping non-existent file:", file_path)
  58. continue
  59. mtime_cache[file_path] = mtime
  60. for file_path in mtime_cache:
  61. os.utime(file_path, (mtime_cache[file_path], mtime_cache[file_path]))
  62. def main():
  63. parser = argparse.ArgumentParser(
  64. description="Make mtime cache for patched files"
  65. )
  66. subparsers = parser.add_subparsers(
  67. dest="operation", help="sub-command help"
  68. )
  69. apply_subparser = subparsers.add_parser(
  70. "apply", help="apply the mtimes from the cache"
  71. )
  72. apply_subparser.add_argument(
  73. "--cache-file", required=True, help="mtime cache file"
  74. )
  75. apply_subparser.add_argument(
  76. "--preserve-cache",
  77. action="store_true",
  78. help="don't delete cache after applying",
  79. )
  80. generate_subparser = subparsers.add_parser(
  81. "generate", help="generate the mtime cache"
  82. )
  83. generate_subparser.add_argument(
  84. "--cache-file", required=True, help="mtime cache file"
  85. )
  86. set_subparser = subparsers.add_parser(
  87. "set", help="set all mtimes to a specific date"
  88. )
  89. set_subparser.add_argument(
  90. "--mtime",
  91. type=int,
  92. required=True,
  93. help="mtime to use for all patched files",
  94. )
  95. for subparser in [generate_subparser, set_subparser]:
  96. subparser.add_argument(
  97. "--patches-config",
  98. type=argparse.FileType("r"),
  99. required=True,
  100. help="patches' config in the JSON format",
  101. )
  102. args = parser.parse_args()
  103. if args.operation == "generate":
  104. try:
  105. # Cache file may exist from a previously aborted sync. Reuse it.
  106. with open(args.cache_file, mode="r") as f:
  107. json.load(f) # Make sure it's not an empty file
  108. print("Using existing mtime cache for patches")
  109. return 0
  110. except Exception:
  111. pass
  112. try:
  113. with open(args.cache_file, mode="w") as f:
  114. mtime_cache = generate_cache(json.load(args.patches_config))
  115. json.dump(mtime_cache, f, indent=2)
  116. except Exception:
  117. print(
  118. "ERROR: failed to generate mtime cache for patches",
  119. file=sys.stderr,
  120. )
  121. traceback.print_exc(file=sys.stderr)
  122. return 0
  123. elif args.operation == "apply":
  124. if not os.path.exists(args.cache_file):
  125. print("ERROR: --cache-file does not exist", file=sys.stderr)
  126. return 0 # Cache file may not exist, fail more gracefully
  127. try:
  128. with open(args.cache_file, mode="r") as f:
  129. apply_mtimes(json.load(f))
  130. if not args.preserve_cache:
  131. os.remove(args.cache_file)
  132. except Exception:
  133. print(
  134. "ERROR: failed to apply mtime cache for patches",
  135. file=sys.stderr,
  136. )
  137. traceback.print_exc(file=sys.stderr)
  138. return 0
  139. elif args.operation == "set":
  140. # Python 2/3 compatibility
  141. try:
  142. user_input = raw_input
  143. except NameError:
  144. user_input = input
  145. answer = user_input(
  146. "WARNING: Manually setting mtimes could mess up your build. "
  147. "If you're sure, type yes: "
  148. )
  149. if answer.lower() != "yes":
  150. print("Aborting")
  151. return 0
  152. set_mtimes(json.load(args.patches_config), args.mtime)
  153. return 0
# Script entry point: propagate main()'s return value as the exit code.
if __name__ == "__main__":
    sys.exit(main())