patches-mtime-cache.py

#!/usr/bin/env python3
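"""Maintain an mtime cache for files that are modified by patches.

The --patches-config argument is a JSON object mapping each patch directory
to the repository its patches apply to; every "+++" target in those patches
is treated as a patched file path relative to that repository.

Subcommands:
  generate  record the sha256, atime and mtime of every patched file
  apply     restore the recorded times for files whose contents are unchanged
  set       force a single mtime/atime onto every patched file
"""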

import argparse
import hashlib
import json
import os
import posixpath
import sys
import traceback

from lib.patches import patch_from_dir


def patched_file_paths(patches_config):
    for patch_dir, repo in patches_config.items():
        for line in patch_from_dir(patch_dir).split("\n"):
            if line.startswith("+++"):
                # Target headers look like "+++ b/path/to/file"; drop the
                # six-character "+++ b/" prefix to get the repo-relative path.
                yield posixpath.join(repo, line[6:])


def generate_cache(patches_config):
    """Record the sha256 hash and current atime/mtime of every patched file."""
    mtime_cache = {}

    for file_path in patched_file_paths(patches_config):
        if file_path in mtime_cache:
            # File may be patched multiple times; we don't need to
            # rehash it since we are looking at the final result
            continue

        if not os.path.exists(file_path):
            print("Skipping non-existent file:", file_path)
            continue

        with open(file_path, "rb") as f:
            mtime_cache[file_path] = {
                "sha256": hashlib.sha256(f.read()).hexdigest(),
                "atime": os.path.getatime(file_path),
                "mtime": os.path.getmtime(file_path),
            }

    return mtime_cache


def apply_mtimes(mtime_cache):
    """Restore cached atime/mtime for files whose contents still match the cache."""
    updates = []

    for file_path, metadata in mtime_cache.items():
        if not os.path.exists(file_path):
            print("Skipping non-existent file:", file_path)
            continue

        with open(file_path, "rb") as f:
            if hashlib.sha256(f.read()).hexdigest() == metadata["sha256"]:
                updates.append(
                    [file_path, metadata["atime"], metadata["mtime"]]
                )

    # We can't atomically set the times for all files at once, but by waiting
    # to update until we've checked all the files we at least reduce the chance
    # of updating only some of them because of an error on one file
    for [file_path, atime, mtime] in updates:
        os.utime(file_path, (atime, mtime))


def set_mtimes(patches_config, mtime):
    mtime_cache = {}

    for file_path in patched_file_paths(patches_config):
        if file_path in mtime_cache:
            continue

        if not os.path.exists(file_path):
            print("Skipping non-existent file:", file_path)
            continue

        mtime_cache[file_path] = mtime

    for file_path in mtime_cache:
        os.utime(file_path, (mtime_cache[file_path], mtime_cache[file_path]))


def main():
    parser = argparse.ArgumentParser(
        description="Make mtime cache for patched files"
    )
    subparsers = parser.add_subparsers(
        dest="operation", help="sub-command help"
    )

    apply_subparser = subparsers.add_parser(
        "apply", help="apply the mtimes from the cache"
    )
    apply_subparser.add_argument(
        "--cache-file", required=True, help="mtime cache file"
    )
    apply_subparser.add_argument(
        "--preserve-cache",
        action="store_true",
        help="don't delete cache after applying",
    )

    generate_subparser = subparsers.add_parser(
        "generate", help="generate the mtime cache"
    )
    generate_subparser.add_argument(
        "--cache-file", required=True, help="mtime cache file"
    )

    set_subparser = subparsers.add_parser(
        "set", help="set all mtimes to a specific date"
    )
    set_subparser.add_argument(
        "--mtime",
        type=int,
        required=True,
        help="mtime to use for all patched files",
    )

    for subparser in [generate_subparser, set_subparser]:
        subparser.add_argument(
            "--patches-config",
            type=argparse.FileType("r"),
            required=True,
            help="patches' config in the JSON format",
        )

    args = parser.parse_args()

    if args.operation == "generate":
        try:
            # Cache file may exist from a previously aborted sync. Reuse it.
            with open(args.cache_file, mode="r") as f:
                json.load(f)  # Make sure it's not an empty file
                print("Using existing mtime cache for patches")
                return 0
        except Exception:
            pass

        try:
            with open(args.cache_file, mode="w") as f:
                mtime_cache = generate_cache(json.load(args.patches_config))
                json.dump(mtime_cache, f, indent=2)
        except Exception:
            print(
                "ERROR: failed to generate mtime cache for patches",
                file=sys.stderr,
            )
            traceback.print_exc(file=sys.stderr)

        return 0

    elif args.operation == "apply":
        if not os.path.exists(args.cache_file):
            print("ERROR: --cache-file does not exist", file=sys.stderr)
            return 0  # Cache file may not exist, fail more gracefully

        try:
            with open(args.cache_file, mode="r") as f:
                apply_mtimes(json.load(f))

            if not args.preserve_cache:
                os.remove(args.cache_file)
        except Exception:
            print(
                "ERROR: failed to apply mtime cache for patches",
                file=sys.stderr,
            )
            traceback.print_exc(file=sys.stderr)

        return 0

    elif args.operation == "set":
        answer = input(
            "WARNING: Manually setting mtimes could mess up your build. "
            "If you're sure, type yes: "
        )
        if answer.lower() != "yes":
            print("Aborting")
            return 0

        set_mtimes(json.load(args.patches_config), args.mtime)
        return 0


if __name__ == "__main__":
    sys.exit(main())
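
# Example invocations (cache/config paths are illustrative, not part of the script):
#   python3 patches-mtime-cache.py generate \
#       --cache-file patches-mtime-cache.json --patches-config patches/config.json
#   python3 patches-mtime-cache.py apply --cache-file patches-mtime-cache.json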