#!/usr/bin/env python3
#
# SPDX-License-Identifier: GPL-2.0-only
#

import os
import sys
import shutil
import errno
import time

def mkdir(d):
    """Create directory *d* (including parents); an existing directory is fine.

    Any OSError other than EEXIST (e.g. permission denied, path component
    is a file) is re-raised.
    """
    try:
        os.makedirs(d)
    except OSError as e:
        if e.errno != errno.EEXIST:
            # Bare `raise` re-raises the active exception with its
            # original traceback intact.
            raise
def extract_sha(filename):
    """Return the hash embedded in an sstate filename.

    The hash is the eighth ':'-separated field, truncated at the first
    underscore.  Raises IndexError when the name has fewer fields.
    """
    fields = filename.split(':')
    hash_field = fields[7]
    return hash_field.partition('_')[0]
# get all files in a directory, extract hash and make
# a map from hash to list of file with that hash
def map_sha_to_files(dir_, prefix, sha_map):
    """Scan dir_/prefix/ and append each file's path to sha_map[sha].

    Mutates *sha_map* in place.  Files whose names do not contain a
    parseable hash (extract_sha raises IndexError) are silently skipped.
    No-op when the directory does not exist.
    """
    sstate_prefix_path = dir_ + '/' + prefix + '/'
    if not os.path.exists(sstate_prefix_path):
        return
    for f in os.listdir(sstate_prefix_path):
        # Keep the try body minimal: only the parse can raise IndexError.
        try:
            sha = extract_sha(f)
        except IndexError:
            # Name doesn't follow the sstate naming scheme; ignore it.
            continue
        sha_map.setdefault(sha, []).append(sstate_prefix_path + f)
# given a prefix build a map of hash to list of files
def build_sha_cache(prefix):
    """Build a sha -> [file paths] map for *prefix*, merging entries from
    the main sstate dir (argv[2]) and its native subdir (argv[2]/argv[4])."""
    sha_map = {}
    base_dir = sys.argv[2]
    for source_dir in (base_dir, base_dir + '/' + sys.argv[4]):
        map_sha_to_files(source_dir, prefix, sha_map)
    return sha_map
# Require four positional args; the fifth (filter file) is optional.
if len(sys.argv) < 5:
    for usage_line in (
        "Incorrect number of arguments specified",
        "syntax: gen-lockedsig-cache <locked-sigs.inc> <input-cachedir> <output-cachedir> <nativelsbstring> [filterfile]",
    ):
        print(usage_line)
    sys.exit(1)
# Optional filter file: keep only "<task>:<sig>" lines (those containing ':').
filterlist = []
if len(sys.argv) > 5:
    print('Reading filter file %s' % sys.argv[5])
    with open(sys.argv[5]) as filter_file:
        filterlist = [line.rstrip() for line in filter_file if ":" in line]
print('Reading %s' % sys.argv[1])
sigs = []
with open(sys.argv[1]) as lockedsigs:
    for line in lockedsigs:
        if ":" not in line:
            continue
        # First whitespace-delimited token is "<task>:<sig>"; split on the
        # final colon so task names that themselves contain ':' stay intact.
        task, sig = line.split()[0].rsplit(':', 1)
        if filterlist and task not in filterlist:
            print('Filtering out %s' % task)
        else:
            sigs.append(sig)
print('Gathering file list')
start_time = time.perf_counter()
files = set()
# Two-level cache keyed on the first two, then next two, characters of the
# signature — mirroring the sstate directory sharding — so each shard
# directory is listed at most once.
sstate_content_cache = {}
for s in sigs:
    prefix, prefix2 = s[:2], s[2:4]
    shard = sstate_content_cache.setdefault(prefix, {})
    if prefix2 not in shard:
        shard[prefix2] = build_sha_cache(prefix + "/" + prefix2)
    files.update(shard[prefix2].get(s, ()))

elapsed = time.perf_counter() - start_time
print("Gathering file list took %.1fs" % elapsed)
print('Processing files')
for f in files:
    sys.stdout.write('Processing %s... ' % f)
    # Only real sstate artifacts are transferred; anything else is most
    # likely an in-progress temp file.
    if not f.endswith(('.tar.zst', '.siginfo', '.sig')):
        print('skipping')
        continue
    # Mirror the path relative to the input cache into the output cache.
    dst = os.path.join(sys.argv[3], os.path.relpath(f, sys.argv[2]))
    destdir = os.path.dirname(dst)
    mkdir(destdir)

    src = os.path.realpath(f)
    if os.path.exists(dst):
        os.remove(dst)
    if os.stat(src).st_dev == os.stat(destdir).st_dev:
        # Same filesystem: hard-link to save space; fall back to a copy if
        # linking fails (e.g. permissions or link-count limits).
        print('linking')
        try:
            os.link(src, dst)
        except OSError:
            print('hard linking failed, copying')
            shutil.copyfile(src, dst)
    else:
        print('copying')
        shutil.copyfile(src, dst)

print('Done!')