Fix hash and support sdk extensions

master
Marko Semet 2020-01-20 11:40:26 +01:00
parent d05f998c39
commit eae08ee476
3 changed files with 115 additions and 36 deletions

View File

@@ -9,6 +9,6 @@ CONFIG_DIR="$(dirname "$0")" &&
 if [ -z "$4" ]; then
 	exec flatpak-builder $3 --arch "$2" --rebuild-on-sdk-change "$BUILD_DIR" "$1"
 else
-	HASH="$("$CONFIG_DIR/hash_modules.py" "$1" "$2" | sed -n '1p')" &&
+	HASH="$("$CONFIG_DIR/hash_modules.py" --installed "$1" "$2" | sed -n '1p')" &&
 	exec flatpak-builder $3 --arch "$2" --rebuild-on-sdk-change --gpg-sign=winebarrels@marko10-000.de --repo "$4" -s "WB_HASH='${HASH}'" "$BUILD_DIR" "$1"
 fi

View File

@@ -0,0 +1,8 @@
+#! /usr/bin/env bash
+# Args: <config-file> <arch> <builder-args> [<repo>]
+CONFIG_DIR="$(dirname "$0")" &&
+"$CONFIG_DIR/hash_modules.py" --require-build --installed "$1" "$2" &&
+exec "$CONFIG_DIR/build.sh" "$@"
+echo "No rebuild required."

View File

@@ -1,5 +1,7 @@
 #!/usr/bin/env python3
+import configparser
 import hashlib
+import io
 import itertools
 import json
 import os
@@ -25,21 +27,46 @@ def list_remotes(verbose=False):
 	return result

-def commit_hashes(name, verbose=False):
+def commit_hashes(name, use_local=False, verbose=False):
 	result = []
-	for i in list_remotes(verbose=verbose):
-		out = None
-		try:
-			out = _load_stdout(["flatpak", "remote-info", "--user", "-c", i, name]).lower()
-		except RuntimeError:
-			pass
-		if out is not None:
-			out = out.splitlines()[0]
-			for i in filter(lambda x: not(b"a"[0] <= x <= b"z"[0] or b"0"[0] <= x <= b"9"[0]), out):
-				raise ValueError("Wrong char: " + repr(bytes([i])))
-			result.append(out.decode())
+	if use_local:
+		result = [_load_stdout(["flatpak", "info", "--user", "-c", name]).lower()]
+	else:
+		for i in list_remotes(verbose=verbose):
+			out = None
+			try:
+				out = _load_stdout(["flatpak", "remote-info", "--user", "-c", i, name]).lower()
+			except RuntimeError:
+				pass
+			if out is not None:
+				out = out.splitlines()[0]
+				for i in filter(lambda x: not(b"a"[0] <= x <= b"z"[0] or b"0"[0] <= x <= b"9"[0]), out):
+					raise ValueError("Wrong char: " + repr(bytes([i])))
+				result.append(out.decode())
 	if verbose:
-		print("Commit hashes for " + name + ": " + ", ".join(result), file=sys.stderr)
+		if use_local:
+			print("Commit local hashes for " + name + ": " + ", ".join(result), file=sys.stderr)
+		else:
+			print("Commit remote hashes for " + name + ": " + ", ".join(result), file=sys.stderr)
+	return result
+
+def commit_metas(name, use_local=False, verbose=False):
+	result = []
+	if use_local:
+		result = [_load_stdout(["flatpak", "info", "--user", "-m", name])]
+	else:
+		for i in list_remotes(verbose=verbose):
+			out = None
+			try:
+				out = _load_stdout(["flatpak", "remote-info", "--user", "-m", i, name])
+			except RuntimeError:
+				pass
+			if out is not None:
+				out = out.splitlines()[0]
+				for i in filter(lambda x: not(b"a"[0] <= x <= b"z"[0] or b"0"[0] <= x <= b"9"[0]), out):
+					raise ValueError("Wrong char: " + repr(bytes([i])))
+				result.append(out.decode())
 	return result
@@ -104,25 +131,67 @@ class ModuleLoader():
 			tmp = tmp.encode("utf-8")
 		return function(tmp)

-	def get_depends(self, arch):
+	def get_depends(self, arch, use_local=False, verbose=False):
 		version_src = "/" + arch + "/" + self.content["runtime-version"]
-		result = [self.content["sdk"] + version_src,
-		          self.content["runtime"] + version_src]
+		result = [[self.content["sdk"] + version_src],
+		          [self.content["runtime"] + version_src]]
+
+		# Sdk extensions
+		if "sdk-extensions" in self.content:
+			# Parse meta config
+			meta = []
+			for i in commit_metas(self.content["sdk"] + version_src, use_local=use_local, verbose=verbose):
+				config = configparser.ConfigParser()
+				config.readfp(io.BytesIO(i))
+				meta.append(config)
+
+			# Parse extensions
+			for i in self.content["sdk-extensions"]:
+				iSplit = i.split(".")
+				ext_result = set()
+				for config in meta:
+					for jSize in range(1, len(iSplit) + 1):
+						j = ".".join(iSplit[:jSize])
+
+						# Try version
+						try:
+							tmp = config.get("Extension " + j, "version")
+							ext_result.add(i + "/" + arch + "/" + tmp)
+						except configparser.NoSectionError:
+							pass
+
+						# Try versions
+						try:
+							tmp = config.get("Extension " + j, "versions").split(";")
+							ext_result = ext_result.union(set(map(lambda x: i + "/" + arch + "/" + x, tmp)))
+						except configparser.NoSectionError:
+							pass
+				result.append(list(ext_result))
+
+		# Debug output
+		if verbose:
+			print("Flatpak depends: " + "\n".join(map(repr, result)), file=sys.stderr)
 		return result

-	def hashes_from_base_platforms(self, arch, function, verbose=False):
+	def hashes_from_base_platforms(self, arch, function, use_local=False, verbose=False):
 		# Get source
-		source = None
 		def helper_func(content):
-			nonlocal source
-			source = content
-		self.hash_content(helper_func)
+			return function(content)
+		sourceHash = self.hash_content(helper_func)
+
 		# Build hashes
-		base_hashes = list(map(lambda x: list(map(lambda y: y.encode("utf-8"), commit_hashes(x, verbose=verbose))), self.get_depends(arch)))
-		result = list(map(lambda x: function(b"\x00".join(x) + b"\x00" + source), itertools.product(*base_hashes)))
+		base_hashes = []
+		for i in self.get_depends(arch, use_local=use_local, verbose=verbose):
+			tmp = set()
+			for j in i:
+				tmp = tmp.union(set(map(lambda x: x.encode("utf-8").lower(), commit_hashes(j, use_local=use_local, verbose=verbose))))
+			base_hashes.append(sorted(tmp))
+		result = list(map(lambda x: function(b"\x00".join(x) + b"\x00" + sourceHash), itertools.product(*base_hashes)))
 		if verbose:
-			print("Local hashes for: " + ", ".join(result), file=sys.stderr)
+			if use_local:
+				print("Local hashes for: " + ", ".join(map(bytes.decode, result)), file=sys.stderr)
+			else:
+				print("Remote hashes for: " + ", ".join(map(bytes.decode, result)), file=sys.stderr)
 		return result

 	def old_wb_hashes(self, arch, verbose=False):
@@ -133,15 +202,15 @@ class ModuleLoader():
 			result += wb_hashes(self.content["id-platform"] + version_target, verbose=verbose)
 		return result

-	def version_hashes(self, arch, verbose=False):
+	def version_hashes(self, arch, use_local=False, verbose=False):
 		def func(data):
 			tmp = hashlib.sha3_256()
 			tmp.update(data)
-			return tmp.hexdigest()
-		return self.hashes_from_base_platforms(arch, func, verbose=verbose)
+			return tmp.hexdigest().encode()
+		return list(map(bytes.decode, self.hashes_from_base_platforms(arch, func, use_local=use_local, verbose=verbose)))

-	def required_update(self, arch, verbose=False):
-		own_hash = set(self.version_hashes(arch, verbose=verbose))
+	def required_update(self, arch, use_local=False, verbose=False):
+		own_hash = set(self.version_hashes(arch, use_local=use_local, verbose=verbose))
 		old_hashes = set(self.old_wb_hashes(arch, verbose=verbose))
 		return len(own_hash.intersection(old_hashes)) == 0
@@ -149,13 +218,13 @@ class ModuleLoader():
 		return os.path.abspath(os.path.join(os.path.split(self.path)[0], file))

-def get_need_update(source, arch, verbose=False):
+def get_need_update(source, arch, use_local=False, verbose=False):
 	mod = ModuleLoader(source)
-	return mod.required_update(arch, verbose=verbose)
+	return mod.required_update(arch, use_local=use_local, verbose=verbose)

-def get_own_hash(source, arch, verbose=False):
+def get_own_hash(source, arch, use_local=False, verbose=False):
 	mod = ModuleLoader(source)
-	tmp = mod.version_hashes(arch, verbose=verbose)
+	tmp = mod.version_hashes(arch, use_local=use_local, verbose=verbose)
 	if len(tmp) != 1:
 		raise ValueError("No unique version number possible.")
 	return tmp[0]
@@ -172,14 +241,16 @@ if __name__ == '__main__':
 	                    help="The arch to build.")
 	parser.add_argument("--require-build", dest="rebuild", action="store_const",
 	                    const=True, default=False, help="Returns 0 when outdated.")
+	parser.add_argument("--installed", dest="installed", action="store_const",
+	                    const=True, default=False, help="Use installed then remote.")
 	parser.add_argument("-v", "--verbose", dest="verbose", action="store_const",
 	                    const=True, default=False, help="Verbose information to stderr.")
 	args = parser.parse_args()

 	if args.rebuild:
-		if get_need_update(args.file[0], args.arch[0], verbose=args.verbose):
+		if get_need_update(args.file[0], args.arch[0], use_local=args.installed, verbose=args.verbose):
 			exit(0)
 		else:
 			exit(1)
 	else:
-		print(get_own_hash(args.file[0], args.arch[0], verbose=args.verbose))
+		print(get_own_hash(args.file[0], args.arch[0], use_local=args.installed, verbose=args.verbose))
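As a hedged illustration of the new --installed mode: the flatpak commands below are the ones the diff invokes through _load_stdout; the SDK ref and the remote name are examples only, following the "<sdk>/<arch>/<runtime-version>" pattern that get_depends() builds.

    # With --installed, commit hashes and metadata come from the local installation:
    flatpak info --user -c org.freedesktop.Sdk/x86_64/19.08   # installed commit hash
    flatpak info --user -m org.freedesktop.Sdk/x86_64/19.08   # metadata; its "[Extension ...]"
                                                               # sections carry the version/versions
                                                               # keys that get_depends() turns into
                                                               # <extension-id>/<arch>/<branch> refs
    # Without the flag, every configured remote is queried instead:
    flatpak remote-info --user -c flathub org.freedesktop.Sdk/x86_64/19.08
    flatpak remote-info --user -m flathub org.freedesktop.Sdk/x86_64/19.08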