mirror of
https://github.com/Xetibo/ReSet.git
synced 2025-04-04 13:02:01 +02:00
feat: Add flatpak build tools
This commit is contained in:
parent
638abd5b86
commit
727808db46
4
.gitignore
vendored
4
.gitignore
vendored
|
@@ -2,6 +2,8 @@
|
||||||
# will have compiled files and executables
|
# will have compiled files and executables
|
||||||
debug/
|
debug/
|
||||||
target/
|
target/
|
||||||
|
flatpak/build
|
||||||
|
flatpak/.flatpak-builder
|
||||||
|
|
||||||
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
|
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
|
||||||
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
|
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
|
||||||
|
@@ -17,4 +19,4 @@ Cargo.lock
|
||||||
# Added by cargo
|
# Added by cargo
|
||||||
|
|
||||||
/target
|
/target
|
||||||
.idea/
|
.idea/
|
||||||
|
|
|
@@ -5,10 +5,9 @@ edition = "2021"
|
||||||
description = "A wip universal Linux settings application."
|
description = "A wip universal Linux settings application."
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
ReSet-Lib = "*"
|
adw = { version = "0.5.3", package = "libadwaita", features = ["v1_4"]}
|
||||||
adw = { version = "*", package = "libadwaita", features = ["v1_4"]}
|
|
||||||
dbus = "0.9.7"
|
dbus = "0.9.7"
|
||||||
gtk = { version = "*", package = "gtk4", features = ["v4_12"]}
|
gtk = { version = "0.7.3", package = "gtk4", features = ["v4_12"]}
|
||||||
|
|
||||||
[build-dependencies]
|
[build-dependencies]
|
||||||
glib-build-tools = "*"
|
glib-build-tools = "0.18.0"
|
||||||
|
|
1022
flatpak/cargo-sources.json
Normal file
1022
flatpak/cargo-sources.json
Normal file
File diff suppressed because it is too large
Load diff
381
flatpak/flatpak-generator.py
Normal file
381
flatpak/flatpak-generator.py
Normal file
|
@@ -0,0 +1,381 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
# provided by flatpak -> flatpak-builder-tools
|
||||||
|
# https://github.com/flatpak/flatpak-builder-tools/tree/master/cargo
|
||||||
|
__license__ = 'MIT'
|
||||||
|
import json
|
||||||
|
from urllib.parse import urlparse, ParseResult, parse_qs
|
||||||
|
import os
|
||||||
|
import contextlib
|
||||||
|
import glob
|
||||||
|
import subprocess
|
||||||
|
import argparse
|
||||||
|
import logging
|
||||||
|
import hashlib
|
||||||
|
import asyncio
|
||||||
|
import aiohttp
|
||||||
|
import toml
|
||||||
|
|
||||||
|
CRATES_IO = 'https://static.crates.io/crates'
|
||||||
|
CARGO_HOME = 'cargo'
|
||||||
|
CARGO_CRATES = f'{CARGO_HOME}/vendor'
|
||||||
|
VENDORED_SOURCES = 'vendored-sources'
|
||||||
|
GIT_CACHE = 'flatpak-cargo/git'
|
||||||
|
COMMIT_LEN = 7
|
||||||
|
|
||||||
|
|
||||||
|
@contextlib.contextmanager
def workdir(path: str):
    """Temporarily make *path* the current working directory.

    The previous working directory is restored on exit, even when the
    managed body raises.
    """
    previous = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(previous)
||||||
|
|
||||||
|
|
||||||
|
def canonical_url(url):
    """Convert *url* to a Cargo "canonical URL" and return it as a ParseResult,
    as per https://github.com/rust-lang/cargo/blob/35c55a93200c84a4de4627f1770f76a8ad268a39/src/cargo/util/canonical_url.rs#L19
    """
    # Hrm. The upstream cargo does not replace those URLs, but if we don't then it doesn't work too well :(
    parsed = urlparse(url.replace('git+https://', 'https://'))
    # It seems cargo drops query and fragment
    scheme = parsed.scheme
    path = parsed.path.rstrip('/')

    if parsed.netloc == 'github.com':
        scheme = 'https'
        path = path.lower()

    if path.endswith('.git'):
        path = path[:-len('.git')]

    return ParseResult(scheme, parsed.netloc, path, None, None, None)
||||||
|
|
||||||
|
|
||||||
|
def get_git_tarball(repo_url, commit):
    """Return a tarball download URL for *commit* of the repository at *repo_url*.

    Knows the archive URL layout of github.com, gitlab.* and bitbucket.org;
    raises ValueError for any other host.
    """
    url = canonical_url(repo_url)
    parts = url.path.split('/')[1:]

    assert len(parts) == 2
    owner, repo = parts
    # canonical_url() already drops a trailing '.git', but stay defensive.
    if repo.endswith('.git'):
        repo = repo.replace('.git', '')
    host = url.hostname
    if host == 'github.com':
        return f'https://codeload.{host}/{owner}/{repo}/tar.gz/{commit}'
    if host.split('.')[0] == 'gitlab':
        return f'https://{host}/{owner}/{repo}/-/archive/{commit}/{repo}-{commit}.tar.gz'
    if host == 'bitbucket.org':
        return f'https://{host}/{owner}/{repo}/get/{commit}.tar.gz'
    raise ValueError(f'Don\'t know how to get tarball for {repo_url}')
||||||
|
|
||||||
|
|
||||||
|
async def get_remote_sha256(url):
    """Download *url* and return the SHA-256 hex digest of its contents.

    The response body is streamed in 4 KiB chunks, so large tarballs are
    never held in memory all at once. Raises on HTTP errors
    (raise_for_status=True).
    """
    logging.info(f"started sha256({url})")
    sha256 = hashlib.sha256()
    async with aiohttp.ClientSession(raise_for_status=True) as http_session:
        async with http_session.get(url) as response:
            while True:
                data = await response.content.read(4096)
                if not data:
                    break
                sha256.update(data)
    logging.info(f"done sha256({url})")
    return sha256.hexdigest()
||||||
|
|
||||||
|
|
||||||
|
def load_toml(tomlfile='Cargo.lock'):
    """Parse *tomlfile* and return its contents as a dict.

    Defaults to the Cargo.lock in the current working directory.
    """
    # Cargo manifests and lockfiles are always UTF-8; don't depend on the
    # locale's default encoding (which can differ, e.g. on Windows).
    with open(tomlfile, 'r', encoding='utf-8') as f:
        toml_data = toml.load(f)
    return toml_data
||||||
|
|
||||||
|
|
||||||
|
def git_repo_name(git_url, commit):
    """Build a unique local directory name for a repo checkout: the last
    path component of the canonical URL plus an abbreviated commit hash.
    """
    basename = canonical_url(git_url).path.split('/')[-1]
    short_commit = commit[:COMMIT_LEN]
    return f'{basename}-{short_commit}'
||||||
|
|
||||||
|
|
||||||
|
def fetch_git_repo(git_url, commit):
    """Clone (or reuse a cached clone of) *git_url* and check out *commit*.

    Clones are cached under ``$XDG_CACHE_HOME/flatpak-cargo/<mangled-url>``.
    Returns the path of the clone directory.
    """
    # Mangle the URL into a filesystem-safe cache directory name.
    repo_dir = git_url.replace('://', '_').replace('/', '_')
    cache_dir = os.environ.get('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
    clone_dir = os.path.join(cache_dir, 'flatpak-cargo', repo_dir)
    if not os.path.isdir(os.path.join(clone_dir, '.git')):
        subprocess.run(['git', 'clone', '--depth=1', git_url, clone_dir], check=True)
    rev_parse_proc = subprocess.run(['git', 'rev-parse', 'HEAD'], cwd=clone_dir, check=True,
                                    stdout=subprocess.PIPE)
    head = rev_parse_proc.stdout.decode().strip()
    # Only fetch/checkout when the cached HEAD doesn't already match the
    # wanted commit (abbreviated comparison, COMMIT_LEN characters).
    if head[:COMMIT_LEN] != commit[:COMMIT_LEN]:
        subprocess.run(['git', 'fetch', 'origin', commit], cwd=clone_dir, check=True)
        subprocess.run(['git', 'checkout', commit], cwd=clone_dir, check=True)
    return clone_dir
||||||
|
|
||||||
|
|
||||||
|
async def get_git_repo_packages(git_url, commit):
    """Map package names to their relative paths inside *git_url* at *commit*.

    Fetches/reuses a local clone, then reads the top-level Cargo.toml if
    present; otherwise scans one directory level down for Cargo.toml files.
    Raises AssertionError if no packages are found at all.
    """
    logging.info('Loading packages from %s', git_url)
    git_repo_dir = fetch_git_repo(git_url, commit)
    packages = {}

    # Paths recorded in `packages` are relative to the repo root, hence the
    # temporary chdir.
    with workdir(git_repo_dir):
        if os.path.isfile('Cargo.toml'):
            packages.update(await get_cargo_toml_packages(load_toml('Cargo.toml'), '.'))
        else:
            for toml_path in glob.glob('*/Cargo.toml'):
                packages.update(await get_cargo_toml_packages(load_toml(toml_path),
                                                              os.path.dirname(toml_path)))

    assert packages, f"No packages found in {git_repo_dir}"
    logging.debug('Packages in %s:\n%s', git_url, json.dumps(packages, indent=4))
    return packages
||||||
|
|
||||||
|
|
||||||
|
async def get_cargo_toml_packages(root_toml, root_dir):
    """Collect ``{package-name: relative-dir}`` for *root_toml* (a parsed
    Cargo.toml) plus all of its path dependencies and workspace members
    found under *root_dir*.

    Must be called with the repo root as the current working directory
    (paths are validated to be relative).
    """
    assert not os.path.isabs(root_dir) and os.path.isdir(root_dir)
    assert 'package' in root_toml or 'workspace' in root_toml
    packages = {}
    # Filled in lazily from workspace.exclude below; read by is_excluded().
    excluded_paths = None

    def is_excluded(path):
        # True when *path* lies inside any workspace-excluded directory.
        if not excluded_paths:
            return False
        return any(os.path.commonpath([excluded_path, path]) == excluded_path
                   for excluded_path in excluded_paths)

    async def get_dep_packages(entry, toml_dir):
        # Walk the dependency tables of one Cargo.toml, recursing into
        # path dependencies so transitive in-repo crates are picked up.
        assert not os.path.isabs(toml_dir)
        # https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html
        if 'dependencies' in entry:
            for dep_name, dep in entry['dependencies'].items():
                # A dependency may be renamed; 'package' holds the real crate name.
                if 'package' in dep:
                    dep_name = dep['package']
                # Only path dependencies live inside this repository.
                if 'path' not in dep:
                    continue
                if dep_name in packages:
                    continue
                dep_dir = os.path.normpath(os.path.join(toml_dir, dep['path']))
                if is_excluded(dep_dir):
                    logging.warning("Excluded dependency at %s", dep_dir)
                    return
                logging.debug("Loading dependency %s from %s", dep_name, dep_dir)
                dep_toml = load_toml(os.path.join(dep_dir, 'Cargo.toml'))
                assert dep_toml['package']['name'] == dep_name, toml_dir
                await get_dep_packages(dep_toml, dep_dir)
                packages[dep_name] = dep_dir
        if 'target' in entry:
            # Platform-specific tables: [target.'cfg(...)'.dependencies].
            for _, target in entry['target'].items():
                await get_dep_packages(target, toml_dir)

    if 'package' in root_toml:
        await get_dep_packages(root_toml, root_dir)
        packages[root_toml['package']['name']] = root_dir

    if 'workspace' in root_toml:
        workspace = root_toml['workspace']
        if 'exclude' in workspace:
            excluded_paths = [os.path.normpath(os.path.join(root_dir, excluded))
                              for excluded in workspace['exclude']]
        for member in workspace.get('members', []):
            # Member entries may be globs (e.g. "crates/*").
            for subpkg_toml in glob.glob(os.path.join(root_dir, member, 'Cargo.toml')):
                subpkg = os.path.normpath(os.path.dirname(subpkg_toml))
                if is_excluded(subpkg):
                    logging.warning("Excluded member at %s", subpkg)
                    continue
                logging.debug("Loading workspace member %s in %s", member, root_dir)
                pkg_toml = load_toml(subpkg_toml)
                await get_dep_packages(pkg_toml, subpkg)
                packages[pkg_toml['package']['name']] = subpkg

    return packages
||||||
|
|
||||||
|
|
||||||
|
async def get_git_repo_sources(url, commit, tarball=False):
    """Build the flatpak-builder source entry for one git checkout.

    With tarball=True an 'archive' source (downloaded and hashed up front)
    is emitted instead of a 'git' source. Returns a single-element list.
    """
    dest = f'{GIT_CACHE}/{git_repo_name(url, commit)}'
    if tarball:
        tarball_url = get_git_tarball(url, commit)
        source = {
            'type': 'archive',
            'archive-type': 'tar-gzip',
            'url': tarball_url,
            'sha256': await get_remote_sha256(tarball_url),
            'dest': dest,
        }
    else:
        source = {
            'type': 'git',
            'url': url,
            'commit': commit,
            'dest': dest,
        }
    return [source]
||||||
|
|
||||||
|
|
||||||
|
async def get_git_package_sources(package, git_repos):
    """Produce (sources, vendored-entry) for one git-sourced Cargo.lock package.

    *git_repos* is a shared cache mapping canonical repo URL to
    ``{'commits': {commit: {pkg: path}}, 'lock': asyncio.Lock()}``; the lock
    ensures each repo/commit is cloned and scanned only once even when many
    packages from the same repo are processed concurrently.
    """
    name = package['name']
    source = package['source']
    # Cargo.lock encodes the pinned commit in the URL fragment.
    commit = urlparse(source).fragment
    assert commit, 'The commit needs to be indicated in the fragement part'
    canonical = canonical_url(source)
    repo_url = canonical.geturl()

    git_repo = git_repos.setdefault(repo_url, {
        'commits': {},
        'lock': asyncio.Lock(),
    })
    async with git_repo['lock']:
        if commit not in git_repo['commits']:
            git_repo['commits'][commit] = await get_git_repo_packages(repo_url, commit)

    # Cargo source-replacement entry pointing this git source at the vendor dir.
    cargo_vendored_entry = {
        repo_url: {
            'git': repo_url,
            'replace-with': VENDORED_SOURCES,
        }
    }
    # The URL query carries at most one of rev/tag/branch; mirror it in the
    # replacement entry so cargo accepts the substitution.
    rev = parse_qs(urlparse(source).query).get('rev')
    tag = parse_qs(urlparse(source).query).get('tag')
    branch = parse_qs(urlparse(source).query).get('branch')
    if rev:
        assert len(rev) == 1
        cargo_vendored_entry[repo_url]['rev'] = rev[0]
    elif tag:
        assert len(tag) == 1
        cargo_vendored_entry[repo_url]['tag'] = tag[0]
    elif branch:
        assert len(branch) == 1
        cargo_vendored_entry[repo_url]['branch'] = branch[0]

    logging.info("Adding package %s from %s", name, repo_url)
    pkg_subpath = git_repo['commits'][commit][name]
    pkg_repo_dir = os.path.join(GIT_CACHE, git_repo_name(repo_url, commit), pkg_subpath)
    git_sources = [
        {
            # Copy the crate out of the cached checkout into the vendor dir.
            'type': 'shell',
            'commands': [
                f'cp -r --reflink=auto "{pkg_repo_dir}" "{CARGO_CRATES}/{name}"'
            ],
        },
        {
            # Empty checksum file: git crates are not checksummed by cargo.
            'type': 'inline',
            'contents': json.dumps({'package': None, 'files': {}}),
            'dest': f'{CARGO_CRATES}/{name}', #-{version}',
            'dest-filename': '.cargo-checksum.json',
        }
    ]

    return (git_sources, cargo_vendored_entry)
||||||
|
|
||||||
|
|
||||||
|
async def get_package_sources(package, cargo_lock, git_repos):
    """Produce (sources, vendored-entry) for one Cargo.lock package, or None.

    Git dependencies are delegated to get_git_package_sources(); crates.io
    packages become an archive download plus a .cargo-checksum.json stub.
    Returns None for packages without a source (workspace-local crates) or
    without a resolvable checksum.
    """
    metadata = cargo_lock.get('metadata')
    name = package['name']
    version = package['version']

    if 'source' not in package:
        logging.debug('%s has no source', name)
        return
    source = package['source']

    if source.startswith('git+'):
        return await get_git_package_sources(package, git_repos)

    # Older lockfile versions keep checksums in [metadata]; newer ones put
    # a 'checksum' key directly on the package entry.
    key = f'checksum {name} {version} ({source})'
    if metadata is not None and key in metadata:
        checksum = metadata[key]
    elif 'checksum' in package:
        checksum = package['checksum']
    else:
        logging.warning(f'{name} doesn\'t have checksum')
        return
    crate_sources = [
        {
            'type': 'archive',
            'archive-type': 'tar-gzip',
            'url': f'{CRATES_IO}/{name}/{name}-{version}.crate',
            'sha256': checksum,
            'dest': f'{CARGO_CRATES}/{name}-{version}',
        },
        {
            # Checksum stub so the offline vendor dir passes cargo's check.
            'type': 'inline',
            'contents': json.dumps({'package': checksum, 'files': {}}),
            'dest': f'{CARGO_CRATES}/{name}-{version}',
            'dest-filename': '.cargo-checksum.json',
        },
    ]
    return (crate_sources, {'crates-io': {'replace-with': VENDORED_SOURCES}})
||||||
|
|
||||||
|
|
||||||
|
async def generate_sources(cargo_lock, git_tarballs=False):
    """Turn a parsed Cargo.lock into a flatpak-builder sources list.

    Processes all packages concurrently, then emits git-repo sources,
    per-package sources, and finally an inline cargo config that redirects
    crates.io and all git sources to the vendored directory.
    """
    # Shape of the shared git_repos cache:
    # {
    #     "git-repo-url": {
    #         "lock": asyncio.Lock(),
    #         "commits": {
    #             "commit-hash": {
    #                 "package-name": "./relative/package/path"
    #             }
    #         }
    #     }
    # }
    git_repos = {}
    sources = []
    package_sources = []
    cargo_vendored_sources = {
        VENDORED_SOURCES: {'directory': f'{CARGO_CRATES}'},
    }

    # Fan out one coroutine per locked package; get_package_sources returns
    # None for entries that need no download.
    pkg_coros = [get_package_sources(p, cargo_lock, git_repos) for p in cargo_lock['package']]
    for pkg in await asyncio.gather(*pkg_coros):
        if pkg is None:
            continue
        else:
            pkg_sources, cargo_vendored_entry = pkg
            package_sources.extend(pkg_sources)
            cargo_vendored_sources.update(cargo_vendored_entry)

    logging.debug('Adding collected git repos:\n%s', json.dumps(list(git_repos), indent=4))
    git_repo_coros = []
    for git_url, git_repo in git_repos.items():
        for git_commit in git_repo['commits']:
            git_repo_coros.append(get_git_repo_sources(git_url, git_commit, git_tarballs))
    # Each coroutine yields a single-element list; flatten them.
    sources.extend(sum(await asyncio.gather(*git_repo_coros), []))

    sources.extend(package_sources)

    logging.debug('Vendored sources:\n%s', json.dumps(cargo_vendored_sources, indent=4))
    # Inline cargo config ($CARGO_HOME/config) enabling source replacement.
    sources.append({
        'type': 'inline',
        'contents': toml.dumps({
            'source': cargo_vendored_sources,
        }),
        'dest': CARGO_HOME,
        'dest-filename': 'config'
    })
    return sources
||||||
|
|
||||||
|
|
||||||
|
def main():
    """CLI entry point: read a Cargo.lock and write flatpak-builder sources JSON."""
    parser = argparse.ArgumentParser()
    parser.add_argument('cargo_lock', help='Path to the Cargo.lock file')
    parser.add_argument('-o', '--output', required=False, help='Where to write generated sources')
    parser.add_argument('-t', '--git-tarballs', action='store_true', help='Download git repos as tarballs')
    parser.add_argument('-d', '--debug', action='store_true')
    args = parser.parse_args()
    # Conventional default filename when -o is not given.
    outfile = args.output if args.output is not None else 'generated-sources.json'
    loglevel = logging.DEBUG if args.debug else logging.INFO
    logging.basicConfig(level=loglevel)

    generated_sources = asyncio.run(generate_sources(load_toml(args.cargo_lock),
                                                     git_tarballs=args.git_tarballs))
    # json.dump with default ensure_ascii emits pure ASCII, but pin the file
    # encoding explicitly anyway.
    with open(outfile, 'w', encoding='utf-8') as out:
        json.dump(generated_sources, out, indent=4, sort_keys=False)


if __name__ == '__main__':
    main()
41
flatpak/org.xetibo.ReSet.json
Normal file
41
flatpak/org.xetibo.ReSet.json
Normal file
|
@@ -0,0 +1,41 @@
|
||||||
|
{
|
||||||
|
"app-id": "org.xetibo.ReSet",
|
||||||
|
"runtime": "org.gnome.Platform",
|
||||||
|
"runtime-version": "45",
|
||||||
|
"sdk": "org.gnome.Sdk",
|
||||||
|
"sdk-extensions": ["org.freedesktop.Sdk.Extension.rust-stable"],
|
||||||
|
"command": "reset",
|
||||||
|
"finish-args": [
|
||||||
|
"--socket=session-bus",
|
||||||
|
"--share=ipc",
|
||||||
|
"--socket=fallback-x11",
|
||||||
|
"--socket=wayland",
|
||||||
|
"--device=dri"
|
||||||
|
],
|
||||||
|
"build-options": {
|
||||||
|
"append-path": "/usr/lib/sdk/rust-stable/bin"
|
||||||
|
},
|
||||||
|
"modules": [
|
||||||
|
{
|
||||||
|
"name": "reset",
|
||||||
|
"buildsystem": "simple",
|
||||||
|
"build-options": {
|
||||||
|
"env": {
|
||||||
|
"CARGO_HOME": "/run/build/reset/cargo"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"build-commands": [
|
||||||
|
"cargo --offline fetch --manifest-path Cargo.toml --verbose",
|
||||||
|
"cargo --offline build --release --verbose",
|
||||||
|
"install -Dm755 ./target/release/reset -t /app/bin/"
|
||||||
|
],
|
||||||
|
"sources": [
|
||||||
|
{
|
||||||
|
"type": "dir",
|
||||||
|
"path": ".."
|
||||||
|
},
|
||||||
|
"cargo-sources.json"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
Loading…
Reference in a new issue