diff --git a/node/flatpak_node_generator/flatpak-yarn.js b/node/flatpak_node_generator/flatpak-yarn.js
new file mode 100644
index 00000000..7e987661
--- /dev/null
+++ b/node/flatpak_node_generator/flatpak-yarn.js
@@ -0,0 +1,262 @@
+const PackageManager = {
+  Yarn1: `Yarn Classic`,
+  Yarn2: `Yarn`,
+  Npm: `npm`,
+  Pnpm: `pnpm`,
+}
+
+module.exports = {
+  name: `flatpak-builder`,
+  factory: require => {
+    const { BaseCommand } = require(`@yarnpkg/cli`);
+    const { Configuration, Manifest, scriptUtils, structUtils, tgzUtils, execUtils, miscUtils, hashUtils } = require('@yarnpkg/core');
+    const { Filename, ZipFS, npath, ppath, PortablePath, xfs } = require('@yarnpkg/fslib');
+    const { getLibzipPromise } = require('@yarnpkg/libzip');
+    const { gitUtils } = require('@yarnpkg/plugin-git');
+    const { PassThrough, Readable, Writable } = require('stream');
+    const { Command, Option } = require(`clipanion`);
+    const { YarnVersion } = require('@yarnpkg/core');
+    const fs = require('fs');
+
+    // from https://github.com/yarnpkg/berry/blob/%40yarnpkg/shell/3.2.3/packages/plugin-essentials/sources/commands/set/version.ts#L194
+    async function setPackageManager(projectCwd) {
+      const bundleVersion = YarnVersion;
+
+      const manifest = (await Manifest.tryFind(projectCwd)) || new Manifest();
+
+      if (bundleVersion && miscUtils.isTaggedYarnVersion(bundleVersion)) {
+        manifest.packageManager = `yarn@${bundleVersion}`;
+        const data = {};
+        manifest.exportTo(data);
+
+        const path = ppath.join(projectCwd, Manifest.fileName);
+        const content = `${JSON.stringify(data, null, manifest.indent)}\n`;
+
+        await xfs.changeFilePromise(path, content, {
+          automaticNewlines: true,
+        });
+      }
+    }
+
+    // func from https://github.com/yarnpkg/berry/blob/%40yarnpkg/shell/3.2.3/packages/yarnpkg-core/sources/scriptUtils.ts#L215
+    async function prepareExternalProject(cwd, outputPath, {configuration, locator, stdout, yarn_v1, workspace = null}) {
+      const devirtualizedLocator = locator && structUtils.isVirtualLocator(locator)
+        ? structUtils.devirtualizeLocator(locator)
+        : locator;
+
+      const name = devirtualizedLocator
+        ? structUtils.stringifyLocator(devirtualizedLocator)
+        : `an external project`;
+
+      const stderr = stdout;
+
+      stdout.write(`Packing ${name} from sources\n`);
+
+      const packageManagerSelection = await scriptUtils.detectPackageManager(cwd);
+      let effectivePackageManager;
+      if (packageManagerSelection !== null) {
+        stdout.write(`Using ${packageManagerSelection.packageManager} for bootstrap. Reason: ${packageManagerSelection.reason}\n\n`);
+        effectivePackageManager = packageManagerSelection.packageManager;
+      } else {
+        stdout.write(`No package manager configuration detected; defaulting to Yarn\n\n`);
+        effectivePackageManager = PackageManager.Yarn2;
+      }
+      // pnpm is not handled separately; fall back to npm's pack workflow.
+      if (effectivePackageManager === PackageManager.Pnpm) {
+        effectivePackageManager = PackageManager.Npm;
+      }
+
+      const workflows = new Map([
+        [PackageManager.Yarn1, async () => {
+          const workspaceCli = workspace !== null
+            ? [`workspace`, workspace]
+            : [];
+
+          await setPackageManager(cwd);
+
+          await Configuration.updateConfiguration(cwd, {
+            yarnPath: yarn_v1,
+          });
+
+          await xfs.appendFilePromise(ppath.join(cwd, `.npmignore`), `/.yarn\n`);
+
+          const pack = await execUtils.pipevp(`yarn`, [...workspaceCli, `pack`, `--filename`, npath.fromPortablePath(outputPath)], {cwd, stdout, stderr});
+          if (pack.code !== 0)
+            return pack.code;
+
+          return 0;
+        }],
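+        // Yarn Berry: make sure a lockfile exists (an empty one is enough)
+        // before invoking `yarn pack` on the cloned project.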
+        [PackageManager.Yarn2, async () => {
+          const workspaceCli = workspace !== null
+            ? [`workspace`, workspace]
+            : [];
+
+          const lockfilePath = ppath.join(cwd, Filename.lockfile);
+          if (!(await xfs.existsPromise(lockfilePath)))
+            await xfs.writeFilePromise(lockfilePath, ``);
+
+          const pack = await execUtils.pipevp(`yarn`, [...workspaceCli, `pack`, `--filename`, npath.fromPortablePath(outputPath)], {cwd, stdout, stderr});
+          if (pack.code !== 0)
+            return pack.code;
+
+          return 0;
+        }],
+        [PackageManager.Npm, async () => {
+          const workspaceCli = workspace !== null
+            ? [`--workspace`, workspace]
+            : [];
+
+          const packStream = new PassThrough();
+          const packPromise = miscUtils.bufferStream(packStream);
+
+          const pack = await execUtils.pipevp(`npm`, [`pack`, `--silent`, ...workspaceCli], {cwd, stdout: packStream, stderr});
+          if (pack.code !== 0)
+            return pack.code;
+
+          const packOutput = (await packPromise).toString().trim().replace(/^.*\n/s, ``);
+          const packTarget = ppath.resolve(cwd, npath.toPortablePath(packOutput));
+
+          await xfs.renamePromise(packTarget, outputPath);
+          return 0;
+        }],
+      ]);
+
+      const workflow = workflows.get(effectivePackageManager);
+      const code = await workflow();
+      if (code === 0 || typeof code === `undefined`)
+        return;
+      else
+        throw new Error(`Packing the package failed (exit code ${code})`);
+    }
+
+    class convertToZipCommand extends BaseCommand {
+      static paths = [[`convertToZip`]];
+      yarn_v1 = Option.String({required: true});
+
+      async execute() {
+        const configuration = await Configuration.find(this.context.cwd, this.context.plugins);
+        //const lockfile = configuration.get('lockfileFilename');
+        const cacheFolder = configuration.get('cacheFolder');
+        const locatorFolder = `${cacheFolder}/locator`;
+
+        const compressionLevel = configuration.get(`compressionLevel`);
+        const stdout = this.context.stdout;
+        const gitChecksumPatches = []; // {name:, oriHash:, newHash:}
+
+        async function patchLockfileChecksum(cwd, configuration, patches) {
+          const lockfilePath = ppath.join(cwd, configuration.get(`lockfileFilename`));
+
+          let currentContent = ``;
+          try {
+            currentContent = await xfs.readFilePromise(lockfilePath, `utf8`);
+          } catch (error) {
+            // A missing lockfile simply means there is nothing to patch.
+          }
+          const newContent = patches.reduce((acc, item, i) => {
+            stdout.write(`patch '${item.name}' checksum:\n-${item.oriHash}\n+${item.newHash}\n\n\n`);
+            const regex = new RegExp(item.oriHash, "g");
+            return acc.replace(regex, item.newHash);
+          }, currentContent);
+
+          await xfs.writeFilePromise(lockfilePath, newContent);
+        }
+
+        stdout.write(`yarn cacheFolder: ${cacheFolder}\n`);
+
+        const convertToZip = async (tgz, target, opts) => {
+          const { compressionLevel, ...bufferOpts } = opts;
+          const zipFs = new ZipFS(target, {
+            create: true,
+            libzip: await getLibzipPromise(),
+            level: compressionLevel
+          });
+          const tgzBuffer = fs.readFileSync(tgz);
+          await tgzUtils.extractArchiveTo(tgzBuffer, zipFs, bufferOpts);
+          zipFs.saveAndClose();
+        }
+
+        stdout.write(`converting cache to zip\n`);
+
+        const files = fs.readdirSync(locatorFolder);
+        const tasks = [];
+        for (const i in files) {
+          const file = `${files[i]}`;
+          let tgzFile = `${locatorFolder}/${file}`;
+          const match = file.match(/([^-]+)-([^.]{1,10})\.(tgz|git)$/);
+          if (!match) {
+            stdout.write(`ignore ${file}\n`);
+            continue;
+          }
+          let resolution, locator;
+          const entry_type = match[3];
+          const sha = match[2];
+          let checksum;
+
+          if (entry_type === 'tgz') {
+            resolution = Buffer.from(match[1], 'base64').toString();
+            locator = structUtils.parseLocator(resolution, true);
+          }
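+          // `.git` cache entries are small JSON descriptors written by the
+          // Python generator ({repo_dir_rel, resolution, checksum}); the
+          // repository itself has already been cloned by flatpak-builder, so
+          // pack it into a tarball before converting it like a regular package.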
+          else if (entry_type === 'git') {
+            const gitJson = JSON.parse(fs.readFileSync(tgzFile, 'utf8'));
+
+            resolution = gitJson.resolution;
+            locator = structUtils.parseLocator(resolution, true);
+            checksum = gitJson.checksum;
+
+            const repoPathRel = gitJson.repo_dir_rel;
+            const cloneTarget = `${cacheFolder}/${repoPathRel}`;
+
+            const repoUrlParts = gitUtils.splitRepoUrl(locator.reference);
+            const packagePath = ppath.join(cloneTarget, `package.tgz`);
+
+            await prepareExternalProject(cloneTarget, packagePath, {
+              configuration: configuration,
+              stdout,
+              workspace: repoUrlParts.extra.workspace,
+              locator,
+              yarn_v1: this.yarn_v1,
+            });
+
+            tgzFile = packagePath;
+          }
+          const filename = `${structUtils.slugifyLocator(locator)}-${sha}.zip`;
+          const targetFile = `${cacheFolder}/${filename}`;
+          tasks.push(async () => {
+            await convertToZip(tgzFile, targetFile, {
+              compressionLevel: compressionLevel,
+              prefixPath: `node_modules/${structUtils.stringifyIdent(locator)}`,
+              stripComponents: 1,
+            });
+
+            if (entry_type === 'git') {
+              // The zip built from a freshly packed checkout may not match the
+              // checksum recorded in the lockfile; rename it to its real
+              // checksum and remember the patch for the lockfile.
+              const file_checksum = await hashUtils.checksumFile(targetFile);
+
+              if (file_checksum !== checksum) {
+                const newSha = file_checksum.slice(0, 10);
+                const newTarget = `${cacheFolder}/${structUtils.slugifyLocator(locator)}-${newSha}.zip`;
+                fs.renameSync(targetFile, newTarget);
+
+                gitChecksumPatches.push({
+                  name: locator.name,
+                  oriHash: checksum,
+                  newHash: file_checksum,
+                });
+              }
+            }
+          });
+        }
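+        // Run the conversions in batches of 128 so only a bounded number of
+        // archives is processed concurrently.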
+        while (tasks.length) {
+          await Promise.all(tasks.splice(0, 128).map(t => t()));
+        }
+
+        await patchLockfileChecksum(this.context.cwd, configuration, gitChecksumPatches);
+        stdout.write(`converting finished\n`);
+      }
+    }
+
+    return {
+      commands: [
+        convertToZipCommand
+      ],
+    };
+  }
+};
+
diff --git a/node/flatpak_node_generator/manifest.py b/node/flatpak_node_generator/manifest.py
index 8435ec0b..4b0041ad 100644
--- a/node/flatpak_node_generator/manifest.py
+++ b/node/flatpak_node_generator/manifest.py
@@ -162,9 +162,18 @@ def add_data_source(self, data: Union[str, bytes], destination: Path) -> None:
         self._add_source_with_destination(source, destination, is_dir=False)
 
     def add_git_source(
-        self, url: str, commit: str, destination: Optional[Path] = None
+        self,
+        url: str,
+        commit: Optional[str] = None,
+        destination: Optional[Path] = None,
+        tag: Optional[str] = None,
     ) -> None:
-        source = {'type': 'git', 'url': url, 'commit': commit}
+        source = {'type': 'git', 'url': url}
+        # At least one ref must be given for the git source.
+        assert commit or tag
+        if commit:
+            source['commit'] = commit
+        if tag:
+            source['tag'] = tag
         self._add_source_with_destination(source, destination, is_dir=True)
 
     def add_script_source(self, commands: List[str], destination: Path) -> None:
diff --git a/node/flatpak_node_generator/providers/yarn.py b/node/flatpak_node_generator/providers/yarn.py
index e18c26e8..b9e93b68 100644
--- a/node/flatpak_node_generator/providers/yarn.py
+++ b/node/flatpak_node_generator/providers/yarn.py
@@ -1,6 +1,8 @@
 from pathlib import Path
-from typing import Any, Dict, Iterator, List, Optional, Tuple, Type
+from typing import Any, Dict, Iterator, List, NamedTuple, Optional, Tuple, Type
 
+import base64
+import json
 import os
 import re
 import shlex
@@ -10,6 +12,7 @@
 from ..integrity import Integrity
 from ..manifest import ManifestGenerator
 from ..package import GitSource, LocalSource, Package, PackageSource, ResolvedSource
+from ..requests import Requests
 from . import LockfileProvider, ModuleProvider, ProviderFactory, RCFileProvider
 from .npm import NpmRCFileProvider
 from .special import SpecialSourceProvider
@@ -28,6 +31,10 @@ class YarnLockfileProvider(LockfileProvider):
     _LOCAL_PKG_RE = re.compile(r'^(?:file|link):')
 
+    def __init__(self) -> None:
+        self.version = 1
+        self.cacheKey = ''
+
     @staticmethod
     def is_git_version(version: str) -> bool:
         for pattern in GIT_URL_PATTERNS:
@@ -65,8 +72,10 @@ def _iter_lines() -> Iterator[Tuple[int, str]]:
                 # to speed up parsing we can use something less robust, e.g.
                 # _key, _value = line.split(' ', 1)
                 # parent_entries[-1][self.unquote(_key)] = self.unquote(_value)
-                key, value = shlex.split(line)
-                parent_entries[-1][key] = value
+                key, *values = shlex.split(line)
+                if key.endswith(':'):
+                    key = key[:-1]
+                parent_entries[-1][key] = values[0] if len(values) == 1 else values
 
         return root_entry
 
@@ -77,7 +86,7 @@ def unquote(self, string: str) -> str:
         else:
             return string
 
-    def process_package(
+    def process_package_v1(
        self, lockfile: Path, name_line: str, entry: Dict[str, Any]
     ) -> Package:
         assert name_line and entry
@@ -102,9 +111,57 @@ def process_package(
             name=name, version=entry['version'], source=source, lockfile=lockfile
         )
 
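+    # Yarn Berry (v2+) lockfile entries are keyed by a comma-separated list of
+    # descriptors and carry `version`, `resolution`, `checksum` and `linkType`
+    # fields.  `patch:` and soft-link (workspace) entries are skipped, since
+    # yarn recreates those locally during the offline install.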
+    def process_package(
+        self, lockfile: Path, name_line: str, entry: Dict[str, Any]
+    ) -> Optional[Package]:
+        assert name_line and entry
+        name = self.unquote(name_line).split(',', 1)[0]
+        name, _ = name.rsplit('@', 1)
+
+        # Ignore patch: entries, they are regenerated by yarn itself.
+        if '@patch:' in name:
+            return None
+
+        if entry.get('linkType', None) == 'soft':
+            return None
+
+        version: str = entry['version']
+        resolution: str = entry['resolution']
+        resolved: str = f'resolution#{resolution}'
+        integrity: Integrity = Integrity(
+            algorithm='sha512', digest=entry.get('checksum', self.cacheKey)
+        )
+
+        source: PackageSource
+
+        if self.is_git_version(resolved):
+            source = self.parse_git_source(version=resolved)
+        else:
+            source = ResolvedSource(resolved=resolved, integrity=integrity)
+
+        return Package(name=name, version=version, source=source, lockfile=lockfile)
+
     def process_lockfile(self, lockfile: Path) -> Iterator[Package]:
-        for name_line, package in self.parse_lockfile(lockfile).items():
-            yield self.process_package(lockfile, name_line, package)
+        lock_dict: Dict[str, Any] = self.parse_lockfile(lockfile)
+        if '__metadata' in lock_dict:
+            metadata: Dict[str, Any] = lock_dict['__metadata']
+            self.version = int(metadata.get('version', 1))
+            assert self.version > 0
+            if self.version > 1:
+                self.cacheKey = metadata['cacheKey']
+
+            lock_dict.pop('__metadata')
+
+        if self.version == 1:
+            for name_line, package in lock_dict.items():
+                yield self.process_package_v1(lockfile, name_line, package)
+        else:
+            for name_line, package in lock_dict.items():
+                res_package: Optional[Package] = self.process_package(
+                    lockfile, name_line, package
+                )
+                if res_package:
+                    yield res_package
 
 
 class YarnRCFileProvider(RCFileProvider):
@@ -112,6 +169,25 @@ class YarnRCFileProvider(RCFileProvider):
 
 
 class YarnModuleProvider(ModuleProvider):
+    class Locator(NamedTuple):
+        scope: str
+        name: str
+        reference: str
+
+    _GIT_PROTOCOLS = ['commit', 'head', 'tag', 'semver']
+
+    class GitRepoUrlParts(NamedTuple):
+        repo: str
+        protocol: Optional[str]
+        request: str
+        extra: Optional[Dict[str, str]]
+
+    # From https://github.com/yarnpkg/berry/blob/%40yarnpkg/shell%2F3.1.0/packages/yarnpkg-core/sources/structUtils.ts#L412
+    _RESOLUTION_RE = re.compile(r'^(?:@([^/]+?)\/)?([^/]+?)(?:@(.+))$')
+    # From https://github.com/yarnpkg/berry/blob/%40yarnpkg/shell%2F3.1.0/packages/yarnpkg-core/sources/structUtils.ts#L462
+    _REFERENCE_RE = re.compile(
+        r'^([^#:]*:)?((?:(?!::)[^#])*)(?:#((?:(?!::).)*))?(?:::(.*))?$'
+    )
     # From https://github.com/yarnpkg/yarn/blob/v1.22.4/src/fetchers/tarball-fetcher.js
     _PACKAGE_TARBALL_URL_RE = re.compile(
         r'(?:(@[^/]+)(?:/|%2f))?[^/]+/(?:-|_attachments)/(?:@[^/]+/)?([^/]+)$'
     )
@@ -121,6 +197,10 @@ def __init__(self, gen: ManifestGenerator, special: SpecialSourceProvider) -> No
         self.gen = gen
         self.special_source_provider = special
         self.mirror_dir = self.gen.data_root / 'yarn-mirror'
+        self.mirror_berry_dir = self.mirror_dir / 'yarn-berry'
+        self.mirror_locator_dir = self.mirror_berry_dir / 'locator'
+        self.registry = 'https://registry.yarnpkg.com'
+        self.has_resolution = False
 
     def __exit__(
@@ -128,25 +208,148 @@ def __exit__(
         exc_value: Optional[BaseException],
         tb: Optional[types.TracebackType],
     ) -> None:
-        pass
+        self._finalize()
+
+    def get_resolution_from_resolved(self, resolved: str) -> str:
+        assert resolved.startswith('resolution#')
+        return resolved[len('resolution#') :]
+
+    def get_locator_url(self, locator: Locator) -> str:
+        if locator.scope:
+            return f'/@{locator.scope}%2f{locator.name}'
+        else:
+            return f'/{locator.name}'
+
+    def get_locator_from_resolution(self, resolution: str) -> Locator:
+        match = self._RESOLUTION_RE.match(resolution)
+        assert match
+        scope, name, ref = [s or '' for s in match.groups()]
+        return self.Locator(scope=scope, name=name, reference=ref)
+
+    def name_base64_locator(self, locator: Locator, resolution: str) -> str:
+        return f'{locator.name}-{base64.b64encode(resolution.encode()).decode()}'
+
+    # From https://github.com/yarnpkg/berry/blob/%40yarnpkg/shell%2F3.1.0/packages/plugin-git/sources/gitUtils.ts#L56
+    def parse_git_subsequent(self, url: str) -> GitRepoUrlParts:
+        repo, _, subsequent = url.partition('#')
+        protocol: Optional[str] = None
+        request: str = ''
+        extra: Dict[str, str] = {}
+        if not subsequent:
+            return self.GitRepoUrlParts(
+                repo=repo, protocol='head', request='HEAD', extra=None
+            )
+        if re.match(r'^[a-z]+=', subsequent):
+            queries = urllib.parse.parse_qs(subsequent)
+            for q in queries.keys():
+                if q in self._GIT_PROTOCOLS:
+                    protocol = q
+                    request = queries[q][0]
+                else:
+                    extra[q] = queries[q][-1]
+            if not request:
+                protocol, request = 'head', 'HEAD'
+            return self.GitRepoUrlParts(
+                repo=repo, protocol=protocol, request=request, extra=extra
+            )
+        else:
+            protocol, _, request = subsequent.partition(':')
+            if not request:
+                protocol, request = None, subsequent
+            return self.GitRepoUrlParts(
+                repo=repo, protocol=protocol, request=request, extra=None
+            )
+
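+    # Berry lockfiles record a checksum but no tarball URL, so the tarball
+    # location (and its integrity, for the manifest) is looked up in the npm
+    # registry metadata of the package.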
+    async def resolve_source(self, locator: Locator, version: str) -> ResolvedSource:
+        data_url = f'{self.registry}{self.get_locator_url(locator)}'
+        # NOTE: Not cachable, because this is an API call.
+        raw_data = await Requests.instance.read_all(data_url, cachable=False)
+        data = json.loads(raw_data)
+
+        assert 'versions' in data, f'{data_url} returned an invalid package index'
+
+        versions = data['versions']
+        assert (
+            version in versions
+        ), f'{locator.name} versions available are {", ".join(versions)}, not {version}'
+
+        dist = versions[version]['dist']
+        assert 'tarball' in dist, f'{locator.name}@{version} has no tarball in dist'
+
+        integrity: Integrity
+        if 'integrity' in dist:
+            integrity = Integrity.parse(dist['integrity'])
+        elif 'shasum' in dist:
+            integrity = Integrity.from_sha1(dist['shasum'])
+        else:
+            assert False, f'{locator.name}@{version} has no integrity in dist'
+
+        return ResolvedSource(resolved=dist['tarball'], integrity=integrity)
 
     async def generate_package(self, package: Package) -> None:
         source = package.source
         if isinstance(source, ResolvedSource):
-            integrity = await source.retrieve_integrity()
-            url_parts = urllib.parse.urlparse(source.resolved)
-            match = self._PACKAGE_TARBALL_URL_RE.search(url_parts.path)
-            if match is not None:
-                scope, filename = match.groups()
-                if scope:
-                    filename = f'{scope}-{filename}'
+            if source.resolved.startswith('resolution#'):
+                if not self.has_resolution:
+                    self.has_resolution = True
+                assert source.integrity, f'{source.resolved}'
+                resolution = self.get_resolution_from_resolved(source.resolved)
+                locator = self.get_locator_from_resolution(resolution)
+                if YarnLockfileProvider.is_git_version(locator.reference):
+                    filename = f'{self.name_base64_locator(locator, "git")}-{source.integrity.digest[:10]}.git'
+                    git_parts = self.parse_git_subsequent(locator.reference)
+                    repo_dir = self.gen.tmp_root / locator.name
+                    if git_parts.protocol == 'commit' or git_parts.protocol is None:
+                        self.gen.add_git_source(
+                            git_parts.repo,
+                            commit=git_parts.request,
+                            destination=repo_dir,
+                        )
+                    elif git_parts.protocol == 'tag':
+                        self.gen.add_git_source(
+                            git_parts.repo, tag=git_parts.request, destination=repo_dir
+                        )
+                    else:
+                        assert (
+                            False
+                        ), f'Unsupported git protocol: {git_parts.protocol}'
+                    repo_dir_rel = os.path.relpath(repo_dir, self.mirror_berry_dir)
+                    self.gen.add_data_source(
+                        json.dumps(
+                            {
+                                'repo_dir_rel': repo_dir_rel,
+                                'resolution': resolution,
+                                'checksum': source.integrity.digest,
+                            }
+                        ),
+                        destination=self.mirror_locator_dir / filename,
+                    )
+                else:
+                    filename = f'{self.name_base64_locator(locator, resolution)}-{source.integrity.digest[:10]}.tgz'
+                    resolved_source = await self.resolve_source(
+                        locator, package.version
+                    )
+                    assert resolved_source.integrity
+                    self.gen.add_url_source(
+                        resolved_source.resolved,
+                        resolved_source.integrity,
+                        self.mirror_locator_dir / filename,
+                    )
             else:
-                filename = os.path.basename(url_parts.path)
+                integrity = await source.retrieve_integrity()
+                url_parts = urllib.parse.urlparse(source.resolved)
+                match = self._PACKAGE_TARBALL_URL_RE.search(url_parts.path)
+                if match is not None:
+                    scope, filename = match.groups()
+                    if scope:
+                        filename = f'{scope}-{filename}'
+                else:
+                    filename = os.path.basename(url_parts.path)
 
-            self.gen.add_url_source(
-                source.resolved, integrity, self.mirror_dir / filename
-            )
+                self.gen.add_url_source(
+                    source.resolved, integrity, self.mirror_dir / filename
+                )
 
         elif isinstance(source, GitSource):
             repo_name = urllib.parse.urlparse(source.url).path.split('/')[-1]
@@ -170,6 +373,27 @@ async def generate_package(self, package: Package) -> None:
 
         await self.special_source_provider.generate_special_sources(package)
 
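+    # Only emitted when Berry-style resolutions were seen: install the Yarn
+    # plugin above and a setup script that points yarn at the offline mirror
+    # and runs `yarn convertToZip` to turn the mirrored tarballs into zips.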
+    def _finalize(self) -> None:
+        if not self.has_resolution:
+            return
+
+        with open(Path(__file__).parents[1] / 'flatpak-yarn.js', mode='r') as f:
+            yarn2_plugin_source = f.read()
+        js_dest = self.gen.data_root / 'flatpak-builder.js'
+        self.gen.add_data_source(yarn2_plugin_source, destination=js_dest)
+        script_dest = self.gen.data_root / 'yarn2-setup.sh'
+        self.gen.add_script_source(
+            [
+                'yarn config set enableTelemetry false',
+                'yarn config set enableNetwork false',
+                'yarn config set enableGlobalCache false',
+                f'yarn config set cacheFolder $FLATPAK_BUILDER_BUILDDIR/{self.mirror_berry_dir}',
+                f'yarn plugin import $FLATPAK_BUILDER_BUILDDIR/{js_dest}',
+                'yarn convertToZip $(which yarn)',
+            ],
+            destination=script_dest,
+        )
+
 
 class YarnProviderFactory(ProviderFactory):
     def __init__(self) -> None: