diff --git a/README b/README
index c97d2ab..60774f4 100644
--- a/README
+++ b/README
@@ -20,18 +20,22 @@ backend_packages = ["sendwave.pants_node"]
 ### Usage
 This plugin adds two new targets to pants,
-* node_library which is analogous to python library and should contain all javascript, css, html, svg, etc etc files that will need to be operated on by your npm command. You can include all files in one node_library target or add other node_library targets as dependencicies. Unfortunately this does not hook into the `pants tailor` command nor dependency tracking so you will need to manually specify dependencies or create single monster target
+* node_library: analogous to python_library; it must list all javascript, css, html, svg, etc. files in its `sources` field. You can include all files in one node_library target or add other node_library targets as dependencies. Unfortunately this does not hook into the `pants tailor` command or dependency inference, so you will need to manually specify dependencies or create a single monster target.
-* node_package, which should have a list of node_library dependencies, and a list of `artifact_paths`, and should be located in the same directory as your package.json & lock file, these will be automatically included in the build.
+* node_package, which should have a list of node_library dependencies and a list of `artifact_paths`; those paths will be extracted from the build chroot & included in the package output. NOTE: the package(-lock).json for your package must be included as a source file in one of the node_library targets your node_package target depends on (see the example below).
-The plugin will attempt to find your currently installed version of node and npm by searching your '/bin/`, `/usr/bin/` and `/usr/local/bin` paths as well as the value of the NVM_BIN environment variable.
+The plugin will attempt to find your currently installed versions of node and npm by searching your `/bin/` and `/usr/bin/` paths as well as the value of the NVM_BIN environment variable. This behavior is configurable in the `[node]` scope of your pants.toml & via command line options; run `pants help node` for more information.
-It will install all dependencies as specified by the package.json file and constrainted by the lock file.
-Then it will run the npm script specified by the "pants:build" key in the context of all node_library source files.
+It will install all dependencies as specified by the package.json file and constrained by the lock file (again: these must be included in a node_library target!).
+Then it will run the npm script specified by the "pants:build" key in the context of all node_library source files. NOTE: due to how symlinks are handled in pants process output_digests you will not be able to reference the symlinked executable - the "pants:build" script should use node directly to evaluate the appropriate javascript file.
+Example:
+instead of "pants:build": "nuxt build" use "pants:build": "node node_modules/nuxt/bin/nuxt.js"
+
+For more information see this GitHub comment: https://github.com/pantsbuild/pants/pull/15211#issuecomment-1135155501
 All files under the `artifact_paths` will then be output in the pants-distdir (default `dist/`).
-You may include the built files in a `docker()` build target by including them as dependencies as normal
+You may include the built files in a `docker()` build target by including the node_package targets as dependencies of the docker target.
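+
+For example, a minimal setup might look like the following (a sketch adapted from the test_webpack example in this repository; the target names and paths are illustrative). The node_package target should be defined in the same directory as your package.json:
+
+    node_library(
+        name="app_sources",
+        sources=["*.js", "*.css", "*.html", "package.json", "package-lock.json"],
+    )
+
+    node_package(
+        name="app_bundle",
+        dependencies=[":app_sources"],
+        artifact_paths=["public/bundle"],
+    )
+
+Running `pants package path/to:app_bundle` should then place everything under `public/bundle` into the dist directory. If you need to override the binary search behavior, a hypothetical pants.toml snippet might look like (option names here are assumed from the plugin's `[node]` options; check `pants help node`):
+
+    [node]
+    use_nvm = true
+    search_paths = ["/bin", "/usr/bin"]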
NOTE: @@ -40,7 +44,7 @@ Please make sure you have generated a package.lock file in order to have reprodu npm i --package-lock-only ### LICENSE -See COPYING for the text of the Apache License which this package is released under +See COPYING for the text of the Apache License, which governs this package. [0] https://www.pantsbuild.org/ diff --git a/pants.toml b/pants.toml index 4f29baf..020f730 100644 --- a/pants.toml +++ b/pants.toml @@ -15,13 +15,14 @@ backend_packages = [ "sendwave.pants_node", "sendwave.pants_docker" ] - [source] -root_patterns = [ - "/pants_plugins", - "/test_webpack", - ] +root_patterns = ["/pants_plugins", "/test_webpack"] + +[node] +use_nvm = true +[python] +interpreter_constraints = [">=3.9"] [anonymous-telemetry] enabled = true diff --git a/pants_plugins/sendwave/pants_node/package.py b/pants_plugins/sendwave/pants_node/package.py index bb4e73a..de0ae2d 100644 --- a/pants_plugins/sendwave/pants_node/package.py +++ b/pants_plugins/sendwave/pants_node/package.py @@ -1,64 +1,149 @@ +"""Rules to package node_package targets. + +These rules provide the functionality to collect node_library sources, +install npm dependencies, run a node script on the collected code, and +extract the output. The generated bundles/files can be used either as +normal `pants package` calls, or included in a docker container, by +making the node_package target a dependency of the docker target. +""" import logging -from dataclasses import dataclass from pathlib import PurePath - -from pants.core.goals.package import BuiltPackage, BuiltPackageArtifact, PackageFieldSet -from pants.core.util_rules.external_tool import (DownloadedExternalTool, - ExternalToolRequest) +from typing import Tuple +from dataclasses import dataclass +from pants.core.goals.package import (BuiltPackage, + BuiltPackageArtifact, + PackageFieldSet) from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest -from pants.core.util_rules.stripped_source_files import StrippedSourceFiles from pants.engine.environment import Environment, EnvironmentRequest -from pants.engine.fs import (AddPrefix, Digest, FileContent, MergeDigests, - PathGlobs, RemovePrefix, Snapshot) -from pants.engine.platform import Platform +from pants.engine.fs import (AddPrefix, Digest, DigestEntries, + CreateDigest, MergeDigests, RemovePrefix, + Snapshot, FileEntry) from pants.engine.process import (BinaryPathRequest, BinaryPaths, Process, ProcessResult) -from pants.engine.rules import Get, MultiGet, collect_rules, rule -from pants.engine.target import (Target, TransitiveTargets, +from pants.source.source_root import SourceRootsRequest, SourceRootsResult +from pants.engine.rules import Get, collect_rules, rule +from pants.engine.target import (TransitiveTargets, Address, TransitiveTargetsRequest) -from pants.engine.unions import UnionMembership, UnionRule -from .target import NodeLibrary, NodeLibrarySourcesField, NodeProjectFieldSet -from sendwave.pants_docker.docker_component import DockerComponent, DockerComponentFieldSet +from pants.engine.unions import UnionRule +from .target import NodeLibrarySourcesField, NodeProjectFieldSet +from sendwave.pants_docker.docker_component import (DockerComponent, + DockerComponentFieldSet) +from sendwave.pants_node.subsystems import NodeSubsystem + + logger = logging.getLogger(__name__) @dataclass(frozen=True) -class PackageFileRequest: - package_root: str +class StripSourceRoots: + """Request to strip the source root from every file in the given digest.""" + + digest: Digest @rule -async def 
get_npm_package_files(request: PackageFileRequest) -> Digest:
-    project_root = PurePath(request.package_root)
-    package_json_path = project_root.joinpath("package.json")
-    package_lock = project_root.joinpath("package-lock.json")
-    yarn_lock = project_root.joinpath("yarn.lock")
-    npm_shrinkwrap = project_root.joinpath("npm-shrinkwrap.json")
-
-    rooted_configs = await Get(
-        Digest,
-        PathGlobs(
-            [
-                str(package_json_path),
-                str(package_lock),
-                str(yarn_lock),
-                str(npm_shrinkwrap),
-            ]
+async def strip_source_roots(snapshot_to_strip: StripSourceRoots) -> Digest:
+    """Remove Source Root[0] from every item in the passed-in digest.
+
+    This can be useful since, for example, python code copied into a
+    docker container using the pants-docker plugin will already have
+    its source root stripped. So for the built files to be cleanly
+    layered into the docker container we should also strip the source
+    roots. There isn't a clean way to do that without using a
+    SourceFiles request, which I don't think we can create for the new
+    files. Anyway, this goes through and finds the root for each item
+    in a digest, and removes the source root.
+
+    [0] https://www.pantsbuild.org/v2.9/docs/source-roots
+    """
+    entries = await Get(DigestEntries, Digest, snapshot_to_strip.digest)
+    file_paths, dir_paths = [], []
+    for e in entries:
+        path = PurePath(e.path)
+        if isinstance(e, FileEntry):
+            file_paths.append(path)
+        else:
+            dir_paths.append(path)
+    root_result = await Get(SourceRootsResult,
+                            SourceRootsRequest(files=file_paths,
+                                               dirs=dir_paths))
+    roots = root_result.path_to_root
+    roots_to_entries = {root: [] for (files, root) in roots.items()}
+
+    # collect every output item in the digest by its source root
+    for entry in entries:
+        roots_to_entries[roots[PurePath(entry.path)]].append(entry)
+
+    stripped_digests = []
+    for root, entries in roots_to_entries.items():
+        # create a separate digest for each source root
+        digest = await Get(Digest, CreateDigest(entries))
+        # remove the source root from the digest
+        stripped_digests.append(
+            (await Get(Digest, RemovePrefix(digest, root.path)))
+        )
+    # merge the digests together
+    return await Get(Digest, MergeDigests(stripped_digests))
+
+
+@dataclass(frozen=True)
+class NPMPathRequest:
+    """Empty request to get an NPMPath.
+
+    Get requires an argument, but all our configuration is in the
+    NodeSubsystem which is injected separately. So this is just a type
+    marker to tell pants how to give us what we want.
+ """ + + pass + + +@dataclass(frozen=True) +class NPMPath: + """Path to npm executable + search path for launched processes.""" + + binary_path: str + search_paths: Tuple[str] + + +@rule +async def get_node_search_paths(request: NPMPathRequest, + node: NodeSubsystem) -> NPMPath: + """Build NPMPath object from NodeSubsystem configuration.""" + use_nvm = node.options.use_nvm + if use_nvm: + nvm_bin = await Get(Environment, EnvironmentRequest(["NVM_BIN"])) + if nvm_bin: + search_paths = (([nvm_bin["NVM_BIN"], *node.options.search_paths])) + else: + search_paths = tuple(node.options.search_paths) + + npm_paths = await Get( + BinaryPaths, + BinaryPathRequest( + binary_name="npm", + search_path=search_paths, ), ) - unrooted_configs = await Get(Digest, RemovePrefix(rooted_configs, project_root.name)) - return unrooted_configs + if not npm_paths.first_path: + raise ValueError("Could not find npm in: ({}) cannot create package" + .format(search_paths)) + return NPMPath(binary_path=npm_paths.first_path.path, + search_paths=search_paths) @dataclass(frozen=True) class NodeSourceFilesRequest: - package_address: str + """Get all tranisitvely dependent source files for given node package.""" + + package_address: Address @rule async def get_node_package_file_sources( - request: NodeSourceFilesRequest, -) -> StrippedSourceFiles: + request: NodeSourceFilesRequest +) -> SourceFiles: + """Transitively looks up all source files for the node package.""" transitive_targets = await Get( TransitiveTargets, TransitiveTargetsRequest([request.package_address]) ) @@ -67,74 +152,107 @@ async def get_node_package_file_sources( for t in transitive_targets.closure if t.has_field(NodeLibrarySourcesField) ] - source_files = await Get(StrippedSourceFiles, SourceFilesRequest(all_sources)) - return source_files + return await Get(SourceFiles, SourceFilesRequest(all_sources)) + + +def log_console_output(output: bytes) -> None: + """Log bytes from console output. + + also replaces double escaped newlines. + """ + logger.info(output.decode("utf-8").replace("\\n", "\n")) @rule async def get_node_package_digest(field_set: NodeProjectFieldSet) -> Digest: - artifact_paths = field_set.artifact_paths.value - package_files, source_files, nvm_bin = await MultiGet( - Get(Snapshot, PackageFileRequest(field_set.address.spec_path)), - Get(StrippedSourceFiles, NodeSourceFilesRequest(field_set.address)), - Get(Environment, EnvironmentRequest(["NVM_BIN"])), - ) + """Build & retrieve output from a node_package target. - build_context = await Get( - Snapshot, MergeDigests([source_files.snapshot.digest, package_files.digest]) - ) - search_path = [] - if nvm_bin: - search_path.append(nvm_bin.get("NVM_BIN")) - search_path.extend(["/bin", "/usr/bin", "/usr/local/bin"]) - npm_paths = await Get( - BinaryPaths, - BinaryPathRequest( - binary_name="npm", - search_path=search_path, - ), - ) - if not npm_paths.first_path: - raise ValueError("Could not find npm in path: {} cannot create package" - .format(":".join(search_path))) + This is the main function of the pants-node plugin. It evaluates + the following steps: + + 1) Fetch all source files for the target + + 2) Roots all the files at the definition of the node_package + target. This means that when we run npm it will be as if we are + running it in the directory the node_package target was defined. + + 3) Lookup the locations of installed npm using the NodeSubsytem + configuration + + 4) run npm install using the system npm, and copy the resulting + node modules. 
NOTE: due to [1] we disable symlinks when running + npm install. - npm_path = npm_paths.first_path.path + 5) run `npm run-scripts pants:build` in a build context created by + merging the stripped source files with the newly installed + node_modules directory. When done we extract everything from the + 'artifact_paths' field on the target as the package output. + + 6a) if an output_path is configured we add that to the generated files + + 6b) if an output path is not configured we add the package root + back to the newly created files and then strip the source roots + from the generated files (since these will generally be stripped + in other types of packaging and we would like the distributions + from this plugin to be overlaid onto those other package + (i.e. with the pants-docker integration) + """ + package_root = field_set.address.spec_path + artifact_paths = field_set.artifact_paths.value + source_files = await Get(SourceFiles, + NodeSourceFilesRequest(field_set.address)) + stripped_files = await Get(Snapshot, + RemovePrefix( + source_files.snapshot.digest, + package_root + )) + node_paths = await Get(NPMPath, NPMPathRequest()) + npm_path = node_paths.binary_path + search_path = ":".join(node_paths.search_paths) + logger.info("Using npm at {npm_path} ($PATH={search_paths})" + .format(npm_path=npm_path, + search_paths=node_paths.search_paths)) npm_install_result = await Get( ProcessResult, Process( - argv=[npm_paths.first_path.path, "install"], + argv=[npm_path, "install", "--no-bin-links"], output_directories=["./node_modules"], - input_digest=build_context.digest, - env={"PATH": ":".join(search_path)}, + input_digest=stripped_files.digest, + env={"PATH": search_path}, description="installing node project dependencies", ), ) - - logger.debug(npm_install_result.stdout) + log_console_output(npm_install_result.stdout) build_context = await Get( - Snapshot, MergeDigests([build_context.digest, npm_install_result.output_digest]) + Digest, MergeDigests([stripped_files.digest, + npm_install_result.output_digest]) ) - proc = await Get( + build_result = await Get( ProcessResult, Process( description="Running npm run-script pants:build", - argv=[npm_paths.first_path.path, "run-script", "pants:build"], - input_digest=build_context.digest, + argv=[npm_path, "run-script", "pants:build"], + input_digest=build_context, output_directories=artifact_paths, - env={"PATH": ":".join(search_path)}, + env={"PATH": search_path}, ), ) - logger.debug(proc.stdout) + log_console_output(build_result.stdout) if field_set.output_path and field_set.output_path.value is not None: - return await Get(Digest, AddPrefix(proc.output_digest, field_set.output_path.value)) - return proc.output_digest + return await Get(Digest, + AddPrefix(build_result.output_digest, + field_set.output_path.value)) + else: + output = await Get(Digest, AddPrefix(build_result.output_digest, + package_root)) + return await Get(Digest, StripSourceRoots(digest=output)) @rule async def node_project_package( - field_set: NodeProjectFieldSet, + field_set: NodeProjectFieldSet ) -> BuiltPackage: - """""" + """Build a node_package target into a BuiltPackage.""" package = await Get(Snapshot, NodeProjectFieldSet, field_set) return BuiltPackage( digest=package.digest, @@ -144,12 +262,17 @@ async def node_project_package( @rule async def node_project_docker( - field_set: NodeProjectFieldSet, + field_set: NodeProjectFieldSet ) -> DockerComponent: - """""" + """Build a node_package target into a DockerComponent. 
+
+    This allows files generated by the node process to be included in
+    the docker image.
+    """
+    package = await Get(Digest, NodeProjectFieldSet, field_set)
+    return DockerComponent(sources=package, commands=[])
+

 def rules():
     """Return the pants rules for this module."""
     return [
diff --git a/pants_plugins/sendwave/pants_node/register.py b/pants_plugins/sendwave/pants_node/register.py
index 0ccab10..d480087 100644
--- a/pants_plugins/sendwave/pants_node/register.py
+++ b/pants_plugins/sendwave/pants_node/register.py
@@ -1,3 +1,4 @@
+import sendwave.pants_node.subsystems as subsystems
 import sendwave.pants_node.package as package
 import sendwave.pants_node.target as target
@@ -6,6 +7,7 @@ def rules():
     return [
         *package.rules(),
         *target.rules(),
+        *subsystems.rules(),
     ]
diff --git a/pants_plugins/sendwave/pants_node/subsystems.py b/pants_plugins/sendwave/pants_node/subsystems.py
new file mode 100644
index 0000000..9d41ad5
--- /dev/null
+++ b/pants_plugins/sendwave/pants_node/subsystems.py
@@ -0,0 +1,49 @@
+"""Sendwave pants-node Options."""
+
+from pants.engine.rules import SubsystemRule
+from pants.option.subsystem import Subsystem
+
+
+class NodeSubsystem(Subsystem):
+    """Register plugin-specific configuration options.
+
+    These are used to control how the plugin will search for
+    executables, including the npm binary that builds the project. It
+    will also determine the PATH environment variable of the spawned
+    npm/node processes. So, if your build script shells out to any
+    other program on the machine (e.g. 'sh') make sure that binary's
+    location is included in the path.
+
+    There are two options:
+    - search_paths: a list of strings (paths) where we will search for
+      node binaries
+    - use_nvm: Boolean; if True the plugin will add the value of NVM_BIN to the
+      front of the search_path list
+    """
+
+    options_scope = "node"
+    help = "Node Options."
+
+    @classmethod
+    def register_options(cls, register):
+        """Register Sendwave pants-node options."""
+        super().register_options(register)
+        register(
+            "--search-paths",
+            type=list,
+            member_type=str,
+            default=["/bin", "/usr/bin/"],
+            help="Directories in which to search for node binaries."
+        )
+        register(
+            "--use-nvm",
+            type=bool,
+            default=True,
+            help="If true, the value of $NVM_BIN will be "
+                 "prepended to the search path."
+ ) + + +def rules(): + """Register the NodeSubsystem.""" + return [SubsystemRule(NodeSubsystem)] diff --git a/pants_plugins/sendwave/pants_node/target.py b/pants_plugins/sendwave/pants_node/target.py index 51ea2db..4f9717f 100644 --- a/pants_plugins/sendwave/pants_node/target.py +++ b/pants_plugins/sendwave/pants_node/target.py @@ -15,7 +15,7 @@ class NodeLibrarySourcesField(MultipleSourcesField): help = "File extensions that should be bundled by webpack" - default = ("*.js", "*.css", "*.html") + default = ("*.js", "*.css", "*.html", 'package.json', 'package-lock.json') class NodeLibrary(Target): diff --git a/test_webpack/BUILD b/test_webpack/BUILD index c8c1706..d0bc927 100644 --- a/test_webpack/BUILD +++ b/test_webpack/BUILD @@ -4,8 +4,7 @@ node_library(name='test_webpack', dependencies=['test_webpack/src:src']) node_package( name = 'test_bundle', dependencies=[":test_webpack"], - artifact_paths=["public/bundle"], - output_path="output", + artifact_paths=["public/bundle"] ) docker( diff --git a/test_webpack/package.json b/test_webpack/package.json index 7d75851..2bcba25 100644 --- a/test_webpack/package.json +++ b/test_webpack/package.json @@ -5,7 +5,7 @@ "private": "true", "scripts": { "test": "echo \"Error: no test specified\" && exit 1", - "pants:build": "webpack --config webpack.config.js" + "pants:build": "node node_modules/webpack/bin/webpack.js --config webpack.config.js" }, "keywords": [], "author": "",