Skip to content

Commit

Permalink
Cache git dependencies as wheels (#7473)
Browse files Browse the repository at this point in the history
Currently, poetry install will clone, build and install every git
dependency when it's not present in the environment. This is OK for
developers' machines, but not OK for CI, where the environment is always
fresh, and installing git dependencies takes significant time on each CI
run, especially if the dependency has C extensions that need to be
built.

This commit builds a wheel for every git dependency that has a precise
reference hash in the lock file and is not required to be in editable mode,
stores that wheel in a cache dir and will install from it instead of
cloning the repository again.
  • Loading branch information
maksbotan authored and radoering committed Apr 9, 2023
1 parent dfb4904 commit fd706c8
Show file tree
Hide file tree
Showing 5 changed files with 205 additions and 31 deletions.
4 changes: 2 additions & 2 deletions src/poetry/installation/chef.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,8 +94,8 @@ def prepare(
return archive

if archive.is_dir():
tmp_dir = tempfile.mkdtemp(prefix="poetry-chef-")
return self._prepare(archive, Path(tmp_dir), editable=editable)
destination = output_dir or Path(tempfile.mkdtemp(prefix="poetry-chef-"))
return self._prepare(archive, destination=destination, editable=editable)

return self._prepare_sdist(archive, destination=output_dir)

Expand Down
39 changes: 34 additions & 5 deletions src/poetry/installation/executor.py
Original file line number Diff line number Diff line change
Expand Up @@ -529,7 +529,7 @@ def _install(self, operation: Install | Update) -> int:
cleanup_archive: bool = False
if package.source_type == "git":
archive = self._prepare_git_archive(operation)
cleanup_archive = True
cleanup_archive = operation.package.develop
elif package.source_type == "file":
archive = self._prepare_archive(operation)
elif package.source_type == "directory":
Expand Down Expand Up @@ -584,7 +584,9 @@ def _remove(self, package: Package) -> int:

raise

def _prepare_archive(self, operation: Install | Update) -> Path:
def _prepare_archive(
self, operation: Install | Update, *, output_dir: Path | None = None
) -> Path:
package = operation.package
operation_message = self.get_operation_message(operation)

Expand All @@ -603,20 +605,35 @@ def _prepare_archive(self, operation: Install | Update) -> Path:

self._populate_hashes_dict(archive, package)

return self._chef.prepare(archive, editable=package.develop)
return self._chef.prepare(
archive, editable=package.develop, output_dir=output_dir
)

def _prepare_git_archive(self, operation: Install | Update) -> Path:
from poetry.vcs.git import Git

package = operation.package
assert package.source_url is not None

if package.source_resolved_reference and not package.develop:
# Only cache git archives when we know precise reference hash,
# otherwise we might get stale archives
cached_archive = self._artifact_cache.get_cached_archive_for_git(
package.source_url,
package.source_resolved_reference,
package.source_subdirectory,
env=self._env,
)
if cached_archive is not None:
return cached_archive

operation_message = self.get_operation_message(operation)

message = (
f" <fg=blue;options=bold>•</> {operation_message}: <info>Cloning...</info>"
)
self._write(operation, message)

assert package.source_url is not None
source = Git.clone(
url=package.source_url,
source_root=self._env.path / "src",
Expand All @@ -627,10 +644,22 @@ def _prepare_git_archive(self, operation: Install | Update) -> Path:
original_url = package.source_url
package._source_url = str(source.path)

archive = self._prepare_archive(operation)
output_dir = None
if package.source_resolved_reference and not package.develop:
output_dir = self._artifact_cache.get_cache_directory_for_git(
original_url,
package.source_resolved_reference,
package.source_subdirectory,
)

archive = self._prepare_archive(operation, output_dir=output_dir)
package._source_url = original_url

if output_dir is not None and output_dir.is_dir():
# Mark directories with cached git packages, to distinguish from
# "normal" cache
(output_dir / ".created_from_git_dependency").touch()

return archive

def _install_directory_without_wheel_installer(
Expand Down
45 changes: 39 additions & 6 deletions src/poetry/utils/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -231,35 +231,70 @@ def get_cache_directory_for_link(self, link: Link) -> Path:
if link.subdirectory_fragment:
key_parts["subdirectory"] = link.subdirectory_fragment

return self._get_directory_from_hash(key_parts)

def _get_directory_from_hash(self, key_parts: object) -> Path:
key = hashlib.sha256(
json.dumps(
key_parts, sort_keys=True, separators=(",", ":"), ensure_ascii=True
).encode("ascii")
).hexdigest()

split_key = [key[:2], key[2:4], key[4:6], key[6:]]

return self._cache_dir.joinpath(*split_key)

def get_cache_directory_for_git(
    self, url: str, ref: str, subdirectory: str | None
) -> Path:
    """Return the cache directory for a git dependency.

    The directory is keyed on the repository URL and the resolved
    reference; an optional subdirectory is included in the key only
    when it is non-empty.
    """
    key_parts: dict[str, str] = {"url": url, "ref": ref}
    if subdirectory:
        key_parts["subdirectory"] = subdirectory
    return self._get_directory_from_hash(key_parts)

def get_cached_archive_for_link(
    self,
    link: Link,
    *,
    strict: bool,
    env: Env | None = None,
) -> Path | None:
    """Look up a previously cached archive for *link*, or ``None``.

    Delegates to the shared archive lookup using the link's cache
    directory; in strict mode only an exact filename match counts.
    """
    return self._get_cached_archive(
        self.get_cache_directory_for_link(link),
        strict=strict,
        filename=link.filename,
        env=env,
    )

def get_cached_archive_for_git(
    self, url: str, reference: str, subdirectory: str | None, env: Env
) -> Path | None:
    """Find a cached archive for a git dependency, if one was built.

    Git lookups are never strict: any compatible archive found in the
    dependency's cache directory may be returned.
    """
    directory = self.get_cache_directory_for_git(url, reference, subdirectory)
    return self._get_cached_archive(directory, strict=False, env=env)

def _get_cached_archive(
self,
cache_dir: Path,
*,
strict: bool,
filename: str | None = None,
env: Env | None = None,
) -> Path | None:
assert strict or env is not None
# implication "strict -> filename should not be None"
assert not strict or filename is not None

archives = self._get_cached_archives_for_link(link)
archives = self._get_cached_archives(cache_dir)
if not archives:
return None

candidates: list[tuple[float | None, Path]] = []

for archive in archives:
if strict:
# in strict mode return the original cached archive instead of the
# prioritized archive type.
if link.filename == archive.name:
if filename == archive.name:
return archive
continue

Expand All @@ -286,9 +321,7 @@ def get_cached_archive_for_link(

return min(candidates)[1]

def _get_cached_archives_for_link(self, link: Link) -> list[Path]:
cache_dir = self.get_cache_directory_for_link(link)

def _get_cached_archives(self, cache_dir: Path) -> list[Path]:
archive_types = ["whl", "tar.gz", "tar.bz2", "bz2", "zip"]
paths: list[Path] = []
for archive_type in archive_types:
Expand Down
Loading

0 comments on commit fd706c8

Please sign in to comment.