Tom Pollard pushed to branch tpollard/896 at BuildStream / buildstream
Commits:
- a39c4767 by Daniel Silverstone at 2019-02-12T15:12:30Z
- cda03313 by Daniel Silverstone at 2019-02-12T15:12:30Z
- b6f08e1b by Daniel Silverstone at 2019-02-12T15:12:30Z
- 69ee11c6 by Daniel Silverstone at 2019-02-12T15:12:30Z
- 0928e570 by Daniel Silverstone at 2019-02-12T15:12:30Z
- 022a59f0 by Benjamin Schubert at 2019-02-12T16:14:05Z
- 352f4ad9 by Jonathan Maw at 2019-02-12T16:44:35Z
- 62396af9 by Jonathan Maw at 2019-02-12T16:44:35Z
- f1e9cb66 by Jonathan Maw at 2019-02-12T16:44:35Z
- e51116d5 by Jonathan Maw at 2019-02-12T17:59:25Z
- 478e5c47 by Valentin David at 2019-02-12T18:06:50Z
- 6de65306 by Valentin David at 2019-02-12T19:13:43Z
- 9b140fa0 by James Ennis at 2019-02-13T09:35:45Z
- 039d7c1d by James Ennis at 2019-02-13T09:35:45Z
- 7cb6be74 by James Ennis at 2019-02-13T09:35:45Z
- ddf642e1 by James Ennis at 2019-02-13T09:35:45Z
- 55c15f8f by James Ennis at 2019-02-13T09:35:45Z
- 6951cfc8 by James Ennis at 2019-02-13T09:35:45Z
- bf1933b6 by James Ennis at 2019-02-13T09:35:45Z
- ea6ff6da by James Ennis at 2019-02-13T09:35:45Z
- fcc79917 by James Ennis at 2019-02-13T09:35:45Z
- 77345317 by James Ennis at 2019-02-13T09:35:45Z
- 775ac472 by James Ennis at 2019-02-13T09:35:45Z
- f95d6ee8 by James Ennis at 2019-02-13T09:35:45Z
- 491937d7 by James Ennis at 2019-02-13T09:35:45Z
- 6ae41474 by James Ennis at 2019-02-13T09:35:45Z
- 8c6baf23 by James Ennis at 2019-02-13T10:48:21Z
- fa770070 by Tom Pollard at 2019-02-13T12:33:04Z
- 7483657b by Tom Pollard at 2019-02-13T12:33:04Z
- e14ccaf7 by Tom Pollard at 2019-02-13T12:33:04Z
21 changed files:
- NEWS
- buildstream/_artifactcache.py
- + buildstream/_artifactelement.py
- buildstream/_cas/cascache.py
- buildstream/_context.py
- buildstream/_exceptions.py
- buildstream/_frontend/app.py
- buildstream/_frontend/cli.py
- buildstream/_loader/loader.py
- buildstream/_loader/metaelement.py
- buildstream/_project.py
- buildstream/_stream.py
- buildstream/_yaml.py
- buildstream/data/userconfig.yaml
- buildstream/element.py
- tests/artifactcache/pull.py
- tests/artifactcache/push.py
- tests/frontend/completions.py
- tests/frontend/pull.py
- tests/integration/artifact.py
- tests/integration/build-tree.py → tests/integration/shellbuildtrees.py
Changes:
| ... | ... | @@ -126,6 +126,14 @@ buildstream 1.3.1 |
| 126 | 126 |
Providing a remote will limit build's pull/push remote actions to the given
|
| 127 | 127 |
remote specifically, ignoring those defined via user or project configuration.
|
| 128 | 128 |
|
| 129 |
+ o Artifacts can now be cached explicitly with an empty `build tree` when built.
|
|
| 130 |
+ Element types without a build-root were already cached with an empty build tree
|
|
| 131 |
+ directory; this can now be extended to all artifacts, or to successful artifacts only, to save on cache
|
|
| 132 |
+ overheads. The CLI main option '--cache-buildtrees' or the user configuration cache
|
|
| 133 |
+ group option 'cache-buildtrees' can be set as 'always', 'failure' or 'never', with
|
|
| 134 |
+ the default being 'always'. Note that, as the cache key for the artifact is independent of
|
|
| 135 |
+ the cached build tree input, it will remain unaltered; however, the availability of the
|
|
| 136 |
+ build tree content may differ.
|
|
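A minimal sketch of how the three documented policy values map to whether a build tree is stored; the helper name is hypothetical, only the 'always'/'failure'/'never' values come from the entry above:

    # Hypothetical helper illustrating the documented policy values
    def should_cache_buildtree(policy, build_succeeded):
        if policy == 'always':
            return True
        if policy == 'failure':
            # Keep the build tree only when the build failed
            return not build_succeeded
        return False  # 'never'

    assert should_cache_buildtree('failure', build_succeeded=False)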
| 129 | 137 |
|
| 130 | 138 |
=================
|
| 131 | 139 |
buildstream 1.1.5
|
| ... | ... | @@ -19,7 +19,6 @@ |
| 19 | 19 |
|
| 20 | 20 |
import multiprocessing
|
| 21 | 21 |
import os
|
| 22 |
-import string
|
|
| 23 | 22 |
from collections.abc import Mapping
|
| 24 | 23 |
|
| 25 | 24 |
from .types import _KeyStrength
|
| ... | ... | @@ -29,6 +28,7 @@ from . import utils |
| 29 | 28 |
from . import _yaml
|
| 30 | 29 |
|
| 31 | 30 |
from ._cas import CASRemote, CASRemoteSpec
|
| 31 |
+from .storage._casbaseddirectory import CasBasedDirectory
|
|
| 32 | 32 |
|
| 33 | 33 |
|
| 34 | 34 |
CACHE_SIZE_FILE = "cache_size"
|
| ... | ... | @@ -112,37 +112,6 @@ class ArtifactCache(): |
| 112 | 112 |
|
| 113 | 113 |
self._calculate_cache_quota()
|
| 114 | 114 |
|
| 115 |
- # get_artifact_fullname()
|
|
| 116 |
- #
|
|
| 117 |
- # Generate a full name for an artifact, including the
|
|
| 118 |
- # project namespace, element name and cache key.
|
|
| 119 |
- #
|
|
| 120 |
- # This can also be used as a relative path safely, and
|
|
| 121 |
- # will normalize parts of the element name such that only
|
|
| 122 |
- # digits, letters and some select characters are allowed.
|
|
| 123 |
- #
|
|
| 124 |
- # Args:
|
|
| 125 |
- # element (Element): The Element object
|
|
| 126 |
- # key (str): The element's cache key
|
|
| 127 |
- #
|
|
| 128 |
- # Returns:
|
|
| 129 |
- # (str): The relative path for the artifact
|
|
| 130 |
- #
|
|
| 131 |
- def get_artifact_fullname(self, element, key):
|
|
| 132 |
- project = element._get_project()
|
|
| 133 |
- |
|
| 134 |
- # Normalize ostree ref unsupported chars
|
|
| 135 |
- valid_chars = string.digits + string.ascii_letters + '-._'
|
|
| 136 |
- element_name = ''.join([
|
|
| 137 |
- x if x in valid_chars else '_'
|
|
| 138 |
- for x in element.normal_name
|
|
| 139 |
- ])
|
|
| 140 |
- |
|
| 141 |
- assert key is not None
|
|
| 142 |
- |
|
| 143 |
- # assume project and element names are not allowed to contain slashes
|
|
| 144 |
- return '{0}/{1}/{2}'.format(project.name, element_name, key)
|
|
| 145 |
- |
|
| 146 | 115 |
# setup_remotes():
|
| 147 | 116 |
#
|
| 148 | 117 |
# Sets up which remotes to use
|
| ... | ... | @@ -241,7 +210,7 @@ class ArtifactCache(): |
| 241 | 210 |
for key in (strong_key, weak_key):
|
| 242 | 211 |
if key:
|
| 243 | 212 |
try:
|
| 244 |
- ref = self.get_artifact_fullname(element, key)
|
|
| 213 |
+ ref = element.get_artifact_name(key)
|
|
| 245 | 214 |
|
| 246 | 215 |
self.cas.update_mtime(ref)
|
| 247 | 216 |
except CASError:
|
| ... | ... | @@ -521,7 +490,7 @@ class ArtifactCache(): |
| 521 | 490 |
# Returns: True if the artifact is in the cache, False otherwise
|
| 522 | 491 |
#
|
| 523 | 492 |
def contains(self, element, key):
|
| 524 |
- ref = self.get_artifact_fullname(element, key)
|
|
| 493 |
+ ref = element.get_artifact_name(key)
|
|
| 525 | 494 |
|
| 526 | 495 |
return self.cas.contains(ref)
|
| 527 | 496 |
|
| ... | ... | @@ -538,19 +507,21 @@ class ArtifactCache(): |
| 538 | 507 |
# Returns: True if the subdir exists & is populated in the cache, False otherwise
|
| 539 | 508 |
#
|
| 540 | 509 |
def contains_subdir_artifact(self, element, key, subdir):
|
| 541 |
- ref = self.get_artifact_fullname(element, key)
|
|
| 510 |
+ ref = element.get_artifact_name(key)
|
|
| 542 | 511 |
return self.cas.contains_subdir_artifact(ref, subdir)
|
| 543 | 512 |
|
| 544 | 513 |
# list_artifacts():
|
| 545 | 514 |
#
|
| 546 | 515 |
# List artifacts in this cache in LRU order.
|
| 547 | 516 |
#
|
| 517 |
+ # Args:
|
|
| 518 |
+ # glob (str): An optional glob expression to be used to list artifacts satisfying the glob
|
|
| 519 |
+ #
|
|
| 548 | 520 |
# Returns:
|
| 549 |
- # ([str]) - A list of artifact names as generated by
|
|
| 550 |
- # `ArtifactCache.get_artifact_fullname` in LRU order
|
|
| 521 |
+ # ([str]) - A list of artifact names in LRU order
|
|
| 551 | 522 |
#
|
| 552 |
- def list_artifacts(self):
|
|
| 553 |
- return self.cas.list_refs()
|
|
| 523 |
+ def list_artifacts(self, *, glob=None):
|
|
| 524 |
+ return self.cas.list_refs(glob=glob)
|
|
| 554 | 525 |
|
| 555 | 526 |
# remove():
|
| 556 | 527 |
#
|
| ... | ... | @@ -559,8 +530,7 @@ class ArtifactCache(): |
| 559 | 530 |
#
|
| 560 | 531 |
# Args:
|
| 561 | 532 |
# ref (artifact_name): The name of the artifact to remove (as
|
| 562 |
- # generated by
|
|
| 563 |
- # `ArtifactCache.get_artifact_fullname`)
|
|
| 533 |
+ # generated by `Element.get_artifact_name`)
|
|
| 564 | 534 |
#
|
| 565 | 535 |
# Returns:
|
| 566 | 536 |
# (int): The amount of space recovered in the cache, in bytes
|
| ... | ... | @@ -606,7 +576,7 @@ class ArtifactCache(): |
| 606 | 576 |
# Returns: path to extracted artifact
|
| 607 | 577 |
#
|
| 608 | 578 |
def extract(self, element, key, subdir=None):
|
| 609 |
- ref = self.get_artifact_fullname(element, key)
|
|
| 579 |
+ ref = element.get_artifact_name(key)
|
|
| 610 | 580 |
|
| 611 | 581 |
path = os.path.join(self.extractdir, element._get_project().name, element.normal_name)
|
| 612 | 582 |
|
| ... | ... | @@ -622,7 +592,7 @@ class ArtifactCache(): |
| 622 | 592 |
# keys (list): The cache keys to use
|
| 623 | 593 |
#
|
| 624 | 594 |
def commit(self, element, content, keys):
|
| 625 |
- refs = [self.get_artifact_fullname(element, key) for key in keys]
|
|
| 595 |
+ refs = [element.get_artifact_name(key) for key in keys]
|
|
| 626 | 596 |
|
| 627 | 597 |
self.cas.commit(refs, content)
|
| 628 | 598 |
|
| ... | ... | @@ -638,8 +608,8 @@ class ArtifactCache(): |
| 638 | 608 |
# subdir (str): A subdirectory to limit the comparison to
|
| 639 | 609 |
#
|
| 640 | 610 |
def diff(self, element, key_a, key_b, *, subdir=None):
|
| 641 |
- ref_a = self.get_artifact_fullname(element, key_a)
|
|
| 642 |
- ref_b = self.get_artifact_fullname(element, key_b)
|
|
| 611 |
+ ref_a = element.get_artifact_name(key_a)
|
|
| 612 |
+ ref_b = element.get_artifact_name(key_b)
|
|
| 643 | 613 |
|
| 644 | 614 |
return self.cas.diff(ref_a, ref_b, subdir=subdir)
|
| 645 | 615 |
|
| ... | ... | @@ -700,7 +670,7 @@ class ArtifactCache(): |
| 700 | 670 |
# (ArtifactError): if there was an error
|
| 701 | 671 |
#
|
| 702 | 672 |
def push(self, element, keys):
|
| 703 |
- refs = [self.get_artifact_fullname(element, key) for key in list(keys)]
|
|
| 673 |
+ refs = [element.get_artifact_name(key) for key in list(keys)]
|
|
| 704 | 674 |
|
| 705 | 675 |
project = element._get_project()
|
| 706 | 676 |
|
| ... | ... | @@ -738,7 +708,7 @@ class ArtifactCache(): |
| 738 | 708 |
# (bool): True if pull was successful, False if artifact was not available
|
| 739 | 709 |
#
|
| 740 | 710 |
def pull(self, element, key, *, progress=None, subdir=None, excluded_subdirs=None):
|
| 741 |
- ref = self.get_artifact_fullname(element, key)
|
|
| 711 |
+ ref = element.get_artifact_name(key)
|
|
| 742 | 712 |
|
| 743 | 713 |
project = element._get_project()
|
| 744 | 714 |
|
| ... | ... | @@ -850,11 +820,27 @@ class ArtifactCache(): |
| 850 | 820 |
# newkey (str): A new cache key for the artifact
|
| 851 | 821 |
#
|
| 852 | 822 |
def link_key(self, element, oldkey, newkey):
|
| 853 |
- oldref = self.get_artifact_fullname(element, oldkey)
|
|
| 854 |
- newref = self.get_artifact_fullname(element, newkey)
|
|
| 823 |
+ oldref = element.get_artifact_name(oldkey)
|
|
| 824 |
+ newref = element.get_artifact_name(newkey)
|
|
| 855 | 825 |
|
| 856 | 826 |
self.cas.link_ref(oldref, newref)
|
| 857 | 827 |
|
| 828 |
+ # get_artifact_logs():
|
|
| 829 |
+ #
|
|
| 830 |
+ # Get the logs of an existing artifact
|
|
| 831 |
+ #
|
|
| 832 |
+ # Args:
|
|
| 833 |
+ # ref (str): The ref of the artifact
|
|
| 834 |
+ #
|
|
| 835 |
+ # Returns:
|
|
| 836 |
+ # logsdir (CasBasedDirectory): A CasBasedDirectory containing the artifact's logs
|
|
| 837 |
+ #
|
|
| 838 |
+ def get_artifact_logs(self, ref):
|
|
| 839 |
+ descend = ["logs"]
|
|
| 840 |
+ cache_id = self.cas.resolve_ref(ref, update_mtime=True)
|
|
| 841 |
+ vdir = CasBasedDirectory(self.cas, cache_id).descend(descend)
|
|
| 842 |
+ return vdir
|
|
| 843 |
+ |
|
| 858 | 844 |
################################################
|
| 859 | 845 |
# Local Private Methods #
|
| 860 | 846 |
################################################
|
| 1 |
+#
|
|
| 2 |
+# Copyright (C) 2019 Bloomberg Finance LP
|
|
| 3 |
+#
|
|
| 4 |
+# This program is free software; you can redistribute it and/or
|
|
| 5 |
+# modify it under the terms of the GNU Lesser General Public
|
|
| 6 |
+# License as published by the Free Software Foundation; either
|
|
| 7 |
+# version 2 of the License, or (at your option) any later version.
|
|
| 8 |
+#
|
|
| 9 |
+# This library is distributed in the hope that it will be useful,
|
|
| 10 |
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
| 11 |
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
| 12 |
+# Lesser General Public License for more details.
|
|
| 13 |
+#
|
|
| 14 |
+# You should have received a copy of the GNU Lesser General Public
|
|
| 15 |
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
|
|
| 16 |
+#
|
|
| 17 |
+# Authors:
|
|
| 18 |
+# James Ennis <james.ennis@codethink.co.uk>
|
|
| 19 |
+from . import Element
|
|
| 20 |
+from . import _cachekey
|
|
| 21 |
+from ._exceptions import ArtifactElementError
|
|
| 22 |
+from ._loader.metaelement import MetaElement
|
|
| 23 |
+ |
|
| 24 |
+ |
|
| 25 |
+# ArtifactElement()
|
|
| 26 |
+#
|
|
| 27 |
+# Object to be used for directly processing an artifact
|
|
| 28 |
+#
|
|
| 29 |
+# Args:
|
|
| 30 |
+# context (Context): The Context object
|
|
| 31 |
+# ref (str): The artifact ref
|
|
| 32 |
+#
|
|
| 33 |
+class ArtifactElement(Element):
|
|
| 34 |
+ def __init__(self, context, ref):
|
|
| 35 |
+ _, element, key = verify_artifact_ref(ref)
|
|
| 36 |
+ |
|
| 37 |
+ self._ref = ref
|
|
| 38 |
+ self._key = key
|
|
| 39 |
+ |
|
| 40 |
+ project = context.get_toplevel_project()
|
|
| 41 |
+ meta = MetaElement(project, element) # NOTE element has no .bst suffix
|
|
| 42 |
+ plugin_conf = None
|
|
| 43 |
+ |
|
| 44 |
+ super().__init__(context, project, meta, plugin_conf)
|
|
| 45 |
+ |
|
| 46 |
+ # Override Element.get_artifact_name()
|
|
| 47 |
+ def get_artifact_name(self, key=None):
|
|
| 48 |
+ return self._ref
|
|
| 49 |
+ |
|
| 50 |
+ # Dummy configure method
|
|
| 51 |
+ def configure(self, node):
|
|
| 52 |
+ pass
|
|
| 53 |
+ |
|
| 54 |
+ # Dummy preflight method
|
|
| 55 |
+ def preflight(self):
|
|
| 56 |
+ pass
|
|
| 57 |
+ |
|
| 58 |
+ # Override Element._calculate_cache_key
|
|
| 59 |
+ def _calculate_cache_key(self, dependencies=None):
|
|
| 60 |
+ return self._key
|
|
| 61 |
+ |
|
| 62 |
+ |
|
| 63 |
+# verify_artifact_ref()
|
|
| 64 |
+#
|
|
| 65 |
+# Verify that a ref string matches the format of an artifact
|
|
| 66 |
+#
|
|
| 67 |
+# Args:
|
|
| 68 |
+# ref (str): The artifact ref
|
|
| 69 |
+#
|
|
| 70 |
+# Returns:
|
|
| 71 |
+# project (str): The project's name
|
|
| 72 |
+# element (str): The element's name
|
|
| 73 |
+# key (str): The cache key
|
|
| 74 |
+#
|
|
| 75 |
+# Raises:
|
|
| 76 |
+# ArtifactElementError if the ref string does not match
|
|
| 77 |
+# the expected format
|
|
| 78 |
+#
|
|
| 79 |
+def verify_artifact_ref(ref):
|
|
| 80 |
+ try:
|
|
| 81 |
+ project, element, key = ref.split('/', 2) # This will raise a ValueError if unable to split
|
|
| 82 |
+ # Explicitly raise a ValueError if the key length is not as expected
|
|
| 83 |
+ if len(key) != len(_cachekey.generate_key({})):
|
|
| 84 |
+ raise ValueError
|
|
| 85 |
+ except ValueError:
|
|
| 86 |
+ raise ArtifactElementError("Artifact: {} is not of the expected format".format(ref))
|
|
| 87 |
+ |
|
| 88 |
+ return project, element, key
|
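A standalone sketch of the check above; the 64-character key length is an assumption standing in for len(_cachekey.generate_key({})), and the plain ValueError stands in for ArtifactElementError:

    def check_ref(ref):
        try:
            project, element, key = ref.split('/', 2)
            if len(key) != 64:  # assumed key length
                raise ValueError
        except ValueError:
            raise ValueError("Artifact: {} is not of the expected format".format(ref))
        return project, element, key

    print(check_ref('myproject/hello/' + 'a' * 64))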
| ... | ... | @@ -24,6 +24,7 @@ import stat |
| 24 | 24 |
import errno
|
| 25 | 25 |
import uuid
|
| 26 | 26 |
import contextlib
|
| 27 |
+from fnmatch import fnmatch
|
|
| 27 | 28 |
|
| 28 | 29 |
import grpc
|
| 29 | 30 |
|
| ... | ... | @@ -376,9 +377,7 @@ class CASCache(): |
| 376 | 377 |
for chunk in iter(lambda: tmp.read(_BUFFER_SIZE), b""):
|
| 377 | 378 |
h.update(chunk)
|
| 378 | 379 |
else:
|
| 379 |
- tmp = stack.enter_context(utils._tempnamedfile(dir=self.tmpdir))
|
|
| 380 |
- # Set mode bits to 0644
|
|
| 381 |
- os.chmod(tmp.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
|
|
| 380 |
+ tmp = stack.enter_context(self._temporary_object())
|
|
| 382 | 381 |
|
| 383 | 382 |
if path:
|
| 384 | 383 |
with open(path, 'rb') as f:
|
| ... | ... | @@ -474,22 +473,35 @@ class CASCache(): |
| 474 | 473 |
#
|
| 475 | 474 |
# List refs in Least Recently Modified (LRM) order.
|
| 476 | 475 |
#
|
| 476 |
+ # Args:
|
|
| 477 |
+ # glob (str) - An optional glob expression to be used to list refs satisfying the glob
|
|
| 478 |
+ #
|
|
| 477 | 479 |
# Returns:
|
| 478 | 480 |
# (list) - A list of refs in LRM order
|
| 479 | 481 |
#
|
| 480 |
- def list_refs(self):
|
|
| 482 |
+ def list_refs(self, *, glob=None):
|
|
| 481 | 483 |
# string of: /path/to/repo/refs/heads
|
| 482 | 484 |
ref_heads = os.path.join(self.casdir, 'refs', 'heads')
|
| 485 |
+ path = ref_heads
|
|
| 486 |
+ |
|
| 487 |
+ if glob is not None:
|
|
| 488 |
+ globdir = os.path.dirname(glob)
|
|
| 489 |
+ if not any(c in "*?[" for c in globdir):
|
|
| 490 |
+ # path prefix contains no globbing characters so
|
|
| 491 |
+ # append it to the walk root to optimise the os.walk()
|
|
| 492 |
+ path = os.path.join(ref_heads, globdir)
|
|
| 483 | 493 |
|
| 484 | 494 |
refs = []
|
| 485 | 495 |
mtimes = []
|
| 486 | 496 |
|
| 487 |
- for root, _, files in os.walk(ref_heads):
|
|
| 497 |
+ for root, _, files in os.walk(path):
|
|
| 488 | 498 |
for filename in files:
|
| 489 | 499 |
ref_path = os.path.join(root, filename)
|
| 490 |
- refs.append(os.path.relpath(ref_path, ref_heads))
|
|
| 491 |
- # Obtain the mtime (the time a file was last modified)
|
|
| 492 |
- mtimes.append(os.path.getmtime(ref_path))
|
|
| 500 |
+ relative_path = os.path.relpath(ref_path, ref_heads) # Relative to refs head
|
|
| 501 |
+ if not glob or fnmatch(relative_path, glob):
|
|
| 502 |
+ refs.append(relative_path)
|
|
| 503 |
+ # Obtain the mtime (the time a file was last modified)
|
|
| 504 |
+ mtimes.append(os.path.getmtime(ref_path))
|
|
| 493 | 505 |
|
| 494 | 506 |
# NOTE: Sorted will sort from earliest to latest, thus the
|
| 495 | 507 |
# first ref of this list will be the file modified earliest.
|
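A self-contained sketch of the optimisation above: when the directory part of the glob contains no wildcard characters, os.walk() can start deeper in the refs tree, while fnmatch() still filters the full relative paths:

    import os
    from fnmatch import fnmatch

    def list_matching_refs(ref_heads, glob=None):
        path = ref_heads
        if glob is not None:
            globdir = os.path.dirname(glob)
            if not any(c in "*?[" for c in globdir):
                # No wildcards in the prefix, so walk only that subtree
                path = os.path.join(ref_heads, globdir)
        for root, _, files in os.walk(path):
            for filename in files:
                rel = os.path.relpath(os.path.join(root, filename), ref_heads)
                if not glob or fnmatch(rel, glob):
                    yield rel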
| ... | ... | @@ -827,6 +839,19 @@ class CASCache(): |
| 827 | 839 |
for dirnode in directory.directories:
|
| 828 | 840 |
yield from self._required_blobs(dirnode.digest)
|
| 829 | 841 |
|
| 842 |
+ # _temporary_object():
|
|
| 843 |
+ #
|
|
| 844 |
+ # Create a named temporary file with 0o0644 access rights.
|
|
| 845 |
+ #
|
|
| 846 |
+ # Returns:
|
|
| 847 |
+ # (file): A file object to a named temporary file.
|
|
| 848 |
+ @contextlib.contextmanager
|
|
| 849 |
+ def _temporary_object(self):
|
|
| 850 |
+ with utils._tempnamedfile(dir=self.tmpdir) as f:
|
|
| 851 |
+ os.chmod(f.name,
|
|
| 852 |
+ stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
|
|
| 853 |
+ yield f
|
|
| 854 |
+ |
|
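An equivalent standalone sketch of the helper, using tempfile directly rather than the internal utils._tempnamedfile():

    import contextlib
    import os
    import stat
    import tempfile

    @contextlib.contextmanager
    def temporary_object(tmpdir=None):
        # Named temporary file with 0o644 access rights
        with tempfile.NamedTemporaryFile(dir=tmpdir) as f:
            os.chmod(f.name,
                     stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
            yield f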
| 830 | 855 |
# _ensure_blob():
|
| 831 | 856 |
#
|
| 832 | 857 |
# Fetch and add blob if it's not already local.
|
| ... | ... | @@ -844,7 +869,7 @@ class CASCache(): |
| 844 | 869 |
# already in local repository
|
| 845 | 870 |
return objpath
|
| 846 | 871 |
|
| 847 |
- with utils._tempnamedfile(dir=self.tmpdir) as f:
|
|
| 872 |
+ with self._temporary_object() as f:
|
|
| 848 | 873 |
remote._fetch_blob(digest, f)
|
| 849 | 874 |
|
| 850 | 875 |
added_digest = self.add_object(path=f.name, link_directly=True)
|
| ... | ... | @@ -854,7 +879,7 @@ class CASCache(): |
| 854 | 879 |
|
| 855 | 880 |
def _batch_download_complete(self, batch):
|
| 856 | 881 |
for digest, data in batch.send():
|
| 857 |
- with utils._tempnamedfile(dir=self.tmpdir) as f:
|
|
| 882 |
+ with self._temporary_object() as f:
|
|
| 858 | 883 |
f.write(data)
|
| 859 | 884 |
f.flush()
|
| 860 | 885 |
|
| ... | ... | @@ -121,6 +121,9 @@ class Context(): |
| 121 | 121 |
# Whether or not to attempt to pull build trees globally
|
| 122 | 122 |
self.pull_buildtrees = None
|
| 123 | 123 |
|
| 124 |
+ # Whether or not to cache build trees on artifact creation
|
|
| 125 |
+ self.cache_buildtrees = None
|
|
| 126 |
+ |
|
| 124 | 127 |
# Boolean, whether we double-check with the user that they meant to
|
| 125 | 128 |
# close the workspace when they're using it to access the project.
|
| 126 | 129 |
self.prompt_workspace_close_project_inaccessible = None
|
| ... | ... | @@ -201,7 +204,7 @@ class Context(): |
| 201 | 204 |
# our artifactdir - the artifactdir may not have been created
|
| 202 | 205 |
# yet.
|
| 203 | 206 |
cache = _yaml.node_get(defaults, Mapping, 'cache')
|
| 204 |
- _yaml.node_validate(cache, ['quota', 'pull-buildtrees'])
|
|
| 207 |
+ _yaml.node_validate(cache, ['quota', 'pull-buildtrees', 'cache-buildtrees'])
|
|
| 205 | 208 |
|
| 206 | 209 |
self.config_cache_quota = _yaml.node_get(cache, str, 'quota')
|
| 207 | 210 |
|
| ... | ... | @@ -213,6 +216,10 @@ class Context(): |
| 213 | 216 |
# Load pull build trees configuration
|
| 214 | 217 |
self.pull_buildtrees = _yaml.node_get(cache, bool, 'pull-buildtrees')
|
| 215 | 218 |
|
| 219 |
+ # Load cache build trees configuration
|
|
| 220 |
+ self.cache_buildtrees = _node_get_option_str(
|
|
| 221 |
+ cache, 'cache-buildtrees', ['always', 'failure', 'never'])
|
|
| 222 |
+ |
|
| 216 | 223 |
# Load logging config
|
| 217 | 224 |
logging = _yaml.node_get(defaults, Mapping, 'logging')
|
| 218 | 225 |
_yaml.node_validate(logging, [
|
| ... | ... | @@ -361,14 +368,17 @@ class Context(): |
| 361 | 368 |
# (bool): Whether or not to use strict build plan
|
| 362 | 369 |
#
|
| 363 | 370 |
def get_strict(self):
|
| 371 |
+ if self._strict_build_plan is None:
|
|
| 372 |
+ # Either we're not overridden or we've never worked it out before
|
|
| 373 |
+ # so work out if we should be strict, and then cache the result
|
|
| 374 |
+ toplevel = self.get_toplevel_project()
|
|
| 375 |
+ overrides = self.get_overrides(toplevel.name)
|
|
| 376 |
+ self._strict_build_plan = _yaml.node_get(overrides, bool, 'strict', default_value=True)
|
|
| 364 | 377 |
|
| 365 | 378 |
# If it was set by the CLI, it overrides any config
|
| 366 |
- if self._strict_build_plan is not None:
|
|
| 367 |
- return self._strict_build_plan
|
|
| 368 |
- |
|
| 369 |
- toplevel = self.get_toplevel_project()
|
|
| 370 |
- overrides = self.get_overrides(toplevel.name)
|
|
| 371 |
- return _yaml.node_get(overrides, bool, 'strict', default_value=True)
|
|
| 379 |
+ # Likewise, if we've already computed it, we return the cached
|
|
| 380 |
+ # value in either case.
|
|
| 381 |
+ return self._strict_build_plan
|
|
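The pattern in miniature, with an illustrative stand-in for the project-overrides lookup: compute once on first call, cache on the instance, and let a CLI value pre-seed the field so it takes precedence:

    class Config:
        def __init__(self, cli_strict=None):
            # None means neither the CLI nor a previous call decided yet
            self._strict = cli_strict

        def get_strict(self):
            if self._strict is None:
                self._strict = True  # stand-in for the overrides lookup
            return self._strict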
| 372 | 382 |
|
| 373 | 383 |
# get_cache_key():
|
| 374 | 384 |
#
|
| ... | ... | @@ -344,3 +344,12 @@ class AppError(BstError): |
| 344 | 344 |
#
|
| 345 | 345 |
class SkipJob(Exception):
|
| 346 | 346 |
pass
|
| 347 |
+ |
|
| 348 |
+ |
|
| 349 |
+# ArtifactElementError
|
|
| 350 |
+#
|
|
| 351 |
+# Raised when errors are encountered by artifact elements
|
|
| 352 |
+#
|
|
| 353 |
+class ArtifactElementError(BstError):
|
|
| 354 |
+ def __init__(self, message, *, detail=None, reason=None):
|
|
| 355 |
+ super().__init__(message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason)
|
| ... | ... | @@ -183,7 +183,8 @@ class App(): |
| 183 | 183 |
'builders': 'sched_builders',
|
| 184 | 184 |
'pushers': 'sched_pushers',
|
| 185 | 185 |
'network_retries': 'sched_network_retries',
|
| 186 |
- 'pull_buildtrees': 'pull_buildtrees'
|
|
| 186 |
+ 'pull_buildtrees': 'pull_buildtrees',
|
|
| 187 |
+ 'cache_buildtrees': 'cache_buildtrees'
|
|
| 187 | 188 |
}
|
| 188 | 189 |
for cli_option, context_attr in override_map.items():
|
| 189 | 190 |
option_value = self._main_options.get(cli_option)
|
| 1 | 1 |
import os
|
| 2 | 2 |
import sys
|
| 3 | 3 |
from contextlib import ExitStack
|
| 4 |
-from fnmatch import fnmatch
|
|
| 5 | 4 |
from functools import partial
|
| 6 | 5 |
from tempfile import TemporaryDirectory
|
| 7 | 6 |
|
| ... | ... | @@ -252,6 +251,9 @@ def print_version(ctx, param, value): |
| 252 | 251 |
help="The mirror to fetch from first, before attempting other mirrors")
|
| 253 | 252 |
@click.option('--pull-buildtrees', is_flag=True, default=None,
|
| 254 | 253 |
help="Include an element's build tree when pulling remote element artifacts")
|
| 254 |
+@click.option('--cache-buildtrees', default=None,
|
|
| 255 |
+ type=click.Choice(['always', 'failure', 'never']),
|
|
| 256 |
+ help="Cache artifact build tree content on creation")
|
|
| 255 | 257 |
@click.pass_context
|
| 256 | 258 |
def cli(context, **kwargs):
|
| 257 | 259 |
"""Build and manipulate BuildStream projects
|
| ... | ... | @@ -573,7 +575,8 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, command) |
| 573 | 575 |
if choice != "never":
|
| 574 | 576 |
use_buildtree = choice
|
| 575 | 577 |
|
| 576 |
- if use_buildtree and not element._cached_success():
|
|
| 578 |
+ # Raise warning if the element is cached in a failed state
|
|
| 579 |
+ if use_buildtree and element._cached_failure():
|
|
| 577 | 580 |
click.echo("WARNING: using a buildtree from a failed build.", err=True)
|
| 578 | 581 |
|
| 579 | 582 |
try:
|
| ... | ... | @@ -901,38 +904,6 @@ def workspace_list(app): |
| 901 | 904 |
#############################################################
|
| 902 | 905 |
# Artifact Commands #
|
| 903 | 906 |
#############################################################
|
| 904 |
-def _classify_artifacts(names, cas, project_directory):
|
|
| 905 |
- element_targets = []
|
|
| 906 |
- artifact_refs = []
|
|
| 907 |
- element_globs = []
|
|
| 908 |
- artifact_globs = []
|
|
| 909 |
- |
|
| 910 |
- for name in names:
|
|
| 911 |
- if name.endswith('.bst'):
|
|
| 912 |
- if any(c in "*?[" for c in name):
|
|
| 913 |
- element_globs.append(name)
|
|
| 914 |
- else:
|
|
| 915 |
- element_targets.append(name)
|
|
| 916 |
- else:
|
|
| 917 |
- if any(c in "*?[" for c in name):
|
|
| 918 |
- artifact_globs.append(name)
|
|
| 919 |
- else:
|
|
| 920 |
- artifact_refs.append(name)
|
|
| 921 |
- |
|
| 922 |
- if element_globs:
|
|
| 923 |
- for dirpath, _, filenames in os.walk(project_directory):
|
|
| 924 |
- for filename in filenames:
|
|
| 925 |
- element_path = os.path.join(dirpath, filename).lstrip(project_directory).lstrip('/')
|
|
| 926 |
- if any(fnmatch(element_path, glob) for glob in element_globs):
|
|
| 927 |
- element_targets.append(element_path)
|
|
| 928 |
- |
|
| 929 |
- if artifact_globs:
|
|
| 930 |
- artifact_refs.extend(ref for ref in cas.list_refs()
|
|
| 931 |
- if any(fnmatch(ref, glob) for glob in artifact_globs))
|
|
| 932 |
- |
|
| 933 |
- return element_targets, artifact_refs
|
|
| 934 |
- |
|
| 935 |
- |
|
| 936 | 907 |
@cli.group(short_help="Manipulate cached artifacts")
|
| 937 | 908 |
def artifact():
|
| 938 | 909 |
"""Manipulate cached artifacts"""
|
| ... | ... | @@ -1111,53 +1082,24 @@ def artifact_push(app, elements, deps, remote): |
| 1111 | 1082 |
@click.pass_obj
|
| 1112 | 1083 |
def artifact_log(app, artifacts):
|
| 1113 | 1084 |
"""Show logs of all artifacts"""
|
| 1114 |
- from .._exceptions import CASError
|
|
| 1115 |
- from .._message import MessageType
|
|
| 1116 |
- from .._pipeline import PipelineSelection
|
|
| 1117 |
- from ..storage._casbaseddirectory import CasBasedDirectory
|
|
| 1118 |
- |
|
| 1119 |
- with ExitStack() as stack:
|
|
| 1120 |
- stack.enter_context(app.initialized())
|
|
| 1121 |
- cache = app.context.artifactcache
|
|
| 1122 |
- |
|
| 1123 |
- elements, artifacts = _classify_artifacts(artifacts, cache.cas,
|
|
| 1124 |
- app.project.directory)
|
|
| 1125 |
- |
|
| 1126 |
- vdirs = []
|
|
| 1127 |
- extractdirs = []
|
|
| 1128 |
- if artifacts:
|
|
| 1129 |
- for ref in artifacts:
|
|
| 1130 |
- try:
|
|
| 1131 |
- cache_id = cache.cas.resolve_ref(ref, update_mtime=True)
|
|
| 1132 |
- vdir = CasBasedDirectory(cache.cas, cache_id)
|
|
| 1133 |
- vdirs.append(vdir)
|
|
| 1134 |
- except CASError as e:
|
|
| 1135 |
- app._message(MessageType.WARN, "Artifact {} is not cached".format(ref), detail=str(e))
|
|
| 1136 |
- continue
|
|
| 1137 |
- if elements:
|
|
| 1138 |
- elements = app.stream.load_selection(elements, selection=PipelineSelection.NONE)
|
|
| 1139 |
- for element in elements:
|
|
| 1140 |
- if not element._cached():
|
|
| 1141 |
- app._message(MessageType.WARN, "Element {} is not cached".format(element))
|
|
| 1142 |
- continue
|
|
| 1143 |
- ref = cache.get_artifact_fullname(element, element._get_cache_key())
|
|
| 1144 |
- cache_id = cache.cas.resolve_ref(ref, update_mtime=True)
|
|
| 1145 |
- vdir = CasBasedDirectory(cache.cas, cache_id)
|
|
| 1146 |
- vdirs.append(vdir)
|
|
| 1147 |
- |
|
| 1148 |
- for vdir in vdirs:
|
|
| 1149 |
- # NOTE: If reading the logs feels unresponsive, here would be a good place to provide progress information.
|
|
| 1150 |
- logsdir = vdir.descend(["logs"])
|
|
| 1151 |
- td = stack.enter_context(TemporaryDirectory())
|
|
| 1152 |
- logsdir.export_files(td, can_link=True)
|
|
| 1153 |
- extractdirs.append(td)
|
|
| 1154 |
- |
|
| 1155 |
- for extractdir in extractdirs:
|
|
| 1156 |
- for log in (os.path.join(extractdir, log) for log in os.listdir(extractdir)):
|
|
| 1157 |
- # NOTE: Should click gain the ability to pass files to the pager this can be optimised.
|
|
| 1158 |
- with open(log) as f:
|
|
| 1159 |
- data = f.read()
|
|
| 1160 |
- click.echo_via_pager(data)
|
|
| 1085 |
+ with app.initialized():
|
|
| 1086 |
+ logsdirs = app.stream.artifact_log(artifacts)
|
|
| 1087 |
+ |
|
| 1088 |
+ with ExitStack() as stack:
|
|
| 1089 |
+ extractdirs = []
|
|
| 1090 |
+ for logsdir in logsdirs:
|
|
| 1091 |
+ # NOTE: If reading the logs feels unresponsive, here would be a good place
|
|
| 1092 |
+ # to provide progress information.
|
|
| 1093 |
+ td = stack.enter_context(TemporaryDirectory())
|
|
| 1094 |
+ logsdir.export_files(td, can_link=True)
|
|
| 1095 |
+ extractdirs.append(td)
|
|
| 1096 |
+ |
|
| 1097 |
+ for extractdir in extractdirs:
|
|
| 1098 |
+ for log in (os.path.join(extractdir, log) for log in os.listdir(extractdir)):
|
|
| 1099 |
+ # NOTE: Should click gain the ability to pass files to the pager this can be optimised.
|
|
| 1100 |
+ with open(log) as f:
|
|
| 1101 |
+ data = f.read()
|
|
| 1102 |
+ click.echo_via_pager(data)
|
|
| 1161 | 1103 |
|
| 1162 | 1104 |
|
| 1163 | 1105 |
##################################################################
|
| ... | ... | @@ -20,8 +20,6 @@ |
| 20 | 20 |
import os
|
| 21 | 21 |
from functools import cmp_to_key
|
| 22 | 22 |
from collections.abc import Mapping
|
| 23 |
-import tempfile
|
|
| 24 |
-import shutil
|
|
| 25 | 23 |
|
| 26 | 24 |
from .._exceptions import LoadError, LoadErrorReason
|
| 27 | 25 |
from .. import Consistency
|
| ... | ... | @@ -49,12 +47,10 @@ from .._message import Message, MessageType |
| 49 | 47 |
# context (Context): The Context object
|
| 50 | 48 |
# project (Project): The toplevel Project object
|
| 51 | 49 |
# parent (Loader): A parent Loader object, in the case this is a junctioned Loader
|
| 52 |
-# tempdir (str): A directory to cleanup with the Loader, given to the loader by a parent
|
|
| 53 |
-# loader in the case that this loader is a subproject loader.
|
|
| 54 | 50 |
#
|
| 55 | 51 |
class Loader():
|
| 56 | 52 |
|
| 57 |
- def __init__(self, context, project, *, parent=None, tempdir=None):
|
|
| 53 |
+ def __init__(self, context, project, *, parent=None):
|
|
| 58 | 54 |
|
| 59 | 55 |
# Ensure we have an absolute path for the base directory
|
| 60 | 56 |
basedir = project.element_path
|
| ... | ... | @@ -73,7 +69,6 @@ class Loader(): |
| 73 | 69 |
self._options = project.options # Project options (OptionPool)
|
| 74 | 70 |
self._basedir = basedir # Base project directory
|
| 75 | 71 |
self._first_pass_options = project.first_pass_config.options # Project options (OptionPool)
|
| 76 |
- self._tempdir = tempdir # A directory to cleanup
|
|
| 77 | 72 |
self._parent = parent # The parent loader
|
| 78 | 73 |
|
| 79 | 74 |
self._meta_elements = {} # Dict of resolved meta elements by name
|
| ... | ... | @@ -159,30 +154,6 @@ class Loader(): |
| 159 | 154 |
|
| 160 | 155 |
return ret
|
| 161 | 156 |
|
| 162 |
- # cleanup():
|
|
| 163 |
- #
|
|
| 164 |
- # Remove temporary checkout directories of subprojects
|
|
| 165 |
- #
|
|
| 166 |
- def cleanup(self):
|
|
| 167 |
- if self._parent and not self._tempdir:
|
|
| 168 |
- # already done
|
|
| 169 |
- return
|
|
| 170 |
- |
|
| 171 |
- # recurse
|
|
| 172 |
- for loader in self._loaders.values():
|
|
| 173 |
- # value may be None with nested junctions without overrides
|
|
| 174 |
- if loader is not None:
|
|
| 175 |
- loader.cleanup()
|
|
| 176 |
- |
|
| 177 |
- if not self._parent:
|
|
| 178 |
- # basedir of top-level loader is never a temporary directory
|
|
| 179 |
- return
|
|
| 180 |
- |
|
| 181 |
- # safe guard to not accidentally delete directories outside builddir
|
|
| 182 |
- if self._tempdir.startswith(self._context.builddir + os.sep):
|
|
| 183 |
- if os.path.exists(self._tempdir):
|
|
| 184 |
- shutil.rmtree(self._tempdir)
|
|
| 185 |
- |
|
| 186 | 157 |
###########################################
|
| 187 | 158 |
# Private Methods #
|
| 188 | 159 |
###########################################
|
| ... | ... | @@ -540,23 +511,28 @@ class Loader(): |
| 540 | 511 |
"Subproject has no ref for junction: {}".format(filename),
|
| 541 | 512 |
detail=detail)
|
| 542 | 513 |
|
| 543 |
- if len(sources) == 1 and sources[0]._get_local_path():
|
|
| 514 |
+ workspace = element._get_workspace()
|
|
| 515 |
+ if workspace:
|
|
| 516 |
+ # If a workspace is open, load it from there instead
|
|
| 517 |
+ basedir = workspace.get_absolute_path()
|
|
| 518 |
+ elif len(sources) == 1 and sources[0]._get_local_path():
|
|
| 544 | 519 |
# Optimization for junctions with a single local source
|
| 545 | 520 |
basedir = sources[0]._get_local_path()
|
| 546 |
- tempdir = None
|
|
| 547 | 521 |
else:
|
| 548 | 522 |
# Stage sources
|
| 549 |
- os.makedirs(self._context.builddir, exist_ok=True)
|
|
| 550 |
- basedir = tempfile.mkdtemp(prefix="{}-".format(element.normal_name), dir=self._context.builddir)
|
|
| 551 |
- element._stage_sources_at(basedir, mount_workspaces=False)
|
|
| 552 |
- tempdir = basedir
|
|
| 523 |
+ element._update_state()
|
|
| 524 |
+ basedir = os.path.join(self.project.directory, ".bst", "staged-junctions",
|
|
| 525 |
+ filename, element._get_cache_key())
|
|
| 526 |
+ if not os.path.exists(basedir):
|
|
| 527 |
+ os.makedirs(basedir, exist_ok=True)
|
|
| 528 |
+ element._stage_sources_at(basedir, mount_workspaces=False)
|
|
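A sketch of the staging scheme above, with illustrative values: junction sources land once per cache key under the project's .bst directory and are reused on later loads instead of being re-staged into a temporary directory:

    import os

    project_directory = "/path/to/project"  # illustrative
    junction_filename = "subproject.bst"    # illustrative
    cache_key = "0123abcd"                  # illustrative

    def stage_sources(directory):
        # hypothetical stand-in for element._stage_sources_at(directory, ...)
        pass

    basedir = os.path.join(project_directory, ".bst", "staged-junctions",
                           junction_filename, cache_key)
    if not os.path.exists(basedir):
        os.makedirs(basedir, exist_ok=True)
        stage_sources(basedir)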
| 553 | 529 |
|
| 554 | 530 |
# Load the project
|
| 555 | 531 |
project_dir = os.path.join(basedir, element.path)
|
| 556 | 532 |
try:
|
| 557 | 533 |
from .._project import Project
|
| 558 | 534 |
project = Project(project_dir, self._context, junction=element,
|
| 559 |
- parent_loader=self, tempdir=tempdir)
|
|
| 535 |
+ parent_loader=self)
|
|
| 560 | 536 |
except LoadError as e:
|
| 561 | 537 |
if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
|
| 562 | 538 |
raise LoadError(reason=LoadErrorReason.INVALID_JUNCTION,
|
| ... | ... | @@ -38,20 +38,20 @@ class MetaElement(): |
| 38 | 38 |
# sandbox: Configuration specific to the sandbox environment
|
| 39 | 39 |
# first_pass: The element is to be loaded with first pass configuration (junction)
|
| 40 | 40 |
#
|
| 41 |
- def __init__(self, project, name, kind, provenance, sources, config,
|
|
| 42 |
- variables, environment, env_nocache, public, sandbox,
|
|
| 43 |
- first_pass):
|
|
| 41 |
+ def __init__(self, project, name, kind=None, provenance=None, sources=None, config=None,
|
|
| 42 |
+ variables=None, environment=None, env_nocache=None, public=None,
|
|
| 43 |
+ sandbox=None, first_pass=False):
|
|
| 44 | 44 |
self.project = project
|
| 45 | 45 |
self.name = name
|
| 46 | 46 |
self.kind = kind
|
| 47 | 47 |
self.provenance = provenance
|
| 48 | 48 |
self.sources = sources
|
| 49 |
- self.config = config
|
|
| 50 |
- self.variables = variables
|
|
| 51 |
- self.environment = environment
|
|
| 52 |
- self.env_nocache = env_nocache
|
|
| 53 |
- self.public = public
|
|
| 54 |
- self.sandbox = sandbox
|
|
| 49 |
+ self.config = config or {}
|
|
| 50 |
+ self.variables = variables or {}
|
|
| 51 |
+ self.environment = environment or {}
|
|
| 52 |
+ self.env_nocache = env_nocache or []
|
|
| 53 |
+ self.public = public or {}
|
|
| 54 |
+ self.sandbox = sandbox or {}
|
|
| 55 | 55 |
self.build_dependencies = []
|
| 56 | 56 |
self.dependencies = []
|
| 57 | 57 |
self.first_pass = first_pass
|
| ... | ... | @@ -26,6 +26,7 @@ from . import utils |
| 26 | 26 |
from . import _cachekey
|
| 27 | 27 |
from . import _site
|
| 28 | 28 |
from . import _yaml
|
| 29 |
+from ._artifactelement import ArtifactElement
|
|
| 29 | 30 |
from ._profile import Topics, profile_start, profile_end
|
| 30 | 31 |
from ._exceptions import LoadError, LoadErrorReason
|
| 31 | 32 |
from ._options import OptionPool
|
| ... | ... | @@ -91,7 +92,7 @@ class ProjectConfig: |
| 91 | 92 |
class Project():
|
| 92 | 93 |
|
| 93 | 94 |
def __init__(self, directory, context, *, junction=None, cli_options=None,
|
| 94 |
- default_mirror=None, parent_loader=None, tempdir=None):
|
|
| 95 |
+ default_mirror=None, parent_loader=None):
|
|
| 95 | 96 |
|
| 96 | 97 |
# The project name
|
| 97 | 98 |
self.name = None
|
| ... | ... | @@ -147,7 +148,7 @@ class Project(): |
| 147 | 148 |
self._project_includes = None
|
| 148 | 149 |
|
| 149 | 150 |
profile_start(Topics.LOAD_PROJECT, self.directory.replace(os.sep, '-'))
|
| 150 |
- self._load(parent_loader=parent_loader, tempdir=tempdir)
|
|
| 151 |
+ self._load(parent_loader=parent_loader)
|
|
| 151 | 152 |
profile_end(Topics.LOAD_PROJECT, self.directory.replace(os.sep, '-'))
|
| 152 | 153 |
|
| 153 | 154 |
self._partially_loaded = True
|
| ... | ... | @@ -255,6 +256,19 @@ class Project(): |
| 255 | 256 |
else:
|
| 256 | 257 |
return self.config.element_factory.create(self._context, self, meta)
|
| 257 | 258 |
|
| 259 |
+ # create_artifact_element()
|
|
| 260 |
+ #
|
|
| 261 |
+ # Instantiate and return an ArtifactElement
|
|
| 262 |
+ #
|
|
| 263 |
+ # Args:
|
|
| 264 |
+ # ref (str): A string of the artifact ref
|
|
| 265 |
+ #
|
|
| 266 |
+ # Returns:
|
|
| 267 |
+ # (ArtifactElement): A newly created ArtifactElement object
|
|
| 268 |
+ #
|
|
| 269 |
+ def create_artifact_element(self, ref):
|
|
| 270 |
+ return ArtifactElement(self._context, ref)
|
|
| 271 |
+ |
|
| 258 | 272 |
# create_source()
|
| 259 | 273 |
#
|
| 260 | 274 |
# Instantiate and return a Source
|
| ... | ... | @@ -389,8 +403,6 @@ class Project(): |
| 389 | 403 |
# Cleans up resources used loading elements
|
| 390 | 404 |
#
|
| 391 | 405 |
def cleanup(self):
|
| 392 |
- self.loader.cleanup()
|
|
| 393 |
- |
|
| 394 | 406 |
# Reset the element loader state
|
| 395 | 407 |
Element._reset_load_state()
|
| 396 | 408 |
|
| ... | ... | @@ -439,7 +451,7 @@ class Project(): |
| 439 | 451 |
#
|
| 440 | 452 |
# Raises: LoadError if there was a problem with the project.conf
|
| 441 | 453 |
#
|
| 442 |
- def _load(self, parent_loader=None, tempdir=None):
|
|
| 454 |
+ def _load(self, parent_loader=None):
|
|
| 443 | 455 |
|
| 444 | 456 |
# Load builtin default
|
| 445 | 457 |
projectfile = os.path.join(self.directory, _PROJECT_CONF_FILE)
|
| ... | ... | @@ -505,8 +517,7 @@ class Project(): |
| 505 | 517 |
self._fatal_warnings = _yaml.node_get(pre_config_node, list, 'fatal-warnings', default_value=[])
|
| 506 | 518 |
|
| 507 | 519 |
self.loader = Loader(self._context, self,
|
| 508 |
- parent=parent_loader,
|
|
| 509 |
- tempdir=tempdir)
|
|
| 520 |
+ parent=parent_loader)
|
|
| 510 | 521 |
|
| 511 | 522 |
self._project_includes = Includes(self.loader, copy_tree=False)
|
| 512 | 523 |
|
| ... | ... | @@ -27,8 +27,10 @@ import shutil |
| 27 | 27 |
import tarfile
|
| 28 | 28 |
import tempfile
|
| 29 | 29 |
from contextlib import contextmanager, suppress
|
| 30 |
+from fnmatch import fnmatch
|
|
| 30 | 31 |
|
| 31 |
-from ._exceptions import StreamError, ImplError, BstError, set_last_task_error
|
|
| 32 |
+from ._artifactelement import verify_artifact_ref
|
|
| 33 |
+from ._exceptions import StreamError, ImplError, BstError, ArtifactElementError, set_last_task_error
|
|
| 32 | 34 |
from ._message import Message, MessageType
|
| 33 | 35 |
from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, BuildQueue, PullQueue, PushQueue
|
| 34 | 36 |
from ._pipeline import Pipeline, PipelineSelection
|
| ... | ... | @@ -108,19 +110,21 @@ class Stream(): |
| 108 | 110 |
def load_selection(self, targets, *,
|
| 109 | 111 |
selection=PipelineSelection.NONE,
|
| 110 | 112 |
except_targets=(),
|
| 111 |
- use_artifact_config=False):
|
|
| 113 |
+ use_artifact_config=False,
|
|
| 114 |
+ load_refs=False):
|
|
| 112 | 115 |
|
| 113 | 116 |
profile_start(Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, '-') for t in targets))
|
| 114 | 117 |
|
| 115 |
- elements, _ = self._load(targets, (),
|
|
| 116 |
- selection=selection,
|
|
| 117 |
- except_targets=except_targets,
|
|
| 118 |
- fetch_subprojects=False,
|
|
| 119 |
- use_artifact_config=use_artifact_config)
|
|
| 118 |
+ target_objects, _ = self._load(targets, (),
|
|
| 119 |
+ selection=selection,
|
|
| 120 |
+ except_targets=except_targets,
|
|
| 121 |
+ fetch_subprojects=False,
|
|
| 122 |
+ use_artifact_config=use_artifact_config,
|
|
| 123 |
+ load_refs=load_refs)
|
|
| 120 | 124 |
|
| 121 | 125 |
profile_end(Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, '-') for t in targets))
|
| 122 | 126 |
|
| 123 |
- return elements
|
|
| 127 |
+ return target_objects
|
|
| 124 | 128 |
|
| 125 | 129 |
# shell()
|
| 126 | 130 |
#
|
| ... | ... | @@ -491,6 +495,31 @@ class Stream(): |
| 491 | 495 |
raise StreamError("Error while staging dependencies into a sandbox"
|
| 492 | 496 |
": '{}'".format(e), detail=e.detail, reason=e.reason) from e
|
| 493 | 497 |
|
| 498 |
+ # artifact_log()
|
|
| 499 |
+ #
|
|
| 500 |
+ # Show the full log of an artifact
|
|
| 501 |
+ #
|
|
| 502 |
+ # Args:
|
|
| 503 |
+ # targets (list): Targets to view the logs of
|
|
| 504 |
+ #
|
|
| 505 |
+ # Returns:
|
|
| 506 |
+ # logsdirs (list): A list of CasBasedDirectory objects containing artifact logs
|
|
| 507 |
+ #
|
|
| 508 |
+ def artifact_log(self, targets):
|
|
| 509 |
+ # Return list of Element and/or ArtifactElement objects
|
|
| 510 |
+ target_objects = self.load_selection(targets, selection=PipelineSelection.NONE, load_refs=True)
|
|
| 511 |
+ |
|
| 512 |
+ logsdirs = []
|
|
| 513 |
+ for obj in target_objects:
|
|
| 514 |
+ ref = obj.get_artifact_name()
|
|
| 515 |
+ if not obj._cached():
|
|
| 516 |
+ self._message(MessageType.WARN, "{} is not cached".format(ref))
|
|
| 517 |
+ continue
|
|
| 518 |
+ |
|
| 519 |
+ logsdirs.append(self._artifacts.get_artifact_logs(ref))
|
|
| 520 |
+ |
|
| 521 |
+ return logsdirs
|
|
| 522 |
+ |
|
| 494 | 523 |
# source_checkout()
|
| 495 | 524 |
#
|
| 496 | 525 |
# Checkout sources of the target element to the specified location
|
| ... | ... | @@ -922,25 +951,36 @@ class Stream(): |
| 922 | 951 |
use_artifact_config=False,
|
| 923 | 952 |
artifact_remote_url=None,
|
| 924 | 953 |
fetch_subprojects=False,
|
| 925 |
- dynamic_plan=False):
|
|
| 954 |
+ dynamic_plan=False,
|
|
| 955 |
+ load_refs=False):
|
|
| 956 |
+ |
|
| 957 |
+ # Classify element and artifact strings
|
|
| 958 |
+ target_elements, target_artifacts = self._classify_artifacts(targets)
|
|
| 959 |
+ |
|
| 960 |
+ if target_artifacts and not load_refs:
|
|
| 961 |
+ detail = '\n'.join(target_artifacts)
|
|
| 962 |
+ raise ArtifactElementError("Cannot perform this operation with artifact refs:", detail=detail)
|
|
| 926 | 963 |
|
| 927 | 964 |
# Load rewritable if we have any tracking selection to make
|
| 928 | 965 |
rewritable = False
|
| 929 | 966 |
if track_targets:
|
| 930 | 967 |
rewritable = True
|
| 931 | 968 |
|
| 932 |
- # Load all targets
|
|
| 969 |
+ # Load all target elements
|
|
| 933 | 970 |
elements, except_elements, track_elements, track_except_elements = \
|
| 934 |
- self._pipeline.load([targets, except_targets, track_targets, track_except_targets],
|
|
| 971 |
+ self._pipeline.load([target_elements, except_targets, track_targets, track_except_targets],
|
|
| 935 | 972 |
rewritable=rewritable,
|
| 936 | 973 |
fetch_subprojects=fetch_subprojects)
|
| 937 | 974 |
|
| 975 |
+ # Obtain the ArtifactElement objects
|
|
| 976 |
+ artifacts = [self._project.create_artifact_element(ref) for ref in target_artifacts]
|
|
| 977 |
+ |
|
| 938 | 978 |
# Optionally filter out junction elements
|
| 939 | 979 |
if ignore_junction_targets:
|
| 940 | 980 |
elements = [e for e in elements if e.get_kind() != 'junction']
|
| 941 | 981 |
|
| 942 | 982 |
# Hold on to the targets
|
| 943 |
- self.targets = elements
|
|
| 983 |
+ self.targets = elements + artifacts
|
|
| 944 | 984 |
|
| 945 | 985 |
# Here we should raise an error if the track_elements targets
|
| 946 | 986 |
# are not dependencies of the primary targets, this is not
|
| ... | ... | @@ -997,9 +1037,9 @@ class Stream(): |
| 997 | 1037 |
|
| 998 | 1038 |
# Now move on to loading primary selection.
|
| 999 | 1039 |
#
|
| 1000 |
- self._pipeline.resolve_elements(elements)
|
|
| 1001 |
- selected = self._pipeline.get_selection(elements, selection, silent=False)
|
|
| 1002 |
- selected = self._pipeline.except_elements(elements,
|
|
| 1040 |
+ self._pipeline.resolve_elements(self.targets)
|
|
| 1041 |
+ selected = self._pipeline.get_selection(self.targets, selection, silent=False)
|
|
| 1042 |
+ selected = self._pipeline.except_elements(self.targets,
|
|
| 1003 | 1043 |
selected,
|
| 1004 | 1044 |
except_elements)
|
| 1005 | 1045 |
|
| ... | ... | @@ -1331,3 +1371,55 @@ class Stream(): |
| 1331 | 1371 |
required_list.append(element)
|
| 1332 | 1372 |
|
| 1333 | 1373 |
return required_list
|
| 1374 |
+ |
|
| 1375 |
+ # _classify_artifacts()
|
|
| 1376 |
+ #
|
|
| 1377 |
+ # Split up a list of targets into element names and artifact refs
|
|
| 1378 |
+ #
|
|
| 1379 |
+ # Args:
|
|
| 1380 |
+ # targets (list): A list of targets
|
|
| 1381 |
+ #
|
|
| 1382 |
+ # Returns:
|
|
| 1383 |
+ # (list): element names present in the targets
|
|
| 1384 |
+ # (list): artifact refs present in the targets
|
|
| 1385 |
+ #
|
|
| 1386 |
+ def _classify_artifacts(self, targets):
|
|
| 1387 |
+ element_targets = []
|
|
| 1388 |
+ artifact_refs = []
|
|
| 1389 |
+ element_globs = []
|
|
| 1390 |
+ artifact_globs = []
|
|
| 1391 |
+ |
|
| 1392 |
+ for target in targets:
|
|
| 1393 |
+ if target.endswith('.bst'):
|
|
| 1394 |
+ if any(c in "*?[" for c in target):
|
|
| 1395 |
+ element_globs.append(target)
|
|
| 1396 |
+ else:
|
|
| 1397 |
+ element_targets.append(target)
|
|
| 1398 |
+ else:
|
|
| 1399 |
+ if any(c in "*?[" for c in target):
|
|
| 1400 |
+ artifact_globs.append(target)
|
|
| 1401 |
+ else:
|
|
| 1402 |
+ try:
|
|
| 1403 |
+ verify_artifact_ref(target)
|
|
| 1404 |
+ except ArtifactElementError:
|
|
| 1405 |
+ element_targets.append(target)
|
|
| 1406 |
+ continue
|
|
| 1407 |
+ artifact_refs.append(target)
|
|
| 1408 |
+ |
|
| 1409 |
+ if element_globs:
|
|
| 1410 |
+ for dirpath, _, filenames in os.walk(self._project.element_path):
|
|
| 1411 |
+ for filename in filenames:
|
|
| 1412 |
+ element_path = os.path.join(dirpath, filename)
|
|
| 1413 |
+ length = len(self._project.element_path) + 1
|
|
| 1414 |
+ element_path = element_path[length:] # Strip out the element_path
|
|
| 1415 |
+ |
|
| 1416 |
+ if any(fnmatch(element_path, glob) for glob in element_globs):
|
|
| 1417 |
+ element_targets.append(element_path)
|
|
| 1418 |
+ |
|
| 1419 |
+ if artifact_globs:
|
|
| 1420 |
+ for glob in artifact_globs:
|
|
| 1421 |
+ artifact_refs.extend(self._artifacts.list_artifacts(glob=glob))
|
|
| 1422 |
+ if not artifact_refs:
|
|
| 1423 |
+ self._message(MessageType.WARN, "No artifacts found for globs: {}".format(', '.join(artifact_globs)))
|
|
| 1424 |
+ |
|
| 1425 |
+ return element_targets, artifact_refs
|
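The classification rule in miniature: names ending in '.bst' are elements, everything else is treated as an artifact ref, and any of the characters '*?[' routes a name to the glob lists (the target strings are illustrative):

    targets = ['hello.bst', 'core/*.bst', 'myproject/hello/0123abcd', 'myproject/he*']
    for target in targets:
        kind = 'element' if target.endswith('.bst') else 'artifact'
        form = 'glob' if any(c in "*?[" for c in target) else 'name'
        print('{} -> {} {}'.format(target, kind, form))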
| ... | ... | @@ -365,8 +365,8 @@ _sentinel = object() |
| 365 | 365 |
#
|
| 366 | 366 |
def node_get(node, expected_type, key, indices=None, *, default_value=_sentinel, allow_none=False):
|
| 367 | 367 |
value = node.get(key, default_value)
|
| 368 |
- provenance = node_get_provenance(node)
|
|
| 369 | 368 |
if value is _sentinel:
|
| 369 |
+ provenance = node_get_provenance(node)
|
|
| 370 | 370 |
raise LoadError(LoadErrorReason.INVALID_DATA,
|
| 371 | 371 |
"{}: Dictionary did not contain expected key '{}'".format(provenance, key))
|
| 372 | 372 |
|
| ... | ... | @@ -914,6 +914,10 @@ RoundTripRepresenter.add_representer(SanitizedDict, |
| 914 | 914 |
SafeRepresenter.represent_dict)
|
| 915 | 915 |
|
| 916 | 916 |
|
| 917 |
+# Types we can short-circuit in node_sanitize for speed.
|
|
| 918 |
+__SANITIZE_SHORT_CIRCUIT_TYPES = (int, float, str, bool, tuple)
|
|
| 919 |
+ |
|
| 920 |
+ |
|
| 917 | 921 |
# node_sanitize()
|
| 918 | 922 |
#
|
| 919 | 923 |
# Returns an alphabetically ordered recursive copy
|
| ... | ... | @@ -922,9 +926,21 @@ RoundTripRepresenter.add_representer(SanitizedDict, |
| 922 | 926 |
# Only dicts are ordered, list elements are left in order.
|
| 923 | 927 |
#
|
| 924 | 928 |
def node_sanitize(node):
|
| 929 |
+ # Short-circuit None which occurs ca. twice per element
|
|
| 930 |
+ if node is None:
|
|
| 931 |
+ return node
|
|
| 932 |
+ |
|
| 933 |
+ node_type = type(node)
|
|
| 934 |
+ # Next short-circuit integers, floats, strings, booleans, and tuples
|
|
| 935 |
+ if node_type in __SANITIZE_SHORT_CIRCUIT_TYPES:
|
|
| 936 |
+ return node
|
|
| 937 |
+ # Now short-circuit lists. Note this is only for the raw list
|
|
| 938 |
+ # type, CommentedSeq and others get caught later.
|
|
| 939 |
+ elif node_type is list:
|
|
| 940 |
+ return [node_sanitize(elt) for elt in node]
|
|
| 925 | 941 |
|
| 926 |
- if isinstance(node, collections.abc.Mapping):
|
|
| 927 |
- |
|
| 942 |
+ # Finally ChainMap and dict, and other Mappings need special handling
|
|
| 943 |
+ if node_type in (dict, ChainMap) or isinstance(node, collections.abc.Mapping):
|
|
| 928 | 944 |
result = SanitizedDict()
|
| 929 | 945 |
|
| 930 | 946 |
key_list = [key for key, _ in node_items(node)]
|
| ... | ... | @@ -932,10 +948,12 @@ def node_sanitize(node): |
| 932 | 948 |
result[key] = node_sanitize(node[key])
|
| 933 | 949 |
|
| 934 | 950 |
return result
|
| 935 |
- |
|
| 951 |
+ # Catch the case of CommentedSeq and friends. This is more rare and so
|
|
| 952 |
+ # we keep complexity down by still using isinstance here.
|
|
| 936 | 953 |
elif isinstance(node, list):
|
| 937 | 954 |
return [node_sanitize(elt) for elt in node]
|
| 938 | 955 |
|
| 956 |
+ # Everything else (such as commented scalars) just gets returned as-is.
|
|
| 939 | 957 |
return node
|
| 940 | 958 |
|
| 941 | 959 |
|
| ... | ... | @@ -1064,15 +1082,52 @@ class ChainMap(collections.ChainMap): |
| 1064 | 1082 |
return default
|
| 1065 | 1083 |
|
| 1066 | 1084 |
|
| 1085 |
+# Node copying
|
|
| 1086 |
+#
|
|
| 1087 |
+# Unfortunately we copy nodes a *lot* and `isinstance()` is super-slow when
|
|
| 1088 |
+# things from collections.abc get involved. The result is the following
|
|
| 1089 |
+# intricate but substantially faster group of tuples and the use of `in`.
|
|
| 1090 |
+#
|
|
| 1091 |
+# If any of the {node,list}_{chain_,}_copy routines raise a ValueError
|
|
| 1092 |
+# then it's likely additional types need adding to these tuples.
|
|
| 1093 |
+ |
|
| 1094 |
+# When chaining a copy, these types are skipped since the ChainMap will
|
|
| 1095 |
+# retrieve them from the source node when needed. Other copiers might copy
|
|
| 1096 |
+# them, so we call them __QUICK_TYPES.
|
|
| 1097 |
+__QUICK_TYPES = (str, bool,
|
|
| 1098 |
+ yaml.scalarstring.PreservedScalarString,
|
|
| 1099 |
+ yaml.scalarstring.SingleQuotedScalarString,
|
|
| 1100 |
+ yaml.scalarstring.DoubleQuotedScalarString)
|
|
| 1101 |
+ |
|
| 1102 |
+# These types have to be iterated like a dictionary
|
|
| 1103 |
+__DICT_TYPES = (dict, ChainMap, yaml.comments.CommentedMap)
|
|
| 1104 |
+ |
|
| 1105 |
+# These types have to be iterated like a list
|
|
| 1106 |
+__LIST_TYPES = (list, yaml.comments.CommentedSeq)
|
|
| 1107 |
+ |
|
| 1108 |
+# These are the provenance types, which have to be cloned rather than any other
|
|
| 1109 |
+# copying tactic.
|
|
| 1110 |
+__PROVENANCE_TYPES = (Provenance, DictProvenance, MemberProvenance, ElementProvenance)
|
|
| 1111 |
+ |
|
| 1112 |
+# These are the directives used to compose lists, we need this because it's
|
|
| 1113 |
+# slightly faster during the node_final_assertions checks
|
|
| 1114 |
+__NODE_ASSERT_COMPOSITION_DIRECTIVES = ('(>)', '(<)', '(=)')
|
|
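A micro-benchmark sketch of the design choice described in the comments above: an exact type() membership test against a small tuple avoids the ABC subclass machinery that makes isinstance() comparatively slow for collections.abc types (the DICT_TYPES tuple is a stand-in for __DICT_TYPES):

    import timeit
    from collections.abc import Mapping

    DICT_TYPES = (dict,)  # stand-in for __DICT_TYPES above
    d = {}

    print(timeit.timeit("type(d) in DICT_TYPES",
                        globals={'d': d, 'DICT_TYPES': DICT_TYPES}))
    print(timeit.timeit("isinstance(d, Mapping)",
                        globals={'d': d, 'Mapping': Mapping}))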
| 1115 |
+ |
|
| 1116 |
+ |
|
| 1067 | 1117 |
def node_chain_copy(source):
|
| 1068 | 1118 |
copy = ChainMap({}, source)
|
| 1069 | 1119 |
for key, value in source.items():
|
| 1070 |
- if isinstance(value, collections.abc.Mapping):
|
|
| 1120 |
+ value_type = type(value)
|
|
| 1121 |
+ if value_type in __DICT_TYPES:
|
|
| 1071 | 1122 |
copy[key] = node_chain_copy(value)
|
| 1072 |
- elif isinstance(value, list):
|
|
| 1123 |
+ elif value_type in __LIST_TYPES:
|
|
| 1073 | 1124 |
copy[key] = list_chain_copy(value)
|
| 1074 |
- elif isinstance(value, Provenance):
|
|
| 1125 |
+ elif value_type in __PROVENANCE_TYPES:
|
|
| 1075 | 1126 |
copy[key] = value.clone()
|
| 1127 |
+ elif value_type in __QUICK_TYPES:
|
|
| 1128 |
+ pass # No need to copy these, the chainmap deals with it
|
|
| 1129 |
+ else:
|
|
| 1130 |
+ raise ValueError("Unable to be quick about node_chain_copy of {}".format(value_type))
|
|
| 1076 | 1131 |
|
| 1077 | 1132 |
return copy
|
| 1078 | 1133 |
|
| ... | ... | @@ -1080,14 +1135,17 @@ def node_chain_copy(source): |
| 1080 | 1135 |
def list_chain_copy(source):
|
| 1081 | 1136 |
copy = []
|
| 1082 | 1137 |
for item in source:
|
| 1083 |
- if isinstance(item, collections.abc.Mapping):
|
|
| 1138 |
+ item_type = type(item)
|
|
| 1139 |
+ if item_type in __DICT_TYPES:
|
|
| 1084 | 1140 |
copy.append(node_chain_copy(item))
|
| 1085 |
- elif isinstance(item, list):
|
|
| 1141 |
+ elif item_type in __LIST_TYPES:
|
|
| 1086 | 1142 |
copy.append(list_chain_copy(item))
|
| 1087 |
- elif isinstance(item, Provenance):
|
|
| 1143 |
+ elif item_type in __PROVENANCE_TYPES:
|
|
| 1088 | 1144 |
copy.append(item.clone())
|
| 1089 |
- else:
|
|
| 1145 |
+ elif item_type in __QUICK_TYPES:
|
|
| 1090 | 1146 |
copy.append(item)
|
| 1147 |
+ else: # Fallback
|
|
| 1148 |
+ raise ValueError("Unable to be quick about list_chain_copy of {}".format(item_type))
|
|
| 1091 | 1149 |
|
| 1092 | 1150 |
return copy
|
| 1093 | 1151 |
|
| ... | ... | @@ -1095,14 +1153,17 @@ def list_chain_copy(source): |
| 1095 | 1153 |
def node_copy(source):
|
| 1096 | 1154 |
copy = {}
|
| 1097 | 1155 |
for key, value in source.items():
|
| 1098 |
- if isinstance(value, collections.abc.Mapping):
|
|
| 1156 |
+ value_type = type(value)
|
|
| 1157 |
+ if value_type in __DICT_TYPES:
|
|
| 1099 | 1158 |
copy[key] = node_copy(value)
|
| 1100 |
- elif isinstance(value, list):
|
|
| 1159 |
+ elif value_type in __LIST_TYPES:
|
|
| 1101 | 1160 |
copy[key] = list_copy(value)
|
| 1102 |
- elif isinstance(value, Provenance):
|
|
| 1161 |
+ elif value_type in __PROVENANCE_TYPES:
|
|
| 1103 | 1162 |
copy[key] = value.clone()
|
| 1104 |
- else:
|
|
| 1163 |
+ elif value_type in __QUICK_TYPES:
|
|
| 1105 | 1164 |
copy[key] = value
|
| 1165 |
+ else:
|
|
| 1166 |
+ raise ValueError("Unable to be quick about node_copy of {}".format(value_type))
|
|
| 1106 | 1167 |
|
| 1107 | 1168 |
ensure_provenance(copy)
|
| 1108 | 1169 |
|
| ... | ... | @@ -1112,14 +1173,17 @@ def node_copy(source): |
| 1112 | 1173 |
def list_copy(source):
|
| 1113 | 1174 |
copy = []
|
| 1114 | 1175 |
for item in source:
|
| 1115 |
- if isinstance(item, collections.abc.Mapping):
|
|
| 1176 |
+ item_type = type(item)
|
|
| 1177 |
+ if item_type in __DICT_TYPES:
|
|
| 1116 | 1178 |
copy.append(node_copy(item))
|
| 1117 |
- elif isinstance(item, list):
|
|
| 1179 |
+ elif item_type in __LIST_TYPES:
|
|
| 1118 | 1180 |
copy.append(list_copy(item))
|
| 1119 |
- elif isinstance(item, Provenance):
|
|
| 1181 |
+ elif item_type in __PROVENANCE_TYPES:
|
|
| 1120 | 1182 |
copy.append(item.clone())
|
| 1121 |
- else:
|
|
| 1183 |
+ elif item_type in __QUICK_TYPES:
|
|
| 1122 | 1184 |
copy.append(item)
|
| 1185 |
+ else:
|
|
| 1186 |
+ raise ValueError("Unable to be quick about list_copy of {}".format(item_type))
|
|
| 1123 | 1187 |
|
| 1124 | 1188 |
return copy
|
| 1125 | 1189 |
|
| ... | ... | @@ -1142,22 +1206,26 @@ def node_final_assertions(node): |
| 1142 | 1206 |
# indicates that the user intended to override a list which
|
| 1143 | 1207 |
# never existed in the underlying data
|
| 1144 | 1208 |
#
|
| 1145 |
- if key in ['(>)', '(<)', '(=)']:
|
|
| 1209 |
+ if key in __NODE_ASSERT_COMPOSITION_DIRECTIVES:
|
|
| 1146 | 1210 |
provenance = node_get_provenance(node, key)
|
| 1147 | 1211 |
raise LoadError(LoadErrorReason.TRAILING_LIST_DIRECTIVE,
|
| 1148 | 1212 |
"{}: Attempt to override non-existing list".format(provenance))
|
| 1149 | 1213 |
|
| 1150 |
- if isinstance(value, collections.abc.Mapping):
|
|
| 1214 |
+ value_type = type(value)
|
|
| 1215 |
+ |
|
| 1216 |
+ if value_type in __DICT_TYPES:
|
|
| 1151 | 1217 |
node_final_assertions(value)
|
| 1152 |
- elif isinstance(value, list):
|
|
| 1218 |
+ elif value_type in __LIST_TYPES:
|
|
| 1153 | 1219 |
list_final_assertions(value)
|
| 1154 | 1220 |
|
| 1155 | 1221 |
|
| 1156 | 1222 |
def list_final_assertions(values):
|
| 1157 | 1223 |
for value in values:
|
| 1158 |
- if isinstance(value, collections.abc.Mapping):
|
|
| 1224 |
+ value_type = type(value)
|
|
| 1225 |
+ |
|
| 1226 |
+ if value_type in __DICT_TYPES:
|
|
| 1159 | 1227 |
node_final_assertions(value)
|
| 1160 |
- elif isinstance(value, list):
|
|
| 1228 |
+ elif value_type in __LIST_TYPES:
|
|
| 1161 | 1229 |
list_final_assertions(value)
|
| 1162 | 1230 |
|
| 1163 | 1231 |
|
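The hunks above replace isinstance() dispatch with membership tests on exact types, so unknown node types now raise instead of being silently copied. A minimal standalone sketch of the pattern, with assumed type tables (the module's real __DICT_TYPES, __LIST_TYPES and __QUICK_TYPES may differ):

    # Exact-type dispatch: type(x) avoids isinstance()'s subclass checks
    DICT_TYPES = (dict,)
    LIST_TYPES = (list,)
    QUICK_TYPES = (str, bool, int, float, type(None))

    def classify(value):
        value_type = type(value)
        if value_type in DICT_TYPES:
            return 'dict'
        elif value_type in LIST_TYPES:
            return 'list'
        elif value_type in QUICK_TYPES:
            return 'quick'
        raise ValueError("Unable to be quick about {}".format(value_type))

Exact-type membership also rejects subclasses by design: only known node types take the fast path, and anything else fails loudly.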
| ... | ... | @@ -41,6 +41,15 @@ cache: |
| 41 | 41 |
# Whether to pull build trees when downloading element artifacts
|
| 42 | 42 |
pull-buildtrees: False
|
| 43 | 43 |
|
| 44 |
+ # Whether to cache build trees on artifact creation:
|
|
| 45 |
+ #
|
|
| 46 |
+ # always - Always cache artifact build tree content
|
|
| 47 |
+ # failure - Only cache build trees of failed builds
|
|
| 48 |
+ # never - Don't cache artifact build tree content
|
|
| 49 |
+ #
|
|
| 50 |
+ cache-buildtrees: always
|
|
| 51 |
+ |
|
| 52 |
+ |
|
| 44 | 53 |
#
|
| 45 | 54 |
# Scheduler
|
| 46 | 55 |
#
|
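For illustration, a minimal sketch of how such a tri-state option could be validated when loading user configuration. This assumes a plain dict parsed from the YAML above; it is not BuildStream's actual loader, and the helper name is hypothetical:

    _CACHE_BUILDTREES_VALUES = ('always', 'failure', 'never')

    def validate_cache_buildtrees(config):
        # Fall back to the documented default when the key is absent
        value = config.get('cache', {}).get('cache-buildtrees', 'always')
        if value not in _CACHE_BUILDTREES_VALUES:
            raise ValueError("cache-buildtrees must be one of {}, not '{}'"
                             .format(', '.join(_CACHE_BUILDTREES_VALUES), value))
        return value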
| ... | ... | @@ -82,6 +82,7 @@ import contextlib |
| 82 | 82 |
from contextlib import contextmanager
|
| 83 | 83 |
import tempfile
|
| 84 | 84 |
import shutil
|
| 85 |
+import string
|
|
| 85 | 86 |
|
| 86 | 87 |
from . import _yaml
|
| 87 | 88 |
from ._variables import Variables
|
| ... | ... | @@ -577,6 +578,38 @@ class Element(Plugin): |
| 577 | 578 |
self.__assert_cached()
|
| 578 | 579 |
return self.__compute_splits(include, exclude, orphans)
|
| 579 | 580 |
|
| 581 |
+ def get_artifact_name(self, key=None):
|
|
| 582 |
+ """Compute and return this element's full artifact name
|
|
| 583 |
+ |
|
| 584 |
+ Generate a full name for an artifact, including the project
|
|
| 585 |
+ namespace, element name and cache key.
|
|
| 586 |
+ |
|
| 587 |
+ This can also be used as a relative path safely, and
|
|
| 588 |
+ will normalize parts of the element name such that only
|
|
| 589 |
+ digits, letters and some select characters are allowed.
|
|
| 590 |
+ |
|
| 591 |
+ Args:
|
|
| 592 |
+ key (str): The element's cache key. Defaults to None
|
|
| 593 |
+ |
|
| 594 |
+ Returns:
|
|
| 595 |
+ (str): The relative path for the artifact
|
|
| 596 |
+ """
|
|
| 597 |
+ project = self._get_project()
|
|
| 598 |
+ if key is None:
|
|
| 599 |
+ key = self._get_cache_key()
|
|
| 600 |
+ |
|
| 601 |
+ assert key is not None
|
|
| 602 |
+ |
|
| 603 |
+ valid_chars = string.digits + string.ascii_letters + '-._'
|
|
| 604 |
+ element_name = ''.join([
|
|
| 605 |
+ x if x in valid_chars else '_'
|
|
| 606 |
+ for x in self.normal_name
|
|
| 607 |
+ ])
|
|
| 608 |
+ |
|
| 609 |
+ # Note that project names are not allowed to contain slashes. Element names containing
|
|
| 610 |
+ # a '/' will have this replaced with a '-' upon Element object instantiation.
|
|
| 611 |
+ return '{0}/{1}/{2}'.format(project.name, element_name, key)
|
|
| 612 |
+ |
|
| 580 | 613 |
def stage_artifact(self, sandbox, *, path=None, include=None, exclude=None, orphans=True, update_mtimes=None):
|
| 581 | 614 |
"""Stage this element's output artifact in the sandbox
|
| 582 | 615 |
|
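The normalization performed by get_artifact_name() above can be sketched standalone; the helper name and example are illustrative only, not part of the patch:

    import string

    _VALID_CHARS = string.digits + string.ascii_letters + '-._'

    def sanitize_element_name(name):
        # Substitute '_' for anything outside digits, letters, '-', '.' and '_'
        return ''.join(x if x in _VALID_CHARS else '_' for x in name)

    # e.g. sanitize_element_name('hello world@2.bst') == 'hello_world_2.bst'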
| ... | ... | @@ -1118,7 +1151,7 @@ class Element(Plugin): |
| 1118 | 1151 |
e.name for e in self.dependencies(Scope.BUILD, recurse=False)
|
| 1119 | 1152 |
]
|
| 1120 | 1153 |
|
| 1121 |
- self.__weak_cache_key = self.__calculate_cache_key(dependencies)
|
|
| 1154 |
+ self.__weak_cache_key = self._calculate_cache_key(dependencies)
|
|
| 1122 | 1155 |
|
| 1123 | 1156 |
if self.__weak_cache_key is None:
|
| 1124 | 1157 |
# Weak cache key could not be calculated yet
|
| ... | ... | @@ -1147,8 +1180,7 @@ class Element(Plugin): |
| 1147 | 1180 |
dependencies = [
|
| 1148 | 1181 |
e.__strict_cache_key for e in self.dependencies(Scope.BUILD)
|
| 1149 | 1182 |
]
|
| 1150 |
- self.__strict_cache_key = self.__calculate_cache_key(dependencies)
|
|
| 1151 |
- |
|
| 1183 |
+ self.__strict_cache_key = self._calculate_cache_key(dependencies)
|
|
| 1152 | 1184 |
if self.__strict_cache_key is None:
|
| 1153 | 1185 |
# Strict cache key could not be calculated yet
|
| 1154 | 1186 |
return
|
| ... | ... | @@ -1190,7 +1222,7 @@ class Element(Plugin): |
| 1190 | 1222 |
dependencies = [
|
| 1191 | 1223 |
e._get_cache_key() for e in self.dependencies(Scope.BUILD)
|
| 1192 | 1224 |
]
|
| 1193 |
- self.__cache_key = self.__calculate_cache_key(dependencies)
|
|
| 1225 |
+ self.__cache_key = self._calculate_cache_key(dependencies)
|
|
| 1194 | 1226 |
|
| 1195 | 1227 |
if self.__cache_key is None:
|
| 1196 | 1228 |
# Strong cache key could not be calculated yet
|
| ... | ... | @@ -1425,6 +1457,9 @@ class Element(Plugin): |
| 1425 | 1457 |
elif usebuildtree:
|
| 1426 | 1458 |
artifact_base, _ = self.__extract()
|
| 1427 | 1459 |
import_dir = os.path.join(artifact_base, 'buildtree')
|
| 1460 |
+ if not os.listdir(import_dir):
|
|
| 1461 |
+ detail = "Element type either does not expect a buildtree or it was explictily cached without one."
|
|
| 1462 |
+ self.warn("WARNING: {} Artifact contains an empty buildtree".format(self.name), detail=detail)
|
|
| 1428 | 1463 |
else:
|
| 1429 | 1464 |
# No workspace or cached buildtree, stage source directly
|
| 1430 | 1465 |
for source in self.sources():
|
| ... | ... | @@ -1631,6 +1666,8 @@ class Element(Plugin): |
| 1631 | 1666 |
# No collect directory existed
|
| 1632 | 1667 |
collectvdir = None
|
| 1633 | 1668 |
|
| 1669 |
+ context = self._get_context()
|
|
| 1670 |
+ |
|
| 1634 | 1671 |
# Create artifact directory structure
|
| 1635 | 1672 |
assembledir = os.path.join(rootdir, 'artifact')
|
| 1636 | 1673 |
filesdir = os.path.join(assembledir, 'files')
|
| ... | ... | @@ -1648,20 +1685,30 @@ class Element(Plugin): |
| 1648 | 1685 |
if collect is not None and collectvdir is not None:
|
| 1649 | 1686 |
collectvdir.export_files(filesdir, can_link=True)
|
| 1650 | 1687 |
|
| 1651 |
- try:
|
|
| 1652 |
- sandbox_vroot = sandbox.get_virtual_directory()
|
|
| 1653 |
- sandbox_build_dir = sandbox_vroot.descend(
|
|
| 1654 |
- self.get_variable('build-root').lstrip(os.sep).split(os.sep))
|
|
| 1655 |
- # Hard link files from build-root dir to buildtreedir directory
|
|
| 1656 |
- sandbox_build_dir.export_files(buildtreedir)
|
|
| 1657 |
- except VirtualDirectoryError:
|
|
| 1658 |
- # Directory could not be found. Pre-virtual
|
|
| 1659 |
- # directory behaviour was to continue silently
|
|
| 1660 |
- # if the directory could not be found.
|
|
| 1661 |
- pass
|
|
| 1688 |
+ cache_buildtrees = context.cache_buildtrees
|
|
| 1689 |
+ build_success = self.__build_result[0]
|
|
| 1690 |
+ |
|
| 1691 |
+ # cache_buildtrees defaults to 'always', so the default
|
|
| 1692 |
+ # behaviour is to attempt to cache them. If only the buildtrees
|
|
| 1693 |
+ # of failed builds are to be cached, query the build
|
|
| 1694 |
+ # result. Element types without a build-root dir will be cached
|
|
| 1695 |
+ # with an empty buildtreedir regardless of this configuration.
|
|
| 1696 |
+ |
|
| 1697 |
+ if cache_buildtrees == 'always' or (cache_buildtrees == 'failure' and not build_success):
|
|
| 1698 |
+ try:
|
|
| 1699 |
+ sandbox_vroot = sandbox.get_virtual_directory()
|
|
| 1700 |
+ sandbox_build_dir = sandbox_vroot.descend(
|
|
| 1701 |
+ self.get_variable('build-root').lstrip(os.sep).split(os.sep))
|
|
| 1702 |
+ # Hard link files from build-root dir to buildtreedir directory
|
|
| 1703 |
+ sandbox_build_dir.export_files(buildtreedir)
|
|
| 1704 |
+ except VirtualDirectoryError:
|
|
| 1705 |
+ # Directory could not be found. Pre-virtual
|
|
| 1706 |
+ # directory behaviour was to continue silently
|
|
| 1707 |
+ # if the directory could not be found.
|
|
| 1708 |
+ pass
|
|
| 1662 | 1709 |
|
| 1663 | 1710 |
# Copy build log
|
| 1664 |
- log_filename = self._get_context().get_log_filename()
|
|
| 1711 |
+ log_filename = context.get_log_filename()
|
|
| 1665 | 1712 |
self._build_log_path = os.path.join(logsdir, 'build.log')
|
| 1666 | 1713 |
if log_filename:
|
| 1667 | 1714 |
shutil.copyfile(log_filename, self._build_log_path)
|
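The caching decision introduced above reduces to a small predicate over the configured mode and the build result; a hypothetical factoring (not part of the patch) that makes the three modes explicit:

    def should_cache_buildtree(cache_buildtrees, build_success):
        # 'always' caches every buildtree, 'failure' only those of failed
        # builds, and 'never' leaves the buildtree directory empty
        if cache_buildtrees == 'always':
            return True
        if cache_buildtrees == 'failure':
            return not build_success
        return False

    assert should_cache_buildtree('failure', build_success=False)
    assert not should_cache_buildtree('never', build_success=False)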
| ... | ... | @@ -1802,7 +1849,7 @@ class Element(Plugin): |
| 1802 | 1849 |
return True
|
| 1803 | 1850 |
|
| 1804 | 1851 |
# Do not push elements that aren't cached, or that are cached with a dangling buildtree
|
| 1805 |
- # artifact unless element type is expected to have an an empty buildtree directory
|
|
| 1852 |
+ # ref unless element type is expected to have an empty buildtree directory
|
|
| 1806 | 1853 |
if not self._cached_buildtree():
|
| 1807 | 1854 |
return True
|
| 1808 | 1855 |
|
| ... | ... | @@ -2004,6 +2051,8 @@ class Element(Plugin): |
| 2004 | 2051 |
# Returns:
|
| 2005 | 2052 |
# (bool): True if artifact cached with buildtree, False if
|
| 2006 | 2053 |
# element not cached or missing expected buildtree.
|
| 2054 |
+ # Note this only confirms whether a buildtree is present,
|
|
| 2055 |
+ # not its contents.
|
|
| 2007 | 2056 |
#
|
| 2008 | 2057 |
def _cached_buildtree(self):
|
| 2009 | 2058 |
context = self._get_context()
|
| ... | ... | @@ -2032,41 +2081,7 @@ class Element(Plugin): |
| 2032 | 2081 |
source._fetch(previous_sources)
|
| 2033 | 2082 |
previous_sources.append(source)
|
| 2034 | 2083 |
|
| 2035 |
- #############################################################
|
|
| 2036 |
- # Private Local Methods #
|
|
| 2037 |
- #############################################################
|
|
| 2038 |
- |
|
| 2039 |
- # __update_source_state()
|
|
| 2040 |
- #
|
|
| 2041 |
- # Updates source consistency state
|
|
| 2042 |
- #
|
|
| 2043 |
- def __update_source_state(self):
|
|
| 2044 |
- |
|
| 2045 |
- # Cannot resolve source state until tracked
|
|
| 2046 |
- if self.__tracking_scheduled:
|
|
| 2047 |
- return
|
|
| 2048 |
- |
|
| 2049 |
- self.__consistency = Consistency.CACHED
|
|
| 2050 |
- workspace = self._get_workspace()
|
|
| 2051 |
- |
|
| 2052 |
- # Special case for workspaces
|
|
| 2053 |
- if workspace:
|
|
| 2054 |
- |
|
| 2055 |
- # A workspace is considered inconsistent in the case
|
|
| 2056 |
- # that its directory went missing
|
|
| 2057 |
- #
|
|
| 2058 |
- fullpath = workspace.get_absolute_path()
|
|
| 2059 |
- if not os.path.exists(fullpath):
|
|
| 2060 |
- self.__consistency = Consistency.INCONSISTENT
|
|
| 2061 |
- else:
|
|
| 2062 |
- |
|
| 2063 |
- # Determine overall consistency of the element
|
|
| 2064 |
- for source in self.__sources:
|
|
| 2065 |
- source._update_state()
|
|
| 2066 |
- source_consistency = source._get_consistency()
|
|
| 2067 |
- self.__consistency = min(self.__consistency, source_consistency)
|
|
| 2068 |
- |
|
| 2069 |
- # __calculate_cache_key():
|
|
| 2084 |
+ # _calculate_cache_key():
|
|
| 2070 | 2085 |
#
|
| 2071 | 2086 |
# Calculates the cache key
|
| 2072 | 2087 |
#
|
| ... | ... | @@ -2075,7 +2090,7 @@ class Element(Plugin): |
| 2075 | 2090 |
#
|
| 2076 | 2091 |
# None is returned if information for the cache key is missing.
|
| 2077 | 2092 |
#
|
| 2078 |
- def __calculate_cache_key(self, dependencies):
|
|
| 2093 |
+ def _calculate_cache_key(self, dependencies):
|
|
| 2079 | 2094 |
# No cache keys for dependencies which have no cache keys
|
| 2080 | 2095 |
if None in dependencies:
|
| 2081 | 2096 |
return None
|
| ... | ... | @@ -2114,6 +2129,40 @@ class Element(Plugin): |
| 2114 | 2129 |
|
| 2115 | 2130 |
return _cachekey.generate_key(cache_key_dict)
|
| 2116 | 2131 |
|
| 2132 |
+ #############################################################
|
|
| 2133 |
+ # Private Local Methods #
|
|
| 2134 |
+ #############################################################
|
|
| 2135 |
+ |
|
| 2136 |
+ # __update_source_state()
|
|
| 2137 |
+ #
|
|
| 2138 |
+ # Updates source consistency state
|
|
| 2139 |
+ #
|
|
| 2140 |
+ def __update_source_state(self):
|
|
| 2141 |
+ |
|
| 2142 |
+ # Cannot resolve source state until tracked
|
|
| 2143 |
+ if self.__tracking_scheduled:
|
|
| 2144 |
+ return
|
|
| 2145 |
+ |
|
| 2146 |
+ self.__consistency = Consistency.CACHED
|
|
| 2147 |
+ workspace = self._get_workspace()
|
|
| 2148 |
+ |
|
| 2149 |
+ # Special case for workspaces
|
|
| 2150 |
+ if workspace:
|
|
| 2151 |
+ |
|
| 2152 |
+ # A workspace is considered inconsistent in the case
|
|
| 2153 |
+ # that its directory went missing
|
|
| 2154 |
+ #
|
|
| 2155 |
+ fullpath = workspace.get_absolute_path()
|
|
| 2156 |
+ if not os.path.exists(fullpath):
|
|
| 2157 |
+ self.__consistency = Consistency.INCONSISTENT
|
|
| 2158 |
+ else:
|
|
| 2159 |
+ |
|
| 2160 |
+ # Determine overall consistency of the element
|
|
| 2161 |
+ for source in self.__sources:
|
|
| 2162 |
+ source._update_state()
|
|
| 2163 |
+ source_consistency = source._get_consistency()
|
|
| 2164 |
+ self.__consistency = min(self.__consistency, source_consistency)
|
|
| 2165 |
+ |
|
| 2117 | 2166 |
# __can_build_incrementally()
|
| 2118 | 2167 |
#
|
| 2119 | 2168 |
# Check if the element can be built incrementally, this
|
| ... | ... | @@ -2297,6 +2346,8 @@ class Element(Plugin): |
| 2297 | 2346 |
defaults['public'] = element_public
|
| 2298 | 2347 |
|
| 2299 | 2348 |
def __init_defaults(self, plugin_conf):
|
| 2349 |
+ if plugin_conf is None:
|
|
| 2350 |
+ return
|
|
| 2300 | 2351 |
|
| 2301 | 2352 |
# Defaults are loaded once per class and then reused
|
| 2302 | 2353 |
#
|
| ... | ... | @@ -211,7 +211,7 @@ def test_pull_tree(cli, tmpdir, datafiles): |
| 211 | 211 |
assert artifactcache.contains(element, element_key)
|
| 212 | 212 |
|
| 213 | 213 |
# Retrieve the Directory object from the cached artifact
|
| 214 |
- artifact_ref = artifactcache.get_artifact_fullname(element, element_key)
|
|
| 214 |
+ artifact_ref = element.get_artifact_name(element_key)
|
|
| 215 | 215 |
artifact_digest = cas.resolve_ref(artifact_ref)
|
| 216 | 216 |
|
| 217 | 217 |
queue = multiprocessing.Queue()
|
| ... | ... | @@ -190,7 +190,7 @@ def test_push_directory(cli, tmpdir, datafiles): |
| 190 | 190 |
assert artifactcache.has_push_remotes(element=element)
|
| 191 | 191 |
|
| 192 | 192 |
# Recreate the CasBasedDirectory object from the cached artifact
|
| 193 |
- artifact_ref = artifactcache.get_artifact_fullname(element, element_key)
|
|
| 193 |
+ artifact_ref = element.get_artifact_name(element_key)
|
|
| 194 | 194 |
artifact_digest = cas.resolve_ref(artifact_ref)
|
| 195 | 195 |
|
| 196 | 196 |
queue = multiprocessing.Queue()
|
| ... | ... | @@ -23,6 +23,7 @@ MAIN_OPTIONS = [ |
| 23 | 23 |
"--builders ",
|
| 24 | 24 |
"-c ",
|
| 25 | 25 |
"-C ",
|
| 26 |
+ "--cache-buildtrees ",
|
|
| 26 | 27 |
"--colors ",
|
| 27 | 28 |
"--config ",
|
| 28 | 29 |
"--debug ",
|
| ... | ... | @@ -156,6 +157,7 @@ def test_options(cli, cmd, word_idx, expected): |
| 156 | 157 |
|
| 157 | 158 |
@pytest.mark.parametrize("cmd,word_idx,expected", [
|
| 158 | 159 |
('bst --on-error ', 2, ['continue ', 'quit ', 'terminate ']),
|
| 160 |
+ ('bst --cache-buildtrees ', 2, ['always ', 'failure ', 'never ']),
|
|
| 159 | 161 |
('bst show --deps ', 3, ['all ', 'build ', 'none ', 'plan ', 'run ']),
|
| 160 | 162 |
('bst show --deps=', 2, ['all ', 'build ', 'none ', 'plan ', 'run ']),
|
| 161 | 163 |
('bst show --deps b', 3, ['build ']),
|
| 1 | 1 |
import os
|
| 2 | 2 |
import shutil
|
| 3 |
+import stat
|
|
| 3 | 4 |
import pytest
|
| 4 | 5 |
from buildstream.plugintestutils import cli
|
| 5 | 6 |
from tests.testutils import create_artifact_share, generate_junction
|
| ... | ... | @@ -462,3 +463,74 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles): |
| 462 | 463 |
assert shareproject.repo not in result.stderr
|
| 463 | 464 |
assert shareuser.repo not in result.stderr
|
| 464 | 465 |
assert sharecli.repo in result.stderr
|
| 466 |
+ |
|
| 467 |
+ |
|
| 468 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 469 |
+def test_pull_access_rights(caplog, cli, tmpdir, datafiles):
|
|
| 470 |
+ project = str(datafiles)
|
|
| 471 |
+ checkout = os.path.join(str(tmpdir), 'checkout')
|
|
| 472 |
+ |
|
| 473 |
+ # Work-around datafiles not preserving mode
|
|
| 474 |
+ os.chmod(os.path.join(project, 'files/bin-files/usr/bin/hello'), 0o0755)
|
|
| 475 |
+ |
|
| 476 |
+ # We need a big file that does not go into a batch to test a different
|
|
| 477 |
+ # code path
|
|
| 478 |
+ os.makedirs(os.path.join(project, 'files/dev-files/usr/share'), exist_ok=True)
|
|
| 479 |
+ with open(os.path.join(project, 'files/dev-files/usr/share/big-file'), 'w') as f:
|
|
| 480 |
+ buf = ' ' * 4096
|
|
| 481 |
+ for _ in range(1024):
|
|
| 482 |
+ f.write(buf)
|
|
| 483 |
+ |
|
| 484 |
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
|
|
| 485 |
+ |
|
| 486 |
+ cli.configure({
|
|
| 487 |
+ 'artifacts': {'url': share.repo, 'push': True}
|
|
| 488 |
+ })
|
|
| 489 |
+ result = cli.run(project=project, args=['build', 'compose-all.bst'])
|
|
| 490 |
+ result.assert_success()
|
|
| 491 |
+ |
|
| 492 |
+ result = cli.run(project=project,
|
|
| 493 |
+ args=['artifact', 'checkout',
|
|
| 494 |
+ '--hardlinks', '--no-integrate',
|
|
| 495 |
+ 'compose-all.bst',
|
|
| 496 |
+ '--directory', checkout])
|
|
| 497 |
+ result.assert_success()
|
|
| 498 |
+ |
|
| 499 |
+ st = os.lstat(os.path.join(checkout, 'usr/include/pony.h'))
|
|
| 500 |
+ assert stat.S_ISREG(st.st_mode)
|
|
| 501 |
+ assert stat.S_IMODE(st.st_mode) == 0o0644
|
|
| 502 |
+ |
|
| 503 |
+ st = os.lstat(os.path.join(checkout, 'usr/bin/hello'))
|
|
| 504 |
+ assert stat.S_ISREG(st.st_mode)
|
|
| 505 |
+ assert stat.S_IMODE(st.st_mode) == 0o0755
|
|
| 506 |
+ |
|
| 507 |
+ st = os.lstat(os.path.join(checkout, 'usr/share/big-file'))
|
|
| 508 |
+ assert stat.S_ISREG(st.st_mode)
|
|
| 509 |
+ assert stat.S_IMODE(st.st_mode) == 0o0644
|
|
| 510 |
+ |
|
| 511 |
+ shutil.rmtree(checkout)
|
|
| 512 |
+ |
|
| 513 |
+ artifacts = os.path.join(cli.directory, 'artifacts')
|
|
| 514 |
+ shutil.rmtree(artifacts)
|
|
| 515 |
+ |
|
| 516 |
+ result = cli.run(project=project, args=['artifact', 'pull', 'compose-all.bst'])
|
|
| 517 |
+ result.assert_success()
|
|
| 518 |
+ |
|
| 519 |
+ result = cli.run(project=project,
|
|
| 520 |
+ args=['artifact', 'checkout',
|
|
| 521 |
+ '--hardlinks', '--no-integrate',
|
|
| 522 |
+ 'compose-all.bst',
|
|
| 523 |
+ '--directory', checkout])
|
|
| 524 |
+ result.assert_success()
|
|
| 525 |
+ |
|
| 526 |
+ st = os.lstat(os.path.join(checkout, 'usr/include/pony.h'))
|
|
| 527 |
+ assert stat.S_ISREG(st.st_mode)
|
|
| 528 |
+ assert stat.S_IMODE(st.st_mode) == 0o0644
|
|
| 529 |
+ |
|
| 530 |
+ st = os.lstat(os.path.join(checkout, 'usr/bin/hello'))
|
|
| 531 |
+ assert stat.S_ISREG(st.st_mode)
|
|
| 532 |
+ assert stat.S_IMODE(st.st_mode) == 0o0755
|
|
| 533 |
+ |
|
| 534 |
+ st = os.lstat(os.path.join(checkout, 'usr/share/big-file'))
|
|
| 535 |
+ assert stat.S_ISREG(st.st_mode)
|
|
| 536 |
+ assert stat.S_IMODE(st.st_mode) == 0o0644
|
| ... | ... | @@ -20,9 +20,12 @@ |
| 20 | 20 |
|
| 21 | 21 |
import os
|
| 22 | 22 |
import pytest
|
| 23 |
+import shutil
|
|
| 23 | 24 |
|
| 24 | 25 |
from buildstream.plugintestutils import cli_integration as cli
|
| 25 |
- |
|
| 26 |
+from tests.testutils import create_artifact_share
|
|
| 27 |
+from tests.testutils.site import HAVE_SANDBOX
|
|
| 28 |
+from buildstream._exceptions import ErrorDomain
|
|
| 26 | 29 |
|
| 27 | 30 |
pytestmark = pytest.mark.integration
|
| 28 | 31 |
|
| ... | ... | @@ -66,3 +69,106 @@ def test_artifact_log(cli, tmpdir, datafiles): |
| 66 | 69 |
assert result.exit_code == 0
|
| 67 | 70 |
# The artifact is cached under both a strong key and a weak key
|
| 68 | 71 |
assert (log + log) == result.output
|
| 72 |
+ |
|
| 73 |
+ |
|
| 74 |
+# A test to capture the integration of the cache-buildtrees
|
|
| 75 |
+# behaviour, which by default is to include the buildtree
|
|
| 76 |
+# content of an element on caching.
|
|
| 77 |
+@pytest.mark.integration
|
|
| 78 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 79 |
+@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
|
|
| 80 |
+def test_cache_buildtrees(cli, tmpdir, datafiles):
|
|
| 81 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 82 |
+ element_name = 'autotools/amhello.bst'
|
|
| 83 |
+ |
|
| 84 |
+ # Create artifact shares for pull & push testing
|
|
| 85 |
+ with create_artifact_share(os.path.join(str(tmpdir), 'share1')) as share1,\
|
|
| 86 |
+ create_artifact_share(os.path.join(str(tmpdir), 'share2')) as share2,\
|
|
| 87 |
+ create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3:
|
|
| 88 |
+ cli.configure({
|
|
| 89 |
+ 'artifacts': {'url': share1.repo, 'push': True},
|
|
| 90 |
+ 'artifactdir': os.path.join(str(tmpdir), 'artifacts')
|
|
| 91 |
+ })
|
|
| 92 |
+ |
|
| 93 |
+ # Build autotools element with cache-buildtrees set to 'never' via the
|
|
| 94 |
+ # cli. The artifact should be successfully pushed to the share1 remote
|
|
| 95 |
+ # and cached locally with an 'empty' buildtree digest, as it's not a
|
|
| 96 |
+ # dangling ref
|
|
| 97 |
+ result = cli.run(project=project, args=['--cache-buildtrees', 'never', 'build', element_name])
|
|
| 98 |
+ assert result.exit_code == 0
|
|
| 99 |
+ assert cli.get_element_state(project, element_name) == 'cached'
|
|
| 100 |
+ assert share1.has_artifact('test', element_name, cli.get_element_key(project, element_name))
|
|
| 101 |
+ |
|
| 102 |
+ # The extracted buildtree dir should be empty, as we set the config
|
|
| 103 |
+ # to not cache buildtrees
|
|
| 104 |
+ cache_key = cli.get_element_key(project, element_name)
|
|
| 105 |
+ elementdigest = share1.has_artifact('test', element_name, cache_key)
|
|
| 106 |
+ buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
|
|
| 107 |
+ elementdigest.hash, 'buildtree')
|
|
| 108 |
+ assert os.path.isdir(buildtreedir)
|
|
| 109 |
+ assert not os.listdir(buildtreedir)
|
|
| 110 |
+ |
|
| 111 |
+ # Delete the local cached artifacts, and assert that when pulled with --pull-buildtrees
|
|
| 112 |
+ # it was cached in share1 as expected with an empty buildtree dir
|
|
| 113 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
|
|
| 114 |
+ assert cli.get_element_state(project, element_name) != 'cached'
|
|
| 115 |
+ result = cli.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
|
|
| 116 |
+ assert element_name in result.get_pulled_elements()
|
|
| 117 |
+ assert os.path.isdir(buildtreedir)
|
|
| 118 |
+ assert not os.listdir(buildtreedir)
|
|
| 119 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
|
|
| 120 |
+ |
|
| 121 |
+ # Assert that pull's default behaviour of not including buildtrees works as expected
|
|
| 122 |
+ # with the artifact in share1, which was purposely cached with an empty one. The
|
|
| 123 |
+ # pulled artifact will have a dangling ref for the buildtree dir, regardless of content,
|
|
| 124 |
+ # leading to no buildtreedir being extracted
|
|
| 125 |
+ result = cli.run(project=project, args=['artifact', 'pull', element_name])
|
|
| 126 |
+ assert element_name in result.get_pulled_elements()
|
|
| 127 |
+ assert not os.path.isdir(buildtreedir)
|
|
| 128 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
|
|
| 129 |
+ |
|
| 130 |
+ # Repeat building the artifacts, this time with the default behaviour of caching buildtrees,
|
|
| 131 |
+ # as such the buildtree dir should not be empty
|
|
| 132 |
+ cli.configure({
|
|
| 133 |
+ 'artifacts': {'url': share2.repo, 'push': True},
|
|
| 134 |
+ 'artifactdir': os.path.join(str(tmpdir), 'artifacts')
|
|
| 135 |
+ })
|
|
| 136 |
+ result = cli.run(project=project, args=['build', element_name])
|
|
| 137 |
+ assert result.exit_code == 0
|
|
| 138 |
+ assert cli.get_element_state(project, element_name) == 'cached'
|
|
| 139 |
+ assert share2.has_artifact('test', element_name, cli.get_element_key(project, element_name))
|
|
| 140 |
+ |
|
| 141 |
+ # The cache key will be the same, but the digest hash will have changed as expected, so reconstruct paths
|
|
| 142 |
+ elementdigest = share2.has_artifact('test', element_name, cache_key)
|
|
| 143 |
+ buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
|
|
| 144 |
+ elementdigest.hash, 'buildtree')
|
|
| 145 |
+ assert os.path.isdir(buildtreedir)
|
|
| 146 |
+ assert os.listdir(buildtreedir)  # os.listdir() never returns None; assert the dir is non-empty
|
|
| 147 |
+ |
|
| 148 |
+ # Delete the local cached artifacts, and assert that when pulled with --pull-buildtrees
|
|
| 149 |
+ # that it was cached in share2 as expected with a populated buildtree dir
|
|
| 150 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
|
|
| 151 |
+ assert cli.get_element_state(project, element_name) != 'cached'
|
|
| 152 |
+ result = cli.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
|
|
| 153 |
+ assert element_name in result.get_pulled_elements()
|
|
| 154 |
+ assert os.path.isdir(buildtreedir)
|
|
| 155 |
+ assert os.listdir(buildtreedir)  # os.listdir() never returns None; assert the dir is non-empty
|
|
| 156 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
|
|
| 157 |
+ |
|
| 158 |
+ # Clarify that the user config option for cache-buildtrees works as the cli
|
|
| 159 |
+ # main option does. Point to share3 which does not have the artifacts cached to force
|
|
| 160 |
+ # a build
|
|
| 161 |
+ cli.configure({
|
|
| 162 |
+ 'artifacts': {'url': share3.repo, 'push': True},
|
|
| 163 |
+ 'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
|
|
| 164 |
+ 'cache': {'cache-buildtrees': 'never'}
|
|
| 165 |
+ })
|
|
| 166 |
+ result = cli.run(project=project, args=['build', element_name])
|
|
| 167 |
+ assert result.exit_code == 0
|
|
| 168 |
+ assert cli.get_element_state(project, element_name) == 'cached'
|
|
| 169 |
+ cache_key = cli.get_element_key(project, element_name)
|
|
| 170 |
+ elementdigest = share3.has_artifact('test', element_name, cache_key)
|
|
| 171 |
+ buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
|
|
| 172 |
+ elementdigest.hash, 'buildtree')
|
|
| 173 |
+ assert os.path.isdir(buildtreedir)
|
|
| 174 |
+ assert not os.listdir(buildtreedir)
|
| ... | ... | @@ -52,6 +52,29 @@ def test_buildtree_staged_forced_true(cli_integration, tmpdir, datafiles): |
| 52 | 52 |
assert 'Hi' in res.output
|
| 53 | 53 |
|
| 54 | 54 |
|
| 55 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 56 |
+@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
|
|
| 57 |
+def test_buildtree_staged_warn_empty_cached(cli_integration, tmpdir, datafiles):
|
|
| 58 |
+ # Test that if we stage a cached and empty buildtree, we warn the user.
|
|
| 59 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 60 |
+ element_name = 'build-shell/buildtree.bst'
|
|
| 61 |
+ |
|
| 62 |
+ # Switch to a temp artifact cache dir to ensure the artifact is rebuilt,
|
|
| 63 |
+ # caching an empty buildtree
|
|
| 64 |
+ cli_integration.configure({
|
|
| 65 |
+ 'artifactdir': os.path.join(str(tmpdir), 'artifacts')
|
|
| 66 |
+ })
|
|
| 67 |
+ |
|
| 68 |
+ res = cli_integration.run(project=project, args=['--cache-buildtrees', 'never', 'build', element_name])
|
|
| 69 |
+ res.assert_success()
|
|
| 70 |
+ |
|
| 71 |
+ res = cli_integration.run(project=project, args=[
|
|
| 72 |
+ 'shell', '--build', '--use-buildtree', 'always', element_name, '--', 'cat', 'test'
|
|
| 73 |
+ ])
|
|
| 74 |
+ res.assert_shell_error()
|
|
| 75 |
+ assert "Artifact contains an empty buildtree" in res.stderr
|
|
| 76 |
+ |
|
| 77 |
+ |
|
| 55 | 78 |
@pytest.mark.datafiles(DATA_DIR)
|
| 56 | 79 |
@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
|
| 57 | 80 |
def test_buildtree_staged_if_available(cli_integration, tmpdir, datafiles):
|
| ... | ... | @@ -106,6 +129,54 @@ def test_buildtree_from_failure(cli_integration, tmpdir, datafiles): |
| 106 | 129 |
assert 'Hi' in res.output
|
| 107 | 130 |
|
| 108 | 131 |
|
| 132 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 133 |
+@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
|
|
| 134 |
+def test_buildtree_from_failure_option_never(cli_integration, tmpdir, datafiles):
|
|
| 135 |
+ |
|
| 136 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 137 |
+ element_name = 'build-shell/buildtree-fail.bst'
|
|
| 138 |
+ |
|
| 139 |
+ # Switch to a temp artifact cache dir to ensure the artifact is rebuilt,
|
|
| 140 |
+ # caching an empty buildtree
|
|
| 141 |
+ cli_integration.configure({
|
|
| 142 |
+ 'artifactdir': os.path.join(str(tmpdir), 'artifacts')
|
|
| 143 |
+ })
|
|
| 144 |
+ |
|
| 145 |
+ res = cli_integration.run(project=project, args=['--cache-buildtrees', 'never', 'build', element_name])
|
|
| 146 |
+ res.assert_main_error(ErrorDomain.STREAM, None)
|
|
| 147 |
+ |
|
| 148 |
+ res = cli_integration.run(project=project, args=[
|
|
| 149 |
+ 'shell', '--build', element_name, '--use-buildtree', 'always', '--', 'cat', 'test'
|
|
| 150 |
+ ])
|
|
| 151 |
+ res.assert_shell_error()
|
|
| 152 |
+ assert "Artifact contains an empty buildtree" in res.stderr
|
|
| 153 |
+ |
|
| 154 |
+ |
|
| 155 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 156 |
+@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
|
|
| 157 |
+def test_buildtree_from_failure_option_failure(cli_integration, tmpdir, datafiles):
|
|
| 158 |
+ |
|
| 159 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 160 |
+ element_name = 'build-shell/buildtree-fail.bst'
|
|
| 161 |
+ |
|
| 162 |
+ # build with --cache-buildtrees set to 'failure', behaviour should match
|
|
| 163 |
+ # default behaviour (which is 'always'), as the buildtree will explicitly have been
|
|
| 164 |
+ # cached with content.
|
|
| 165 |
+ cli_integration.configure({
|
|
| 166 |
+ 'artifactdir': os.path.join(str(tmpdir), 'artifacts')
|
|
| 167 |
+ })
|
|
| 168 |
+ |
|
| 169 |
+ res = cli_integration.run(project=project, args=['--cache-buildtrees', 'failure', 'build', element_name])
|
|
| 170 |
+ res.assert_main_error(ErrorDomain.STREAM, None)
|
|
| 171 |
+ |
|
| 172 |
+ res = cli_integration.run(project=project, args=[
|
|
| 173 |
+ 'shell', '--build', element_name, '--use-buildtree', 'always', '--', 'cat', 'test'
|
|
| 174 |
+ ])
|
|
| 175 |
+ res.assert_success()
|
|
| 176 |
+ assert "WARNING: using a buildtree from a failed build" in res.stderr
|
|
| 177 |
+ assert 'Hi' in res.output
|
|
| 178 |
+ |
|
| 179 |
+ |
|
| 109 | 180 |
# Check that build shells work when pulled from a remote cache
|
| 110 | 181 |
# This is to roughly simulate remote execution
|
| 111 | 182 |
@pytest.mark.datafiles(DATA_DIR)
|
