Tom Pollard pushed to branch tpollard/workspacebuildtree at BuildStream / buildstream
Commits:

- 0095f888 by Tom Pollard at 2018-11-01T13:24:46Z

8 changed files:
- buildstream/_artifactcache/artifactcache.py
- buildstream/_artifactcache/cascache.py
- buildstream/_context.py
- buildstream/_frontend/cli.py
- buildstream/_stream.py
- buildstream/_workspaces.py
- buildstream/element.py
- tests/frontend/workspace.py
Changes:
=====================================
buildstream/_artifactcache/artifactcache.py
=====================================
@@ -604,6 +604,20 @@ class ArtifactCache():
         raise ImplError("Cache '{kind}' does not implement calculate_cache_size()"
                         .format(kind=type(self).__name__))
 
+    # checkout_artifact_subdir()
+    #
+    # Checkout given artifact subdir into provided directory
+    #
+    # Args:
+    #     element (Element): The Element
+    #     key (str): The cache key to use
+    #     subdir (str): The subdir to checkout
+    #     tmpdir (str): The dir to place the subdir content
+    #
+    def checkout_artifact_subdir(self, element, key, subdir, tmpdir):
+        raise ImplError("Cache '{kind}' does not implement checkout_artifact_subdir()"
+                        .format(kind=type(self).__name__))
+
     ################################################
     #               Local Private Methods          #
     ################################################
=====================================
buildstream/_artifactcache/cascache.py
=====================================
@@ -457,6 +457,13 @@ class CASCache(ArtifactCache):
 
         return pushed
 
+    def checkout_artifact_subdir(self, element, key, subdir, tmpdir):
+        tree = self.resolve_ref(self.get_artifact_fullname(element, key))
+
+        # This assumes that the subdir digest is present in the element tree
+        subdirdigest = self._get_subdir(tree, subdir)
+        self._checkout(tmpdir, subdirdigest)
+
     ################################################
     #               API Private Methods            #
     ################################################
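For context, a hedged editor's sketch (not part of this commit) of how a caller could use the new checkout_artifact_subdir() API to stage an element's cached buildtree into a scratch directory. The helper name and the temporary-directory handling below are illustrative only; the real caller in this branch is Element._open_workspace().

import tempfile

def stage_cached_buildtree(artifacts, element, key, parent_dir):
    # 'buildtree' is the artifact subdirectory this branch relies on;
    # checkout_artifact_subdir() extracts just that subtree of the artifact.
    tmpdir = tempfile.mkdtemp(dir=parent_dir, prefix='buildtree-')
    artifacts.checkout_artifact_subdir(element, key, 'buildtree', tmpdir)
    return tmpdir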
=====================================
buildstream/_context.py
=====================================
@@ -114,6 +114,9 @@ class Context():
         # Whether or not to attempt to pull buildtrees globally
         self.pullbuildtrees = False
 
+        # Whether to not include artifact buildtrees in workspaces if available
+        self.workspacebuildtrees = True
+
         # Private variables
         self._cache_key = None
         self._message_handler = None
@@ -164,7 +167,7 @@ class Context():
         _yaml.node_validate(defaults, [
             'sourcedir', 'builddir', 'artifactdir', 'logdir',
             'scheduler', 'artifacts', 'logging', 'projects',
-            'cache', 'pullbuildtrees'
+            'cache', 'pullbuildtrees', 'workspacebuildtrees'
         ])
 
         for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir']:
@@ -192,6 +195,9 @@ class Context():
         # Load pull buildtrees configuration
         self.pullbuildtrees = _yaml.node_get(defaults, bool, 'pullbuildtrees', default_value='False')
 
+        # Load workspace buildtrees configuration
+        self.workspacebuildtrees = _yaml.node_get(defaults, bool, 'workspacebuildtrees', default_value='True')
+
         # Load logging config
         logging = _yaml.node_get(defaults, Mapping, 'logging')
         _yaml.node_validate(logging, [
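To make the interaction between the new user configuration option and the CLI flag explicit, here is a hedged sketch of the intended precedence; the function below is illustrative and does not exist in the commit. 'workspacebuildtrees' defaults to True in the user configuration, and disabling it behaves as though --no-cache were always passed.

def effective_no_cache(cli_no_cache, workspacebuildtrees):
    # Mirrors the override performed in Stream.workspace_open(): a
    # disabled user configuration wins over the per-invocation flag.
    if not workspacebuildtrees:
        return True
    return cli_no_cache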
=====================================
buildstream/_frontend/cli.py
=====================================
@@ -681,17 +681,21 @@ def workspace():
 ##################################################################
 @workspace.command(name='open', short_help="Open a new workspace")
 @click.option('--no-checkout', default=False, is_flag=True,
-              help="Do not checkout the source, only link to the given directory")
+              help="Do not checkout the source or cached buildtree, only link to the given directory")
 @click.option('--force', '-f', default=False, is_flag=True,
               help="Overwrite files existing in checkout directory")
 @click.option('--track', 'track_', default=False, is_flag=True,
               help="Track and fetch new source references before checking out the workspace")
+@click.option('--no-cache', default=False, is_flag=True,
+              help="Do not checkout the cached buildtree")
 @click.argument('element',
                 type=click.Path(readable=False))
 @click.argument('directory', type=click.Path(file_okay=False))
 @click.pass_obj
-def workspace_open(app, no_checkout, force, track_, element, directory):
-    """Open a workspace for manual source modification"""
+def workspace_open(app, no_checkout, force, track_, no_cache, element, directory):
+    """Open a workspace for manual source modification, the elements buildtree
+    will be provided if available in the local artifact cache.
+    """
 
     if os.path.exists(directory):
 
@@ -703,11 +707,15 @@ def workspace_open(app, no_checkout, force, track_, element, directory):
             click.echo("Checkout directory is not empty: {}".format(directory), err=True)
             sys.exit(-1)
 
+    if not no_cache and not no_checkout:
+        click.echo("WARNING: Workspace will be opened without the cached buildtree if not cached locally")
+
     with app.initialized():
         app.stream.workspace_open(element, directory,
                                   no_checkout=no_checkout,
                                   track_first=track_,
-                                  force=force)
+                                  force=force,
+                                  no_cache=no_cache)
 
 
 ##################################################################
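A hedged usage sketch of the new flag, written in the style of the project's own test harness (the 'cli' and 'project' fixtures from tests/frontend/workspace.py); the element name and workspace path here are hypothetical.

def open_workspace_without_buildtree(cli, project, workspace_dir):
    # Open a workspace without staging the cached buildtree, even if one
    # is available locally ('hello.bst' is a placeholder element name).
    result = cli.run(project=project, args=[
        'workspace', 'open', '--no-cache', 'hello.bst', workspace_dir
    ])
    result.assert_success()
    return result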
=====================================
buildstream/_stream.py
=====================================
@@ -463,11 +463,17 @@ class Stream():
     #    no_checkout (bool): Whether to skip checking out the source
     #    track_first (bool): Whether to track and fetch first
     #    force (bool): Whether to ignore contents in an existing directory
+    #    no_cache (bool): Whether to not include the cached buildtree
     #
     def workspace_open(self, target, directory, *,
                        no_checkout,
                        track_first,
-                       force):
+                       force,
+                       no_cache):
+
+        # Override no_cache if the global user conf workspacebuildtrees is false
+        if not self._context.workspacebuildtrees:
+            no_cache = True
 
         if track_first:
             track_targets = (target,)
@@ -480,6 +486,20 @@ class Stream():
         target = elements[0]
         directory = os.path.abspath(directory)
 
+        # Check if given target has a buildtree artifact cached locally
+        buildtree = None
+        if target._cached():
+            buildtree = self._artifacts.contains_subdir_artifact(target, target._get_cache_key(), 'buildtree')
+
+        # If we're running in the default state, make the user aware of buildtree usage
+        if not no_cache and not no_checkout:
+            if buildtree:
+                self._message(MessageType.INFO, "{} buildtree artifact is available,"
+                              " workspace will be opened with it".format(target.name))
+            else:
+                self._message(MessageType.WARN, "{} buildtree artifact not available,"
+                              " workspace will be opened with source checkout".format(target.name))
+
         if not list(target.sources()):
             build_depends = [x.name for x in target.dependencies(Scope.BUILD, recurse=False)]
             if not build_depends:
@@ -511,6 +531,7 @@ class Stream():
                                   "fetch the latest version of the " +
                                   "source.")
 
+        # Presume workspace to be forced if previous StreamError not raised
         if workspace:
             workspaces.delete_workspace(target._get_full_name())
             workspaces.save_config()
@@ -520,11 +541,16 @@ class Stream():
         except OSError as e:
             raise StreamError("Failed to create workspace directory: {}".format(e)) from e
 
-        workspaces.create_workspace(target._get_full_name(), directory)
-
-        if not no_checkout:
-            with target.timed_activity("Staging sources to {}".format(directory)):
-                target._open_workspace()
+        # Handle opening workspace with buildtree included
+        if (buildtree and not no_cache) and not no_checkout:
+            workspaces.create_workspace(target._get_full_name(), directory, cached_build=buildtree)
+            with target.timed_activity("Staging buildtree to {}".format(directory)):
+                target._open_workspace(buildtree=buildtree)
+        else:
+            workspaces.create_workspace(target._get_full_name(), directory)
+            if (not buildtree or no_cache) and not no_checkout:
+                with target.timed_activity("Staging sources to {}".format(directory)):
+                    target._open_workspace()
 
         workspaces.save_config()
         self._message(MessageType.INFO, "Saved workspace configuration")
@@ -608,10 +634,24 @@ class Stream():
                                   .format(workspace_path, e)) from e
 
             workspaces.delete_workspace(element._get_full_name())
-            workspaces.create_workspace(element._get_full_name(), workspace_path)
 
-            with element.timed_activity("Staging sources to {}".format(workspace_path)):
-                element._open_workspace()
+            # Create the workspace, ensuring the original optional cached build state is preserved if
+            # possible.
+            buildtree = False
+            if workspace.cached_build and element._cached():
+                if self._artifacts.contains_subdir_artifact(element, element._get_cache_key(), 'buildtree'):
+                    buildtree = True
+
+            # Warn the user if the workspace cannot be opened with the original cached build state
+            if workspace.cached_build and not buildtree:
+                self._message(MessageType.WARN, "{} original buildtree artifact not available,"
+                              " workspace will be opened with source checkout".format(element.name))
+
+            workspaces.create_workspace(element._get_full_name(), workspace_path,
+                                        cached_build=buildtree)
+
+            with element.timed_activity("Staging to {}".format(workspace_path)):
+                element._open_workspace(buildtree=buildtree)
 
             self._message(MessageType.INFO,
                           "Reset workspace for {} at: {}".format(element.name,
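Summarising the branching above as a hedged sketch (not code from the commit): what workspace_open() ends up staging for each combination of the --no-checkout/--no-cache flags and local buildtree availability.

def staging_mode(buildtree_cached, no_cache, no_checkout):
    # --no-checkout short-circuits everything: the directory is only linked.
    if no_checkout:
        return 'nothing staged'
    # A locally cached buildtree is preferred unless the user opted out.
    if buildtree_cached and not no_cache:
        return 'cached buildtree staged'
    # Otherwise fall back to a plain source checkout.
    return 'source checkout staged'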
=====================================
buildstream/_workspaces.py
=====================================
@@ -24,7 +24,7 @@ from . import _yaml
 from ._exceptions import LoadError, LoadErrorReason
 
 
-BST_WORKSPACE_FORMAT_VERSION = 3
+BST_WORKSPACE_FORMAT_VERSION = 4
 
 
 # Workspace()
@@ -43,9 +43,11 @@ BST_WORKSPACE_FORMAT_VERSION = 3
 #    running_files (dict): A dict mapping dependency elements to files
 #                          changed between failed builds. Should be
 #                          made obsolete with failed build artifacts.
+#    cached_build (bool): If the workspace is staging the cached build artifact
 #
 class Workspace():
-    def __init__(self, toplevel_project, *, last_successful=None, path=None, prepared=False, running_files=None):
+    def __init__(self, toplevel_project, *, last_successful=None, path=None, prepared=False,
+                 running_files=None, cached_build=False):
         self.prepared = prepared
         self.last_successful = last_successful
         self._path = path
@@ -53,6 +55,7 @@ class Workspace():
 
         self._toplevel_project = toplevel_project
         self._key = None
+        self.cached_build = cached_build
 
     # to_dict()
     #
@@ -65,7 +68,8 @@ class Workspace():
         ret = {
             'prepared': self.prepared,
             'path': self._path,
-            'running_files': self.running_files
+            'running_files': self.running_files,
+            'cached_build': self.cached_build
         }
         if self.last_successful is not None:
             ret["last_successful"] = self.last_successful
@@ -224,12 +228,13 @@ class Workspaces():
     # Args:
     #    element_name (str) - The element name to create a workspace for
     #    path (str) - The path in which the workspace should be kept
+    #    cached_build (bool) - If the workspace is staging the cached build artifact
     #
-    def create_workspace(self, element_name, path):
+    def create_workspace(self, element_name, path, cached_build=False):
         if path.startswith(self._toplevel_project.directory):
             path = os.path.relpath(path, self._toplevel_project.directory)
 
-        self._workspaces[element_name] = Workspace(self._toplevel_project, path=path)
+        self._workspaces[element_name] = Workspace(self._toplevel_project, path=path, cached_build=cached_build)
 
         return self._workspaces[element_name]
 
@@ -396,6 +401,7 @@ class Workspaces():
             'path': _yaml.node_get(node, str, 'path'),
             'last_successful': _yaml.node_get(node, str, 'last_successful', default_value=None),
             'running_files': _yaml.node_get(node, dict, 'running_files', default_value=None),
+            'cached_build': _yaml.node_get(node, bool, 'cached_build', default_value=False)
        }
         return Workspace.from_dict(self._toplevel_project, dictionary)
 
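For illustration, the shape of a single workspace entry after this change, matching the format-version 4 test data added further down; the values themselves are examples only.

# One entry of the 'workspaces' mapping in the project's workspace
# configuration file, expressed as the dict that Workspace.to_dict()
# and from_dict() round-trip.
entry = {
    'prepared': False,
    'path': '/workspaces/bravo',
    'running_files': {},
    'cached_build': True,   # new in BST_WORKSPACE_FORMAT_VERSION = 4
}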
=====================================
buildstream/element.py
=====================================
@@ -1926,7 +1926,10 @@ class Element(Plugin):
     # This requires that a workspace already be created in
     # the workspaces metadata first.
     #
-    def _open_workspace(self):
+    # Args:
+    #    buildtree (bool): Whether to open workspace with artifact buildtree
+    #
+    def _open_workspace(self, buildtree=False):
         context = self._get_context()
         workspace = self._get_workspace()
         assert workspace is not None
@@ -1939,12 +1942,22 @@ class Element(Plugin):
         # files in the target directory actually works without any
         # additional support from Source implementations.
         #
+
         os.makedirs(context.builddir, exist_ok=True)
-        with utils._tempdir(dir=context.builddir, prefix='workspace-{}'
-                            .format(self.normal_name)) as temp:
+        with utils._tempdir(dir=context.builddir, prefix='workspace-source-{}'
+                            .format(self.normal_name)) as temp,\
+             utils._tempdir(dir=context.builddir, prefix='workspace-buildtree-{}'
+                            .format(self.normal_name)) as buildtreetemp:
+
             for source in self.sources():
                 source._init_workspace(temp)
 
+            # Overwrite the source checkout with the cached buildtree
+            if buildtree:
+                self.__artifacts.checkout_artifact_subdir(self, self._get_cache_key(), 'buildtree', buildtreetemp)
+                if utils._call([utils.get_host_tool('cp'), '-pfr', "".join((buildtreetemp, '/.')), temp])[0] != 0:
+                    raise ElementError("Failed to copy buildtree into workspace checkout at {}".format(buildtreetemp))
+
             # Now hardlink the files into the workspace target.
             utils.link_files(temp, workspace.get_absolute_path())
 
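The copy step above shells out to the host 'cp -pfr'. As a point of comparison only, and not what the commit does, a pure-Python overlay of the buildtree onto the staged sources could look like the sketch below; it assumes Python 3.8+ for the dirs_exist_ok argument.

import shutil

def overlay_buildtree(buildtreetemp, temp):
    # Copy the checked-out buildtree over the staged source checkout,
    # preserving file metadata and overwriting files that already exist.
    shutil.copytree(buildtreetemp, temp,
                    dirs_exist_ok=True, copy_function=shutil.copy2)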
| 44 | 44 |
|
| 45 | 45 |
|
| 46 | 46 |
def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
|
| 47 |
- project_path=None, element_attrs=None):
|
|
| 47 |
+ project_path=None, element_attrs=None, no_cache=False):
|
|
| 48 | 48 |
if not workspace_dir:
|
| 49 | 49 |
workspace_dir = os.path.join(str(tmpdir), 'workspace{}'.format(suffix))
|
| 50 | 50 |
if not project_path:
|
| ... | ... | @@ -88,6 +88,8 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir |
| 88 | 88 |
args = ['workspace', 'open']
|
| 89 | 89 |
if track:
|
| 90 | 90 |
args.append('--track')
|
| 91 |
+ if no_cache:
|
|
| 92 |
+ args.append('--no-cache')
|
|
| 91 | 93 |
args.extend([element_name, workspace_dir])
|
| 92 | 94 |
result = cli.run(project=project_path, args=args)
|
| 93 | 95 |
|
| ... | ... | @@ -101,7 +103,7 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir |
| 101 | 103 |
filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
|
| 102 | 104 |
assert os.path.exists(filename)
|
| 103 | 105 |
|
| 104 |
- return (element_name, project_path, workspace_dir)
|
|
| 106 |
+ return (element_name, project_path, workspace_dir, result)
|
|
| 105 | 107 |
|
| 106 | 108 |
|
| 107 | 109 |
@pytest.mark.datafiles(DATA_DIR)
|
| ... | ... | @@ -112,7 +114,7 @@ def test_open(cli, tmpdir, datafiles, kind): |
| 112 | 114 |
|
| 113 | 115 |
@pytest.mark.datafiles(DATA_DIR)
|
| 114 | 116 |
def test_open_bzr_customize(cli, tmpdir, datafiles):
|
| 115 |
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "bzr", False)
|
|
| 117 |
+ element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, "bzr", False)
|
|
| 116 | 118 |
|
| 117 | 119 |
# Check that the .bzr dir exists
|
| 118 | 120 |
bzrdir = os.path.join(workspace, ".bzr")
|
| ... | ... | @@ -137,7 +139,7 @@ def test_open_track(cli, tmpdir, datafiles, kind): |
| 137 | 139 |
@pytest.mark.datafiles(DATA_DIR)
|
| 138 | 140 |
@pytest.mark.parametrize("kind", repo_kinds)
|
| 139 | 141 |
def test_open_force(cli, tmpdir, datafiles, kind):
|
| 140 |
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
|
|
| 142 |
+ element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, kind, False)
|
|
| 141 | 143 |
|
| 142 | 144 |
# Close the workspace
|
| 143 | 145 |
result = cli.run(project=project, args=[
|
| ... | ... | @@ -158,7 +160,7 @@ def test_open_force(cli, tmpdir, datafiles, kind): |
| 158 | 160 |
@pytest.mark.datafiles(DATA_DIR)
|
| 159 | 161 |
@pytest.mark.parametrize("kind", repo_kinds)
|
| 160 | 162 |
def test_open_force_open(cli, tmpdir, datafiles, kind):
|
| 161 |
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
|
|
| 163 |
+ element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, kind, False)
|
|
| 162 | 164 |
|
| 163 | 165 |
# Assert the workspace dir exists
|
| 164 | 166 |
assert os.path.exists(workspace)
|
| ... | ... | @@ -173,7 +175,7 @@ def test_open_force_open(cli, tmpdir, datafiles, kind): |
| 173 | 175 |
@pytest.mark.datafiles(DATA_DIR)
|
| 174 | 176 |
@pytest.mark.parametrize("kind", repo_kinds)
|
| 175 | 177 |
def test_open_force_different_workspace(cli, tmpdir, datafiles, kind):
|
| 176 |
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False, "-alpha")
|
|
| 178 |
+ element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, kind, False, "-alpha")
|
|
| 177 | 179 |
|
| 178 | 180 |
# Assert the workspace dir exists
|
| 179 | 181 |
assert os.path.exists(workspace)
|
| ... | ... | @@ -183,7 +185,7 @@ def test_open_force_different_workspace(cli, tmpdir, datafiles, kind): |
| 183 | 185 |
|
| 184 | 186 |
tmpdir = os.path.join(str(tmpdir), "-beta")
|
| 185 | 187 |
shutil.move(hello_path, hello1_path)
|
| 186 |
- element_name2, project2, workspace2 = open_workspace(cli, tmpdir, datafiles, kind, False, "-beta")
|
|
| 188 |
+ element_name2, project2, workspace2, _ = open_workspace(cli, tmpdir, datafiles, kind, False, "-beta")
|
|
| 187 | 189 |
|
| 188 | 190 |
# Assert the workspace dir exists
|
| 189 | 191 |
assert os.path.exists(workspace2)
|
| ... | ... | @@ -210,7 +212,7 @@ def test_open_force_different_workspace(cli, tmpdir, datafiles, kind): |
| 210 | 212 |
@pytest.mark.datafiles(DATA_DIR)
|
| 211 | 213 |
@pytest.mark.parametrize("kind", repo_kinds)
|
| 212 | 214 |
def test_close(cli, tmpdir, datafiles, kind):
|
| 213 |
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
|
|
| 215 |
+ element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, kind, False)
|
|
| 214 | 216 |
|
| 215 | 217 |
# Close the workspace
|
| 216 | 218 |
result = cli.run(project=project, args=[
|
| ... | ... | @@ -226,7 +228,7 @@ def test_close(cli, tmpdir, datafiles, kind): |
| 226 | 228 |
def test_close_external_after_move_project(cli, tmpdir, datafiles):
|
| 227 | 229 |
workspace_dir = os.path.join(str(tmpdir), "workspace")
|
| 228 | 230 |
project_path = os.path.join(str(tmpdir), 'initial_project')
|
| 229 |
- element_name, _, _ = open_workspace(cli, tmpdir, datafiles, 'git', False, "", workspace_dir, project_path)
|
|
| 231 |
+ element_name, _, _, _ = open_workspace(cli, tmpdir, datafiles, 'git', False, "", workspace_dir, project_path)
|
|
| 230 | 232 |
assert os.path.exists(workspace_dir)
|
| 231 | 233 |
moved_dir = os.path.join(str(tmpdir), 'external_project')
|
| 232 | 234 |
shutil.move(project_path, moved_dir)
|
| ... | ... | @@ -246,8 +248,8 @@ def test_close_external_after_move_project(cli, tmpdir, datafiles): |
| 246 | 248 |
def test_close_internal_after_move_project(cli, tmpdir, datafiles):
|
| 247 | 249 |
initial_dir = os.path.join(str(tmpdir), 'initial_project')
|
| 248 | 250 |
initial_workspace = os.path.join(initial_dir, 'workspace')
|
| 249 |
- element_name, _, _ = open_workspace(cli, tmpdir, datafiles, 'git', False,
|
|
| 250 |
- workspace_dir=initial_workspace, project_path=initial_dir)
|
|
| 251 |
+ element_name, _, _, _ = open_workspace(cli, tmpdir, datafiles, 'git', False,
|
|
| 252 |
+ workspace_dir=initial_workspace, project_path=initial_dir)
|
|
| 251 | 253 |
moved_dir = os.path.join(str(tmpdir), 'internal_project')
|
| 252 | 254 |
shutil.move(initial_dir, moved_dir)
|
| 253 | 255 |
assert os.path.exists(moved_dir)
|
| ... | ... | @@ -265,7 +267,7 @@ def test_close_internal_after_move_project(cli, tmpdir, datafiles): |
| 265 | 267 |
|
| 266 | 268 |
@pytest.mark.datafiles(DATA_DIR)
|
| 267 | 269 |
def test_close_removed(cli, tmpdir, datafiles):
|
| 268 |
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git', False)
|
|
| 270 |
+ element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, 'git', False)
|
|
| 269 | 271 |
|
| 270 | 272 |
# Remove it first, closing the workspace should work
|
| 271 | 273 |
shutil.rmtree(workspace)
|
| ... | ... | @@ -282,7 +284,7 @@ def test_close_removed(cli, tmpdir, datafiles): |
| 282 | 284 |
|
| 283 | 285 |
@pytest.mark.datafiles(DATA_DIR)
|
| 284 | 286 |
def test_close_nonexistant_element(cli, tmpdir, datafiles):
|
| 285 |
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git', False)
|
|
| 287 |
+ element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, 'git', False)
|
|
| 286 | 288 |
element_path = os.path.join(datafiles.dirname, datafiles.basename, 'elements', element_name)
|
| 287 | 289 |
|
| 288 | 290 |
# First brutally remove the element.bst file, ensuring that
|
| ... | ... | @@ -304,9 +306,9 @@ def test_close_nonexistant_element(cli, tmpdir, datafiles): |
| 304 | 306 |
def test_close_multiple(cli, tmpdir, datafiles):
|
| 305 | 307 |
tmpdir_alpha = os.path.join(str(tmpdir), 'alpha')
|
| 306 | 308 |
tmpdir_beta = os.path.join(str(tmpdir), 'beta')
|
| 307 |
- alpha, project, workspace_alpha = open_workspace(
|
|
| 309 |
+ alpha, project, workspace_alpha, _ = open_workspace(
|
|
| 308 | 310 |
cli, tmpdir_alpha, datafiles, 'git', False, suffix='-alpha')
|
| 309 |
- beta, project, workspace_beta = open_workspace(
|
|
| 311 |
+ beta, project, workspace_beta, _ = open_workspace(
|
|
| 310 | 312 |
cli, tmpdir_beta, datafiles, 'git', False, suffix='-beta')
|
| 311 | 313 |
|
| 312 | 314 |
# Close the workspaces
|
| ... | ... | @@ -324,9 +326,9 @@ def test_close_multiple(cli, tmpdir, datafiles): |
| 324 | 326 |
def test_close_all(cli, tmpdir, datafiles):
|
| 325 | 327 |
tmpdir_alpha = os.path.join(str(tmpdir), 'alpha')
|
| 326 | 328 |
tmpdir_beta = os.path.join(str(tmpdir), 'beta')
|
| 327 |
- alpha, project, workspace_alpha = open_workspace(
|
|
| 329 |
+ alpha, project, workspace_alpha, _ = open_workspace(
|
|
| 328 | 330 |
cli, tmpdir_alpha, datafiles, 'git', False, suffix='-alpha')
|
| 329 |
- beta, project, workspace_beta = open_workspace(
|
|
| 331 |
+ beta, project, workspace_beta, _ = open_workspace(
|
|
| 330 | 332 |
cli, tmpdir_beta, datafiles, 'git', False, suffix='-beta')
|
| 331 | 333 |
|
| 332 | 334 |
# Close the workspaces
|
| ... | ... | @@ -343,7 +345,7 @@ def test_close_all(cli, tmpdir, datafiles): |
| 343 | 345 |
@pytest.mark.datafiles(DATA_DIR)
|
| 344 | 346 |
def test_reset(cli, tmpdir, datafiles):
|
| 345 | 347 |
# Open the workspace
|
| 346 |
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git', False)
|
|
| 348 |
+ element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, 'git', False)
|
|
| 347 | 349 |
|
| 348 | 350 |
# Modify workspace
|
| 349 | 351 |
shutil.rmtree(os.path.join(workspace, 'usr', 'bin'))
|
| ... | ... | @@ -366,9 +368,9 @@ def test_reset_multiple(cli, tmpdir, datafiles): |
| 366 | 368 |
# Open the workspaces
|
| 367 | 369 |
tmpdir_alpha = os.path.join(str(tmpdir), 'alpha')
|
| 368 | 370 |
tmpdir_beta = os.path.join(str(tmpdir), 'beta')
|
| 369 |
- alpha, project, workspace_alpha = open_workspace(
|
|
| 371 |
+ alpha, project, workspace_alpha, _ = open_workspace(
|
|
| 370 | 372 |
cli, tmpdir_alpha, datafiles, 'git', False, suffix='-alpha')
|
| 371 |
- beta, project, workspace_beta = open_workspace(
|
|
| 373 |
+ beta, project, workspace_beta, _ = open_workspace(
|
|
| 372 | 374 |
cli, tmpdir_beta, datafiles, 'git', False, suffix='-beta')
|
| 373 | 375 |
|
| 374 | 376 |
# Modify workspaces
|
| ... | ... | @@ -392,9 +394,9 @@ def test_reset_all(cli, tmpdir, datafiles): |
| 392 | 394 |
# Open the workspaces
|
| 393 | 395 |
tmpdir_alpha = os.path.join(str(tmpdir), 'alpha')
|
| 394 | 396 |
tmpdir_beta = os.path.join(str(tmpdir), 'beta')
|
| 395 |
- alpha, project, workspace_alpha = open_workspace(
|
|
| 397 |
+ alpha, project, workspace_alpha, _ = open_workspace(
|
|
| 396 | 398 |
cli, tmpdir_alpha, datafiles, 'git', False, suffix='-alpha')
|
| 397 |
- beta, project, workspace_beta = open_workspace(
|
|
| 399 |
+ beta, project, workspace_beta, _ = open_workspace(
|
|
| 398 | 400 |
cli, tmpdir_beta, datafiles, 'git', False, suffix='-beta')
|
| 399 | 401 |
|
| 400 | 402 |
# Modify workspaces
|
| ... | ... | @@ -415,7 +417,7 @@ def test_reset_all(cli, tmpdir, datafiles): |
| 415 | 417 |
|
| 416 | 418 |
@pytest.mark.datafiles(DATA_DIR)
|
| 417 | 419 |
def test_list(cli, tmpdir, datafiles):
|
| 418 |
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git', False)
|
|
| 420 |
+ element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, 'git', False)
|
|
| 419 | 421 |
|
| 420 | 422 |
# Now list the workspaces
|
| 421 | 423 |
result = cli.run(project=project, args=[
|
| ... | ... | @@ -437,7 +439,7 @@ def test_list(cli, tmpdir, datafiles): |
| 437 | 439 |
@pytest.mark.parametrize("kind", repo_kinds)
|
| 438 | 440 |
@pytest.mark.parametrize("strict", [("strict"), ("non-strict")])
|
| 439 | 441 |
def test_build(cli, tmpdir, datafiles, kind, strict):
|
| 440 |
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
|
|
| 442 |
+ element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, kind, False)
|
|
| 441 | 443 |
checkout = os.path.join(str(tmpdir), 'checkout')
|
| 442 | 444 |
|
| 443 | 445 |
# Modify workspace
|
| ... | ... | @@ -516,7 +518,7 @@ def test_buildable_no_ref(cli, tmpdir, datafiles): |
| 516 | 518 |
@pytest.mark.parametrize("modification", [("addfile"), ("removefile"), ("modifyfile")])
|
| 517 | 519 |
@pytest.mark.parametrize("strict", [("strict"), ("non-strict")])
|
| 518 | 520 |
def test_detect_modifications(cli, tmpdir, datafiles, modification, strict):
|
| 519 |
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git', False)
|
|
| 521 |
+ element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, 'git', False)
|
|
| 520 | 522 |
checkout = os.path.join(str(tmpdir), 'checkout')
|
| 521 | 523 |
|
| 522 | 524 |
# Configure strict mode
|
| ... | ... | @@ -637,7 +639,8 @@ def test_list_unsupported_workspace(cli, tmpdir, datafiles, workspace_cfg): |
| 637 | 639 |
"alpha.bst": {
|
| 638 | 640 |
"prepared": False,
|
| 639 | 641 |
"path": "/workspaces/bravo",
|
| 640 |
- "running_files": {}
|
|
| 642 |
+ "running_files": {},
|
|
| 643 |
+ "cached_build": False
|
|
| 641 | 644 |
}
|
| 642 | 645 |
}
|
| 643 | 646 |
}),
|
| ... | ... | @@ -652,7 +655,8 @@ def test_list_unsupported_workspace(cli, tmpdir, datafiles, workspace_cfg): |
| 652 | 655 |
"alpha.bst": {
|
| 653 | 656 |
"prepared": False,
|
| 654 | 657 |
"path": "/workspaces/bravo",
|
| 655 |
- "running_files": {}
|
|
| 658 |
+ "running_files": {},
|
|
| 659 |
+ "cached_build": False
|
|
| 656 | 660 |
}
|
| 657 | 661 |
}
|
| 658 | 662 |
}),
|
| ... | ... | @@ -670,7 +674,8 @@ def test_list_unsupported_workspace(cli, tmpdir, datafiles, workspace_cfg): |
| 670 | 674 |
"alpha.bst": {
|
| 671 | 675 |
"prepared": False,
|
| 672 | 676 |
"path": "/workspaces/bravo",
|
| 673 |
- "running_files": {}
|
|
| 677 |
+ "running_files": {},
|
|
| 678 |
+ "cached_build": False
|
|
| 674 | 679 |
}
|
| 675 | 680 |
}
|
| 676 | 681 |
}),
|
| ... | ... | @@ -695,7 +700,8 @@ def test_list_unsupported_workspace(cli, tmpdir, datafiles, workspace_cfg): |
| 695 | 700 |
"last_successful": "some_key",
|
| 696 | 701 |
"running_files": {
|
| 697 | 702 |
"beta.bst": ["some_file"]
|
| 698 |
- }
|
|
| 703 |
+ },
|
|
| 704 |
+ "cached_build": False
|
|
| 699 | 705 |
}
|
| 700 | 706 |
}
|
| 701 | 707 |
}),
|
| ... | ... | @@ -715,7 +721,30 @@ def test_list_unsupported_workspace(cli, tmpdir, datafiles, workspace_cfg): |
| 715 | 721 |
"alpha.bst": {
|
| 716 | 722 |
"prepared": True,
|
| 717 | 723 |
"path": "/workspaces/bravo",
|
| 718 |
- "running_files": {}
|
|
| 724 |
+ "running_files": {},
|
|
| 725 |
+ "cached_build": False
|
|
| 726 |
+ }
|
|
| 727 |
+ }
|
|
| 728 |
+ }),
|
|
| 729 |
+ # Test loading version 4
|
|
| 730 |
+ ({
|
|
| 731 |
+ "format-version": 4,
|
|
| 732 |
+ "workspaces": {
|
|
| 733 |
+ "alpha.bst": {
|
|
| 734 |
+ "prepared": False,
|
|
| 735 |
+ "path": "/workspaces/bravo",
|
|
| 736 |
+ "running_files": {},
|
|
| 737 |
+ "cached_build": True
|
|
| 738 |
+ }
|
|
| 739 |
+ }
|
|
| 740 |
+ }, {
|
|
| 741 |
+ "format-version": BST_WORKSPACE_FORMAT_VERSION,
|
|
| 742 |
+ "workspaces": {
|
|
| 743 |
+ "alpha.bst": {
|
|
| 744 |
+ "prepared": False,
|
|
| 745 |
+ "path": "/workspaces/bravo",
|
|
| 746 |
+ "running_files": {},
|
|
| 747 |
+ "cached_build": True
|
|
| 719 | 748 |
}
|
| 720 | 749 |
}
|
| 721 | 750 |
})
|
| ... | ... | @@ -779,7 +808,7 @@ def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expecte |
| 779 | 808 |
@pytest.mark.datafiles(DATA_DIR)
|
| 780 | 809 |
@pytest.mark.parametrize("kind", repo_kinds)
|
| 781 | 810 |
def test_inconsitent_pipeline_message(cli, tmpdir, datafiles, kind):
|
| 782 |
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
|
|
| 811 |
+ element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, kind, False)
|
|
| 783 | 812 |
|
| 784 | 813 |
shutil.rmtree(workspace)
|
| 785 | 814 |
|
| ... | ... | @@ -793,8 +822,8 @@ def test_inconsitent_pipeline_message(cli, tmpdir, datafiles, kind): |
| 793 | 822 |
@pytest.mark.parametrize("strict", [("strict"), ("non-strict")])
|
| 794 | 823 |
def test_cache_key_workspace_in_dependencies(cli, tmpdir, datafiles, strict):
|
| 795 | 824 |
checkout = os.path.join(str(tmpdir), 'checkout')
|
| 796 |
- element_name, project, workspace = open_workspace(cli, os.path.join(str(tmpdir), 'repo-a'),
|
|
| 797 |
- datafiles, 'git', False)
|
|
| 825 |
+ element_name, project, workspace, _ = open_workspace(cli, os.path.join(str(tmpdir), 'repo-a'),
|
|
| 826 |
+ datafiles, 'git', False)
|
|
| 798 | 827 |
|
| 799 | 828 |
element_path = os.path.join(project, 'elements')
|
| 800 | 829 |
back_dep_element_name = 'workspace-test-back-dep.bst'
|
| ... | ... | @@ -869,10 +898,75 @@ def test_multiple_failed_builds(cli, tmpdir, datafiles): |
| 869 | 898 |
]
|
| 870 | 899 |
}
|
| 871 | 900 |
}
|
| 872 |
- element_name, project, _ = open_workspace(cli, tmpdir, datafiles,
|
|
| 873 |
- "git", False, element_attrs=element_config)
|
|
| 901 |
+ element_name, project, _, _ = open_workspace(cli, tmpdir, datafiles,
|
|
| 902 |
+ "git", False, element_attrs=element_config)
|
|
| 874 | 903 |
|
| 875 | 904 |
for _ in range(2):
|
| 876 | 905 |
result = cli.run(project=project, args=["build", element_name])
|
| 877 | 906 |
assert "BUG" not in result.stderr
|
| 878 | 907 |
assert cli.get_element_state(project, element_name) != "cached"
|
| 908 |
+ |
|
| 909 |
+ |
|
| 910 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 911 |
+def test_nocache_open_messages(cli, tmpdir, datafiles):
|
|
| 912 |
+ # cli default WARN for source dropback possibility when no-cache flag is not passed
|
|
| 913 |
+ element_name, project, workspace, result = open_workspace(cli, tmpdir, datafiles, 'git', False, suffix='1')
|
|
| 914 |
+ assert "WARNING: Workspace will be opened without the cached buildtree if not cached locally" in result.output
|
|
| 915 |
+ |
|
| 916 |
+ # cli WARN for source dropback happening when no-cache flag not given, but buildtree not available
|
|
| 917 |
+ assert "workspace will be opened with source checkout" in result.stderr
|
|
| 918 |
+ |
|
| 919 |
+ # cli default WARN for source dropback possibilty not given when no-cache flag is passed
|
|
| 920 |
+ tmpdir = os.path.join(str(tmpdir), "2")
|
|
| 921 |
+ element_name, project, workspace, result = open_workspace(cli, tmpdir, datafiles, 'git', False, suffix='2',
|
|
| 922 |
+ no_cache=True)
|
|
| 923 |
+ assert "WARNING: Workspace will be opened without the cached buildtree if not cached locally" not in result.output
|
|
| 924 |
+ |
|
| 925 |
+ |
|
| 926 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 927 |
+def test_nocache_reset_messages(cli, tmpdir, datafiles):
|
|
| 928 |
+ element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, 'git', False, suffix='1')
|
|
| 929 |
+ |
|
| 930 |
+ # Modify workspace, without building so the artifact is not cached
|
|
| 931 |
+ shutil.rmtree(os.path.join(workspace, 'usr', 'bin'))
|
|
| 932 |
+ os.makedirs(os.path.join(workspace, 'etc'))
|
|
| 933 |
+ with open(os.path.join(workspace, 'etc', 'pony.conf'), 'w') as f:
|
|
| 934 |
+ f.write("PONY='pink'")
|
|
| 935 |
+ |
|
| 936 |
+ # Now reset the open workspace, this should have the
|
|
| 937 |
+ # effect of reverting our changes to the original source, as it
|
|
| 938 |
+ # was not originally opened with a cached buildtree and as such
|
|
| 939 |
+ # should not notify the user
|
|
| 940 |
+ result = cli.run(project=project, args=[
|
|
| 941 |
+ 'workspace', 'reset', element_name
|
|
| 942 |
+ ])
|
|
| 943 |
+ result.assert_success()
|
|
| 944 |
+ assert "original buildtree artifact not available" not in result.output
|
|
| 945 |
+ assert os.path.exists(os.path.join(workspace, 'usr', 'bin', 'hello'))
|
|
| 946 |
+ assert not os.path.exists(os.path.join(workspace, 'etc', 'pony.conf'))
|
|
| 947 |
+ |
|
| 948 |
+ # Close the workspace
|
|
| 949 |
+ result = cli.run(project=project, args=[
|
|
| 950 |
+ 'workspace', 'close', '--remove-dir', element_name
|
|
| 951 |
+ ])
|
|
| 952 |
+ result.assert_success()
|
|
| 953 |
+ |
|
| 954 |
+ # Build the workspace so we have a cached buildtree artifact for the element
|
|
| 955 |
+ assert cli.get_element_state(project, element_name) == 'buildable'
|
|
| 956 |
+ result = cli.run(project=project, args=['build', element_name])
|
|
| 957 |
+ result.assert_success()
|
|
| 958 |
+ |
|
| 959 |
+ # Opening the workspace after a build should lead to the cached buildtree being
|
|
| 960 |
+ # staged by default
|
|
| 961 |
+ result = cli.run(project=project, args=[
|
|
| 962 |
+ 'workspace', 'open', element_name, workspace
|
|
| 963 |
+ ])
|
|
| 964 |
+ result.assert_success()
|
|
| 965 |
+ |
|
| 966 |
+ # Now reset the workspace and ensure that a warning is not given about the artifact
|
|
| 967 |
+ # buildtree not being available
|
|
| 968 |
+ result = cli.run(project=project, args=[
|
|
| 969 |
+ 'workspace', 'reset', element_name
|
|
| 970 |
+ ])
|
|
| 971 |
+ result.assert_success()
|
|
| 972 |
+ assert "original buildtree artifact not available" not in result.output
|
