Will Salmon pushed to branch willsalmon/defaultWorkspaces at BuildStream / buildstream
Commits:
-
09ef8b25
by Jonathan Maw at 2018-10-30T15:42:05Z
-
37afde0a
by Jonathan Maw at 2018-10-30T15:42:05Z
-
4bc71773
by Jonathan Maw at 2018-10-30T15:42:05Z
-
9b89564f
by Jonathan Maw at 2018-10-30T15:42:05Z
-
ec76cbe1
by Jonathan Maw at 2018-10-30T15:42:05Z
-
825963b5
by Jonathan Maw at 2018-10-30T15:42:05Z
-
cf721838
by Jonathan Maw at 2018-10-30T15:42:05Z
-
79d1bb7f
by Jonathan Maw at 2018-10-30T15:42:05Z
-
c7ac7e7d
by Jonathan Maw at 2018-10-30T16:19:17Z
-
3ade0720
by William Salmon at 2018-10-30T16:31:06Z
-
1017d231
by William Salmon at 2018-10-30T16:31:06Z
-
fb6122c9
by William Salmon at 2018-10-30T16:40:17Z
23 changed files:
- NEWS
- buildstream/_context.py
- buildstream/_exceptions.py
- buildstream/_frontend/app.py
- buildstream/_frontend/cli.py
- buildstream/_frontend/widget.py
- buildstream/_message.py
- buildstream/_stream.py
- buildstream/data/userconfig.yaml
- buildstream/element.py
- buildstream/sandbox/_mount.py
- buildstream/sandbox/sandbox.py
- tests/examples/developing.py
- tests/examples/junctions.py
- tests/frontend/buildcheckout.py
- tests/frontend/cross_junction_workspace.py
- tests/frontend/workspace.py
- + tests/integration/build-tree.py
- + tests/integration/project/elements/build-shell/buildtree-fail.bst
- + tests/integration/project/elements/build-shell/buildtree.bst
- tests/integration/shell.py
- tests/integration/workspace.py
- tests/plugins/filter.py
Changes:
| ... | ... | @@ -31,6 +31,15 @@ buildstream 1.3.1 |
| 31 | 31 |
new the `conf-root` variable to make the process easier. And there has been
|
| 32 | 32 |
a bug fix to workspaces so they can be built in workspaces too.
|
| 33 | 33 |
|
| 34 |
+ o Creating a build shell through the interactive mode or `bst shell --build`
|
|
| 35 |
+ will now use the cached build tree. It is now easier to debug local build
|
|
| 36 |
+ failures.
|
|
| 37 |
+ |
|
| 38 |
+ o `bst shell --sysroot` now takes any directory that contains a sysroot,
|
|
| 39 |
+ instead of just a specially-formatted build-root with a `root` and `scratch`
|
|
| 40 |
+ subdirectory.
|
|
| 41 |
+ |
|
| 42 |
+ |
|
| 34 | 43 |
=================
|
| 35 | 44 |
buildstream 1.1.5
|
| 36 | 45 |
=================
|
| ... | ... | @@ -60,6 +60,9 @@ class Context(): |
| 60 | 60 |
# The directory where build sandboxes will be created
|
| 61 | 61 |
self.builddir = None
|
| 62 | 62 |
|
| 63 |
+ # Default root location for workspaces
|
|
| 64 |
+ self.workspacedir = None
|
|
| 65 |
+ |
|
| 63 | 66 |
# The local binary artifact cache directory
|
| 64 | 67 |
self.artifactdir = None
|
| 65 | 68 |
|
| ... | ... | @@ -161,10 +164,10 @@ class Context(): |
| 161 | 164 |
_yaml.node_validate(defaults, [
|
| 162 | 165 |
'sourcedir', 'builddir', 'artifactdir', 'logdir',
|
| 163 | 166 |
'scheduler', 'artifacts', 'logging', 'projects',
|
| 164 |
- 'cache'
|
|
| 167 |
+ 'cache', 'workspacedir',
|
|
| 165 | 168 |
])
|
| 166 | 169 |
|
| 167 |
- for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir']:
|
|
| 170 |
+ for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
|
|
| 168 | 171 |
# Allow the ~ tilde expansion and any environment variables in
|
| 169 | 172 |
# path specification in the config files.
|
| 170 | 173 |
#
|
| ... | ... | @@ -111,10 +111,8 @@ class BstError(Exception): |
| 111 | 111 |
#
|
| 112 | 112 |
self.detail = detail
|
| 113 | 113 |
|
| 114 |
- # The build sandbox in which the error occurred, if the
|
|
| 115 |
- # error occurred at element assembly time.
|
|
| 116 |
- #
|
|
| 117 |
- self.sandbox = None
|
|
| 114 |
+ # A sandbox can be created to debug this error
|
|
| 115 |
+ self.sandbox = False
|
|
| 118 | 116 |
|
| 119 | 117 |
# When this exception occurred during the handling of a job, indicate
|
| 120 | 118 |
# whether or not there is any point retrying the job.
|
| ... | ... | @@ -597,7 +597,7 @@ class App(): |
| 597 | 597 |
click.echo("\nDropping into an interactive shell in the failed build sandbox\n", err=True)
|
| 598 | 598 |
try:
|
| 599 | 599 |
prompt = self.shell_prompt(element)
|
| 600 |
- self.stream.shell(element, Scope.BUILD, prompt, directory=failure.sandbox, isolate=True)
|
|
| 600 |
+ self.stream.shell(element, Scope.BUILD, prompt, isolate=True)
|
|
| 601 | 601 |
except BstError as e:
|
| 602 | 602 |
click.echo("Error while attempting to create interactive shell: {}".format(e), err=True)
|
| 603 | 603 |
elif choice == 'log':
|
| ... | ... | @@ -678,28 +678,36 @@ def workspace(): |
| 678 | 678 |
@click.option('--no-checkout', default=False, is_flag=True,
|
| 679 | 679 |
help="Do not checkout the source, only link to the given directory")
|
| 680 | 680 |
@click.option('--force', '-f', default=False, is_flag=True,
|
| 681 |
- help="Overwrite files existing in checkout directory")
|
|
| 681 |
+ help="The workspace will be created even if the directory in which it will be created is not empty " +
|
|
| 682 |
+ "or if a workspace for that element already exists")
|
|
| 682 | 683 |
@click.option('--track', 'track_', default=False, is_flag=True,
|
| 683 | 684 |
help="Track and fetch new source references before checking out the workspace")
|
| 684 |
-@click.argument('element',
|
|
| 685 |
- type=click.Path(readable=False))
|
|
| 686 |
-@click.argument('directory', type=click.Path(file_okay=False))
|
|
| 685 |
+@click.option('--directory', type=click.Path(file_okay=False), default=None,
|
|
| 686 |
+ help="Only for use when a single element is given: set the directory to use to create the workspace"
|
|
| 687 |
+@click.argument('elements', nargs=-1, type=click.Path(readable=False))
|
|
| 687 | 688 |
@click.pass_obj
|
| 688 |
-def workspace_open(app, no_checkout, force, track_, element, directory):
|
|
| 689 |
+def workspace_open(app, no_checkout, force, track_, directory, elements):
|
|
| 689 | 690 |
"""Open a workspace for manual source modification"""
|
| 690 |
- |
|
| 691 |
- if os.path.exists(directory):
|
|
| 692 |
- |
|
| 693 |
- if not os.path.isdir(directory):
|
|
| 694 |
- click.echo("Checkout directory is not a directory: {}".format(directory), err=True)
|
|
| 691 |
+ directories = []
|
|
| 692 |
+ if directory is not None:
|
|
| 693 |
+ if len(elements) > 1:
|
|
| 694 |
+ click.echo("Directory option can only be used if a single element is given", err=True)
|
|
| 695 | 695 |
sys.exit(-1)
|
| 696 |
+ if os.path.exists(directory):
|
|
| 697 |
+ if not os.path.isdir(directory):
|
|
| 698 |
+ click.echo("Directory path is not a directory: {}".format(directory), err=True)
|
|
| 699 |
+ sys.exit(-1)
|
|
| 696 | 700 |
|
| 697 |
- if not (no_checkout or force) and os.listdir(directory):
|
|
| 698 |
- click.echo("Checkout directory is not empty: {}".format(directory), err=True)
|
|
| 699 |
- sys.exit(-1)
|
|
| 701 |
+ if not (no_checkout or force) and os.listdir(directory):
|
|
| 702 |
+ click.echo("Directory path is not empty: {}".format(directory), err=True)
|
|
| 703 |
+ sys.exit(-1)
|
|
| 704 |
+ directories.append({'dir': directory, 'force': True})
|
|
| 705 |
+ else:
|
|
| 706 |
+ for element in elements:
|
|
| 707 |
+ directories.append({'dir': element.rstrip('.bst'), 'force': False})
|
|
| 700 | 708 |
|
| 701 | 709 |
with app.initialized():
|
| 702 |
- app.stream.workspace_open(element, directory,
|
|
| 710 |
+ app.stream.workspace_open(elements, directories,
|
|
| 703 | 711 |
no_checkout=no_checkout,
|
| 704 | 712 |
track_first=track_,
|
| 705 | 713 |
force=force)
|
| ... | ... | @@ -668,17 +668,6 @@ class LogLine(Widget): |
| 668 | 668 |
|
| 669 | 669 |
extra_nl = True
|
| 670 | 670 |
|
| 671 |
- if message.sandbox is not None:
|
|
| 672 |
- sandbox = self._indent + 'Sandbox directory: ' + message.sandbox
|
|
| 673 |
- |
|
| 674 |
- text += '\n'
|
|
| 675 |
- if message.message_type == MessageType.FAIL:
|
|
| 676 |
- text += self._err_profile.fmt(sandbox, bold=True)
|
|
| 677 |
- else:
|
|
| 678 |
- text += self._detail_profile.fmt(sandbox)
|
|
| 679 |
- text += '\n'
|
|
| 680 |
- extra_nl = True
|
|
| 681 |
- |
|
| 682 | 671 |
if message.scheduler and message.message_type == MessageType.FAIL:
|
| 683 | 672 |
text += '\n'
|
| 684 | 673 |
|
| ... | ... | @@ -70,7 +70,7 @@ class Message(): |
| 70 | 70 |
self.elapsed = elapsed # The elapsed time, in timed messages
|
| 71 | 71 |
self.depth = depth # The depth of a timed message
|
| 72 | 72 |
self.logfile = logfile # The log file path where commands took place
|
| 73 |
- self.sandbox = sandbox # The sandbox directory where an error occurred (if any)
|
|
| 73 |
+ self.sandbox = sandbox # The error that caused this message used a sandbox
|
|
| 74 | 74 |
self.pid = os.getpid() # The process pid
|
| 75 | 75 |
self.unique_id = unique_id # The plugin object ID issueing the message
|
| 76 | 76 |
self.task_id = task_id # The plugin object ID of the task
|
| ... | ... | @@ -454,70 +454,77 @@ class Stream(): |
| 454 | 454 |
# track_first (bool): Whether to track and fetch first
|
| 455 | 455 |
# force (bool): Whether to ignore contents in an existing directory
|
| 456 | 456 |
#
|
| 457 |
- def workspace_open(self, target, directory, *,
|
|
| 457 |
+ def workspace_open(self, targets, directories, *,
|
|
| 458 | 458 |
no_checkout,
|
| 459 | 459 |
track_first,
|
| 460 | 460 |
force):
|
| 461 | 461 |
|
| 462 | 462 |
if track_first:
|
| 463 |
- track_targets = (target,)
|
|
| 463 |
+ track_targets = targets
|
|
| 464 | 464 |
else:
|
| 465 | 465 |
track_targets = ()
|
| 466 | 466 |
|
| 467 |
- elements, track_elements = self._load((target,), track_targets,
|
|
| 467 |
+ elements, track_elements = self._load(targets, track_targets,
|
|
| 468 | 468 |
selection=PipelineSelection.REDIRECT,
|
| 469 | 469 |
track_selection=PipelineSelection.REDIRECT)
|
| 470 |
- target = elements[0]
|
|
| 471 |
- directory = os.path.abspath(directory)
|
|
| 472 |
- |
|
| 473 |
- if not list(target.sources()):
|
|
| 474 |
- build_depends = [x.name for x in target.dependencies(Scope.BUILD, recurse=False)]
|
|
| 475 |
- if not build_depends:
|
|
| 476 |
- raise StreamError("The given element has no sources")
|
|
| 477 |
- detail = "Try opening a workspace on one of its dependencies instead:\n"
|
|
| 478 |
- detail += " \n".join(build_depends)
|
|
| 479 |
- raise StreamError("The given element has no sources", detail=detail)
|
|
| 480 | 470 |
|
| 481 | 471 |
workspaces = self._context.get_workspaces()
|
| 482 | 472 |
|
| 483 |
- # Check for workspace config
|
|
| 484 |
- workspace = workspaces.get_workspace(target._get_full_name())
|
|
| 485 |
- if workspace and not force:
|
|
| 486 |
- raise StreamError("Workspace '{}' is already defined at: {}"
|
|
| 487 |
- .format(target.name, workspace.get_absolute_path()))
|
|
| 488 |
- |
|
| 489 |
- # If we're going to checkout, we need at least a fetch,
|
|
| 490 |
- # if we were asked to track first, we're going to fetch anyway.
|
|
| 491 |
- #
|
|
| 492 |
- if not no_checkout or track_first:
|
|
| 493 |
- track_elements = []
|
|
| 494 |
- if track_first:
|
|
| 495 |
- track_elements = elements
|
|
| 496 |
- self._fetch(elements, track_elements=track_elements)
|
|
| 497 |
- |
|
| 498 |
- if not no_checkout and target._get_consistency() != Consistency.CACHED:
|
|
| 499 |
- raise StreamError("Could not stage uncached source. " +
|
|
| 500 |
- "Use `--track` to track and " +
|
|
| 501 |
- "fetch the latest version of the " +
|
|
| 502 |
- "source.")
|
|
| 473 |
+ for target, directory_dict in zip(elements, directories):
|
|
| 474 |
+ if directory_dict['force']:
|
|
| 475 |
+ directory = directory_dict['dir']
|
|
| 476 |
+ else:
|
|
| 477 |
+ directory = os.path.abspath(os.path.join(self._context.workspacedir, directory_dict['dir']))
|
|
| 478 |
+ |
|
| 479 |
+ if not list(target.sources()):
|
|
| 480 |
+ build_depends = [x.name for x in target.dependencies(Scope.BUILD, recurse=False)]
|
|
| 481 |
+ if not build_depends:
|
|
| 482 |
+ raise StreamError("The given element has no sources")
|
|
| 483 |
+ detail = "Try opening a workspace on one of its dependencies instead:\n"
|
|
| 484 |
+ detail += " \n".join(build_depends)
|
|
| 485 |
+ raise StreamError("The given element has no sources", detail=detail)
|
|
| 486 |
+ |
|
| 487 |
+ # Check for workspace config
|
|
| 488 |
+ workspace = workspaces.get_workspace(target._get_full_name())
|
|
| 489 |
+ if workspace and not force:
|
|
| 490 |
+ raise StreamError("Workspace '{}' is already defined at: {}"
|
|
| 491 |
+ .format(target.name, workspace.get_absolute_path()))
|
|
| 492 |
+ |
|
| 493 |
+ # If we're going to checkout, we need at least a fetch,
|
|
| 494 |
+ # if we were asked to track first, we're going to fetch anyway.
|
|
| 495 |
+ #
|
|
| 496 |
+ if not no_checkout or track_first:
|
|
| 497 |
+ track_elements = []
|
|
| 498 |
+ if track_first:
|
|
| 499 |
+ track_elements = elements
|
|
| 500 |
+ self._fetch(elements, track_elements=track_elements)
|
|
| 501 |
+ |
|
| 502 |
+ if not no_checkout and target._get_consistency() != Consistency.CACHED:
|
|
| 503 |
+ raise StreamError("Could not stage uncached source. " +
|
|
| 504 |
+ "Use `--track` to track and " +
|
|
| 505 |
+ "fetch the latest version of the " +
|
|
| 506 |
+ "source.")
|
|
| 503 | 507 |
|
| 504 |
- if workspace:
|
|
| 505 |
- workspaces.delete_workspace(target._get_full_name())
|
|
| 506 |
- workspaces.save_config()
|
|
| 507 |
- shutil.rmtree(directory)
|
|
| 508 |
- try:
|
|
| 509 |
- os.makedirs(directory, exist_ok=True)
|
|
| 510 |
- except OSError as e:
|
|
| 511 |
- raise StreamError("Failed to create workspace directory: {}".format(e)) from e
|
|
| 508 |
+ if workspace:
|
|
| 509 |
+ workspaces.delete_workspace(target._get_full_name())
|
|
| 510 |
+ workspaces.save_config()
|
|
| 511 |
+ shutil.rmtree(directory)
|
|
| 512 |
+ try:
|
|
| 513 |
+ os.makedirs(directory, exist_ok=True)
|
|
| 514 |
+ except OSError as e:
|
|
| 515 |
+ raise StreamError("Failed to create workspace directory: {}".format(e)) from e
|
|
| 512 | 516 |
|
| 513 |
- workspaces.create_workspace(target._get_full_name(), directory)
|
|
| 517 |
+ workspaces.create_workspace(target._get_full_name(), directory)
|
|
| 514 | 518 |
|
| 515 |
- if not no_checkout:
|
|
| 516 |
- with target.timed_activity("Staging sources to {}".format(directory)):
|
|
| 517 |
- target._open_workspace()
|
|
| 519 |
+ if not no_checkout:
|
|
| 520 |
+ with target.timed_activity("Staging sources to {}".format(directory)):
|
|
| 521 |
+ target._open_workspace()
|
|
| 518 | 522 |
|
| 519 |
- workspaces.save_config()
|
|
| 520 |
- self._message(MessageType.INFO, "Saved workspace configuration")
|
|
| 523 |
+ # Saving the workspace once it is set up means that if the next one fails before
|
|
| 524 |
+ # the configuration gets saved we don't end up with the good workspace not being saved
|
|
| 525 |
+ workspaces.save_config()
|
|
| 526 |
+ self._message(MessageType.INFO, "Added element {} to the workspace configuration"
|
|
| 527 |
+ .format(target._get_full_name()))
|
|
| 521 | 528 |
|
| 522 | 529 |
# workspace_close
|
| 523 | 530 |
#
|
| ... | ... | @@ -22,6 +22,9 @@ artifactdir: ${XDG_CACHE_HOME}/buildstream/artifacts |
| 22 | 22 |
# Location to store build logs
|
| 23 | 23 |
logdir: ${XDG_CACHE_HOME}/buildstream/logs
|
| 24 | 24 |
|
| 25 |
+# Default root location for workspaces; leave blank for no default.
|
|
| 26 |
+workspacedir: .
|
|
| 27 |
+ |
|
| 25 | 28 |
#
|
| 26 | 29 |
# Cache
|
| 27 | 30 |
#
|
| ... | ... | @@ -1318,7 +1318,9 @@ class Element(Plugin): |
| 1318 | 1318 |
@contextmanager
|
| 1319 | 1319 |
def _prepare_sandbox(self, scope, directory, deps='run', integrate=True):
|
| 1320 | 1320 |
# bst shell and bst checkout require a local sandbox.
|
| 1321 |
- with self.__sandbox(directory, config=self.__sandbox_config, allow_remote=False) as sandbox:
|
|
| 1321 |
+ bare_directory = True if directory else False
|
|
| 1322 |
+ with self.__sandbox(directory, config=self.__sandbox_config, allow_remote=False,
|
|
| 1323 |
+ bare_directory=bare_directory) as sandbox:
|
|
| 1322 | 1324 |
|
| 1323 | 1325 |
# Configure always comes first, and we need it.
|
| 1324 | 1326 |
self.configure_sandbox(sandbox)
|
| ... | ... | @@ -1385,6 +1387,7 @@ class Element(Plugin): |
| 1385 | 1387 |
# the same filing system as the rest of our cache.
|
| 1386 | 1388 |
temp_staging_location = os.path.join(self._get_context().artifactdir, "staging_temp")
|
| 1387 | 1389 |
temp_staging_directory = tempfile.mkdtemp(prefix=temp_staging_location)
|
| 1390 |
+ import_dir = temp_staging_directory
|
|
| 1388 | 1391 |
|
| 1389 | 1392 |
try:
|
| 1390 | 1393 |
workspace = self._get_workspace()
|
| ... | ... | @@ -1395,12 +1398,16 @@ class Element(Plugin): |
| 1395 | 1398 |
with self.timed_activity("Staging local files at {}"
|
| 1396 | 1399 |
.format(workspace.get_absolute_path())):
|
| 1397 | 1400 |
workspace.stage(temp_staging_directory)
|
| 1401 |
+ elif self._cached():
|
|
| 1402 |
+ # We have a cached buildtree to use, instead
|
|
| 1403 |
+ artifact_base, _ = self.__extract()
|
|
| 1404 |
+ import_dir = os.path.join(artifact_base, 'buildtree')
|
|
| 1398 | 1405 |
else:
|
| 1399 | 1406 |
# No workspace, stage directly
|
| 1400 | 1407 |
for source in self.sources():
|
| 1401 | 1408 |
source._stage(temp_staging_directory)
|
| 1402 | 1409 |
|
| 1403 |
- vdirectory.import_files(temp_staging_directory)
|
|
| 1410 |
+ vdirectory.import_files(import_dir)
|
|
| 1404 | 1411 |
|
| 1405 | 1412 |
finally:
|
| 1406 | 1413 |
# Staging may produce directories with less than 'rwx' permissions
|
| ... | ... | @@ -1566,9 +1573,8 @@ class Element(Plugin): |
| 1566 | 1573 |
collect = self.assemble(sandbox) # pylint: disable=assignment-from-no-return
|
| 1567 | 1574 |
self.__set_build_result(success=True, description="succeeded")
|
| 1568 | 1575 |
except BstError as e:
|
| 1569 |
- # If an error occurred assembling an element in a sandbox,
|
|
| 1570 |
- # then tack on the sandbox directory to the error
|
|
| 1571 |
- e.sandbox = rootdir
|
|
| 1576 |
+ # Shelling into a sandbox is useful to debug this error
|
|
| 1577 |
+ e.sandbox = True
|
|
| 1572 | 1578 |
|
| 1573 | 1579 |
# If there is a workspace open on this element, it will have
|
| 1574 | 1580 |
# been mounted for sandbox invocations instead of being staged.
|
| ... | ... | @@ -1683,8 +1689,8 @@ class Element(Plugin): |
| 1683 | 1689 |
"unable to collect artifact contents"
|
| 1684 | 1690 |
.format(collect))
|
| 1685 | 1691 |
|
| 1686 |
- # Finally cleanup the build dir
|
|
| 1687 |
- cleanup_rootdir()
|
|
| 1692 |
+ # Finally cleanup the build dir
|
|
| 1693 |
+ cleanup_rootdir()
|
|
| 1688 | 1694 |
|
| 1689 | 1695 |
return artifact_size
|
| 1690 | 1696 |
|
| ... | ... | @@ -2152,12 +2158,14 @@ class Element(Plugin): |
| 2152 | 2158 |
# stderr (fileobject): The stream for stderr for the sandbox
|
| 2153 | 2159 |
# config (SandboxConfig): The SandboxConfig object
|
| 2154 | 2160 |
# allow_remote (bool): Whether the sandbox is allowed to be remote
|
| 2161 |
+ # bare_directory (bool): Whether the directory is bare i.e. doesn't have
|
|
| 2162 |
+ # a separate 'root' subdir
|
|
| 2155 | 2163 |
#
|
| 2156 | 2164 |
# Yields:
|
| 2157 | 2165 |
# (Sandbox): A usable sandbox
|
| 2158 | 2166 |
#
|
| 2159 | 2167 |
@contextmanager
|
| 2160 |
- def __sandbox(self, directory, stdout=None, stderr=None, config=None, allow_remote=True):
|
|
| 2168 |
+ def __sandbox(self, directory, stdout=None, stderr=None, config=None, allow_remote=True, bare_directory=False):
|
|
| 2161 | 2169 |
context = self._get_context()
|
| 2162 | 2170 |
project = self._get_project()
|
| 2163 | 2171 |
platform = Platform.get_platform()
|
| ... | ... | @@ -2188,6 +2196,7 @@ class Element(Plugin): |
| 2188 | 2196 |
stdout=stdout,
|
| 2189 | 2197 |
stderr=stderr,
|
| 2190 | 2198 |
config=config,
|
| 2199 |
+ bare_directory=bare_directory,
|
|
| 2191 | 2200 |
allow_real_directory=not self.BST_VIRTUAL_DIRECTORY)
|
| 2192 | 2201 |
yield sandbox
|
| 2193 | 2202 |
|
| ... | ... | @@ -2197,7 +2206,7 @@ class Element(Plugin): |
| 2197 | 2206 |
|
| 2198 | 2207 |
# Recursive contextmanager...
|
| 2199 | 2208 |
with self.__sandbox(rootdir, stdout=stdout, stderr=stderr, config=config,
|
| 2200 |
- allow_remote=allow_remote) as sandbox:
|
|
| 2209 |
+ allow_remote=allow_remote, bare_directory=False) as sandbox:
|
|
| 2201 | 2210 |
yield sandbox
|
| 2202 | 2211 |
|
| 2203 | 2212 |
# Cleanup the build dir
|
| ... | ... | @@ -31,7 +31,6 @@ from .._fuse import SafeHardlinks |
| 31 | 31 |
#
|
| 32 | 32 |
class Mount():
|
| 33 | 33 |
def __init__(self, sandbox, mount_point, safe_hardlinks, fuse_mount_options=None):
|
| 34 |
- scratch_directory = sandbox._get_scratch_directory()
|
|
| 35 | 34 |
# Getting _get_underlying_directory() here is acceptable as
|
| 36 | 35 |
# we're part of the sandbox code. This will fail if our
|
| 37 | 36 |
# directory is CAS-based.
|
| ... | ... | @@ -51,6 +50,7 @@ class Mount(): |
| 51 | 50 |
# a regular mount point within the parent's redirected mount.
|
| 52 | 51 |
#
|
| 53 | 52 |
if self.safe_hardlinks:
|
| 53 |
+ scratch_directory = sandbox._get_scratch_directory()
|
|
| 54 | 54 |
# Redirected mount
|
| 55 | 55 |
self.mount_origin = os.path.join(root_directory, mount_point.lstrip(os.sep))
|
| 56 | 56 |
self.mount_base = os.path.join(scratch_directory, utils.url_directory_name(mount_point))
|
| ... | ... | @@ -98,16 +98,23 @@ class Sandbox(): |
| 98 | 98 |
self.__config = kwargs['config']
|
| 99 | 99 |
self.__stdout = kwargs['stdout']
|
| 100 | 100 |
self.__stderr = kwargs['stderr']
|
| 101 |
+ self.__bare_directory = kwargs['bare_directory']
|
|
| 101 | 102 |
|
| 102 | 103 |
# Setup the directories. Root and output_directory should be
|
| 103 | 104 |
# available to subclasses, hence being single-underscore. The
|
| 104 | 105 |
# others are private to this class.
|
| 105 |
- self._root = os.path.join(directory, 'root')
|
|
| 106 |
+ # If the directory is bare, it probably doesn't need scratch
|
|
| 107 |
+ if self.__bare_directory:
|
|
| 108 |
+ self._root = directory
|
|
| 109 |
+ self.__scratch = None
|
|
| 110 |
+ os.makedirs(self._root, exist_ok=True)
|
|
| 111 |
+ else:
|
|
| 112 |
+ self._root = os.path.join(directory, 'root')
|
|
| 113 |
+ self.__scratch = os.path.join(directory, 'scratch')
|
|
| 114 |
+ for directory_ in [self._root, self.__scratch]:
|
|
| 115 |
+ os.makedirs(directory_, exist_ok=True)
|
|
| 116 |
+ |
|
| 106 | 117 |
self._output_directory = None
|
| 107 |
- self.__directory = directory
|
|
| 108 |
- self.__scratch = os.path.join(self.__directory, 'scratch')
|
|
| 109 |
- for directory_ in [self._root, self.__scratch]:
|
|
| 110 |
- os.makedirs(directory_, exist_ok=True)
|
|
| 111 | 118 |
self._vdir = None
|
| 112 | 119 |
|
| 113 | 120 |
# This is set if anyone requests access to the underlying
|
| ... | ... | @@ -334,6 +341,7 @@ class Sandbox(): |
| 334 | 341 |
# Returns:
|
| 335 | 342 |
# (str): The sandbox scratch directory
|
| 336 | 343 |
def _get_scratch_directory(self):
|
| 344 |
+ assert not self.__bare_directory, "Scratch is not going to work with bare directories"
|
|
| 337 | 345 |
return self.__scratch
|
| 338 | 346 |
|
| 339 | 347 |
# _get_output()
|
| ... | ... | @@ -55,7 +55,7 @@ def test_open_workspace(cli, tmpdir, datafiles): |
| 55 | 55 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 56 | 56 |
workspace_dir = os.path.join(str(tmpdir), "workspace_hello")
|
| 57 | 57 |
|
| 58 |
- result = cli.run(project=project, args=['workspace', 'open', '-f', 'hello.bst', workspace_dir])
|
|
| 58 |
+ result = cli.run(project=project, args=['workspace', 'open', '-f', '--directory', workspace_dir, 'hello.bst', ])
|
|
| 59 | 59 |
result.assert_success()
|
| 60 | 60 |
|
| 61 | 61 |
result = cli.run(project=project, args=['workspace', 'list'])
|
| ... | ... | @@ -72,7 +72,7 @@ def test_make_change_in_workspace(cli, tmpdir, datafiles): |
| 72 | 72 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 73 | 73 |
workspace_dir = os.path.join(str(tmpdir), "workspace_hello")
|
| 74 | 74 |
|
| 75 |
- result = cli.run(project=project, args=['workspace', 'open', '-f', 'hello.bst', workspace_dir])
|
|
| 75 |
+ result = cli.run(project=project, args=['workspace', 'open', '-f', '--directory', workspace_dir, 'hello.bst'])
|
|
| 76 | 76 |
result.assert_success()
|
| 77 | 77 |
|
| 78 | 78 |
result = cli.run(project=project, args=['workspace', 'list'])
|
| ... | ... | @@ -44,7 +44,7 @@ def test_open_cross_junction_workspace(cli, tmpdir, datafiles): |
| 44 | 44 |
workspace_dir = os.path.join(str(tmpdir), "workspace_hello_junction")
|
| 45 | 45 |
|
| 46 | 46 |
result = cli.run(project=project,
|
| 47 |
- args=['workspace', 'open', 'hello-junction.bst:hello.bst', workspace_dir])
|
|
| 47 |
+ args=['workspace', 'open', '--directory', workspace_dir, 'hello-junction.bst:hello.bst'])
|
|
| 48 | 48 |
result.assert_success()
|
| 49 | 49 |
|
| 50 | 50 |
result = cli.run(project=project,
|
| ... | ... | @@ -509,7 +509,7 @@ def test_build_checkout_workspaced_junction(cli, tmpdir, datafiles): |
| 509 | 509 |
|
| 510 | 510 |
# Now open a workspace on the junction
|
| 511 | 511 |
#
|
| 512 |
- result = cli.run(project=project, args=['workspace', 'open', 'junction.bst', workspace])
|
|
| 512 |
+ result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, 'junction.bst'])
|
|
| 513 | 513 |
result.assert_success()
|
| 514 | 514 |
filename = os.path.join(workspace, 'files', 'etc-files', 'etc', 'animal.conf')
|
| 515 | 515 |
|
| ... | ... | @@ -47,7 +47,7 @@ def open_cross_junction(cli, tmpdir): |
| 47 | 47 |
workspace = tmpdir.join("workspace")
|
| 48 | 48 |
|
| 49 | 49 |
element = 'sub.bst:data.bst'
|
| 50 |
- args = ['workspace', 'open', element, str(workspace)]
|
|
| 50 |
+ args = ['workspace', 'open', '--directory', str(workspace), element]
|
|
| 51 | 51 |
result = cli.run(project=project, args=args)
|
| 52 | 52 |
result.assert_success()
|
| 53 | 53 |
|
| ... | ... | @@ -21,6 +21,7 @@ |
| 21 | 21 |
# Phillip Smyth <phillip smyth codethink co uk>
|
| 22 | 22 |
# Jonathan Maw <jonathan maw codethink co uk>
|
| 23 | 23 |
# Richard Maw <richard maw codethink co uk>
|
| 24 |
+# William Salmon <will salmon codethink co uk>
|
|
| 24 | 25 |
#
|
| 25 | 26 |
|
| 26 | 27 |
import os
|
| ... | ... | @@ -43,72 +44,195 @@ DATA_DIR = os.path.join( |
| 43 | 44 |
)
|
| 44 | 45 |
|
| 45 | 46 |
|
| 47 |
+class WorkspaceCreater():
|
|
| 48 |
+ def __init__(self, cli, tmpdir, datafiles, project_path=None):
|
|
| 49 |
+ self.cli = cli
|
|
| 50 |
+ self.tmpdir = tmpdir
|
|
| 51 |
+ self.datafiles = datafiles
|
|
| 52 |
+ |
|
| 53 |
+ if not project_path:
|
|
| 54 |
+ project_path = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 55 |
+ else:
|
|
| 56 |
+ shutil.copytree(os.path.join(datafiles.dirname, datafiles.basename), project_path)
|
|
| 57 |
+ |
|
| 58 |
+ self.project_path = project_path
|
|
| 59 |
+ self.bin_files_path = os.path.join(project_path, 'files', 'bin-files')
|
|
| 60 |
+ |
|
| 61 |
+ self.workspace_cmd = os.path.join(self.project_path, 'workspace_cmd')
|
|
| 62 |
+ |
|
| 63 |
+ def create_workspace_element(self, kind, track, suffix='', workspace_dir=None,
|
|
| 64 |
+ element_attrs=None):
|
|
| 65 |
+ element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
|
|
| 66 |
+ element_path = os.path.join(self.project_path, 'elements')
|
|
| 67 |
+ if not workspace_dir:
|
|
| 68 |
+ workspace_dir = os.path.join(self.workspace_cmd, element_name.rstrip('.bst'))
|
|
| 69 |
+ |
|
| 70 |
+ # Create our repo object of the given source type with
|
|
| 71 |
+ # the bin files, and then collect the initial ref.
|
|
| 72 |
+ repo = create_repo(kind, str(self.tmpdir))
|
|
| 73 |
+ ref = repo.create(self.bin_files_path)
|
|
| 74 |
+ if track:
|
|
| 75 |
+ ref = None
|
|
| 76 |
+ |
|
| 77 |
+ # Write out our test target
|
|
| 78 |
+ element = {
|
|
| 79 |
+ 'kind': 'import',
|
|
| 80 |
+ 'sources': [
|
|
| 81 |
+ repo.source_config(ref=ref)
|
|
| 82 |
+ ]
|
|
| 83 |
+ }
|
|
| 84 |
+ if element_attrs:
|
|
| 85 |
+ element = {**element, **element_attrs}
|
|
| 86 |
+ _yaml.dump(element,
|
|
| 87 |
+ os.path.join(element_path,
|
|
| 88 |
+ element_name))
|
|
| 89 |
+ return element_name, element_path, workspace_dir
|
|
| 90 |
+ |
|
| 91 |
+ def create_workspace_elements(self, kinds, track, suffixs=None, workspace_dir_usr=None,
|
|
| 92 |
+ element_attrs=None):
|
|
| 93 |
+ |
|
| 94 |
+ element_tuples = []
|
|
| 95 |
+ |
|
| 96 |
+ if suffixs is None:
|
|
| 97 |
+ suffixs = ['', ] * len(kinds)
|
|
| 98 |
+ else:
|
|
| 99 |
+ if len(suffixs) != len(kinds):
|
|
| 100 |
+ raise "terable error"
|
|
| 101 |
+ |
|
| 102 |
+ for suffix, kind in zip(suffixs, kinds):
|
|
| 103 |
+ element_name, element_path, workspace_dir = \
|
|
| 104 |
+ self.create_workspace_element(kind, track, suffix, workspace_dir_usr,
|
|
| 105 |
+ element_attrs)
|
|
| 106 |
+ |
|
| 107 |
+ # Assert that there is no reference, a track & fetch is needed
|
|
| 108 |
+ state = self.cli.get_element_state(self.project_path, element_name)
|
|
| 109 |
+ if track:
|
|
| 110 |
+ assert state == 'no reference'
|
|
| 111 |
+ else:
|
|
| 112 |
+ assert state == 'fetch needed'
|
|
| 113 |
+ element_tuples.append((element_name, workspace_dir))
|
|
| 114 |
+ |
|
| 115 |
+ return element_tuples
|
|
| 116 |
+ |
|
| 117 |
+ def open_workspaces(self, kinds, track, suffixs=None, workspace_dir=None,
|
|
| 118 |
+ element_attrs=None):
|
|
| 119 |
+ |
|
| 120 |
+ element_tuples = self.create_workspace_elements(kinds, track, suffixs, workspace_dir,
|
|
| 121 |
+ element_attrs)
|
|
| 122 |
+ os.makedirs(self.workspace_cmd, exist_ok=True)
|
|
| 123 |
+ |
|
| 124 |
+ # Now open the workspace, this should have the effect of automatically
|
|
| 125 |
+ # tracking & fetching the source from the repo.
|
|
| 126 |
+ args = ['workspace', 'open']
|
|
| 127 |
+ if track:
|
|
| 128 |
+ args.append('--track')
|
|
| 129 |
+ if workspace_dir is not None:
|
|
| 130 |
+ assert len(element_tuples) == 1, "test logic error"
|
|
| 131 |
+ _, workspace_dir = element_tuples[0]
|
|
| 132 |
+ args.extend(['--directory', workspace_dir])
|
|
| 133 |
+ |
|
| 134 |
+ args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
|
|
| 135 |
+ result = self.cli.run(cwd=self.workspace_cmd, project=self.project_path, args=args)
|
|
| 136 |
+ |
|
| 137 |
+ result.assert_success()
|
|
| 138 |
+ |
|
| 139 |
+ for element_name, workspace_dir in element_tuples:
|
|
| 140 |
+ # Assert that we are now buildable because the source is
|
|
| 141 |
+ # now cached.
|
|
| 142 |
+ assert self.cli.get_element_state(self.project_path, element_name) == 'buildable'
|
|
| 143 |
+ |
|
| 144 |
+ # Check that the executable hello file is found in the workspace
|
|
| 145 |
+ filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
|
|
| 146 |
+ assert os.path.exists(filename)
|
|
| 147 |
+ |
|
| 148 |
+ return element_tuples
|
|
| 149 |
+ |
|
| 150 |
+ |
|
| 46 | 151 |
def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
|
| 47 | 152 |
project_path=None, element_attrs=None):
|
| 48 |
- if not workspace_dir:
|
|
| 49 |
- workspace_dir = os.path.join(str(tmpdir), 'workspace{}'.format(suffix))
|
|
| 50 |
- if not project_path:
|
|
| 51 |
- project_path = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 52 |
- else:
|
|
| 53 |
- shutil.copytree(os.path.join(datafiles.dirname, datafiles.basename), project_path)
|
|
| 54 |
- bin_files_path = os.path.join(project_path, 'files', 'bin-files')
|
|
| 55 |
- element_path = os.path.join(project_path, 'elements')
|
|
| 56 |
- element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
|
|
| 153 |
+ workspace_object = WorkspaceCreater(cli, tmpdir, datafiles, project_path)
|
|
| 154 |
+ workspaces = workspace_object.open_workspaces((kind, ), track, (suffix, ), workspace_dir,
|
|
| 155 |
+ element_attrs)
|
|
| 156 |
+ assert len(workspaces) == 1
|
|
| 157 |
+ element_name, workspace = workspaces[0]
|
|
| 158 |
+ return element_name, workspace_object.project_path, workspace
|
|
| 57 | 159 |
|
| 58 |
- # Create our repo object of the given source type with
|
|
| 59 |
- # the bin files, and then collect the initial ref.
|
|
| 60 |
- #
|
|
| 61 |
- repo = create_repo(kind, str(tmpdir))
|
|
| 62 |
- ref = repo.create(bin_files_path)
|
|
| 63 |
- if track:
|
|
| 64 |
- ref = None
|
|
| 65 | 160 |
|
| 66 |
- # Write out our test target
|
|
| 67 |
- element = {
|
|
| 68 |
- 'kind': 'import',
|
|
| 69 |
- 'sources': [
|
|
| 70 |
- repo.source_config(ref=ref)
|
|
| 71 |
- ]
|
|
| 72 |
- }
|
|
| 73 |
- if element_attrs:
|
|
| 74 |
- element = {**element, **element_attrs}
|
|
| 75 |
- _yaml.dump(element,
|
|
| 76 |
- os.path.join(element_path,
|
|
| 77 |
- element_name))
|
|
| 161 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 162 |
+@pytest.mark.parametrize("kind", repo_kinds)
|
|
| 163 |
+def test_open(cli, tmpdir, datafiles, kind):
|
|
| 164 |
+ open_workspace(cli, tmpdir, datafiles, kind, False)
|
|
| 78 | 165 |
|
| 79 |
- # Assert that there is no reference, a track & fetch is needed
|
|
| 80 |
- state = cli.get_element_state(project_path, element_name)
|
|
| 81 |
- if track:
|
|
| 82 |
- assert state == 'no reference'
|
|
| 83 |
- else:
|
|
| 84 |
- assert state == 'fetch needed'
|
|
| 166 |
+ |
|
| 167 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 168 |
+def test_open_multi(cli, tmpdir, datafiles):
|
|
| 169 |
+ |
|
| 170 |
+ workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
|
|
| 171 |
+ workspaces = workspace_object.open_workspaces(repo_kinds, False)
|
|
| 172 |
+ |
|
| 173 |
+ for (elname, workspace), kind in zip(workspaces, repo_kinds):
|
|
| 174 |
+ assert kind in elname
|
|
| 175 |
+ workspace_lsdir = os.listdir(workspace)
|
|
| 176 |
+ if kind == 'git':
|
|
| 177 |
+ assert('.git' in workspace_lsdir)
|
|
| 178 |
+ elif kind == 'bzr':
|
|
| 179 |
+ assert('.bzr' in workspace_lsdir)
|
|
| 180 |
+ else:
|
|
| 181 |
+ assert not ('.git' in workspace_lsdir)
|
|
| 182 |
+ assert not ('.bzr' in workspace_lsdir)
|
|
| 183 |
+ |
|
| 184 |
+ |
|
| 185 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 186 |
+def test_open_multi_with_directory(cli, tmpdir, datafiles):
|
|
| 187 |
+ workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
|
|
| 188 |
+ |
|
| 189 |
+ element_tuples = workspace_object.create_workspace_elements(repo_kinds, False, repo_kinds)
|
|
| 190 |
+ os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
|
|
| 191 |
+ |
|
| 192 |
+ # Now open the workspace, this should have the effect of automatically
|
|
| 193 |
+ # tracking & fetching the source from the repo.
|
|
| 194 |
+ args = ['workspace', 'open']
|
|
| 195 |
+ args.extend(['--directory', 'any/dir/should/fail'])
|
|
| 196 |
+ |
|
| 197 |
+ args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
|
|
| 198 |
+ result = workspace_object.cli.run(cwd=workspace_object.workspace_cmd, project=workspace_object.project_path,
|
|
| 199 |
+ args=args)
|
|
| 200 |
+ |
|
| 201 |
+ result.assert_main_error(ErrorDomain.ARTIFACT, None)
|
|
| 202 |
+ assert ("Directory option can only be used if a single element is given" in result.stderr)
|
|
| 203 |
+ |
|
| 204 |
+ |
|
| 205 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 206 |
+def test_open_defaultlocation(cli, tmpdir, datafiles):
|
|
| 207 |
+ workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
|
|
| 208 |
+ |
|
| 209 |
+ ((element_name, workspace_dir), ) = workspace_object.create_workspace_elements(['git'], False, ['git'])
|
|
| 210 |
+ os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
|
|
| 85 | 211 |
|
| 86 | 212 |
# Now open the workspace, this should have the effect of automatically
|
| 87 | 213 |
# tracking & fetching the source from the repo.
|
| 88 | 214 |
args = ['workspace', 'open']
|
| 89 |
- if track:
|
|
| 90 |
- args.append('--track')
|
|
| 91 |
- args.extend([element_name, workspace_dir])
|
|
| 92 |
- result = cli.run(project=project_path, args=args)
|
|
| 215 |
+ args.append(element_name)
|
|
| 216 |
+ |
|
| 217 |
+ # In the other tests we set the cmd to workspace_object.workspace_cmd with the optional
|
|
| 218 |
+ # argument, cwd for the workspace_object.cli.run function. But here we set the default
|
|
| 219 |
+ # workspace location to workspace_object.workspace_cmd and run the cli.run function with
|
|
| 220 |
+ # no cwd option so that it runs in the project directory.
|
|
| 221 |
+ cli.configure({'workspacedir': workspace_object.workspace_cmd})
|
|
| 222 |
+ result = workspace_object.cli.run(project=workspace_object.project_path,
|
|
| 223 |
+ args=args)
|
|
| 93 | 224 |
|
| 94 | 225 |
result.assert_success()
|
| 95 | 226 |
|
| 96 |
- # Assert that we are now buildable because the source is
|
|
| 97 |
- # now cached.
|
|
| 98 |
- assert cli.get_element_state(project_path, element_name) == 'buildable'
|
|
| 227 |
+ assert cli.get_element_state(workspace_object.project_path, element_name) == 'buildable'
|
|
| 99 | 228 |
|
| 100 | 229 |
# Check that the executable hello file is found in the workspace
|
| 230 |
+ # even though the cli.run function was not run with cwd = workspace_object.workspace_cmd
|
|
| 231 |
+ # the workspace should be created in there as we used the 'workspacedir' configuration
|
|
| 232 |
+ # option.
|
|
| 101 | 233 |
filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
|
| 102 | 234 |
assert os.path.exists(filename)
|
| 103 | 235 |
|
| 104 |
- return (element_name, project_path, workspace_dir)
|
|
| 105 |
- |
|
| 106 |
- |
|
| 107 |
-@pytest.mark.datafiles(DATA_DIR)
|
|
| 108 |
-@pytest.mark.parametrize("kind", repo_kinds)
|
|
| 109 |
-def test_open(cli, tmpdir, datafiles, kind):
|
|
| 110 |
- open_workspace(cli, tmpdir, datafiles, kind, False)
|
|
| 111 |
- |
|
| 112 | 236 |
|
| 113 | 237 |
@pytest.mark.datafiles(DATA_DIR)
|
| 114 | 238 |
def test_open_bzr_customize(cli, tmpdir, datafiles):
|
| ... | ... | @@ -150,7 +274,7 @@ def test_open_force(cli, tmpdir, datafiles, kind): |
| 150 | 274 |
|
| 151 | 275 |
# Now open the workspace again with --force, this should happily succeed
|
| 152 | 276 |
result = cli.run(project=project, args=[
|
| 153 |
- 'workspace', 'open', '--force', element_name, workspace
|
|
| 277 |
+ 'workspace', 'open', '--force', '--directory', workspace, element_name
|
|
| 154 | 278 |
])
|
| 155 | 279 |
result.assert_success()
|
| 156 | 280 |
|
| ... | ... | @@ -165,7 +289,7 @@ def test_open_force_open(cli, tmpdir, datafiles, kind): |
| 165 | 289 |
|
| 166 | 290 |
# Now open the workspace again with --force, this should happily succeed
|
| 167 | 291 |
result = cli.run(project=project, args=[
|
| 168 |
- 'workspace', 'open', '--force', element_name, workspace
|
|
| 292 |
+ 'workspace', 'open', '--force', '--directory', workspace, element_name
|
|
| 169 | 293 |
])
|
| 170 | 294 |
result.assert_success()
|
| 171 | 295 |
|
| ... | ... | @@ -196,7 +320,7 @@ def test_open_force_different_workspace(cli, tmpdir, datafiles, kind): |
| 196 | 320 |
|
| 197 | 321 |
# Now open the workspace again with --force, this should happily succeed
|
| 198 | 322 |
result = cli.run(project=project, args=[
|
| 199 |
- 'workspace', 'open', '--force', element_name2, workspace
|
|
| 323 |
+ 'workspace', 'open', '--force', '--directory', workspace, element_name2
|
|
| 200 | 324 |
])
|
| 201 | 325 |
|
| 202 | 326 |
# Assert that the file in workspace 1 has been replaced
|
| ... | ... | @@ -504,7 +628,7 @@ def test_buildable_no_ref(cli, tmpdir, datafiles): |
| 504 | 628 |
# Now open the workspace. We don't need to checkout the source though.
|
| 505 | 629 |
workspace = os.path.join(str(tmpdir), 'workspace-no-ref')
|
| 506 | 630 |
os.makedirs(workspace)
|
| 507 |
- args = ['workspace', 'open', '--no-checkout', element_name, workspace]
|
|
| 631 |
+ args = ['workspace', 'open', '--no-checkout', '--directory', workspace, element_name]
|
|
| 508 | 632 |
result = cli.run(project=project, args=args)
|
| 509 | 633 |
result.assert_success()
|
| 510 | 634 |
|
| ... | ... | @@ -766,7 +890,7 @@ def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expecte |
| 766 | 890 |
element_name))
|
| 767 | 891 |
|
| 768 | 892 |
# Make a change to the workspaces file
|
| 769 |
- result = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
|
|
| 893 |
+ result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
|
|
| 770 | 894 |
result.assert_success()
|
| 771 | 895 |
result = cli.run(project=project, args=['workspace', 'close', '--remove-dir', element_name])
|
| 772 | 896 |
result.assert_success()
|
| 1 |
+import os
|
|
| 2 |
+import pytest
|
|
| 3 |
+import shutil
|
|
| 4 |
+ |
|
| 5 |
+from tests.testutils import cli, cli_integration, create_artifact_share
|
|
| 6 |
+from buildstream._exceptions import ErrorDomain
|
|
| 7 |
+ |
|
| 8 |
+ |
|
| 9 |
+pytestmark = pytest.mark.integration
|
|
| 10 |
+ |
|
| 11 |
+ |
|
| 12 |
+DATA_DIR = os.path.join(
|
|
| 13 |
+ os.path.dirname(os.path.realpath(__file__)),
|
|
| 14 |
+ "project"
|
|
| 15 |
+)
|
|
| 16 |
+ |
|
| 17 |
+ |
|
| 18 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 19 |
+def test_buildtree_staged(cli_integration, tmpdir, datafiles):
|
|
| 20 |
+ # i.e. tests that cached build trees are staged by `bst shell --build`
|
|
| 21 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 22 |
+ element_name = 'build-shell/buildtree.bst'
|
|
| 23 |
+ |
|
| 24 |
+ res = cli_integration.run(project=project, args=['build', element_name])
|
|
| 25 |
+ res.assert_success()
|
|
| 26 |
+ |
|
| 27 |
+ res = cli_integration.run(project=project, args=[
|
|
| 28 |
+ 'shell', '--build', element_name, '--', 'grep', '-q', 'Hi', 'test'
|
|
| 29 |
+ ])
|
|
| 30 |
+ res.assert_success()
|
|
| 31 |
+ |
|
| 32 |
+ |
|
| 33 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 34 |
+def test_buildtree_from_failure(cli_integration, tmpdir, datafiles):
|
|
| 35 |
+ # i.e. test that on a build failure, we can still shell into it
|
|
| 36 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 37 |
+ element_name = 'build-shell/buildtree-fail.bst'
|
|
| 38 |
+ |
|
| 39 |
+ res = cli_integration.run(project=project, args=['build', element_name])
|
|
| 40 |
+ res.assert_main_error(ErrorDomain.STREAM, None)
|
|
| 41 |
+ |
|
| 42 |
+ # Assert that file has expected contents
|
|
| 43 |
+ res = cli_integration.run(project=project, args=[
|
|
| 44 |
+ 'shell', '--build', element_name, '--', 'cat', 'test'
|
|
| 45 |
+ ])
|
|
| 46 |
+ res.assert_success()
|
|
| 47 |
+ assert 'Hi' in res.output
|
|
| 48 |
+ |
|
| 49 |
+ |
|
| 50 |
+# Check that build shells work when pulled from a remote cache
|
|
| 51 |
+# This is to roughly simulate remote execution
|
|
| 52 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 53 |
+def test_buildtree_pulled(cli, tmpdir, datafiles):
|
|
| 54 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 55 |
+ element_name = 'build-shell/buildtree.bst'
|
|
| 56 |
+ |
|
| 57 |
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
|
|
| 58 |
+ # Build the element to push it to cache
|
|
| 59 |
+ cli.configure({
|
|
| 60 |
+ 'artifacts': {'url': share.repo, 'push': True}
|
|
| 61 |
+ })
|
|
| 62 |
+ result = cli.run(project=project, args=['build', element_name])
|
|
| 63 |
+ result.assert_success()
|
|
| 64 |
+ assert cli.get_element_state(project, element_name) == 'cached'
|
|
| 65 |
+ |
|
| 66 |
+ # Discard the cache
|
|
| 67 |
+ cli.configure({
|
|
| 68 |
+ 'artifacts': {'url': share.repo, 'push': True},
|
|
| 69 |
+ 'artifactdir': os.path.join(cli.directory, 'artifacts2')
|
|
| 70 |
+ })
|
|
| 71 |
+ assert cli.get_element_state(project, element_name) != 'cached'
|
|
| 72 |
+ |
|
| 73 |
+ # Pull from cache
|
|
| 74 |
+ result = cli.run(project=project, args=['pull', '--deps', 'all', element_name])
|
|
| 75 |
+ result.assert_success()
|
|
| 76 |
+ |
|
| 77 |
+ # Check it's using the cached build tree
|
|
| 78 |
+ res = cli.run(project=project, args=[
|
|
| 79 |
+ 'shell', '--build', element_name, '--', 'grep', '-q', 'Hi', 'test'
|
|
| 80 |
+ ])
|
|
| 81 |
+ res.assert_success()
|
| 1 |
+kind: manual
|
|
| 2 |
+description: |
|
|
| 3 |
+ Puts a file in the build tree so that build tree caching and staging can be tested,
|
|
| 4 |
+ then deliberately failing to build so we can check the output.
|
|
| 5 |
+ |
|
| 6 |
+depends:
|
|
| 7 |
+ - filename: base.bst
|
|
| 8 |
+ type: build
|
|
| 9 |
+ |
|
| 10 |
+config:
|
|
| 11 |
+ build-commands:
|
|
| 12 |
+ - "echo 'Hi' > %{build-root}/test"
|
|
| 13 |
+ - "false"
|
| 1 |
+kind: manual
|
|
| 2 |
+description: |
|
|
| 3 |
+ Puts a file in the build tree so that build tree caching and staging can be tested.
|
|
| 4 |
+ |
|
| 5 |
+depends:
|
|
| 6 |
+ - filename: base.bst
|
|
| 7 |
+ type: build
|
|
| 8 |
+ |
|
| 9 |
+config:
|
|
| 10 |
+ build-commands:
|
|
| 11 |
+ - "echo 'Hi' > %{build-root}/test"
|
| ... | ... | @@ -278,7 +278,7 @@ def test_workspace_visible(cli, tmpdir, datafiles): |
| 278 | 278 |
|
| 279 | 279 |
# Open a workspace on our build failing element
|
| 280 | 280 |
#
|
| 281 |
- res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
|
|
| 281 |
+ res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
|
|
| 282 | 282 |
assert res.exit_code == 0
|
| 283 | 283 |
|
| 284 | 284 |
# Ensure the dependencies of our build failing element are built
|
| ... | ... | @@ -302,46 +302,33 @@ def test_workspace_visible(cli, tmpdir, datafiles): |
| 302 | 302 |
assert result.output == workspace_hello
|
| 303 | 303 |
|
| 304 | 304 |
|
| 305 |
-# Test that we can see the workspace files in a shell
|
|
| 306 |
-@pytest.mark.integration
|
|
| 305 |
+# Test that '--sysroot' works
|
|
| 307 | 306 |
@pytest.mark.datafiles(DATA_DIR)
|
| 308 |
-def test_sysroot_workspace_visible(cli, tmpdir, datafiles):
|
|
| 307 |
+def test_sysroot(cli, tmpdir, datafiles):
|
|
| 309 | 308 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 310 |
- workspace = os.path.join(cli.directory, 'workspace')
|
|
| 311 |
- element_name = 'workspace/workspace-mount-fail.bst'
|
|
| 312 |
- |
|
| 313 |
- # Open a workspace on our build failing element
|
|
| 314 |
- #
|
|
| 315 |
- res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
|
|
| 316 |
- assert res.exit_code == 0
|
|
| 317 |
- |
|
| 318 |
- # Ensure the dependencies of our build failing element are built
|
|
| 319 |
- result = cli.run(project=project, args=['build', element_name])
|
|
| 320 |
- result.assert_main_error(ErrorDomain.STREAM, None)
|
|
| 321 |
- |
|
| 322 |
- # Discover the sysroot of the failed build directory, after one
|
|
| 323 |
- # failed build, there should be only one directory there.
|
|
| 324 |
- #
|
|
| 325 |
- build_base = os.path.join(cli.directory, 'build')
|
|
| 326 |
- build_dirs = os.listdir(path=build_base)
|
|
| 327 |
- assert len(build_dirs) == 1
|
|
| 328 |
- build_dir = os.path.join(build_base, build_dirs[0])
|
|
| 329 |
- |
|
| 330 |
- # Obtain a copy of the hello.c content from the workspace
|
|
| 331 |
- #
|
|
| 332 |
- workspace_hello_path = os.path.join(cli.directory, 'workspace', 'hello.c')
|
|
| 333 |
- assert os.path.exists(workspace_hello_path)
|
|
| 334 |
- with open(workspace_hello_path, 'r') as f:
|
|
| 335 |
- workspace_hello = f.read()
|
|
| 336 |
- |
|
| 337 |
- # Cat the hello.c file from a bst shell command, and assert
|
|
| 338 |
- # that we got the same content here
|
|
| 339 |
- #
|
|
| 340 |
- result = cli.run(project=project, args=[
|
|
| 341 |
- 'shell', '--build', '--sysroot', build_dir, element_name, '--', 'cat', 'hello.c'
|
|
| 309 |
+ base_element = "base/base-alpine.bst"
|
|
| 310 |
+ # test element only needs to be something lightweight for this test
|
|
| 311 |
+ test_element = "script/script.bst"
|
|
| 312 |
+ checkout_dir = os.path.join(str(tmpdir), 'alpine-sysroot')
|
|
| 313 |
+ test_file = 'hello'
|
|
| 314 |
+ |
|
| 315 |
+ # Build and check out a sysroot
|
|
| 316 |
+ res = cli.run(project=project, args=['build', base_element])
|
|
| 317 |
+ res.assert_success()
|
|
| 318 |
+ res = cli.run(project=project, args=['checkout', base_element, checkout_dir])
|
|
| 319 |
+ res.assert_success()
|
|
| 320 |
+ |
|
| 321 |
+ # Mutate the sysroot
|
|
| 322 |
+ test_path = os.path.join(checkout_dir, test_file)
|
|
| 323 |
+ with open(test_path, 'w') as f:
|
|
| 324 |
+ f.write('hello\n')
|
|
| 325 |
+ |
|
| 326 |
+ # Shell into the sysroot and check the test file exists
|
|
| 327 |
+ res = cli.run(project=project, args=[
|
|
| 328 |
+ 'shell', '--build', '--sysroot', checkout_dir, test_element, '--',
|
|
| 329 |
+ 'grep', '-q', 'hello', '/' + test_file
|
|
| 342 | 330 |
])
|
| 343 |
- assert result.exit_code == 0
|
|
| 344 |
- assert result.output == workspace_hello
|
|
| 331 |
+ res.assert_success()
|
|
| 345 | 332 |
|
| 346 | 333 |
|
| 347 | 334 |
# Test system integration commands can access devices in /dev
|
| ... | ... | @@ -23,7 +23,7 @@ def test_workspace_mount(cli, tmpdir, datafiles): |
| 23 | 23 |
workspace = os.path.join(cli.directory, 'workspace')
|
| 24 | 24 |
element_name = 'workspace/workspace-mount.bst'
|
| 25 | 25 |
|
| 26 |
- res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
|
|
| 26 |
+ res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
|
|
| 27 | 27 |
assert res.exit_code == 0
|
| 28 | 28 |
|
| 29 | 29 |
res = cli.run(project=project, args=['build', element_name])
|
| ... | ... | @@ -39,7 +39,7 @@ def test_workspace_commanddir(cli, tmpdir, datafiles): |
| 39 | 39 |
workspace = os.path.join(cli.directory, 'workspace')
|
| 40 | 40 |
element_name = 'workspace/workspace-commanddir.bst'
|
| 41 | 41 |
|
| 42 |
- res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
|
|
| 42 |
+ res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
|
|
| 43 | 43 |
assert res.exit_code == 0
|
| 44 | 44 |
|
| 45 | 45 |
res = cli.run(project=project, args=['build', element_name])
|
| ... | ... | @@ -75,7 +75,7 @@ def test_workspace_updated_dependency(cli, tmpdir, datafiles): |
| 75 | 75 |
_yaml.dump(dependency, os.path.join(element_path, dep_name))
|
| 76 | 76 |
|
| 77 | 77 |
# First open the workspace
|
| 78 |
- res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
|
|
| 78 |
+ res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
|
|
| 79 | 79 |
assert res.exit_code == 0
|
| 80 | 80 |
|
| 81 | 81 |
# We build the workspaced element, so that we have an artifact
|
| ... | ... | @@ -130,7 +130,7 @@ def test_workspace_update_dependency_failed(cli, tmpdir, datafiles): |
| 130 | 130 |
_yaml.dump(dependency, os.path.join(element_path, dep_name))
|
| 131 | 131 |
|
| 132 | 132 |
# First open the workspace
|
| 133 |
- res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
|
|
| 133 |
+ res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
|
|
| 134 | 134 |
assert res.exit_code == 0
|
| 135 | 135 |
|
| 136 | 136 |
# We build the workspaced element, so that we have an artifact
|
| ... | ... | @@ -205,7 +205,7 @@ def test_updated_dependency_nested(cli, tmpdir, datafiles): |
| 205 | 205 |
_yaml.dump(dependency, os.path.join(element_path, dep_name))
|
| 206 | 206 |
|
| 207 | 207 |
# First open the workspace
|
| 208 |
- res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
|
|
| 208 |
+ res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
|
|
| 209 | 209 |
assert res.exit_code == 0
|
| 210 | 210 |
|
| 211 | 211 |
# We build the workspaced element, so that we have an artifact
|
| ... | ... | @@ -258,7 +258,7 @@ def test_incremental_configure_commands_run_only_once(cli, tmpdir, datafiles): |
| 258 | 258 |
_yaml.dump(element, os.path.join(element_path, element_name))
|
| 259 | 259 |
|
| 260 | 260 |
# We open a workspace on the above element
|
| 261 |
- res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
|
|
| 261 |
+ res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
|
|
| 262 | 262 |
res.assert_success()
|
| 263 | 263 |
|
| 264 | 264 |
# Then we build, and check whether the configure step succeeded
|
| ... | ... | @@ -108,7 +108,7 @@ def test_filter_forbid_also_rdep(datafiles, cli): |
| 108 | 108 |
def test_filter_workspace_open(datafiles, cli, tmpdir):
|
| 109 | 109 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 110 | 110 |
workspace_dir = os.path.join(tmpdir.dirname, tmpdir.basename, "workspace")
|
| 111 |
- result = cli.run(project=project, args=['workspace', 'open', 'deps-permitted.bst', workspace_dir])
|
|
| 111 |
+ result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'deps-permitted.bst'])
|
|
| 112 | 112 |
result.assert_success()
|
| 113 | 113 |
assert os.path.exists(os.path.join(workspace_dir, "foo"))
|
| 114 | 114 |
assert os.path.exists(os.path.join(workspace_dir, "bar"))
|
| ... | ... | @@ -120,7 +120,7 @@ def test_filter_workspace_build(datafiles, cli, tmpdir): |
| 120 | 120 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 121 | 121 |
tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
|
| 122 | 122 |
workspace_dir = os.path.join(tempdir, "workspace")
|
| 123 |
- result = cli.run(project=project, args=['workspace', 'open', 'output-orphans.bst', workspace_dir])
|
|
| 123 |
+ result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'output-orphans.bst'])
|
|
| 124 | 124 |
result.assert_success()
|
| 125 | 125 |
src = os.path.join(workspace_dir, "foo")
|
| 126 | 126 |
dst = os.path.join(workspace_dir, "quux")
|
| ... | ... | @@ -138,7 +138,7 @@ def test_filter_workspace_close(datafiles, cli, tmpdir): |
| 138 | 138 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 139 | 139 |
tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
|
| 140 | 140 |
workspace_dir = os.path.join(tempdir, "workspace")
|
| 141 |
- result = cli.run(project=project, args=['workspace', 'open', 'output-orphans.bst', workspace_dir])
|
|
| 141 |
+ result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'output-orphans.bst'])
|
|
| 142 | 142 |
result.assert_success()
|
| 143 | 143 |
src = os.path.join(workspace_dir, "foo")
|
| 144 | 144 |
dst = os.path.join(workspace_dir, "quux")
|
| ... | ... | @@ -158,7 +158,7 @@ def test_filter_workspace_reset(datafiles, cli, tmpdir): |
| 158 | 158 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 159 | 159 |
tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
|
| 160 | 160 |
workspace_dir = os.path.join(tempdir, "workspace")
|
| 161 |
- result = cli.run(project=project, args=['workspace', 'open', 'output-orphans.bst', workspace_dir])
|
|
| 161 |
+ result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'output-orphans.bst'])
|
|
| 162 | 162 |
result.assert_success()
|
| 163 | 163 |
src = os.path.join(workspace_dir, "foo")
|
| 164 | 164 |
dst = os.path.join(workspace_dir, "quux")
|
