Benjamin Schubert pushed to branch bschubert/dont-cache-errors-from-host-tools at BuildStream / buildstream
Commits:

- 27932739 by Benjamin Schubert at 2018-11-19T09:46:39Z
- ea2de561 by Benjamin Schubert at 2018-11-19T10:19:24Z
- f23b6031 by Benjamin Schubert at 2018-11-19T11:39:51Z
- a6defc0b by Benjamin Schubert at 2018-11-19T11:39:51Z
- 88089d2d by Benjamin Schubert at 2018-11-19T11:39:51Z
- fd9e46be by Benjamin Schubert at 2018-11-19T11:39:51Z
- d32e0b83 by Benjamin Schubert at 2018-11-19T11:39:51Z
- 6f837118 by Benjamin Schubert at 2018-11-19T12:22:40Z
- 30b72244 by Chandan Singh at 2018-11-19T12:35:01Z
- 76c5d2f8 by Chandan Singh at 2018-11-19T12:35:01Z
- 9f629638 by Chandan Singh at 2018-11-19T13:03:36Z
- 48c51761 by Benjamin Schubert at 2018-11-19T15:20:56Z
- 877ed543 by Benjamin Schubert at 2018-11-19T15:20:56Z
16 changed files:
- buildstream/_artifactcache/cascache.py
- buildstream/_frontend/cli.py
- buildstream/_pipeline.py
- buildstream/_stream.py
- buildstream/element.py
- buildstream/plugins/sources/git.py
- buildstream/plugins/sources/pip.py
- buildstream/utils.py
- conftest.py
- tests/completions/completions.py
- tests/frontend/buildtrack.py
- + tests/frontend/project/elements/checkout-deps.bst
- + tests/frontend/project/files/etc-files/etc/buildstream/config
- + tests/frontend/source_checkout.py
- tests/integration/cachedfail.py
- + tests/utils/movedirectory.py
Changes:
buildstream/_artifactcache/cascache.py:

@@ -24,7 +24,6 @@ import os
 import stat
 import tempfile
 import uuid
-import errno
 from urllib.parse import urlparse
 
 import grpc

@@ -140,17 +139,13 @@ class CASCache():
             checkoutdir = os.path.join(tmpdir, ref)
             self._checkout(checkoutdir, tree)
 
-            os.makedirs(os.path.dirname(dest), exist_ok=True)
             try:
-                os.rename(checkoutdir, dest)
+                utils.move_atomic(checkoutdir, dest)
+            except utils.DirectoryExistsError:
+                # Another process beat us to rename
+                pass
             except OSError as e:
-                # With rename it's possible to get either ENOTEMPTY or EEXIST
-                # in the case that the destination path is a not empty directory.
-                #
-                # If rename fails with these errors, another process beat
-                # us to it so just ignore.
-                if e.errno not in [errno.ENOTEMPTY, errno.EEXIST]:
-                    raise CASError("Failed to extract directory for ref '{}': {}".format(ref, e)) from e
+                raise CASError("Failed to extract directory for ref '{}': {}".format(ref, e)) from e
 
         return originaldest
 
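A point worth making explicit about the cascache.py change above: utils.DirectoryExistsError is a subclass of OSError (see the buildstream/utils.py hunk below), so the ordering of the two except clauses is what makes this work; the specific handler must come before the generic one. A minimal sketch of the pattern, with illustrative names and a stand-in exception in place of CASError:

    from buildstream import utils

    def extract_to(checkoutdir, dest, ref):
        try:
            utils.move_atomic(checkoutdir, dest)
        except utils.DirectoryExistsError:
            # Another process already extracted this ref; keep its copy
            pass
        except OSError as e:
            # Any other rename failure is a real error
            raise RuntimeError("Failed to extract directory for ref '{}': {}".format(ref, e)) from e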
buildstream/_frontend/cli.py:

@@ -664,6 +664,33 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
                          tar=tar)
 
 
+##################################################################
+#                  Source Checkout Command                       #
+##################################################################
+@cli.command(name='source-checkout', short_help='Checkout sources for an element')
+@click.option('--except', 'except_', multiple=True,
+              type=click.Path(readable=False),
+              help="Except certain dependencies")
+@click.option('--deps', '-d', default='none',
+              type=click.Choice(['build', 'none', 'run', 'all']),
+              help='The dependencies whose sources to checkout (default: none)')
+@click.option('--fetch', 'fetch_', default=False, is_flag=True,
+              help='Fetch elements if they are not fetched')
+@click.argument('element',
+                type=click.Path(readable=False))
+@click.argument('location', type=click.Path())
+@click.pass_obj
+def source_checkout(app, element, location, deps, fetch_, except_):
+    """Checkout sources of an element to the specified location
+    """
+    with app.initialized():
+        app.stream.source_checkout(element,
+                                   location=location,
+                                   deps=deps,
+                                   fetch=fetch_,
+                                   except_targets=except_)
+
+
 ##################################################################
 #                     Workspace Command                          #
 ##################################################################
buildstream/_pipeline.py:

@@ -370,7 +370,7 @@ class Pipeline():
                 detail += "  Element: {} is inconsistent\n".format(element._get_full_name())
                 for source in element.sources():
                     if source._get_consistency() == Consistency.INCONSISTENT:
-                        detail += "    Source {} is missing ref\n".format(source)
+                        detail += "    {} is missing ref\n".format(source)
                 detail += '\n'
             detail += "Try tracking these elements first with `bst track`\n"
 

@@ -383,6 +383,33 @@ class Pipeline():
                 detail += "  " + element._get_full_name() + "\n"
             raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline-workspaced")
 
+    # assert_sources_cached()
+    #
+    # Asserts that sources for the given list of elements are cached.
+    #
+    # Args:
+    #    elements (list): The list of elements
+    #
+    def assert_sources_cached(self, elements):
+        uncached = []
+        with self._context.timed_activity("Checking sources"):
+            for element in elements:
+                if element._get_consistency() != Consistency.CACHED:
+                    uncached.append(element)
+
+        if uncached:
+            detail = "Sources are not cached for the following elements:\n\n"
+            for element in uncached:
+                detail += "  Following sources for element: {} are not cached:\n".format(element._get_full_name())
+                for source in element.sources():
+                    if source._get_consistency() != Consistency.CACHED:
+                        detail += "    {}\n".format(source)
+                detail += '\n'
+            detail += "Try fetching these elements first with `bst fetch`,\n" + \
+                      "or run this command with `--fetch` option\n"
+
+            raise PipelineError("Uncached sources", detail=detail, reason="uncached-sources")
+
     #############################################################
     #                     Private Methods                       #
     #############################################################
buildstream/_stream.py:

@@ -379,27 +379,7 @@ class Stream():
         elements, _ = self._load((target,), (), fetch_subprojects=True)
         target = elements[0]
 
-        if not tar:
-            try:
-                os.makedirs(location, exist_ok=True)
-            except OSError as e:
-                raise StreamError("Failed to create checkout directory: '{}'"
-                                  .format(e)) from e
-
-        if not tar:
-            if not os.access(location, os.W_OK):
-                raise StreamError("Checkout directory '{}' not writable"
-                                  .format(location))
-            if not force and os.listdir(location):
-                raise StreamError("Checkout directory '{}' not empty"
-                                  .format(location))
-        elif os.path.exists(location) and location != '-':
-            if not os.access(location, os.W_OK):
-                raise StreamError("Output file '{}' not writable"
-                                  .format(location))
-            if not force and os.path.exists(location):
-                raise StreamError("Output file '{}' already exists"
-                                  .format(location))
+        self._check_location_writable(location, force=force, tar=tar)
 
         # Stage deps into a temporary sandbox first
         try:

@@ -443,6 +423,42 @@
             raise StreamError("Error while staging dependencies into a sandbox"
                               ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
 
+    # source_checkout()
+    #
+    # Checkout sources of the target element to the specified location
+    #
+    # Args:
+    #    target (str): The target element whose sources to checkout
+    #    location (str): Location to checkout the sources to
+    #    deps (str): The dependencies to checkout
+    #    fetch (bool): Whether to fetch missing sources
+    #    except_targets (list): List of targets to except from staging
+    #
+    def source_checkout(self, target, *,
+                        location=None,
+                        deps='none',
+                        fetch=False,
+                        except_targets=()):
+
+        self._check_location_writable(location)
+
+        elements, _ = self._load((target,), (),
+                                 selection=deps,
+                                 except_targets=except_targets,
+                                 fetch_subprojects=True)
+
+        # Assert all sources are cached
+        if fetch:
+            self._fetch(elements)
+        self._pipeline.assert_sources_cached(elements)
+
+        # Stage all sources determined by scope
+        try:
+            self._write_element_sources(location, elements)
+        except BstError as e:
+            raise StreamError("Error while writing sources"
+                              ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
+
     # workspace_open
     #
     # Open a project workspace

@@ -726,7 +742,7 @@
             if self._write_element_script(source_directory, element)
         ]
 
-        self._write_element_sources(tempdir, elements)
+        self._write_element_sources(os.path.join(tempdir, "source"), elements)
         self._write_build_script(tempdir, elements)
         self._collect_sources(tempdir, tar_location,
                               target.normal_name, compression)

@@ -1068,6 +1084,39 @@
             self._enqueue_plan(fetch_plan)
         self._run()
 
+    # _check_location_writable()
+    #
+    # Check if given location is writable.
+    #
+    # Args:
+    #    location (str): Destination path
+    #    force (bool): Allow files to be overwritten
+    #    tar (bool): Whether destination is a tarball
+    #
+    # Raises:
+    #    (StreamError): If the destination is not writable
+    #
+    def _check_location_writable(self, location, force=False, tar=False):
+        if not tar:
+            try:
+                os.makedirs(location, exist_ok=True)
+            except OSError as e:
+                raise StreamError("Failed to create destination directory: '{}'"
+                                  .format(e)) from e
+            if not os.access(location, os.W_OK):
+                raise StreamError("Destination directory '{}' not writable"
+                                  .format(location))
+            if not force and os.listdir(location):
+                raise StreamError("Destination directory '{}' not empty"
+                                  .format(location))
+        elif os.path.exists(location) and location != '-':
+            if not os.access(location, os.W_OK):
+                raise StreamError("Output file '{}' not writable"
+                                  .format(location))
+            if not force and os.path.exists(location):
+                raise StreamError("Output file '{}' already exists"
+                                  .format(location))
+
     # Helper function for checkout()
     #
     def _checkout_hardlinks(self, sandbox_vroot, directory):

@@ -1089,11 +1138,10 @@
     # Write all source elements to the given directory
     def _write_element_sources(self, directory, elements):
         for element in elements:
-            source_dir = os.path.join(directory, "source")
-            element_source_dir = os.path.join(source_dir, element.normal_name)
-            os.makedirs(element_source_dir)
-
-            element._stage_sources_at(element_source_dir)
+            element_source_dir = self._get_element_dirname(directory, element)
+            if list(element.sources()):
+                os.makedirs(element_source_dir)
+                element._stage_sources_at(element_source_dir)
 
     # Write a master build script to the sandbox
     def _write_build_script(self, directory, elements):

@@ -1122,3 +1170,25 @@
 
         with tarfile.open(tar_name, permissions) as tar:
             tar.add(directory, arcname=element_name)
+
+    # _get_element_dirname()
+    #
+    # Get path to directory for an element based on its normal name.
+    #
+    # For cross-junction elements, the path will be prefixed with the name
+    # of the junction element.
+    #
+    # Args:
+    #    directory (str): path to base directory
+    #    element (Element): the element
+    #
+    # Returns:
+    #    (str): Path to directory for this element
+    #
+    def _get_element_dirname(self, directory, element):
+        parts = [element.normal_name]
+        while element._get_project() != self._project:
+            element = element._get_project().junction
+            parts.append(element.normal_name)
+
+        return os.path.join(directory, *reversed(parts))
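To illustrate _get_element_dirname() above: for a hypothetical element app.bst reached through a junction gnome.bst, the while loop climbs from the element's project up through each junction, so `parts` accumulates ['app', 'gnome'] and the reversed join yields the junction-prefixed path:

    import os

    # Illustrative only; 'app' and 'gnome' are hypothetical normal names
    parts = ['app', 'gnome']
    print(os.path.join('/tmp/sources', *reversed(parts)))
    # -> /tmp/sources/gnome/app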
buildstream/element.py:

@@ -85,7 +85,8 @@ import shutil
 from . import _yaml
 from ._variables import Variables
 from ._versions import BST_CORE_ARTIFACT_VERSION
-from ._exceptions import BstError, LoadError, LoadErrorReason, ImplError, ErrorDomain
+from ._exceptions import BstError, LoadError, LoadErrorReason, ImplError, \
+    ErrorDomain
 from .utils import UtilError
 from . import Plugin, Consistency, Scope
 from . import SandboxFlags

@@ -1553,7 +1554,6 @@ class Element(Plugin):
             self.__dynamic_public = _yaml.node_copy(self.__public)
 
             # Call the abstract plugin methods
-            collect = None
             try:
                 # Step 1 - Configure
                 self.configure_sandbox(sandbox)

@@ -1564,7 +1564,7 @@
                 # Step 4 - Assemble
                 collect = self.assemble(sandbox)  # pylint: disable=assignment-from-no-return
                 self.__set_build_result(success=True, description="succeeded")
-            except BstError as e:
+            except ElementError as e:
                 # Shelling into a sandbox is useful to debug this error
                 e.sandbox = True
 

@@ -1586,104 +1586,105 @@
                         self.warn("Failed to preserve workspace state for failed build sysroot: {}"
                                   .format(e))
 
-                if isinstance(e, ElementError):
-                    collect = e.collect  # pylint: disable=no-member
-
                 self.__set_build_result(success=False, description=str(e), detail=e.detail)
+                self._cache_artifact(rootdir, sandbox, e.collect)
+
                 raise
+            else:
+                return self._cache_artifact(rootdir, sandbox, collect)
             finally:
-                if collect is not None:
-                    try:
-                        sandbox_vroot = sandbox.get_virtual_directory()
-                        collectvdir = sandbox_vroot.descend(collect.lstrip(os.sep).split(os.sep))
-                    except VirtualDirectoryError:
-                        # No collect directory existed
-                        collectvdir = None
-
-                # Create artifact directory structure
-                assembledir = os.path.join(rootdir, 'artifact')
-                filesdir = os.path.join(assembledir, 'files')
-                logsdir = os.path.join(assembledir, 'logs')
-                metadir = os.path.join(assembledir, 'meta')
-                buildtreedir = os.path.join(assembledir, 'buildtree')
-                os.mkdir(assembledir)
-                if collect is not None and collectvdir is not None:
-                    os.mkdir(filesdir)
-                os.mkdir(logsdir)
-                os.mkdir(metadir)
-                os.mkdir(buildtreedir)
-
-                # Hard link files from collect dir to files directory
-                if collect is not None and collectvdir is not None:
-                    collectvdir.export_files(filesdir, can_link=True)
-
-                try:
-                    sandbox_vroot = sandbox.get_virtual_directory()
-                    sandbox_build_dir = sandbox_vroot.descend(
-                        self.get_variable('build-root').lstrip(os.sep).split(os.sep))
-                    # Hard link files from build-root dir to buildtreedir directory
-                    sandbox_build_dir.export_files(buildtreedir)
-                except VirtualDirectoryError:
-                    # Directory could not be found. Pre-virtual
-                    # directory behaviour was to continue silently
-                    # if the directory could not be found.
-                    pass
-
-                # Copy build log
-                log_filename = context.get_log_filename()
-                self._build_log_path = os.path.join(logsdir, 'build.log')
-                if log_filename:
-                    shutil.copyfile(log_filename, self._build_log_path)
-
-                # Store public data
-                _yaml.dump(_yaml.node_sanitize(self.__dynamic_public), os.path.join(metadir, 'public.yaml'))
-
-                # Store result
-                build_result_dict = {"success": self.__build_result[0], "description": self.__build_result[1]}
-                if self.__build_result[2] is not None:
-                    build_result_dict["detail"] = self.__build_result[2]
-                _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
-
-                # ensure we have cache keys
-                self._assemble_done()
-
-                # Store keys.yaml
-                _yaml.dump(_yaml.node_sanitize({
-                    'strong': self._get_cache_key(),
-                    'weak': self._get_cache_key(_KeyStrength.WEAK),
-                }), os.path.join(metadir, 'keys.yaml'))
-
-                # Store dependencies.yaml
-                _yaml.dump(_yaml.node_sanitize({
-                    e.name: e._get_cache_key() for e in self.dependencies(Scope.BUILD)
-                }), os.path.join(metadir, 'dependencies.yaml'))
-
-                # Store workspaced.yaml
-                _yaml.dump(_yaml.node_sanitize({
-                    'workspaced': True if self._get_workspace() else False
-                }), os.path.join(metadir, 'workspaced.yaml'))
-
-                # Store workspaced-dependencies.yaml
-                _yaml.dump(_yaml.node_sanitize({
-                    'workspaced-dependencies': [
-                        e.name for e in self.dependencies(Scope.BUILD)
-                        if e._get_workspace()
-                    ]
-                }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
-
-                with self.timed_activity("Caching artifact"):
-                    artifact_size = utils._get_dir_size(assembledir)
-                    self.__artifacts.commit(self, assembledir, self.__get_cache_keys_for_commit())
-
-                if collect is not None and collectvdir is None:
-                    raise ElementError(
-                        "Directory '{}' was not found inside the sandbox, "
-                        "unable to collect artifact contents"
-                        .format(collect))
-
-                # Finally cleanup the build dir
                 cleanup_rootdir()
 
+    def _cache_artifact(self, rootdir, sandbox, collect):
+        if collect is not None:
+            try:
+                sandbox_vroot = sandbox.get_virtual_directory()
+                collectvdir = sandbox_vroot.descend(collect.lstrip(os.sep).split(os.sep))
+            except VirtualDirectoryError:
+                # No collect directory existed
+                collectvdir = None
+
+        # Create artifact directory structure
+        assembledir = os.path.join(rootdir, 'artifact')
+        filesdir = os.path.join(assembledir, 'files')
+        logsdir = os.path.join(assembledir, 'logs')
+        metadir = os.path.join(assembledir, 'meta')
+        buildtreedir = os.path.join(assembledir, 'buildtree')
+        os.mkdir(assembledir)
+        if collect is not None and collectvdir is not None:
+            os.mkdir(filesdir)
+        os.mkdir(logsdir)
+        os.mkdir(metadir)
+        os.mkdir(buildtreedir)
+
+        # Hard link files from collect dir to files directory
+        if collect is not None and collectvdir is not None:
+            collectvdir.export_files(filesdir, can_link=True)
+
+        try:
+            sandbox_vroot = sandbox.get_virtual_directory()
+            sandbox_build_dir = sandbox_vroot.descend(
+                self.get_variable('build-root').lstrip(os.sep).split(os.sep))
+            # Hard link files from build-root dir to buildtreedir directory
+            sandbox_build_dir.export_files(buildtreedir)
+        except VirtualDirectoryError:
+            # Directory could not be found. Pre-virtual
+            # directory behaviour was to continue silently
+            # if the directory could not be found.
+            pass
+
+        # Copy build log
+        log_filename = self._get_context().get_log_filename()
+        self._build_log_path = os.path.join(logsdir, 'build.log')
+        if log_filename:
+            shutil.copyfile(log_filename, self._build_log_path)
+
+        # Store public data
+        _yaml.dump(_yaml.node_sanitize(self.__dynamic_public), os.path.join(metadir, 'public.yaml'))
+
+        # Store result
+        build_result_dict = {"success": self.__build_result[0], "description": self.__build_result[1]}
+        if self.__build_result[2] is not None:
+            build_result_dict["detail"] = self.__build_result[2]
+        _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
+
+        # ensure we have cache keys
+        self._assemble_done()
+
+        # Store keys.yaml
+        _yaml.dump(_yaml.node_sanitize({
+            'strong': self._get_cache_key(),
+            'weak': self._get_cache_key(_KeyStrength.WEAK),
+        }), os.path.join(metadir, 'keys.yaml'))
+
+        # Store dependencies.yaml
+        _yaml.dump(_yaml.node_sanitize({
+            e.name: e._get_cache_key() for e in self.dependencies(Scope.BUILD)
+        }), os.path.join(metadir, 'dependencies.yaml'))
+
+        # Store workspaced.yaml
+        _yaml.dump(_yaml.node_sanitize({
+            'workspaced': True if self._get_workspace() else False
+        }), os.path.join(metadir, 'workspaced.yaml'))
+
+        # Store workspaced-dependencies.yaml
+        _yaml.dump(_yaml.node_sanitize({
+            'workspaced-dependencies': [
+                e.name for e in self.dependencies(Scope.BUILD)
+                if e._get_workspace()
+            ]
+        }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
+
+        with self.timed_activity("Caching artifact"):
+            artifact_size = utils._get_dir_size(assembledir)
+            self.__artifacts.commit(self, assembledir, self.__get_cache_keys_for_commit())
+
+        if collect is not None and collectvdir is None:
+            raise ElementError(
+                "Directory '{}' was not found inside the sandbox, "
+                "unable to collect artifact contents"
+                .format(collect))
+
         return artifact_size
 
     def _get_build_log(self):
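Two things happen in the element.py hunks above. First, narrowing `except BstError` to `except ElementError` is the behavioural fix this branch is named for: failures that are not element build failures (such as a sandbox failing because host tools are unavailable) now propagate without any artifact being cached, while genuine build failures still cache a failed artifact. Second, the refactor into _cache_artifact() leans on try/except/else/finally semantics: the else branch runs only when assemble() did not raise, so the artifact is cached exactly once on either path, and cleanup_rootdir() always runs. A minimal sketch of that control flow, with stand-in names:

    def assemble_sketch(build, cache, cleanup):
        try:
            collect = build()
        except ValueError:         # stands in for ElementError
            cache(None)            # failed builds are cached too
            raise
        else:
            return cache(collect)  # runs only if build() succeeded
        finally:
            cleanup()              # always runs, even on raise/return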
buildstream/plugins/sources/git.py:

@@ -86,7 +86,6 @@ This plugin also utilises the following configurable core plugin warnings:
 """
 
 import os
-import errno
 import re
 import shutil
 from collections.abc import Mapping

@@ -97,6 +96,7 @@ from configparser import RawConfigParser
 from buildstream import Source, SourceError, Consistency, SourceFetcher
 from buildstream import utils
 from buildstream.plugin import CoreWarnings
+from buildstream.utils import move_atomic, DirectoryExistsError
 
 GIT_MODULES = '.gitmodules'
 

@@ -141,21 +141,16 @@ class GitMirror(SourceFetcher):
                              fail="Failed to clone git repository {}".format(url),
                              fail_temporarily=True)
 
-            # Attempt atomic rename into destination, this will fail if
-            # another process beat us to the punch
             try:
-                os.rename(tmpdir, self.mirror)
+                move_atomic(tmpdir, self.mirror)
+            except DirectoryExistsError:
+                # Another process was quicker to download this repository.
+                # Let's discard our own
+                self.source.status("{}: Discarding duplicate clone of {}"
+                                   .format(self.source, url))
             except OSError as e:
-
-                # When renaming and the destination repo already exists, os.rename()
-                # will fail with ENOTEMPTY, since an empty directory will be silently
-                # replaced
-                if e.errno == errno.ENOTEMPTY:
-                    self.source.status("{}: Discarding duplicate clone of {}"
-                                       .format(self.source, url))
-                else:
-                    raise SourceError("{}: Failed to move cloned git repository {} from '{}' to '{}': {}"
-                                      .format(self.source, url, tmpdir, self.mirror, e)) from e
+                raise SourceError("{}: Failed to move cloned git repository {} from '{}' to '{}': {}"
+                                  .format(self.source, url, tmpdir, self.mirror, e)) from e
 
     def _fetch(self, alias_override=None):
         url = self.source.translate_url(self.url,
buildstream/plugins/sources/pip.py:

@@ -68,7 +68,6 @@ details on common configuration options for sources.
 The ``pip`` plugin is available since :ref:`format version 16 <project_format_version>`
 """
 
-import errno
 import hashlib
 import os
 import re

@@ -80,6 +79,7 @@ _PYPI_INDEX_URL = 'https://pypi.org/simple/'
 
 # Used only for finding pip command
 _PYTHON_VERSIONS = [
+    'python',  # when running in a venv, we might not have the exact version
     'python2.7',
     'python3.0',
     'python3.1',

@@ -192,13 +192,14 @@ class PipSource(Source):
         # process has fetched the sources before us and ensure that we do
        # not raise an error in that case.
         try:
-            os.makedirs(self._mirror)
-            os.rename(package_dir, self._mirror)
-        except FileExistsError:
-            return
+            utils.move_atomic(package_dir, self._mirror)
+        except utils.DirectoryExistsError:
+            # Another process has beaten us and has fetched the sources
+            # before us.
+            pass
         except OSError as e:
-            if e.errno != errno.ENOTEMPTY:
-                raise
+            raise SourceError("{}: Failed to move downloaded pip packages from '{}' to '{}': {}"
+                              .format(self, package_dir, self._mirror, e)) from e
 
     def stage(self, directory):
         with self.timed_activity("Staging Python packages", silent_nested=True):
buildstream/utils.py:

@@ -72,6 +72,11 @@ class ProgramNotFoundError(BstError):
         super().__init__(message, domain=ErrorDomain.PROG_NOT_FOUND, reason=reason)
 
 
+class DirectoryExistsError(OSError):
+    """Raised when a `os.rename` is attempted but the destination is an existing directory.
+    """
+
+
 class FileListResult():
     """An object which stores the result of one of the operations
     which run on a list of files.

@@ -500,6 +505,38 @@ def get_bst_version():
                     .format(__version__))
 
 
+def move_atomic(source, destination, ensure_parents=True):
+    """Move the source to the destination using atomic primitives.
+
+    This uses `os.rename` to move a file or directory to a new destination.
+    It wraps some `OSError` thrown errors to ensure their handling is correct.
+
+    The main reason for this to exist is that rename can throw different errors
+    for the same symptom (https://www.unix.com/man-page/POSIX/3posix/rename/).
+
+    We are especially interested here in the case when the destination already
+    exists. In this case, either EEXIST or ENOTEMPTY are thrown.
+
+    In order to ensure consistent handling of these exceptions, this function
+    should be used instead of `os.rename`
+
+    Args:
+      source (str or Path): source to rename
+      destination (str or Path): destination to which to move the source
+      ensure_parents (bool): Whether or not to create the parent's directories
+        of the destination (default: True)
+    """
+    if ensure_parents:
+        os.makedirs(os.path.dirname(str(destination)), exist_ok=True)
+
+    try:
+        os.rename(str(source), str(destination))
+    except OSError as exc:
+        if exc.errno in (errno.EEXIST, errno.ENOTEMPTY):
+            raise DirectoryExistsError(*exc.args) from exc
+        raise
+
+
 @contextmanager
 def save_file_atomic(filename, mode='w', *, buffering=-1, encoding=None,
                      errors=None, newline=None, closefd=True, opener=None, tempdir=None):
conftest.py:

@@ -56,6 +56,10 @@ def integration_cache(request):
         pass
 
 
-@pytest.fixture(autouse=True)
 def clean_platform_cache():
     Platform._instance = None
+
+
+@pytest.fixture(autouse=True)
+def ensure_platform_cache_is_clean():
+    clean_platform_cache()
tests/completions/completions.py:

@@ -15,6 +15,7 @@ MAIN_COMMANDS = [
     'push ',
     'shell ',
     'show ',
+    'source-checkout ',
     'source-bundle ',
     'track ',
     'workspace '
tests/frontend/buildtrack.py:

@@ -115,6 +115,7 @@ def test_build_track(cli, datafiles, tmpdir, ref_storage,
         args += ['0.bst']
 
     result = cli.run(project=project, silent=True, args=args)
+    result.assert_success()
     tracked_elements = result.get_tracked_elements()
 
     assert set(tracked_elements) == set(tracked)
|
| 1 |
+kind: import
|
|
| 2 |
+description: It is important for this element to have both build and runtime dependencies
|
|
| 3 |
+sources:
|
|
| 4 |
+- kind: local
|
|
| 5 |
+ path: files/etc-files
|
|
| 6 |
+depends:
|
|
| 7 |
+- filename: import-dev.bst
|
|
| 8 |
+ type: build
|
|
| 9 |
+- filename: import-bin.bst
|
|
| 10 |
+ type: runtime
|
| 1 |
+config
|
| 1 |
+import os
|
|
| 2 |
+import pytest
|
|
| 3 |
+ |
|
| 4 |
+from tests.testutils import cli
|
|
| 5 |
+ |
|
| 6 |
+from buildstream import utils, _yaml
|
|
| 7 |
+from buildstream._exceptions import ErrorDomain, LoadErrorReason
|
|
| 8 |
+ |
|
| 9 |
+# Project directory
|
|
| 10 |
+DATA_DIR = os.path.join(
|
|
| 11 |
+ os.path.dirname(os.path.realpath(__file__)),
|
|
| 12 |
+ 'project',
|
|
| 13 |
+)
|
|
| 14 |
+ |
|
| 15 |
+ |
|
| 16 |
+def generate_remote_import_element(input_path, output_path):
|
|
| 17 |
+ return {
|
|
| 18 |
+ 'kind': 'import',
|
|
| 19 |
+ 'sources': [
|
|
| 20 |
+ {
|
|
| 21 |
+ 'kind': 'remote',
|
|
| 22 |
+ 'url': 'file://{}'.format(input_path),
|
|
| 23 |
+ 'filename': output_path,
|
|
| 24 |
+ 'ref': utils.sha256sum(input_path),
|
|
| 25 |
+ }
|
|
| 26 |
+ ]
|
|
| 27 |
+ }
|
|
| 28 |
+ |
|
| 29 |
+ |
|
| 30 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 31 |
+def test_source_checkout(datafiles, cli):
|
|
| 32 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 33 |
+ checkout = os.path.join(cli.directory, 'source-checkout')
|
|
| 34 |
+ target = 'checkout-deps.bst'
|
|
| 35 |
+ |
|
| 36 |
+ result = cli.run(project=project, args=['source-checkout', target, '--deps', 'none', checkout])
|
|
| 37 |
+ result.assert_success()
|
|
| 38 |
+ |
|
| 39 |
+ assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
|
|
| 40 |
+ |
|
| 41 |
+ |
|
| 42 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 43 |
+@pytest.mark.parametrize('deps', [('build'), ('none'), ('run'), ('all')])
|
|
| 44 |
+def test_source_checkout_deps(datafiles, cli, deps):
|
|
| 45 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 46 |
+ checkout = os.path.join(cli.directory, 'source-checkout')
|
|
| 47 |
+ target = 'checkout-deps.bst'
|
|
| 48 |
+ |
|
| 49 |
+ result = cli.run(project=project, args=['source-checkout', target, '--deps', deps, checkout])
|
|
| 50 |
+ result.assert_success()
|
|
| 51 |
+ |
|
| 52 |
+ # Sources of the target
|
|
| 53 |
+ if deps == 'build':
|
|
| 54 |
+ assert not os.path.exists(os.path.join(checkout, 'checkout-deps'))
|
|
| 55 |
+ else:
|
|
| 56 |
+ assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
|
|
| 57 |
+ |
|
| 58 |
+ # Sources of the target's build dependencies
|
|
| 59 |
+ if deps in ('build', 'all'):
|
|
| 60 |
+ assert os.path.exists(os.path.join(checkout, 'import-dev', 'usr', 'include', 'pony.h'))
|
|
| 61 |
+ else:
|
|
| 62 |
+ assert not os.path.exists(os.path.join(checkout, 'import-dev'))
|
|
| 63 |
+ |
|
| 64 |
+ # Sources of the target's runtime dependencies
|
|
| 65 |
+ if deps in ('run', 'all'):
|
|
| 66 |
+ assert os.path.exists(os.path.join(checkout, 'import-bin', 'usr', 'bin', 'hello'))
|
|
| 67 |
+ else:
|
|
| 68 |
+ assert not os.path.exists(os.path.join(checkout, 'import-bin'))
|
|
| 69 |
+ |
|
| 70 |
+ |
|
| 71 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 72 |
+def test_source_checkout_except(datafiles, cli):
|
|
| 73 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 74 |
+ checkout = os.path.join(cli.directory, 'source-checkout')
|
|
| 75 |
+ target = 'checkout-deps.bst'
|
|
| 76 |
+ |
|
| 77 |
+ result = cli.run(project=project, args=['source-checkout', target,
|
|
| 78 |
+ '--deps', 'all',
|
|
| 79 |
+ '--except', 'import-bin.bst',
|
|
| 80 |
+ checkout])
|
|
| 81 |
+ result.assert_success()
|
|
| 82 |
+ |
|
| 83 |
+ # Sources for the target should be present
|
|
| 84 |
+ assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
|
|
| 85 |
+ |
|
| 86 |
+ # Sources for import-bin.bst should not be present
|
|
| 87 |
+ assert not os.path.exists(os.path.join(checkout, 'import-bin'))
|
|
| 88 |
+ |
|
| 89 |
+ # Sources for other dependencies should be present
|
|
| 90 |
+ assert os.path.exists(os.path.join(checkout, 'import-dev', 'usr', 'include', 'pony.h'))
|
|
| 91 |
+ |
|
| 92 |
+ |
|
| 93 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 94 |
+@pytest.mark.parametrize('fetch', [(False), (True)])
|
|
| 95 |
+def test_source_checkout_fetch(datafiles, cli, fetch):
|
|
| 96 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 97 |
+ checkout = os.path.join(cli.directory, 'source-checkout')
|
|
| 98 |
+ target = 'remote-import-dev.bst'
|
|
| 99 |
+ target_path = os.path.join(project, 'elements', target)
|
|
| 100 |
+ |
|
| 101 |
+ # Create an element with remote source
|
|
| 102 |
+ element = generate_remote_import_element(
|
|
| 103 |
+ os.path.join(project, 'files', 'dev-files', 'usr', 'include', 'pony.h'),
|
|
| 104 |
+ 'pony.h')
|
|
| 105 |
+ _yaml.dump(element, target_path)
|
|
| 106 |
+ |
|
| 107 |
+ # Testing --fetch option requires that we do not have the sources
|
|
| 108 |
+ # cached already
|
|
| 109 |
+ assert cli.get_element_state(project, target) == 'fetch needed'
|
|
| 110 |
+ |
|
| 111 |
+ args = ['source-checkout']
|
|
| 112 |
+ if fetch:
|
|
| 113 |
+ args += ['--fetch']
|
|
| 114 |
+ args += [target, checkout]
|
|
| 115 |
+ result = cli.run(project=project, args=args)
|
|
| 116 |
+ |
|
| 117 |
+ if fetch:
|
|
| 118 |
+ result.assert_success()
|
|
| 119 |
+ assert os.path.exists(os.path.join(checkout, 'remote-import-dev', 'pony.h'))
|
|
| 120 |
+ else:
|
|
| 121 |
+ result.assert_main_error(ErrorDomain.PIPELINE, 'uncached-sources')
|
tests/integration/cachedfail.py:

@@ -4,6 +4,8 @@ import pytest
 from buildstream import _yaml
 from buildstream._exceptions import ErrorDomain
 
+from conftest import clean_platform_cache
+
 from tests.testutils import cli_integration as cli, create_artifact_share
 from tests.testutils.site import IS_LINUX
 

@@ -158,3 +160,40 @@ def test_push_cached_fail(cli, tmpdir, datafiles, on_error):
     assert cli.get_element_state(project, 'element.bst') == 'failed'
     # This element should have been pushed to the remote
     assert share.has_artifact('test', 'element.bst', cli.get_element_key(project, 'element.bst'))
+
+
+@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
+@pytest.mark.datafiles(DATA_DIR)
+def test_host_tools_errors_are_not_cached(cli, tmpdir, datafiles):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    element_path = os.path.join(project, 'elements', 'element.bst')
+
+    # Write out our test target
+    element = {
+        'kind': 'script',
+        'depends': [
+            {
+                'filename': 'base.bst',
+                'type': 'build',
+            },
+        ],
+        'config': {
+            'commands': [
+                'true',
+            ],
+        },
+    }
+    _yaml.dump(element, element_path)
+
+    # Build without access to host tools, this will fail
+    result1 = cli.run(project=project, args=['build', 'element.bst'], env={'PATH': ''})
+    result1.assert_task_error(ErrorDomain.SANDBOX, 'unavailable-local-sandbox')
+    assert cli.get_element_state(project, 'element.bst') == 'buildable'
+
+    # clean the cache before running again
+    clean_platform_cache()
+
+    # When rebuilding, this should work
+    result2 = cli.run(project=project, args=['build', 'element.bst'])
+    result2.assert_success()
+    assert cli.get_element_state(project, 'element.bst') == 'cached'
tests/utils/movedirectory.py (new file):

+import pytest
+
+from buildstream.utils import move_atomic, DirectoryExistsError
+
+
+@pytest.fixture
+def src(tmp_path):
+    src = tmp_path.joinpath("src")
+    src.mkdir()
+
+    with src.joinpath("test").open("w") as fp:
+        fp.write("test")
+
+    return src
+
+
+def test_move_to_empty_dir(src, tmp_path):
+    dst = tmp_path.joinpath("dst")
+
+    move_atomic(src, dst)
+
+    assert dst.joinpath("test").exists()
+
+
+def test_move_to_empty_dir_create_parents(src, tmp_path):
+    dst = tmp_path.joinpath("nested/dst")
+
+    move_atomic(src, dst)
+    assert dst.joinpath("test").exists()
+
+
+def test_move_to_empty_dir_no_create_parents(src, tmp_path):
+    dst = tmp_path.joinpath("nested/dst")
+
+    with pytest.raises(FileNotFoundError):
+        move_atomic(src, dst, ensure_parents=False)
+
+
+def test_move_non_existing_dir(tmp_path):
+    dst = tmp_path.joinpath("dst")
+    src = tmp_path.joinpath("src")
+
+    with pytest.raises(FileNotFoundError):
+        move_atomic(src, dst)
+
+
+def test_move_to_existing_empty_dir(src, tmp_path):
+    dst = tmp_path.joinpath("dst")
+    dst.mkdir()
+
+    move_atomic(src, dst)
+    assert dst.joinpath("test").exists()
+
+
+def test_move_to_existing_file(src, tmp_path):
+    dst = tmp_path.joinpath("dst")
+
+    with dst.open("w") as fp:
+        fp.write("error")
+
+    with pytest.raises(NotADirectoryError):
+        move_atomic(src, dst)
+
+
+def test_move_file_to_existing_file(tmp_path):
+    dst = tmp_path.joinpath("dst")
+    src = tmp_path.joinpath("src")
+
+    with src.open("w") as fp:
+        fp.write("src")
+
+    with dst.open("w") as fp:
+        fp.write("dst")
+
+    move_atomic(src, dst)
+    with dst.open() as fp:
+        assert fp.read() == "src"
+
+
+def test_move_to_existing_non_empty_dir(src, tmp_path):
+    dst = tmp_path.joinpath("dst")
+    dst.mkdir()
+
+    with dst.joinpath("existing").open("w") as fp:
+        fp.write("already there")
+
+    with pytest.raises(DirectoryExistsError):
+        move_atomic(src, dst)