Benjamin Schubert pushed to branch bschubert/pipeline-bak at BuildStream / buildstream
Commits:

- fc3dcec8 by Angelos Evripiotis at 2019-01-11T16:45:00Z
- f86b7ff3 by Angelos Evripiotis at 2019-01-11T16:45:00Z
- d983f231 by Angelos Evripiotis at 2019-01-11T17:19:06Z
- 829a2b93 by Tristan Van Berkom at 2019-01-12T21:32:13Z
- 993e30ae by Tristan Van Berkom at 2019-01-12T22:02:55Z
- 32732e01 by Javier Jardón at 2019-01-14T09:04:01Z
- 4f5f1184 by Valentin David at 2019-01-14T09:40:56Z
- bb80a2b8 by Chandan Singh at 2019-01-14T14:30:15Z
- 10b3ee62 by Chandan Singh at 2019-01-14T14:57:08Z
- 542cdaf0 by Jürg Billeter at 2019-01-14T18:46:57Z
- ff666e76 by James Ennis at 2019-01-14T18:46:57Z
- 273b0f55 by Tristan Van Berkom at 2019-01-14T20:24:57Z
- 2e3c2ea2 by Javier Jardón at 2019-01-14T22:26:04Z
- d60d2e31 by Javier Jardón at 2019-01-15T00:39:22Z
- 605836c1 by Chandan Singh at 2019-01-15T00:57:05Z
- 006370af by Chandan Singh at 2019-01-15T01:21:33Z
- 2d82468a by Benjamin Schubert at 2019-01-15T11:37:14Z
- df8c4324 by Benjamin Schubert at 2019-01-15T11:37:15Z

12 changed files:
- .gitlab-ci.yml
- CONTRIBUTING.rst
- buildstream/_frontend/cli.py
- buildstream/_pipeline.py
- buildstream/_scheduler/queues/buildqueue.py
- buildstream/_scheduler/queues/fetchqueue.py
- buildstream/_scheduler/queues/pullqueue.py
- buildstream/_scheduler/queues/queue.py
- buildstream/_stream.py
- buildstream/element.py
- requirements/dev-requirements.txt
- tests/completions/completions.py
Changes:

.gitlab-ci.yml:

@@ -61,7 +61,7 @@ tests-ubuntu-18.04:
   <<: *tests

 overnight-fedora-28-aarch64:
-  image: buildstream/testsuite-fedora:aarch64-28-06bab030-32a101f6
+  image: buildstream/testsuite-fedora:aarch64-28-5da27168-32c47d1c
   tags:
   - aarch64
   <<: *tests
@@ -70,6 +70,12 @@ overnight-fedora-28-aarch64:
   except: []
   only:
   - schedules
+  before_script:
+    # grpcio needs to be compiled from source on aarch64 so we additionally
+    # need a C++ compiler here.
+    # FIXME: Ideally this would be provided by the base image. This will be
+    # unblocked by https://gitlab.com/BuildStream/buildstream-docker-images/issues/34
+    - dnf install -y gcc-c++

 tests-unix:
   # Use fedora here, to a) run a test on fedora and b) ensure that we
@@ -90,7 +96,6 @@ tests-unix:
   # Since the unix platform is required to run as root, no user change required
   - ${TEST_COMMAND}

-
 tests-fedora-missing-deps:
   # Ensure that tests behave nicely while missing bwrap and ostree
   image: buildstream/testsuite-fedora:28-5da27168-32c47d1c
@@ -108,6 +113,22 @@ tests-fedora-missing-deps:

   - ${TEST_COMMAND}

+tests-fedora-update-deps:
+  # Check if the tests pass after updating requirements to their latest
+  # allowed version.
+  allow_failure: true
+  image: buildstream/testsuite-fedora:28-5da27168-32c47d1c
+  <<: *tests
+
+  script:
+    - useradd -Um buildstream
+    - chown -R buildstream:buildstream .
+
+    - make --always-make --directory requirements
+    - cat requirements/*.txt
+
+    - su buildstream -c "${TEST_COMMAND}"
+
 # Lint separately from testing
 lint:
   stage: test
@@ -140,8 +161,8 @@ docs:
   stage: test
   variables:
     BST_EXT_URL: git+https://gitlab.com/BuildStream/bst-external.git
-    BST_EXT_REF: 573843768f4d297f85dc3067465b3c7519a8dcc3 # 0.7.0
-    FD_SDK_REF: 612f66e218445eee2b1a9d7dd27c9caba571612e # freedesktop-sdk-18.08.19-54-g612f66e2
+    BST_EXT_REF: 0.9.0-0-g63a19e8068bd777bd9cd59b1a9442f9749ea5a85
+    FD_SDK_REF: freedesktop-sdk-18.08.25-0-g250939d465d6dd7768a215f1fa59c4a3412fc337
   before_script:
   - |
     mkdir -p "${HOME}/.config"

CONTRIBUTING.rst:

@@ -1534,6 +1534,10 @@ You can always abort on the first failure by running::

   tox -- -x

+Similarly, you may also be interested in the ``--last-failed`` and
+``--failed-first`` options as per the
+`pytest cache <https://docs.pytest.org/en/latest/cache.html>`_ documentation.
+
 If you want to run a specific test or a group of tests, you
 can specify a prefix to match. E.g. if you want to run all of
 the frontend tests you can do::
@@ -1545,6 +1549,12 @@ If you wanted to run the test_build_track test within frontend/buildtrack.py you

   tox -- tests/frontend/buildtrack.py::test_build_track

+When running only a few tests, you may find the coverage and timing output
+excessive, there are options to trim them. Note that coverage step will fail.
+Here is an example::
+
+  tox -- --no-cov --durations=1 tests/frontend/buildtrack.py::test_build_track
+
 We also have a set of slow integration tests that are disabled by
 default - you will notice most of them marked with SKIP in the pytest
 output. To run them, you can use::

buildstream/_frontend/cli.py:

@@ -2,6 +2,7 @@ import os
 import sys
 from contextlib import ExitStack
 from fnmatch import fnmatch
+from functools import partial
 from tempfile import TemporaryDirectory

 import click
@@ -111,14 +112,25 @@ def complete_target(args, incomplete):
     return complete_list


-def complete_artifact(args, incomplete):
+def complete_artifact(orig_args, args, incomplete):
     from .._context import Context
     ctx = Context()

     config = None
-    for i, arg in enumerate(args):
-        if arg in ('-c', '--config'):
-            config = args[i + 1]
+    if orig_args:
+        for i, arg in enumerate(orig_args):
+            if arg in ('-c', '--config'):
+                try:
+                    config = orig_args[i + 1]
+                except IndexError:
+                    pass
+    if args:
+        for i, arg in enumerate(args):
+            if arg in ('-c', '--config'):
+                try:
+                    config = args[i + 1]
+                except IndexError:
+                    pass
     ctx.load(config)

     # element targets are valid artifact names
@@ -128,8 +140,9 @@ def complete_artifact(args, incomplete):
     return complete_list


-def override_completions(cmd, cmd_param, args, incomplete):
+def override_completions(orig_args, cmd, cmd_param, args, incomplete):
     """
+    :param orig_args: original, non-completion args
     :param cmd_param: command definition
     :param args: full list of args typed before the incomplete arg
     :param incomplete: the incomplete text to autocomplete
@@ -150,7 +163,7 @@ def override_completions(cmd, cmd_param, args, incomplete):
             cmd_param.opts == ['--track-except']):
         return complete_target(args, incomplete)
     if cmd_param.name == 'artifacts':
-        return complete_artifact(args, incomplete)
+        return complete_artifact(orig_args, args, incomplete)

     raise CompleteUnhandled()

@@ -161,7 +174,7 @@ def override_main(self, args=None, prog_name=None, complete_var=None,
     # Hook for the Bash completion. This only activates if the Bash
     # completion is actually enabled, otherwise this is quite a fast
     # noop.
-    if main_bashcomplete(self, prog_name, override_completions):
+    if main_bashcomplete(self, prog_name, partial(override_completions, args)):

         # If we're running tests we cant just go calling exit()
         # from the main process.
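
For reference, the override_main() hunk above works because functools.partial pre-binds the original, non-completion argv as a new leading parameter, so the completion machinery can keep invoking the callback with its usual (cmd, cmd_param, args, incomplete) shape. A minimal, self-contained sketch of that pattern (the run_completion() helper and the argv values here are illustrative stand-ins, not BuildStream code):

    from functools import partial


    def override_completions(orig_args, cmd, cmd_param, args, incomplete):
        # orig_args was bound ahead of time; the remaining parameters keep the
        # signature that the completion machinery expects.
        return ["saw {} original args, completing {!r}".format(len(orig_args or []), incomplete)]


    def run_completion(callback):
        # Stand-in for the bash-completion hook, which only knows how to call
        # callback(cmd, cmd_param, args, incomplete).
        return callback(None, None, ["artifact", "log"], "tes")


    original_argv = ["-c", "custom.conf", "artifact", "log"]
    print(run_completion(partial(override_completions, original_argv)))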

buildstream/_pipeline.py:

@@ -136,9 +136,6 @@ class Pipeline():
             # Preflight
             element._preflight()

-            # Determine initial element state.
-            element._update_state()
-
     # dependencies()
     #
     # Generator function to iterate over elements and optionally

buildstream/_scheduler/queues/buildqueue.py:

@@ -71,9 +71,6 @@ class BuildQueue(Queue):
         return element._assemble()

     def status(self, element):
-        # state of dependencies may have changed, recalculate element state
-        element._update_state()
-
         if not element._is_required():
             # Artifact is not currently required but it may be requested later.
             # Keep it in the queue.

buildstream/_scheduler/queues/fetchqueue.py:

@@ -44,9 +44,6 @@ class FetchQueue(Queue):
         element._fetch()

     def status(self, element):
-        # state of dependencies may have changed, recalculate element state
-        element._update_state()
-
         if not element._is_required():
             # Artifact is not currently required but it may be requested later.
             # Keep it in the queue.
@@ -72,7 +69,7 @@ class FetchQueue(Queue):
         if status == JobStatus.FAIL:
             return

-        element._update_state()
-
+        element._fetch_done()
+
         # Successful fetch, we must be CACHED now
         assert element._get_consistency() == Consistency.CACHED
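
The new _fetch_done() hook in the second hunk replaces the queue's direct call to element._update_state(): the queue now only reports that the fetch job finished, and the element decides what bookkeeping that implies. A simplified sketch of that callback shape (these tiny stand-in classes are illustrative only, not the real Queue/Element API):

    class Element:
        def __init__(self, name):
            self.name = name
            self.fetched = False

        def _fetch_done(self):
            # The element owns its own state transition.
            self.fetched = True


    class FetchQueue:
        def done(self, element, success):
            # The queue reports completion; it no longer pokes element state directly.
            if not success:
                return
            element._fetch_done()


    queue = FetchQueue()
    element = Element("base.bst")
    queue.done(element, success=True)
    assert element.fetched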

buildstream/_scheduler/queues/pullqueue.py:

@@ -39,9 +39,6 @@ class PullQueue(Queue):
             raise SkipJob(self.action_name)

     def status(self, element):
-        # state of dependencies may have changed, recalculate element state
-        element._update_state()
-
         if not element._is_required():
             # Artifact is not currently required but it may be requested later.
             # Keep it in the queue.

buildstream/_scheduler/queues/queue.py:

@@ -170,9 +170,9 @@ class Queue():
         skip = [job for job in jobs if self.status(job.element) == QueueStatus.SKIP]
         wait = [job for job in jobs if job not in skip]

+        self.skipped_elements.extend([job.element for job in skip])
         self._wait_queue.extend(wait)
         self._done_queue.extend(skip)
-        self.skipped_elements.extend(skip)

     # dequeue()
     #

buildstream/_stream.py:

@@ -1018,17 +1018,6 @@ class Stream():

         _, status = self._scheduler.run(self.queues)

-        # Force update element states after a run, such that the summary
-        # is more coherent
-        try:
-            for element in self.total_elements:
-                element._update_state()
-        except BstError as e:
-            self._message(MessageType.ERROR, "Error resolving final state", detail=str(e))
-            set_last_task_error(e.domain, e.reason)
-        except Exception as e:  # pylint: disable=broad-except
-            self._message(MessageType.BUG, "Unhandled exception while resolving final state", detail=str(e))
-
         if status == SchedStatus.ERROR:
             raise StreamError()
         elif status == SchedStatus.TERMINATED:

buildstream/element.py:

@@ -197,6 +197,7 @@ class Element(Plugin):

         self.__runtime_dependencies = []          # Direct runtime dependency Elements
         self.__build_dependencies = []            # Direct build dependency Elements
+        self.__reverse_build_dependencies = []    # Direct reverse build dependency Elements
         self.__sources = []                       # List of Sources
         self.__weak_cache_key = None              # Our cached weak cache key
         self.__strict_cache_key = None            # Our cached cache key for strict builds
@@ -227,6 +228,8 @@ class Element(Plugin):
         self.__metadata_workspaced = {}                # Boolean of whether it's workspaced
         self.__metadata_workspaced_dependencies = {}   # List of which dependencies are workspaced

+        self.__is_workspaced = None
+
         # Ensure we have loaded this class's defaults
         self.__init_defaults(plugin_conf)

@@ -370,6 +373,13 @@ class Element(Plugin):
     #############################################################
     #                       Public Methods                      #
     #############################################################
+    @property
+    def is_workspaced(self):
+        if self.__is_workspaced is None:
+            self.__is_workspaced = self._get_workspace() is not None
+
+        return self.__is_workspaced
+
     def sources(self):
         """A generator function to enumerate the element sources

@@ -439,6 +449,27 @@ class Element(Plugin):
         if should_yield and (recurse or recursed) and scope != Scope.BUILD:
             yield self

+    def reverse_build_dependencies(self, recurse=False):
+        if not recurse:
+            yield from self.__reverse_build_dependencies
+            return
+
+        # visited = set()
+
+        def recurse_rdeps(element):
+            # if element in visited:
+            #     return
+
+            # visited.add(element)
+
+            yield element
+
+            for rdep in element.__reverse_build_dependencies:
+                yield from recurse_rdeps(rdep)
+
+        for rdep in self.__reverse_build_dependencies:
+            yield from recurse_rdeps(rdep)
+
     def search(self, scope, name):
         """Search for a dependency by name

@@ -930,6 +961,7 @@ class Element(Plugin):
         for meta_dep in meta.build_dependencies:
             dependency = Element._new_from_meta(meta_dep)
             element.__build_dependencies.append(dependency)
+            dependency.__reverse_build_dependencies.append(element)

         return element

@@ -1279,6 +1311,9 @@ class Element(Plugin):
         for source in self.sources():
             source._preflight()

+        # Determine initial element state.
+        self._update_state()
+
     # _schedule_tracking():
     #
     # Force an element state to be inconsistent. Any sources appear to be
@@ -1306,6 +1341,9 @@ class Element(Plugin):

         self._update_state()

+        for rdep in self.reverse_build_dependencies(recurse=True):
+            rdep._update_state()
+
     # _track():
     #
     # Calls track() on the Element sources
@@ -1446,6 +1484,7 @@ class Element(Plugin):
     # This unblocks pull/fetch/build.
     #
     def _set_required(self):
+        # FIXME: this should enqueue stuff in the queue, it should not be here by default
         if self.__required:
             # Already done
             return
@@ -1456,6 +1495,7 @@ class Element(Plugin):
         for dep in self.dependencies(Scope.RUN, recurse=False):
             dep._set_required()

+        # FIXME: this should not be done at all here
         self._update_state()

     # _is_required():
@@ -1499,10 +1539,16 @@ class Element(Plugin):

         self.__assemble_scheduled = False
         self.__assemble_done = True
-
+        # FIXME: only if workspaced
         self._update_state()

-        if self._get_workspace() and self._cached_success():
+        if self.is_workspaced:
+
+            # Update the state of all reverse dependencies
+            for reverse_dependency in self.reverse_build_dependencies(recurse=True):
+                reverse_dependency._update_state()
+
+        if self.is_workspaced and self._cached_success():
             assert utils._is_main_process(), \
                 "Attempted to save workspace configuration from child process"
         #
@@ -2035,6 +2081,9 @@ class Element(Plugin):
             source._fetch(previous_sources)
             previous_sources.append(source)

+    def _fetch_done(self):
+        self._update_state()
+
     #############################################################
     #                   Private Local Methods                   #
     #############################################################
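
Note that recurse_rdeps() in the reverse_build_dependencies() hunk keeps its visited-set guard commented out, so on a diamond-shaped graph the recursive walk yields the same reverse dependency once per path to it. A standalone sketch of the same walk with that guard enabled, using a plain illustrative Node class rather than the real Element (whose list lives in a name-mangled private attribute):

    class Node:
        def __init__(self, name):
            self.name = name
            self.reverse_build_dependencies = []

        def walk_reverse_build_dependencies(self):
            # Deduplicated depth-first walk over reverse build dependencies.
            visited = set()

            def recurse(node):
                for rdep in node.reverse_build_dependencies:
                    if rdep in visited:
                        continue
                    visited.add(rdep)
                    yield rdep
                    yield from recurse(rdep)

            yield from recurse(self)


    # Diamond: app depends on lib-a and lib-b, both of which depend on base.
    base, lib_a, lib_b, app = Node("base"), Node("lib-a"), Node("lib-b"), Node("app")
    base.reverse_build_dependencies += [lib_a, lib_b]
    lib_a.reverse_build_dependencies.append(app)
    lib_b.reverse_build_dependencies.append(app)

    print([n.name for n in base.walk_reverse_build_dependencies()])
    # ['lib-a', 'app', 'lib-b'] -- 'app' is reached via two paths but yielded once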

requirements/dev-requirements.txt:

@@ -2,7 +2,7 @@ coverage==4.4
 pylint==2.2.2
 pycodestyle==2.4.0
 pytest==4.0.2
-pytest-cov==2.6.0
+pytest-cov==2.6.1
 pytest-datafiles==2.0
 pytest-env==0.6.2
 pytest-xdist==1.25.0

tests/completions/completions.py:

@@ -281,3 +281,44 @@ def test_argument_element_invalid(datafiles, cli, project, cmd, word_idx, expect
 ])
 def test_help_commands(cli, cmd, word_idx, expected):
     assert_completion(cli, cmd, word_idx, expected)
+
+
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))
+def test_argument_artifact(cli, tmpdir, datafiles):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+
+    # Build an import element with no dependencies (as there will only be ONE cache key)
+    result = cli.run(project=project, args=['build', 'import-bin.bst'])  # Has no dependencies
+    result.assert_success()
+
+    # Get the key and the artifact ref ($project/$element_name/$key)
+    key = cli.get_element_key(project, 'import-bin.bst')
+    artifact = os.path.join('test', 'import-bin', key)
+
+    # Test autocompletion of the artifact
+    cmds = [
+        'bst artifact log ',
+        'bst artifact log t',
+        'bst artifact log test/'
+    ]
+
+    for i, cmd in enumerate(cmds):
+        word_idx = 3
+        result = cli.run(project=project, cwd=project, env={
+            '_BST_COMPLETION': 'complete',
+            'COMP_WORDS': cmd,
+            'COMP_CWORD': str(word_idx)
+        })
+        words = []
+        if result.output:
+            words = result.output.splitlines()  # This leaves an extra space on each e.g. ['foo.bst ']
+            words = [word.strip() for word in words]
+
+        if i == 0:
+            expected = PROJECT_ELEMENTS + [artifact]  # We should now be able to see the artifact
+        elif i == 1:
+            expected = ['target.bst', artifact]
+        elif i == 2:
+            expected = [artifact]
+
+        assert expected == words