Benjamin Schubert pushed to branch bschubert/pipeline at BuildStream / buildstream
Commits:
- f29a0995 by Tristan Van Berkom at 2019-01-09T19:51:14Z
- d364ad02 by Tristan Van Berkom at 2019-01-09T19:52:22Z
- 1e352434 by Tristan Van Berkom at 2019-01-09T20:36:06Z
- 116c1070 by Tristan Van Berkom at 2019-01-09T20:39:15Z
- c8f3616d by Tristan Van Berkom at 2019-01-09T21:12:29Z
- c87bb592 by Jürg Billeter at 2019-01-10T09:43:37Z
- 26e33346 by Jürg Billeter at 2019-01-10T12:50:15Z
- 06deb4c4 by Jürg Billeter at 2019-01-10T12:50:15Z
- 0c2a66b3 by Jürg Billeter at 2019-01-10T13:06:02Z
- f86bc760 by Jürg Billeter at 2019-01-10T13:34:56Z
- 347eb34e by Angelos Evripiotis at 2019-01-10T14:38:23Z
- 6bc27bc1 by Angelos Evripiotis at 2019-01-10T15:07:46Z
- 0b83d024 by Tristan Van Berkom at 2019-01-10T20:02:50Z
- 630e26f1 by Tristan Van Berkom at 2019-01-10T20:03:52Z
- d1d7de57 by Tristan Van Berkom at 2019-01-11T04:13:16Z
- 276b8d48 by Tristan Van Berkom at 2019-01-11T04:42:24Z
- 1e5e528d by Benjamin Schubert at 2019-01-11T08:26:31Z
- 3600fc49 by Benjamin Schubert at 2019-01-11T08:26:31Z
- d7efc733 by Benjamin Schubert at 2019-01-11T08:26:31Z
- 250c8073 by Benjamin Schubert at 2019-01-11T08:26:31Z
- 0839fbd2 by Benjamin Schubert at 2019-01-11T08:26:31Z
- 4c410435 by Benjamin Schubert at 2019-01-11T08:26:31Z
- a3a3d55c by Benjamin Schubert at 2019-01-11T08:26:31Z
- 4beac407 by Benjamin Schubert at 2019-01-11T08:26:31Z
- 0f95990f by Benjamin Schubert at 2019-01-11T08:26:31Z
20 changed files:
- .coveragerc
- .gitignore
- .gitlab-ci.yml
- CONTRIBUTING.rst
- buildstream/_options/optionarch.py
- buildstream/_pipeline.py
- buildstream/_platform/platform.py
- buildstream/_scheduler/queues/buildqueue.py
- buildstream/_scheduler/queues/fetchqueue.py
- buildstream/_scheduler/queues/pullqueue.py
- buildstream/_stream.py
- buildstream/element.py
- + tests/format/option-arch-alias/element.bst
- + tests/format/option-arch-alias/project.conf
- + tests/format/option-arch-unknown/element.bst
- + tests/format/option-arch-unknown/project.conf
- tests/format/optionarch.py
- + tests/frontend/order.py
- tests/testutils/runcli.py
- tox.ini
Changes:
.coveragerc:

@@ -4,11 +4,15 @@ include =
   */buildstream/*
 
 omit =
-  # Omit profiling helper module
+  # Omit some internals
   */buildstream/_profile.py
+  */buildstream/__main__.py
+  */buildstream/_version.py
   # Omit generated code
   */buildstream/_protos/*
   */.eggs/*
+  # Omit .tox directory
+  */.tox/*
 
 [report]
 show_missing = True
.gitignore:

@@ -13,11 +13,12 @@ tests/**/*.pyc
 integration-cache/
 tmp
 .coverage
+.coverage-reports/
 .coverage.*
 .cache
 .pytest_cache/
 *.bst/
-.tox
+.tox/
 
 # Pycache, in case buildstream is ran directly from within the source
 # tree
.gitlab-ci.yml:

@@ -13,6 +13,7 @@ variables:
   PYTEST_ADDOPTS: "--color=yes"
   INTEGRATION_CACHE: "${CI_PROJECT_DIR}/cache/integration-cache"
   TEST_COMMAND: "tox -- --color=yes --integration"
+  COVERAGE_PREFIX: "${CI_JOB_NAME}."
 
 
 #####################################################
@@ -24,9 +25,6 @@ variables:
 .tests-template: &tests
   stage: test
 
-  variables:
-    COVERAGE_DIR: coverage-linux
-
   before_script:
   # Diagnostics
   - mount
@@ -40,14 +38,11 @@ variables:
   - su buildstream -c "${TEST_COMMAND}"
 
   after_script:
-  # Collect our reports
-  - mkdir -p ${COVERAGE_DIR}
-  - cp .coverage ${COVERAGE_DIR}/coverage."${CI_JOB_NAME}"
   except:
   - schedules
   artifacts:
     paths:
-    - ${COVERAGE_DIR}
+    - .coverage-reports
 
 tests-debian-9:
   image: buildstream/testsuite-debian:9-5da27168-32c47d1c
@@ -83,7 +78,6 @@ tests-unix:
   <<: *tests
   variables:
     BST_FORCE_BACKEND: "unix"
-    COVERAGE_DIR: coverage-unix
 
   script:
 
@@ -239,22 +233,22 @@ coverage:
   stage: post
   coverage: '/TOTAL +\d+ +\d+ +(\d+\.\d+)%/'
   script:
-    - pip3 install -r requirements/requirements.txt -r requirements/dev-requirements.txt
-    - pip3 install --no-index .
-    - mkdir report
-    - cd report
-    - cp ../coverage-unix/coverage.* .
-    - cp ../coverage-linux/coverage.* .
-    - ls coverage.*
-    - coverage combine --rcfile=../.coveragerc -a coverage.*
-    - coverage report --rcfile=../.coveragerc -m
+    - cp -a .coverage-reports/ ./coverage-sources
+    - tox -e coverage
+    - cp -a .coverage-reports/ ./coverage-report
   dependencies:
   - tests-debian-9
   - tests-fedora-27
   - tests-fedora-28
+  - tests-fedora-missing-deps
+  - tests-ubuntu-18.04
   - tests-unix
   except:
   - schedules
+  artifacts:
+    paths:
+    - coverage-sources/
+    - coverage-report/
 
 # Deploy, only for merges which land on master branch.
 #
CONTRIBUTING.rst:

@@ -553,7 +553,7 @@ One problem which arises from this is that we end up having symbols
 which are *public* according to the :ref:`rules discussed in the previous section
 <contributing_public_and_private>`, but must be hidden away from the
 *"Public API Surface"*. For example, BuildStream internal classes need
-to invoke methods on the ``Element`` and ``Source`` classes, wheras these
+to invoke methods on the ``Element`` and ``Source`` classes, whereas these
 methods need to be hidden from the *"Public API Surface"*.
 
 This is where BuildStream deviates from the PEP-8 standard for public
@@ -631,7 +631,7 @@ An element plugin will derive from Element by importing::
 
   from buildstream import Element
 
-When importing utilities specifically, dont import function names
+When importing utilities specifically, don't import function names
 from there, instead import the module itself::
 
   from . import utils
@@ -737,7 +737,7 @@ Abstract methods
 ~~~~~~~~~~~~~~~~
 In BuildStream, an *"Abstract Method"* is a bit of a misnomer and does
 not match up to how Python defines abstract methods, we need to seek out
-a new nomanclature to refer to these methods.
+a new nomenclature to refer to these methods.
 
 In Python, an *"Abstract Method"* is a method which **must** be
 implemented by a subclass, whereas all methods in Python can be
@@ -960,7 +960,7 @@ possible, and avoid any cyclic relationships in modules.
 For instance, the ``Source`` objects are owned by ``Element``
 objects in the BuildStream data model, and as such the ``Element``
 will delegate some activities to the ``Source`` objects in its
-possesion. The ``Source`` objects should however never call functions
+possession. The ``Source`` objects should however never call functions
 on the ``Element`` object, nor should the ``Source`` object itself
 have any understanding of what an ``Element`` is.
 
@@ -1223,7 +1223,7 @@ For further information about using the reStructuredText with sphinx, please see
 Building Docs
 ~~~~~~~~~~~~~
 Before you can build the docs, you will end to ensure that you have installed
-the required :ref:`buid dependencies <contributing_build_deps>` as mentioned
+the required :ref:`build dependencies <contributing_build_deps>` as mentioned
 in the testing section above.
 
 To build the documentation, just run the following::
@@ -1365,7 +1365,7 @@ Structure of an example
 '''''''''''''''''''''''
 The :ref:`tutorial <tutorial>` and the :ref:`examples <examples>` sections
 of the documentation contain a series of sample projects, each chapter in
-the tutoral, or standalone example uses a sample project.
+the tutorial, or standalone example uses a sample project.
 
 Here is the the structure for adding new examples and tutorial chapters.
 
@@ -1471,8 +1471,8 @@ Installing build dependencies
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 Some of BuildStream's dependencies have non-python build dependencies. When
 running tests with ``tox``, you will first need to install these dependencies.
-Exact steps to install these will depend on your oprtation systemm. Commands
-for installing them for some common distributions are lised below.
+Exact steps to install these will depend on your operating system. Commands
+for installing them for some common distributions are listed below.
 
 For Fedora-based systems::
 
@@ -1498,6 +1498,13 @@ option when running tox::
 
   tox -e py37
 
+If you would like to test and lint at the same time, or if you do have multiple
+python versions installed and would like to test against multiple versions, then
+we recommend using `detox <https://github.com/tox-dev/detox>`_, just run it with
+the same arguments you would give `tox`::
+
+  detox -e lint,py36,py37
+
 Linting is performed separately from testing. In order to run the linting step which
 consists of running the ``pycodestyle`` and ``pylint`` tools, run the following::
 
@@ -1533,7 +1540,7 @@ the frontend tests you can do::
 
   tox -- tests/frontend/
 
-Specific tests can be chosen by using the :: delimeter after the test module.
+Specific tests can be chosen by using the :: delimiter after the test module.
 If you wanted to run the test_build_track test within frontend/buildtrack.py you could do::
 
   tox -- tests/frontend/buildtrack.py::test_build_track
@@ -1553,7 +1560,7 @@ can run ``tox`` with ``-r`` or ``--recreate`` option.
 .. note::
 
    By default, we do not allow use of site packages in our ``tox``
-   confguration to enable running the tests in an isolated environment.
+   configuration to enable running the tests in an isolated environment.
    If you need to enable use of site packages for whatever reason, you can
   do so by passing the ``--sitepackages`` option to ``tox``. Also, you will
   not need to install any of the build dependencies mentioned above if you
@@ -1574,10 +1581,23 @@ can run ``tox`` with ``-r`` or ``--recreate`` option.
   ./setup.py test --addopts 'tests/frontend/buildtrack.py::test_build_track'
 
 
+Observing coverage
+~~~~~~~~~~~~~~~~~~
+Once you have run the tests using `tox` (or `detox`), some coverage reports will
+have been left behind.
+
+To view the coverage report of the last test run, simply run::
+
+  tox -e coverage
+
+This will collate any reports from separate python environments that may be
+under test before displaying the combined coverage.
+
+
 Adding tests
 ~~~~~~~~~~~~
 Tests are found in the tests subdirectory, inside of which
-there is a separarate directory for each *domain* of tests.
+there is a separate directory for each *domain* of tests.
 All tests are collected as::
 
   tests/*/*.py
buildstream/_options/optionarch.py:

@@ -17,6 +17,8 @@
 # Authors:
 #        Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
 
+from .. import _yaml
+from .._exceptions import LoadError, LoadErrorReason, PlatformError
 from .._platform import Platform
 from .optionenum import OptionEnum
 
@@ -41,7 +43,34 @@ class OptionArch(OptionEnum):
         super(OptionArch, self).load(node, allow_default_definition=False)
 
     def load_default_value(self, node):
-        return Platform.get_host_arch()
+        arch = Platform.get_host_arch()
+
+        default_value = None
+
+        for index, value in enumerate(self.values):
+            try:
+                canonical_value = Platform.canonicalize_arch(value)
+                if default_value is None and canonical_value == arch:
+                    default_value = value
+                    # Do not terminate the loop early to ensure we validate
+                    # all values in the list.
+            except PlatformError as e:
+                provenance = _yaml.node_get_provenance(node, key='values', indices=[index])
+                prefix = ""
+                if provenance:
+                    prefix = "{}: ".format(provenance)
+                raise LoadError(LoadErrorReason.INVALID_DATA,
+                                "{}Invalid value for {} option '{}': {}"
+                                .format(prefix, self.OPTION_TYPE, self.name, e))
+
+        if default_value is None:
+            # Host architecture is not supported by the project.
+            # Do not raise an error here as the user may override it.
+            # If the user does not override it, an error will be raised
+            # by resolve()/validate().
+            default_value = arch
+
+        return default_value
 
     def resolve(self):
 
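Note: to make the new default resolution easier to follow, here is a self-contained sketch of the same logic with a stand-in canonicalizer; the real code uses Platform.canonicalize_arch() and raises LoadError with YAML provenance on invalid values, which is elided here.

# Sketch only: pick_default() and canonicalize are stand-ins,
# not the committed API.
def pick_default(host_arch, declared_values, canonicalize):
    default = None
    for value in declared_values:
        # Every declared value is canonicalized (so all of them get
        # validated); the first one matching the host becomes the default.
        if default is None and canonicalize(value) == host_arch:
            default = value
    # Unsupported host: fall back silently; resolve()/validate()
    # complains later unless the user overrides the option.
    return default if default is not None else host_arch

canonicalize = lambda v: {"arm": "aarch32", "x86_64": "x86-64"}[v]
print(pick_default("x86-64", ["arm", "x86_64"], canonicalize))  # x86_64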
buildstream/_pipeline.py:

@@ -22,6 +22,7 @@
 import os
 import itertools
 from operator import itemgetter
+from collections import OrderedDict
 
 from ._exceptions import PipelineError
 from ._message import Message, MessageType
@@ -135,9 +136,6 @@ class Pipeline():
             # Preflight
             element._preflight()
 
-            # Determine initial element state.
-            element._update_state()
-
     # dependencies()
     #
     # Generator function to iterate over elements and optionally
@@ -479,7 +477,7 @@ class Pipeline():
 #
 class _Planner():
     def __init__(self):
-        self.depth_map = {}
+        self.depth_map = OrderedDict()
         self.visiting_elements = set()
 
     # Here we want to traverse the same element more than once when
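Note: the OrderedDict switch matters because the planner's depth map is iterated to produce the plan; on Python 3.5 (the minimum version in tox.ini) a plain dict makes no ordering guarantee, so elements at equal depth could come out in arbitrary order. A minimal sketch of the effect, using hypothetical element names:

from collections import OrderedDict

# OrderedDict preserves insertion order on every supported Python
# version, so a stable sort by depth keeps discovery order for ties.
depth_map = OrderedDict()
for name, depth in [("1.bst", 0), ("0.bst", 1), ("2.bst", 1), ("3.bst", 2)]:
    depth_map[name] = depth

plan = sorted(depth_map.items(), key=lambda item: item[1])
print([name for name, _ in plan])  # ['1.bst', '0.bst', '2.bst', '3.bst']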
buildstream/_platform/platform.py:

@@ -77,20 +77,17 @@ class Platform():
     def get_host_os():
         return os.uname()[0]
 
-    # get_host_arch():
+    # canonicalize_arch():
     #
-    # This returns the architecture of the host machine. The possible values
-    # map from uname -m in order to be a OS independent list.
+    # This returns the canonical, OS-independent architecture name
+    # or raises a PlatformError if the architecture is unknown.
     #
-    # Returns:
-    #    (string): String representing the architecture
     @staticmethod
-    def get_host_arch():
-        # get the hardware identifier from uname
-        uname_machine = os.uname()[4]
-        uname_to_arch = {
+    def canonicalize_arch(arch):
+        aliases = {
+            "aarch32": "aarch32",
             "aarch64": "aarch64",
-            "aarch64_be": "aarch64-be",
+            "aarch64-be": "aarch64-be",
             "amd64": "x86-64",
             "arm": "aarch32",
             "armv8l": "aarch64",
@@ -99,17 +96,34 @@ class Platform():
             "i486": "x86-32",
             "i586": "x86-32",
             "i686": "x86-32",
+            "power-isa-be": "power-isa-be",
+            "power-isa-le": "power-isa-le",
             "ppc64": "power-isa-be",
             "ppc64le": "power-isa-le",
             "sparc": "sparc-v9",
             "sparc64": "sparc-v9",
-            "x86_64": "x86-64"
+            "sparc-v9": "sparc-v9",
+            "x86-32": "x86-32",
+            "x86-64": "x86-64"
         }
+
         try:
-            return uname_to_arch[uname_machine]
+            return aliases[arch.replace('_', '-')]
         except KeyError:
-            raise PlatformError("uname gave unsupported machine architecture: {}"
-                                .format(uname_machine))
+            raise PlatformError("Unknown architecture: {}".format(arch))
+
+    # get_host_arch():
+    #
+    # This returns the architecture of the host machine. The possible values
+    # map from uname -m in order to be a OS independent list.
+    #
+    # Returns:
+    #    (string): String representing the architecture
+    @staticmethod
+    def get_host_arch():
+        # get the hardware identifier from uname
+        uname_machine = os.uname()[4]
+        return Platform.canonicalize_arch(uname_machine)
 
     ##################################################################
     #                        Sandbox functions                       #
buildstream/_scheduler/queues/buildqueue.py:

@@ -71,9 +71,6 @@ class BuildQueue(Queue):
         return element._assemble()
 
     def status(self, element):
-        # state of dependencies may have changed, recalculate element state
-        element._update_state()
-
         if not element._is_required():
             # Artifact is not currently required but it may be requested later.
             # Keep it in the queue.
buildstream/_scheduler/queues/fetchqueue.py:

@@ -44,9 +44,6 @@ class FetchQueue(Queue):
         element._fetch()
 
     def status(self, element):
-        # state of dependencies may have changed, recalculate element state
-        element._update_state()
-
         if not element._is_required():
             # Artifact is not currently required but it may be requested later.
             # Keep it in the queue.
@@ -72,7 +69,7 @@ class FetchQueue(Queue):
         if status == JobStatus.FAIL:
             return
 
-        element._update_state()
-
+        element._fetch_done()
+
         # Successful fetch, we must be CACHED now
         assert element._get_consistency() == Consistency.CACHED
buildstream/_scheduler/queues/pullqueue.py:

@@ -39,9 +39,6 @@ class PullQueue(Queue):
             raise SkipJob(self.action_name)
 
     def status(self, element):
-        # state of dependencies may have changed, recalculate element state
-        element._update_state()
-
         if not element._is_required():
             # Artifact is not currently required but it may be requested later.
             # Keep it in the queue.
buildstream/_stream.py:

@@ -1018,17 +1018,6 @@ class Stream():
 
         _, status = self._scheduler.run(self.queues)
 
-        # Force update element states after a run, such that the summary
-        # is more coherent
-        try:
-            for element in self.total_elements:
-                element._update_state()
-        except BstError as e:
-            self._message(MessageType.ERROR, "Error resolving final state", detail=str(e))
-            set_last_task_error(e.domain, e.reason)
-        except Exception as e:  # pylint: disable=broad-except
-            self._message(MessageType.BUG, "Unhandled exception while resolving final state", detail=str(e))
-
         if status == SchedStatus.ERROR:
             raise StreamError()
         elif status == SchedStatus.TERMINATED:
buildstream/element.py:

@@ -197,6 +197,7 @@ class Element(Plugin):
 
         self.__runtime_dependencies = []        # Direct runtime dependency Elements
         self.__build_dependencies = []          # Direct build dependency Elements
+        self.__reverse_build_dependencies = []  # Direct reverse build dependency Elements
         self.__sources = []                     # List of Sources
         self.__weak_cache_key = None            # Our cached weak cache key
         self.__strict_cache_key = None          # Our cached cache key for strict builds
@@ -227,6 +228,8 @@ class Element(Plugin):
         self.__metadata_workspaced = {}         # Boolean of whether it's workspaced
         self.__metadata_workspaced_dependencies = {}  # List of which dependencies are workspaced
 
+        self.__is_workspaced = None
+
         # Ensure we have loaded this class's defaults
         self.__init_defaults(plugin_conf)
 
@@ -370,6 +373,13 @@ class Element(Plugin):
     #############################################################
     #                       Public Methods                      #
     #############################################################
+    @property
+    def is_workspaced(self):
+        if self.__is_workspaced is None:
+            self.__is_workspaced = self._get_workspace() is not None
+
+        return self.__is_workspaced
+
     def sources(self):
         """A generator function to enumerate the element sources
 
@@ -439,6 +449,27 @@ class Element(Plugin):
         if should_yield and (recurse or recursed) and scope != Scope.BUILD:
             yield self
 
+    def reverse_build_dependencies(self, recurse=False):
+        if not recurse:
+            yield from self.__reverse_build_dependencies
+            return
+
+        # visited = set()
+
+        def recurse_rdeps(element):
+            # if element in visited:
+            #     return
+
+            # visited.add(element)
+
+            yield element
+
+            for rdep in element.__reverse_build_dependencies:
+                yield from recurse_rdeps(rdep)
+
+        for rdep in self.__reverse_build_dependencies:
+            yield from recurse_rdeps(rdep)
+
     def search(self, scope, name):
         """Search for a dependency by name
 
@@ -930,6 +961,7 @@ class Element(Plugin):
         for meta_dep in meta.build_dependencies:
             dependency = Element._new_from_meta(meta_dep)
             element.__build_dependencies.append(dependency)
+            dependency.__reverse_build_dependencies.append(element)
 
         return element
 
@@ -1279,6 +1311,9 @@ class Element(Plugin):
         for source in self.sources():
             source._preflight()
 
+        # Determine initial element state.
+        self._update_state()
+
     # _schedule_tracking():
     #
     # Force an element state to be inconsistent. Any sources appear to be
@@ -1306,6 +1341,9 @@ class Element(Plugin):
 
         self._update_state()
 
+        for rdep in self.reverse_build_dependencies(recurse=True):
+            rdep._update_state()
+
     # _track():
     #
     # Calls track() on the Element sources
@@ -1446,6 +1484,7 @@ class Element(Plugin):
     # This unblocks pull/fetch/build.
     #
     def _set_required(self):
+        # FIXME: this should enqueue stuff in the queue, it should not be here by default
         if self.__required:
             # Already done
             return
@@ -1456,6 +1495,7 @@ class Element(Plugin):
         for dep in self.dependencies(Scope.RUN, recurse=False):
             dep._set_required()
 
+        # FIXME: this should not be done at all here
         self._update_state()
 
     # _is_required():
@@ -1499,10 +1539,16 @@ class Element(Plugin):
 
         self.__assemble_scheduled = False
         self.__assemble_done = True
-
+        # FIXME: only if workspaced
         self._update_state()
 
-        if self._get_workspace() and self._cached_success():
+        if self.is_workspaced:
+
+            # Update the state of all reverse dependencies
+            for reverse_dependency in self.reverse_build_dependencies(recurse=True):
+                reverse_dependency._update_state()
+
+        if self.is_workspaced and self._cached_success():
             assert utils._is_main_process(), \
                 "Attempted to save workspace configuration from child process"
             #
@@ -2035,6 +2081,9 @@ class Element(Plugin):
             source._fetch(previous_sources)
             previous_sources.append(source)
 
+    def _fetch_done(self):
+        self._update_state()
+
     #############################################################
     #                   Private Local Methods                   #
     #############################################################
@@ -2441,11 +2490,17 @@ class Element(Plugin):
         # Sandbox config, unlike others, has fixed members so we should validate them
         _yaml.node_validate(sandbox_config, ['build-uid', 'build-gid', 'build-os', 'build-arch'])
 
+        build_arch = self.node_get_member(sandbox_config, str, 'build-arch', default=None)
+        if build_arch:
+            build_arch = Platform.canonicalize_arch(build_arch)
+        else:
+            build_arch = host_arch
+
         return SandboxConfig(
             self.node_get_member(sandbox_config, int, 'build-uid'),
             self.node_get_member(sandbox_config, int, 'build-gid'),
             self.node_get_member(sandbox_config, str, 'build-os', default=host_os),
-            self.node_get_member(sandbox_config, str, 'build-arch', default=host_arch))
+            build_arch)
 
     # This makes a special exception for the split rules, which
     # elements may extend but whos defaults are defined in the project.
tests/format/option-arch-alias/element.bst (new file):

@@ -0,0 +1,8 @@
+kind: autotools
+variables:
+  result: "Nothing"
+  (?):
+  - machine_arch == "arm":
+      result: "Army"
+  - machine_arch == "x86_64":
+      result: "X86-64y"

tests/format/option-arch-alias/project.conf (new file):

@@ -0,0 +1,9 @@
+name: test
+
+options:
+  machine_arch:
+    type: arch
+    description: The machine architecture
+    values:
+    - arm
+    - x86_64

tests/format/option-arch-unknown/element.bst (new file):

@@ -0,0 +1,10 @@
+kind: autotools
+variables:
+  result: "Nothing"
+  (?):
+  - machine_arch == "aarch32":
+      result: "Army"
+  - machine_arch == "aarch64":
+      result: "Aarchy"
+  - machine_arch == "x86-128":
+      result: "X86-128y"

tests/format/option-arch-unknown/project.conf (new file):

@@ -0,0 +1,10 @@
+name: test
+
+options:
+  machine_arch:
+    type: arch
+    description: The machine architecture
+    values:
+    - aarch32
+    - aarch64
+    - x86-128
tests/format/optionarch.py:

@@ -75,3 +75,47 @@ def test_unsupported_arch(cli, datafiles):
     ])
 
     result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_alias(cli, datafiles):
+
+    with override_uname_arch("arm"):
+        project = os.path.join(datafiles.dirname, datafiles.basename, 'option-arch-alias')
+        result = cli.run(project=project, silent=True, args=[
+            'show',
+            '--deps', 'none',
+            '--format', '%{vars}',
+            'element.bst'
+        ])
+
+        result.assert_success()
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_unknown_host_arch(cli, datafiles):
+
+    with override_uname_arch("x86_128"):
+        project = os.path.join(datafiles.dirname, datafiles.basename, 'option-arch')
+        result = cli.run(project=project, silent=True, args=[
+            'show',
+            '--deps', 'none',
+            '--format', '%{vars}',
+            'element.bst'
+        ])
+
+        result.assert_main_error(ErrorDomain.PLATFORM, None)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_unknown_project_arch(cli, datafiles):
+
+    project = os.path.join(datafiles.dirname, datafiles.basename, 'option-arch-unknown')
+    result = cli.run(project=project, silent=True, args=[
+        'show',
+        '--deps', 'none',
+        '--format', '%{vars}',
+        'element.bst'
+    ])
+
+    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
tests/frontend/order.py (new file):

@@ -0,0 +1,109 @@
+import os
+
+import pytest
+from tests.testutils import cli, create_repo
+
+from buildstream import _yaml
+
+# Project directory
+DATA_DIR = os.path.join(
+    os.path.dirname(os.path.realpath(__file__)),
+    "project",
+)
+
+
+def create_element(repo, name, path, dependencies, ref=None):
+    element = {
+        'kind': 'import',
+        'sources': [
+            repo.source_config(ref=ref)
+        ],
+        'depends': dependencies
+    }
+    _yaml.dump(element, os.path.join(path, name))
+
+
+# This tests a variety of scenarios and checks that the order in
+# which things are processed remains stable.
+#
+# This is especially important in order to ensure that our
+# depth sorting and optimization of which elements should be
+# processed first is doing it's job right, and that we are
+# promoting elements to the build queue as soon as possible
+#
+# Parameters:
+#    targets (target elements): The targets to invoke bst with
+#    template (dict): The project template dictionary, for create_element()
+#    expected (list): A list of element names in the expected order
+#
+@pytest.mark.datafiles(os.path.join(DATA_DIR))
+@pytest.mark.parametrize("target,template,expected", [
+    # First simple test
+    ('3.bst', {
+        '0.bst': ['1.bst'],
+        '1.bst': [],
+        '2.bst': ['0.bst'],
+        '3.bst': ['0.bst', '1.bst', '2.bst']
+    }, ['1.bst', '0.bst', '2.bst', '3.bst']),
+
+    # A more complicated test with build of build dependencies
+    ('target.bst', {
+        'a.bst': [],
+        'base.bst': [],
+        'timezones.bst': [],
+        'middleware.bst': [{'filename': 'base.bst', 'type': 'build'}],
+        'app.bst': [{'filename': 'middleware.bst', 'type': 'build'}],
+        'target.bst': ['a.bst', 'base.bst', 'middleware.bst', 'app.bst', 'timezones.bst']
+    }, ['base.bst', 'middleware.bst', 'a.bst', 'app.bst', 'timezones.bst', 'target.bst']),
+])
+@pytest.mark.parametrize("operation", [('show'), ('fetch'), ('build')])
+def test_order(cli, datafiles, tmpdir, operation, target, template, expected):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    dev_files_path = os.path.join(project, 'files', 'dev-files')
+    element_path = os.path.join(project, 'elements')
+
+    # FIXME: Remove this when the test passes reliably.
+    #
+    #        There is no reason why the order should not
+    #        be preserved when the builders is set to 1,
+    #        the scheduler queue processing still seems to
+    #        be losing the order.
+    #
+    if operation == 'build':
+        pytest.skip("FIXME: This still only sometimes passes")
+
+    # Configure to only allow one fetcher at a time, make it easy to
+    # determine what is being planned in what order.
+    cli.configure({
+        'scheduler': {
+            'fetchers': 1,
+            'builders': 1
+        }
+    })
+
+    # Build the project from the template, make import elements
+    # all with the same repo
+    #
+    repo = create_repo('git', str(tmpdir))
+    ref = repo.create(dev_files_path)
+    for element, dependencies in template.items():
+        create_element(repo, element, element_path, dependencies, ref=ref)
+        repo.add_commit()
+
+    # Run test and collect results
+    if operation == 'show':
+        result = cli.run(args=['show', '--deps', 'plan', '--format', '%{name}', target], project=project, silent=True)
+        result.assert_success()
+        results = result.output.splitlines()
+    else:
+        if operation == 'fetch':
+            result = cli.run(args=['source', 'fetch', target], project=project, silent=True)
+        else:
+            result = cli.run(args=[operation, target], project=project, silent=True)
+        result.assert_success()
+        results = result.get_start_order(operation)
+
+    # Assert the order
+    print("Expected order: {}".format(expected))
+    print("Observed result order: {}".format(results))
+    assert results == expected
tests/testutils/runcli.py:

@@ -167,6 +167,23 @@ class Result():
     def assert_shell_error(self, fail_message=''):
         assert self.exit_code == 1, fail_message
 
+    # get_start_order()
+    #
+    # Gets the list of elements processed in a given queue, in the
+    # order of their first appearances in the session.
+    #
+    # Args:
+    #    activity (str): The queue activity name (like 'fetch')
+    #
+    # Returns:
+    #    (list): A list of element names in the order which they first appeared in the result
+    #
+    def get_start_order(self, activity):
+        results = re.findall(r'\[\s*{}:(\S+)\s*\]\s*START\s*.*\.log'.format(activity), self.stderr)
+        if results is None:
+            return []
+        return list(results)
+
     # get_tracked_elements()
     #
     # Produces a list of element names on which tracking occurred
tox.ini:

@@ -1,9 +1,20 @@
+#
+# Tox global configuration
+#
 [tox]
 envlist = py35,py36,py37
 skip_missing_interpreters = true
 
+#
+# Defaults for all environments
+#
+# Anything specified here is iherited by the sections
+#
 [testenv]
-commands = pytest {posargs}
+commands =
+    pytest --basetemp {envtmpdir} {posargs}
+    mkdir -p .coverage-reports
+    mv {envtmpdir}/.coverage {toxinidir}/.coverage-reports/.coverage.{env:COVERAGE_PREFIX:}{envname}
 deps =
     -rrequirements/requirements.txt
     -rrequirements/dev-requirements.txt
@@ -13,6 +24,32 @@ passenv =
     GI_TYPELIB_PATH
     INTEGRATION_CACHE
 
+#
+# These keys are not inherited by any other sections
+#
+setenv =
+    py{35,36,37}: COVERAGE_FILE = {envtmpdir}/.coverage
+whitelist_externals =
+    py{35,36,37}:
+        mv
+        mkdir
+
+#
+# Coverage reporting
+#
+[testenv:coverage]
+commands =
+    - coverage combine --rcfile={toxinidir}/.coveragerc {toxinidir}/.coverage-reports/
+    coverage report --rcfile={toxinidir}/.coveragerc -m
+deps =
+    -rrequirements/requirements.txt
+    -rrequirements/dev-requirements.txt
+setenv =
+    COVERAGE_FILE = {toxinidir}/.coverage-reports/.coverage
+
+#
+# Running linters
+#
 [testenv:lint]
 commands =
     pycodestyle
@@ -22,6 +59,9 @@ deps =
     -rrequirements/dev-requirements.txt
     -rrequirements/plugin-requirements.txt
 
+#
+# Building documentation
+#
 [testenv:docs]
 commands =
     make -C doc