Tom Pollard pushed to branch tpollard/buildremote at BuildStream / buildstream
Commits:
- 36746730 by Chandan Singh at 2019-01-31T10:50:05Z
- fa4a21ce by Chandan Singh at 2019-01-31T12:15:43Z
- dd791373 by Chandan Singh at 2019-01-31T14:32:44Z
- 96c0fbd6 by Chandan Singh at 2019-01-31T15:39:19Z
- d25e2795 by Benjamin Schubert at 2019-01-31T17:06:23Z
- 2d0eebbf by Benjamin Schubert at 2019-01-31T17:06:23Z
- 583bd97d by Benjamin Schubert at 2019-02-01T10:26:37Z
- 51cec3da by Phil Dawson at 2019-02-01T14:25:44Z
- 2b38aabe by Phil Dawson at 2019-02-01T15:33:00Z
- dbb3d232 by James Ennis at 2019-02-01T15:51:32Z
- 7e4205cb by James Ennis at 2019-02-01T15:51:32Z
- 4109a34a by James Ennis at 2019-02-01T17:11:29Z
- c9345014 by James Ennis at 2019-02-04T13:53:42Z
- 3ab09651 by James Ennis at 2019-02-04T14:47:43Z
- cd8e5e27 by Tom Pollard at 2019-02-05T11:22:01Z
21 changed files:
- NEWS
- buildstream/_frontend/cli.py
- buildstream/_loader/loadelement.py
- buildstream/_loader/loader.py
- buildstream/_stream.py
- buildstream/plugins/elements/filter.py
- buildstream/plugins/elements/filter.yaml
- tests/cachekey/cachekey.py
- + tests/cachekey/project/elements/key-stability/aaa.bst
- + tests/cachekey/project/elements/key-stability/t1.bst
- + tests/cachekey/project/elements/key-stability/t2.bst
- + tests/cachekey/project/elements/key-stability/top-level.bst
- + tests/cachekey/project/elements/key-stability/zzz.bst
- tests/elements/filter.py
- tests/elements/filter/basic/elements/deps-permitted.bst
- + tests/elements/filter/basic/elements/output-include-nonexistent-domain.bst
- tests/frontend/completions.py
- tests/frontend/pull.py
- tests/frontend/push.py
- tests/testutils/site.py
- tox.ini
Changes:
--- a/NEWS
+++ b/NEWS
@@ -122,6 +122,10 @@ buildstream 1.3.1
    'shell', 'show', 'source-checkout', 'track', 'workspace close' and 'workspace reset'
    commands are affected.
 
+  o bst 'build' now has a '--remote, -r' option, in line with bst 'push' and 'pull'.
+    Providing a remote will limit build's pull/push remote actions to the given
+    remote specifically, ignoring those defined via user or project configuration.
+
 
 =================
 buildstream 1.1.5
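
For illustration, the behaviour this entry describes is exercised by the new
tests further down in this series, which drive the option through the test
harness's `cli` fixture. A minimal sketch (not part of the diff; `sharecli`
is one of the temporary artifact shares those tests create):

    # Sketch: building against an explicitly given remote. Remotes from
    # user or project configuration are ignored for this invocation, and
    # artifacts are pulled from / pushed to sharecli.repo only.
    result = cli.run(project=project,
                     args=['build', '--remote', sharecli.repo, 'target.bst'])
    result.assert_success()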
--- a/buildstream/_frontend/cli.py
+++ b/buildstream/_frontend/cli.py
@@ -338,10 +338,12 @@ def init(app, project_name, format_version, element_path, force):
               help="Allow tracking to cross junction boundaries")
 @click.option('--track-save', default=False, is_flag=True,
               help="Deprecated: This is ignored")
+@click.option('--remote', '-r', default=None,
+              help="The URL of the remote cache (defaults to the first configured cache)")
 @click.argument('elements', nargs=-1,
                 type=click.Path(readable=False))
 @click.pass_obj
-def build(app, elements, all_, track_, track_save, track_all, track_except, track_cross_junctions):
+def build(app, elements, all_, track_, track_save, track_all, track_except, track_cross_junctions, remote):
     """Build elements in a pipeline
 
     Specifying no elements will result in building the default targets
@@ -376,7 +378,8 @@ def build(app, elements, all_, track_, track_save, track_all, track_except, track_cross_junctions):
                   track_except=track_except,
                   track_cross_junctions=track_cross_junctions,
                   ignore_junction_targets=ignore_junction_targets,
-                  build_all=all_)
+                  build_all=all_,
+                  remote=remote)
 
 
 ##################################################################
@@ -1012,7 +1015,7 @@ def artifact_checkout(app, force, deps, integrate, hardlinks, tar, directory, element):
 @click.option('--deps', '-d', default='none',
               type=click.Choice(['none', 'all']),
               help='The dependency artifacts to pull (default: none)')
-@click.option('--remote', '-r',
+@click.option('--remote', '-r', default=None,
               help="The URL of the remote cache (defaults to the first configured cache)")
 @click.argument('elements', nargs=-1,
                 type=click.Path(readable=False))
--- a/buildstream/_loader/loadelement.py
+++ b/buildstream/_loader/loadelement.py
@@ -39,6 +39,20 @@ from .types import Symbol, Dependency
 #    loader (Loader): The Loader object for this element
 #
 class LoadElement():
+    # Dependency():
+    #
+    # A link from a LoadElement to its dependencies.
+    #
+    # Keeps a link to one of the current Element's dependencies, together with
+    # its dependency type.
+    #
+    # Args:
+    #    element (LoadElement): a LoadElement on which there is a dependency
+    #    dep_type (str): the type of dependency this dependency link is
+    class Dependency:
+        def __init__(self, element, dep_type):
+            self.element = element
+            self.dep_type = dep_type
 
     def __init__(self, node, filename, loader):
 
@@ -74,8 +88,11 @@ class LoadElement():
             'build-depends', 'runtime-depends',
         ])
 
-        # Extract the Dependencies
-        self.deps = _extract_depends_from_node(self.node)
+        self.dependencies = []
+
+    @property
+    def junction(self):
+        return self._loader.project.junction
 
     # depends():
     #
@@ -101,8 +118,8 @@ class LoadElement():
             return
 
         self._dep_cache = {}
-        for dep in self.deps:
-            elt = self._loader.get_element_for_dep(dep)
+        for dep in self.dependencies:
+            elt = dep.element
 
             # Ensure the cache of the element we depend on
             elt._ensure_depends_cache()
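
The effect of the new inner class is that dependency edges now carry the
dependee `LoadElement` directly, so graph walks no longer go through a
loader's name table. A minimal sketch of such a walk (hypothetical helper,
not part of the diff):

    # Hypothetical traversal using the new object links: dep.element is a
    # LoadElement and dep.dep_type its dependency type, so no name or
    # junction lookup is needed while walking the graph.
    def walk(element, visit, seen=None):
        if seen is None:
            seen = set()        # LoadElements hash by object identity
        if element in seen:
            return
        seen.add(element)
        visit(element)
        for dep in element.dependencies:
            walk(dep.element, visit, seen)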
--- a/buildstream/_loader/loader.py
+++ b/buildstream/_loader/loader.py
@@ -19,7 +19,6 @@
 
 import os
 from functools import cmp_to_key
-from collections import namedtuple
 from collections.abc import Mapping
 import tempfile
 import shutil
@@ -32,8 +31,8 @@ from .._profile import Topics, profile_start, profile_end
 from .._includes import Includes
 from .._yamlcache import YamlCache
 
-from .types import Symbol, Dependency
-from .loadelement import LoadElement
+from .types import Symbol
+from .loadelement import LoadElement, _extract_depends_from_node
 from . import MetaElement
 from . import MetaSource
 from ..types import CoreWarnings
@@ -112,7 +111,7 @@ class Loader():
 
         # First pass, recursively load files and populate our table of LoadElements
         #
-        deps = []
+        target_elements = []
 
         # XXX This will need to be changed to the context's top-level project if this method
        # is ever used for subprojects
@@ -122,10 +121,10 @@ class Loader():
         with YamlCache.open(self._context, cache_file) as yaml_cache:
             for target in targets:
                 profile_start(Topics.LOAD_PROJECT, target)
-                junction, name, loader = self._parse_name(target, rewritable, ticker,
-                                                          fetch_subprojects=fetch_subprojects)
-                loader._load_file(name, rewritable, ticker, fetch_subprojects, yaml_cache)
-                deps.append(Dependency(name, junction=junction))
+                _junction, name, loader = self._parse_name(target, rewritable, ticker,
+                                                           fetch_subprojects=fetch_subprojects)
+                element = loader._load_file(name, rewritable, ticker, fetch_subprojects, yaml_cache)
+                target_elements.append(element)
                 profile_end(Topics.LOAD_PROJECT, target)
 
         #
@@ -134,29 +133,29 @@ class Loader():
 
         # Set up a dummy element that depends on all top-level targets
         # to resolve potential circular dependencies between them
-        DummyTarget = namedtuple('DummyTarget', ['name', 'full_name', 'deps'])
-
-        dummy = DummyTarget(name='', full_name='', deps=deps)
-        self._elements[''] = dummy
+        dummy_target = LoadElement("", "", self)
+        dummy_target.dependencies.extend(
+            LoadElement.Dependency(element, Symbol.RUNTIME)
+            for element in target_elements
+        )
 
         profile_key = "_".join(t for t in targets)
         profile_start(Topics.CIRCULAR_CHECK, profile_key)
-        self._check_circular_deps('')
+        self._check_circular_deps(dummy_target)
         profile_end(Topics.CIRCULAR_CHECK, profile_key)
 
         ret = []
         #
         # Sort direct dependencies of elements by their dependency ordering
         #
-        for target in targets:
-            profile_start(Topics.SORT_DEPENDENCIES, target)
-            junction, name, loader = self._parse_name(target, rewritable, ticker,
-                                                      fetch_subprojects=fetch_subprojects)
-            loader._sort_dependencies(name)
-            profile_end(Topics.SORT_DEPENDENCIES, target)
+        for element in target_elements:
+            loader = element._loader
+            profile_start(Topics.SORT_DEPENDENCIES, element.name)
+            loader._sort_dependencies(element)
+            profile_end(Topics.SORT_DEPENDENCIES, element.name)
             # Finally, wrap what we have into LoadElements and return the target
             #
-            ret.append(loader._collect_element(name))
+            ret.append(loader._collect_element(element))
 
         return ret
 
@@ -184,22 +183,6 @@ class Loader():
         if os.path.exists(self._tempdir):
             shutil.rmtree(self._tempdir)
 
-    # get_element_for_dep():
-    #
-    # Gets a cached LoadElement by Dependency object
-    #
-    # This is used by LoadElement
-    #
-    # Args:
-    #    dep (Dependency): The dependency to search for
-    #
-    # Returns:
-    #    (LoadElement): The cached LoadElement
-    #
-    def get_element_for_dep(self, dep):
-        loader = self._get_loader_for_dep(dep)
-        return loader._elements[dep.name]
-
     ###########################################
     #            Private Methods              #
     ###########################################
@@ -272,8 +255,10 @@ class Loader():
 
         self._elements[filename] = element
 
+        dependencies = _extract_depends_from_node(node)
+
         # Load all dependency files for the new LoadElement
-        for dep in element.deps:
+        for dep in dependencies:
             if dep.junction:
                 self._load_file(dep.junction, rewritable, ticker, fetch_subprojects, yaml_cache)
                 loader = self._get_loader(dep.junction, rewritable=rewritable, ticker=ticker,
@@ -288,7 +273,9 @@ class Loader():
                                 "{}: Cannot depend on junction"
                                 .format(dep.provenance))
 
-        deps_names = [dep.name for dep in element.deps]
+            element.dependencies.append(LoadElement.Dependency(dep_element, dep.dep_type))
+
+        deps_names = [dep.name for dep in dependencies]
         self._warn_invalid_elements(deps_names)
 
         return element
@@ -299,12 +286,12 @@ class Loader():
     # dependencies already resolved.
     #
     # Args:
-    #    element_name (str): The element-path relative element name to check
+    #    element (LoadElement): The element to check
     #
     # Raises:
     #    (LoadError): In case there was a circular dependency error
     #
-    def _check_circular_deps(self, element_name, check_elements=None, validated=None, sequence=None):
+    def _check_circular_deps(self, element, check_elements=None, validated=None, sequence=None):
 
         if check_elements is None:
             check_elements = {}
@@ -313,38 +300,31 @@ class Loader():
         if sequence is None:
             sequence = []
 
-        element = self._elements[element_name]
-
-        # element name must be unique across projects
-        # to be usable as key for the check_elements and validated dicts
-        element_name = element.full_name
-
         # Skip already validated branches
-        if validated.get(element_name) is not None:
+        if validated.get(element) is not None:
             return
 
-        if check_elements.get(element_name) is not None:
+        if check_elements.get(element) is not None:
             # Create `chain`, the loop of element dependencies from this
             # element back to itself, by trimming everything before this
             # element from the sequence under consideration.
-            chain = sequence[sequence.index(element_name):]
-            chain.append(element_name)
+            chain = sequence[sequence.index(element.full_name):]
+            chain.append(element.full_name)
             raise LoadError(LoadErrorReason.CIRCULAR_DEPENDENCY,
                             ("Circular dependency detected at element: {}\n" +
                              "Dependency chain: {}")
-                            .format(element.name, " -> ".join(chain)))
+                            .format(element.full_name, " -> ".join(chain)))
 
         # Push / Check each dependency / Pop
-        check_elements[element_name] = True
-        sequence.append(element_name)
-        for dep in element.deps:
-            loader = self._get_loader_for_dep(dep)
-            loader._check_circular_deps(dep.name, check_elements, validated, sequence)
-        del check_elements[element_name]
+        check_elements[element] = True
+        sequence.append(element.full_name)
+        for dep in element.dependencies:
+            dep.element._loader._check_circular_deps(dep.element, check_elements, validated, sequence)
+        del check_elements[element]
         sequence.pop()
 
         # Eliminate duplicate paths
-        validated[element_name] = True
+        validated[element] = True
 
     # _sort_dependencies():
     #
@@ -357,28 +337,21 @@ class Loader():
     # sorts throughout the build process.
     #
     # Args:
-    #    element_name (str): The element-path relative element name to sort
+    #    element (LoadElement): The element to sort
     #
-    def _sort_dependencies(self, element_name, visited=None):
+    def _sort_dependencies(self, element, visited=None):
         if visited is None:
-            visited = {}
+            visited = set()
 
-        element = self._elements[element_name]
-
-        # element name must be unique across projects
-        # to be usable as key for the visited dict
-        element_name = element.full_name
-
-        if visited.get(element_name) is not None:
+        if element in visited:
             return
 
-        for dep in element.deps:
-            loader = self._get_loader_for_dep(dep)
-            loader._sort_dependencies(dep.name, visited=visited)
+        for dep in element.dependencies:
+            dep.element._loader._sort_dependencies(dep.element, visited=visited)
 
         def dependency_cmp(dep_a, dep_b):
-            element_a = self.get_element_for_dep(dep_a)
-            element_b = self.get_element_for_dep(dep_b)
+            element_a = dep_a.element
+            element_b = dep_b.element
 
             # Sort on inter element dependency first
             if element_a.depends(element_b):
@@ -395,21 +368,21 @@ class Loader():
                 return -1
 
             # All things being equal, string comparison.
-            if dep_a.name > dep_b.name:
+            if element_a.name > element_b.name:
                 return 1
-            elif dep_a.name < dep_b.name:
+            elif element_a.name < element_b.name:
                 return -1
 
             # Sort local elements before junction elements
             # and use string comparison between junction elements
-            if dep_a.junction and dep_b.junction:
-                if dep_a.junction > dep_b.junction:
+            if element_a.junction and element_b.junction:
+                if element_a.junction > element_b.junction:
                     return 1
-                elif dep_a.junction < dep_b.junction:
+                elif element_a.junction < element_b.junction:
                     return -1
-            elif dep_a.junction:
+            elif element_a.junction:
                 return -1
-            elif dep_b.junction:
+            elif element_b.junction:
                 return 1
 
             # This won't ever happen
@@ -418,26 +391,23 @@ class Loader():
         # Now dependency sort, we ensure that if any direct dependency
         # directly or indirectly depends on another direct dependency,
         # it is found later in the list.
-        element.deps.sort(key=cmp_to_key(dependency_cmp))
+        element.dependencies.sort(key=cmp_to_key(dependency_cmp))
 
-        visited[element_name] = True
+        visited.add(element)
 
     # _collect_element()
     #
     # Collect the toplevel elements we have
     #
     # Args:
-    #    element_name (str): The element-path relative element name to sort
+    #    element (LoadElement): The element for which to load a MetaElement
     #
     # Returns:
     #    (MetaElement): A recursively loaded MetaElement
     #
-    def _collect_element(self, element_name):
-
-        element = self._elements[element_name]
-
+    def _collect_element(self, element):
         # Return the already built one, if we already built it
-        meta_element = self._meta_elements.get(element_name)
+        meta_element = self._meta_elements.get(element.name)
         if meta_element:
             return meta_element
 
@@ -461,10 +431,10 @@ class Loader():
                 del source[Symbol.DIRECTORY]
 
             index = sources.index(source)
-            meta_source = MetaSource(element_name, index, element_kind, kind, source, directory)
+            meta_source = MetaSource(element.name, index, element_kind, kind, source, directory)
             meta_sources.append(meta_source)
 
-        meta_element = MetaElement(self.project, element_name, element_kind,
+        meta_element = MetaElement(self.project, element.name, element_kind,
                                    elt_provenance, meta_sources,
                                    _yaml.node_get(node, Mapping, Symbol.CONFIG, default_value={}),
                                    _yaml.node_get(node, Mapping, Symbol.VARIABLES, default_value={}),
@@ -475,12 +445,12 @@ class Loader():
                                    element_kind == 'junction')
 
         # Cache it now, make sure it's already there before recursing
-        self._meta_elements[element_name] = meta_element
+        self._meta_elements[element.name] = meta_element
 
         # Descend
-        for dep in element.deps:
-            loader = self._get_loader_for_dep(dep)
-            meta_dep = loader._collect_element(dep.name)
+        for dep in element.dependencies:
+            loader = dep.element._loader
+            meta_dep = loader._collect_element(dep.element)
             if dep.dep_type != 'runtime':
                 meta_element.build_dependencies.append(meta_dep)
             if dep.dep_type != 'build':
@@ -539,7 +509,7 @@ class Loader():
             return None
 
         # meta junction element
-        meta_element = self._collect_element(filename)
+        meta_element = self._collect_element(self._elements[filename])
         if meta_element.kind != 'junction':
             raise LoadError(LoadErrorReason.INVALID_DATA,
                             "{}: Expected junction but element kind is {}".format(filename, meta_element.kind))
@@ -601,23 +571,6 @@ class Loader():
 
         return loader
 
-    # _get_loader_for_dep():
-    #
-    # Gets the appropriate Loader for a Dependency object
-    #
-    # Args:
-    #    dep (Dependency): A Dependency object
-    #
-    # Returns:
-    #    (Loader): The Loader object to use for this Dependency
-    #
-    def _get_loader_for_dep(self, dep):
-        if dep.junction:
-            # junction dependency, delegate to appropriate loader
-            return self._loaders[dep.junction]
-        else:
-            return self
-
    # _parse_name():
    #
    # Get junction and base name of element along with loader for the sub-project
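
The dummy-target trick above lets a single depth-first pass vet every
top-level target at once, and keying the bookkeeping dicts by `LoadElement`
object (rather than by name) is what makes elements from different projects
safe to mix. A self-contained sketch of the same detection scheme, with
simplified names (not the BuildStream implementation):

    # Sketch of the cycle check: DFS with an "in progress" set
    # (check_elements in the real code) and a "done" set (validated),
    # keyed by node object, plus a name sequence for the error message.
    class Node:
        def __init__(self, name, deps=()):
            self.name = name
            self.deps = list(deps)

    def check_cycles(node, in_progress=None, done=None, path=None):
        in_progress = set() if in_progress is None else in_progress
        done = set() if done is None else done
        path = [] if path is None else path
        if node in done:
            return
        if node in in_progress:
            chain = path[path.index(node.name):] + [node.name]
            raise RuntimeError("Circular dependency: " + " -> ".join(chain))
        in_progress.add(node)
        path.append(node.name)
        for dep in node.deps:
            check_cycles(dep, in_progress, done, path)
        in_progress.remove(node)
        path.pop()
        done.add(node)

    # A dummy root depending on every target lets one pass cover them all
    a = Node("a.bst")
    b = Node("b.bst", deps=[a])
    a.deps.append(b)      # introduce a cycle: a -> b -> a
    root = Node("", deps=[a, b])
    check_cycles(root)    # raises: Circular dependency: a.bst -> b.bst -> a.bst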
--- a/buildstream/_stream.py
+++ b/buildstream/_stream.py
@@ -197,26 +197,36 @@ class Stream():
     #    ignore_junction_targets (bool): Whether junction targets should be filtered out
     #    build_all (bool): Whether to build all elements, or only those
     #                      which are required to build the target.
+    #    remote (str): The URL of a specific remote server to push to, or None
+    #
+    #    If `remote` is specified as None, then the regular configuration will be
+    #    used to determine where to push artifacts to.
     #
     def build(self, targets, *,
               track_targets=None,
               track_except=None,
               track_cross_junctions=False,
               ignore_junction_targets=False,
-              build_all=False):
+              build_all=False,
+              remote=None):
 
         if build_all:
             selection = PipelineSelection.ALL
         else:
             selection = PipelineSelection.PLAN
 
+        use_config = True
+        if remote:
+            use_config = False
+
         elements, track_elements = \
             self._load(targets, track_targets,
                        selection=selection, track_selection=PipelineSelection.ALL,
                        track_except_targets=track_except,
                        track_cross_junctions=track_cross_junctions,
                        ignore_junction_targets=ignore_junction_targets,
-                       use_artifact_config=True,
+                       use_artifact_config=use_config,
+                       artifact_remote_url=remote,
                        fetch_subprojects=True,
                        dynamic_plan=True)
 
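
The `use_config` toggle encodes a simple precedence rule: a remote passed on
the command line replaces, rather than augments, the configured remotes.
Distilled into a hypothetical helper (not the actual API):

    # Hypothetical distillation of the precedence Stream.build() applies:
    # an explicit --remote URL wins outright; otherwise fall back to the
    # remotes from user/project configuration.
    def resolve_artifact_remotes(cli_remote, configured_remotes):
        if cli_remote is not None:
            return [cli_remote]
        return list(configured_remotes)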
--- a/buildstream/plugins/elements/filter.py
+++ b/buildstream/plugins/elements/filter.py
@@ -20,25 +20,127 @@
 """
 filter - Extract a subset of files from another element
 =======================================================
-This filters another element by producing an output that is a subset of
-the filtered element.
+Filter another element by producing an output that is a subset of
+the parent element's output. Subsets are defined by the parent element's
+:ref:`split rules <public_split_rules>`.
 
-To specify the element to filter, specify it as the one and only build
-dependency to filter. See :ref:`Dependencies <format_dependencies>`
-for what dependencies are and how to specify them.
+Overview
+--------
+A filter element must have exactly one *build* dependency, where said
+dependency is the 'parent' element which we would like to filter.
+Runtime dependencies may also be specified, which can be useful to propagate
+forward from this filter element onto its reverse dependencies.
+See :ref:`Dependencies <format_dependencies>` for how dependencies are
+specified.
 
-Dependencies aside from the filtered element may be specified, but
-they must be runtime dependencies only. This can be useful to propagate
-runtime dependencies forward from this filter element onto its reverse
-dependencies.
+When workspaces are opened, closed or reset on a filter element, or this
+element is tracked, the filter element will transparently pass on the command
+to its parent element (the sole build-dependency).
 
-When workspaces are opened, closed or reset on this element, or this
-element is tracked, instead of erroring due to a lack of sources, this
-element will transparently pass on the command to its sole build-dependency.
+Example
+-------
+Consider a simple import element, ``import.bst``, which imports the local files
+'foo', 'bar' and 'baz' (each stored in ``files/``, relative to the project's root):
 
-The default configuration and possible options are as such:
-  .. literalinclude:: ../../../buildstream/plugins/elements/filter.yaml
-     :language: yaml
+.. code:: yaml
+
+   kind: import
+
+   # Specify sources to import
+   sources:
+   - kind: local
+     path: files
+
+   # Specify public domain data, visible to other elements
+   public:
+     bst:
+       split-rules:
+         foo:
+         - /foo
+         bar:
+         - /bar
+
+.. note::
+
+   We can make an element's metadata visible to all reverse dependencies by making use
+   of the ``public:`` field. See the :ref:`public data documentation <format_public>`
+   for more information.
+
+In this example, ``import.bst`` will serve as the 'parent' of the filter element, thus
+its output will be filtered. It is important to understand that the artifact of the
+above element will contain the files: 'foo', 'bar' and 'baz'.
+
+Now, to produce an element whose artifact contains the file 'foo', and exclusively 'foo',
+we can define the following filter, ``filter-foo.bst``:
+
+.. code:: yaml
+
+   kind: filter
+
+   # Declare the sole build-dependency of the filter element
+   depends:
+   - filename: import.bst
+     type: build
+
+   # Declare a list of domains to include in the filter's artifact
+   config:
+     include:
+     - foo
+
+.. note::
+
+   We can also specify build-dependencies with a 'build-depends' field which has been
+   available since :ref:`format version 14 <project_format_version>`. See the
+   :ref:`Build-Depends documentation <format_build_depends>` for more detail.
+
+It should be noted that an 'empty' ``include:`` list would, by default, include all
+split-rules specified in the parent element, which, in this example, would be the
+files 'foo' and 'bar' (the file 'baz' was not covered by any split rules).
+
+Equally, we can use the ``exclude:`` statement to create the same artifact (which
+only contains the file 'foo') by declaring the following element, ``exclude-bar.bst``:
+
+.. code:: yaml
+
+   kind: filter
+
+   # Declare the sole build-dependency of the filter element
+   depends:
+   - filename: import.bst
+     type: build
+
+   # Declare a list of domains to exclude from the filter's artifact
+   config:
+     exclude:
+     - bar
+
+In addition to the ``include:`` and ``exclude:`` fields, there exists an ``include-orphans:``
+(Boolean) field, which defaults to ``False``. This will determine whether to include files
+which are not present in the 'split-rules'. For example, if we wanted to filter out all files
+which are not included as split rules we can define the following element, ``filter-misc.bst``:
+
+.. code:: yaml
+
+   kind: filter
+
+   # Declare the sole build-dependency of the filter element
+   depends:
+   - filename: import.bst
+     type: build
+
+   # Filter out all files which are not declared as split rules
+   config:
+     exclude:
+     - foo
+     - bar
+     include-orphans: True
+
+The artifact of ``filter-misc.bst`` will only contain the file 'baz'.
+
+Below is more information regarding the default configuration and possible options
+of the filter element:
+
+.. literalinclude:: ../../../buildstream/plugins/elements/filter.yaml
+   :language: yaml
 """
 
 from buildstream import Element, ElementError, Scope
@@ -66,6 +168,8 @@ class FilterElement(Element):
         self.include = self.node_get_member(node, list, 'include')
         self.exclude = self.node_get_member(node, list, 'exclude')
         self.include_orphans = self.node_get_member(node, bool, 'include-orphans')
+        self.include_provenance = self.node_provenance(node, member_name='include')
+        self.exclude_provenance = self.node_provenance(node, member_name='exclude')
 
     def preflight(self):
         # Exactly one build-depend is permitted
@@ -105,6 +209,31 @@ class FilterElement(Element):
     def assemble(self, sandbox):
         with self.timed_activity("Staging artifact", silent_nested=True):
             for dep in self.dependencies(Scope.BUILD, recurse=False):
+                # Check that all the included/excluded domains exist
+                pub_data = dep.get_public_data('bst')
+                split_rules = pub_data.get('split-rules', {})
+                unfound_includes = []
+                for domain in self.include:
+                    if domain not in split_rules:
+                        unfound_includes.append(domain)
+                unfound_excludes = []
+                for domain in self.exclude:
+                    if domain not in split_rules:
+                        unfound_excludes.append(domain)
+
+                detail = []
+                if unfound_includes:
+                    detail.append("Unknown domains were used in {}".format(self.include_provenance))
+                    detail.extend([' - {}'.format(domain) for domain in unfound_includes])
+
+                if unfound_excludes:
+                    detail.append("Unknown domains were used in {}".format(self.exclude_provenance))
+                    detail.extend([' - {}'.format(domain) for domain in unfound_excludes])
+
+                if detail:
+                    detail = '\n'.join(detail)
+                    raise ElementError("Unknown domains declared.", detail=detail)
+
                 dep.stage_artifact(sandbox, include=self.include,
                                    exclude=self.exclude, orphans=self.include_orphans)
         return ""
--- a/buildstream/plugins/elements/filter.yaml
+++ b/buildstream/plugins/elements/filter.yaml
@@ -2,20 +2,20 @@
 # Filter element configuration
 config:
 
-    # A list of domains to include from each artifact, as
-    # they were defined in the element's 'split-rules'.
+    # A list of domains to include in each artifact, as
+    # they were defined as public data in the parent
+    # element's 'split-rules'.
     #
-    # Since domains can be added, it is not an error to
-    # specify domains which may not exist for all of the
-    # elements in this composition.
+    # If a domain is specified that does not exist, the
+    # filter element will fail to build.
     #
     # The default empty list indicates that all domains
-    # from each dependency should be included.
+    # of the parent's artifact should be included.
     #
     include: []
 
     # A list of domains to exclude from each artifact, as
-    # they were defined in the element's 'split-rules'.
+    # they were defined in the parent element's 'split-rules'.
     #
     # In the case that a file is spoken for by a domain
     # in the 'include' list and another in the 'exclude'
@@ -23,7 +23,7 @@ config:
     exclude: []
 
     # Whether to include orphan files which are not
-    # included by any of the 'split-rules' present on
-    # a given element.
+    # included by any of the 'split-rules' present in
+    # the parent element.
     #
     include-orphans: False
--- a/tests/cachekey/cachekey.py
+++ b/tests/cachekey/cachekey.py
@@ -214,3 +214,41 @@ def test_cache_key_fatal_warnings(cli, tmpdir, first_warnings, second_warnings, identical_keys):
     second_keys = run_get_cache_key("second", second_warnings)
 
     assert compare_cache_keys(first_keys, second_keys) == identical_keys
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_keys_stable_over_targets(cli, datafiles):
+    root_element = 'elements/key-stability/top-level.bst'
+    target1 = 'elements/key-stability/t1.bst'
+    target2 = 'elements/key-stability/t2.bst'
+
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    full_graph_result = cli.run(project=project, args=[
+        'show',
+        '--format', '%{name}::%{full-key}',
+        root_element
+    ])
+    full_graph_result.assert_success()
+    all_cache_keys = parse_output_keys(full_graph_result.output)
+
+    ordering1_result = cli.run(project=project, args=[
+        'show',
+        '--format', '%{name}::%{full-key}',
+        target1,
+        target2
+    ])
+    ordering1_result.assert_success()
+    ordering1_cache_keys = parse_output_keys(ordering1_result.output)
+
+    ordering2_result = cli.run(project=project, args=[
+        'show',
+        '--format', '%{name}::%{full-key}',
+        target2,
+        target1
+    ])
+    ordering2_result.assert_success()
+    ordering2_cache_keys = parse_output_keys(ordering2_result.output)
+
+    for element in ordering1_cache_keys:
+        assert ordering1_cache_keys[element] == ordering2_cache_keys[element]
+        assert ordering1_cache_keys[element] == all_cache_keys[element]
--- /dev/null
+++ b/tests/cachekey/project/elements/key-stability/aaa.bst
@@ -0,0 +1,4 @@
+kind: import
+sources:
+- kind: local
+  path: elements/key-stability/aaa.bst
--- /dev/null
+++ b/tests/cachekey/project/elements/key-stability/t1.bst
@@ -0,0 +1,6 @@
+kind: import
+sources:
+- kind: local
+  path: elements/key-stability/t1.bst
+depends:
+- elements/key-stability/zzz.bst
--- /dev/null
+++ b/tests/cachekey/project/elements/key-stability/t2.bst
@@ -0,0 +1,7 @@
+kind: import
+sources:
+- kind: local
+  path: elements/key-stability/t2.bst
+depends:
+- elements/key-stability/aaa.bst
+- elements/key-stability/zzz.bst
--- /dev/null
+++ b/tests/cachekey/project/elements/key-stability/top-level.bst
@@ -0,0 +1,7 @@
+kind: import
+sources:
+- kind: local
+  path: elements/key-stability/top-level.bst
+depends:
+- elements/key-stability/t1.bst
+- elements/key-stability/t2.bst
--- /dev/null
+++ b/tests/cachekey/project/elements/key-stability/zzz.bst
@@ -0,0 +1,4 @@
+kind: import
+sources:
+- kind: local
+  path: elements/key-stability/zzz.bst
--- a/tests/elements/filter.py
+++ b/tests/elements/filter.py
@@ -484,3 +484,14 @@ def test_filter_include_with_indirect_deps(datafiles, cli, tmpdir):
     # indirect dependencies shouldn't be staged and filtered
     assert not os.path.exists(os.path.join(checkout, "foo"))
     assert not os.path.exists(os.path.join(checkout, "bar"))
+
+
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+def test_filter_fails_for_nonexisting_domain(datafiles, cli, tmpdir):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    result = cli.run(project=project, args=['build', 'output-include-nonexistent-domain.bst'])
+    result.assert_main_error(ErrorDomain.STREAM, None)
+
+    error = "Unknown domains were used in output-include-nonexistent-domain.bst [line 7 column 2]"
+    assert error in result.stderr
+    assert '- unknown_file' in result.stderr
|
| 1 | 1 |
kind: filter
|
| 2 | 2 |
depends:
|
| 3 |
-- filename: output-include.bst
|
|
| 3 |
+- filename: input.bst
|
|
| 4 | 4 |
type: build
|
| 5 | 5 |
- filename: output-exclude.bst
|
| 6 | 6 |
type: runtime
|
--- /dev/null
+++ b/tests/elements/filter/basic/elements/output-include-nonexistent-domain.bst
@@ -0,0 +1,8 @@
+kind: filter
+depends:
+- filename: input.bst
+  type: build
+config:
+  include:
+  - unknown_file
+
--- a/tests/frontend/completions.py
+++ b/tests/frontend/completions.py
@@ -141,7 +141,8 @@ def test_commands(cli, cmd, word_idx, expected):
     ('bst --no-colors build -', 3, ['--all ', '--track ', '--track-all ',
                                     '--track-except ',
                                     '--track-cross-junctions ', '-J ',
-                                    '--track-save ']),
+                                    '--track-save ',
+                                    '--remote ', '-r ']),
 
     # Test the behavior of completing after an option that has a
     # parameter that cannot be completed, vs an option that has
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -408,3 +408,56 @@ def test_pull_missing_notifies_user(caplog, cli, tmpdir, datafiles):
 
     assert "INFO Remote ({}) does not have".format(share.repo) in result.stderr
     assert "SKIPPED Pull" in result.stderr
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_build_remote_option(caplog, cli, tmpdir, datafiles):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    caplog.set_level(1)
+
+    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1')) as shareuser,\
+        create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as shareproject,\
+        create_artifact_share(os.path.join(str(tmpdir), 'artifactshare3')) as sharecli:
+
+        # Add shareproject repo url to project.conf
+        with open(os.path.join(project, "project.conf"), "a") as projconf:
+            projconf.write("artifacts:\n url: {}\n push: True".format(shareproject.repo))
+
+        # Configure shareuser remote in user conf
+        cli.configure({
+            'artifacts': {'url': shareuser.repo, 'push': True}
+        })
+
+        # Push the artifacts to the shareuser and shareproject remotes.
+        # Assert that shareuser and shareproject have the artifacts cached,
+        # but sharecli doesn't, then delete locally cached elements
+        result = cli.run(project=project, args=['build', 'target.bst'])
+        result.assert_success()
+        all_elements = ['target.bst', 'import-bin.bst', 'compose-all.bst']
+        for element_name in all_elements:
+            assert element_name in result.get_pushed_elements()
+            assert_not_shared(cli, sharecli, project, element_name)
+            assert_shared(cli, shareuser, project, element_name)
+            assert_shared(cli, shareproject, project, element_name)
+            cli.remove_artifact_from_cache(project, element_name)
+
+        # Now check that a build with cli set as sharecli results in nothing being pulled,
+        # as it doesn't have them cached and shareuser/shareproject should be ignored. This
+        # will however result in the artifacts being built and pushed to it
+        result = cli.run(project=project, args=['build', '--remote', sharecli.repo, 'target.bst'])
+        result.assert_success()
+        for element_name in all_elements:
+            assert element_name not in result.get_pulled_elements()
+            assert_shared(cli, sharecli, project, element_name)
+            cli.remove_artifact_from_cache(project, element_name)
+
+        # Now check that a clean build with cli set as sharecli results in artifacts only
+        # being pulled from it, as that was provided via the cli and is populated
+        result = cli.run(project=project, args=['build', '--remote', sharecli.repo, 'target.bst'])
+        result.assert_success()
+        for element_name in all_elements:
+            assert cli.get_element_state(project, element_name) == 'cached'
+            assert element_name in result.get_pulled_elements()
+        assert shareproject.repo not in result.stderr
+        assert shareuser.repo not in result.stderr
+        assert sharecli.repo in result.stderr
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -416,3 +416,33 @@ def test_push_already_cached(caplog, cli, tmpdir, datafiles):
     assert not result.get_pushed_elements(), "No elements should have been pushed since the cache was populated"
     assert "INFO Remote ({}) already has ".format(share.repo) in result.stderr
     assert "SKIPPED Push" in result.stderr
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_build_remote_option(caplog, cli, tmpdir, datafiles):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    caplog.set_level(1)
+
+    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1')) as shareuser,\
+        create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as shareproject,\
+        create_artifact_share(os.path.join(str(tmpdir), 'artifactshare3')) as sharecli:
+
+        # Add shareproject repo url to project.conf
+        with open(os.path.join(project, "project.conf"), "a") as projconf:
+            projconf.write("artifacts:\n url: {}\n push: True".format(shareproject.repo))
+
+        # Configure shareuser remote in user conf
+        cli.configure({
+            'artifacts': {'url': shareuser.repo, 'push': True}
+        })
+
+        result = cli.run(project=project, args=['build', '--remote', sharecli.repo, 'target.bst'])
+
+        # Artifacts should have only been pushed to sharecli, as that was provided via the cli
+        result.assert_success()
+        all_elements = ['target.bst', 'import-bin.bst', 'compose-all.bst']
+        for element_name in all_elements:
+            assert element_name in result.get_pushed_elements()
+            assert_shared(cli, sharecli, project, element_name)
+            assert_not_shared(cli, shareuser, project, element_name)
+            assert_not_shared(cli, shareproject, project, element_name)
--- a/tests/testutils/site.py
+++ b/tests/testutils/site.py
@@ -18,7 +18,7 @@ try:
     utils.get_host_tool('git')
     HAVE_GIT = True
     out = str(subprocess.check_output(['git', '--version']), "utf-8")
-    version = tuple(int(x) for x in out.split(' ', 2)[2].split('.'))
+    version = tuple(int(x) for x in out.split(' ')[2].split('.'))
     HAVE_OLD_GIT = version < (1, 8, 5)
 except ProgramNotFoundError:
     HAVE_GIT = False
--- a/tox.ini
+++ b/tox.ini
@@ -88,5 +88,5 @@ whitelist_externals =
 commands =
     python3 setup.py --command-packages=click_man.commands man_pages
 deps =
-    click-man
+    click-man >= 0.3.0
     -rrequirements/requirements.txt