Valentin David pushed to branch valentindavid/sysroot_dependencies at BuildStream / buildstream
Commits:
- 996e4089 by Valentin David at 2018-10-24T15:56:34Z
29 changed files:
- buildstream/_loader/loadelement.py
- buildstream/_loader/loader.py
- buildstream/buildelement.py
- buildstream/element.py
- buildstream/plugins/elements/compose.py
- doc/source/format_declaring.rst
- tests/loader/dependencies.py
- + tests/sysroot_depends/project/elements/a.bst
- + tests/sysroot_depends/project/elements/b.bst
- + tests/sysroot_depends/project/elements/base.bst
- + tests/sysroot_depends/project/elements/base/base-alpine.bst
- + tests/sysroot_depends/project/elements/compose-integration.bst
- + tests/sysroot_depends/project/elements/compose-layers-with-sysroot.bst
- + tests/sysroot_depends/project/elements/compose-layers.bst
- + tests/sysroot_depends/project/elements/integration.bst
- + tests/sysroot_depends/project/elements/layer1-files.bst
- + tests/sysroot_depends/project/elements/layer1.bst
- + tests/sysroot_depends/project/elements/layer2-files.bst
- + tests/sysroot_depends/project/elements/layer2.bst
- + tests/sysroot_depends/project/elements/manual-integration-runtime.bst
- + tests/sysroot_depends/project/elements/manual-integration.bst
- + tests/sysroot_depends/project/elements/target-variable.bst
- + tests/sysroot_depends/project/elements/target.bst
- + tests/sysroot_depends/project/files/a/a.txt
- + tests/sysroot_depends/project/files/b/b.txt
- + tests/sysroot_depends/project/files/layer1/1
- + tests/sysroot_depends/project/files/layer2/2
- + tests/sysroot_depends/project/project.conf
- + tests/sysroot_depends/sysroot_depends.py
Changes:
buildstream/_loader/loadelement.py
@@ -72,10 +72,24 @@ class LoadElement():
             'variables', 'environment', 'environment-nocache',
             'config', 'public', 'description',
             'build-depends', 'runtime-depends',
+            'sysroots',
         ])
 
+        self.deps = []
+        sysroots = _yaml.node_get(node, list, 'sysroots', default_value=[])
+        for sysroot in sysroots:
+            _yaml.node_validate(sysroot, ['path', 'depends', 'build-depends'])
+            path = _yaml.node_get(sysroot, str, 'path')
+            for dep in _extract_depends_from_node(sysroot):
+                if dep.dep_type == Symbol.RUNTIME:
+                    raise LoadError(LoadErrorReason.INVALID_DATA,
+                                    "{}: Sysroot'ed dependencies can not be of type 'runtime'"
+                                    .format(dep.provenance, dep.dep_type))
+                self.deps.append((path, dep))
+
         # Extract the Dependencies
-        self.deps = _extract_depends_from_node(self.node)
+        for dep in _extract_depends_from_node(self.node):
+            self.deps.append(('/', dep))
 
     # depends():
     #
@@ -101,7 +115,7 @@ class LoadElement():
             return
 
         self._dep_cache = {}
-        for dep in self.deps:
+        for _, dep in self.deps:
             elt = self._loader.get_element_for_dep(dep)
 
             # Ensure the cache of the element we depend on
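The net effect of the loader changes is that every dependency, sysroot'ed or not, now travels through the loader as a (sysroot path, dependency) pair, with '/' standing in for ordinary dependencies. A minimal, self-contained sketch of that data shape (stand-in types and element names, not BuildStream code):

    from collections import namedtuple

    # Stand-in for the loader's Dependency type; the names are illustrative only.
    Dependency = namedtuple('Dependency', ['name', 'dep_type'])

    deps = []
    # A dependency declared under a 'sysroots:' stanza carries its sysroot path...
    deps.append(('/sysroot', Dependency('layer1.bst', 'build')))
    # ...while ordinary 'depends:' entries keep the root path.
    deps.append(('/', Dependency('base.bst', 'all')))

    for sysroot, dep in deps:
        print('{} is staged under {}'.format(dep.name, sysroot))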
buildstream/_loader/loader.py
@@ -121,7 +121,7 @@ class Loader():
             junction, name, loader = self._parse_name(target, rewritable, ticker,
                                                       fetch_subprojects=fetch_subprojects)
             loader._load_file(name, rewritable, ticker, fetch_subprojects, yaml_cache)
-            deps.append(Dependency(name, junction=junction))
+            deps.append(('/', Dependency(name, junction=junction)))
             profile_end(Topics.LOAD_PROJECT, target)
 
         #
@@ -269,7 +269,7 @@ class Loader():
         self._elements[filename] = element
 
         # Load all dependency files for the new LoadElement
-        for dep in element.deps:
+        for _, dep in element.deps:
             if dep.junction:
                 self._load_file(dep.junction, rewritable, ticker, fetch_subprojects, yaml_cache)
                 loader = self._get_loader(dep.junction, rewritable=rewritable, ticker=ticker,
@@ -286,6 +286,7 @@ class Loader():
 
         return element
 
+
     # _check_circular_deps():
     #
     # Detect circular dependencies on LoadElements with
@@ -330,7 +331,7 @@ class Loader():
         # Push / Check each dependency / Pop
         check_elements[element_name] = True
         sequence.append(element_name)
-        for dep in element.deps:
+        for _, dep in element.deps:
             loader = self._get_loader_for_dep(dep)
             loader._check_circular_deps(dep.name, check_elements, validated, sequence)
         del check_elements[element_name]
@@ -365,14 +366,21 @@ class Loader():
         if visited.get(element_name) is not None:
             return
 
-        for dep in element.deps:
+        for _, dep in element.deps:
             loader = self._get_loader_for_dep(dep)
             loader._sort_dependencies(dep.name, visited=visited)
 
-        def dependency_cmp(dep_a, dep_b):
+        def dependency_cmp(sdep_a, sdep_b):
+            sysroot_a, dep_a = sdep_a
+            sysroot_b, dep_b = sdep_b
             element_a = self.get_element_for_dep(dep_a)
             element_b = self.get_element_for_dep(dep_b)
 
+            if sysroot_a < sysroot_b:
+                return -1
+            if sysroot_b < sysroot_a:
+                return 1
+
             # Sort on inter element dependency first
             if element_a.depends(element_b):
                 return 1
@@ -471,11 +479,11 @@ class Loader():
         self._meta_elements[element_name] = meta_element
 
         # Descend
-        for dep in element.deps:
+        for sysroot, dep in element.deps:
             loader = self._get_loader_for_dep(dep)
             meta_dep = loader._collect_element(dep.name)
             if dep.dep_type != 'runtime':
-                meta_element.build_dependencies.append(meta_dep)
+                meta_element.build_dependencies.append((sysroot, meta_dep))
             if dep.dep_type != 'build':
                 meta_element.dependencies.append(meta_dep)
 
buildstream/buildelement.py
@@ -213,9 +213,10 @@ class BuildElement(Element):
 
         # Run any integration commands provided by the dependencies
         # once they are all staged and ready
-        with self.timed_activity("Integrating sandbox"):
-            for dep in self.dependencies(Scope.BUILD):
-                dep.integrate(sandbox)
+        with self.timed_activity("Integrating sandbox (buildelement)"):
+            for sysroot, dep in self.dependencies(Scope.BUILD, with_sysroot=True):
+                if sysroot == '/':
+                    dep.integrate(sandbox)
 
         # Stage sources in the build root
         self.stage_sources(sandbox, self.get_variable('build-root'))
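For reference, Element.dependencies() now has two yield shapes, and the loop above relies on the second one: plain elements by default, and (sysroot, element) pairs when with_sysroot=True. A self-contained sketch using stand-in strings instead of Element objects (illustrative only, not BuildStream code):

    def dependencies(with_sysroot=False, sysroot='/'):
        # Stand-ins for resolved Element objects.
        for element in ('base.bst', 'integration.bst'):
            if with_sysroot:
                yield sysroot, element
            else:
                yield element

    print(list(dependencies()))
    # ['base.bst', 'integration.bst']
    print(list(dependencies(with_sysroot=True, sysroot='/sysroot')))
    # [('/sysroot', 'base.bst'), ('/sysroot', 'integration.bst')]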
buildstream/element.py
@@ -375,7 +375,8 @@ class Element(Plugin):
         for source in self.__sources:
             yield source
 
-    def dependencies(self, scope, *, recurse=True, visited=None, recursed=False):
+    def dependencies(self, scope, *, recurse=True, visited=None, recursed=False,
+                     with_sysroot=False, sysroot='/'):
         """dependencies(scope, *, recurse=True)
 
         A generator function which yields the dependencies of the given element.
@@ -400,40 +401,56 @@ class Element(Plugin):
 
         scope_set = set((Scope.BUILD, Scope.RUN)) if scope == Scope.ALL else set((scope,))
 
-        if full_name in visited and scope_set.issubset(visited[full_name]):
+        if with_sysroot:
+            key = (sysroot, full_name)
+        else:
+            key = full_name
+
+        if key in visited and scope_set.issubset(visited[key]):
             return
 
         should_yield = False
-        if full_name not in visited:
-            visited[full_name] = scope_set
+        if key not in visited:
+            visited[key] = scope_set
             should_yield = True
         else:
-            visited[full_name] |= scope_set
+            visited[key] |= scope_set
 
         if recurse or not recursed:
             if scope == Scope.ALL:
-                for dep in self.__build_dependencies:
+                build_deps = []
+                for dep_sysroot, dep in self.__build_dependencies:
+                    new_sysroot = self._subst_string(dep_sysroot) if not recursed else sysroot
                     yield from dep.dependencies(Scope.ALL, recurse=recurse,
-                                                visited=visited, recursed=True)
+                                                visited=visited, recursed=True,
+                                                sysroot=new_sysroot, with_sysroot=with_sysroot)
+                    build_deps.append(dep)
 
                 for dep in self.__runtime_dependencies:
-                    if dep not in self.__build_dependencies:
+                    if dep not in build_deps:
                         yield from dep.dependencies(Scope.ALL, recurse=recurse,
-                                                    visited=visited, recursed=True)
+                                                    visited=visited, recursed=True,
+                                                    sysroot=sysroot, with_sysroot=with_sysroot)
 
             elif scope == Scope.BUILD:
-                for dep in self.__build_dependencies:
+                for dep_sysroot, dep in self.__build_dependencies:
+                    new_sysroot = self._subst_string(dep_sysroot) if not recursed else sysroot
                     yield from dep.dependencies(Scope.RUN, recurse=recurse,
-                                                visited=visited, recursed=True)
+                                                visited=visited, recursed=True,
+                                                sysroot=new_sysroot, with_sysroot=with_sysroot)
 
             elif scope == Scope.RUN:
                 for dep in self.__runtime_dependencies:
                     yield from dep.dependencies(Scope.RUN, recurse=recurse,
-                                                visited=visited, recursed=True)
+                                                visited=visited, recursed=True,
+                                                sysroot=sysroot, with_sysroot=with_sysroot)
 
         # Yeild self only at the end, after anything needed has been traversed
-        if should_yield and (recurse or recursed) and (scope == Scope.ALL or scope == Scope.RUN):
-            yield self
+        if should_yield and (recurse or recursed) and (scope in [Scope.ALL, Scope.RUN]):
+            if with_sysroot:
+                yield sysroot, self
+            else:
+                yield self
 
     def search(self, scope, name):
         """Search for a dependency by name
@@ -632,7 +649,7 @@ class Element(Plugin):
         vbasedir = sandbox.get_virtual_directory()
         vstagedir = vbasedir \
             if path is None \
-            else vbasedir.descend(path.lstrip(os.sep).split(os.sep))
+            else vbasedir.descend(path.lstrip(os.sep).split(os.sep), create=True)
 
         files = list(self.__compute_splits(include, exclude, orphans))
 
@@ -650,7 +667,8 @@ class Element(Plugin):
         return link_result.combine(copy_result)
 
     def stage_dependency_artifacts(self, sandbox, scope, *, path=None,
-                                   include=None, exclude=None, orphans=True):
+                                   include=None, exclude=None, orphans=True,
+                                   build=True):
         """Stage element dependencies in scope
 
         This is primarily a convenience wrapper around
@@ -680,7 +698,14 @@ class Element(Plugin):
             if self.__can_build_incrementally() and workspace.last_successful:
                 old_dep_keys = self.__get_artifact_metadata_dependencies(workspace.last_successful)
 
-        for dep in self.dependencies(scope):
+        def deps():
+            if build:
+                yield from self.dependencies(scope, with_sysroot=True)
+            else:
+                for dep in self.dependencies(scope, with_sysroot=False):
+                    yield '/', dep
+
+        for sysroot, dep in deps():
             # If we are workspaced, and we therefore perform an
             # incremental build, we must ensure that we update the mtimes
             # of any files created by our dependencies since the last
@@ -705,8 +730,13 @@ class Element(Plugin):
                     if utils._is_main_process():
                         self._get_context().get_workspaces().save_config()
 
+            if build:
+                sub_path = os.path.join(path, os.path.relpath(sysroot, '/')) if path else sysroot
+            else:
+                sub_path = path
+
             result = dep.stage_artifact(sandbox,
-                                        path=path,
+                                        path=sub_path,
                                         include=include,
                                         exclude=exclude,
                                         orphans=orphans,
@@ -906,9 +936,9 @@ class Element(Plugin):
         for meta_dep in meta.dependencies:
             dependency = Element._new_from_meta(meta_dep, artifacts)
             element.__runtime_dependencies.append(dependency)
-        for meta_dep in meta.build_dependencies:
+        for sysroot, meta_dep in meta.build_dependencies:
             dependency = Element._new_from_meta(meta_dep, artifacts)
-            element.__build_dependencies.append(dependency)
+            element.__build_dependencies.append((sysroot, dependency))
 
         return element
 
@@ -1088,14 +1118,11 @@ class Element(Plugin):
             # Weak cache key includes names of direct build dependencies
             # but does not include keys of dependencies.
            if self.BST_STRICT_REBUILD:
-                dependencies = [
-                    e._get_cache_key(strength=_KeyStrength.WEAK)
-                    for e in self.dependencies(Scope.BUILD)
-                ]
+                dependencies = [(sysroot, e._get_cache_key(strength=_KeyStrength.WEAK))
+                                for sysroot, e in self.dependencies(Scope.BUILD, with_sysroot=True)]
             else:
-                dependencies = [
-                    e.name for e in self.dependencies(Scope.BUILD, recurse=False)
-                ]
+                dependencies = [(sysroot, e.name)
+                                for sysroot, e in self.dependencies(Scope.BUILD, with_sysroot=True)]
 
             self.__weak_cache_key = self.__calculate_cache_key(dependencies)
 
@@ -1123,9 +1150,8 @@ class Element(Plugin):
             return
 
         if self.__strict_cache_key is None:
-            dependencies = [
-                e.__strict_cache_key for e in self.dependencies(Scope.BUILD)
-            ]
+            dependencies = [(sysroot, e.__strict_cache_key)
+                            for sysroot, e in self.dependencies(Scope.BUILD, with_sysroot=True)]
             self.__strict_cache_key = self.__calculate_cache_key(dependencies)
 
             if self.__strict_cache_key is None:
@@ -1165,10 +1191,8 @@ class Element(Plugin):
             strong_key, _ = self.__get_artifact_metadata_keys()
             self.__cache_key = strong_key
         elif self.__assemble_scheduled or self.__assemble_done:
-            # Artifact will or has been built, not downloaded
-            dependencies = [
-                e._get_cache_key() for e in self.dependencies(Scope.BUILD)
-            ]
+            dependencies = [(sysroot, e._get_cache_key())
+                            for sysroot, e in self.dependencies(Scope.BUILD, with_sysroot=True)]
             self.__cache_key = self.__calculate_cache_key(dependencies)
 
             if self.__cache_key is None:
@@ -1331,7 +1355,7 @@ class Element(Plugin):
         # Stage deps in the sandbox root
         if deps == 'run':
             with self.timed_activity("Staging dependencies", silent_nested=True):
-                self.stage_dependency_artifacts(sandbox, scope)
+                self.stage_dependency_artifacts(sandbox, scope, build=False)
 
             # Run any integration commands provided by the dependencies
             # once they are all staged and ready
@@ -2024,8 +2048,11 @@ class Element(Plugin):
     #
     def __calculate_cache_key(self, dependencies):
         # No cache keys for dependencies which have no cache keys
-        if None in dependencies:
-            return None
+        for dep in dependencies:
+            if dep[1] is None:
+                return None
+        # Do not break cache keys
+        dependencies = [(sysroot, key) if sysroot != '/' else key for sysroot, key in dependencies]
 
         # Generate dict that is used as base for all cache keys
         if self.__cache_key_dict is None:
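A worked example of the cache-key normalisation in __calculate_cache_key above: dependencies staged at '/' keep the bare key shape that was used before this branch, so existing cache keys are not broken, while sysroot'ed dependencies contribute a (sysroot, key) pair. The key values here are invented:

    dependencies = [('/', 'abc123'), ('/sysroot', 'def456')]
    dependencies = [(sysroot, key) if sysroot != '/' else key
                    for sysroot, key in dependencies]
    assert dependencies == ['abc123', ('/sysroot', 'def456')]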
buildstream/plugins/elements/compose.py
@@ -115,15 +115,16 @@ class ComposeElement(Element):
         # Run any integration commands provided by the dependencies
         # once they are all staged and ready
         if self.integration:
-            with self.timed_activity("Integrating sandbox"):
+            with self.timed_activity("Integrating sandbox (compose)"):
                 if require_split:
 
                     # Make a snapshot of all the files before integration-commands are run.
                     snapshot = set(vbasedir.list_relative_paths())
                     vbasedir.mark_unmodified()
 
-                for dep in self.dependencies(Scope.BUILD):
-                    dep.integrate(sandbox)
+                for sysroot, dep in self.dependencies(Scope.BUILD, with_sysroot=True):
+                    if sysroot == '/':
+                        dep.integrate(sandbox)
 
         if require_split:
             # Calculate added, modified and removed files
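A small, self-contained illustration of the staging-path rule added to Element.stage_dependency_artifacts earlier in this commit: when a staging path is given, a dependency's sysroot becomes a subdirectory of that path, while the root sysroot leaves it effectively unchanged. The staging path is hypothetical:

    import os

    path = '/buildstream-install'   # hypothetical staging path passed by a plugin
    for sysroot in ('/', '/sysroot'):
        sub_path = os.path.join(path, os.path.relpath(sysroot, '/')) if path else sysroot
        print(sysroot, '->', sub_path)
    # prints:
    #   / -> /buildstream-install/.
    #   /sysroot -> /buildstream-install/sysroot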
doc/source/format_declaring.rst
@@ -159,6 +159,57 @@ See :ref:`format_dependencies` for more information on the dependency model.
 
 The ``runtime-depends`` configuration is available since :ref:`format version 14 <project_format_version>`
 
+Sysroot'ed dependencies
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Sysroot'ed dependencies are intended for bootstrapping base systems or
+cross-compiling.
+
+.. code:: yaml
+
+   # Specify some sysroot'ed dependencies
+   sysroots:
+   - path: /sysroot
+     depends:
+     - element1.bst
+     - element2.bst
+
+During a build, or when initializing a build shell, sysroot'ed build
+dependencies are staged into the given sysroot path instead of '/',
+together with the runtime dependencies of those sysroot'ed build
+dependencies.
+
+An indirect runtime dependency can end up in several different
+sysroots if it is reached through build dependencies with different
+sysroots; in that case it is staged once per sysroot.
+
+Sysroot paths apply only to build dependencies. Runtime dependencies
+cannot be declared in a sysroot, either with ``type: runtime`` or with
+``runtime-depends``. Dependencies of type ``all`` are allowed, but the
+sysroot only affects their build aspect, not their runtime aspect.
+
+For example:
+
+.. code:: yaml
+
+   sysroots:
+   - path: /sysroot
+     depends:
+     - element.bst
+
+is equivalent to:
+
+.. code:: yaml
+
+   runtime-depends:
+   - element.bst
+   sysroots:
+   - path: /sysroot
+     build-depends:
+     - element.bst
+
+:ref:`Integration commands <public_integration>` are never executed for
+sysroot'ed dependencies.
 
 .. _format_sources:
 
tests/loader/dependencies.py
@@ -27,8 +27,8 @@ def test_two_files(datafiles):
 
     assert(len(element.dependencies) == 1)
     firstdep = element.dependencies[0]
-    assert(isinstance(firstdep, MetaElement))
-    assert(firstdep.kind == 'manual')
+    assert(isinstance(firstdep[1], MetaElement))
+    assert(firstdep[1].kind == 'manual')
 
 
 @pytest.mark.datafiles(DATA_DIR)
@@ -47,22 +47,22 @@ def test_shared_dependency(datafiles):
     # The first specified dependency is 'thefirstdep'
     #
     firstdep = element.dependencies[0]
-    assert(isinstance(firstdep, MetaElement))
-    assert(firstdep.kind == 'manual')
-    assert(len(firstdep.dependencies) == 0)
+    assert(isinstance(firstdep[1], MetaElement))
+    assert(firstdep[1].kind == 'manual')
+    assert(len(firstdep[1].dependencies) == 0)
 
     # The second specified dependency is 'shareddep'
     #
     shareddep = element.dependencies[1]
-    assert(isinstance(shareddep, MetaElement))
-    assert(shareddep.kind == 'shareddep')
-    assert(len(shareddep.dependencies) == 1)
+    assert(isinstance(shareddep[1], MetaElement))
+    assert(shareddep[1].kind == 'shareddep')
+    assert(len(shareddep[1].dependencies) == 1)
 
     # The element which shareddep depends on is
     # the same element in memory as firstdep
     #
-    shareddepdep = shareddep.dependencies[0]
-    assert(isinstance(shareddepdep, MetaElement))
+    shareddepdep = shareddep[1].dependencies[0]
+    assert(isinstance(shareddepdep[1], MetaElement))
 
     # Assert they are in fact the same LoadElement
     #
@@ -71,7 +71,7 @@ def test_shared_dependency(datafiles):
     # equality test with '==' which is one of those operator
     # overridable thingies.
     #
-    assert(shareddepdep is firstdep)
+    assert(shareddepdep[1] is firstdep[1])
 
 
 @pytest.mark.datafiles(DATA_DIR)
@@ -86,8 +86,8 @@ def test_dependency_dict(datafiles):
 
     assert(len(element.dependencies) == 1)
     firstdep = element.dependencies[0]
-    assert(isinstance(firstdep, MetaElement))
-    assert(firstdep.kind == 'manual')
+    assert(isinstance(firstdep[1], MetaElement))
+    assert(firstdep[1].kind == 'manual')
 
 
 @pytest.mark.datafiles(DATA_DIR)
@@ -135,7 +135,7 @@ def test_build_dependency(datafiles):
 
     assert(len(element.build_dependencies) == 1)
     firstdep = element.build_dependencies[0]
-    assert(isinstance(firstdep, MetaElement))
+    assert(isinstance(firstdep[1], MetaElement))
 
     assert(len(element.dependencies) == 0)
 
@@ -151,7 +151,7 @@ def test_runtime_dependency(datafiles):
 
     assert(len(element.dependencies) == 1)
     firstdep = element.dependencies[0]
-    assert(isinstance(firstdep, MetaElement))
+    assert(isinstance(firstdep[1], MetaElement))
 
     assert(len(element.build_dependencies) == 0)
 
@@ -168,9 +168,9 @@ def test_build_runtime_dependency(datafiles):
     assert(len(element.dependencies) == 1)
     assert(len(element.build_dependencies) == 1)
     firstdep = element.dependencies[0]
-    assert(isinstance(firstdep, MetaElement))
+    assert(isinstance(firstdep[1], MetaElement))
     firstbuilddep = element.build_dependencies[0]
-    assert(firstdep == firstbuilddep)
+    assert(firstdep[1] == firstbuilddep[1])
 
 
 @pytest.mark.datafiles(DATA_DIR)
@@ -185,9 +185,9 @@ def test_all_dependency(datafiles):
     assert(len(element.dependencies) == 1)
     assert(len(element.build_dependencies) == 1)
     firstdep = element.dependencies[0]
-    assert(isinstance(firstdep, MetaElement))
+    assert(isinstance(firstdep[1], MetaElement))
     firstbuilddep = element.build_dependencies[0]
-    assert(firstdep == firstbuilddep)
+    assert(firstdep[1] == firstbuilddep[1])
 
 
 @pytest.mark.datafiles(DATA_DIR)

tests/sysroot_depends/project/elements/a.bst (new file)
+kind: import
+sources:
+  - kind: local
+    path: files/a

tests/sysroot_depends/project/elements/b.bst (new file)
+kind: import
+sources:
+  - kind: local
+    path: files/b

tests/sysroot_depends/project/elements/base.bst (new file)
+kind: stack
+depends:
+- base/base-alpine.bst

tests/sysroot_depends/project/elements/base/base-alpine.bst (new file)
+kind: import
+
+description: |
+  Alpine Linux base for tests
+
+  Generated using the `tests/integration-tests/base/generate-base.sh` script.
+
+sources:
+  - kind: tar
+    url: alpine:integration-tests-base.v1.x86_64.tar.xz
+    base-dir: ''
+    ref: 3eb559250ba82b64a68d86d0636a6b127aa5f6d25d3601a79f79214dc9703639

tests/sysroot_depends/project/elements/compose-integration.bst (new file)
+kind: compose
+
+sysroots:
+- path: /sysroot
+  build-depends:
+  - integration.bst

tests/sysroot_depends/project/elements/compose-layers-with-sysroot.bst (new file)
+kind: compose
+
+sysroots:
+- path: /other-sysroot
+  build-depends:
+  - layer2.bst

tests/sysroot_depends/project/elements/compose-layers.bst (new file)
+kind: compose
+
+build-depends:
+- layer2.bst

tests/sysroot_depends/project/elements/integration.bst (new file)
+kind: manual
+
+depends:
+- base.bst
+
+config:
+  install-commands:
+    - echo 0 >"%{install-root}/integrated.txt"
+
+public:
+  bst:
+    integration-commands:
+      - echo 1 >/integrated.txt

tests/sysroot_depends/project/elements/layer1-files.bst (new file)
+kind: import
+sources:
+- kind: local
+  path: files/layer1

tests/sysroot_depends/project/elements/layer1.bst (new file)
+kind: stack
+
+depends:
+- layer1-files.bst

tests/sysroot_depends/project/elements/layer2-files.bst (new file)
+kind: import
+sources:
+- kind: local
+  path: files/layer2

tests/sysroot_depends/project/elements/layer2.bst (new file)
+kind: manual
+
+depends:
+- layer2-files.bst
+
+build-depends:
+- base.bst
+
+sysroots:
+- path: /sysroot
+  depends:
+  - layer1.bst
+
+config:
+  install-commands:
+    - mkdir -p "%{install-root}"
+    - |
+      for file in /*; do
+        if test -f "${file}"; then
+          cp "${file}" "%{install-root}"
+        fi
+      done

tests/sysroot_depends/project/elements/manual-integration-runtime.bst (new file)
+kind: manual
+
+depends:
+- base.bst
+
+sysroots:
+- path: /sysroot
+  depends:
+  - integration.bst
+
+config:
+  install-commands:
+    - mkdir -p "%{install-root}"
+    - echo dummy >"%{install-root}/dummy.txt"

tests/sysroot_depends/project/elements/manual-integration.bst (new file)
+kind: manual
+
+build-depends:
+- base.bst
+
+sysroots:
+- path: /sysroot
+  build-depends:
+  - integration.bst
+
+config:
+  install-commands:
+    - mkdir -p "%{install-root}/sysroot"
+    - if test -f /sysroot/integrated.txt; then cp /sysroot/integrated.txt "%{install-root}/sysroot"; fi
+    - if test -f /integrated.txt; then cp /integrated.txt "%{install-root}"; fi

tests/sysroot_depends/project/elements/target-variable.bst (new file)
+kind: compose
+
+build-depends:
+- a.bst
+
+variables:
+  mydir: test
+
+sysroots:
+- path: "/path/%{mydir}"
+  build-depends:
+  - b.bst

tests/sysroot_depends/project/elements/target.bst (new file)
+kind: compose
+
+build-depends:
+- a.bst
+
+sysroots:
+- path: /sysroot
+  build-depends:
+  - b.bst

tests/sysroot_depends/project/files/a/a.txt (new file)
+test

tests/sysroot_depends/project/files/b/b.txt (new file)
+test

tests/sysroot_depends/project/files/layer1/1 (new file)
+1

tests/sysroot_depends/project/files/layer2/2 (new file)
+2

tests/sysroot_depends/project/project.conf (new file)
+name: test
+element-path: elements
+aliases:
+  alpine: https://bst-integration-test-images.ams3.cdn.digitaloceanspaces.com/

tests/sysroot_depends/sysroot_depends.py (new file)
+import os
+import pytest
+from tests.testutils import cli_integration as cli
+
+
+# Project directory
+DATA_DIR = os.path.join(
+    os.path.dirname(os.path.realpath(__file__)),
+    "project",
+)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_sysroot_dependency_smoke_test(datafiles, cli, tmpdir):
+    "Test simple sysroot use case without integration"
+
+    project = str(datafiles)
+    checkout = os.path.join(tmpdir, 'checkout')
+
+    result = cli.run(project=project,
+                     args=['build', 'target.bst'])
+    result.assert_success()
+
+    result = cli.run(project=project,
+                     args=['checkout', 'target.bst', checkout])
+    result.assert_success()
+    assert os.path.exists(os.path.join(checkout, 'a.txt'))
+    assert os.path.exists(os.path.join(checkout, 'sysroot', 'b.txt'))
+
+
+@pytest.mark.integration
+@pytest.mark.datafiles(DATA_DIR)
+def test_skip_integration_commands_compose(datafiles, cli, tmpdir):
+    "Integration commands are not run on sysroots"
+
+    project = str(datafiles)
+    checkout = os.path.join(tmpdir, 'checkout')
+
+    result = cli.run(project=project,
+                     args=['build', 'compose-integration.bst'])
+    result.assert_success()
+
+    result = cli.run(project=project,
+                     args=['checkout', 'compose-integration.bst', checkout])
+    result.assert_success()
+
+    integrated = os.path.join(checkout, 'sysroot', 'integrated.txt')
+    assert os.path.exists(integrated)
+    with open(integrated, 'r') as f:
+        assert f.read() == '0\n'
+
+
+@pytest.mark.integration
+@pytest.mark.datafiles(DATA_DIR)
+def test_skip_integration_commands_build_element(datafiles, cli, tmpdir):
+    "Integration commands are not run on sysroots"
+
+    project = str(datafiles)
+    checkout = os.path.join(tmpdir, 'checkout')
+
+    result = cli.run(project=project,
+                     args=['build', 'manual-integration.bst'])
+    result.assert_success()
+
+    result = cli.run(project=project,
+                     args=['checkout', 'manual-integration.bst', checkout])
+    result.assert_success()
+
+    sysroot_integrated = os.path.join(checkout, 'sysroot', 'integrated.txt')
+    integrated = os.path.join(checkout, 'integrated.txt')
+    assert os.path.exists(sysroot_integrated)
+    with open(sysroot_integrated, 'r') as f:
+        assert f.read() == '0\n'
+    # We need to make sure that integration command has not been run on / either.
+    assert not os.path.exists(integrated)
+
+
+@pytest.mark.integration
+@pytest.mark.datafiles(DATA_DIR)
+def test_sysroot_only_for_build(cli, tmpdir, datafiles):
+    project = str(datafiles)
+    checkout = os.path.join(tmpdir, 'checkout')
+
+    result = cli.run(project=project,
+                     args=['build', 'compose-layers.bst'])
+    result.assert_success()
+
+    result = cli.run(project=project,
+                     args=['checkout', 'compose-layers.bst', checkout])
+
+    result.assert_success()
+    assert os.path.exists(os.path.join(checkout, '1'))
+    assert os.path.exists(os.path.join(checkout, '2'))
+    assert not os.path.exists(os.path.join(checkout, 'sysroot', '1'))
+
+
+@pytest.mark.integration
+@pytest.mark.datafiles(DATA_DIR)
+def test_sysroot_only_for_build_with_sysroot(cli, tmpdir, datafiles):
+    project = str(datafiles)
+    checkout = os.path.join(tmpdir, 'checkout')
+
+    result = cli.run(project=project,
+                     args=['build', 'compose-layers-with-sysroot.bst'])
+    result.assert_success()
+
+    result = cli.run(project=project,
+                     args=['checkout', 'compose-layers-with-sysroot.bst', checkout])
+
+    result.assert_success()
+    assert os.path.exists(os.path.join(checkout, 'other-sysroot', '1'))
+    assert os.path.exists(os.path.join(checkout, 'other-sysroot', '2'))
+    assert not os.path.exists(os.path.join(checkout, 'sysroot', '1'))
+
+
+@pytest.mark.integration
+@pytest.mark.datafiles(DATA_DIR)
+def test_shell_no_sysroot(cli, tmpdir, datafiles):
+    "bst shell does not have sysroots and dependencies are integrated"
+
+    project = str(datafiles)
+
+    result = cli.run(project=project,
+                     args=['build', 'base.bst', 'manual-integration-runtime.bst'])
+    result.assert_success()
+
+    result = cli.run(project=project,
+                     args=['shell', 'manual-integration-runtime.bst', '--', 'cat', '/integrated.txt'])
+    result.assert_success()
+    assert result.output == '1\n'
+
+    result = cli.run(project=project,
+                     args=['shell', 'manual-integration-runtime.bst', '--', 'ls', '/sysroot/integrated.txt'])
+    assert result.exit_code != 0
+    assert result.output == ''
+
+
+@pytest.mark.integration
+@pytest.mark.datafiles(DATA_DIR)
+def test_shell_build_sysroot(cli, tmpdir, datafiles):
+    "Build shell should stage build dependencies sysroot'ed non integrated"
+
+    project = str(datafiles)
+
+    result = cli.run(project=project,
+                     args=['build', 'base.bst', 'integration.bst'])
+    result.assert_success()
+
+    result = cli.run(project=project,
+                     args=['shell', '-b', 'manual-integration.bst', '--', 'cat', '/sysroot/integrated.txt'])
+    result.assert_success()
+    assert result.output == '0\n'
+
+
+@pytest.mark.integration
+@pytest.mark.datafiles(DATA_DIR)
+def test_show_dependencies_only_once(cli, tmpdir, datafiles):
+    """Dependencies should not show up in status several times when they
+    are staged with multiple sysroots"""
+
+    project = str(datafiles)
+
+    result = cli.run(project=project,
+                     args=['show', '--format', '%{name}', 'manual-integration.bst'])
+    result.assert_success()
+    pipeline = result.output.splitlines()
+    assert pipeline == ['base/base-alpine.bst',
+                        'base.bst',
+                        'integration.bst',
+                        'manual-integration.bst']
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_sysroot_path_subst_variable(datafiles, cli, tmpdir):
+    "Test that variables are expanded in sysroot path"
+
+    project = str(datafiles)
+    checkout = os.path.join(tmpdir, 'checkout')
+
+    result = cli.run(project=project,
+                     args=['build', 'target.bst'])
+    result.assert_success()
+
+    result = cli.run(project=project,
+                     args=['checkout', 'target.bst', checkout])
+    result.assert_success()
+    assert os.path.exists(os.path.join(checkout, 'a.txt'))
+    assert os.path.exists(os.path.join(checkout, '/path/test', 'b.txt'))
|
