Daniel pushed to branch danielsilverstone-ct/bwrap-check-runtime-only at BuildStream / buildstream
Commits:
- d5b396e0 by Phillip Smyth at 2018-10-02T16:40:18Z
- dae842fd by Phillip Smyth at 2018-10-02T17:14:09Z
- 8dc16d3f by Jürg Billeter at 2018-10-03T05:08:21Z
- 66446fc3 by Jürg Billeter at 2018-10-03T05:36:38Z
- 29c19bea by Tristan Van Berkom at 2018-10-03T07:33:48Z
- b645881c by Tristan Van Berkom at 2018-10-03T07:33:48Z
- c9437616 by Tristan Van Berkom at 2018-10-03T08:07:15Z
- 11320fe2 by Tristan Van Berkom at 2018-10-03T09:33:39Z
- 91271964 by Tristan Van Berkom at 2018-10-03T09:59:40Z
- 3bf895d2 by Jonathan Maw at 2018-10-03T11:48:48Z
- e4969807 by Jonathan Maw at 2018-10-03T12:48:07Z
- a0814399 by Tristan Van Berkom at 2018-10-03T13:05:52Z
- 0a1f8e3c by Tristan Van Berkom at 2018-10-03T13:42:20Z
- 11161f99 by Tristan Van Berkom at 2018-10-03T13:44:02Z
- 3e797bb9 by Tristan Van Berkom at 2018-10-03T13:44:02Z
- d9020e43 by Tristan Van Berkom at 2018-10-03T13:44:02Z
- 3e5ff5a9 by Tristan Van Berkom at 2018-10-03T14:09:51Z
- d60800c9 by Daniel Silverstone at 2018-10-03T16:02:09Z
- 67f33221 by Daniel Silverstone at 2018-10-03T16:02:09Z
- f23e6e80 by Daniel Silverstone at 2018-10-03T16:03:00Z
- 13e70232 by Daniel Silverstone at 2018-10-03T16:03:00Z
- a53aa45c by Daniel Silverstone at 2018-10-03T16:03:00Z
14 changed files:
- .gitlab-ci.yml
- buildstream/_artifactcache/cascache.py
- buildstream/_platform/darwin.py
- buildstream/_platform/linux.py
- buildstream/_scheduler/jobs/job.py
- buildstream/_scheduler/queues/queue.py
- buildstream/_scheduler/scheduler.py
- buildstream/_site.py
- buildstream/plugins/sources/git.py
- buildstream/sandbox/_sandboxdummy.py
- buildstream/source.py
- buildstream/utils.py
- setup.py
- tests/frontend/mirror.py
Changes:
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -145,7 +145,8 @@ docs:
   stage: test
   script:
   - export BST_SOURCE_CACHE="$(pwd)/cache/integration-cache/sources"
-  - pip3 install sphinx
+  # Currently sphinx_rtd_theme does not support Sphinx >1.8, this breaks search functionality
+  - pip3 install sphinx==1.7.9
   - pip3 install sphinx-click
   - pip3 install sphinx_rtd_theme
   - cd dist && ./unpack.sh && cd buildstream
--- a/buildstream/_artifactcache/cascache.py
+++ b/buildstream/_artifactcache/cascache.py
@@ -506,7 +506,7 @@ class CASCache(ArtifactCache):
     def set_ref(self, ref, tree):
         refpath = self._refpath(ref)
         os.makedirs(os.path.dirname(refpath), exist_ok=True)
-        with utils.save_file_atomic(refpath, 'wb') as f:
+        with utils.save_file_atomic(refpath, 'wb', tempdir=self.tmpdir) as f:
            f.write(tree.SerializeToString())
 
     # resolve_ref():
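The new tempdir argument matters because the rename step of an atomic save only works within a single filesystem; staging the temporary file on the same mount as the destination avoids a cross-device failure. A minimal sketch of the pattern, with illustrative paths rather than BuildStream's real cache layout:

    import os
    import tempfile

    def save_atomic(filename, data, tempdir=None):
        # Stage the payload in a temporary file, then rename it into place.
        # os.replace() is only atomic when source and destination live on
        # the same filesystem; crossing mounts raises OSError (errno EXDEV).
        if tempdir is None:
            tempdir = os.path.dirname(filename)
        fd, tempname = tempfile.mkstemp(dir=tempdir)
        try:
            with os.fdopen(fd, 'wb') as f:
                f.write(data)
            os.replace(tempname, filename)
        except BaseException:
            os.unlink(tempname)
            raise

    os.makedirs('/tmp/casdemo/refs', exist_ok=True)
    save_atomic('/tmp/casdemo/refs/myref', b'payload', tempdir='/tmp/casdemo')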
--- a/buildstream/_platform/darwin.py
+++ b/buildstream/_platform/darwin.py
@@ -34,6 +34,9 @@ class Darwin(Platform):
         super().__init__()
 
     def create_sandbox(self, *args, **kwargs):
+        kwargs['dummy_reason'] = \
+            "OSXFUSE is not supported and there are no supported sandbox " + \
+            "technologies for OSX at this time"
         return SandboxDummy(*args, **kwargs)
 
     def check_sandbox_config(self, config):
--- a/buildstream/_platform/linux.py
+++ b/buildstream/_platform/linux.py
@@ -37,15 +37,31 @@ class Linux(Platform):
         self._uid = os.geteuid()
         self._gid = os.getegid()
 
+        self._have_fuse = os.path.exists("/dev/fuse")
+        self._bwrap_exists = _site.check_bwrap_version(0, 0, 0)
+        self._have_good_bwrap = _site.check_bwrap_version(0, 1, 2)
+
+        self._local_sandbox_available = self._have_fuse and self._have_good_bwrap
+
         self._die_with_parent_available = _site.check_bwrap_version(0, 1, 8)
 
-        if self._local_sandbox_available():
+        if self._have_fuse and self._have_good_bwrap:
             self._user_ns_available = self._check_user_ns_available()
         else:
             self._user_ns_available = False
 
     def create_sandbox(self, *args, **kwargs):
-        if not self._local_sandbox_available():
+        if not self._local_sandbox_available:
+            reasons = []
+            if not self._have_fuse:
+                reasons.append("FUSE is unavailable")
+            if not self._have_good_bwrap:
+                if self._bwrap_exists:
+                    reasons.append("`bwrap` is too old (bst needs at least 0.1.2)")
+                else:
+                    reasons.append("`bwrap` executable not found")
+
+            kwargs['dummy_reason'] = " and ".join(reasons)
             return SandboxDummy(*args, **kwargs)
         else:
             from ..sandbox._sandboxbwrap import SandboxBwrap
@@ -55,6 +71,10 @@ class Linux(Platform):
             return SandboxBwrap(*args, **kwargs)
 
     def check_sandbox_config(self, config):
+        if not self._local_sandbox_available:
+            # Accept all sandbox configs as it's irrelevant with the dummy sandbox (no Sandbox.run).
+            return True
+
         if self._user_ns_available:
             # User namespace support allows arbitrary build UID/GID settings.
             return True
@@ -66,11 +86,6 @@ class Linux(Platform):
     ################################################
     #              Private Methods                 #
     ################################################
-    def _local_sandbox_available(self):
-        try:
-            return os.path.exists(utils.get_host_tool('bwrap')) and os.path.exists('/dev/fuse')
-        except utils.ProgramNotFoundError:
-            return False
 
     def _check_user_ns_available(self):
         # Here, let's check if bwrap is able to create user namespaces,
--- a/buildstream/_scheduler/jobs/job.py
+++ b/buildstream/_scheduler/jobs/job.py
@@ -119,6 +119,8 @@ class Job():
         self._result = None                    # Return value of child action in the parent
         self._tries = 0                        # Try count, for retryable jobs
         self._skipped_flag = False             # Indicate whether the job was skipped.
+        self._terminated = False               # Whether this job has been explicitly terminated
+
         # If False, a retry will not be attempted regardless of whether _tries is less than _max_retries.
         #
         self._retry_flag = True
@@ -190,6 +192,8 @@ class Job():
         # Terminate the process using multiprocessing API pathway
         self._process.terminate()
 
+        self._terminated = True
+
     # terminate_wait()
     #
     # Wait for terminated jobs to complete
@@ -273,18 +277,22 @@ class Job():
     # running the integration commands).
     #
     # Args:
-    #     (int): The plugin identifier for this task
+    #     task_id (int): The plugin identifier for this task
     #
     def set_task_id(self, task_id):
         self._task_id = task_id
 
     # skipped
     #
+    # This will evaluate to True if the job was skipped
+    # during processing, or if it was forcefully terminated.
+    #
     # Returns:
-    #    bool: True if the job was skipped while processing.
+    #    (bool): Whether the job should appear as skipped
+    #
     @property
     def skipped(self):
-        return self._skipped_flag
+        return self._skipped_flag or self._terminated
 
     #######################################################
     #                  Abstract Methods                   #
--- a/buildstream/_scheduler/queues/queue.py
+++ b/buildstream/_scheduler/queues/queue.py
@@ -326,16 +326,20 @@ class Queue():
                           detail=traceback.format_exc())
             self.failed_elements.append(element)
         else:
-
-            # No exception occured, handle the success/failure state in the normal way
             #
+            # No exception occurred in post processing
+            #
+
+            # All jobs get placed on the done queue for later processing.
             self._done_queue.append(job)
 
-            if success:
-                if not job.skipped:
-                    self.processed_elements.append(element)
-                else:
-                    self.skipped_elements.append(element)
+            # A Job can be skipped whether or not it has failed; we only
+            # want to bookkeep it as processed or failed if it was not skipped.
+            if job.skipped:
+                self.skipped_elements.append(element)
+            elif success:
+                self.processed_elements.append(element)
             else:
                 self.failed_elements.append(element)
 
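Note the branch order above: job.skipped is tested before success, so a job that is both skipped (e.g. terminated) and unsuccessful is bookkept as skipped, not failed. A small truth-table sketch of that classification:

    def classify(skipped, success):
        # Mirrors the branch order in the diff: skipped wins over both outcomes.
        if skipped:
            return 'skipped'
        elif success:
            return 'processed'
        else:
            return 'failed'

    assert classify(skipped=True, success=False) == 'skipped'   # e.g. a terminated job
    assert classify(skipped=False, success=True) == 'processed'
    assert classify(skipped=False, success=False) == 'failed'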
--- a/buildstream/_scheduler/scheduler.py
+++ b/buildstream/_scheduler/scheduler.py
@@ -387,6 +387,15 @@ class Scheduler():
     # A loop registered event callback for keyboard interrupts
     #
     def _interrupt_event(self):
+
+        # FIXME: This should not be needed, but for some reason we receive an
+        #        additional SIGINT event when the user hits ^C a second time
+        #        to inform us that they really intend to terminate; even though
+        #        we have disconnected our handlers at this time.
+        #
+        if self.terminated:
+            return
+
         # Leave this to the frontend to decide, if no
         # interrupt callback was specified, then just terminate.
         if self._interrupt_callback:
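The early return makes the callback idempotent once termination has begun. The same guard pattern in isolation, using a hypothetical plain-signal handler rather than the Scheduler's event loop:

    import signal

    terminated = False

    def on_sigint(signum, frame):
        # Ignore stray SIGINTs that may still be delivered after we have
        # already decided to terminate; a second ^C must not re-enter the logic.
        global terminated
        if terminated:
            return
        terminated = True
        print("terminating jobs...")

    signal.signal(signal.SIGINT, on_sigint)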
--- a/buildstream/_site.py
+++ b/buildstream/_site.py
@@ -78,7 +78,11 @@ def check_bwrap_version(major, minor, patch):
     if not bwrap_path:
         return False
     cmd = [bwrap_path, "--version"]
-    version = str(subprocess.check_output(cmd).split()[1], "utf-8")
+    try:
+        version = str(subprocess.check_output(cmd).split()[1], "utf-8")
+    except subprocess.CalledProcessError:
+        # Failure trying to run bubblewrap
+        return False
     _bwrap_major, _bwrap_minor, _bwrap_patch = map(int, version.split("."))
 
     # Check whether the installed version meets the requirements
--- a/buildstream/plugins/sources/git.py
+++ b/buildstream/plugins/sources/git.py
@@ -184,10 +184,18 @@ class GitMirror(SourceFetcher):
                          cwd=self.mirror)
 
     def fetch(self, alias_override=None):
-        self.ensure(alias_override)
-        if not self.has_ref():
-            self._fetch(alias_override)
-        self.assert_ref()
+        # Resolve the URL for the message
+        resolved_url = self.source.translate_url(self.url,
+                                                 alias_override=alias_override,
+                                                 primary=self.primary)
+
+        with self.source.timed_activity("Fetching from {}"
+                                        .format(resolved_url),
+                                        silent_nested=True):
+            self.ensure(alias_override)
+            if not self.has_ref():
+                self._fetch(alias_override)
+            self.assert_ref()
 
     def has_ref(self):
         if not self.ref:
--- a/buildstream/sandbox/_sandboxdummy.py
+++ b/buildstream/sandbox/_sandboxdummy.py
@@ -23,6 +23,7 @@ from . import Sandbox
 class SandboxDummy(Sandbox):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
+        self._reason = kwargs.get("dummy_reason", "no reason given")
 
     def run(self, command, flags, *, cwd=None, env=None):
 
@@ -37,4 +38,4 @@ class SandboxDummy(Sandbox):
                                "'{}'".format(command[0]),
                                reason='missing-command')
 
-        raise SandboxError("This platform does not support local builds")
+        raise SandboxError("This platform does not support local builds: {}".format(self._reason))
--- a/buildstream/source.py
+++ b/buildstream/source.py
@@ -965,28 +965,48 @@ class Source(Plugin):
     # Tries to call fetch for every mirror, stopping once it succeeds
     def __do_fetch(self, **kwargs):
         project = self._get_project()
-        source_fetchers = self.get_source_fetchers()
+        context = self._get_context()
+
+        # Silence the STATUS messages which might happen as a result
+        # of checking the source fetchers.
+        with context.silence():
+            source_fetchers = self.get_source_fetchers()
 
         # Use the source fetchers if they are provided
         #
         if source_fetchers:
-            for fetcher in source_fetchers:
-                alias = fetcher._get_alias()
-                for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
-                    try:
-                        fetcher.fetch(uri)
-                    # FIXME: Need to consider temporary vs. permanent failures,
-                    #        and how this works with retries.
-                    except BstError as e:
-                        last_error = e
-                        continue
-
-                    # No error, we're done with this fetcher
-                    break
 
-            else:
-                # No break occurred, raise the last detected error
-                raise last_error
+            # Use a contorted loop here, this is to allow us to
+            # silence the messages which can result from consuming
+            # the items of source_fetchers, if it happens to be a generator.
+            #
+            source_fetchers = iter(source_fetchers)
+            try:
+
+                while True:
+
+                    with context.silence():
+                        fetcher = next(source_fetchers)
+
+                    alias = fetcher._get_alias()
+                    for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
+                        try:
+                            fetcher.fetch(uri)
+                        # FIXME: Need to consider temporary vs. permanent failures,
+                        #        and how this works with retries.
+                        except BstError as e:
+                            last_error = e
+                            continue
+
+                        # No error, we're done with this fetcher
+                        break
+
+                    else:
+                        # No break occurred, raise the last detected error
+                        raise last_error
+
+            except StopIteration:
+                pass
 
         # Default codepath is to reinstantiate the Source
         #
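The "contorted loop" exists because get_source_fetchers() may return a generator, and a plain for-loop would execute the generator body (and emit its status messages) outside any silenced region; advancing the iterator explicitly with next() keeps that code inside context.silence(). A self-contained sketch of the idea, with a toy silence() in place of the real context manager:

    from contextlib import contextmanager

    quiet = False

    @contextmanager
    def silence():
        # Toy stand-in for context.silence(): suppress log() while active.
        global quiet
        saved, quiet = quiet, True
        try:
            yield
        finally:
            quiet = saved

    def log(msg):
        if not quiet:
            print(msg)

    def fetchers():
        # The generator body runs between items, so its messages would
        # normally appear while the loop machinery advances it.
        for n in range(2):
            log("status while locating fetcher {}".format(n))
            yield n

    it = iter(fetchers())
    try:
        while True:
            with silence():            # the generator body runs here, silenced
                fetcher = next(it)
            log("fetching with fetcher {}".format(fetcher))  # still audible
    except StopIteration:
        pass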
--- a/buildstream/utils.py
+++ b/buildstream/utils.py
@@ -502,7 +502,7 @@ def get_bst_version():
 
 @contextmanager
 def save_file_atomic(filename, mode='w', *, buffering=-1, encoding=None,
-                     errors=None, newline=None, closefd=True, opener=None):
+                     errors=None, newline=None, closefd=True, opener=None, tempdir=None):
     """Save a file with a temporary name and rename it into place when ready.
 
     This is a context manager which is meant for saving data to files.
@@ -529,8 +529,9 @@ def save_file_atomic(filename, mode='w', *, buffering=-1, encoding=None,
     # https://bugs.python.org/issue8604
 
     assert os.path.isabs(filename), "The utils.save_file_atomic() parameter ``filename`` must be an absolute path"
-    dirname = os.path.dirname(filename)
-    fd, tempname = tempfile.mkstemp(dir=dirname)
+    if tempdir is None:
+        tempdir = os.path.dirname(filename)
+    fd, tempname = tempfile.mkstemp(dir=tempdir)
     os.close(fd)
 
     f = open(tempname, mode=mode, buffering=buffering, encoding=encoding,
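With the new keyword a caller can stage the temporary file on the same mount as the final destination, as CASCache.set_ref() above now does. A usage sketch with illustrative paths:

    import os
    from buildstream import utils

    refpath = '/srv/cas/refs/heads/myref'   # illustrative
    tmpdir = '/srv/cas/tmp'                 # same filesystem as refpath

    os.makedirs(os.path.dirname(refpath), exist_ok=True)
    os.makedirs(tmpdir, exist_ok=True)
    with utils.save_file_atomic(refpath, 'wb', tempdir=tmpdir) as f:
        f.write(b'serialized digest')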
@@ -562,6 +563,9 @@ def save_file_atomic(filename, mode='w', *, buffering=-1, encoding=None,
 #
 # Get the disk usage of a given directory in bytes.
 #
+# This function assumes that files do not inadvertently
+# disappear while this function is running.
+#
 # Arguments:
 #     (str) The path whose size to check.
 #
--- a/setup.py
+++ b/setup.py
@@ -54,12 +54,13 @@ REQUIRED_BWRAP_MINOR = 1
 REQUIRED_BWRAP_PATCH = 2
 
 
-def exit_bwrap(reason):
+def warn_bwrap(reason):
     print(reason +
-          "\nBuildStream requires Bubblewrap (bwrap) for"
-          " sandboxing the build environment. Install it using your package manager"
-          " (usually bwrap or bubblewrap)")
-    sys.exit(1)
+          "\nBuildStream requires Bubblewrap (bwrap {}.{}.{} or better),"
+          " during local builds, for"
+          " sandboxing the build environment.\nInstall it using your package manager"
+          " (usually bwrap or bubblewrap) otherwise you will be limited to"
+          " remote builds only.".format(REQUIRED_BWRAP_MAJOR, REQUIRED_BWRAP_MINOR, REQUIRED_BWRAP_PATCH))
 
 
 def bwrap_too_old(major, minor, patch):
@@ -76,18 +77,19 @@ def bwrap_too_old(major, minor, patch):
     return False
 
 
-def assert_bwrap():
+def check_for_bwrap():
     platform = os.environ.get('BST_FORCE_BACKEND', '') or sys.platform
     if platform.startswith('linux'):
         bwrap_path = shutil.which('bwrap')
         if not bwrap_path:
-            exit_bwrap("Bubblewrap not found")
+            warn_bwrap("Bubblewrap not found")
+            return
 
         version_bytes = subprocess.check_output([bwrap_path, "--version"]).split()[1]
         version_string = str(version_bytes, "utf-8")
         major, minor, patch = map(int, version_string.split("."))
         if bwrap_too_old(major, minor, patch):
-            exit_bwrap("Bubblewrap too old")
+            warn_bwrap("Bubblewrap too old")
 
 
 ###########################################
@@ -126,7 +128,7 @@ bst_install_entry_points = {
 }
 
 if not os.environ.get('BST_ARTIFACTS_ONLY', ''):
-    assert_bwrap()
+    check_for_bwrap()
     bst_install_entry_points['console_scripts'] += [
         'bst = buildstream._frontend:cli'
     ]
--- a/tests/frontend/mirror.py
+++ b/tests/frontend/mirror.py
@@ -139,6 +139,82 @@ def test_mirror_fetch(cli, tmpdir, datafiles, kind):
     result.assert_success()
 
 
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
+@pytest.mark.parametrize("mirror", [("no-mirror"), ("mirror"), ("unrelated-mirror")])
+def test_mirror_fetch_ref_storage(cli, tmpdir, datafiles, ref_storage, mirror):
+    bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
+    dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
+    upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+    mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+    project_dir = os.path.join(str(tmpdir), 'project')
+    os.makedirs(project_dir)
+    element_dir = os.path.join(project_dir, 'elements')
+
+    # Create repo objects of the upstream and mirror
+    upstream_repo = create_repo('tar', upstream_repodir)
+    upstream_ref = upstream_repo.create(bin_files_path)
+    mirror_repo = upstream_repo.copy(mirror_repodir)
+    mirror_ref = upstream_ref
+    upstream_ref = upstream_repo.create(dev_files_path)
+
+    element = {
+        'kind': 'import',
+        'sources': [
+            upstream_repo.source_config(ref=upstream_ref if ref_storage == 'inline' else None)
+        ]
+    }
+    element_name = 'test.bst'
+    element_path = os.path.join(element_dir, element_name)
+    full_repo = element['sources'][0]['url']
+    upstream_map, repo_name = os.path.split(full_repo)
+    alias = 'foo'
+    aliased_repo = alias + ':' + repo_name
+    element['sources'][0]['url'] = aliased_repo
+    full_mirror = mirror_repo.source_config()['url']
+    mirror_map, _ = os.path.split(full_mirror)
+    os.makedirs(element_dir)
+    _yaml.dump(element, element_path)
+
+    if ref_storage == 'project.refs':
+        # Manually set project.refs to avoid caching the repo prematurely
+        project_refs = {'projects': {
+            'test': {
+                element_name: [
+                    {'ref': upstream_ref}
+                ]
+            }
+        }}
+        project_refs_path = os.path.join(project_dir, 'project.refs')
+        _yaml.dump(project_refs, project_refs_path)
+
+    project = {
+        'name': 'test',
+        'element-path': 'elements',
+        'aliases': {
+            alias: upstream_map + "/"
+        },
+        'ref-storage': ref_storage
+    }
+    if mirror != 'no-mirror':
+        mirror_data = [{
+            'name': 'middle-earth',
+            'aliases': {alias: [mirror_map + '/']}
+        }]
+        if mirror == 'unrelated-mirror':
+            mirror_data.insert(0, {
+                'name': 'narnia',
+                'aliases': {'frob': ['http://www.example.com/repo']}
+            })
+        project['mirrors'] = mirror_data
+
+    project_file = os.path.join(project_dir, 'project.conf')
+    _yaml.dump(project, project_file)
+
+    result = cli.run(project=project_dir, args=['fetch', element_name])
+    result.assert_success()
+
+
 @pytest.mark.datafiles(DATA_DIR)
 @pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
 def test_mirror_fetch_upstream_absent(cli, tmpdir, datafiles, kind):