Jonathan Maw pushed to branch jonathan/source-mirror-project-refs at BuildStream / buildstream
Commits:
-
d5b396e0
by Phillip Smyth at 2018-10-02T16:40:18Z
-
dae842fd
by Phillip Smyth at 2018-10-02T17:14:09Z
-
8dc16d3f
by Jürg Billeter at 2018-10-03T05:08:21Z
-
66446fc3
by Jürg Billeter at 2018-10-03T05:36:38Z
-
29c19bea
by Tristan Van Berkom at 2018-10-03T07:33:48Z
-
b645881c
by Tristan Van Berkom at 2018-10-03T07:33:48Z
-
c9437616
by Tristan Van Berkom at 2018-10-03T08:07:15Z
-
11320fe2
by Tristan Van Berkom at 2018-10-03T09:33:39Z
-
91271964
by Tristan Van Berkom at 2018-10-03T09:59:40Z
-
3bf895d2
by Jonathan Maw at 2018-10-03T11:48:48Z
7 changed files:
- .gitlab-ci.yml
- buildstream/_platform/linux.py
- buildstream/_scheduler/jobs/job.py
- buildstream/_scheduler/queues/queue.py
- buildstream/plugins/sources/git.py
- buildstream/source.py
- tests/frontend/mirror.py
Changes:
... | ... | @@ -145,7 +145,8 @@ docs: |
145 | 145 |
stage: test
|
146 | 146 |
script:
|
147 | 147 |
- export BST_SOURCE_CACHE="$(pwd)/cache/integration-cache/sources"
|
148 |
- - pip3 install sphinx
|
|
148 |
+ # Currently sphinx_rtd_theme does not support Sphinx >1.8, this breaks search functionality
|
|
149 |
+ - pip3 install sphinx==1.7.9
|
|
149 | 150 |
- pip3 install sphinx-click
|
150 | 151 |
- pip3 install sphinx_rtd_theme
|
151 | 152 |
- cd dist && ./unpack.sh && cd buildstream
|
... | ... | @@ -55,6 +55,10 @@ class Linux(Platform): |
55 | 55 |
return SandboxBwrap(*args, **kwargs)
|
56 | 56 |
|
57 | 57 |
def check_sandbox_config(self, config):
|
58 |
+ if not self._local_sandbox_available():
|
|
59 |
+ # Accept all sandbox configs as it's irrelevant with the dummy sandbox (no Sandbox.run).
|
|
60 |
+ return True
|
|
61 |
+ |
|
58 | 62 |
if self._user_ns_available:
|
59 | 63 |
# User namespace support allows arbitrary build UID/GID settings.
|
60 | 64 |
return True
|
... | ... | @@ -119,6 +119,8 @@ class Job(): |
119 | 119 |
self._result = None # Return value of child action in the parent
|
120 | 120 |
self._tries = 0 # Try count, for retryable jobs
|
121 | 121 |
self._skipped_flag = False # Indicate whether the job was skipped.
|
122 |
+ self._terminated = False # Whether this job has been explicitly terminated
|
|
123 |
+ |
|
122 | 124 |
# If False, a retry will not be attempted regardless of whether _tries is less than _max_retries.
|
123 | 125 |
#
|
124 | 126 |
self._retry_flag = True
|
... | ... | @@ -190,6 +192,8 @@ class Job(): |
190 | 192 |
# Terminate the process using multiprocessing API pathway
|
191 | 193 |
self._process.terminate()
|
192 | 194 |
|
195 |
+ self._terminated = True
|
|
196 |
+ |
|
193 | 197 |
# terminate_wait()
|
194 | 198 |
#
|
195 | 199 |
# Wait for terminated jobs to complete
|
... | ... | @@ -273,18 +277,22 @@ class Job(): |
273 | 277 |
# running the integration commands).
|
274 | 278 |
#
|
275 | 279 |
# Args:
|
276 |
- # (int): The plugin identifier for this task
|
|
280 |
+ # task_id (int): The plugin identifier for this task
|
|
277 | 281 |
#
|
278 | 282 |
def set_task_id(self, task_id):
|
279 | 283 |
self._task_id = task_id
|
280 | 284 |
|
281 | 285 |
# skipped
|
282 | 286 |
#
|
287 |
+ # This will evaluate to True if the job was skipped
|
|
288 |
+ # during processing, or if it was forcefully terminated.
|
|
289 |
+ #
|
|
283 | 290 |
# Returns:
|
284 |
- # bool: True if the job was skipped while processing.
|
|
291 |
+ # (bool): Whether the job should appear as skipped
|
|
292 |
+ #
|
|
285 | 293 |
@property
|
286 | 294 |
def skipped(self):
|
287 |
- return self._skipped_flag
|
|
295 |
+ return self._skipped_flag or self._terminated
|
|
288 | 296 |
|
289 | 297 |
#######################################################
|
290 | 298 |
# Abstract Methods #
|
... | ... | @@ -326,16 +326,20 @@ class Queue(): |
326 | 326 |
detail=traceback.format_exc())
|
327 | 327 |
self.failed_elements.append(element)
|
328 | 328 |
else:
|
329 |
- |
|
330 |
- # No exception occurred, handle the success/failure state in the normal way
|
|
331 | 329 |
#
|
330 |
+ # No exception occurred in post processing
|
|
331 |
+ #
|
|
332 |
+ |
|
333 |
+ # All jobs get placed on the done queue for later processing.
|
|
332 | 334 |
self._done_queue.append(job)
|
333 | 335 |
|
334 |
- if success:
|
|
335 |
- if not job.skipped:
|
|
336 |
- self.processed_elements.append(element)
|
|
337 |
- else:
|
|
338 |
- self.skipped_elements.append(element)
|
|
336 |
+ # A Job can be skipped whether or not it has failed,
|
|
337 |
+ # we want to only bookkeep them as processed or failed
|
|
338 |
+ # if they are not skipped.
|
|
339 |
+ if job.skipped:
|
|
340 |
+ self.skipped_elements.append(element)
|
|
341 |
+ elif success:
|
|
342 |
+ self.processed_elements.append(element)
|
|
339 | 343 |
else:
|
340 | 344 |
self.failed_elements.append(element)
|
341 | 345 |
|
... | ... | @@ -184,10 +184,18 @@ class GitMirror(SourceFetcher): |
184 | 184 |
cwd=self.mirror)
|
185 | 185 |
|
186 | 186 |
def fetch(self, alias_override=None):
|
187 |
- self.ensure(alias_override)
|
|
188 |
- if not self.has_ref():
|
|
189 |
- self._fetch(alias_override)
|
|
190 |
- self.assert_ref()
|
|
187 |
+ # Resolve the URL for the message
|
|
188 |
+ resolved_url = self.source.translate_url(self.url,
|
|
189 |
+ alias_override=alias_override,
|
|
190 |
+ primary=self.primary)
|
|
191 |
+ |
|
192 |
+ with self.source.timed_activity("Fetching from {}"
|
|
193 |
+ .format(resolved_url),
|
|
194 |
+ silent_nested=True):
|
|
195 |
+ self.ensure(alias_override)
|
|
196 |
+ if not self.has_ref():
|
|
197 |
+ self._fetch(alias_override)
|
|
198 |
+ self.assert_ref()
|
|
191 | 199 |
|
192 | 200 |
def has_ref(self):
|
193 | 201 |
if not self.ref:
|
... | ... | @@ -965,28 +965,48 @@ class Source(Plugin): |
965 | 965 |
# Tries to call fetch for every mirror, stopping once it succeeds
|
966 | 966 |
def __do_fetch(self, **kwargs):
|
967 | 967 |
project = self._get_project()
|
968 |
- source_fetchers = self.get_source_fetchers()
|
|
968 |
+ context = self._get_context()
|
|
969 |
+ |
|
970 |
+ # Silence the STATUS messages which might happen as a result
|
|
971 |
+ # of checking the source fetchers.
|
|
972 |
+ with context.silence():
|
|
973 |
+ source_fetchers = self.get_source_fetchers()
|
|
969 | 974 |
|
970 | 975 |
# Use the source fetchers if they are provided
|
971 | 976 |
#
|
972 | 977 |
if source_fetchers:
|
973 |
- for fetcher in source_fetchers:
|
|
974 |
- alias = fetcher._get_alias()
|
|
975 |
- for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
|
|
976 |
- try:
|
|
977 |
- fetcher.fetch(uri)
|
|
978 |
- # FIXME: Need to consider temporary vs. permanent failures,
|
|
979 |
- # and how this works with retries.
|
|
980 |
- except BstError as e:
|
|
981 |
- last_error = e
|
|
982 |
- continue
|
|
983 |
- |
|
984 |
- # No error, we're done with this fetcher
|
|
985 |
- break
|
|
986 | 978 |
|
987 |
- else:
|
|
988 |
- # No break occurred, raise the last detected error
|
|
989 |
- raise last_error
|
|
979 |
+ # Use a contorted loop here, this is to allow us to
|
|
980 |
+ # silence the messages which can result from consuming
|
|
981 |
+ # the items of source_fetchers, if it happens to be a generator.
|
|
982 |
+ #
|
|
983 |
+ source_fetchers = iter(source_fetchers)
|
|
984 |
+ try:
|
|
985 |
+ |
|
986 |
+ while True:
|
|
987 |
+ |
|
988 |
+ with context.silence():
|
|
989 |
+ fetcher = next(source_fetchers)
|
|
990 |
+ |
|
991 |
+ alias = fetcher._get_alias()
|
|
992 |
+ for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
|
|
993 |
+ try:
|
|
994 |
+ fetcher.fetch(uri)
|
|
995 |
+ # FIXME: Need to consider temporary vs. permanent failures,
|
|
996 |
+ # and how this works with retries.
|
|
997 |
+ except BstError as e:
|
|
998 |
+ last_error = e
|
|
999 |
+ continue
|
|
1000 |
+ |
|
1001 |
+ # No error, we're done with this fetcher
|
|
1002 |
+ break
|
|
1003 |
+ |
|
1004 |
+ else:
|
|
1005 |
+ # No break occurred, raise the last detected error
|
|
1006 |
+ raise last_error
|
|
1007 |
+ |
|
1008 |
+ except StopIteration:
|
|
1009 |
+ pass
|
|
990 | 1010 |
|
991 | 1011 |
# Default codepath is to reinstantiate the Source
|
992 | 1012 |
#
|
... | ... | @@ -139,6 +139,82 @@ def test_mirror_fetch(cli, tmpdir, datafiles, kind): |
139 | 139 |
result.assert_success()
|
140 | 140 |
|
141 | 141 |
|
142 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
143 |
+@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
|
|
144 |
+@pytest.mark.parametrize("mirror", [("no-mirror"), ("mirror"), ("unrelated-mirror")])
|
|
145 |
+def test_mirror_fetch_ref_storage(cli, tmpdir, datafiles, ref_storage, mirror):
|
|
146 |
+ bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
|
|
147 |
+ dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
|
|
148 |
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
|
|
149 |
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
|
|
150 |
+ project_dir = os.path.join(str(tmpdir), 'project')
|
|
151 |
+ os.makedirs(project_dir)
|
|
152 |
+ element_dir = os.path.join(project_dir, 'elements')
|
|
153 |
+ |
|
154 |
+ # Create repo objects of the upstream and mirror
|
|
155 |
+ upstream_repo = create_repo('tar', upstream_repodir)
|
|
156 |
+ upstream_ref = upstream_repo.create(bin_files_path)
|
|
157 |
+ mirror_repo = upstream_repo.copy(mirror_repodir)
|
|
158 |
+ mirror_ref = upstream_ref
|
|
159 |
+ upstream_ref = upstream_repo.create(dev_files_path)
|
|
160 |
+ |
|
161 |
+ element = {
|
|
162 |
+ 'kind': 'import',
|
|
163 |
+ 'sources': [
|
|
164 |
+ upstream_repo.source_config(ref=upstream_ref if ref_storage == 'inline' else None)
|
|
165 |
+ ]
|
|
166 |
+ }
|
|
167 |
+ element_name = 'test.bst'
|
|
168 |
+ element_path = os.path.join(element_dir, element_name)
|
|
169 |
+ full_repo = element['sources'][0]['url']
|
|
170 |
+ upstream_map, repo_name = os.path.split(full_repo)
|
|
171 |
+ alias = 'foo'
|
|
172 |
+ aliased_repo = alias + ':' + repo_name
|
|
173 |
+ element['sources'][0]['url'] = aliased_repo
|
|
174 |
+ full_mirror = mirror_repo.source_config()['url']
|
|
175 |
+ mirror_map, _ = os.path.split(full_mirror)
|
|
176 |
+ os.makedirs(element_dir)
|
|
177 |
+ _yaml.dump(element, element_path)
|
|
178 |
+ |
|
179 |
+ if ref_storage == 'project.refs':
|
|
180 |
+ # Manually set project.refs to avoid caching the repo prematurely
|
|
181 |
+ project_refs = {'projects': {
|
|
182 |
+ 'test': {
|
|
183 |
+ element_name: [
|
|
184 |
+ {'ref': upstream_ref}
|
|
185 |
+ ]
|
|
186 |
+ }
|
|
187 |
+ }}
|
|
188 |
+ project_refs_path = os.path.join(project_dir, 'project.refs')
|
|
189 |
+ _yaml.dump(project_refs, project_refs_path)
|
|
190 |
+ |
|
191 |
+ project = {
|
|
192 |
+ 'name': 'test',
|
|
193 |
+ 'element-path': 'elements',
|
|
194 |
+ 'aliases': {
|
|
195 |
+ alias: upstream_map + "/"
|
|
196 |
+ },
|
|
197 |
+ 'ref-storage': ref_storage
|
|
198 |
+ }
|
|
199 |
+ if mirror != 'no-mirror':
|
|
200 |
+ mirror_data = [{
|
|
201 |
+ 'name': 'middle-earth',
|
|
202 |
+ 'aliases': {alias: [mirror_map + '/']}
|
|
203 |
+ }]
|
|
204 |
+ if mirror == 'unrelated-mirror':
|
|
205 |
+ mirror_data.insert(0, {
|
|
206 |
+ 'name': 'narnia',
|
|
207 |
+ 'aliases': {'frob': ['http://www.example.com/repo']}
|
|
208 |
+ })
|
|
209 |
+ project['mirrors'] = mirror_data
|
|
210 |
+ |
|
211 |
+ project_file = os.path.join(project_dir, 'project.conf')
|
|
212 |
+ _yaml.dump(project, project_file)
|
|
213 |
+ |
|
214 |
+ result = cli.run(project=project_dir, args=['fetch', element_name])
|
|
215 |
+ result.assert_success()
|
|
216 |
+ |
|
217 |
+ |
|
142 | 218 |
@pytest.mark.datafiles(DATA_DIR)
|
143 | 219 |
@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
|
144 | 220 |
def test_mirror_fetch_upstream_absent(cli, tmpdir, datafiles, kind):
|