Tom Pollard pushed to branch tpollard/566 at BuildStream / buildstream
Commits:
- 2b767fe8 by Jürg Billeter at 2018-12-20T10:06:11Z
- 7a102144 by Jürg Billeter at 2018-12-20T10:06:11Z
- b325989e by Jürg Billeter at 2018-12-20T10:07:20Z
- 77d8ad45 by Jürg Billeter at 2018-12-20T10:42:39Z
- 9835b7f1 by Chandan Singh at 2018-12-20T12:50:02Z
- 14da6955 by Chandan Singh at 2018-12-20T13:34:10Z
- 7368f569 by Angelos Evripiotis at 2018-12-20T13:59:23Z
- 46efc91d by Angelos Evripiotis at 2018-12-20T13:59:23Z
- e0c575c4 by Angelos Evripiotis at 2018-12-20T14:37:38Z
- c05d8b4f by Chandan Singh at 2018-12-20T15:53:12Z
- e8055a56 by Chandan Singh at 2018-12-20T15:53:12Z
- cd4889af by Chandan Singh at 2018-12-20T16:34:08Z
- ac995236 by Tom Pollard at 2018-12-20T17:18:02Z
- c3153dea by Tom Pollard at 2018-12-21T10:10:58Z
- 0fb728be by Tom Pollard at 2018-12-21T11:10:43Z
21 changed files:
- NEWS
- buildstream/__init__.py
- buildstream/_artifactcache/artifactcache.py
- buildstream/_artifactcache/cascache.py
- buildstream/_context.py
- buildstream/_frontend/app.py
- + buildstream/_gitsourcebase.py
- buildstream/_project.py
- buildstream/_scheduler/queues/fetchqueue.py
- buildstream/_stream.py
- buildstream/data/userconfig.yaml
- buildstream/element.py
- buildstream/plugins/sources/git.py
- buildstream/utils.py
- tests/frontend/logging.py
- + tests/integration/pushbuildtrees.py
- + tests/sources/no-fetch-cached/files/file
- + tests/sources/no-fetch-cached/plugins/sources/always_cached.py
- + tests/sources/no-fetch-cached/project.conf
- + tests/sources/no_fetch_cached.py
- tests/testutils/runcli.py
Changes:
... | ... | @@ -30,6 +30,12 @@ buildstream 1.3.1 |
30 | 30 |
make changes to their .bst files if they are expecting these environment
|
31 | 31 |
variables to be set.
|
32 | 32 |
|
33 |
+ o BREAKING CHANGE: The 'auto-init' functionality has been removed. This would
|
|
34 |
+ offer to create a project in the event that bst was run against a directory
|
|
35 |
+ without a project, to be friendly to new users. It has been replaced with
|
|
36 |
+ an error message and a hint, to avoid bothering folks who just
|
|
37 |
+ made a mistake.
|
|
38 |
+ |
|
33 | 39 |
o Failed builds are included in the cache as well.
|
34 | 40 |
`bst checkout` will provide anything in `%{install-root}`.
|
35 | 41 |
A build including cached fails will cause any dependant elements
|
... | ... | @@ -67,8 +73,8 @@ buildstream 1.3.1 |
67 | 73 |
instead of just a specially-formatted build-root with a `root` and `scratch`
|
68 | 74 |
subdirectory.
|
69 | 75 |
|
70 |
- o The buildstream.conf file learned new 'prompt.auto-init',
|
|
71 |
- 'prompt.really-workspace-close-remove-dir', and
|
|
76 |
+ o The buildstream.conf file learned new
|
|
77 |
+ 'prompt.really-workspace-close-remove-dir' and
|
|
72 | 78 |
'prompt.really-workspace-reset-hard' options. These allow users to suppress
|
73 | 79 |
certain confirmation prompts, e.g. double-checking that the user meant to
|
74 | 80 |
run the command as typed.
|
... | ... | @@ -34,3 +34,8 @@ if "_BST_COMPLETION" not in os.environ: |
34 | 34 |
from .element import Element, ElementError
|
35 | 35 |
from .buildelement import BuildElement
|
36 | 36 |
from .scriptelement import ScriptElement
|
37 |
+ |
|
38 |
+ # XXX We are exposing a private member here as we expect it to move to a
|
|
39 |
+ # separate package soon. See the following discussion for more details:
|
|
40 |
+ # https://gitlab.com/BuildStream/buildstream/issues/739#note_124819869
|
|
41 |
+ from ._gitsourcebase import _GitSourceBase
|
... | ... | @@ -74,6 +74,7 @@ class ArtifactCache(): |
74 | 74 |
|
75 | 75 |
self._has_fetch_remotes = False
|
76 | 76 |
self._has_push_remotes = False
|
77 |
+ self._has_partial_push_remotes = False
|
|
77 | 78 |
|
78 | 79 |
os.makedirs(self.extractdir, exist_ok=True)
|
79 | 80 |
|
... | ... | @@ -398,6 +399,8 @@ class ArtifactCache(): |
398 | 399 |
self._has_fetch_remotes = True
|
399 | 400 |
if remote_spec.push:
|
400 | 401 |
self._has_push_remotes = True
|
402 |
+ if remote_spec.partial_push:
|
|
403 |
+ self._has_partial_push_remotes = True
|
|
401 | 404 |
|
402 | 405 |
remotes[remote_spec.url] = CASRemote(remote_spec)
|
403 | 406 |
|
... | ... | @@ -596,6 +599,31 @@ class ArtifactCache(): |
596 | 599 |
remotes_for_project = self._remotes[element._get_project()]
|
597 | 600 |
return any(remote.spec.push for remote in remotes_for_project)
|
598 | 601 |
|
602 |
+ # has_partial_push_remotes():
|
|
603 |
+ #
|
|
604 |
+ # Check whether any remote repositories are available for pushing
|
|
605 |
+ # non-complete artifacts
|
|
606 |
+ #
|
|
607 |
+ # Args:
|
|
608 |
+ # element (Element): The Element to check
|
|
609 |
+ #
|
|
610 |
+ # Returns:
|
|
611 |
+ # (bool): True if any remote repository is configured for optional
|
|
612 |
+ # partial pushes, False otherwise
|
|
613 |
+ #
|
|
614 |
+ def has_partial_push_remotes(self, *, element=None):
|
|
615 |
+ # If there are no partial push remotes available, we can't partial push at all
|
|
616 |
+ if not self._has_partial_push_remotes:
|
|
617 |
+ return False
|
|
618 |
+ elif element is None:
|
|
619 |
+ # At least one remote is set to allow partial pushes
|
|
620 |
+ return True
|
|
621 |
+ else:
|
|
622 |
+ # Check whether the specified element's project has push remotes configured
|
|
623 |
+ # to accept partial artifact pushes
|
|
624 |
+ remotes_for_project = self._remotes[element._get_project()]
|
|
625 |
+ return any(remote.spec.partial_push for remote in remotes_for_project)
|
|
626 |
+ |
|
599 | 627 |
# push():
|
600 | 628 |
#
|
601 | 629 |
# Push committed artifact to remote repository.
|
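The new has_partial_push_remotes() check above reduces to a three-way decision. A standalone, runnable sketch with simplified stand-ins (not the actual ArtifactCache class or CASRemote objects):

```python
from collections import namedtuple

Spec = namedtuple('Spec', 'push partial_push')

def has_partial_push_remotes(has_partial_remotes, remotes_by_project, project=None):
    if not has_partial_remotes:
        return False          # nothing anywhere accepts partial pushes
    if project is None:
        return True           # some remote, somewhere, accepts them
    # Only remotes configured for this element's project count
    return any(s.partial_push for s in remotes_by_project.get(project, []))

remotes = {'proj': [Spec(push=True, partial_push=True)]}
assert has_partial_push_remotes(True, remotes, project='proj')
```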
... | ... | @@ -603,6 +631,8 @@ class ArtifactCache(): |
603 | 631 |
# Args:
|
604 | 632 |
# element (Element): The Element whose artifact is to be pushed
|
605 | 633 |
# keys (list): The cache keys to use
|
634 |
+ # partial (bool): Whether the artifact is cached in a partial state
|
|
635 |
+ # subdir (string): Optional subdir to exclude from the push
|
|
606 | 636 |
#
|
607 | 637 |
# Returns:
|
608 | 638 |
# (bool): True if any remote was updated, False if no pushes were required
|
... | ... | @@ -610,12 +640,25 @@ class ArtifactCache(): |
610 | 640 |
# Raises:
|
611 | 641 |
# (ArtifactError): if there was an error
|
612 | 642 |
#
|
613 |
- def push(self, element, keys):
|
|
643 |
+ def push(self, element, keys, partial=False, subdir=None):
|
|
614 | 644 |
refs = [self.get_artifact_fullname(element, key) for key in list(keys)]
|
615 | 645 |
|
616 | 646 |
project = element._get_project()
|
617 | 647 |
|
618 |
- push_remotes = [r for r in self._remotes[project] if r.spec.push]
|
|
648 |
+ push_remotes = []
|
|
649 |
+ partial_remotes = []
|
|
650 |
+ |
|
651 |
+ # Create list of remotes to push to, given current element and partial push config
|
|
652 |
+ if not partial:
|
|
653 |
+ push_remotes = [r for r in self._remotes[project] if (r.spec.push and not r.spec.partial_push)]
|
|
654 |
+ |
|
655 |
+ if self._has_partial_push_remotes:
|
|
656 |
+ # Create a specific list of the remotes expecting the artifact to be pushed in a partial
|
|
657 |
+ # state. This list needs to be pushed in a partial state, without the optional subdir if it
|
|
658 |
+ # exists locally. No need to attempt pushing a partial artifact to a remote that is queued
|
|
659 |
+ # to also receive a full artifact
|
|
660 |
+ partial_remotes = [r for r in self._remotes[project] if (r.spec.partial_push and r.spec.push) and
|
|
661 |
+ r not in push_remotes]
|
|
619 | 662 |
|
620 | 663 |
pushed = False
|
621 | 664 |
|
... | ... | @@ -632,6 +675,19 @@ class ArtifactCache(): |
632 | 675 |
remote.spec.url, element._get_brief_display_key()
|
633 | 676 |
))
|
634 | 677 |
|
678 |
+ for remote in partial_remotes:
|
|
679 |
+ remote.init()
|
|
680 |
+ display_key = element._get_brief_display_key()
|
|
681 |
+ element.status("Pushing partial artifact {} -> {}".format(display_key, remote.spec.url))
|
|
682 |
+ |
|
683 |
+ if self.cas.push(refs, remote, subdir=subdir):
|
|
684 |
+ element.info("Pushed partial artifact {} -> {}".format(display_key, remote.spec.url))
|
|
685 |
+ pushed = True
|
|
686 |
+ else:
|
|
687 |
+ element.info("Remote ({}) already has {} partial cached".format(
|
|
688 |
+ remote.spec.url, element._get_brief_display_key()
|
|
689 |
+ ))
|
|
690 |
+ |
|
635 | 691 |
return pushed
|
636 | 692 |
|
637 | 693 |
# pull():
|
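The partitioning in push() above guarantees a remote never receives both a full and a partial push of the same artifact. A condensed, runnable sketch of the selection rules (Spec is a stand-in for the remote spec, not the real class):

```python
from collections import namedtuple

Spec = namedtuple('Spec', 'push partial_push')
remotes = [Spec(push=True, partial_push=False),   # full pushes only
           Spec(push=True, partial_push=True)]    # accepts partial pushes

partial = False  # we have the complete artifact locally
push_remotes = [] if partial else [r for r in remotes if r.push and not r.partial_push]
partial_remotes = [r for r in remotes
                   if r.push and r.partial_push and r not in push_remotes]

assert push_remotes == [remotes[0]] and partial_remotes == [remotes[1]]
```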
... | ... | @@ -659,14 +715,23 @@ class ArtifactCache(): |
659 | 715 |
element.status("Pulling artifact {} <- {}".format(display_key, remote.spec.url))
|
660 | 716 |
|
661 | 717 |
if self.cas.pull(ref, remote, progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs):
|
662 |
- element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
|
|
663 | 718 |
if subdir:
|
664 |
- # Attempt to extract subdir into artifact extract dir if it already exists
|
|
665 |
- # without containing the subdir. If the respective artifact extract dir does not
|
|
666 |
- # exist a complete extraction will complete.
|
|
667 |
- self.extract(element, key, subdir)
|
|
668 |
- # no need to pull from additional remotes
|
|
669 |
- return True
|
|
719 |
+ if not self.contains_subdir_artifact(element, key, subdir):
|
|
720 |
+ # The pull was expecting the specific subdir to be present; attempt
|
|
721 |
+ # to find it in other available remotes
|
|
722 |
+ element.info("Pulled partial artifact {} <- {}. Attempting to retrieve {} from remotes"
|
|
723 |
+ .format(display_key, remote.spec.url, subdir))
|
|
724 |
+ else:
|
|
725 |
+ element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
|
|
726 |
+ # Attempt to extract subdir into artifact extract dir if it already exists
|
|
727 |
+ # without containing the subdir. If the respective artifact extract dir does not
|
|
728 |
+ # exist a complete extraction will complete.
|
|
729 |
+ self.extract(element, key, subdir)
|
|
730 |
+ # no need to pull from additional remotes
|
|
731 |
+ return True
|
|
732 |
+ else:
|
|
733 |
+ element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
|
|
734 |
+ return True
|
|
670 | 735 |
else:
|
671 | 736 |
element.info("Remote ({}) does not have {} cached".format(
|
672 | 737 |
remote.spec.url, element._get_brief_display_key()
|
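The reworked ArtifactCache.pull() above no longer returns on the first successful remote: when a specific subdir was requested but the fetched artifact turns out not to contain it, the loop keeps trying further remotes. A deliberately simplified, self-contained sketch of that control flow (sets of names stand in for what each remote has cached):

```python
def pull(remote_contents, subdir=None):
    # remote_contents: one set per remote, naming what that remote caches
    for contents in remote_contents:
        if 'artifact' not in contents:
            continue                      # remote doesn't have the ref at all
        if subdir and subdir not in contents:
            continue                      # partial artifact: try the next remote
        return True                       # pulled everything we asked for
    return False

# Only the second remote also carries the requested buildtree subdir
assert pull([{'artifact'}, {'artifact', 'buildtree'}], subdir='buildtree')
```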
... | ... | @@ -45,7 +45,8 @@ from .. import _yaml |
45 | 45 |
_MAX_PAYLOAD_BYTES = 1024 * 1024
|
46 | 46 |
|
47 | 47 |
|
48 |
-class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key client_cert instance_name')):
|
|
48 |
+class CASRemoteSpec(namedtuple('CASRemoteSpec',
|
|
49 |
+ 'url push partial_push server_cert client_key client_cert instance_name')):
|
|
49 | 50 |
|
50 | 51 |
# _new_from_config_node
|
51 | 52 |
#
|
... | ... | @@ -53,9 +54,13 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key |
53 | 54 |
#
|
54 | 55 |
@staticmethod
|
55 | 56 |
def _new_from_config_node(spec_node, basedir=None):
|
56 |
- _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance_name'])
|
|
57 |
+ _yaml.node_validate(spec_node,
|
|
58 |
+ ['url', 'push', 'allow-partial-push', 'server-cert', 'client-key',
|
|
59 |
+ 'client-cert', 'instance_name'])
|
|
57 | 60 |
url = _yaml.node_get(spec_node, str, 'url')
|
58 | 61 |
push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
|
62 |
+ partial_push = _yaml.node_get(spec_node, bool, 'allow-partial-push', default_value=False)
|
|
63 |
+ |
|
59 | 64 |
if not url:
|
60 | 65 |
provenance = _yaml.node_get_provenance(spec_node, 'url')
|
61 | 66 |
raise LoadError(LoadErrorReason.INVALID_DATA,
|
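For context, the new 'allow-partial-push' key sits alongside 'push' in a remote's configuration and defaults to False. A rough sketch of the parsing outcome, with a plain dict standing in for the YAML node (real parsing goes through _yaml.node_get, and the URL is illustrative):

```python
# Hypothetical artifact-remote entry from a user's buildstream.conf
spec_node = {
    'url': 'https://cache.example.com:11001',
    'push': True,
    'allow-partial-push': True,
}

# Mirrors the defaulting above: both flags fall back to False when absent
push = spec_node.get('push', False)
partial_push = spec_node.get('allow-partial-push', False)
assert (push, partial_push) == (True, True)
```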
... | ... | @@ -85,10 +90,10 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key |
85 | 90 |
raise LoadError(LoadErrorReason.INVALID_DATA,
|
86 | 91 |
"{}: 'client-cert' was specified without 'client-key'".format(provenance))
|
87 | 92 |
|
88 |
- return CASRemoteSpec(url, push, server_cert, client_key, client_cert, instance_name)
|
|
93 |
+ return CASRemoteSpec(url, push, partial_push, server_cert, client_key, client_cert, instance_name)
|
|
89 | 94 |
|
90 | 95 |
|
91 |
-CASRemoteSpec.__new__.__defaults__ = (None, None, None, None)
|
|
96 |
+CASRemoteSpec.__new__.__defaults__ = (False, None, None, None, None)
|
|
92 | 97 |
|
93 | 98 |
|
94 | 99 |
class BlobNotFound(CASError):
|
... | ... | @@ -283,34 +288,40 @@ class CASCache(): |
283 | 288 |
# (bool): True if pull was successful, False if ref was not available
|
284 | 289 |
#
|
285 | 290 |
def pull(self, ref, remote, *, progress=None, subdir=None, excluded_subdirs=None):
|
286 |
- try:
|
|
287 |
- remote.init()
|
|
288 | 291 |
|
289 |
- request = buildstream_pb2.GetReferenceRequest(instance_name=remote.spec.instance_name)
|
|
290 |
- request.key = ref
|
|
291 |
- response = remote.ref_storage.GetReference(request)
|
|
292 |
+ while True:
|
|
293 |
+ try:
|
|
294 |
+ remote.init()
|
|
292 | 295 |
|
293 |
- tree = remote_execution_pb2.Digest()
|
|
294 |
- tree.hash = response.digest.hash
|
|
295 |
- tree.size_bytes = response.digest.size_bytes
|
|
296 |
+ request = buildstream_pb2.GetReferenceRequest(instance_name=remote.spec.instance_name)
|
|
297 |
+ request.key = ref
|
|
298 |
+ response = remote.ref_storage.GetReference(request)
|
|
296 | 299 |
|
297 |
- # Check if the element artifact is present, if so just fetch the subdir.
|
|
298 |
- if subdir and os.path.exists(self.objpath(tree)):
|
|
299 |
- self._fetch_subdir(remote, tree, subdir)
|
|
300 |
- else:
|
|
301 |
- # Fetch artifact, excluded_subdirs determined in pullqueue
|
|
302 |
- self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)
|
|
300 |
+ tree = remote_execution_pb2.Digest()
|
|
301 |
+ tree.hash = response.digest.hash
|
|
302 |
+ tree.size_bytes = response.digest.size_bytes
|
|
303 | 303 |
|
304 |
- self.set_ref(ref, tree)
|
|
304 |
+ # Check if the element artifact is present, if so just fetch the subdir.
|
|
305 |
+ if subdir and os.path.exists(self.objpath(tree)):
|
|
306 |
+ self._fetch_subdir(remote, tree, subdir)
|
|
307 |
+ else:
|
|
308 |
+ # Fetch artifact, excluded_subdirs determined in pullqueue
|
|
309 |
+ self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)
|
|
305 | 310 |
|
306 |
- return True
|
|
307 |
- except grpc.RpcError as e:
|
|
308 |
- if e.code() != grpc.StatusCode.NOT_FOUND:
|
|
309 |
- raise CASError("Failed to pull ref {}: {}".format(ref, e)) from e
|
|
310 |
- else:
|
|
311 |
- return False
|
|
312 |
- except BlobNotFound as e:
|
|
313 |
- return False
|
|
311 |
+ self.set_ref(ref, tree)
|
|
312 |
+ |
|
313 |
+ return True
|
|
314 |
+ except grpc.RpcError as e:
|
|
315 |
+ if e.code() != grpc.StatusCode.NOT_FOUND:
|
|
316 |
+ raise CASError("Failed to pull ref {}: {}".format(ref, e)) from e
|
|
317 |
+ else:
|
|
318 |
+ return False
|
|
319 |
+ except BlobNotFound:
|
|
320 |
+ if not excluded_subdirs and subdir:
|
|
321 |
+ # Could not complete a full pull, attempt partial
|
|
322 |
+ excluded_subdirs, subdir = subdir, excluded_subdirs
|
|
323 |
+ else:
|
|
324 |
+ return False
|
|
314 | 325 |
|
315 | 326 |
# pull_tree():
|
316 | 327 |
#
|
... | ... | @@ -355,6 +366,7 @@ class CASCache(): |
355 | 366 |
# Args:
|
356 | 367 |
# refs (list): The refs to push
|
357 | 368 |
# remote (CASRemote): The remote to push to
|
369 |
+ # subdir (string): Optional specific subdir to exempt from the push
|
|
358 | 370 |
#
|
359 | 371 |
# Returns:
|
360 | 372 |
# (bool): True if any remote was updated, False if no pushes were required
|
... | ... | @@ -362,7 +374,7 @@ class CASCache(): |
362 | 374 |
# Raises:
|
363 | 375 |
# (CASError): if there was an error
|
364 | 376 |
#
|
365 |
- def push(self, refs, remote):
|
|
377 |
+ def push(self, refs, remote, subdir=None):
|
|
366 | 378 |
skipped_remote = True
|
367 | 379 |
try:
|
368 | 380 |
for ref in refs:
|
... | ... | @@ -384,7 +396,7 @@ class CASCache(): |
384 | 396 |
# Intentionally re-raise RpcError for outer except block.
|
385 | 397 |
raise
|
386 | 398 |
|
387 |
- self._send_directory(remote, tree)
|
|
399 |
+ self._send_directory(remote, tree, excluded_dir=subdir)
|
|
388 | 400 |
|
389 | 401 |
request = buildstream_pb2.UpdateReferenceRequest(instance_name=remote.spec.instance_name)
|
390 | 402 |
request.keys.append(ref)
|
... | ... | @@ -866,10 +878,17 @@ class CASCache(): |
866 | 878 |
a += 1
|
867 | 879 |
b += 1
|
868 | 880 |
|
869 |
- def _reachable_refs_dir(self, reachable, tree, update_mtime=False):
|
|
881 |
+ def _reachable_refs_dir(self, reachable, tree, update_mtime=False, subdir=False):
|
|
870 | 882 |
if tree.hash in reachable:
|
871 | 883 |
return
|
872 | 884 |
|
885 |
+ # If looping through subdir digests, skip processing if
|
|
886 |
+ # ref path does not exist, allowing for partial objects
|
|
887 |
+ if subdir and not os.path.exists(self.objpath(tree)):
|
|
888 |
+ return
|
|
889 |
+ |
|
890 |
+ # Raises a FileNotFoundError if the path does not exist,
|
|
891 |
+ # which should only happen for the top level digest
|
|
873 | 892 |
if update_mtime:
|
874 | 893 |
os.utime(self.objpath(tree))
|
875 | 894 |
|
... | ... | @@ -886,9 +905,9 @@ class CASCache(): |
886 | 905 |
reachable.add(filenode.digest.hash)
|
887 | 906 |
|
888 | 907 |
for dirnode in directory.directories:
|
889 |
- self._reachable_refs_dir(reachable, dirnode.digest, update_mtime=update_mtime)
|
|
908 |
+ self._reachable_refs_dir(reachable, dirnode.digest, update_mtime=update_mtime, subdir=True)
|
|
890 | 909 |
|
891 |
- def _required_blobs(self, directory_digest):
|
|
910 |
+ def _required_blobs(self, directory_digest, excluded_dir=None):
|
|
892 | 911 |
# parse directory, and recursively add blobs
|
893 | 912 |
d = remote_execution_pb2.Digest()
|
894 | 913 |
d.hash = directory_digest.hash
|
... | ... | @@ -907,7 +926,8 @@ class CASCache(): |
907 | 926 |
yield d
|
908 | 927 |
|
909 | 928 |
for dirnode in directory.directories:
|
910 |
- yield from self._required_blobs(dirnode.digest)
|
|
929 |
+ if dirnode.name != excluded_dir:
|
|
930 |
+ yield from self._required_blobs(dirnode.digest)
|
|
911 | 931 |
|
912 | 932 |
def _fetch_blob(self, remote, digest, stream):
|
913 | 933 |
resource_name_components = ['blobs', digest.hash, str(digest.size_bytes)]
|
... | ... | @@ -1029,6 +1049,7 @@ class CASCache(): |
1029 | 1049 |
objpath = self._ensure_blob(remote, dir_digest)
|
1030 | 1050 |
|
1031 | 1051 |
directory = remote_execution_pb2.Directory()
|
1052 |
+ |
|
1032 | 1053 |
with open(objpath, 'rb') as f:
|
1033 | 1054 |
directory.ParseFromString(f.read())
|
1034 | 1055 |
|
... | ... | @@ -1104,9 +1125,8 @@ class CASCache(): |
1104 | 1125 |
|
1105 | 1126 |
assert response.committed_size == digest.size_bytes
|
1106 | 1127 |
|
1107 |
- def _send_directory(self, remote, digest, u_uid=uuid.uuid4()):
|
|
1108 |
- required_blobs = self._required_blobs(digest)
|
|
1109 |
- |
|
1128 |
+ def _send_directory(self, remote, digest, u_uid=uuid.uuid4(), excluded_dir=None):
|
|
1129 |
+ required_blobs = self._required_blobs(digest, excluded_dir=excluded_dir)
|
|
1110 | 1130 |
missing_blobs = dict()
|
1111 | 1131 |
# Limit size of FindMissingBlobs request
|
1112 | 1132 |
for required_blobs_group in _grouper(required_blobs, 512):
|
... | ... | @@ -117,10 +117,6 @@ class Context(): |
117 | 117 |
# Whether or not to attempt to pull build trees globally
|
118 | 118 |
self.pull_buildtrees = None
|
119 | 119 |
|
120 |
- # Boolean, whether to offer to create a project for the user, if we are
|
|
121 |
- # invoked outside of a directory where we can resolve the project.
|
|
122 |
- self.prompt_auto_init = None
|
|
123 |
- |
|
124 | 120 |
# Boolean, whether we double-check with the user that they meant to
|
125 | 121 |
# remove a workspace directory.
|
126 | 122 |
self.prompt_workspace_close_remove_dir = None
|
... | ... | @@ -258,12 +254,10 @@ class Context(): |
258 | 254 |
prompt = _yaml.node_get(
|
259 | 255 |
defaults, Mapping, 'prompt')
|
260 | 256 |
_yaml.node_validate(prompt, [
|
261 |
- 'auto-init', 'really-workspace-close-remove-dir',
|
|
257 |
+ 'really-workspace-close-remove-dir',
|
|
262 | 258 |
'really-workspace-close-project-inaccessible',
|
263 | 259 |
'really-workspace-reset-hard',
|
264 | 260 |
])
|
265 |
- self.prompt_auto_init = _node_get_option_str(
|
|
266 |
- prompt, 'auto-init', ['ask', 'no']) == 'ask'
|
|
267 | 261 |
self.prompt_workspace_close_remove_dir = _node_get_option_str(
|
268 | 262 |
prompt, 'really-workspace-close-remove-dir', ['ask', 'yes']) == 'ask'
|
269 | 263 |
self.prompt_workspace_close_project_inaccessible = _node_get_option_str(
|
... | ... | @@ -219,13 +219,13 @@ class App(): |
219 | 219 |
default_mirror=self._main_options.get('default_mirror'))
|
220 | 220 |
except LoadError as e:
|
221 | 221 |
|
222 |
- # Let's automatically start a `bst init` session in this case
|
|
223 |
- if e.reason == LoadErrorReason.MISSING_PROJECT_CONF and self.interactive:
|
|
224 |
- click.echo("A project was not detected in the directory: {}".format(directory), err=True)
|
|
225 |
- if self.context.prompt_auto_init:
|
|
226 |
- click.echo("", err=True)
|
|
227 |
- if click.confirm("Would you like to create a new project here?"):
|
|
228 |
- self.init_project(None)
|
|
222 |
+ # Help users who are new to BuildStream by suggesting 'init'.
|
|
223 |
+ # We don't want to slow down users that just made a mistake, so
|
|
224 |
+ # don't stop them with an offer to create a project for them.
|
|
225 |
+ if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
|
|
226 |
+ click.echo("No project found. You can create a new project like so:", err=True)
|
|
227 |
+ click.echo("", err=True)
|
|
228 |
+ click.echo(" bst init", err=True)
|
|
229 | 229 |
|
230 | 230 |
self._error_exit(e, "Error loading project")
|
231 | 231 |
|
1 |
+#
|
|
2 |
+# Copyright (C) 2016 Codethink Limited
|
|
3 |
+# Copyright (C) 2018 Bloomberg Finance LP
|
|
4 |
+#
|
|
5 |
+# This program is free software; you can redistribute it and/or
|
|
6 |
+# modify it under the terms of the GNU Lesser General Public
|
|
7 |
+# License as published by the Free Software Foundation; either
|
|
8 |
+# version 2 of the License, or (at your option) any later version.
|
|
9 |
+#
|
|
10 |
+# This library is distributed in the hope that it will be useful,
|
|
11 |
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
12 |
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
13 |
+# Lesser General Public License for more details.
|
|
14 |
+#
|
|
15 |
+# You should have received a copy of the GNU Lesser General Public
|
|
16 |
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
|
|
17 |
+#
|
|
18 |
+# Authors:
|
|
19 |
+# Tristan Van Berkom <tristan vanberkom codethink co uk>
|
|
20 |
+# Chandan Singh <csingh43 bloomberg net>
|
|
21 |
+ |
|
22 |
+"""Abstract base class for source implementations that work with a Git repository"""
|
|
23 |
+ |
|
24 |
+import os
|
|
25 |
+import re
|
|
26 |
+import shutil
|
|
27 |
+from collections.abc import Mapping
|
|
28 |
+from io import StringIO
|
|
29 |
+from tempfile import TemporaryFile
|
|
30 |
+ |
|
31 |
+from configparser import RawConfigParser
|
|
32 |
+ |
|
33 |
+from buildstream import Source, SourceError, Consistency, SourceFetcher, CoreWarnings
|
|
34 |
+from buildstream import utils
|
|
35 |
+from buildstream.utils import move_atomic, DirectoryExistsError
|
|
36 |
+ |
|
37 |
+GIT_MODULES = '.gitmodules'
|
|
38 |
+ |
|
39 |
+# Warnings
|
|
40 |
+WARN_INCONSISTENT_SUBMODULE = "inconsistent-submodule"
|
|
41 |
+WARN_UNLISTED_SUBMODULE = "unlisted-submodule"
|
|
42 |
+WARN_INVALID_SUBMODULE = "invalid-submodule"
|
|
43 |
+ |
|
44 |
+ |
|
45 |
+# Because of handling of submodules, we maintain a GitMirror
|
|
46 |
+# for the primary git source and also for each submodule it
|
|
47 |
+# might have at a given time
|
|
48 |
+#
|
|
49 |
+class GitMirror(SourceFetcher):
|
|
50 |
+ |
|
51 |
+ def __init__(self, source, path, url, ref, *, primary=False, tags=[]):
|
|
52 |
+ |
|
53 |
+ super().__init__()
|
|
54 |
+ self.source = source
|
|
55 |
+ self.path = path
|
|
56 |
+ self.url = url
|
|
57 |
+ self.ref = ref
|
|
58 |
+ self.tags = tags
|
|
59 |
+ self.primary = primary
|
|
60 |
+ self.mirror = os.path.join(source.get_mirror_directory(), utils.url_directory_name(url))
|
|
61 |
+ self.mark_download_url(url)
|
|
62 |
+ |
|
63 |
+ # Ensures that the mirror exists
|
|
64 |
+ def ensure(self, alias_override=None):
|
|
65 |
+ |
|
66 |
+ # Unfortunately, git does not know how to clone only a specific ref,
|
|
67 |
+ # so we have to download all of those gigs even if we only need a couple
|
|
68 |
+ # of bytes.
|
|
69 |
+ if not os.path.exists(self.mirror):
|
|
70 |
+ |
|
71 |
+ # Do the initial clone in a tmpdir just because we want an atomic move
|
|
72 |
+ # after a long-standing clone which could fail over time; for now do
|
|
73 |
+ # this directly in our git directory, eliminating the chances that the
|
|
74 |
+ # system configured tmpdir is not on the same partition.
|
|
75 |
+ #
|
|
76 |
+ with self.source.tempdir() as tmpdir:
|
|
77 |
+ url = self.source.translate_url(self.url, alias_override=alias_override,
|
|
78 |
+ primary=self.primary)
|
|
79 |
+ self.source.call([self.source.host_git, 'clone', '--mirror', '-n', url, tmpdir],
|
|
80 |
+ fail="Failed to clone git repository {}".format(url),
|
|
81 |
+ fail_temporarily=True)
|
|
82 |
+ |
|
83 |
+ try:
|
|
84 |
+ move_atomic(tmpdir, self.mirror)
|
|
85 |
+ except DirectoryExistsError:
|
|
86 |
+ # Another process was quicker to download this repository.
|
|
87 |
+ # Let's discard our own
|
|
88 |
+ self.source.status("{}: Discarding duplicate clone of {}"
|
|
89 |
+ .format(self.source, url))
|
|
90 |
+ except OSError as e:
|
|
91 |
+ raise SourceError("{}: Failed to move cloned git repository {} from '{}' to '{}': {}"
|
|
92 |
+ .format(self.source, url, tmpdir, self.mirror, e)) from e
|
|
93 |
+ |
|
94 |
+ def _fetch(self, alias_override=None):
|
|
95 |
+ url = self.source.translate_url(self.url,
|
|
96 |
+ alias_override=alias_override,
|
|
97 |
+ primary=self.primary)
|
|
98 |
+ |
|
99 |
+ if alias_override:
|
|
100 |
+ remote_name = utils.url_directory_name(alias_override)
|
|
101 |
+ _, remotes = self.source.check_output(
|
|
102 |
+ [self.source.host_git, 'remote'],
|
|
103 |
+ fail="Failed to retrieve list of remotes in {}".format(self.mirror),
|
|
104 |
+ cwd=self.mirror
|
|
105 |
+ )
|
|
106 |
+ if remote_name not in remotes:
|
|
107 |
+ self.source.call(
|
|
108 |
+ [self.source.host_git, 'remote', 'add', remote_name, url],
|
|
109 |
+ fail="Failed to add remote {} with url {}".format(remote_name, url),
|
|
110 |
+ cwd=self.mirror
|
|
111 |
+ )
|
|
112 |
+ else:
|
|
113 |
+ remote_name = "origin"
|
|
114 |
+ |
|
115 |
+ self.source.call([self.source.host_git, 'fetch', remote_name, '--prune', '--force', '--tags'],
|
|
116 |
+ fail="Failed to fetch from remote git repository: {}".format(url),
|
|
117 |
+ fail_temporarily=True,
|
|
118 |
+ cwd=self.mirror)
|
|
119 |
+ |
|
120 |
+ def fetch(self, alias_override=None):
|
|
121 |
+ # Resolve the URL for the message
|
|
122 |
+ resolved_url = self.source.translate_url(self.url,
|
|
123 |
+ alias_override=alias_override,
|
|
124 |
+ primary=self.primary)
|
|
125 |
+ |
|
126 |
+ with self.source.timed_activity("Fetching from {}"
|
|
127 |
+ .format(resolved_url),
|
|
128 |
+ silent_nested=True):
|
|
129 |
+ self.ensure(alias_override)
|
|
130 |
+ if not self.has_ref():
|
|
131 |
+ self._fetch(alias_override)
|
|
132 |
+ self.assert_ref()
|
|
133 |
+ |
|
134 |
+ def has_ref(self):
|
|
135 |
+ if not self.ref:
|
|
136 |
+ return False
|
|
137 |
+ |
|
138 |
+ # If the mirror doesn't exist, we also don't have the ref
|
|
139 |
+ if not os.path.exists(self.mirror):
|
|
140 |
+ return False
|
|
141 |
+ |
|
142 |
+ # Check if the ref is really there
|
|
143 |
+ rc = self.source.call([self.source.host_git, 'cat-file', '-t', self.ref], cwd=self.mirror)
|
|
144 |
+ return rc == 0
|
|
145 |
+ |
|
146 |
+ def assert_ref(self):
|
|
147 |
+ if not self.has_ref():
|
|
148 |
+ raise SourceError("{}: expected ref '{}' was not found in git repository: '{}'"
|
|
149 |
+ .format(self.source, self.ref, self.url))
|
|
150 |
+ |
|
151 |
+ def latest_commit_with_tags(self, tracking, track_tags=False):
|
|
152 |
+ _, output = self.source.check_output(
|
|
153 |
+ [self.source.host_git, 'rev-parse', tracking],
|
|
154 |
+ fail="Unable to find commit for specified branch name '{}'".format(tracking),
|
|
155 |
+ cwd=self.mirror)
|
|
156 |
+ ref = output.rstrip('\n')
|
|
157 |
+ |
|
158 |
+ if self.source.ref_format == 'git-describe':
|
|
159 |
+ # Prefix the ref with the closest tag, if available,
|
|
160 |
+ # to make the ref human readable
|
|
161 |
+ exit_code, output = self.source.check_output(
|
|
162 |
+ [self.source.host_git, 'describe', '--tags', '--abbrev=40', '--long', ref],
|
|
163 |
+ cwd=self.mirror)
|
|
164 |
+ if exit_code == 0:
|
|
165 |
+ ref = output.rstrip('\n')
|
|
166 |
+ |
|
167 |
+ if not track_tags:
|
|
168 |
+ return ref, []
|
|
169 |
+ |
|
170 |
+ tags = set()
|
|
171 |
+ for options in [[], ['--first-parent'], ['--tags'], ['--tags', '--first-parent']]:
|
|
172 |
+ exit_code, output = self.source.check_output(
|
|
173 |
+ [self.source.host_git, 'describe', '--abbrev=0', ref] + options,
|
|
174 |
+ cwd=self.mirror)
|
|
175 |
+ if exit_code == 0:
|
|
176 |
+ tag = output.strip()
|
|
177 |
+ _, commit_ref = self.source.check_output(
|
|
178 |
+ [self.source.host_git, 'rev-parse', tag + '^{commit}'],
|
|
179 |
+ fail="Unable to resolve tag '{}'".format(tag),
|
|
180 |
+ cwd=self.mirror)
|
|
181 |
+ exit_code = self.source.call(
|
|
182 |
+ [self.source.host_git, 'cat-file', 'tag', tag],
|
|
183 |
+ cwd=self.mirror)
|
|
184 |
+ annotated = (exit_code == 0)
|
|
185 |
+ |
|
186 |
+ tags.add((tag, commit_ref.strip(), annotated))
|
|
187 |
+ |
|
188 |
+ return ref, list(tags)
|
|
189 |
+ |
|
190 |
+ def stage(self, directory):
|
|
191 |
+ fullpath = os.path.join(directory, self.path)
|
|
192 |
+ |
|
193 |
+ # Using --shared here avoids copying the objects into the checkout, in any
|
|
194 |
+ # case we're just checking out a specific commit and then removing the .git/
|
|
195 |
+ # directory.
|
|
196 |
+ self.source.call([self.source.host_git, 'clone', '--no-checkout', '--shared', self.mirror, fullpath],
|
|
197 |
+ fail="Failed to create git mirror {} in directory: {}".format(self.mirror, fullpath),
|
|
198 |
+ fail_temporarily=True)
|
|
199 |
+ |
|
200 |
+ self.source.call([self.source.host_git, 'checkout', '--force', self.ref],
|
|
201 |
+ fail="Failed to checkout git ref {}".format(self.ref),
|
|
202 |
+ cwd=fullpath)
|
|
203 |
+ |
|
204 |
+ # Remove .git dir
|
|
205 |
+ shutil.rmtree(os.path.join(fullpath, ".git"))
|
|
206 |
+ |
|
207 |
+ self._rebuild_git(fullpath)
|
|
208 |
+ |
|
209 |
+ def init_workspace(self, directory):
|
|
210 |
+ fullpath = os.path.join(directory, self.path)
|
|
211 |
+ url = self.source.translate_url(self.url)
|
|
212 |
+ |
|
213 |
+ self.source.call([self.source.host_git, 'clone', '--no-checkout', self.mirror, fullpath],
|
|
214 |
+ fail="Failed to clone git mirror {} in directory: {}".format(self.mirror, fullpath),
|
|
215 |
+ fail_temporarily=True)
|
|
216 |
+ |
|
217 |
+ self.source.call([self.source.host_git, 'remote', 'set-url', 'origin', url],
|
|
218 |
+ fail='Failed to add remote origin "{}"'.format(url),
|
|
219 |
+ cwd=fullpath)
|
|
220 |
+ |
|
221 |
+ self.source.call([self.source.host_git, 'checkout', '--force', self.ref],
|
|
222 |
+ fail="Failed to checkout git ref {}".format(self.ref),
|
|
223 |
+ cwd=fullpath)
|
|
224 |
+ |
|
225 |
+ # List the submodules (path/url tuples) present at the given ref of this repo
|
|
226 |
+ def submodule_list(self):
|
|
227 |
+ modules = "{}:{}".format(self.ref, GIT_MODULES)
|
|
228 |
+ exit_code, output = self.source.check_output(
|
|
229 |
+ [self.source.host_git, 'show', modules], cwd=self.mirror)
|
|
230 |
+ |
|
231 |
+ # If git show reports error code 128 here, we take it to mean there is
|
|
232 |
+ # no .gitmodules file to display for the given revision.
|
|
233 |
+ if exit_code == 128:
|
|
234 |
+ return
|
|
235 |
+ elif exit_code != 0:
|
|
236 |
+ raise SourceError(
|
|
237 |
+ "{plugin}: Failed to show gitmodules at ref {ref}".format(
|
|
238 |
+ plugin=self, ref=self.ref))
|
|
239 |
+ |
|
240 |
+ content = '\n'.join([l.strip() for l in output.splitlines()])
|
|
241 |
+ |
|
242 |
+ io = StringIO(content)
|
|
243 |
+ parser = RawConfigParser()
|
|
244 |
+ parser.read_file(io)
|
|
245 |
+ |
|
246 |
+ for section in parser.sections():
|
|
247 |
+ # validate section name against the 'submodule "foo"' pattern
|
|
248 |
+ if re.match(r'submodule "(.*)"', section):
|
|
249 |
+ path = parser.get(section, 'path')
|
|
250 |
+ url = parser.get(section, 'url')
|
|
251 |
+ |
|
252 |
+ yield (path, url)
|
|
253 |
+ |
|
254 |
+ # Return the ref which this mirror requires its submodule to have,
|
|
255 |
+ # at the given ref of this mirror.
|
|
256 |
+ def submodule_ref(self, submodule, ref=None):
|
|
257 |
+ if not ref:
|
|
258 |
+ ref = self.ref
|
|
259 |
+ |
|
260 |
+ # list objects in the parent repo tree to find the commit
|
|
261 |
+ # object that corresponds to the submodule
|
|
262 |
+ _, output = self.source.check_output([self.source.host_git, 'ls-tree', ref, submodule],
|
|
263 |
+ fail="ls-tree failed for commit {} and submodule: {}".format(
|
|
264 |
+ ref, submodule),
|
|
265 |
+ cwd=self.mirror)
|
|
266 |
+ |
|
267 |
+ # read the commit hash from the output
|
|
268 |
+ fields = output.split()
|
|
269 |
+ if len(fields) >= 2 and fields[1] == 'commit':
|
|
270 |
+ submodule_commit = output.split()[2]
|
|
271 |
+ |
|
272 |
+ # fail if the commit hash is invalid
|
|
273 |
+ if len(submodule_commit) != 40:
|
|
274 |
+ raise SourceError("{}: Error reading commit information for submodule '{}'"
|
|
275 |
+ .format(self.source, submodule))
|
|
276 |
+ |
|
277 |
+ return submodule_commit
|
|
278 |
+ |
|
279 |
+ else:
|
|
280 |
+ detail = "The submodule '{}' is defined either in the BuildStream source\n".format(submodule) + \
|
|
281 |
+ "definition, or in a .gitmodules file. But the submodule was never added to the\n" + \
|
|
282 |
+ "underlying git repository with `git submodule add`."
|
|
283 |
+ |
|
284 |
+ self.source.warn("{}: Ignoring inconsistent submodule '{}'"
|
|
285 |
+ .format(self.source, submodule), detail=detail,
|
|
286 |
+ warning_token=WARN_INCONSISTENT_SUBMODULE)
|
|
287 |
+ |
|
288 |
+ return None
|
|
289 |
+ |
|
290 |
+ def _rebuild_git(self, fullpath):
|
|
291 |
+ if not self.tags:
|
|
292 |
+ return
|
|
293 |
+ |
|
294 |
+ with self.source.tempdir() as tmpdir:
|
|
295 |
+ included = set()
|
|
296 |
+ shallow = set()
|
|
297 |
+ for _, commit_ref, _ in self.tags:
|
|
298 |
+ |
|
299 |
+ _, out = self.source.check_output([self.source.host_git, 'rev-list',
|
|
300 |
+ '--boundary', '{}..{}'.format(commit_ref, self.ref)],
|
|
301 |
+ fail="Failed to get git history {}..{} in directory: {}"
|
|
302 |
+ .format(commit_ref, self.ref, fullpath),
|
|
303 |
+ fail_temporarily=True,
|
|
304 |
+ cwd=self.mirror)
|
|
305 |
+ for line in out.splitlines():
|
|
306 |
+ rev = line.lstrip('-')
|
|
307 |
+ if line[0] == '-':
|
|
308 |
+ shallow.add(rev)
|
|
309 |
+ else:
|
|
310 |
+ included.add(rev)
|
|
311 |
+ |
|
312 |
+ shallow -= included
|
|
313 |
+ included |= shallow
|
|
314 |
+ |
|
315 |
+ self.source.call([self.source.host_git, 'init'],
|
|
316 |
+ fail="Cannot initialize git repository: {}".format(fullpath),
|
|
317 |
+ cwd=fullpath)
|
|
318 |
+ |
|
319 |
+ for rev in included:
|
|
320 |
+ with TemporaryFile(dir=tmpdir) as commit_file:
|
|
321 |
+ self.source.call([self.source.host_git, 'cat-file', 'commit', rev],
|
|
322 |
+ stdout=commit_file,
|
|
323 |
+ fail="Failed to get commit {}".format(rev),
|
|
324 |
+ cwd=self.mirror)
|
|
325 |
+ commit_file.seek(0, 0)
|
|
326 |
+ self.source.call([self.source.host_git, 'hash-object', '-w', '-t', 'commit', '--stdin'],
|
|
327 |
+ stdin=commit_file,
|
|
328 |
+ fail="Failed to add commit object {}".format(rev),
|
|
329 |
+ cwd=fullpath)
|
|
330 |
+ |
|
331 |
+ with open(os.path.join(fullpath, '.git', 'shallow'), 'w') as shallow_file:
|
|
332 |
+ for rev in shallow:
|
|
333 |
+ shallow_file.write('{}\n'.format(rev))
|
|
334 |
+ |
|
335 |
+ for tag, commit_ref, annotated in self.tags:
|
|
336 |
+ if annotated:
|
|
337 |
+ with TemporaryFile(dir=tmpdir) as tag_file:
|
|
338 |
+ tag_data = 'object {}\ntype commit\ntag {}\n'.format(commit_ref, tag)
|
|
339 |
+ tag_file.write(tag_data.encode('ascii'))
|
|
340 |
+ tag_file.seek(0, 0)
|
|
341 |
+ _, tag_ref = self.source.check_output(
|
|
342 |
+ [self.source.host_git, 'hash-object', '-w', '-t',
|
|
343 |
+ 'tag', '--stdin'],
|
|
344 |
+ stdin=tag_file,
|
|
345 |
+ fail="Failed to add tag object {}".format(tag),
|
|
346 |
+ cwd=fullpath)
|
|
347 |
+ |
|
348 |
+ self.source.call([self.source.host_git, 'tag', tag, tag_ref.strip()],
|
|
349 |
+ fail="Failed to tag: {}".format(tag),
|
|
350 |
+ cwd=fullpath)
|
|
351 |
+ else:
|
|
352 |
+ self.source.call([self.source.host_git, 'tag', tag, commit_ref],
|
|
353 |
+ fail="Failed to tag: {}".format(tag),
|
|
354 |
+ cwd=fullpath)
|
|
355 |
+ |
|
356 |
+ with open(os.path.join(fullpath, '.git', 'HEAD'), 'w') as head:
|
|
357 |
+ self.source.call([self.source.host_git, 'rev-parse', self.ref],
|
|
358 |
+ stdout=head,
|
|
359 |
+ fail="Failed to parse commit {}".format(self.ref),
|
|
360 |
+ cwd=self.mirror)
|
|
361 |
+ |
|
362 |
+ |
|
363 |
+class _GitSourceBase(Source):
|
|
364 |
+ # pylint: disable=attribute-defined-outside-init
|
|
365 |
+ |
|
366 |
+ def configure(self, node):
|
|
367 |
+ ref = self.node_get_member(node, str, 'ref', None)
|
|
368 |
+ |
|
369 |
+ config_keys = ['url', 'track', 'ref', 'submodules',
|
|
370 |
+ 'checkout-submodules', 'ref-format',
|
|
371 |
+ 'track-tags', 'tags']
|
|
372 |
+ self.node_validate(node, config_keys + Source.COMMON_CONFIG_KEYS)
|
|
373 |
+ |
|
374 |
+ tags_node = self.node_get_member(node, list, 'tags', [])
|
|
375 |
+ for tag_node in tags_node:
|
|
376 |
+ self.node_validate(tag_node, ['tag', 'commit', 'annotated'])
|
|
377 |
+ |
|
378 |
+ tags = self._load_tags(node)
|
|
379 |
+ self.track_tags = self.node_get_member(node, bool, 'track-tags', False)
|
|
380 |
+ |
|
381 |
+ self.original_url = self.node_get_member(node, str, 'url')
|
|
382 |
+ self.mirror = GitMirror(self, '', self.original_url, ref, tags=tags, primary=True)
|
|
383 |
+ self.tracking = self.node_get_member(node, str, 'track', None)
|
|
384 |
+ |
|
385 |
+ self.ref_format = self.node_get_member(node, str, 'ref-format', 'sha1')
|
|
386 |
+ if self.ref_format not in ['sha1', 'git-describe']:
|
|
387 |
+ provenance = self.node_provenance(node, member_name='ref-format')
|
|
388 |
+ raise SourceError("{}: Unexpected value for ref-format: {}".format(provenance, self.ref_format))
|
|
389 |
+ |
|
390 |
+ # At this point we now know if the source has a ref and/or a track.
|
|
391 |
+ # If it is missing both then we will be unable to track or build.
|
|
392 |
+ if self.mirror.ref is None and self.tracking is None:
|
|
393 |
+ raise SourceError("{}: Git sources require a ref and/or track".format(self),
|
|
394 |
+ reason="missing-track-and-ref")
|
|
395 |
+ |
|
396 |
+ self.checkout_submodules = self.node_get_member(node, bool, 'checkout-submodules', True)
|
|
397 |
+ self.submodules = []
|
|
398 |
+ |
|
399 |
+ # Parse a dict of submodule overrides, stored in the submodule_overrides
|
|
400 |
+ # and submodule_checkout_overrides dictionaries.
|
|
401 |
+ self.submodule_overrides = {}
|
|
402 |
+ self.submodule_checkout_overrides = {}
|
|
403 |
+ modules = self.node_get_member(node, Mapping, 'submodules', {})
|
|
404 |
+ for path, _ in self.node_items(modules):
|
|
405 |
+ submodule = self.node_get_member(modules, Mapping, path)
|
|
406 |
+ url = self.node_get_member(submodule, str, 'url', None)
|
|
407 |
+ |
|
408 |
+ # Make sure to mark all URLs that are specified in the configuration
|
|
409 |
+ if url:
|
|
410 |
+ self.mark_download_url(url, primary=False)
|
|
411 |
+ |
|
412 |
+ self.submodule_overrides[path] = url
|
|
413 |
+ if 'checkout' in submodule:
|
|
414 |
+ checkout = self.node_get_member(submodule, bool, 'checkout')
|
|
415 |
+ self.submodule_checkout_overrides[path] = checkout
|
|
416 |
+ |
|
417 |
+ self.mark_download_url(self.original_url)
|
|
418 |
+ |
|
419 |
+ def preflight(self):
|
|
420 |
+ # Check if git is installed, get the binary at the same time
|
|
421 |
+ self.host_git = utils.get_host_tool('git')
|
|
422 |
+ |
|
423 |
+ def get_unique_key(self):
|
|
424 |
+ # Here we want to encode the local name of the repository and
|
|
425 |
+ # the ref, if the user changes the alias to fetch the same sources
|
|
426 |
+ # from another location, it should not affect the cache key.
|
|
427 |
+ key = [self.original_url, self.mirror.ref]
|
|
428 |
+ if self.mirror.tags:
|
|
429 |
+ tags = {tag: (commit, annotated) for tag, commit, annotated in self.mirror.tags}
|
|
430 |
+ key.append({'tags': tags})
|
|
431 |
+ |
|
432 |
+ # Only modify the cache key with checkout_submodules if it's something
|
|
433 |
+ # other than the default behaviour.
|
|
434 |
+ if self.checkout_submodules is False:
|
|
435 |
+ key.append({"checkout_submodules": self.checkout_submodules})
|
|
436 |
+ |
|
437 |
+ # We want the cache key to change if the source was
|
|
438 |
+ # configured differently, and submodules count.
|
|
439 |
+ if self.submodule_overrides:
|
|
440 |
+ key.append(self.submodule_overrides)
|
|
441 |
+ |
|
442 |
+ if self.submodule_checkout_overrides:
|
|
443 |
+ key.append({"submodule_checkout_overrides": self.submodule_checkout_overrides})
|
|
444 |
+ |
|
445 |
+ return key
|
|
446 |
+ |
|
447 |
+ def get_consistency(self):
|
|
448 |
+ if self._have_all_refs():
|
|
449 |
+ return Consistency.CACHED
|
|
450 |
+ elif self.mirror.ref is not None:
|
|
451 |
+ return Consistency.RESOLVED
|
|
452 |
+ return Consistency.INCONSISTENT
|
|
453 |
+ |
|
454 |
+ def load_ref(self, node):
|
|
455 |
+ self.mirror.ref = self.node_get_member(node, str, 'ref', None)
|
|
456 |
+ self.mirror.tags = self._load_tags(node)
|
|
457 |
+ |
|
458 |
+ def get_ref(self):
|
|
459 |
+ return self.mirror.ref, self.mirror.tags
|
|
460 |
+ |
|
461 |
+ def set_ref(self, ref_data, node):
|
|
462 |
+ if not ref_data:
|
|
463 |
+ self.mirror.ref = None
|
|
464 |
+ if 'ref' in node:
|
|
465 |
+ del node['ref']
|
|
466 |
+ self.mirror.tags = []
|
|
467 |
+ if 'tags' in node:
|
|
468 |
+ del node['tags']
|
|
469 |
+ else:
|
|
470 |
+ ref, tags = ref_data
|
|
471 |
+ node['ref'] = self.mirror.ref = ref
|
|
472 |
+ self.mirror.tags = tags
|
|
473 |
+ if tags:
|
|
474 |
+ node['tags'] = []
|
|
475 |
+ for tag, commit_ref, annotated in tags:
|
|
476 |
+ data = {'tag': tag,
|
|
477 |
+ 'commit': commit_ref,
|
|
478 |
+ 'annotated': annotated}
|
|
479 |
+ node['tags'].append(data)
|
|
480 |
+ else:
|
|
481 |
+ if 'tags' in node:
|
|
482 |
+ del node['tags']
|
|
483 |
+ |
|
484 |
+ def track(self):
|
|
485 |
+ |
|
486 |
+ # If self.tracking is not specified it's not an error, just silently return
|
|
487 |
+ if not self.tracking:
|
|
488 |
+ # Is there a better way to check whether a ref is given?
|
|
489 |
+ if self.mirror.ref is None:
|
|
490 |
+ detail = 'Without a tracking branch, the ref cannot be updated. Please ' + \
|
|
491 |
+ 'provide a ref or a track.'
|
|
492 |
+ raise SourceError("{}: No track or ref".format(self),
|
|
493 |
+ detail=detail, reason="track-attempt-no-track")
|
|
494 |
+ return None
|
|
495 |
+ |
|
496 |
+ # Resolve the URL for the message
|
|
497 |
+ resolved_url = self.translate_url(self.mirror.url)
|
|
498 |
+ with self.timed_activity("Tracking {} from {}"
|
|
499 |
+ .format(self.tracking, resolved_url),
|
|
500 |
+ silent_nested=True):
|
|
501 |
+ self.mirror.ensure()
|
|
502 |
+ self.mirror._fetch()
|
|
503 |
+ |
|
504 |
+ # Update self.mirror.ref and node.ref from the self.tracking branch
|
|
505 |
+ ret = self.mirror.latest_commit_with_tags(self.tracking, self.track_tags)
|
|
506 |
+ |
|
507 |
+ return ret
|
|
508 |
+ |
|
509 |
+ def init_workspace(self, directory):
|
|
510 |
+ # XXX: may wish to refactor this as some code dupe with stage()
|
|
511 |
+ self._refresh_submodules()
|
|
512 |
+ |
|
513 |
+ with self.timed_activity('Setting up workspace "{}"'.format(directory), silent_nested=True):
|
|
514 |
+ self.mirror.init_workspace(directory)
|
|
515 |
+ for mirror in self.submodules:
|
|
516 |
+ mirror.init_workspace(directory)
|
|
517 |
+ |
|
518 |
+ def stage(self, directory):
|
|
519 |
+ |
|
520 |
+ # Need to refresh submodule list here again, because
|
|
521 |
+ # it's possible that we did not load in the main process
|
|
522 |
+ # with submodules present (source needed fetching) and
|
|
523 |
+ # we may not know about the submodule yet come time to build.
|
|
524 |
+ #
|
|
525 |
+ self._refresh_submodules()
|
|
526 |
+ |
|
527 |
+ # Stage the main repo in the specified directory
|
|
528 |
+ #
|
|
529 |
+ with self.timed_activity("Staging {}".format(self.mirror.url), silent_nested=True):
|
|
530 |
+ self.mirror.stage(directory)
|
|
531 |
+ for mirror in self.submodules:
|
|
532 |
+ mirror.stage(directory)
|
|
533 |
+ |
|
534 |
+ def get_source_fetchers(self):
|
|
535 |
+ yield self.mirror
|
|
536 |
+ self._refresh_submodules()
|
|
537 |
+ for submodule in self.submodules:
|
|
538 |
+ yield submodule
|
|
539 |
+ |
|
540 |
+ def validate_cache(self):
|
|
541 |
+ discovered_submodules = {}
|
|
542 |
+ unlisted_submodules = []
|
|
543 |
+ invalid_submodules = []
|
|
544 |
+ |
|
545 |
+ for path, url in self.mirror.submodule_list():
|
|
546 |
+ discovered_submodules[path] = url
|
|
547 |
+ if self._ignore_submodule(path):
|
|
548 |
+ continue
|
|
549 |
+ |
|
550 |
+ override_url = self.submodule_overrides.get(path)
|
|
551 |
+ if not override_url:
|
|
552 |
+ unlisted_submodules.append((path, url))
|
|
553 |
+ |
|
554 |
+ # Warn about submodules which are explicitly configured but do not exist
|
|
555 |
+ for path, url in self.submodule_overrides.items():
|
|
556 |
+ if path not in discovered_submodules:
|
|
557 |
+ invalid_submodules.append((path, url))
|
|
558 |
+ |
|
559 |
+ if invalid_submodules:
|
|
560 |
+ detail = []
|
|
561 |
+ for path, url in invalid_submodules:
|
|
562 |
+ detail.append(" Submodule URL '{}' at path '{}'".format(url, path))
|
|
563 |
+ |
|
564 |
+ self.warn("{}: Invalid submodules specified".format(self),
|
|
565 |
+ warning_token=WARN_INVALID_SUBMODULE,
|
|
566 |
+ detail="The following submodules are specified in the source "
|
|
567 |
+ "description but do not exist according to the repository\n\n" +
|
|
568 |
+ "\n".join(detail))
|
|
569 |
+ |
|
570 |
+ # Warn about submodules which exist but have not been explicitly configured
|
|
571 |
+ if unlisted_submodules:
|
|
572 |
+ detail = []
|
|
573 |
+ for path, url in unlisted_submodules:
|
|
574 |
+ detail.append(" Submodule URL '{}' at path '{}'".format(url, path))
|
|
575 |
+ |
|
576 |
+ self.warn("{}: Unlisted submodules exist".format(self),
|
|
577 |
+ warning_token=WARN_UNLISTED_SUBMODULE,
|
|
578 |
+ detail="The following submodules exist but are not specified " +
|
|
579 |
+ "in the source description\n\n" +
|
|
580 |
+ "\n".join(detail))
|
|
581 |
+ |
|
582 |
+ # Assert that the ref exists in the track tag/branch, if track has been specified.
|
|
583 |
+ ref_in_track = False
|
|
584 |
+ if self.tracking:
|
|
585 |
+ _, branch = self.check_output([self.host_git, 'branch', '--list', self.tracking,
|
|
586 |
+ '--contains', self.mirror.ref],
|
|
587 |
+ cwd=self.mirror.mirror)
|
|
588 |
+ if branch:
|
|
589 |
+ ref_in_track = True
|
|
590 |
+ else:
|
|
591 |
+ _, tag = self.check_output([self.host_git, 'tag', '--list', self.tracking,
|
|
592 |
+ '--contains', self.mirror.ref],
|
|
593 |
+ cwd=self.mirror.mirror)
|
|
594 |
+ if tag:
|
|
595 |
+ ref_in_track = True
|
|
596 |
+ |
|
597 |
+ if not ref_in_track:
|
|
598 |
+ detail = "The ref provided for the element does not exist locally " + \
|
|
599 |
+ "in the provided track branch / tag '{}'.\n".format(self.tracking) + \
|
|
600 |
+ "You may wish to track the element to update the ref from '{}' ".format(self.tracking) + \
|
|
601 |
+ "with `bst track`,\n" + \
|
|
602 |
+ "or examine the upstream at '{}' for the specific ref.".format(self.mirror.url)
|
|
603 |
+ |
|
604 |
+ self.warn("{}: expected ref '{}' was not found in given track '{}' for staged repository: '{}'\n"
|
|
605 |
+ .format(self, self.mirror.ref, self.tracking, self.mirror.url),
|
|
606 |
+ detail=detail, warning_token=CoreWarnings.REF_NOT_IN_TRACK)
|
|
607 |
+ |
|
608 |
+ ###########################################################
|
|
609 |
+ # Local Functions #
|
|
610 |
+ ###########################################################
|
|
611 |
+ |
|
612 |
+ def _have_all_refs(self):
|
|
613 |
+ if not self.mirror.has_ref():
|
|
614 |
+ return False
|
|
615 |
+ |
|
616 |
+ self._refresh_submodules()
|
|
617 |
+ for mirror in self.submodules:
|
|
618 |
+ if not os.path.exists(mirror.mirror):
|
|
619 |
+ return False
|
|
620 |
+ if not mirror.has_ref():
|
|
621 |
+ return False
|
|
622 |
+ |
|
623 |
+ return True
|
|
624 |
+ |
|
625 |
+ # Refreshes the GitMirror objects for submodules
|
|
626 |
+ #
|
|
627 |
+ # Assumes that we have our mirror and we have the ref which we point to
|
|
628 |
+ #
|
|
629 |
+ def _refresh_submodules(self):
|
|
630 |
+ self.mirror.ensure()
|
|
631 |
+ submodules = []
|
|
632 |
+ |
|
633 |
+ for path, url in self.mirror.submodule_list():
|
|
634 |
+ |
|
635 |
+ # Completely ignore submodules which are disabled for checkout
|
|
636 |
+ if self._ignore_submodule(path):
|
|
637 |
+ continue
|
|
638 |
+ |
|
639 |
+ # Allow configuration to override the upstream
|
|
640 |
+ # location of the submodules.
|
|
641 |
+ override_url = self.submodule_overrides.get(path)
|
|
642 |
+ if override_url:
|
|
643 |
+ url = override_url
|
|
644 |
+ |
|
645 |
+ ref = self.mirror.submodule_ref(path)
|
|
646 |
+ if ref is not None:
|
|
647 |
+ mirror = GitMirror(self, path, url, ref)
|
|
648 |
+ submodules.append(mirror)
|
|
649 |
+ |
|
650 |
+ self.submodules = submodules
|
|
651 |
+ |
|
652 |
+ def _load_tags(self, node):
|
|
653 |
+ tags = []
|
|
654 |
+ tags_node = self.node_get_member(node, list, 'tags', [])
|
|
655 |
+ for tag_node in tags_node:
|
|
656 |
+ tag = self.node_get_member(tag_node, str, 'tag')
|
|
657 |
+ commit_ref = self.node_get_member(tag_node, str, 'commit')
|
|
658 |
+ annotated = self.node_get_member(tag_node, bool, 'annotated')
|
|
659 |
+ tags.append((tag, commit_ref, annotated))
|
|
660 |
+ return tags
|
|
661 |
+ |
|
662 |
+ # Checks whether the plugin configuration has explicitly
|
|
663 |
+ # configured this submodule to be ignored
|
|
664 |
+ def _ignore_submodule(self, path):
|
|
665 |
+ try:
|
|
666 |
+ checkout = self.submodule_checkout_overrides[path]
|
|
667 |
+ except KeyError:
|
|
668 |
+ checkout = self.checkout_submodules
|
|
669 |
+ |
|
670 |
+ return not checkout
|
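With the whole implementation now living in buildstream/_gitsourcebase.py and re-exported from the package root, a git-flavoured source plugin can be little more than a subclass, which is what the trimmed-down plugins/sources/git.py further down achieves. A hypothetical external plugin reusing the base might look roughly like this (the plugin name is illustrative; source plugin modules conventionally expose their class via setup()):

```python
from buildstream import _GitSourceBase

class MyGitSource(_GitSourceBase):
    """Hypothetical plugin: fetch/track/stage behaviour is inherited wholesale."""

def setup():
    return MyGitSource
```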
... | ... | @@ -677,8 +677,9 @@ class Project(): |
677 | 677 |
#
|
678 | 678 |
def _find_project_dir(self, directory):
|
679 | 679 |
workspace_element = None
|
680 |
+ config_filenames = [_PROJECT_CONF_FILE, WORKSPACE_PROJECT_FILE]
|
|
680 | 681 |
found_directory, filename = utils._search_upward_for_files(
|
681 |
- directory, [_PROJECT_CONF_FILE, WORKSPACE_PROJECT_FILE]
|
|
682 |
+ directory, config_filenames
|
|
682 | 683 |
)
|
683 | 684 |
if filename == _PROJECT_CONF_FILE:
|
684 | 685 |
project_directory = found_directory
|
... | ... | @@ -691,8 +692,8 @@ class Project(): |
691 | 692 |
else:
|
692 | 693 |
raise LoadError(
|
693 | 694 |
LoadErrorReason.MISSING_PROJECT_CONF,
|
694 |
- '{} not found in current directory or any of its parent directories'
|
|
695 |
- .format(_PROJECT_CONF_FILE))
|
|
695 |
+ "None of {names} found in '{path}' or any of its parent directories"
|
|
696 |
+ .format(names=config_filenames, path=directory))
|
|
696 | 697 |
|
697 | 698 |
return project_directory, workspace_element
|
698 | 699 |
|
... | ... | @@ -40,10 +40,7 @@ class FetchQueue(Queue): |
40 | 40 |
self._skip_cached = skip_cached
|
41 | 41 |
|
42 | 42 |
def process(self, element):
|
43 |
- previous_sources = []
|
|
44 |
- for source in element.sources():
|
|
45 |
- source._fetch(previous_sources)
|
|
46 |
- previous_sources.append(source)
|
|
43 |
+ element._fetch()
|
|
47 | 44 |
|
48 | 45 |
def status(self, element):
|
49 | 46 |
# state of dependencies may have changed, recalculate element state
|
... | ... | @@ -1263,7 +1263,7 @@ class Stream(): |
1263 | 1263 |
required_list = []
|
1264 | 1264 |
|
1265 | 1265 |
# If context is set to not pull buildtrees, or no fetch remotes, return empty list
|
1266 |
- if not (self._context.pull_buildtrees or self._artifacts.has_fetch_remotes()):
|
|
1266 |
+ if not self._context.pull_buildtrees or not self._artifacts.has_fetch_remotes():
|
|
1267 | 1267 |
return required_list
|
1268 | 1268 |
|
1269 | 1269 |
for element in elements:
|
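The operator fix above changes the guard's meaning: the old `not (a or b)` returned early only when both conditions were falsy, while the intent is to return early when either pulling buildtrees is disabled or there are no fetch remotes. A quick truth-check in plain Python (variable names are illustrative):

```python
pull_buildtrees, has_fetch_remotes = True, False

# Old (buggy): only returned early when *both* were falsy
old = not (pull_buildtrees or has_fetch_remotes)        # False -> would proceed
# New: returns early when *either* precondition fails
new = not pull_buildtrees or not has_fetch_remotes      # True  -> early return

assert old is False and new is True
```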
... | ... | @@ -112,14 +112,6 @@ logging: |
112 | 112 |
#
|
113 | 113 |
prompt:
|
114 | 114 |
|
115 |
- # Whether to create a project with 'bst init' if we are invoked outside of a
|
|
116 |
- # directory where we can resolve the project.
|
|
117 |
- #
|
|
118 |
- # ask - Prompt the user to choose.
|
|
119 |
- # no - Never create the project.
|
|
120 |
- #
|
|
121 |
- auto-init: ask
|
|
122 |
- |
|
123 | 115 |
# Whether to really proceed with 'bst workspace close --remove-dir' removing
|
124 | 116 |
# a workspace directory, potentially losing changes.
|
125 | 117 |
#
|
... | ... | @@ -1801,13 +1801,19 @@ class Element(Plugin): |
1801 | 1801 |
# (bool): True if this element does not need a push job to be created
|
1802 | 1802 |
#
|
1803 | 1803 |
def _skip_push(self):
|
1804 |
+ |
|
1804 | 1805 |
if not self.__artifacts.has_push_remotes(element=self):
|
1805 | 1806 |
# No push remotes for this element's project
|
1806 | 1807 |
return True
|
1807 | 1808 |
|
1808 | 1809 |
# Do not push elements that aren't cached, or that are cached with a dangling buildtree
|
1809 |
- # artifact unless element type is expected to have an an empty buildtree directory
|
|
1810 |
- if not self._cached_buildtree():
|
|
1810 |
+ # artifact unless the element type is expected to have an empty buildtree directory. Check
|
|
1811 |
+ # that this default behaviour is not overridden via a remote configured to allow pushing
|
|
1812 |
+ # artifacts without their corresponding buildtree.
|
|
1813 |
+ if not self._cached():
|
|
1814 |
+ return True
|
|
1815 |
+ |
|
1816 |
+ if not self._cached_buildtree() and not self.__artifacts.has_partial_push_remotes(element=self):
|
|
1811 | 1817 |
return True
|
1812 | 1818 |
|
1813 | 1819 |
# Do not push tainted artifact
|
... | ... | @@ -1818,7 +1824,8 @@ class Element(Plugin): |
1818 | 1824 |
|
1819 | 1825 |
# _push():
|
1820 | 1826 |
#
|
1821 |
- # Push locally cached artifact to remote artifact repository.
|
|
1827 |
+ # Push locally cached artifact to remote artifact repository. An attempt
|
|
1828 |
+ # will be made to push partial artifacts, given the current config.
|
|
1822 | 1829 |
#
|
1823 | 1830 |
# Returns:
|
1824 | 1831 |
# (bool): True if the remote was updated, False if it already existed
|
... | ... | @@ -1831,8 +1838,19 @@ class Element(Plugin): |
1831 | 1838 |
self.warn("Not pushing tainted artifact.")
|
1832 | 1839 |
return False
|
1833 | 1840 |
|
1834 |
- # Push all keys used for local commit
|
|
1835 |
- pushed = self.__artifacts.push(self, self.__get_cache_keys_for_commit())
|
|
1841 |
+ # Push all keys used for the local commit; this could be full or partial,
|
|
1842 |
+ # given previous _skip_push() logic. If buildtree isn't cached, then
|
|
1843 |
+ # set partial push
|
|
1844 |
+ |
|
1845 |
+ partial = False
|
|
1846 |
+ subdir = 'buildtree'
|
|
1847 |
+ if not self._cached_buildtree():
|
|
1848 |
+ partial = True
|
|
1849 |
+ |
|
1850 |
+ pushed = self.__artifacts.push(self, self.__get_cache_keys_for_commit(), partial=partial, subdir=subdir)
|
|
1851 |
+ |
|
1852 |
+ # The artifact might be cached on the server partially, with the top level ref existing.
|
|
1853 |
+ # Check if we need to attempt a push of a locally cached buildtree given current config
|
|
1836 | 1854 |
if not pushed:
|
1837 | 1855 |
return False
|
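Tying the element side together: _push() above now derives the partial flag from whether the 'buildtree' subdir is cached locally. A condensed, self-contained sketch of that decision (simplified from the method, which also consults _skip_push()):

```python
def push_args(cached_buildtree):
    # Mirrors Element._push(): no cached buildtree means a partial push,
    # with the 'buildtree' subdir named so it can be excluded from the push
    return {'partial': not cached_buildtree, 'subdir': 'buildtree'}

assert push_args(cached_buildtree=False) == {'partial': True, 'subdir': 'buildtree'}
assert push_args(cached_buildtree=True) == {'partial': False, 'subdir': 'buildtree'}
```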
1838 | 1856 |
|
... | ... | @@ -2022,6 +2040,20 @@ class Element(Plugin): |
2022 | 2040 |
|
2023 | 2041 |
return True
|
2024 | 2042 |
|
2043 |
+ # _fetch()
|
|
2044 |
+ #
|
|
2045 |
+ # Fetch the element's sources.
|
|
2046 |
+ #
|
|
2047 |
+ # Raises:
|
|
2048 |
+ # SourceError: If one of the element sources has an error
|
|
2049 |
+ #
|
|
2050 |
+ def _fetch(self):
|
|
2051 |
+ previous_sources = []
|
|
2052 |
+ for source in self.sources():
|
|
2053 |
+ if source._get_consistency() < Consistency.CACHED:
|
|
2054 |
+ source._fetch(previous_sources)
|
|
2055 |
+ previous_sources.append(source)
|
|
2056 |
+ |
|
2025 | 2057 |
#############################################################
|
2026 | 2058 |
# Private Local Methods #
|
2027 | 2059 |
#############################################################
|
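Note that previous_sources accumulates every source, fetched or skipped, so a later source that consumes its predecessors via Source._fetch(previous_sources) always receives the complete list. A sketch of the invariant:

    # Only sources below Consistency.CACHED are fetched, but all sources
    # are appended, so source N always sees sources 0..N-1
    previous_sources = []
    for source in element.sources():
        if source._get_consistency() < Consistency.CACHED:
            source._fetch(previous_sources)
        previous_sources.append(source)   # appended even when fetch is skipped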
... | ... | @@ -156,652 +156,11 @@ This plugin also utilises the following configurable :class:`core warnings <buil |
156 | 156 |
found in the provided track in the element's git repository.
|
157 | 157 |
"""
|
158 | 158 |
|
159 |
-import os
|
|
160 |
-import re
|
|
161 |
-import shutil
|
|
162 |
-from collections.abc import Mapping
|
|
163 |
-from io import StringIO
|
|
164 |
-from tempfile import TemporaryFile
|
|
159 |
+from buildstream import _GitSourceBase
|
|
165 | 160 |
|
166 |
-from configparser import RawConfigParser
|
|
167 | 161 |
|
168 |
-from buildstream import Source, SourceError, Consistency, SourceFetcher, CoreWarnings
|
|
169 |
-from buildstream import utils
|
|
170 |
-from buildstream.utils import move_atomic, DirectoryExistsError
|
|
171 |
- |
|
172 |
-GIT_MODULES = '.gitmodules'
|
|
173 |
- |
|
174 |
-# Warnings
|
|
175 |
-WARN_INCONSISTENT_SUBMODULE = "inconsistent-submodule"
|
|
176 |
-WARN_UNLISTED_SUBMODULE = "unlisted-submodule"
|
|
177 |
-WARN_INVALID_SUBMODULE = "invalid-submodule"
|
|
178 |
- |
|
179 |
- |
|
180 |
-# Because of handling of submodules, we maintain a GitMirror
|
|
181 |
-# for the primary git source and also for each submodule it
|
|
182 |
-# might have at a given time
|
|
183 |
-#
|
|
184 |
-class GitMirror(SourceFetcher):
|
|
185 |
- |
|
186 |
- def __init__(self, source, path, url, ref, *, primary=False, tags=[]):
|
|
187 |
- |
|
188 |
- super().__init__()
|
|
189 |
- self.source = source
|
|
190 |
- self.path = path
|
|
191 |
- self.url = url
|
|
192 |
- self.ref = ref
|
|
193 |
- self.tags = tags
|
|
194 |
- self.primary = primary
|
|
195 |
- self.mirror = os.path.join(source.get_mirror_directory(), utils.url_directory_name(url))
|
|
196 |
- self.mark_download_url(url)
|
|
197 |
- |
|
198 |
- # Ensures that the mirror exists
|
|
199 |
- def ensure(self, alias_override=None):
|
|
200 |
- |
|
201 |
- # Unfortunately, git does not know how to only clone just a specific ref,
|
|
202 |
- # so we have to download all of those gigs even if we only need a couple
|
|
203 |
- # of bytes.
|
|
204 |
- if not os.path.exists(self.mirror):
|
|
205 |
- |
|
206 |
- # Do the initial clone in a tmpdir just because we want an atomic move
|
|
207 |
- # after a long standing clone which could fail overtime, for now do
|
|
208 |
- # this directly in our git directory, eliminating the chances that the
|
|
209 |
- # system configured tmpdir is not on the same partition.
|
|
210 |
- #
|
|
211 |
- with self.source.tempdir() as tmpdir:
|
|
212 |
- url = self.source.translate_url(self.url, alias_override=alias_override,
|
|
213 |
- primary=self.primary)
|
|
214 |
- self.source.call([self.source.host_git, 'clone', '--mirror', '-n', url, tmpdir],
|
|
215 |
- fail="Failed to clone git repository {}".format(url),
|
|
216 |
- fail_temporarily=True)
|
|
217 |
- |
|
218 |
- try:
|
|
219 |
- move_atomic(tmpdir, self.mirror)
|
|
220 |
- except DirectoryExistsError:
|
|
221 |
- # Another process was quicker to download this repository.
|
|
222 |
- # Let's discard our own
|
|
223 |
- self.source.status("{}: Discarding duplicate clone of {}"
|
|
224 |
- .format(self.source, url))
|
|
225 |
- except OSError as e:
|
|
226 |
- raise SourceError("{}: Failed to move cloned git repository {} from '{}' to '{}': {}"
|
|
227 |
- .format(self.source, url, tmpdir, self.mirror, e)) from e
|
|
228 |
- |
|
229 |
- def _fetch(self, alias_override=None):
|
|
230 |
- url = self.source.translate_url(self.url,
|
|
231 |
- alias_override=alias_override,
|
|
232 |
- primary=self.primary)
|
|
233 |
- |
|
234 |
- if alias_override:
|
|
235 |
- remote_name = utils.url_directory_name(alias_override)
|
|
236 |
- _, remotes = self.source.check_output(
|
|
237 |
- [self.source.host_git, 'remote'],
|
|
238 |
- fail="Failed to retrieve list of remotes in {}".format(self.mirror),
|
|
239 |
- cwd=self.mirror
|
|
240 |
- )
|
|
241 |
- if remote_name not in remotes:
|
|
242 |
- self.source.call(
|
|
243 |
- [self.source.host_git, 'remote', 'add', remote_name, url],
|
|
244 |
- fail="Failed to add remote {} with url {}".format(remote_name, url),
|
|
245 |
- cwd=self.mirror
|
|
246 |
- )
|
|
247 |
- else:
|
|
248 |
- remote_name = "origin"
|
|
249 |
- |
|
250 |
- self.source.call([self.source.host_git, 'fetch', remote_name, '--prune', '--force', '--tags'],
|
|
251 |
- fail="Failed to fetch from remote git repository: {}".format(url),
|
|
252 |
- fail_temporarily=True,
|
|
253 |
- cwd=self.mirror)
|
|
254 |
- |
|
255 |
- def fetch(self, alias_override=None):
|
|
256 |
- # Resolve the URL for the message
|
|
257 |
- resolved_url = self.source.translate_url(self.url,
|
|
258 |
- alias_override=alias_override,
|
|
259 |
- primary=self.primary)
|
|
260 |
- |
|
261 |
- with self.source.timed_activity("Fetching from {}"
|
|
262 |
- .format(resolved_url),
|
|
263 |
- silent_nested=True):
|
|
264 |
- self.ensure(alias_override)
|
|
265 |
- if not self.has_ref():
|
|
266 |
- self._fetch(alias_override)
|
|
267 |
- self.assert_ref()
|
|
268 |
- |
|
269 |
- def has_ref(self):
|
|
270 |
- if not self.ref:
|
|
271 |
- return False
|
|
272 |
- |
|
273 |
- # If the mirror doesnt exist, we also dont have the ref
|
|
274 |
- if not os.path.exists(self.mirror):
|
|
275 |
- return False
|
|
276 |
- |
|
277 |
- # Check if the ref is really there
|
|
278 |
- rc = self.source.call([self.source.host_git, 'cat-file', '-t', self.ref], cwd=self.mirror)
|
|
279 |
- return rc == 0
|
|
280 |
- |
|
281 |
- def assert_ref(self):
|
|
282 |
- if not self.has_ref():
|
|
283 |
- raise SourceError("{}: expected ref '{}' was not found in git repository: '{}'"
|
|
284 |
- .format(self.source, self.ref, self.url))
|
|
285 |
- |
|
286 |
- def latest_commit_with_tags(self, tracking, track_tags=False):
|
|
287 |
- _, output = self.source.check_output(
|
|
288 |
- [self.source.host_git, 'rev-parse', tracking],
|
|
289 |
- fail="Unable to find commit for specified branch name '{}'".format(tracking),
|
|
290 |
- cwd=self.mirror)
|
|
291 |
- ref = output.rstrip('\n')
|
|
292 |
- |
|
293 |
- if self.source.ref_format == 'git-describe':
|
|
294 |
- # Prefix the ref with the closest tag, if available,
|
|
295 |
- # to make the ref human readable
|
|
296 |
- exit_code, output = self.source.check_output(
|
|
297 |
- [self.source.host_git, 'describe', '--tags', '--abbrev=40', '--long', ref],
|
|
298 |
- cwd=self.mirror)
|
|
299 |
- if exit_code == 0:
|
|
300 |
- ref = output.rstrip('\n')
|
|
301 |
- |
|
302 |
- if not track_tags:
|
|
303 |
- return ref, []
|
|
304 |
- |
|
305 |
- tags = set()
|
|
306 |
- for options in [[], ['--first-parent'], ['--tags'], ['--tags', '--first-parent']]:
|
|
307 |
- exit_code, output = self.source.check_output(
|
|
308 |
- [self.source.host_git, 'describe', '--abbrev=0', ref] + options,
|
|
309 |
- cwd=self.mirror)
|
|
310 |
- if exit_code == 0:
|
|
311 |
- tag = output.strip()
|
|
312 |
- _, commit_ref = self.source.check_output(
|
|
313 |
- [self.source.host_git, 'rev-parse', tag + '^{commit}'],
|
|
314 |
- fail="Unable to resolve tag '{}'".format(tag),
|
|
315 |
- cwd=self.mirror)
|
|
316 |
- exit_code = self.source.call(
|
|
317 |
- [self.source.host_git, 'cat-file', 'tag', tag],
|
|
318 |
- cwd=self.mirror)
|
|
319 |
- annotated = (exit_code == 0)
|
|
320 |
- |
|
321 |
- tags.add((tag, commit_ref.strip(), annotated))
|
|
322 |
- |
|
323 |
- return ref, list(tags)
|
|
324 |
- |
|
325 |
- def stage(self, directory):
|
|
326 |
- fullpath = os.path.join(directory, self.path)
|
|
327 |
- |
|
328 |
- # Using --shared here avoids copying the objects into the checkout, in any
|
|
329 |
- # case we're just checking out a specific commit and then removing the .git/
|
|
330 |
- # directory.
|
|
331 |
- self.source.call([self.source.host_git, 'clone', '--no-checkout', '--shared', self.mirror, fullpath],
|
|
332 |
- fail="Failed to create git mirror {} in directory: {}".format(self.mirror, fullpath),
|
|
333 |
- fail_temporarily=True)
|
|
334 |
- |
|
335 |
- self.source.call([self.source.host_git, 'checkout', '--force', self.ref],
|
|
336 |
- fail="Failed to checkout git ref {}".format(self.ref),
|
|
337 |
- cwd=fullpath)
|
|
338 |
- |
|
339 |
- # Remove .git dir
|
|
340 |
- shutil.rmtree(os.path.join(fullpath, ".git"))
|
|
341 |
- |
|
342 |
- self._rebuild_git(fullpath)
|
|
343 |
- |
|
344 |
- def init_workspace(self, directory):
|
|
345 |
- fullpath = os.path.join(directory, self.path)
|
|
346 |
- url = self.source.translate_url(self.url)
|
|
347 |
- |
|
348 |
- self.source.call([self.source.host_git, 'clone', '--no-checkout', self.mirror, fullpath],
|
|
349 |
- fail="Failed to clone git mirror {} in directory: {}".format(self.mirror, fullpath),
|
|
350 |
- fail_temporarily=True)
|
|
351 |
- |
|
352 |
- self.source.call([self.source.host_git, 'remote', 'set-url', 'origin', url],
|
|
353 |
- fail='Failed to add remote origin "{}"'.format(url),
|
|
354 |
- cwd=fullpath)
|
|
355 |
- |
|
356 |
- self.source.call([self.source.host_git, 'checkout', '--force', self.ref],
|
|
357 |
- fail="Failed to checkout git ref {}".format(self.ref),
|
|
358 |
- cwd=fullpath)
|
|
359 |
- |
|
360 |
- # List the submodules (path/url tuples) present at the given ref of this repo
|
|
361 |
- def submodule_list(self):
|
|
362 |
- modules = "{}:{}".format(self.ref, GIT_MODULES)
|
|
363 |
- exit_code, output = self.source.check_output(
|
|
364 |
- [self.source.host_git, 'show', modules], cwd=self.mirror)
|
|
365 |
- |
|
366 |
- # If git show reports error code 128 here, we take it to mean there is
|
|
367 |
- # no .gitmodules file to display for the given revision.
|
|
368 |
- if exit_code == 128:
|
|
369 |
- return
|
|
370 |
- elif exit_code != 0:
|
|
371 |
- raise SourceError(
|
|
372 |
- "{plugin}: Failed to show gitmodules at ref {ref}".format(
|
|
373 |
- plugin=self, ref=self.ref))
|
|
374 |
- |
|
375 |
- content = '\n'.join([l.strip() for l in output.splitlines()])
|
|
376 |
- |
|
377 |
- io = StringIO(content)
|
|
378 |
- parser = RawConfigParser()
|
|
379 |
- parser.read_file(io)
|
|
380 |
- |
|
381 |
- for section in parser.sections():
|
|
382 |
- # validate section name against the 'submodule "foo"' pattern
|
|
383 |
- if re.match(r'submodule "(.*)"', section):
|
|
384 |
- path = parser.get(section, 'path')
|
|
385 |
- url = parser.get(section, 'url')
|
|
386 |
- |
|
387 |
- yield (path, url)
|
|
388 |
- |
|
389 |
- # Fetch the ref which this mirror requires its submodule to have,
|
|
390 |
- # at the given ref of this mirror.
|
|
391 |
- def submodule_ref(self, submodule, ref=None):
|
|
392 |
- if not ref:
|
|
393 |
- ref = self.ref
|
|
394 |
- |
|
395 |
- # list objects in the parent repo tree to find the commit
|
|
396 |
- # object that corresponds to the submodule
|
|
397 |
- _, output = self.source.check_output([self.source.host_git, 'ls-tree', ref, submodule],
|
|
398 |
- fail="ls-tree failed for commit {} and submodule: {}".format(
|
|
399 |
- ref, submodule),
|
|
400 |
- cwd=self.mirror)
|
|
401 |
- |
|
402 |
- # read the commit hash from the output
|
|
403 |
- fields = output.split()
|
|
404 |
- if len(fields) >= 2 and fields[1] == 'commit':
|
|
405 |
- submodule_commit = output.split()[2]
|
|
406 |
- |
|
407 |
- # fail if the commit hash is invalid
|
|
408 |
- if len(submodule_commit) != 40:
|
|
409 |
- raise SourceError("{}: Error reading commit information for submodule '{}'"
|
|
410 |
- .format(self.source, submodule))
|
|
411 |
- |
|
412 |
- return submodule_commit
|
|
413 |
- |
|
414 |
- else:
|
|
415 |
- detail = "The submodule '{}' is defined either in the BuildStream source\n".format(submodule) + \
|
|
416 |
- "definition, or in a .gitmodules file. But the submodule was never added to the\n" + \
|
|
417 |
- "underlying git repository with `git submodule add`."
|
|
418 |
- |
|
419 |
- self.source.warn("{}: Ignoring inconsistent submodule '{}'"
|
|
420 |
- .format(self.source, submodule), detail=detail,
|
|
421 |
- warning_token=WARN_INCONSISTENT_SUBMODULE)
|
|
422 |
- |
|
423 |
- return None
|
|
424 |
- |
|
425 |
- def _rebuild_git(self, fullpath):
|
|
426 |
- if not self.tags:
|
|
427 |
- return
|
|
428 |
- |
|
429 |
- with self.source.tempdir() as tmpdir:
|
|
430 |
- included = set()
|
|
431 |
- shallow = set()
|
|
432 |
- for _, commit_ref, _ in self.tags:
|
|
433 |
- |
|
434 |
- _, out = self.source.check_output([self.source.host_git, 'rev-list',
|
|
435 |
- '--boundary', '{}..{}'.format(commit_ref, self.ref)],
|
|
436 |
- fail="Failed to get git history {}..{} in directory: {}"
|
|
437 |
- .format(commit_ref, self.ref, fullpath),
|
|
438 |
- fail_temporarily=True,
|
|
439 |
- cwd=self.mirror)
|
|
440 |
- for line in out.splitlines():
|
|
441 |
- rev = line.lstrip('-')
|
|
442 |
- if line[0] == '-':
|
|
443 |
- shallow.add(rev)
|
|
444 |
- else:
|
|
445 |
- included.add(rev)
|
|
446 |
- |
|
447 |
- shallow -= included
|
|
448 |
- included |= shallow
|
|
449 |
- |
|
450 |
- self.source.call([self.source.host_git, 'init'],
|
|
451 |
- fail="Cannot initialize git repository: {}".format(fullpath),
|
|
452 |
- cwd=fullpath)
|
|
453 |
- |
|
454 |
- for rev in included:
|
|
455 |
- with TemporaryFile(dir=tmpdir) as commit_file:
|
|
456 |
- self.source.call([self.source.host_git, 'cat-file', 'commit', rev],
|
|
457 |
- stdout=commit_file,
|
|
458 |
- fail="Failed to get commit {}".format(rev),
|
|
459 |
- cwd=self.mirror)
|
|
460 |
- commit_file.seek(0, 0)
|
|
461 |
- self.source.call([self.source.host_git, 'hash-object', '-w', '-t', 'commit', '--stdin'],
|
|
462 |
- stdin=commit_file,
|
|
463 |
- fail="Failed to add commit object {}".format(rev),
|
|
464 |
- cwd=fullpath)
|
|
465 |
- |
|
466 |
- with open(os.path.join(fullpath, '.git', 'shallow'), 'w') as shallow_file:
|
|
467 |
- for rev in shallow:
|
|
468 |
- shallow_file.write('{}\n'.format(rev))
|
|
469 |
- |
|
470 |
- for tag, commit_ref, annotated in self.tags:
|
|
471 |
- if annotated:
|
|
472 |
- with TemporaryFile(dir=tmpdir) as tag_file:
|
|
473 |
- tag_data = 'object {}\ntype commit\ntag {}\n'.format(commit_ref, tag)
|
|
474 |
- tag_file.write(tag_data.encode('ascii'))
|
|
475 |
- tag_file.seek(0, 0)
|
|
476 |
- _, tag_ref = self.source.check_output(
|
|
477 |
- [self.source.host_git, 'hash-object', '-w', '-t',
|
|
478 |
- 'tag', '--stdin'],
|
|
479 |
- stdin=tag_file,
|
|
480 |
- fail="Failed to add tag object {}".format(tag),
|
|
481 |
- cwd=fullpath)
|
|
482 |
- |
|
483 |
- self.source.call([self.source.host_git, 'tag', tag, tag_ref.strip()],
|
|
484 |
- fail="Failed to tag: {}".format(tag),
|
|
485 |
- cwd=fullpath)
|
|
486 |
- else:
|
|
487 |
- self.source.call([self.source.host_git, 'tag', tag, commit_ref],
|
|
488 |
- fail="Failed to tag: {}".format(tag),
|
|
489 |
- cwd=fullpath)
|
|
490 |
- |
|
491 |
- with open(os.path.join(fullpath, '.git', 'HEAD'), 'w') as head:
|
|
492 |
- self.source.call([self.source.host_git, 'rev-parse', self.ref],
|
|
493 |
- stdout=head,
|
|
494 |
- fail="Failed to parse commit {}".format(self.ref),
|
|
495 |
- cwd=self.mirror)
|
|
496 |
- |
|
497 |
- |
|
498 |
-class GitSource(Source):
|
|
499 |
- # pylint: disable=attribute-defined-outside-init
|
|
500 |
- |
|
501 |
- def configure(self, node):
|
|
502 |
- ref = self.node_get_member(node, str, 'ref', None)
|
|
503 |
- |
|
504 |
- config_keys = ['url', 'track', 'ref', 'submodules',
|
|
505 |
- 'checkout-submodules', 'ref-format',
|
|
506 |
- 'track-tags', 'tags']
|
|
507 |
- self.node_validate(node, config_keys + Source.COMMON_CONFIG_KEYS)
|
|
508 |
- |
|
509 |
- tags_node = self.node_get_member(node, list, 'tags', [])
|
|
510 |
- for tag_node in tags_node:
|
|
511 |
- self.node_validate(tag_node, ['tag', 'commit', 'annotated'])
|
|
512 |
- |
|
513 |
- tags = self._load_tags(node)
|
|
514 |
- self.track_tags = self.node_get_member(node, bool, 'track-tags', False)
|
|
515 |
- |
|
516 |
- self.original_url = self.node_get_member(node, str, 'url')
|
|
517 |
- self.mirror = GitMirror(self, '', self.original_url, ref, tags=tags, primary=True)
|
|
518 |
- self.tracking = self.node_get_member(node, str, 'track', None)
|
|
519 |
- |
|
520 |
- self.ref_format = self.node_get_member(node, str, 'ref-format', 'sha1')
|
|
521 |
- if self.ref_format not in ['sha1', 'git-describe']:
|
|
522 |
- provenance = self.node_provenance(node, member_name='ref-format')
|
|
523 |
- raise SourceError("{}: Unexpected value for ref-format: {}".format(provenance, self.ref_format))
|
|
524 |
- |
|
525 |
- # At this point we now know if the source has a ref and/or a track.
|
|
526 |
- # If it is missing both then we will be unable to track or build.
|
|
527 |
- if self.mirror.ref is None and self.tracking is None:
|
|
528 |
- raise SourceError("{}: Git sources require a ref and/or track".format(self),
|
|
529 |
- reason="missing-track-and-ref")
|
|
530 |
- |
|
531 |
- self.checkout_submodules = self.node_get_member(node, bool, 'checkout-submodules', True)
|
|
532 |
- self.submodules = []
|
|
533 |
- |
|
534 |
- # Parse a dict of submodule overrides, stored in the submodule_overrides
|
|
535 |
- # and submodule_checkout_overrides dictionaries.
|
|
536 |
- self.submodule_overrides = {}
|
|
537 |
- self.submodule_checkout_overrides = {}
|
|
538 |
- modules = self.node_get_member(node, Mapping, 'submodules', {})
|
|
539 |
- for path, _ in self.node_items(modules):
|
|
540 |
- submodule = self.node_get_member(modules, Mapping, path)
|
|
541 |
- url = self.node_get_member(submodule, str, 'url', None)
|
|
542 |
- |
|
543 |
- # Make sure to mark all URLs that are specified in the configuration
|
|
544 |
- if url:
|
|
545 |
- self.mark_download_url(url, primary=False)
|
|
546 |
- |
|
547 |
- self.submodule_overrides[path] = url
|
|
548 |
- if 'checkout' in submodule:
|
|
549 |
- checkout = self.node_get_member(submodule, bool, 'checkout')
|
|
550 |
- self.submodule_checkout_overrides[path] = checkout
|
|
551 |
- |
|
552 |
- self.mark_download_url(self.original_url)
|
|
553 |
- |
|
554 |
- def preflight(self):
|
|
555 |
- # Check if git is installed, get the binary at the same time
|
|
556 |
- self.host_git = utils.get_host_tool('git')
|
|
557 |
- |
|
558 |
- def get_unique_key(self):
|
|
559 |
- # Here we want to encode the local name of the repository and
|
|
560 |
- # the ref, if the user changes the alias to fetch the same sources
|
|
561 |
- # from another location, it should not affect the cache key.
|
|
562 |
- key = [self.original_url, self.mirror.ref]
|
|
563 |
- if self.mirror.tags:
|
|
564 |
- tags = {tag: (commit, annotated) for tag, commit, annotated in self.mirror.tags}
|
|
565 |
- key.append({'tags': tags})
|
|
566 |
- |
|
567 |
- # Only modify the cache key with checkout_submodules if it's something
|
|
568 |
- # other than the default behaviour.
|
|
569 |
- if self.checkout_submodules is False:
|
|
570 |
- key.append({"checkout_submodules": self.checkout_submodules})
|
|
571 |
- |
|
572 |
- # We want the cache key to change if the source was
|
|
573 |
- # configured differently, and submodules count.
|
|
574 |
- if self.submodule_overrides:
|
|
575 |
- key.append(self.submodule_overrides)
|
|
576 |
- |
|
577 |
- if self.submodule_checkout_overrides:
|
|
578 |
- key.append({"submodule_checkout_overrides": self.submodule_checkout_overrides})
|
|
579 |
- |
|
580 |
- return key
|
|
581 |
- |
|
582 |
- def get_consistency(self):
|
|
583 |
- if self.have_all_refs():
|
|
584 |
- return Consistency.CACHED
|
|
585 |
- elif self.mirror.ref is not None:
|
|
586 |
- return Consistency.RESOLVED
|
|
587 |
- return Consistency.INCONSISTENT
|
|
588 |
- |
|
589 |
- def load_ref(self, node):
|
|
590 |
- self.mirror.ref = self.node_get_member(node, str, 'ref', None)
|
|
591 |
- self.mirror.tags = self._load_tags(node)
|
|
592 |
- |
|
593 |
- def get_ref(self):
|
|
594 |
- return self.mirror.ref, self.mirror.tags
|
|
595 |
- |
|
596 |
- def set_ref(self, ref_data, node):
|
|
597 |
- if not ref_data:
|
|
598 |
- self.mirror.ref = None
|
|
599 |
- if 'ref' in node:
|
|
600 |
- del node['ref']
|
|
601 |
- self.mirror.tags = []
|
|
602 |
- if 'tags' in node:
|
|
603 |
- del node['tags']
|
|
604 |
- else:
|
|
605 |
- ref, tags = ref_data
|
|
606 |
- node['ref'] = self.mirror.ref = ref
|
|
607 |
- self.mirror.tags = tags
|
|
608 |
- if tags:
|
|
609 |
- node['tags'] = []
|
|
610 |
- for tag, commit_ref, annotated in tags:
|
|
611 |
- data = {'tag': tag,
|
|
612 |
- 'commit': commit_ref,
|
|
613 |
- 'annotated': annotated}
|
|
614 |
- node['tags'].append(data)
|
|
615 |
- else:
|
|
616 |
- if 'tags' in node:
|
|
617 |
- del node['tags']
|
|
618 |
- |
|
619 |
- def track(self):
|
|
620 |
- |
|
621 |
- # If self.tracking is not specified it's not an error, just silently return
|
|
622 |
- if not self.tracking:
|
|
623 |
- # Is there a better way to check if a ref is given.
|
|
624 |
- if self.mirror.ref is None:
|
|
625 |
- detail = 'Without a tracking branch ref can not be updated. Please ' + \
|
|
626 |
- 'provide a ref or a track.'
|
|
627 |
- raise SourceError("{}: No track or ref".format(self),
|
|
628 |
- detail=detail, reason="track-attempt-no-track")
|
|
629 |
- return None
|
|
630 |
- |
|
631 |
- # Resolve the URL for the message
|
|
632 |
- resolved_url = self.translate_url(self.mirror.url)
|
|
633 |
- with self.timed_activity("Tracking {} from {}"
|
|
634 |
- .format(self.tracking, resolved_url),
|
|
635 |
- silent_nested=True):
|
|
636 |
- self.mirror.ensure()
|
|
637 |
- self.mirror._fetch()
|
|
638 |
- |
|
639 |
- # Update self.mirror.ref and node.ref from the self.tracking branch
|
|
640 |
- ret = self.mirror.latest_commit_with_tags(self.tracking, self.track_tags)
|
|
641 |
- |
|
642 |
- return ret
|
|
643 |
- |
|
644 |
- def init_workspace(self, directory):
|
|
645 |
- # XXX: may wish to refactor this as some code dupe with stage()
|
|
646 |
- self.refresh_submodules()
|
|
647 |
- |
|
648 |
- with self.timed_activity('Setting up workspace "{}"'.format(directory), silent_nested=True):
|
|
649 |
- self.mirror.init_workspace(directory)
|
|
650 |
- for mirror in self.submodules:
|
|
651 |
- mirror.init_workspace(directory)
|
|
652 |
- |
|
653 |
- def stage(self, directory):
|
|
654 |
- |
|
655 |
- # Need to refresh submodule list here again, because
|
|
656 |
- # it's possible that we did not load in the main process
|
|
657 |
- # with submodules present (source needed fetching) and
|
|
658 |
- # we may not know about the submodule yet come time to build.
|
|
659 |
- #
|
|
660 |
- self.refresh_submodules()
|
|
661 |
- |
|
662 |
- # Stage the main repo in the specified directory
|
|
663 |
- #
|
|
664 |
- with self.timed_activity("Staging {}".format(self.mirror.url), silent_nested=True):
|
|
665 |
- self.mirror.stage(directory)
|
|
666 |
- for mirror in self.submodules:
|
|
667 |
- mirror.stage(directory)
|
|
668 |
- |
|
669 |
- def get_source_fetchers(self):
|
|
670 |
- yield self.mirror
|
|
671 |
- self.refresh_submodules()
|
|
672 |
- for submodule in self.submodules:
|
|
673 |
- yield submodule
|
|
674 |
- |
|
675 |
- def validate_cache(self):
|
|
676 |
- discovered_submodules = {}
|
|
677 |
- unlisted_submodules = []
|
|
678 |
- invalid_submodules = []
|
|
679 |
- |
|
680 |
- for path, url in self.mirror.submodule_list():
|
|
681 |
- discovered_submodules[path] = url
|
|
682 |
- if self.ignore_submodule(path):
|
|
683 |
- continue
|
|
684 |
- |
|
685 |
- override_url = self.submodule_overrides.get(path)
|
|
686 |
- if not override_url:
|
|
687 |
- unlisted_submodules.append((path, url))
|
|
688 |
- |
|
689 |
- # Warn about submodules which are explicitly configured but do not exist
|
|
690 |
- for path, url in self.submodule_overrides.items():
|
|
691 |
- if path not in discovered_submodules:
|
|
692 |
- invalid_submodules.append((path, url))
|
|
693 |
- |
|
694 |
- if invalid_submodules:
|
|
695 |
- detail = []
|
|
696 |
- for path, url in invalid_submodules:
|
|
697 |
- detail.append(" Submodule URL '{}' at path '{}'".format(url, path))
|
|
698 |
- |
|
699 |
- self.warn("{}: Invalid submodules specified".format(self),
|
|
700 |
- warning_token=WARN_INVALID_SUBMODULE,
|
|
701 |
- detail="The following submodules are specified in the source "
|
|
702 |
- "description but do not exist according to the repository\n\n" +
|
|
703 |
- "\n".join(detail))
|
|
704 |
- |
|
705 |
- # Warn about submodules which exist but have not been explicitly configured
|
|
706 |
- if unlisted_submodules:
|
|
707 |
- detail = []
|
|
708 |
- for path, url in unlisted_submodules:
|
|
709 |
- detail.append(" Submodule URL '{}' at path '{}'".format(url, path))
|
|
710 |
- |
|
711 |
- self.warn("{}: Unlisted submodules exist".format(self),
|
|
712 |
- warning_token=WARN_UNLISTED_SUBMODULE,
|
|
713 |
- detail="The following submodules exist but are not specified " +
|
|
714 |
- "in the source description\n\n" +
|
|
715 |
- "\n".join(detail))
|
|
716 |
- |
|
717 |
- # Assert that the ref exists in the track tag/branch, if track has been specified.
|
|
718 |
- ref_in_track = False
|
|
719 |
- if self.tracking:
|
|
720 |
- _, branch = self.check_output([self.host_git, 'branch', '--list', self.tracking,
|
|
721 |
- '--contains', self.mirror.ref],
|
|
722 |
- cwd=self.mirror.mirror)
|
|
723 |
- if branch:
|
|
724 |
- ref_in_track = True
|
|
725 |
- else:
|
|
726 |
- _, tag = self.check_output([self.host_git, 'tag', '--list', self.tracking,
|
|
727 |
- '--contains', self.mirror.ref],
|
|
728 |
- cwd=self.mirror.mirror)
|
|
729 |
- if tag:
|
|
730 |
- ref_in_track = True
|
|
731 |
- |
|
732 |
- if not ref_in_track:
|
|
733 |
- detail = "The ref provided for the element does not exist locally " + \
|
|
734 |
- "in the provided track branch / tag '{}'.\n".format(self.tracking) + \
|
|
735 |
- "You may wish to track the element to update the ref from '{}' ".format(self.tracking) + \
|
|
736 |
- "with `bst track`,\n" + \
|
|
737 |
- "or examine the upstream at '{}' for the specific ref.".format(self.mirror.url)
|
|
738 |
- |
|
739 |
- self.warn("{}: expected ref '{}' was not found in given track '{}' for staged repository: '{}'\n"
|
|
740 |
- .format(self, self.mirror.ref, self.tracking, self.mirror.url),
|
|
741 |
- detail=detail, warning_token=CoreWarnings.REF_NOT_IN_TRACK)
|
|
742 |
- |
|
743 |
- ###########################################################
|
|
744 |
- # Local Functions #
|
|
745 |
- ###########################################################
|
|
746 |
- def have_all_refs(self):
|
|
747 |
- if not self.mirror.has_ref():
|
|
748 |
- return False
|
|
749 |
- |
|
750 |
- self.refresh_submodules()
|
|
751 |
- for mirror in self.submodules:
|
|
752 |
- if not os.path.exists(mirror.mirror):
|
|
753 |
- return False
|
|
754 |
- if not mirror.has_ref():
|
|
755 |
- return False
|
|
756 |
- |
|
757 |
- return True
|
|
758 |
- |
|
759 |
- # Refreshes the GitMirror objects for submodules
|
|
760 |
- #
|
|
761 |
- # Assumes that we have our mirror and we have the ref which we point to
|
|
762 |
- #
|
|
763 |
- def refresh_submodules(self):
|
|
764 |
- self.mirror.ensure()
|
|
765 |
- submodules = []
|
|
766 |
- |
|
767 |
- for path, url in self.mirror.submodule_list():
|
|
768 |
- |
|
769 |
- # Completely ignore submodules which are disabled for checkout
|
|
770 |
- if self.ignore_submodule(path):
|
|
771 |
- continue
|
|
772 |
- |
|
773 |
- # Allow configuration to override the upstream
|
|
774 |
- # location of the submodules.
|
|
775 |
- override_url = self.submodule_overrides.get(path)
|
|
776 |
- if override_url:
|
|
777 |
- url = override_url
|
|
778 |
- |
|
779 |
- ref = self.mirror.submodule_ref(path)
|
|
780 |
- if ref is not None:
|
|
781 |
- mirror = GitMirror(self, path, url, ref)
|
|
782 |
- submodules.append(mirror)
|
|
783 |
- |
|
784 |
- self.submodules = submodules
|
|
785 |
- |
|
786 |
- def _load_tags(self, node):
|
|
787 |
- tags = []
|
|
788 |
- tags_node = self.node_get_member(node, list, 'tags', [])
|
|
789 |
- for tag_node in tags_node:
|
|
790 |
- tag = self.node_get_member(tag_node, str, 'tag')
|
|
791 |
- commit_ref = self.node_get_member(tag_node, str, 'commit')
|
|
792 |
- annotated = self.node_get_member(tag_node, bool, 'annotated')
|
|
793 |
- tags.append((tag, commit_ref, annotated))
|
|
794 |
- return tags
|
|
795 |
- |
|
796 |
- # Checks whether the plugin configuration has explicitly
|
|
797 |
- # configured this submodule to be ignored
|
|
798 |
- def ignore_submodule(self, path):
|
|
799 |
- try:
|
|
800 |
- checkout = self.submodule_checkout_overrides[path]
|
|
801 |
- except KeyError:
|
|
802 |
- checkout = self.checkout_submodules
|
|
803 |
- |
|
804 |
- return not checkout
|
|
162 |
+class GitSource(_GitSourceBase):
|
|
163 |
+ pass
|
|
805 | 164 |
|
806 | 165 |
|
807 | 166 |
# Plugin entry point
|
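With the implementation moved wholesale into _gitsourcebase.py, a plugin that previously copied this file can subclass the (still private) base instead, exactly as the in-tree plugin now does. A hypothetical out-of-tree variant would be just:

    from buildstream import _GitSourceBase

    # Illustrative only; the class name is made up
    class MyGitSource(_GitSourceBase):
        pass

    def setup():
        return MyGitSource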
... | ... | @@ -1168,7 +1168,7 @@ def _call(*popenargs, terminate=False, **kwargs): |
1168 | 1168 |
#
|
1169 | 1169 |
def _glob2re(pat):
|
1170 | 1170 |
i, n = 0, len(pat)
|
1171 |
- res = ''
|
|
1171 |
+ res = '(?ms)'
|
|
1172 | 1172 |
while i < n:
|
1173 | 1173 |
c = pat[i]
|
1174 | 1174 |
i = i + 1
|
... | ... | @@ -1205,7 +1205,7 @@ def _glob2re(pat): |
1205 | 1205 |
res = '{}[{}]'.format(res, stuff)
|
1206 | 1206 |
else:
|
1207 | 1207 |
res = res + re.escape(c)
|
1208 |
- return res + r'\Z(?ms)'
|
|
1208 |
+ return res + r'\Z'
|
|
1209 | 1209 |
|
1210 | 1210 |
|
1211 | 1211 |
# _deduplicate()
|
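The (?ms) move is not cosmetic: newer versions of Python emit a DeprecationWarning for inline flags that do not appear at the start of a pattern, and later versions reject them outright. For example:

    import re

    re.compile(r'(?ms)abc\Z')   # fine, flags lead the pattern
    re.compile(r'abc\Z(?ms)')   # DeprecationWarning on newer Pythons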
... | ... | @@ -41,7 +41,7 @@ def test_default_logging(cli, tmpdir, datafiles): |
41 | 41 |
result = cli.run(project=project, args=['source', 'fetch', element_name])
|
42 | 42 |
result.assert_success()
|
43 | 43 |
|
44 |
- m = re.search("\[\d\d:\d\d:\d\d\]\[\]\[\] SUCCESS Checking sources", result.stderr)
|
|
44 |
+ m = re.search(r"\[\d\d:\d\d:\d\d\]\[\]\[\] SUCCESS Checking sources", result.stderr)
|
|
45 | 45 |
assert(m is not None)
|
46 | 46 |
|
47 | 47 |
|
... | ... | @@ -77,7 +77,7 @@ def test_custom_logging(cli, tmpdir, datafiles): |
77 | 77 |
result = cli.run(project=project, args=['source', 'fetch', element_name])
|
78 | 78 |
result.assert_success()
|
79 | 79 |
|
80 |
- m = re.search("\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6},\d\d:\d\d:\d\d,,,SUCCESS,Checking sources", result.stderr)
|
|
80 |
+ m = re.search(r"\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6},\d\d:\d\d:\d\d,,,SUCCESS,Checking sources", result.stderr)
|
|
81 | 81 |
assert(m is not None)
|
82 | 82 |
|
83 | 83 |
|
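The logging test changes are similarly mechanical: the patterns gain an r'' prefix because sequences like \d inside plain string literals are deprecated escape sequences. A quick illustration:

    import re

    re.search("\[\d\d\]", "[12]")    # DeprecationWarning: invalid escape sequence
    re.search(r"\[\d\d\]", "[12]")   # identical match, no warning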
1 |
+import os
|
|
2 |
+import shutil
|
|
3 |
+import pytest
|
|
4 |
+ |
|
5 |
+from tests.testutils import cli_integration as cli, create_artifact_share
|
|
6 |
+from tests.testutils.integration import assert_contains
|
|
7 |
+from tests.testutils.site import HAVE_BWRAP, IS_LINUX
|
|
8 |
+from buildstream._exceptions import ErrorDomain, LoadErrorReason
|
|
9 |
+ |
|
10 |
+ |
|
11 |
+DATA_DIR = os.path.join(
|
|
12 |
+ os.path.dirname(os.path.realpath(__file__)),
|
|
13 |
+ "project"
|
|
14 |
+)
|
|
15 |
+ |
|
16 |
+ |
|
17 |
+# Remove artifact cache & set cli.config value of pull-buildtrees
|
|
18 |
+# to false, which is the default user context. The cache has to be
|
|
19 |
+# cleared as just forcefully removing the refpath leaves dangling objects.
|
|
20 |
+def default_state(cli, tmpdir, share):
|
|
21 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
|
|
22 |
+ cli.configure({
|
|
23 |
+ 'artifacts': {'url': share.repo, 'push': False},
|
|
24 |
+ 'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
|
|
25 |
+ 'cache': {'pull-buildtrees': False},
|
|
26 |
+ })
|
|
27 |
+ |
|
28 |
+ |
|
29 |
+# A test to capture the integration of the optional push of buildtrees.
|
|
30 |
+# The behaviour should encompass pushing artifacts that are already cached
|
|
31 |
+# without a buildtree as well as artifacts that are cached with their buildtree.
|
|
32 |
+# This option is handled via 'allow-partial-push' on a per artifact remote config
|
|
33 |
+# node basis. Multiple remote config nodes can point to the same url and as such can
|
|
34 |
+# have different 'allow-partial-push' options; the tests need to cover this.
|
|
35 |
+@pytest.mark.integration
|
|
36 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
37 |
+@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
|
|
38 |
+def test_pushbuildtrees(cli, tmpdir, datafiles, integration_cache):
|
|
39 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
40 |
+ element_name = 'autotools/amhello.bst'
|
|
41 |
+ |
|
42 |
+ # Create artifact shares for pull & push testing
|
|
43 |
+ with create_artifact_share(os.path.join(str(tmpdir), 'share1')) as share1,\
|
|
44 |
+ create_artifact_share(os.path.join(str(tmpdir), 'share2')) as share2,\
|
|
45 |
+ create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3:
|
|
46 |
+ |
|
47 |
+ cli.configure({
|
|
48 |
+ 'artifacts': {'url': share1.repo, 'push': True},
|
|
49 |
+ 'artifactdir': os.path.join(str(tmpdir), 'artifacts')
|
|
50 |
+ })
|
|
51 |
+ |
|
52 |
+ cli.configure({'artifacts': [{'url': share1.repo, 'push': True},
|
|
53 |
+ {'url': share2.repo, 'push': True, 'allow-partial-push': True}]})
|
|
54 |
+ |
|
55 |
+ # Build the autotools element, check it pushed, delete local.
|
|
56 |
+ # As share 2 has push & allow-partial-push set to true, it
|
|
57 |
+ # should have pushed the artifacts, without the cached buildtrees,
|
|
58 |
+ # to it.
|
|
59 |
+ result = cli.run(project=project, args=['build', element_name])
|
|
60 |
+ assert result.exit_code == 0
|
|
61 |
+ assert cli.get_element_state(project, element_name) == 'cached'
|
|
62 |
+ elementdigest = share1.has_artifact('test', element_name, cli.get_element_key(project, element_name))
|
|
63 |
+ buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
|
|
64 |
+ elementdigest.hash, 'buildtree')
|
|
65 |
+ assert os.path.isdir(buildtreedir)
|
|
66 |
+ assert element_name in result.get_partial_pushed_elements()
|
|
67 |
+ assert share1.has_artifact('test', element_name, cli.get_element_key(project, element_name))
|
|
68 |
+ assert share2.has_artifact('test', element_name, cli.get_element_key(project, element_name))
|
|
69 |
+ default_state(cli, tmpdir, share1)
|
|
70 |
+ |
|
71 |
+ # Check that after explicitly pulling an artifact without its buildtree,
|
|
72 |
+ # we can push it to another remote that is configured to accept the partial
|
|
73 |
+ # artifact
|
|
74 |
+ result = cli.run(project=project, args=['pull', element_name])
|
|
75 |
+ cli.configure({'artifacts': {'url': share3.repo, 'push': True, 'allow-partial-push': True}})
|
|
76 |
+ assert cli.get_element_state(project, element_name) == 'cached'
|
|
77 |
+ assert not os.path.isdir(buildtreedir)
|
|
78 |
+ result = cli.run(project=project, args=['push', element_name])
|
|
79 |
+ assert result.exit_code == 0
|
|
80 |
+ assert element_name in result.get_partial_pushed_elements()
|
|
81 |
+ assert element_name not in result.get_pushed_elements()
|
|
82 |
+ assert share3.has_artifact('test', element_name, cli.get_element_key(project, element_name))
|
|
83 |
+ default_state(cli, tmpdir, share3)
|
|
84 |
+ |
|
85 |
+ # Delete the local cache and pull the partial artifact from share 3,
|
|
86 |
+ # this should not include the buildtree when extracted locally, even when
|
|
87 |
+ # pull-buildtrees is given as a cli parameter as no available remotes will
|
|
88 |
+ # contain the buildtree
|
|
89 |
+ assert not os.path.isdir(buildtreedir)
|
|
90 |
+ assert cli.get_element_state(project, element_name) != 'cached'
|
|
92 |
+ result = cli.run(project=project, args=['--pull-buildtrees', 'pull', element_name])
|
|
95 |
+ assert element_name in result.get_partial_pulled_elements()
|
|
96 |
+ assert not os.path.isdir(buildtreedir)
|
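Note the two configuration shapes this test exercises: 'artifacts' may be a single mapping or a list of mappings, and 'allow-partial-push' is opted into per remote. The list form used above, condensed:

    # Two remotes; only the second accepts artifacts without buildtrees
    cli.configure({'artifacts': [
        {'url': share1.repo, 'push': True},
        {'url': share2.repo, 'push': True, 'allow-partial-push': True},
    ]})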
1 |
+Hello World!
|
1 |
+"""
|
|
2 |
+always_cached
|
|
3 |
+=============
|
|
4 |
+ |
|
5 |
+This is a test source plugin that is always cached.
|
|
6 |
+Used to test that BuildStream core does not call fetch() for cached sources.
|
|
7 |
+ |
|
8 |
+"""
|
|
9 |
+ |
|
10 |
+from buildstream import Consistency, Source
|
|
11 |
+ |
|
12 |
+ |
|
13 |
+class AlwaysCachedSource(Source):
|
|
14 |
+ |
|
15 |
+ def configure(self, node):
|
|
16 |
+ pass
|
|
17 |
+ |
|
18 |
+ def preflight(self):
|
|
19 |
+ pass
|
|
20 |
+ |
|
21 |
+ def get_unique_key(self):
|
|
22 |
+ return None
|
|
23 |
+ |
|
24 |
+ def get_consistency(self):
|
|
25 |
+ return Consistency.CACHED
|
|
26 |
+ |
|
27 |
+ def load_ref(self, node):
|
|
28 |
+ pass
|
|
29 |
+ |
|
30 |
+ def get_ref(self):
|
|
31 |
+ return None
|
|
32 |
+ |
|
33 |
+ def set_ref(self, ref, node):
|
|
34 |
+ pass
|
|
35 |
+ |
|
36 |
+ def fetch(self):
|
|
37 |
+ # Source is always cached, so fetch() should never be called
|
|
38 |
+ assert False
|
|
39 |
+ |
|
40 |
+ def stage(self, directory):
|
|
41 |
+ pass
|
|
42 |
+ |
|
43 |
+ |
|
44 |
+def setup():
|
|
45 |
+ return AlwaysCachedSource
|
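The plugin leans on the ordering of the Consistency states: CACHED is the highest, so the source._get_consistency() < Consistency.CACHED guard added to Element._fetch() guarantees that fetch() is never reached here. Schematically:

    from buildstream import Consistency

    # The ordering the guard in Element._fetch() relies on
    assert Consistency.INCONSISTENT < Consistency.RESOLVED < Consistency.CACHED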
1 |
+# Project with local source plugins
|
|
2 |
+name: no-fetch-cached
|
|
3 |
+ |
|
4 |
+plugins:
|
|
5 |
+- origin: local
|
|
6 |
+ path: plugins/sources
|
|
7 |
+ sources:
|
|
8 |
+ always_cached: 0
|
1 |
+import os
|
|
2 |
+import pytest
|
|
3 |
+ |
|
4 |
+from buildstream import _yaml
|
|
5 |
+ |
|
6 |
+from tests.testutils import cli, create_repo
|
|
7 |
+from tests.testutils.site import HAVE_GIT
|
|
8 |
+ |
|
9 |
+DATA_DIR = os.path.join(
|
|
10 |
+ os.path.dirname(os.path.realpath(__file__)),
|
|
11 |
+ 'no-fetch-cached'
|
|
12 |
+)
|
|
13 |
+ |
|
14 |
+ |
|
15 |
+##################################################################
|
|
16 |
+# Tests #
|
|
17 |
+##################################################################
|
|
18 |
+# Test that fetch() is not called for cached sources
|
|
19 |
+@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
|
|
20 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
21 |
+def test_no_fetch_cached(cli, tmpdir, datafiles):
|
|
22 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
23 |
+ |
|
24 |
+ # Create the repo from 'files' subdir
|
|
25 |
+ repo = create_repo('git', str(tmpdir))
|
|
26 |
+ ref = repo.create(os.path.join(project, 'files'))
|
|
27 |
+ |
|
28 |
+ # Write out test target with a cached and a non-cached source
|
|
29 |
+ element = {
|
|
30 |
+ 'kind': 'import',
|
|
31 |
+ 'sources': [
|
|
32 |
+ repo.source_config(ref=ref),
|
|
33 |
+ {
|
|
34 |
+ 'kind': 'always_cached'
|
|
35 |
+ }
|
|
36 |
+ ]
|
|
37 |
+ }
|
|
38 |
+ _yaml.dump(element, os.path.join(project, 'target.bst'))
|
|
39 |
+ |
|
40 |
+ # Test fetch of target with a cached and a non-cached source
|
|
41 |
+ result = cli.run(project=project, args=[
|
|
42 |
+ 'source', 'fetch', 'target.bst'
|
|
43 |
+ ])
|
|
44 |
+ result.assert_success()
|
... | ... | @@ -191,6 +191,13 @@ class Result(): |
191 | 191 |
|
192 | 192 |
return list(pushed)
|
193 | 193 |
|
194 |
+ def get_partial_pushed_elements(self):
|
|
195 |
+ pushed = re.findall(r'\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed partial artifact', self.stderr)
|
|
196 |
+ if pushed is None:
|
|
197 |
+ return []
|
|
198 |
+ |
|
199 |
+ return list(pushed)
|
|
200 |
+ |
|
194 | 201 |
def get_pulled_elements(self):
|
195 | 202 |
pulled = re.findall(r'\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled artifact', self.stderr)
|
196 | 203 |
if pulled is None:
|
... | ... | @@ -198,6 +205,13 @@ class Result(): |
198 | 205 |
|
199 | 206 |
return list(pulled)
|
200 | 207 |
|
208 |
+ def get_partial_pulled_elements(self):
|
|
209 |
+ pulled = re.findall(r'\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled partial artifact', self.stderr)
|
|
210 |
+ if pulled is None:
|
|
211 |
+ return []
|
|
212 |
+ |
|
213 |
+ return list(pulled)
|
|
214 |
+ |
|
201 | 215 |
|
202 | 216 |
class Cli():
|
203 | 217 |
|
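For reference, the log lines the two new helpers match look roughly like this (hypothetical element name and cache key):

    import re

    line = "[push:autotools/amhello.bst] INFO    Pushed partial artifact 0abc123"
    found = re.findall(r'\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed partial artifact', line)
    assert found == ['autotools/amhello.bst']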