Tom Pollard pushed to branch tpollard/494 at BuildStream / buildstream
Commits:
- 83ab183e by Tiago Gomes at 2018-09-25T16:37:30Z
- c8594806 by Tiago Gomes at 2018-09-25T17:03:55Z
- fdbf9be1 by Tiago Gomes at 2018-09-26T08:17:46Z
- 9280b0cf by Tiago Gomes at 2018-09-26T15:54:53Z
- af131503 by Tiago Gomes at 2018-09-26T16:35:58Z
- f7f14f25 by Tiago Gomes at 2018-09-26T17:42:12Z
- 0f2bc375 by Martin Blanchard at 2018-09-27T08:46:27Z
- 16cf9d5f by Martin Blanchard at 2018-09-27T08:46:27Z
- f5f3cb7c by Jim MacArthur at 2018-09-27T09:16:38Z
- b99a6740 by Jürg Billeter at 2018-09-27T09:18:03Z
- f8bbe008 by Jürg Billeter at 2018-09-27T09:18:03Z
- f9494f1f by Jürg Billeter at 2018-09-27T09:18:03Z
- b4ad84c3 by Jürg Billeter at 2018-09-27T09:18:03Z
- 966af3d2 by Jürg Billeter at 2018-09-27T09:18:03Z
- f08e5eae by Jürg Billeter at 2018-09-27T09:18:03Z
- 10ed9158 by Jürg Billeter at 2018-09-27T09:18:03Z
- 7aec8b0f by Jürg Billeter at 2018-09-27T09:18:03Z
- a5025c33 by Jürg Billeter at 2018-09-27T09:18:03Z
- ef1cb374 by Jürg Billeter at 2018-09-27T09:18:03Z
- 52991be1 by Jürg Billeter at 2018-09-27T09:18:03Z
- cb6b35dc by Jürg Billeter at 2018-09-27T09:18:42Z
- 28c62b00 by Jürg Billeter at 2018-09-27T09:18:43Z
- 99e90918 by Jürg Billeter at 2018-09-27T09:19:12Z
- b8f920e2 by Jürg Billeter at 2018-09-27T09:19:12Z
- 3d88c5d4 by Jürg Billeter at 2018-09-27T09:19:12Z
- ed878cb6 by Jürg Billeter at 2018-09-27T09:19:12Z
- fe82c9c2 by Jürg Billeter at 2018-09-27T09:19:12Z
- 0d0f700d by Jürg Billeter at 2018-09-27T09:19:12Z
- dd770ec3 by Jürg Billeter at 2018-09-27T09:19:12Z
- 44da8175 by Jürg Billeter at 2018-09-27T09:48:20Z
- 1162fe7f by Tom Pollard at 2018-09-27T13:07:36Z
26 changed files:
- .gitlab-ci.yml
- buildstream/_artifactcache/artifactcache.py
- buildstream/_artifactcache/cascache.py
- buildstream/_artifactcache/casserver.py
- buildstream/_context.py
- buildstream/_frontend/app.py
- buildstream/_frontend/cli.py
- buildstream/_loader/loader.py
- buildstream/_platform/linux.py
- buildstream/_platform/platform.py
- buildstream/_platform/unix.py
- + buildstream/_protos/google/rpc/code.proto
- + buildstream/_protos/google/rpc/code_pb2.py
- + buildstream/_protos/google/rpc/code_pb2_grpc.py
- buildstream/_scheduler/jobs/cachesizejob.py
- buildstream/_scheduler/jobs/cleanupjob.py
- buildstream/_scheduler/queues/buildqueue.py
- buildstream/_scheduler/queues/pullqueue.py
- buildstream/_scheduler/scheduler.py
- buildstream/_stream.py
- buildstream/element.py
- buildstream/sandbox/_sandboxremote.py
- buildstream/storage/_casbaseddirectory.py
- tests/artifactcache/pull.py
- tests/artifactcache/push.py
- tests/testutils/artifactshare.py
Changes:
| ... | ... | @@ -79,6 +79,8 @@ source_dist: | 
| 79 | 79 |    - cd ../..
 | 
| 80 | 80 |    - mkdir -p coverage-linux/
 | 
| 81 | 81 |    - cp dist/buildstream/.coverage.* coverage-linux/coverage."${CI_JOB_NAME}"
 | 
| 82 | +  except:
 | |
| 83 | +  - schedules
 | |
| 82 | 84 |    artifacts:
 | 
| 83 | 85 |      paths:
 | 
| 84 | 86 |      - coverage-linux/
 | 
| ... | ... | @@ -127,6 +129,8 @@ tests-unix: | 
| 127 | 129 |      - cd ../..
 | 
| 128 | 130 |      - mkdir -p coverage-unix/
 | 
| 129 | 131 |      - cp dist/buildstream/.coverage.* coverage-unix/coverage.unix
 | 
| 132 | +  except:
 | |
| 133 | +  - schedules
 | |
| 130 | 134 |    artifacts:
 | 
| 131 | 135 |      paths:
 | 
| 132 | 136 |      - coverage-unix/
 | 
| ... | ... | @@ -148,10 +152,41 @@ docs: | 
| 148 | 152 |    - make BST_FORCE_SESSION_REBUILD=1 -C doc
 | 
| 149 | 153 |    - cd ../..
 | 
| 150 | 154 |    - mv dist/buildstream/doc/build/html public
 | 
| 155 | +  except:
 | |
| 156 | +  - schedules
 | |
| 151 | 157 |    artifacts:
 | 
| 152 | 158 |      paths:
 | 
| 153 | 159 |      - public/
 | 
| 154 | 160 |  | 
| 161 | +.overnight-tests: &overnight-tests-template
 | |
| 162 | +  stage: test
 | |
| 163 | +  variables:
 | |
| 164 | +    bst_ext_url: git+https://gitlab.com/BuildStream/bst-external.git
 | |
| 165 | +    bst_ext_ref: 1d6ab71151b93c8cbc0a91a36ffe9270f3b835f1 # 0.5.1
 | |
| 166 | +    fd_sdk_ref: 88d7c22c2281b987faa02edd57df80d430eecf1f # 18.08.12
 | |
| 167 | +  before_script:
 | |
| 168 | +  - (cd dist && ./unpack.sh && cd buildstream && pip3 install .)
 | |
| 169 | +  - pip3 install --user -e ${bst_ext_url}@${bst_ext_ref}#egg=bst_ext
 | |
| 170 | +  - git clone https://gitlab.com/freedesktop-sdk/freedesktop-sdk.git
 | |
| 171 | +  - git -C freedesktop-sdk checkout ${fd_sdk_ref}
 | |
| 172 | +  only:
 | |
| 173 | +  - schedules
 | |
| 174 | + | |
| 175 | +overnight-tests:
 | |
| 176 | +  <<: *overnight-tests-template
 | |
| 177 | +  script:
 | |
| 178 | +  - make -C freedesktop-sdk
 | |
| 179 | +  tags:
 | |
| 180 | +  - overnight-tests
 | |
| 181 | + | |
| 182 | +overnight-tests-no-cache:
 | |
| 183 | +  <<: *overnight-tests-template
 | |
| 184 | +  script:
 | |
| 185 | +  - sed -i '/artifacts:/,+1 d' freedesktop-sdk/bootstrap/project.conf
 | |
| 186 | +  - sed -i '/artifacts:/,+1 d' freedesktop-sdk/project.conf
 | |
| 187 | +  - make -C freedesktop-sdk
 | |
| 188 | +  tags:
 | |
| 189 | +  - overnight-tests
 | |
| 155 | 190 |  | 
| 156 | 191 |  # Check code quality with gitlab's built-in feature.
 | 
| 157 | 192 |  #
 | 
| ... | ... | @@ -170,6 +205,8 @@ code_quality: | 
| 170 | 205 |          --volume "$PWD":/code
 | 
| 171 | 206 |          --volume /var/run/docker.sock:/var/run/docker.sock
 | 
| 172 | 207 |          "registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
 | 
| 208 | +  except:
 | |
| 209 | +  - schedules
 | |
| 173 | 210 |    artifacts:
 | 
| 174 | 211 |      paths: [gl-code-quality-report.json]
 | 
| 175 | 212 |  | 
| ... | ... | @@ -199,6 +236,8 @@ analysis: | 
| 199 | 236 |      radon raw -s -j buildstream > analysis/raw.json
 | 
| 200 | 237 |      radon raw -s buildstream
 | 
| 201 | 238 |  | 
| 239 | +  except:
 | |
| 240 | +  - schedules
 | |
| 202 | 241 |    artifacts:
 | 
| 203 | 242 |      paths:
 | 
| 204 | 243 |      - analysis/
 | 
| ... | ... | @@ -224,6 +263,8 @@ coverage: | 
| 224 | 263 |    - tests-fedora-28
 | 
| 225 | 264 |    - tests-unix
 | 
| 226 | 265 |    - source_dist
 | 
| 266 | +  except:
 | |
| 267 | +  - schedules
 | |
| 227 | 268 |  | 
| 228 | 269 |  # Deploy, only for merges which land on master branch.
 | 
| 229 | 270 |  #
 | 
| ... | ... | @@ -232,8 +273,14 @@ pages: | 
| 232 | 273 |    dependencies:
 | 
| 233 | 274 |    - source_dist
 | 
| 234 | 275 |    - docs
 | 
| 276 | +  variables:
 | |
| 277 | +    ACME_DIR: public/.well-known/acme-challenge
 | |
| 235 | 278 |    script:
 | 
| 236 | -  - find public/
 | |
| 279 | +  - mkdir -p ${ACME_DIR}
 | |
| 280 | +    # Required to finish the creation of the Let's Encrypt certificate,
 | |
| 281 | +    # which allows using https://docs.buildstream.build/ for accessing
 | |
| 282 | +    # the documentation.
 | |
| 283 | +  - echo ${ACME_CHALLENGE} > ${ACME_DIR}/$(echo ${ACME_CHALLENGE} | cut -c1-43)
 | |
| 237 | 284 |    artifacts:
 | 
| 238 | 285 |      paths:
 | 
| 239 | 286 |      - public/
 | 
| ... | ... | @@ -248,3 +295,5 @@ pages: | 
| 248 | 295 |    # See https://gitlab.com/gitlab-org/gitlab-ce/issues/35141
 | 
| 249 | 296 |    #
 | 
| 250 | 297 |    - master
 | 
| 298 | +  except:
 | |
| 299 | +  - schedules | 
| ... | ... | @@ -38,8 +38,9 @@ CACHE_SIZE_FILE = "cache_size" | 
| 38 | 38 |  #     url (str): Location of the remote artifact cache
 | 
| 39 | 39 |  #     push (bool): Whether we should attempt to push artifacts to this cache,
 | 
| 40 | 40 |  #                  in addition to pulling from it.
 | 
| 41 | +#     buildtrees (bool): Whether the default action of pull should include the artifact buildtree
 | |
| 41 | 42 |  #
 | 
| 42 | -class ArtifactCacheSpec(namedtuple('ArtifactCacheSpec', 'url push server_cert client_key client_cert')):
 | |
| 43 | +class ArtifactCacheSpec(namedtuple('ArtifactCacheSpec', 'url push server_cert client_key client_cert buildtrees')):
 | |
| 43 | 44 |  | 
| 44 | 45 |      # _new_from_config_node
 | 
| 45 | 46 |      #
 | 
| ... | ... | @@ -47,9 +48,10 @@ class ArtifactCacheSpec(namedtuple('ArtifactCacheSpec', 'url push server_cert cl | 
| 47 | 48 |      #
 | 
| 48 | 49 |      @staticmethod
 | 
| 49 | 50 |      def _new_from_config_node(spec_node, basedir=None):
 | 
| 50 | -        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert'])
 | |
| 51 | +        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'pullbuildtrees'])
 | |
| 51 | 52 |          url = _yaml.node_get(spec_node, str, 'url')
 | 
| 52 | 53 |          push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
 | 
| 54 | +        buildtrees = _yaml.node_get(spec_node, bool, 'pullbuildtrees', default_value=False)
 | |
| 53 | 55 |          if not url:
 | 
| 54 | 56 |              provenance = _yaml.node_get_provenance(spec_node, 'url')
 | 
| 55 | 57 |              raise LoadError(LoadErrorReason.INVALID_DATA,
 | 
| ... | ... | @@ -77,7 +79,7 @@ class ArtifactCacheSpec(namedtuple('ArtifactCacheSpec', 'url push server_cert cl | 
| 77 | 79 |              raise LoadError(LoadErrorReason.INVALID_DATA,
 | 
| 78 | 80 |                              "{}: 'client-cert' was specified without 'client-key'".format(provenance))
 | 
| 79 | 81 |  | 
| 80 | -        return ArtifactCacheSpec(url, push, server_cert, client_key, client_cert)
 | |
| 82 | +        return ArtifactCacheSpec(url, push, server_cert, client_key, client_cert, buildtrees)
 | |
| 81 | 83 |  | 
| 82 | 84 |  | 
| 83 | 85 |  ArtifactCacheSpec.__new__.__defaults__ = (None, None, None)
 | 
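The spec extension above follows the standard pattern of appending a field to a namedtuple and relying on trailing defaults for backwards compatibility. A minimal sketch of that pattern with hypothetical names (note that __defaults__ applies to the last N fields, so an appended field only gets a default if the defaults tuple is extended to cover it):

    from collections import namedtuple

    Spec = namedtuple('Spec', 'url push server_cert client_key client_cert buildtrees')
    # Defaults cover the last four fields; url and push stay required.
    Spec.__new__.__defaults__ = (None, None, None, False)

    spec = Spec('https://cache.example.com', True)
    assert spec.buildtrees is False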
| ... | ... | @@ -383,6 +385,13 @@ class ArtifactCache(): | 
| 383 | 385 |      # Abstract methods for subclasses to implement #
 | 
| 384 | 386 |      ################################################
 | 
| 385 | 387 |  | 
| 388 | +    # preflight():
 | |
| 389 | +    #
 | |
| 390 | +    # Preflight check.
 | |
| 391 | +    #
 | |
| 392 | +    def preflight(self):
 | |
| 393 | +        pass
 | |
| 394 | + | |
| 386 | 395 |      # update_atime()
 | 
| 387 | 396 |      #
 | 
| 388 | 397 |      # Update the atime of an artifact.
 | 
| ... | ... | @@ -419,6 +428,22 @@ class ArtifactCache(): | 
| 419 | 428 |          raise ImplError("Cache '{kind}' does not implement contains()"
 | 
| 420 | 429 |                          .format(kind=type(self).__name__))
 | 
| 421 | 430 |  | 
| 431 | +    # contains_subdir_artifact():
 | |
| 432 | +    #
 | |
| 433 | +    # Check whether an artifact element contains a digest for a subdir
 | |
| 434 | +    # which is populated in the cache, i.e. non-dangling.
 | |
| 435 | +    #
 | |
| 436 | +    # Args:
 | |
| 437 | +    #     element (Element): The Element to check
 | |
| 438 | +    #     key (str): The cache key to use
 | |
| 439 | +    #     subdir (str): The subdir to check
 | |
| 440 | +    #
 | |
| 441 | +    # Returns: True if the subdir exists & is populated in the cache, False otherwise
 | |
| 442 | +    #
 | |
| 443 | +    def contains_subdir_artifact(self, element, key, subdir):
 | |
| 444 | +        raise ImplError("Cache '{kind}' does not implement contains_subdir_artifact()"
 | |
| 445 | +                        .format(kind=type(self).__name__))
 | |
| 446 | + | |
| 422 | 447 |      # list_artifacts():
 | 
| 423 | 448 |      #
 | 
| 424 | 449 |      # List artifacts in this cache in LRU order.
 | 
| ... | ... | @@ -544,11 +569,12 @@ class ArtifactCache(): | 
| 544 | 569 |      #     element (Element): The Element whose artifact is to be fetched
 | 
| 545 | 570 |      #     key (str): The cache key to use
 | 
| 546 | 571 |      #     progress (callable): The progress callback, if any
 | 
| 572 | +    #     subdir (str): Optional subdir to pull, e.g. the artifact buildtree
 | |
| 547 | 573 |      #
 | 
| 548 | 574 |      # Returns:
 | 
| 549 | 575 |      #   (bool): True if pull was successful, False if artifact was not available
 | 
| 550 | 576 |      #
 | 
| 551 | -    def pull(self, element, key, *, progress=None):
 | |
| 577 | +    def pull(self, element, key, *, progress=None, subdir=None, excluded_subdirs=None):
 | |
| 552 | 578 |          raise ImplError("Cache '{kind}' does not implement pull()"
 | 
| 553 | 579 |                          .format(kind=type(self).__name__))
 | 
| 554 | 580 |  | 
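A rough sketch of how a caller might drive the widened pull() signature, assuming cache is an ArtifactCache subclass and that "buildtree" matches the subdir name used elsewhere in this branch:

    # Hypothetical helper: pull an artifact, optionally with its buildtree.
    def pull_artifact(cache, element, key, want_buildtrees=False):
        if want_buildtrees:
            # Ask for the buildtree subdir explicitly.
            return cache.pull(element, key, subdir="buildtree")
        # Default: skip the buildtree to save bandwidth and cache space.
        return cache.pull(element, key, excluded_subdirs=["buildtree"])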
| ... | ... | @@ -54,7 +54,6 @@ _MAX_PAYLOAD_BYTES = 1024 * 1024 | 
| 54 | 54 |  #
 | 
| 55 | 55 |  # Args:
 | 
| 56 | 56 |  #     context (Context): The BuildStream context
 | 
| 57 | -#     enable_push (bool): Whether pushing is allowed by the platform
 | |
| 58 | 57 |  #
 | 
| 59 | 58 |  # Pushing is explicitly disabled by the platform in some cases,
 | 
| 60 | 59 |  # like when we are falling back to functioning without using
 | 
| ... | ... | @@ -62,17 +61,14 @@ _MAX_PAYLOAD_BYTES = 1024 * 1024 | 
| 62 | 61 |  #
 | 
| 63 | 62 |  class CASCache(ArtifactCache):
 | 
| 64 | 63 |  | 
| 65 | -    def __init__(self, context, *, enable_push=True):
 | |
| 64 | +    def __init__(self, context):
 | |
| 66 | 65 |          super().__init__(context)
 | 
| 67 | 66 |  | 
| 68 | 67 |          self.casdir = os.path.join(context.artifactdir, 'cas')
 | 
| 69 | 68 |          os.makedirs(os.path.join(self.casdir, 'refs', 'heads'), exist_ok=True)
 | 
| 70 | 69 |          os.makedirs(os.path.join(self.casdir, 'objects'), exist_ok=True)
 | 
| 71 | - | |
| 72 | 70 |          self._calculate_cache_quota()
 | 
| 73 | 71 |  | 
| 74 | -        self._enable_push = enable_push
 | |
| 75 | - | |
| 76 | 72 |          # Per-project list of _CASRemote instances.
 | 
| 77 | 73 |          self._remotes = {}
 | 
| 78 | 74 |  | 
| ... | ... | @@ -83,12 +79,28 @@ class CASCache(ArtifactCache): | 
| 83 | 79 |      #     Implementation of abstract methods       #
 | 
| 84 | 80 |      ################################################
 | 
| 85 | 81 |  | 
| 82 | +    def preflight(self):
 | |
| 83 | +        if (not os.path.isdir(os.path.join(self.casdir, 'refs', 'heads')) or
 | |
| 84 | +            not os.path.isdir(os.path.join(self.casdir, 'objects'))):
 | |
| 85 | +            raise ArtifactError("CAS repository check failed for '{}'"
 | |
| 86 | +                                .format(self.casdir))
 | |
| 87 | + | |
| 86 | 88 |      def contains(self, element, key):
 | 
| 87 | 89 |          refpath = self._refpath(self.get_artifact_fullname(element, key))
 | 
| 88 | 90 |  | 
| 89 | 91 |          # This assumes that the repository doesn't have any dangling pointers
 | 
| 90 | 92 |          return os.path.exists(refpath)
 | 
| 91 | 93 |  | 
| 94 | +    def contains_subdir_artifact(self, element, key, subdir):
 | |
| 95 | +        tree = self.resolve_ref(self.get_artifact_fullname(element, key))
 | |
| 96 | + | |
| 97 | +        # This assumes that the subdir digest is present in the element tree
 | |
| 98 | +        subdirdigest = self._get_subdir(tree, subdir)
 | |
| 99 | +        objpath = self.objpath(subdirdigest)
 | |
| 100 | + | |
| 101 | +        # True if the subdir content is cached, or if it is empty as expected
 | |
| 102 | +        return os.path.exists(objpath)
 | |
| 103 | + | |
| 92 | 104 |      def extract(self, element, key):
 | 
| 93 | 105 |          ref = self.get_artifact_fullname(element, key)
 | 
| 94 | 106 |  | 
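A sketch of how the two new checks above might compose before attempting a partial pull (hypothetical helper; cache, element and key are assumed valid):

    def buildtree_is_cached(cache, element, key):
        cache.preflight()  # raises ArtifactError if the CAS layout is broken
        if not cache.contains(element, key):
            return False  # no artifact at all for this key
        # Artifact present: check that the buildtree digest is populated,
        # not dangling.
        return cache.contains_subdir_artifact(element, key, "buildtree")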
| ... | ... | @@ -214,7 +226,7 @@ class CASCache(ArtifactCache): | 
| 214 | 226 |              return bool(remotes_for_project)
 | 
| 215 | 227 |  | 
| 216 | 228 |      def has_push_remotes(self, *, element=None):
 | 
| 217 | -        if not self._has_push_remotes or not self._enable_push:
 | |
| 229 | +        if not self._has_push_remotes:
 | |
| 218 | 230 |              # No project has push remotes
 | 
| 219 | 231 |              return False
 | 
| 220 | 232 |          elif element is None:
 | 
| ... | ... | @@ -225,7 +237,7 @@ class CASCache(ArtifactCache): | 
| 225 | 237 |              remotes_for_project = self._remotes[element._get_project()]
 | 
| 226 | 238 |              return any(remote.spec.push for remote in remotes_for_project)
 | 
| 227 | 239 |  | 
| 228 | -    def pull(self, element, key, *, progress=None):
 | |
| 240 | +    def pull(self, element, key, *, progress=None, subdir=None, excluded_subdirs=None):
 | |
| 229 | 241 |          ref = self.get_artifact_fullname(element, key)
 | 
| 230 | 242 |  | 
| 231 | 243 |          project = element._get_project()
 | 
| ... | ... | @@ -244,8 +256,14 @@ class CASCache(ArtifactCache): | 
| 244 | 256 |                  tree.hash = response.digest.hash
 | 
| 245 | 257 |                  tree.size_bytes = response.digest.size_bytes
 | 
| 246 | 258 |  | 
| 247 | -                self._fetch_directory(remote, tree)
 | |
| 259 | +                # Check if the element artifact is present, if so just fetch subdir
 | |
| 260 | +                if subdir and os.path.exists(self.objpath(tree)):
 | |
| 261 | +                    self._fetch_subdir(remote, tree, subdir)
 | |
| 262 | +                else:
 | |
| 263 | +                    # Fetch artifact, excluded_subdirs determined in pullqueue
 | |
| 264 | +                    self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)
 | |
| 248 | 265 |  | 
| 266 | +                # tree is the remote value, so is the same with or without a dangling ref locally
 | |
| 249 | 267 |                  self.set_ref(ref, tree)
 | 
| 250 | 268 |  | 
| 251 | 269 |                  element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
 | 
| ... | ... | @@ -646,7 +664,6 @@ class CASCache(ArtifactCache): | 
| 646 | 664 |      ################################################
 | 
| 647 | 665 |      #             Local Private Methods            #
 | 
| 648 | 666 |      ################################################
 | 
| 649 | - | |
| 650 | 667 |      def _checkout(self, dest, tree):
 | 
| 651 | 668 |          os.makedirs(dest, exist_ok=True)
 | 
| 652 | 669 |  | 
| ... | ... | @@ -665,8 +682,10 @@ class CASCache(ArtifactCache): | 
| 665 | 682 |                           stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
 | 
| 666 | 683 |  | 
| 667 | 684 |          for dirnode in directory.directories:
 | 
| 668 | -            fullpath = os.path.join(dest, dirnode.name)
 | |
| 669 | -            self._checkout(fullpath, dirnode.digest)
 | |
| 685 | +            # Don't try to checkout a dangling ref
 | |
| 686 | +            if os.path.exists(self.objpath(dirnode.digest)):
 | |
| 687 | +                fullpath = os.path.join(dest, dirnode.name)
 | |
| 688 | +                self._checkout(fullpath, dirnode.digest)
 | |
| 670 | 689 |  | 
| 671 | 690 |          for symlinknode in directory.symlinks:
 | 
| 672 | 691 |              # symlink
 | 
| ... | ... | @@ -945,7 +964,7 @@ class CASCache(ArtifactCache): | 
| 945 | 964 |      #     remote (Remote): The remote to use.
 | 
| 946 | 965 |      #     dir_digest (Digest): Digest object for the directory to fetch.
 | 
| 947 | 966 |      #
 | 
| 948 | -    def _fetch_directory(self, remote, dir_digest):
 | |
| 967 | +    def _fetch_directory(self, remote, dir_digest, excluded_subdirs=None):
 | |
| 949 | 968 |          fetch_queue = [dir_digest]
 | 
| 950 | 969 |          fetch_next_queue = []
 | 
| 951 | 970 |          batch = _CASBatchRead(remote)
 | 
| ... | ... | @@ -963,8 +982,13 @@ class CASCache(ArtifactCache): | 
| 963 | 982 |                  directory.ParseFromString(f.read())
 | 
| 964 | 983 |  | 
| 965 | 984 |              for dirnode in directory.directories:
 | 
| 966 | -                batch = self._fetch_directory_node(remote, dirnode.digest, batch,
 | |
| 967 | -                                                   fetch_queue, fetch_next_queue, recursive=True)
 | |
| 985 | +                if excluded_subdirs:
 | |
| 986 | +                    if dirnode.name not in excluded_subdirs:
 | |
| 987 | +                        batch = self._fetch_directory_node(remote, dirnode.digest, batch,
 | |
| 988 | +                                                           fetch_queue, fetch_next_queue, recursive=True)
 | |
| 989 | +                else:
 | |
| 990 | +                    batch = self._fetch_directory_node(remote, dirnode.digest, batch,
 | |
| 991 | +                                                       fetch_queue, fetch_next_queue, recursive=True)
 | |
| 968 | 992 |  | 
| 969 | 993 |              for filenode in directory.files:
 | 
| 970 | 994 |                  batch = self._fetch_directory_node(remote, filenode.digest, batch,
 | 
| ... | ... | @@ -973,6 +997,12 @@ class CASCache(ArtifactCache): | 
| 973 | 997 |          # Fetch final batch
 | 
| 974 | 998 |          self._fetch_directory_batch(remote, batch, fetch_queue, fetch_next_queue)
 | 
| 975 | 999 |  | 
| 1000 | + | |
| 1001 | +    def _fetch_subdir(self, remote, tree, subdir):
 | |
| 1002 | +        subdirdigest = self._get_subdir(tree, subdir)
 | |
| 1003 | +        self._fetch_directory(remote, subdirdigest)
 | |
| 1004 | + | |
| 1005 | + | |
| 976 | 1006 |      def _fetch_tree(self, remote, digest):
 | 
| 977 | 1007 |          # download but do not store the Tree object
 | 
| 978 | 1008 |          with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
 | 
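The recursion guard added to _fetch_directory() above reduces to a single condition; an equivalent sketch with a simplified signature:

    def should_fetch(dirnode_name, excluded_subdirs=None):
        # Fetch unless an exclusion list is given and names this directory.
        return not excluded_subdirs or dirnode_name not in excluded_subdirs

    assert should_fetch("files")
    assert not should_fetch("buildtree", ["buildtree"])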
| ... | ... | @@ -35,8 +35,6 @@ from .._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc | 
| 35 | 35 |  from .._exceptions import ArtifactError
 | 
| 36 | 36 |  from .._context import Context
 | 
| 37 | 37 |  | 
| 38 | -from .cascache import CASCache
 | |
| 39 | - | |
| 40 | 38 |  | 
| 41 | 39 |  # The default limit for gRPC messages is 4 MiB.
 | 
| 42 | 40 |  # Limit payload to 1 MiB to leave sufficient headroom for metadata.
 | 
| ... | ... | @@ -60,7 +58,7 @@ def create_server(repo, *, enable_push): | 
| 60 | 58 |      context = Context()
 | 
| 61 | 59 |      context.artifactdir = os.path.abspath(repo)
 | 
| 62 | 60 |  | 
| 63 | -    artifactcache = CASCache(context)
 | |
| 61 | +    artifactcache = context.artifactcache
 | |
| 64 | 62 |  | 
| 65 | 63 |      # Use max_workers default from Python 3.5+
 | 
| 66 | 64 |      max_workers = (os.cpu_count() or 1) * 5
 | 
| ... | ... | @@ -30,6 +30,7 @@ from ._exceptions import LoadError, LoadErrorReason, BstError | 
| 30 | 30 |  from ._message import Message, MessageType
 | 
| 31 | 31 |  from ._profile import Topics, profile_start, profile_end
 | 
| 32 | 32 |  from ._artifactcache import ArtifactCache
 | 
| 33 | +from ._artifactcache.cascache import CASCache
 | |
| 33 | 34 |  from ._workspaces import Workspaces
 | 
| 34 | 35 |  from .plugin import _plugin_lookup
 | 
| 35 | 36 |  | 
| ... | ... | @@ -113,6 +114,7 @@ class Context(): | 
| 113 | 114 |          self._cache_key = None
 | 
| 114 | 115 |          self._message_handler = None
 | 
| 115 | 116 |          self._message_depth = deque()
 | 
| 117 | +        self._artifactcache = None
 | |
| 116 | 118 |          self._projects = []
 | 
| 117 | 119 |          self._project_overrides = {}
 | 
| 118 | 120 |          self._workspaces = None
 | 
| ... | ... | @@ -227,6 +229,13 @@ class Context(): | 
| 227 | 229 |                              "{}: on-error should be one of: {}".format(
 | 
| 228 | 230 |                                  provenance, ", ".join(valid_actions)))
 | 
| 229 | 231 |  | 
| 232 | +    @property
 | |
| 233 | +    def artifactcache(self):
 | |
| 234 | +        if not self._artifactcache:
 | |
| 235 | +            self._artifactcache = CASCache(self)
 | |
| 236 | + | |
| 237 | +        return self._artifactcache
 | |
| 238 | + | |
| 230 | 239 |      # add_project():
 | 
| 231 | 240 |      #
 | 
| 232 | 241 |      # Add a project to the context.
 | 
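The new property above memoizes a single CASCache per Context. A minimal sketch of the same lazy-initialization pattern (hypothetical names; no thread-safety implied):

    class Ctx:
        def __init__(self):
            self._cache = None

        @property
        def cache(self):
            if self._cache is None:
                self._cache = object()  # stand-in for CASCache(self)
            return self._cache

    ctx = Ctx()
    assert ctx.cache is ctx.cache  # created once, then reused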
| ... | ... | @@ -198,10 +198,15 @@ class App(): | 
| 198 | 198 |              if option_value is not None:
 | 
| 199 | 199 |                  setattr(self.context, context_attr, option_value)
 | 
| 200 | 200 |          try:
 | 
| 201 | -            Platform.create_instance(self.context)
 | |
| 201 | +            Platform.get_platform()
 | |
| 202 | 202 |          except BstError as e:
 | 
| 203 | 203 |              self._error_exit(e, "Error instantiating platform")
 | 
| 204 | 204 |  | 
| 205 | +        try:
 | |
| 206 | +            self.context.artifactcache.preflight()
 | |
| 207 | +        except BstError as e:
 | |
| 208 | +            self._error_exit(e, "Error instantiating artifact cache")
 | |
| 209 | + | |
| 205 | 210 |          # Create the logger right before setting the message handler
 | 
| 206 | 211 |          self.logger = LogLine(self.context,
 | 
| 207 | 212 |                                self._content_profile,
 | 
| ... | ... | @@ -305,10 +305,12 @@ def init(app, project_name, format_version, element_path, force): | 
| 305 | 305 |                help="Allow tracking to cross junction boundaries")
 | 
| 306 | 306 |  @click.option('--track-save', default=False, is_flag=True,
 | 
| 307 | 307 |                help="Deprecated: This is ignored")
 | 
| 308 | +@click.option('--pull-buildtrees', default=False, is_flag=True,
 | |
| 309 | +              help="Pull buildtrees from a remote cache server")
 | |
| 308 | 310 |  @click.argument('elements', nargs=-1,
 | 
| 309 | 311 |                  type=click.Path(readable=False))
 | 
| 310 | 312 |  @click.pass_obj
 | 
| 311 | -def build(app, elements, all_, track_, track_save, track_all, track_except, track_cross_junctions):
 | |
| 313 | +def build(app, elements, all_, track_, track_save, track_all, track_except, track_cross_junctions, pull_buildtrees):
 | |
| 312 | 314 |      """Build elements in a pipeline"""
 | 
| 313 | 315 |  | 
| 314 | 316 |      if (track_except or track_cross_junctions) and not (track_ or track_all):
 | 
| ... | ... | @@ -327,7 +329,8 @@ def build(app, elements, all_, track_, track_save, track_all, track_except, trac | 
| 327 | 329 |                           track_targets=track_,
 | 
| 328 | 330 |                           track_except=track_except,
 | 
| 329 | 331 |                           track_cross_junctions=track_cross_junctions,
 | 
| 330 | -                         build_all=all_)
 | |
| 332 | +                         build_all=all_,
 | |
| 333 | +                         pull_buildtrees=pull_buildtrees)
 | |
| 331 | 334 |  | 
| 332 | 335 |  | 
| 333 | 336 |  ##################################################################
 | 
| ... | ... | @@ -429,10 +432,12 @@ def track(app, elements, deps, except_, cross_junctions): | 
| 429 | 432 |                help='The dependency artifacts to pull (default: none)')
 | 
| 430 | 433 |  @click.option('--remote', '-r',
 | 
| 431 | 434 |                help="The URL of the remote cache (defaults to the first configured cache)")
 | 
| 435 | +@click.option('--pull-buildtrees', default=False, is_flag=True,
 | |
| 436 | +              help="Pull buildtrees from a remote cache server")
 | |
| 432 | 437 |  @click.argument('elements', nargs=-1,
 | 
| 433 | 438 |                  type=click.Path(readable=False))
 | 
| 434 | 439 |  @click.pass_obj
 | 
| 435 | -def pull(app, elements, deps, remote):
 | |
| 440 | +def pull(app, elements, deps, remote, pull_buildtrees):
 | |
| 436 | 441 |      """Pull a built artifact from the configured remote artifact cache.
 | 
| 437 | 442 |  | 
| 438 | 443 |      By default the artifact will be pulled from one of the configured caches
 | 
| ... | ... | @@ -446,7 +451,7 @@ def pull(app, elements, deps, remote): | 
| 446 | 451 |          all:   All dependencies
 | 
| 447 | 452 |      """
 | 
| 448 | 453 |      with app.initialized(session_name="Pull"):
 | 
| 449 | -        app.stream.pull(elements, selection=deps, remote=remote)
 | |
| 454 | +        app.stream.pull(elements, selection=deps, remote=remote, pull_buildtrees=pull_buildtrees)
 | |
| 450 | 455 |  | 
| 451 | 456 |  | 
| 452 | 457 |  ##################################################################
 | 
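With both commands now accepting the flag, a user would presumably opt in per invocation, e.g. `bst build --pull-buildtrees target.bst` or `bst pull --pull-buildtrees target.bst`; the flag value is simply threaded through to Stream.build() and Stream.pull() below.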
| ... | ... | @@ -28,7 +28,6 @@ from .. import Consistency | 
| 28 | 28 |  from .. import _yaml
 | 
| 29 | 29 |  from ..element import Element
 | 
| 30 | 30 |  from .._profile import Topics, profile_start, profile_end
 | 
| 31 | -from .._platform import Platform
 | |
| 32 | 31 |  from .._includes import Includes
 | 
| 33 | 32 |  | 
| 34 | 33 |  from .types import Symbol, Dependency
 | 
| ... | ... | @@ -518,8 +517,7 @@ class Loader(): | 
| 518 | 517 |              raise LoadError(LoadErrorReason.INVALID_DATA,
 | 
| 519 | 518 |                              "{}: Expected junction but element kind is {}".format(filename, meta_element.kind))
 | 
| 520 | 519 |  | 
| 521 | -        platform = Platform.get_platform()
 | |
| 522 | -        element = Element._new_from_meta(meta_element, platform.artifactcache)
 | |
| 520 | +        element = Element._new_from_meta(meta_element, self._context.artifactcache)
 | |
| 523 | 521 |          element._preflight()
 | 
| 524 | 522 |  | 
| 525 | 523 |          sources = list(element.sources())
 | 
| ... | ... | @@ -17,11 +17,11 @@ | 
| 17 | 17 |  #  Authors:
 | 
| 18 | 18 |  #        Tristan Maat <tristan maat codethink co uk>
 | 
| 19 | 19 |  | 
| 20 | +import os
 | |
| 20 | 21 |  import subprocess
 | 
| 21 | 22 |  | 
| 22 | 23 |  from .. import _site
 | 
| 23 | 24 |  from .. import utils
 | 
| 24 | -from .._artifactcache.cascache import CASCache
 | |
| 25 | 25 |  from .._message import Message, MessageType
 | 
| 26 | 26 |  from ..sandbox import SandboxBwrap
 | 
| 27 | 27 |  | 
| ... | ... | @@ -30,17 +30,15 @@ from . import Platform | 
| 30 | 30 |  | 
| 31 | 31 |  class Linux(Platform):
 | 
| 32 | 32 |  | 
| 33 | -    def __init__(self, context):
 | |
| 33 | +    def __init__(self):
 | |
| 34 | 34 |  | 
| 35 | -        super().__init__(context)
 | |
| 35 | +        super().__init__()
 | |
| 36 | 36 |  | 
| 37 | -        self._die_with_parent_available = _site.check_bwrap_version(0, 1, 8)
 | |
| 38 | -        self._user_ns_available = self._check_user_ns_available(context)
 | |
| 39 | -        self._artifact_cache = CASCache(context, enable_push=self._user_ns_available)
 | |
| 37 | +        self._uid = os.geteuid()
 | |
| 38 | +        self._gid = os.getegid()
 | |
| 40 | 39 |  | 
| 41 | -    @property
 | |
| 42 | -    def artifactcache(self):
 | |
| 43 | -        return self._artifact_cache
 | |
| 40 | +        self._die_with_parent_available = _site.check_bwrap_version(0, 1, 8)
 | |
| 41 | +        self._user_ns_available = self._check_user_ns_available()
 | |
| 44 | 42 |  | 
| 45 | 43 |      def create_sandbox(self, *args, **kwargs):
 | 
| 46 | 44 |          # Inform the bubblewrap sandbox as to whether it can use user namespaces or not
 | 
| ... | ... | @@ -48,10 +46,19 @@ class Linux(Platform): | 
| 48 | 46 |          kwargs['die_with_parent_available'] = self._die_with_parent_available
 | 
| 49 | 47 |          return SandboxBwrap(*args, **kwargs)
 | 
| 50 | 48 |  | 
| 49 | +    def check_sandbox_config(self, config):
 | |
| 50 | +        if self._user_ns_available:
 | |
| 51 | +            # User namespace support allows arbitrary build UID/GID settings.
 | |
| 52 | +            return True
 | |
| 53 | +        else:
 | |
| 54 | +            # Without user namespace support, the UID/GID in the sandbox
 | |
| 55 | +            # will match the host UID/GID.
 | |
| 56 | +            return config.build_uid == self._uid and config.build_gid == self._gid
 | |
| 57 | + | |
| 51 | 58 |      ################################################
 | 
| 52 | 59 |      #              Private Methods                 #
 | 
| 53 | 60 |      ################################################
 | 
| 54 | -    def _check_user_ns_available(self, context):
 | |
| 61 | +    def _check_user_ns_available(self):
 | |
| 55 | 62 |  | 
| 56 | 63 |          # Here, lets check if bwrap is able to create user namespaces,
 | 
| 57 | 64 |          # issue a warning if it's not available, and save the state
 | 
| ... | ... | @@ -75,9 +82,4 @@ class Linux(Platform): | 
| 75 | 82 |              return True
 | 
| 76 | 83 |  | 
| 77 | 84 |          else:
 | 
| 78 | -            context.message(
 | |
| 79 | -                Message(None, MessageType.WARN,
 | |
| 80 | -                        "Unable to create user namespaces with bubblewrap, resorting to fallback",
 | |
| 81 | -                        detail="Some builds may not function due to lack of uid / gid 0, " +
 | |
| 82 | -                        "artifacts created will not be trusted for push purposes."))
 | |
| 83 | 85 |              return False | 
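A sketch of the UID/GID gate introduced above: with user namespaces, any build UID/GID can be mapped; without them, the sandbox can only run as the host user (hypothetical standalone form):

    import os

    def check_sandbox_config(user_ns_available, build_uid, build_gid):
        if user_ns_available:
            # User namespaces allow arbitrary UID/GID mappings.
            return True
        # Otherwise the sandbox inherits the host effective IDs.
        return build_uid == os.geteuid() and build_gid == os.getegid()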
| ... | ... | @@ -29,17 +29,13 @@ class Platform(): | 
| 29 | 29 |      # Platform()
 | 
| 30 | 30 |      #
 | 
| 31 | 31 |      # A class to manage platform-specific details. Currently holds the
 | 
| 32 | -    # sandbox factory, the artifact cache and staging operations, as
 | |
| 33 | -    # well as platform helpers.
 | |
| 32 | +    # sandbox factory as well as platform helpers.
 | |
| 34 | 33 |      #
 | 
| 35 | -    # Args:
 | |
| 36 | -    #     context (context): The project context
 | |
| 37 | -    #
 | |
| 38 | -    def __init__(self, context):
 | |
| 39 | -        self.context = context
 | |
| 34 | +    def __init__(self):
 | |
| 35 | +        pass
 | |
| 40 | 36 |  | 
| 41 | 37 |      @classmethod
 | 
| 42 | -    def create_instance(cls, *args, **kwargs):
 | |
| 38 | +    def _create_instance(cls):
 | |
| 43 | 39 |          if sys.platform.startswith('linux'):
 | 
| 44 | 40 |              backend = 'linux'
 | 
| 45 | 41 |          else:
 | 
| ... | ... | @@ -58,22 +54,14 @@ class Platform(): | 
| 58 | 54 |          else:
 | 
| 59 | 55 |              raise PlatformError("No such platform: '{}'".format(backend))
 | 
| 60 | 56 |  | 
| 61 | -        cls._instance = PlatformImpl(*args, **kwargs)
 | |
| 57 | +        cls._instance = PlatformImpl()
 | |
| 62 | 58 |  | 
| 63 | 59 |      @classmethod
 | 
| 64 | 60 |      def get_platform(cls):
 | 
| 65 | 61 |          if not cls._instance:
 | 
| 66 | -            raise PlatformError("Platform needs to be initialized first")
 | |
| 62 | +            cls._create_instance()
 | |
| 67 | 63 |          return cls._instance
 | 
| 68 | 64 |  | 
| 69 | -    ##################################################################
 | |
| 70 | -    #                       Platform properties                      #
 | |
| 71 | -    ##################################################################
 | |
| 72 | -    @property
 | |
| 73 | -    def artifactcache(self):
 | |
| 74 | -        raise ImplError("Platform {platform} does not implement an artifactcache"
 | |
| 75 | -                        .format(platform=type(self).__name__))
 | |
| 76 | - | |
| 77 | 65 |      ##################################################################
 | 
| 78 | 66 |      #                        Sandbox functions                       #
 | 
| 79 | 67 |      ##################################################################
 | 
| ... | ... | @@ -92,3 +80,7 @@ class Platform(): | 
| 92 | 80 |      def create_sandbox(self, *args, **kwargs):
 | 
| 93 | 81 |          raise ImplError("Platform {platform} does not implement create_sandbox()"
 | 
| 94 | 82 |                          .format(platform=type(self).__name__))
 | 
| 83 | + | |
| 84 | +    def check_sandbox_config(self, config):
 | |
| 85 | +        raise ImplError("Platform {platform} does not implement check_sandbox_config()"
 | |
| 86 | +                        .format(platform=type(self).__name__)) | 
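get_platform() now instantiates the backend lazily instead of requiring an explicit create_instance() call at startup. A minimal sketch of that classmethod singleton pattern (hypothetical class):

    class Backend:
        _instance = None

        @classmethod
        def get(cls):
            # Create on first access; subsequent calls reuse the instance.
            if cls._instance is None:
                cls._instance = cls()
            return cls._instance

    assert Backend.get() is Backend.get()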
| ... | ... | @@ -19,7 +19,6 @@ | 
| 19 | 19 |  | 
| 20 | 20 |  import os
 | 
| 21 | 21 |  | 
| 22 | -from .._artifactcache.cascache import CASCache
 | |
| 23 | 22 |  from .._exceptions import PlatformError
 | 
| 24 | 23 |  from ..sandbox import SandboxChroot
 | 
| 25 | 24 |  | 
| ... | ... | @@ -28,18 +27,21 @@ from . import Platform | 
| 28 | 27 |  | 
| 29 | 28 |  class Unix(Platform):
 | 
| 30 | 29 |  | 
| 31 | -    def __init__(self, context):
 | |
| 30 | +    def __init__(self):
 | |
| 32 | 31 |  | 
| 33 | -        super().__init__(context)
 | |
| 34 | -        self._artifact_cache = CASCache(context)
 | |
| 32 | +        super().__init__()
 | |
| 33 | + | |
| 34 | +        self._uid = os.geteuid()
 | |
| 35 | +        self._gid = os.getegid()
 | |
| 35 | 36 |  | 
| 36 | 37 |          # Not necessarily 100% reliable, but we want to fail early.
 | 
| 37 | -        if os.geteuid() != 0:
 | |
| 38 | +        if self._uid != 0:
 | |
| 38 | 39 |              raise PlatformError("Root privileges are required to run without bubblewrap.")
 | 
| 39 | 40 |  | 
| 40 | -    @property
 | |
| 41 | -    def artifactcache(self):
 | |
| 42 | -        return self._artifact_cache
 | |
| 43 | - | |
| 44 | 41 |      def create_sandbox(self, *args, **kwargs):
 | 
| 45 | 42 |          return SandboxChroot(*args, **kwargs)
 | 
| 43 | + | |
| 44 | +    def check_sandbox_config(self, config):
 | |
| 45 | +        # With the chroot sandbox, the UID/GID in the sandbox
 | |
| 46 | +        # will match the host UID/GID (typically 0/0).
 | |
| 47 | +        return config.build_uid == self._uid and config.build_gid == self._gid | 
| 1 | +// Copyright 2017 Google Inc.
 | |
| 2 | +//
 | |
| 3 | +// Licensed under the Apache License, Version 2.0 (the "License");
 | |
| 4 | +// you may not use this file except in compliance with the License.
 | |
| 5 | +// You may obtain a copy of the License at
 | |
| 6 | +//
 | |
| 7 | +//     http://www.apache.org/licenses/LICENSE-2.0
 | |
| 8 | +//
 | |
| 9 | +// Unless required by applicable law or agreed to in writing, software
 | |
| 10 | +// distributed under the License is distributed on an "AS IS" BASIS,
 | |
| 11 | +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 | |
| 12 | +// See the License for the specific language governing permissions and
 | |
| 13 | +// limitations under the License.
 | |
| 14 | + | |
| 15 | +syntax = "proto3";
 | |
| 16 | + | |
| 17 | +package google.rpc;
 | |
| 18 | + | |
| 19 | +option go_package = "google.golang.org/genproto/googleapis/rpc/code;code";
 | |
| 20 | +option java_multiple_files = true;
 | |
| 21 | +option java_outer_classname = "CodeProto";
 | |
| 22 | +option java_package = "com.google.rpc";
 | |
| 23 | +option objc_class_prefix = "RPC";
 | |
| 24 | + | |
| 25 | + | |
| 26 | +// The canonical error codes for Google APIs.
 | |
| 27 | +//
 | |
| 28 | +//
 | |
| 29 | +// Sometimes multiple error codes may apply.  Services should return
 | |
| 30 | +// the most specific error code that applies.  For example, prefer
 | |
| 31 | +// `OUT_OF_RANGE` over `FAILED_PRECONDITION` if both codes apply.
 | |
| 32 | +// Similarly prefer `NOT_FOUND` or `ALREADY_EXISTS` over `FAILED_PRECONDITION`.
 | |
| 33 | +enum Code {
 | |
| 34 | +  // Not an error; returned on success
 | |
| 35 | +  //
 | |
| 36 | +  // HTTP Mapping: 200 OK
 | |
| 37 | +  OK = 0;
 | |
| 38 | + | |
| 39 | +  // The operation was cancelled, typically by the caller.
 | |
| 40 | +  //
 | |
| 41 | +  // HTTP Mapping: 499 Client Closed Request
 | |
| 42 | +  CANCELLED = 1;
 | |
| 43 | + | |
| 44 | +  // Unknown error.  For example, this error may be returned when
 | |
| 45 | +  // a `Status` value received from another address space belongs to
 | |
| 46 | +  // an error space that is not known in this address space.  Also
 | |
| 47 | +  // errors raised by APIs that do not return enough error information
 | |
| 48 | +  // may be converted to this error.
 | |
| 49 | +  //
 | |
| 50 | +  // HTTP Mapping: 500 Internal Server Error
 | |
| 51 | +  UNKNOWN = 2;
 | |
| 52 | + | |
| 53 | +  // The client specified an invalid argument.  Note that this differs
 | |
| 54 | +  // from `FAILED_PRECONDITION`.  `INVALID_ARGUMENT` indicates arguments
 | |
| 55 | +  // that are problematic regardless of the state of the system
 | |
| 56 | +  // (e.g., a malformed file name).
 | |
| 57 | +  //
 | |
| 58 | +  // HTTP Mapping: 400 Bad Request
 | |
| 59 | +  INVALID_ARGUMENT = 3;
 | |
| 60 | + | |
| 61 | +  // The deadline expired before the operation could complete. For operations
 | |
| 62 | +  // that change the state of the system, this error may be returned
 | |
| 63 | +  // even if the operation has completed successfully.  For example, a
 | |
| 64 | +  // successful response from a server could have been delayed long
 | |
| 65 | +  // enough for the deadline to expire.
 | |
| 66 | +  //
 | |
| 67 | +  // HTTP Mapping: 504 Gateway Timeout
 | |
| 68 | +  DEADLINE_EXCEEDED = 4;
 | |
| 69 | + | |
| 70 | +  // Some requested entity (e.g., file or directory) was not found.
 | |
| 71 | +  //
 | |
| 72 | +  // Note to server developers: if a request is denied for an entire class
 | |
| 73 | +  // of users, such as gradual feature rollout or undocumented whitelist,
 | |
| 74 | +  // `NOT_FOUND` may be used. If a request is denied for some users within
 | |
| 75 | +  // a class of users, such as user-based access control, `PERMISSION_DENIED`
 | |
| 76 | +  // must be used.
 | |
| 77 | +  //
 | |
| 78 | +  // HTTP Mapping: 404 Not Found
 | |
| 79 | +  NOT_FOUND = 5;
 | |
| 80 | + | |
| 81 | +  // The entity that a client attempted to create (e.g., file or directory)
 | |
| 82 | +  // already exists.
 | |
| 83 | +  //
 | |
| 84 | +  // HTTP Mapping: 409 Conflict
 | |
| 85 | +  ALREADY_EXISTS = 6;
 | |
| 86 | + | |
| 87 | +  // The caller does not have permission to execute the specified
 | |
| 88 | +  // operation. `PERMISSION_DENIED` must not be used for rejections
 | |
| 89 | +  // caused by exhausting some resource (use `RESOURCE_EXHAUSTED`
 | |
| 90 | +  // instead for those errors). `PERMISSION_DENIED` must not be
 | |
| 91 | +  // used if the caller can not be identified (use `UNAUTHENTICATED`
 | |
| 92 | +  // instead for those errors). This error code does not imply the
 | |
| 93 | +  // request is valid or the requested entity exists or satisfies
 | |
| 94 | +  // other pre-conditions.
 | |
| 95 | +  //
 | |
| 96 | +  // HTTP Mapping: 403 Forbidden
 | |
| 97 | +  PERMISSION_DENIED = 7;
 | |
| 98 | + | |
| 99 | +  // The request does not have valid authentication credentials for the
 | |
| 100 | +  // operation.
 | |
| 101 | +  //
 | |
| 102 | +  // HTTP Mapping: 401 Unauthorized
 | |
| 103 | +  UNAUTHENTICATED = 16;
 | |
| 104 | + | |
| 105 | +  // Some resource has been exhausted, perhaps a per-user quota, or
 | |
| 106 | +  // perhaps the entire file system is out of space.
 | |
| 107 | +  //
 | |
| 108 | +  // HTTP Mapping: 429 Too Many Requests
 | |
| 109 | +  RESOURCE_EXHAUSTED = 8;
 | |
| 110 | + | |
| 111 | +  // The operation was rejected because the system is not in a state
 | |
| 112 | +  // required for the operation's execution.  For example, the directory
 | |
| 113 | +  // to be deleted is non-empty, an rmdir operation is applied to
 | |
| 114 | +  // a non-directory, etc.
 | |
| 115 | +  //
 | |
| 116 | +  // Service implementors can use the following guidelines to decide
 | |
| 117 | +  // between `FAILED_PRECONDITION`, `ABORTED`, and `UNAVAILABLE`:
 | |
| 118 | +  //  (a) Use `UNAVAILABLE` if the client can retry just the failing call.
 | |
| 119 | +  //  (b) Use `ABORTED` if the client should retry at a higher level
 | |
| 120 | +  //      (e.g., when a client-specified test-and-set fails, indicating the
 | |
| 121 | +  //      client should restart a read-modify-write sequence).
 | |
| 122 | +  //  (c) Use `FAILED_PRECONDITION` if the client should not retry until
 | |
| 123 | +  //      the system state has been explicitly fixed.  E.g., if an "rmdir"
 | |
| 124 | +  //      fails because the directory is non-empty, `FAILED_PRECONDITION`
 | |
| 125 | +  //      should be returned since the client should not retry unless
 | |
| 126 | +  //      the files are deleted from the directory.
 | |
| 127 | +  //
 | |
| 128 | +  // HTTP Mapping: 400 Bad Request
 | |
| 129 | +  FAILED_PRECONDITION = 9;
 | |
| 130 | + | |
| 131 | +  // The operation was aborted, typically due to a concurrency issue such as
 | |
| 132 | +  // a sequencer check failure or transaction abort.
 | |
| 133 | +  //
 | |
| 134 | +  // See the guidelines above for deciding between `FAILED_PRECONDITION`,
 | |
| 135 | +  // `ABORTED`, and `UNAVAILABLE`.
 | |
| 136 | +  //
 | |
| 137 | +  // HTTP Mapping: 409 Conflict
 | |
| 138 | +  ABORTED = 10;
 | |
| 139 | + | |
| 140 | +  // The operation was attempted past the valid range.  E.g., seeking or
 | |
| 141 | +  // reading past end-of-file.
 | |
| 142 | +  //
 | |
| 143 | +  // Unlike `INVALID_ARGUMENT`, this error indicates a problem that may
 | |
| 144 | +  // be fixed if the system state changes. For example, a 32-bit file
 | |
| 145 | +  // system will generate `INVALID_ARGUMENT` if asked to read at an
 | |
| 146 | +  // offset that is not in the range [0,2^32-1], but it will generate
 | |
| 147 | +  // `OUT_OF_RANGE` if asked to read from an offset past the current
 | |
| 148 | +  // file size.
 | |
| 149 | +  //
 | |
| 150 | +  // There is a fair bit of overlap between `FAILED_PRECONDITION` and
 | |
| 151 | +  // `OUT_OF_RANGE`.  We recommend using `OUT_OF_RANGE` (the more specific
 | |
| 152 | +  // error) when it applies so that callers who are iterating through
 | |
| 153 | +  // a space can easily look for an `OUT_OF_RANGE` error to detect when
 | |
| 154 | +  // they are done.
 | |
| 155 | +  //
 | |
| 156 | +  // HTTP Mapping: 400 Bad Request
 | |
| 157 | +  OUT_OF_RANGE = 11;
 | |
| 158 | + | |
| 159 | +  // The operation is not implemented or is not supported/enabled in this
 | |
| 160 | +  // service.
 | |
| 161 | +  //
 | |
| 162 | +  // HTTP Mapping: 501 Not Implemented
 | |
| 163 | +  UNIMPLEMENTED = 12;
 | |
| 164 | + | |
| 165 | +  // Internal errors.  This means that some invariants expected by the
 | |
| 166 | +  // underlying system have been broken.  This error code is reserved
 | |
| 167 | +  // for serious errors.
 | |
| 168 | +  //
 | |
| 169 | +  // HTTP Mapping: 500 Internal Server Error
 | |
| 170 | +  INTERNAL = 13;
 | |
| 171 | + | |
| 172 | +  // The service is currently unavailable.  This is most likely a
 | |
| 173 | +  // transient condition, which can be corrected by retrying with
 | |
| 174 | +  // a backoff.
 | |
| 175 | +  //
 | |
| 176 | +  // See the guidelines above for deciding between `FAILED_PRECONDITION`,
 | |
| 177 | +  // `ABORTED`, and `UNAVAILABLE`.
 | |
| 178 | +  //
 | |
| 179 | +  // HTTP Mapping: 503 Service Unavailable
 | |
| 180 | +  UNAVAILABLE = 14;
 | |
| 181 | + | |
| 182 | +  // Unrecoverable data loss or corruption.
 | |
| 183 | +  //
 | |
| 184 | +  // HTTP Mapping: 500 Internal Server Error
 | |
| 185 | +  DATA_LOSS = 15;
 | |
| 186 | +} | |
| \ No newline at end of file | 
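Once compiled, the generated module below exposes the enum both through a wrapper and as module-level constants; a short sketch of typical use, assuming the package is importable as laid out in this branch:

    from buildstream._protos.google.rpc import code_pb2

    assert code_pb2.NOT_FOUND == 5
    assert code_pb2.Code.Name(code_pb2.NOT_FOUND) == 'NOT_FOUND'
    assert code_pb2.Code.Value('OK') == 0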
| 1 | +# Generated by the protocol buffer compiler.  DO NOT EDIT!
 | |
| 2 | +# source: google/rpc/code.proto
 | |
| 3 | + | |
| 4 | +import sys
 | |
| 5 | +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
 | |
| 6 | +from google.protobuf.internal import enum_type_wrapper
 | |
| 7 | +from google.protobuf import descriptor as _descriptor
 | |
| 8 | +from google.protobuf import message as _message
 | |
| 9 | +from google.protobuf import reflection as _reflection
 | |
| 10 | +from google.protobuf import symbol_database as _symbol_database
 | |
| 11 | +# @@protoc_insertion_point(imports)
 | |
| 12 | + | |
| 13 | +_sym_db = _symbol_database.Default()
 | |
| 14 | + | |
| 15 | + | |
| 16 | + | |
| 17 | + | |
| 18 | +DESCRIPTOR = _descriptor.FileDescriptor(
 | |
| 19 | +  name='google/rpc/code.proto',
 | |
| 20 | +  package='google.rpc',
 | |
| 21 | +  syntax='proto3',
 | |
| 22 | +  serialized_options=_b('\n\016com.google.rpcB\tCodeProtoP\001Z3google.golang.org/genproto/googleapis/rpc/code;code\242\002\003RPC'),
 | |
| 23 | +  serialized_pb=_b('\n\x15google/rpc/code.proto\x12\ngoogle.rpc*\xb7\x02\n\x04\x43ode\x12\x06\n\x02OK\x10\x00\x12\r\n\tCANCELLED\x10\x01\x12\x0b\n\x07UNKNOWN\x10\x02\x12\x14\n\x10INVALID_ARGUMENT\x10\x03\x12\x15\n\x11\x44\x45\x41\x44LINE_EXCEEDED\x10\x04\x12\r\n\tNOT_FOUND\x10\x05\x12\x12\n\x0e\x41LREADY_EXISTS\x10\x06\x12\x15\n\x11PERMISSION_DENIED\x10\x07\x12\x13\n\x0fUNAUTHENTICATED\x10\x10\x12\x16\n\x12RESOURCE_EXHAUSTED\x10\x08\x12\x17\n\x13\x46\x41ILED_PRECONDITION\x10\t\x12\x0b\n\x07\x41\x42ORTED\x10\n\x12\x10\n\x0cOUT_OF_RANGE\x10\x0b\x12\x11\n\rUNIMPLEMENTED\x10\x0c\x12\x0c\n\x08INTERNAL\x10\r\x12\x0f\n\x0bUNAVAILABLE\x10\x0e\x12\r\n\tDATA_LOSS\x10\x0f\x42X\n\x0e\x63om.google.rpcB\tCodeProtoP\x01Z3google.golang.org/genproto/googleapis/rpc/code;code\xa2\x02\x03RPCb\x06proto3')
 | |
| 24 | +)
 | |
| 25 | + | |
| 26 | +_CODE = _descriptor.EnumDescriptor(
 | |
| 27 | +  name='Code',
 | |
| 28 | +  full_name='google.rpc.Code',
 | |
| 29 | +  filename=None,
 | |
| 30 | +  file=DESCRIPTOR,
 | |
| 31 | +  values=[
 | |
| 32 | +    _descriptor.EnumValueDescriptor(
 | |
| 33 | +      name='OK', index=0, number=0,
 | |
| 34 | +      serialized_options=None,
 | |
| 35 | +      type=None),
 | |
| 36 | +    _descriptor.EnumValueDescriptor(
 | |
| 37 | +      name='CANCELLED', index=1, number=1,
 | |
| 38 | +      serialized_options=None,
 | |
| 39 | +      type=None),
 | |
| 40 | +    _descriptor.EnumValueDescriptor(
 | |
| 41 | +      name='UNKNOWN', index=2, number=2,
 | |
| 42 | +      serialized_options=None,
 | |
| 43 | +      type=None),
 | |
| 44 | +    _descriptor.EnumValueDescriptor(
 | |
| 45 | +      name='INVALID_ARGUMENT', index=3, number=3,
 | |
| 46 | +      serialized_options=None,
 | |
| 47 | +      type=None),
 | |
| 48 | +    _descriptor.EnumValueDescriptor(
 | |
| 49 | +      name='DEADLINE_EXCEEDED', index=4, number=4,
 | |
| 50 | +      serialized_options=None,
 | |
| 51 | +      type=None),
 | |
| 52 | +    _descriptor.EnumValueDescriptor(
 | |
| 53 | +      name='NOT_FOUND', index=5, number=5,
 | |
| 54 | +      serialized_options=None,
 | |
| 55 | +      type=None),
 | |
| 56 | +    _descriptor.EnumValueDescriptor(
 | |
| 57 | +      name='ALREADY_EXISTS', index=6, number=6,
 | |
| 58 | +      serialized_options=None,
 | |
| 59 | +      type=None),
 | |
| 60 | +    _descriptor.EnumValueDescriptor(
 | |
| 61 | +      name='PERMISSION_DENIED', index=7, number=7,
 | |
| 62 | +      serialized_options=None,
 | |
| 63 | +      type=None),
 | |
| 64 | +    _descriptor.EnumValueDescriptor(
 | |
| 65 | +      name='UNAUTHENTICATED', index=8, number=16,
 | |
| 66 | +      serialized_options=None,
 | |
| 67 | +      type=None),
 | |
| 68 | +    _descriptor.EnumValueDescriptor(
 | |
| 69 | +      name='RESOURCE_EXHAUSTED', index=9, number=8,
 | |
| 70 | +      serialized_options=None,
 | |
| 71 | +      type=None),
 | |
| 72 | +    _descriptor.EnumValueDescriptor(
 | |
| 73 | +      name='FAILED_PRECONDITION', index=10, number=9,
 | |
| 74 | +      serialized_options=None,
 | |
| 75 | +      type=None),
 | |
| 76 | +    _descriptor.EnumValueDescriptor(
 | |
| 77 | +      name='ABORTED', index=11, number=10,
 | |
| 78 | +      serialized_options=None,
 | |
| 79 | +      type=None),
 | |
| 80 | +    _descriptor.EnumValueDescriptor(
 | |
| 81 | +      name='OUT_OF_RANGE', index=12, number=11,
 | |
| 82 | +      serialized_options=None,
 | |
| 83 | +      type=None),
 | |
| 84 | +    _descriptor.EnumValueDescriptor(
 | |
| 85 | +      name='UNIMPLEMENTED', index=13, number=12,
 | |
| 86 | +      serialized_options=None,
 | |
| 87 | +      type=None),
 | |
| 88 | +    _descriptor.EnumValueDescriptor(
 | |
| 89 | +      name='INTERNAL', index=14, number=13,
 | |
| 90 | +      serialized_options=None,
 | |
| 91 | +      type=None),
 | |
| 92 | +    _descriptor.EnumValueDescriptor(
 | |
| 93 | +      name='UNAVAILABLE', index=15, number=14,
 | |
| 94 | +      serialized_options=None,
 | |
| 95 | +      type=None),
 | |
| 96 | +    _descriptor.EnumValueDescriptor(
 | |
| 97 | +      name='DATA_LOSS', index=16, number=15,
 | |
| 98 | +      serialized_options=None,
 | |
| 99 | +      type=None),
 | |
| 100 | +  ],
 | |
| 101 | +  containing_type=None,
 | |
| 102 | +  serialized_options=None,
 | |
| 103 | +  serialized_start=38,
 | |
| 104 | +  serialized_end=349,
 | |
| 105 | +)
 | |
| 106 | +_sym_db.RegisterEnumDescriptor(_CODE)
 | |
| 107 | + | |
| 108 | +Code = enum_type_wrapper.EnumTypeWrapper(_CODE)
 | |
| 109 | +OK = 0
 | |
| 110 | +CANCELLED = 1
 | |
| 111 | +UNKNOWN = 2
 | |
| 112 | +INVALID_ARGUMENT = 3
 | |
| 113 | +DEADLINE_EXCEEDED = 4
 | |
| 114 | +NOT_FOUND = 5
 | |
| 115 | +ALREADY_EXISTS = 6
 | |
| 116 | +PERMISSION_DENIED = 7
 | |
| 117 | +UNAUTHENTICATED = 16
 | |
| 118 | +RESOURCE_EXHAUSTED = 8
 | |
| 119 | +FAILED_PRECONDITION = 9
 | |
| 120 | +ABORTED = 10
 | |
| 121 | +OUT_OF_RANGE = 11
 | |
| 122 | +UNIMPLEMENTED = 12
 | |
| 123 | +INTERNAL = 13
 | |
| 124 | +UNAVAILABLE = 14
 | |
| 125 | +DATA_LOSS = 15
 | |
| 126 | + | |
| 127 | + | |
| 128 | +DESCRIPTOR.enum_types_by_name['Code'] = _CODE
 | |
| 129 | +_sym_db.RegisterFileDescriptor(DESCRIPTOR)
 | |
| 130 | + | |
| 131 | + | |
| 132 | +DESCRIPTOR._options = None
 | |
| 133 | +# @@protoc_insertion_point(module_scope) | 
| 1 | +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
 | |
| 2 | +import grpc
 | |
| 3 | + | 
| ... | ... | @@ -17,7 +17,6 @@ | 
| 17 | 17 |  #        Tristan Daniël Maat <tristan maat codethink co uk>
 | 
| 18 | 18 |  #
 | 
| 19 | 19 |  from .job import Job
 | 
| 20 | -from ..._platform import Platform
 | |
| 21 | 20 |  | 
| 22 | 21 |  | 
| 23 | 22 |  class CacheSizeJob(Job):
 | 
| ... | ... | @@ -25,8 +24,8 @@ class CacheSizeJob(Job): | 
| 25 | 24 |          super().__init__(*args, **kwargs)
 | 
| 26 | 25 |          self._complete_cb = complete_cb
 | 
| 27 | 26 |  | 
| 28 | -        platform = Platform.get_platform()
 | |
| 29 | -        self._artifacts = platform.artifactcache
 | |
| 27 | +        context = self._scheduler.context
 | |
| 28 | +        self._artifacts = context.artifactcache
 | |
| 30 | 29 |  | 
| 31 | 30 |      def child_process(self):
 | 
| 32 | 31 |          return self._artifacts.compute_cache_size()
 | 
| ... | ... | @@ -17,15 +17,14 @@ | 
| 17 | 17 |  #        Tristan Daniël Maat <tristan maat codethink co uk>
 | 
| 18 | 18 |  #
 | 
| 19 | 19 |  from .job import Job
 | 
| 20 | -from ..._platform import Platform
 | |
| 21 | 20 |  | 
| 22 | 21 |  | 
| 23 | 22 |  class CleanupJob(Job):
 | 
| 24 | 23 |      def __init__(self, *args, **kwargs):
 | 
| 25 | 24 |          super().__init__(*args, **kwargs)
 | 
| 26 | 25 |  | 
| 27 | -        platform = Platform.get_platform()
 | |
| 28 | -        self._artifacts = platform.artifactcache
 | |
| 26 | +        context = self._scheduler.context
 | |
| 27 | +        self._artifacts = context.artifactcache
 | |
| 29 | 28 |  | 
| 30 | 29 |      def child_process(self):
 | 
| 31 | 30 |          return self._artifacts.clean()
 | 
| ... | ... | @@ -24,7 +24,6 @@ from . import Queue, QueueStatus | 
| 24 | 24 |  from ..jobs import ElementJob
 | 
| 25 | 25 |  from ..resources import ResourceType
 | 
| 26 | 26 |  from ..._message import MessageType
 | 
| 27 | -from ..._platform import Platform
 | |
| 28 | 27 |  | 
| 29 | 28 |  | 
| 30 | 29 |  # A queue which assembles elements
 | 
| ... | ... | @@ -94,8 +93,8 @@ class BuildQueue(Queue): | 
| 94 | 93 |          # as returned from Element._assemble() to the estimated
 | 
| 95 | 94 |          # artifact cache size
 | 
| 96 | 95 |          #
 | 
| 97 | -        platform = Platform.get_platform()
 | |
| 98 | -        artifacts = platform.artifactcache
 | |
| 96 | +        context = self._scheduler.context
 | |
| 97 | +        artifacts = context.artifactcache
 | |
| 99 | 98 |  | 
| 100 | 99 |          artifacts.add_artifact_size(artifact_size)
 | 
| 101 | 100 |  | 
| ... | ... | @@ -32,9 +32,20 @@ class PullQueue(Queue): | 
| 32 | 32 |      complete_name = "Pulled"
 | 
| 33 | 33 |      resources = [ResourceType.DOWNLOAD, ResourceType.CACHE]
 | 
| 34 | 34 |  | 
| 35 | +    def __init__(self, scheduler, buildtrees=False):
 | |
| 36 | +        super().__init__(scheduler)
 | |
| 37 | + | |
| 38 | +        # Current default exclusions on pull
 | |
| 39 | +        self._excluded_subdirs = ["buildtree"]
 | |
| 40 | +        self._subdir = None
 | |
| 41 | +        # If buildtrees are to be pulled, remove the value from exclusion list
 | |
| 42 | +        if buildtrees:
 | |
| 43 | +            self._subdir = "buildtree"
 | |
| 44 | +            self._excluded_subdirs.remove(self._subdir)
 | |
| 45 | + | |
| 35 | 46 |      def process(self, element):
 | 
| 36 | 47 |          # returns whether an artifact was downloaded or not
 | 
| 37 | -        if not element._pull():
 | |
| 48 | +        if not element._pull(subdir=self._subdir, excluded_subdirs=self._excluded_subdirs):
 | |
| 38 | 49 |              raise SkipJob(self.action_name)
 | 
| 39 | 50 |  | 
| 40 | 51 |      def status(self, element):
 | 
| ... | ... | @@ -49,7 +60,7 @@ class PullQueue(Queue): | 
| 49 | 60 |          if not element._can_query_cache():
 | 
| 50 | 61 |              return QueueStatus.WAIT
 | 
| 51 | 62 |  | 
| 52 | -        if element._pull_pending():
 | |
| 63 | +        if element._pull_pending(subdir=self._subdir):
 | |
| 53 | 64 |              return QueueStatus.READY
 | 
| 54 | 65 |          else:
 | 
| 55 | 66 |              return QueueStatus.SKIP
 | 
| ... | ... | @@ -29,7 +29,6 @@ from contextlib import contextmanager | 
| 29 | 29 |  # Local imports
 | 
| 30 | 30 |  from .resources import Resources, ResourceType
 | 
| 31 | 31 |  from .jobs import CacheSizeJob, CleanupJob
 | 
| 32 | -from .._platform import Platform
 | |
| 33 | 32 |  | 
| 34 | 33 |  | 
| 35 | 34 |  # A decent return code for Scheduler.run()
 | 
| ... | ... | @@ -348,8 +347,8 @@ class Scheduler(): | 
| 348 | 347 |      #       which will report the calculated cache size.
 | 
| 349 | 348 |      #
 | 
| 350 | 349 |      def _run_cleanup(self, cache_size):
 | 
| 351 | -        platform = Platform.get_platform()
 | |
| 352 | -        artifacts = platform.artifactcache
 | |
| 350 | +        context = self.context
 | |
| 351 | +        artifacts = context.artifactcache
 | |
| 353 | 352 |  | 
| 354 | 353 |          if not artifacts.has_quota_exceeded():
 | 
| 355 | 354 |              return
 | 
| ... | ... | @@ -32,7 +32,6 @@ from ._exceptions import StreamError, ImplError, BstError, set_last_task_error | 
| 32 | 32 |  from ._message import Message, MessageType
 | 
| 33 | 33 |  from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, BuildQueue, PullQueue, PushQueue
 | 
| 34 | 34 |  from ._pipeline import Pipeline, PipelineSelection
 | 
| 35 | -from ._platform import Platform
 | |
| 36 | 35 |  from . import utils, _yaml, _site
 | 
| 37 | 36 |  from . import Scope, Consistency
 | 
| 38 | 37 |  | 
| ... | ... | @@ -71,8 +70,7 @@ class Stream(): | 
| 71 | 70 |          #
 | 
| 72 | 71 |          # Private members
 | 
| 73 | 72 |          #
 | 
| 74 | -        self._platform = Platform.get_platform()
 | |
| 75 | -        self._artifacts = self._platform.artifactcache
 | |
| 73 | +        self._artifacts = context.artifactcache
 | |
| 76 | 74 |          self._context = context
 | 
| 77 | 75 |          self._project = project
 | 
| 78 | 76 |          self._pipeline = Pipeline(context, project, self._artifacts)
 | 
| ... | ... | @@ -162,12 +160,14 @@ class Stream(): | 
| 162 | 160 |      #    track_cross_junctions (bool): Whether tracking should cross junction boundaries
 | 
| 163 | 161 |      #    build_all (bool): Whether to build all elements, or only those
 | 
| 164 | 162 |      #                      which are required to build the target.
 | 
| 163 | +    #    pull_buildtrees (bool): Whether to pull buildtrees from a remote cache server
 | |
| 165 | 164 |      #
 | 
| 166 | 165 |      def build(self, targets, *,
 | 
| 167 | 166 |                track_targets=None,
 | 
| 168 | 167 |                track_except=None,
 | 
| 169 | 168 |                track_cross_junctions=False,
 | 
| 170 | -              build_all=False):
 | |
| 169 | +              build_all=False,
 | |
| 170 | +              pull_buildtrees=False):
 | |
| 171 | 171 |  | 
| 172 | 172 |          if build_all:
 | 
| 173 | 173 |              selection = PipelineSelection.ALL
 | 
| ... | ... | @@ -197,7 +197,11 @@ class Stream(): | 
| 197 | 197 |              self._add_queue(track_queue, track=True)
 | 
| 198 | 198 |  | 
| 199 | 199 |          if self._artifacts.has_fetch_remotes():
 | 
| 200 | -            self._add_queue(PullQueue(self._scheduler))
 | |
| 200 | +            # Query if any of the user-defined artifact servers have buildtrees set
 | |
| 201 | +            for cache in self._context.artifact_cache_specs:
 | |
| 202 | +                if cache.buildtrees:
 | |
| 203 | +                    pull_buildtrees = True
 | |
| 204 | +            self._add_queue(PullQueue(self._scheduler, buildtrees=pull_buildtrees))
 | |
| 201 | 205 |  | 
| 202 | 206 |          self._add_queue(FetchQueue(self._scheduler, skip_cached=True))
 | 
| 203 | 207 |          self._add_queue(BuildQueue(self._scheduler))
 | 
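The loop above lets any user-defined artifact server flip the session default on. Assuming each spec exposes a boolean buildtrees attribute (as the attribute access above suggests), the override amounts to:

    # Hypothetical spec objects; only the 'buildtrees' attribute is assumed.
    from collections import namedtuple

    CacheSpec = namedtuple('CacheSpec', ['url', 'buildtrees'])

    specs = [CacheSpec('https://cache-a.example.com', False),
             CacheSpec('https://cache-b.example.com', True)]

    pull_buildtrees = False  # the command-line default
    # One server with buildtrees set is enough to enable pulling them.
    pull_buildtrees = pull_buildtrees or any(s.buildtrees for s in specs)
    assert pull_buildtrees is True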
| ... | ... | @@ -297,7 +301,8 @@ class Stream(): | 
| 297 | 301 |      #
 | 
| 298 | 302 |      def pull(self, targets, *,
 | 
| 299 | 303 |               selection=PipelineSelection.NONE,
 | 
| 300 | -             remote=None):
 | |
| 304 | +             remote=None,
 | |
| 305 | +             pull_buildtrees=False):
 | |
| 301 | 306 |  | 
| 302 | 307 |          use_config = True
 | 
| 303 | 308 |          if remote:
 | 
| ... | ... | @@ -312,8 +317,13 @@ class Stream(): | 
| 312 | 317 |          if not self._artifacts.has_fetch_remotes():
 | 
| 313 | 318 |              raise StreamError("No artifact caches available for pulling artifacts")
 | 
| 314 | 319 |  | 
| 320 | +        # Query if any of the user-defined artifact servers have buildtrees set
 | |
| 321 | +        for cache in self._context.artifact_cache_specs:
 | |
| 322 | +            if cache.buildtrees:
 | |
| 323 | +                pull_buildtrees = True
 | |
| 324 | + | |
| 315 | 325 |          self._pipeline.assert_consistent(elements)
 | 
| 316 | -        self._add_queue(PullQueue(self._scheduler))
 | |
| 326 | +        self._add_queue(PullQueue(self._scheduler, buildtrees=pull_buildtrees))
 | |
| 317 | 327 |          self._enqueue_plan(elements)
 | 
| 318 | 328 |          self._run()
 | 
| 319 | 329 |  | 
| ... | ... | @@ -246,15 +246,23 @@ class Element(Plugin): | 
| 246 | 246 |          self.__config = self.__extract_config(meta)
 | 
| 247 | 247 |          self._configure(self.__config)
 | 
| 248 | 248 |  | 
| 249 | -        # Extract Sandbox config
 | |
| 250 | -        self.__sandbox_config = self.__extract_sandbox_config(meta)
 | |
| 251 | - | |
| 252 | 249 |          # Extract remote execution URL
 | 
| 253 | 250 |          if not self.__is_junction:
 | 
| 254 | 251 |              self.__remote_execution_url = project.remote_execution_url
 | 
| 255 | 252 |          else:
 | 
| 256 | 253 |              self.__remote_execution_url = None
 | 
| 257 | 254 |  | 
| 255 | +        # Extract Sandbox config
 | |
| 256 | +        self.__sandbox_config = self.__extract_sandbox_config(meta)
 | |
| 257 | + | |
| 258 | +        self.__sandbox_config_supported = True
 | |
| 259 | +        if not self.__use_remote_execution():
 | |
| 260 | +            platform = Platform.get_platform()
 | |
| 261 | +            if not platform.check_sandbox_config(self.__sandbox_config):
 | |
| 262 | +                # Local sandbox does not fully support the specified sandbox config.
 | |
| 263 | +                # This will taint the artifact, disabling pushing.
 | |
| 264 | +                self.__sandbox_config_supported = False
 | |
| 265 | + | |
| 258 | 266 |      def __lt__(self, other):
 | 
| 259 | 267 |          return self.name < other.name
 | 
| 260 | 268 |  | 
| ... | ... | @@ -1131,6 +1139,7 @@ class Element(Plugin): | 
| 1131 | 1139 |              if not self.__weak_cached:
 | 
| 1132 | 1140 |                  self.__weak_cached = self.__artifacts.contains(self, self.__weak_cache_key)
 | 
| 1133 | 1141 |  | 
| 1142 | + | |
| 1134 | 1143 |          if (not self.__assemble_scheduled and not self.__assemble_done and
 | 
| 1135 | 1144 |                  not self._cached_success() and not self._pull_pending()):
 | 
| 1136 | 1145 |              # Workspaced sources are considered unstable if a build is pending
 | 
| ... | ... | @@ -1521,6 +1530,11 @@ class Element(Plugin): | 
| 1521 | 1530 |          context = self._get_context()
 | 
| 1522 | 1531 |          with self._output_file() as output_file:
 | 
| 1523 | 1532 |  | 
| 1533 | +            if not self.__sandbox_config_supported:
 | |
| 1534 | +                self.warn("Sandbox configuration is not supported by the platform.",
 | |
| 1535 | +                          detail="Falling back to UID {} GID {}. Artifact will not be pushed."
 | |
| 1536 | +                          .format(self.__sandbox_config.build_uid, self.__sandbox_config.build_gid))
 | |
| 1537 | + | |
| 1524 | 1538 |              # Explicitly clean it up, keep the build dir around if exceptions are raised
 | 
| 1525 | 1539 |              os.makedirs(context.builddir, exist_ok=True)
 | 
| 1526 | 1540 |              rootdir = tempfile.mkdtemp(prefix="{}-".format(self.normal_name), dir=context.builddir)
 | 
| ... | ... | @@ -1676,18 +1690,26 @@ class Element(Plugin): | 
| 1676 | 1690 |  | 
| 1677 | 1691 |      # _pull_pending()
 | 
| 1678 | 1692 |      #
 | 
| 1679 | -    # Check whether the artifact will be pulled.
 | |
| 1693 | +    # Check whether the artifact will be pulled. If the pull operation is to
 | |
| 1694 | +    # include a specific subdir of the element artifact (from the CLI or user conf),
 | |
| 1695 | +    # then the local cache is queried for the subdir's existence.
 | |
| 1696 | +    #
 | |
| 1697 | +    # Args:
 | |
| 1698 | +    #    subdir (str): The subdir to pull, if the pull was invoked with a specific subdir
 | |
| 1680 | 1699 |      #
 | 
| 1681 | 1700 |      # Returns:
 | 
| 1682 | 1701 |      #   (bool): Whether a pull operation is pending
 | 
| 1683 | 1702 |      #
 | 
| 1684 | -    def _pull_pending(self):
 | |
| 1703 | +    def _pull_pending(self, subdir=None):
 | |
| 1685 | 1704 |          if self._get_workspace():
 | 
| 1686 | 1705 |              # Workspace builds are never pushed to artifact servers
 | 
| 1687 | 1706 |              return False
 | 
| 1688 | 1707 |  | 
| 1689 | -        if self.__strong_cached:
 | |
| 1690 | -            # Artifact already in local cache
 | |
| 1708 | +        if self.__strong_cached and subdir:
 | |
| 1709 | +            # If we've specified a subdir, check if the subdir is cached locally
 | |
| 1710 | +            if self.__artifacts.contains_subdir_artifact(self, self.__strict_cache_key, subdir):
 | |
| 1711 | +                return False
 | |
| 1712 | +        elif self.__strong_cached:
 | |
| 1691 | 1713 |              return False
 | 
| 1692 | 1714 |  | 
| 1693 | 1715 |          # Pull is pending if artifact remote server available
 | 
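The strong-cached branch above now has two outcomes depending on whether a subdir was requested. A standalone sketch of the decision, where contains_subdir stands in for the real contains_subdir_artifact() query:

    # Illustrative decision logic for the strong-cached cases above.
    def pull_pending(strong_cached, subdir, contains_subdir):
        if strong_cached and subdir:
            # A subdir was requested: skip the pull only if that subdir
            # is already present in the local cache.
            if contains_subdir:
                return False
        elif strong_cached:
            # No subdir requested: a strong-cached artifact needs no pull.
            return False
        # The real code goes on to query the remote servers here.
        return True

    assert pull_pending(True, None, False) is False
    assert pull_pending(True, "buildtree", True) is False
    assert pull_pending(True, "buildtree", False) is True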
| ... | ... | @@ -1709,11 +1731,10 @@ class Element(Plugin): | 
| 1709 | 1731 |  | 
| 1710 | 1732 |          self._update_state()
 | 
| 1711 | 1733 |  | 
| 1712 | -    def _pull_strong(self, *, progress=None):
 | |
| 1734 | +    def _pull_strong(self, *, progress=None, subdir=None, excluded_subdirs=None):
 | |
| 1713 | 1735 |          weak_key = self._get_cache_key(strength=_KeyStrength.WEAK)
 | 
| 1714 | - | |
| 1715 | 1736 |          key = self.__strict_cache_key
 | 
| 1716 | -        if not self.__artifacts.pull(self, key, progress=progress):
 | |
| 1737 | +        if not self.__artifacts.pull(self, key, progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs):
 | |
| 1717 | 1738 |              return False
 | 
| 1718 | 1739 |  | 
| 1719 | 1740 |          # update weak ref by pointing it to this newly fetched artifact
 | 
| ... | ... | @@ -1721,10 +1742,9 @@ class Element(Plugin): | 
| 1721 | 1742 |  | 
| 1722 | 1743 |          return True
 | 
| 1723 | 1744 |  | 
| 1724 | -    def _pull_weak(self, *, progress=None):
 | |
| 1745 | +    def _pull_weak(self, *, progress=None, subdir=None, excluded_subdirs=None):
 | |
| 1725 | 1746 |          weak_key = self._get_cache_key(strength=_KeyStrength.WEAK)
 | 
| 1726 | - | |
| 1727 | -        if not self.__artifacts.pull(self, weak_key, progress=progress):
 | |
| 1747 | +        if not self.__artifacts.pull(self, weak_key, progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs):
 | |
| 1728 | 1748 |              return False
 | 
| 1729 | 1749 |  | 
| 1730 | 1750 |          # extract strong cache key from this newly fetched artifact
 | 
| ... | ... | @@ -1742,17 +1762,17 @@ class Element(Plugin): | 
| 1742 | 1762 |      #
 | 
| 1743 | 1763 |      # Returns: True if the artifact has been downloaded, False otherwise
 | 
| 1744 | 1764 |      #
 | 
| 1745 | -    def _pull(self):
 | |
| 1765 | +    def _pull(self, subdir=None, excluded_subdirs=None):
 | |
| 1746 | 1766 |          context = self._get_context()
 | 
| 1747 | 1767 |  | 
| 1748 | 1768 |          def progress(percent, message):
 | 
| 1749 | 1769 |              self.status(message)
 | 
| 1750 | 1770 |  | 
| 1751 | 1771 |          # Attempt to pull artifact without knowing whether it's available
 | 
| 1752 | -        pulled = self._pull_strong(progress=progress)
 | |
| 1772 | +        pulled = self._pull_strong(progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs)
 | |
| 1753 | 1773 |  | 
| 1754 | 1774 |          if not pulled and not self._cached() and not context.get_strict():
 | 
| 1755 | -            pulled = self._pull_weak(progress=progress)
 | |
| 1775 | +            pulled = self._pull_weak(progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs)
 | |
| 1756 | 1776 |  | 
| 1757 | 1777 |          if not pulled:
 | 
| 1758 | 1778 |              return False
 | 
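The pull path keeps its existing strong-then-weak fallback; the subdir arguments are only threaded through. In outline, with callables standing in for the methods above:

    # Outline of the fallback above: try the strict key first, then fall
    # back to the weak key when nothing is cached and strict mode is off.
    def pull(pull_strong, pull_weak, cached, strict):
        pulled = pull_strong()
        if not pulled and not cached and not strict:
            pulled = pull_weak()
        return pulled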
| ... | ... | @@ -1775,10 +1795,14 @@ class Element(Plugin): | 
| 1775 | 1795 |          if not self._cached():
 | 
| 1776 | 1796 |              return True
 | 
| 1777 | 1797 |  | 
| 1778 | -        # Do not push tained artifact
 | |
| 1798 | +        # Do not push tainted artifact
 | |
| 1779 | 1799 |          if self.__get_tainted():
 | 
| 1780 | 1800 |              return True
 | 
| 1781 | 1801 |  | 
| 1802 | +        # Do not push elements that have a dangling buildtree artifact unless the element type is
 | |
| 1803 | +        # expected to have an empty buildtree directory
 | |
| 1804 | +        if not self.__artifacts.contains_subdir_artifact(self, self.__strict_cache_key, 'buildtree'):
 | |
| 1805 | +            return True
 | |
| 1782 | 1806 |          return False
 | 
| 1783 | 1807 |  | 
| 1784 | 1808 |      # _push():
 | 
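With the hunk above, the push gate has three short-circuits. A hedged restatement, where the flags stand in for the cache and taint queries above:

    # Illustrative: why a push is skipped after this change.
    def skip_push(cached, tainted, has_buildtree_subdir):
        if not cached:
            return True   # nothing to push yet
        if tainted:
            return True   # tainted artifacts are never pushed
        if not has_buildtree_subdir:
            return True   # dangling buildtree: the artifact is incomplete
        return False

    assert skip_push(True, False, True) is False
    assert skip_push(True, False, False) is True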
| ... | ... | @@ -2110,10 +2134,19 @@ class Element(Plugin): | 
| 2110 | 2134 |              workspaced_dependencies = self.__get_artifact_metadata_workspaced_dependencies()
 | 
| 2111 | 2135 |  | 
| 2112 | 2136 |              # Other conditions should be or-ed
 | 
| 2113 | -            self.__tainted = workspaced or workspaced_dependencies
 | |
| 2137 | +            self.__tainted = (workspaced or workspaced_dependencies or
 | |
| 2138 | +                              not self.__sandbox_config_supported)
 | |
| 2114 | 2139 |  | 
| 2115 | 2140 |          return self.__tainted
 | 
| 2116 | 2141 |  | 
| 2142 | +    # __use_remote_execution():
 | |
| 2143 | +    #
 | |
| 2144 | +    # Returns True if remote execution is configured and the element plugin
 | |
| 2145 | +    # supports it.
 | |
| 2146 | +    #
 | |
| 2147 | +    def __use_remote_execution(self):
 | |
| 2148 | +        return self.__remote_execution_url and self.BST_VIRTUAL_DIRECTORY
 | |
| 2149 | + | |
| 2117 | 2150 |      # __sandbox():
 | 
| 2118 | 2151 |      #
 | 
| 2119 | 2152 |      # A context manager to prepare a Sandbox object at the specified directory,
 | 
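The new predicate simply conjoins the two preconditions the old inline check tested, keeping both call sites in sync. A hedged equivalent:

    # Illustrative: remote execution needs both a configured URL and a
    # plugin that supports virtual directories (BST_VIRTUAL_DIRECTORY).
    def use_remote_execution(remote_execution_url, bst_virtual_directory):
        return bool(remote_execution_url and bst_virtual_directory)

    assert use_remote_execution("http://re.example.com:8980", True)
    assert not use_remote_execution(None, True)
    assert not use_remote_execution("http://re.example.com:8980", False)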
| ... | ... | @@ -2135,9 +2168,7 @@ class Element(Plugin): | 
| 2135 | 2168 |          project = self._get_project()
 | 
| 2136 | 2169 |          platform = Platform.get_platform()
 | 
| 2137 | 2170 |  | 
| 2138 | -        if (directory is not None and
 | |
| 2139 | -            self.__remote_execution_url and
 | |
| 2140 | -            self.BST_VIRTUAL_DIRECTORY):
 | |
| 2171 | +        if directory is not None and self.__use_remote_execution():
 | |
| 2141 | 2172 |  | 
| 2142 | 2173 |              self.info("Using a remote sandbox for artifact {} with directory '{}'".format(self.name, directory))
 | 
| 2143 | 2174 |  | 
| ... | ... | @@ -27,7 +27,7 @@ from . import Sandbox | 
| 27 | 27 |  from ..storage._filebaseddirectory import FileBasedDirectory
 | 
| 28 | 28 |  from ..storage._casbaseddirectory import CasBasedDirectory
 | 
| 29 | 29 |  from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
 | 
| 30 | -from .._platform import Platform
 | |
| 30 | +from .._protos.google.rpc import code_pb2
 | |
| 31 | 31 |  | 
| 32 | 32 |  | 
| 33 | 33 |  class SandboxError(Exception):
 | 
| ... | ... | @@ -71,8 +71,8 @@ class SandboxRemote(Sandbox): | 
| 71 | 71 |                                                        output_files=[],
 | 
| 72 | 72 |                                                        output_directories=[self._output_directory],
 | 
| 73 | 73 |                                                        platform=None)
 | 
| 74 | -        platform = Platform.get_platform()
 | |
| 75 | -        cascache = platform.artifactcache
 | |
| 74 | +        context = self._get_context()
 | |
| 75 | +        cascache = context.artifactcache
 | |
| 76 | 76 |          # Upload the Command message to the remote CAS server
 | 
| 77 | 77 |          command_digest = cascache.push_message(self._get_project(), remote_command)
 | 
| 78 | 78 |          if not command_digest or not cascache.verify_digest_pushed(self._get_project(), command_digest):
 | 
| ... | ... | @@ -134,8 +134,8 @@ class SandboxRemote(Sandbox): | 
| 134 | 134 |          if tree_digest is None or not tree_digest.hash:
 | 
| 135 | 135 |              raise SandboxError("Output directory structure had no digest attached.")
 | 
| 136 | 136 |  | 
| 137 | -        platform = Platform.get_platform()
 | |
| 138 | -        cascache = platform.artifactcache
 | |
| 137 | +        context = self._get_context()
 | |
| 138 | +        cascache = context.artifactcache
 | |
| 139 | 139 |          # Now do a pull to ensure we have the necessary parts.
 | 
| 140 | 140 |          dir_digest = cascache.pull_tree(self._get_project(), tree_digest)
 | 
| 141 | 141 |          if dir_digest is None or not dir_digest.hash or not dir_digest.size_bytes:
 | 
| ... | ... | @@ -170,8 +170,8 @@ class SandboxRemote(Sandbox): | 
| 170 | 170 |  | 
| 171 | 171 |          upload_vdir.recalculate_hash()
 | 
| 172 | 172 |  | 
| 173 | -        platform = Platform.get_platform()
 | |
| 174 | -        cascache = platform.artifactcache
 | |
| 173 | +        context = self._get_context()
 | |
| 174 | +        cascache = context.artifactcache
 | |
| 175 | 175 |          # Now, push that key (without necessarily needing a ref) to the remote.
 | 
| 176 | 176 |          cascache.push_directory(self._get_project(), upload_vdir)
 | 
| 177 | 177 |          if not cascache.verify_digest_pushed(self._get_project(), upload_vdir.ref):
 | 
| ... | ... | @@ -207,15 +207,23 @@ class SandboxRemote(Sandbox): | 
| 207 | 207 |  | 
| 208 | 208 |          operation.response.Unpack(execution_response)
 | 
| 209 | 209 |  | 
| 210 | -        if execution_response.status.code != 0:
 | |
| 211 | -            # A normal error during the build: the remote execution system
 | |
| 212 | -            # has worked correctly but the command failed.
 | |
| 213 | -            # execution_response.error also contains 'message' (str) and
 | |
| 214 | -            # 'details' (iterator of Any) which we ignore at the moment.
 | |
| 215 | -            return execution_response.status.code
 | |
| 210 | +        if execution_response.status.code != code_pb2.OK:
 | |
| 211 | +            # An unexpected error during execution: the remote execution
 | |
| 212 | +            # system failed to process the execution request.
 | |
| 213 | +            if execution_response.status.message:
 | |
| 214 | +                raise SandboxError(execution_response.status.message)
 | |
| 215 | +            else:
 | |
| 216 | +                raise SandboxError("Remote server failed to execute the build request.")
 | |
| 216 | 217 |  | 
| 217 | 218 |          action_result = execution_response.result
 | 
| 218 | 219 |  | 
| 220 | +        if action_result.exit_code != 0:
 | |
| 221 | +            # A normal error during the build: the remote execution system
 | |
| 222 | +            # has worked correctly but the command failed.
 | |
| 223 | +            # action_result.stdout and action_result.stderr also contain
 | |
| 224 | +            # build command outputs which we ignore at the moment.
 | |
| 225 | +            return action_result.exit_code
 | |
| 226 | + | |
| 219 | 227 |          self.process_job_output(action_result.output_directories, action_result.output_files)
 | 
| 220 | 228 |  | 
| 221 | 229 |          return 0 | 
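The rework above separates two failure domains: a non-OK google.rpc status now raises, because it means the remote execution service itself failed, while a non-zero exit code in the ActionResult is an ordinary build failure and is returned to the caller. A condensed sketch, with plain ints standing in for the protobuf messages (OK is 0 in google.rpc):

    # Condensed control flow of the handler above.
    OK = 0  # code_pb2.OK

    class SandboxError(Exception):
        pass

    def interpret_response(status_code, status_message, exit_code):
        if status_code != OK:
            # Infrastructure failure: the service could not run the action.
            raise SandboxError(status_message or
                               "Remote server failed to execute the build request.")
        if exit_code != 0:
            # Ordinary build failure: the command ran and returned non-zero.
            return exit_code
        return 0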
| ... | ... | @@ -38,7 +38,6 @@ from .._exceptions import BstError | 
| 38 | 38 |  from .directory import Directory, VirtualDirectoryError
 | 
| 39 | 39 |  from ._filebaseddirectory import FileBasedDirectory
 | 
| 40 | 40 |  from ..utils import FileListResult, safe_copy, list_relative_paths
 | 
| 41 | -from .._artifactcache.cascache import CASCache
 | |
| 42 | 41 |  | 
| 43 | 42 |  | 
| 44 | 43 |  class IndexEntry():
 | 
| ... | ... | @@ -80,7 +79,7 @@ class CasBasedDirectory(Directory): | 
| 80 | 79 |          self.filename = filename
 | 
| 81 | 80 |          self.common_name = common_name
 | 
| 82 | 81 |          self.pb2_directory = remote_execution_pb2.Directory()
 | 
| 83 | -        self.cas_cache = CASCache(context)
 | |
| 82 | +        self.cas_cache = context.artifactcache
 | |
| 84 | 83 |          if ref:
 | 
| 85 | 84 |              with open(self.cas_cache.objpath(ref), 'rb') as f:
 | 
| 86 | 85 |                  self.pb2_directory.ParseFromString(f.read())
 | 
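Previously every CasBasedDirectory constructed its own CASCache; after this change they all share the context's handle, as do the tests below. A sketch of the difference, with stub classes standing in for the real ones:

    # Illustrative: one shared cache handle per context, not one per
    # directory object.
    class CASCacheStub:
        instances = 0

        def __init__(self):
            CASCacheStub.instances += 1

    class ContextStub:
        def __init__(self):
            self.artifactcache = CASCacheStub()  # constructed once

    ctx = ContextStub()
    handles = [ctx.artifactcache for _ in range(3)]  # all the same object
    assert CASCacheStub.instances == 1
    assert all(h is ctx.artifactcache for h in handles)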
| ... | ... | @@ -6,7 +6,6 @@ import signal | 
| 6 | 6 |  import pytest
 | 
| 7 | 7 |  | 
| 8 | 8 |  from buildstream import _yaml, _signals, utils
 | 
| 9 | -from buildstream._artifactcache.cascache import CASCache
 | |
| 10 | 9 |  from buildstream._context import Context
 | 
| 11 | 10 |  from buildstream._project import Project
 | 
| 12 | 11 |  from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
 | 
| ... | ... | @@ -88,7 +87,7 @@ def test_pull(cli, tmpdir, datafiles): | 
| 88 | 87 |          # Load the project and CAS cache
 | 
| 89 | 88 |          project = Project(project_dir, context)
 | 
| 90 | 89 |          project.ensure_fully_loaded()
 | 
| 91 | -        cas = CASCache(context)
 | |
| 90 | +        cas = context.artifactcache
 | |
| 92 | 91 |  | 
| 93 | 92 |          # Assert that the element's artifact is **not** cached
 | 
| 94 | 93 |          element = project.load_elements(['target.bst'], cas)[0]
 | 
| ... | ... | @@ -130,7 +129,7 @@ def _test_pull(user_config_file, project_dir, artifact_dir, | 
| 130 | 129 |      project.ensure_fully_loaded()
 | 
| 131 | 130 |  | 
| 132 | 131 |      # Create a local CAS cache handle
 | 
| 133 | -    cas = CASCache(context)
 | |
| 132 | +    cas = context.artifactcache
 | |
| 134 | 133 |  | 
| 135 | 134 |      # Load the target element
 | 
| 136 | 135 |      element = project.load_elements([element_name], cas)[0]
 | 
| ... | ... | @@ -191,7 +190,7 @@ def test_pull_tree(cli, tmpdir, datafiles): | 
| 191 | 190 |          # Load the project and CAS cache
 | 
| 192 | 191 |          project = Project(project_dir, context)
 | 
| 193 | 192 |          project.ensure_fully_loaded()
 | 
| 194 | -        cas = CASCache(context)
 | |
| 193 | +        cas = context.artifactcache
 | |
| 195 | 194 |  | 
| 196 | 195 |          # Assert that the element's artifact is cached
 | 
| 197 | 196 |          element = project.load_elements(['target.bst'], cas)[0]
 | 
| ... | ... | @@ -269,7 +268,7 @@ def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest | 
| 269 | 268 |      project.ensure_fully_loaded()
 | 
| 270 | 269 |  | 
| 271 | 270 |      # Create a local CAS cache handle
 | 
| 272 | -    cas = CASCache(context)
 | |
| 271 | +    cas = context.artifactcache
 | |
| 273 | 272 |  | 
| 274 | 273 |      # Manually setup the CAS remote
 | 
| 275 | 274 |      cas.setup_remotes(use_config=True)
 | 
| ... | ... | @@ -304,7 +303,7 @@ def _test_pull_tree(user_config_file, project_dir, artifact_dir, artifact_digest | 
| 304 | 303 |      project.ensure_fully_loaded()
 | 
| 305 | 304 |  | 
| 306 | 305 |      # Create a local CAS cache handle
 | 
| 307 | -    cas = CASCache(context)
 | |
| 306 | +    cas = context.artifactcache
 | |
| 308 | 307 |  | 
| 309 | 308 |      # Manually setup the CAS remote
 | 
| 310 | 309 |      cas.setup_remotes(use_config=True)
 | 
| ... | ... | @@ -6,7 +6,6 @@ import pytest | 
| 6 | 6 |  | 
| 7 | 7 |  from pluginbase import PluginBase
 | 
| 8 | 8 |  from buildstream import _yaml, _signals, utils
 | 
| 9 | -from buildstream._artifactcache.cascache import CASCache
 | |
| 10 | 9 |  from buildstream._context import Context
 | 
| 11 | 10 |  from buildstream._project import Project
 | 
| 12 | 11 |  from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
 | 
| ... | ... | @@ -67,7 +66,7 @@ def test_push(cli, tmpdir, datafiles): | 
| 67 | 66 |          project.ensure_fully_loaded()
 | 
| 68 | 67 |  | 
| 69 | 68 |          # Create a local CAS cache handle
 | 
| 70 | -        cas = CASCache(context)
 | |
| 69 | +        cas = context.artifactcache
 | |
| 71 | 70 |  | 
| 72 | 71 |          # Assert that the element's artifact is cached
 | 
| 73 | 72 |          element = project.load_elements(['target.bst'], cas)[0]
 | 
| ... | ... | @@ -109,7 +108,7 @@ def _test_push(user_config_file, project_dir, artifact_dir, | 
| 109 | 108 |      project.ensure_fully_loaded()
 | 
| 110 | 109 |  | 
| 111 | 110 |      # Create a local CAS cache handle
 | 
| 112 | -    cas = CASCache(context)
 | |
| 111 | +    cas = context.artifactcache
 | |
| 113 | 112 |  | 
| 114 | 113 |      # Load the target element
 | 
| 115 | 114 |      element = project.load_elements([element_name], cas)[0]
 | 
| ... | ... | @@ -166,7 +165,7 @@ def test_push_directory(cli, tmpdir, datafiles): | 
| 166 | 165 |          # Load the project and CAS cache
 | 
| 167 | 166 |          project = Project(project_dir, context)
 | 
| 168 | 167 |          project.ensure_fully_loaded()
 | 
| 169 | -        cas = CASCache(context)
 | |
| 168 | +        cas = context.artifactcache
 | |
| 170 | 169 |  | 
| 171 | 170 |          # Assert that the element's artifact is cached
 | 
| 172 | 171 |          element = project.load_elements(['target.bst'], cas)[0]
 | 
| ... | ... | @@ -217,7 +216,7 @@ def _test_push_directory(user_config_file, project_dir, artifact_dir, artifact_d | 
| 217 | 216 |      project.ensure_fully_loaded()
 | 
| 218 | 217 |  | 
| 219 | 218 |      # Create a local CAS cache handle
 | 
| 220 | -    cas = CASCache(context)
 | |
| 219 | +    cas = context.artifactcache
 | |
| 221 | 220 |  | 
| 222 | 221 |      # Manually setup the CAS remote
 | 
| 223 | 222 |      cas.setup_remotes(use_config=True)
 | 
| ... | ... | @@ -292,7 +291,7 @@ def _test_push_message(user_config_file, project_dir, artifact_dir, queue): | 
| 292 | 291 |      project.ensure_fully_loaded()
 | 
| 293 | 292 |  | 
| 294 | 293 |      # Create a local CAS cache handle
 | 
| 295 | -    cas = CASCache(context)
 | |
| 294 | +    cas = context.artifactcache
 | |
| 296 | 295 |  | 
| 297 | 296 |      # Manually setup the CAS remote
 | 
| 298 | 297 |      cas.setup_remotes(use_config=True)
 | 
| ... | ... | @@ -11,7 +11,6 @@ from multiprocessing import Process, Queue | 
| 11 | 11 |  import pytest_cov
 | 
| 12 | 12 |  | 
| 13 | 13 |  from buildstream import _yaml
 | 
| 14 | -from buildstream._artifactcache.cascache import CASCache
 | |
| 15 | 14 |  from buildstream._artifactcache.casserver import create_server
 | 
| 16 | 15 |  from buildstream._context import Context
 | 
| 17 | 16 |  from buildstream._exceptions import ArtifactError
 | 
| ... | ... | @@ -49,7 +48,7 @@ class ArtifactShare(): | 
| 49 | 48 |          context = Context()
 | 
| 50 | 49 |          context.artifactdir = self.repodir
 | 
| 51 | 50 |  | 
| 52 | -        self.cas = CASCache(context)
 | |
| 51 | +        self.cas = context.artifactcache
 | |
| 53 | 52 |  | 
| 54 | 53 |          self.total_space = total_space
 | 
| 55 | 54 |          self.free_space = free_space
 | 
