Jürg Billeter pushed to branch juerg/cas-batch at BuildStream / buildstream
Commits:
- 
40f25512
by Richard Maw at 2018-09-10T12:29:15Z
- 
247686dc
by richardmaw-codethink at 2018-09-10T14:47:48Z
- 
ef66daf5
by Tiago Gomes at 2018-09-10T14:57:41Z
- 
a37bc6ce
by Tiago Gomes at 2018-09-10T14:57:41Z
- 
af74a3f8
by Tiago Gomes at 2018-09-10T14:57:41Z
- 
75c55633
by Tiago Gomes at 2018-09-10T14:57:41Z
- 
e0bb71b2
by Tiago Gomes at 2018-09-10T15:27:53Z
- 
47f3064a
by Jürg Billeter at 2018-09-10T15:43:25Z
- 
1a7fb3cb
by Jürg Billeter at 2018-09-10T15:43:25Z
12 changed files:
- buildstream/_artifactcache/casserver.py
- buildstream/_exceptions.py
- buildstream/data/projectconfig.yaml
- buildstream/element.py
- buildstream/sandbox/_sandboxbwrap.py
- buildstream/sandbox/_sandboxchroot.py
- doc/source/format_declaring.rst
- tests/integration/manual.py
- + tests/loader/variables.py
- + tests/loader/variables/simple/foo.txt
- + tests/loader/variables/simple/project.conf
- tests/testutils/repo/git.py
Changes:
| ... | ... | @@ -38,6 +38,10 @@ from .._context import Context | 
| 38 | 38 |  from .cascache import CASCache
 | 
| 39 | 39 |  | 
| 40 | 40 |  | 
| 41 | +# The default limit for gRPC messages is 4 MiB
 | |
| 42 | +_MAX_BATCH_TOTAL_SIZE_BYTES = 4 * 1024 * 1024
 | |
| 43 | + | |
| 44 | + | |
| 41 | 45 |  # Trying to push an artifact that is too large
 | 
| 42 | 46 |  class ArtifactTooLargeException(Exception):
 | 
| 43 | 47 |      pass
 | 
| ... | ... | @@ -67,6 +71,9 @@ def create_server(repo, *, enable_push): | 
| 67 | 71 |      remote_execution_pb2_grpc.add_ContentAddressableStorageServicer_to_server(
 | 
| 68 | 72 |          _ContentAddressableStorageServicer(artifactcache), server)
 | 
| 69 | 73 |  | 
| 74 | +    remote_execution_pb2_grpc.add_CapabilitiesServicer_to_server(
 | |
| 75 | +        _CapabilitiesServicer(), server)
 | |
| 76 | + | |
| 70 | 77 |      buildstream_pb2_grpc.add_ReferenceStorageServicer_to_server(
 | 
| 71 | 78 |          _ReferenceStorageServicer(artifactcache, enable_push=enable_push), server)
 | 
| 72 | 79 |  | 
| ... | ... | @@ -229,6 +236,48 @@ class _ContentAddressableStorageServicer(remote_execution_pb2_grpc.ContentAddres | 
| 229 | 236 |                  d.size_bytes = digest.size_bytes
 | 
| 230 | 237 |          return response
 | 
| 231 | 238 |  | 
| 239 | +    def BatchReadBlobs(self, request, context):
 | |
| 240 | +        response = remote_execution_pb2.BatchReadBlobsResponse()
 | |
| 241 | +        batch_size = 0
 | |
| 242 | + | |
| 243 | +        for digest in request.digests:
 | |
| 244 | +            batch_size += digest.size_bytes
 | |
| 245 | +            if batch_size > _MAX_BATCH_TOTAL_SIZE_BYTES:
 | |
| 246 | +                context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
 | |
| 247 | +                return response
 | |
| 248 | + | |
| 249 | +            blob_response = response.responses.add()
 | |
| 250 | +            blob_response.digest.hash = digest.hash
 | |
| 251 | +            blob_response.digest.size_bytes = digest.size_bytes
 | |
| 252 | +            try:
 | |
| 253 | +                with open(self.cas.objpath(digest), 'rb') as f:
 | |
| 254 | +                    if os.fstat(f.fileno()).st_size != digest.size_bytes:
 | |
| 255 | +                        blob_response.status.code = grpc.StatusCode.NOT_FOUND
 | |
| 256 | +                        continue
 | |
| 257 | + | |
| 258 | +                    blob_response.data = f.read(digest.size_bytes)
 | |
| 259 | +            except FileNotFoundError:
 | |
| 260 | +                blob_response.status.code = grpc.StatusCode.NOT_FOUND
 | |
| 261 | + | |
| 262 | +        return response
 | |
| 263 | + | |
| 264 | + | |
| 265 | +class _CapabilitiesServicer(remote_execution_pb2_grpc.CapabilitiesServicer):
 | |
| 266 | +    def GetCapabilities(self, request, context):
 | |
| 267 | +        response = remote_execution_pb2.ServerCapabilities()
 | |
| 268 | + | |
| 269 | +        cache_capabilities = response.cache_capabilities
 | |
| 270 | +        cache_capabilities.digest_function.append(remote_execution_pb2.SHA256)
 | |
| 271 | +        cache_capabilities.action_cache_update_capabilities.update_enabled = False
 | |
| 272 | +        cache_capabilities.max_batch_total_size_bytes = _MAX_BATCH_TOTAL_SIZE_BYTES
 | |
| 273 | +        cache_capabilities.symlink_absolute_path_strategy = remote_execution_pb2.CacheCapabilities.ALLOWED
 | |
| 274 | + | |
| 275 | +        response.deprecated_api_version.major = 2
 | |
| 276 | +        response.low_api_version.major = 2
 | |
| 277 | +        response.high_api_version.major = 2
 | |
| 278 | + | |
| 279 | +        return response
 | |
| 280 | + | |
| 232 | 281 |  | 
| 233 | 282 |  class _ReferenceStorageServicer(buildstream_pb2_grpc.ReferenceStorageServicer):
 | 
| 234 | 283 |      def __init__(self, cas, *, enable_push):
 | 
| ... | ... | @@ -220,6 +220,9 @@ class LoadErrorReason(Enum): | 
| 220 | 220 |      # A recursive variable has been encountered
 | 
| 221 | 221 |      RECURSIVE_VARIABLE = 22
 | 
| 222 | 222 |  | 
| 223 | +    # An attempt to set the value of a protected variable
 | |
| 224 | +    PROTECTED_VARIABLE_REDEFINED = 23
 | |
| 225 | + | |
| 223 | 226 |  | 
| 224 | 227 |  # LoadError
 | 
| 225 | 228 |  #
 | 
| ... | ... | @@ -16,21 +16,7 @@ ref-storage: inline | 
| 16 | 16 |  # Variable Configuration
 | 
| 17 | 17 |  #
 | 
| 18 | 18 |  variables:
 | 
| 19 | - | |
| 20 | -  # Maximum number of parallel build processes within a given
 | |
| 21 | -  # build, support for this is conditional on the element type
 | |
| 22 | -  # and the build system used (any element using 'make' can
 | |
| 23 | -  # implement this).
 | |
| 24 | -  #
 | |
| 25 | -  # Note: this value defaults to the number of cores available
 | |
| 26 | -  max-jobs: 4
 | |
| 27 | - | |
| 28 | -  # Note: These variables are defined later on in element.py and _project.py
 | |
| 29 | -  element-name: ""
 | |
| 30 | -  project-name: ""
 | |
| 31 | - | |
| 32 | 19 |    # Path configuration, to be used in build instructions.
 | 
| 33 | -  #
 | |
| 34 | 20 |    prefix: "/usr"
 | 
| 35 | 21 |    exec_prefix: "%{prefix}"
 | 
| 36 | 22 |    bindir: "%{exec_prefix}/bin"
 | 
| ... | ... | @@ -89,7 +75,6 @@ variables: | 
| 89 | 75 |      find "%{install-root}" -name '*.pyc' -exec \
 | 
| 90 | 76 |        dd if=/dev/zero of={} bs=1 count=4 seek=4 conv=notrunc ';'
 | 
| 91 | 77 |  | 
| 92 | - | |
| 93 | 78 |  # Base sandbox environment, can be overridden by plugins
 | 
| 94 | 79 |  environment:
 | 
| 95 | 80 |    PATH: /usr/bin:/bin:/usr/sbin:/sbin
 | 
| ... | ... | @@ -2283,7 +2283,8 @@ class Element(Plugin): | 
| 2283 | 2283 |      # substituting command strings to be run in the sandbox
 | 
| 2284 | 2284 |      #
 | 
| 2285 | 2285 |      def __extract_variables(self, meta):
 | 
| 2286 | -        default_vars = _yaml.node_get(self.__defaults, Mapping, 'variables', default_value={})
 | |
| 2286 | +        default_vars = _yaml.node_get(self.__defaults, Mapping, 'variables',
 | |
| 2287 | +                                      default_value={})
 | |
| 2287 | 2288 |  | 
| 2288 | 2289 |          project = self._get_project()
 | 
| 2289 | 2290 |          if self.__is_junction:
 | 
| ... | ... | @@ -2296,6 +2297,13 @@ class Element(Plugin): | 
| 2296 | 2297 |          _yaml.composite(variables, meta.variables)
 | 
| 2297 | 2298 |          _yaml.node_final_assertions(variables)
 | 
| 2298 | 2299 |  | 
| 2300 | +        for var in ('project-name', 'element-name', 'max-jobs'):
 | |
| 2301 | +            provenance = _yaml.node_get_provenance(variables, var)
 | |
| 2302 | +            if provenance and provenance.filename != '':
 | |
| 2303 | +                raise LoadError(LoadErrorReason.PROTECTED_VARIABLE_REDEFINED,
 | |
| 2304 | +                                "{}: invalid redefinition of protected variable '{}'"
 | |
| 2305 | +                                .format(provenance, var))
 | |
| 2306 | + | |
| 2299 | 2307 |          return variables
 | 
| 2300 | 2308 |  | 
| 2301 | 2309 |      # This will resolve the final configuration to be handed
 | 
| ... | ... | @@ -69,6 +69,15 @@ class SandboxBwrap(Sandbox): | 
| 69 | 69 |          if env is None:
 | 
| 70 | 70 |              env = self._get_environment()
 | 
| 71 | 71 |  | 
| 72 | +        if cwd is None:
 | |
| 73 | +            cwd = '/'
 | |
| 74 | + | |
| 75 | +        # Naive getcwd implementations can break when bind-mounts to different
 | |
| 76 | +        # paths on the same filesystem are present. Letting the command know
 | |
| 77 | +        # what directory it is in makes it unnecessary to call the faulty
 | |
| 78 | +        # getcwd.
 | |
| 79 | +        env['PWD'] = cwd
 | |
| 80 | + | |
| 72 | 81 |          if not self._has_command(command[0], env):
 | 
| 73 | 82 |              raise SandboxError("Staged artifacts do not provide command "
 | 
| 74 | 83 |                                 "'{}'".format(command[0]),
 | 
| ... | ... | @@ -83,9 +92,6 @@ class SandboxBwrap(Sandbox): | 
| 83 | 92 |          mount_map = MountMap(self, flags & SandboxFlags.ROOT_READ_ONLY)
 | 
| 84 | 93 |          root_mount_source = mount_map.get_mount_source('/')
 | 
| 85 | 94 |  | 
| 86 | -        if cwd is None:
 | |
| 87 | -            cwd = '/'
 | |
| 88 | - | |
| 89 | 95 |          # Grab the full path of the bwrap binary
 | 
| 90 | 96 |          bwrap_command = [utils.get_host_tool('bwrap')]
 | 
| 91 | 97 |  | 
| ... | ... | @@ -58,6 +58,12 @@ class SandboxChroot(Sandbox): | 
| 58 | 58 |          if env is None:
 | 
| 59 | 59 |              env = self._get_environment()
 | 
| 60 | 60 |  | 
| 61 | +        # Naive getcwd implementations can break when bind-mounts to different
 | |
| 62 | +        # paths on the same filesystem are present. Letting the command know
 | |
| 63 | +        # what directory it is in makes it unnecessary to call the faulty
 | |
| 64 | +        # getcwd.
 | |
| 65 | +        env['PWD'] = cwd
 | |
| 66 | + | |
| 61 | 67 |          if not self._has_command(command[0], env):
 | 
| 62 | 68 |              raise SandboxError("Staged artifacts do not provide command "
 | 
| 63 | 69 |                                 "'{}'".format(command[0]),
 | 
| ... | ... | @@ -484,3 +484,25 @@ dependency and that all referenced variables are declared, the following is fine | 
| 484 | 484 |       install-commands:
 | 
| 485 | 485 |       - |
 | 
| 486 | 486 |         %{make-install} RELEASE_TEXT="%{release-text}"
 | 
| 487 | + | |
| 488 | + | |
| 489 | +Variables declared by BuildStream
 | |
| 490 | +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 | |
| 491 | +BuildStream declares a set of :ref:`builtin <project_builtin_defaults>`
 | |
| 492 | +variables that may be overridden. In addition, the following
 | |
| 493 | +read-only variables are also dynamically declared by BuildStream:
 | |
| 494 | + | |
| 495 | +* ``element-name``
 | |
| 496 | + | |
| 497 | +  The name of the element being processed (e.g. base/alpine.bst).
 | |
| 498 | + | |
| 499 | +* ``project-name``
 | |
| 500 | + | |
| 501 | +  The name of the project where BuildStream is being used.
 | |
| 502 | + | |
| 503 | +* ``max-jobs``
 | |
| 504 | + | |
| 505 | +  Maximum number of parallel build processes within a given
 | |
| 506 | +  build, support for this is conditional on the element type
 | |
| 507 | +  and the build system used (any element using 'make' can
 | |
| 508 | +  implement this). | 
| ... | ... | @@ -64,7 +64,7 @@ strip | 
| 64 | 64 |  | 
| 65 | 65 |  | 
| 66 | 66 |  @pytest.mark.datafiles(DATA_DIR)
 | 
| 67 | -def test_manual_element_noparallel(cli, tmpdir, datafiles):
 | |
| 67 | +def test_manual_element_environment(cli, tmpdir, datafiles):
 | |
| 68 | 68 |      project = os.path.join(datafiles.dirname, datafiles.basename)
 | 
| 69 | 69 |      checkout = os.path.join(cli.directory, 'checkout')
 | 
| 70 | 70 |      element_path = os.path.join(project, 'elements')
 | 
| ... | ... | @@ -72,15 +72,11 @@ def test_manual_element_noparallel(cli, tmpdir, datafiles): | 
| 72 | 72 |  | 
| 73 | 73 |      create_manual_element(element_name, element_path, {
 | 
| 74 | 74 |          'install-commands': [
 | 
| 75 | -            "echo $MAKEFLAGS >> test",
 | |
| 76 | 75 |              "echo $V >> test",
 | 
| 77 | 76 |              "cp test %{install-root}"
 | 
| 78 | 77 |          ]
 | 
| 79 | 78 |      }, {
 | 
| 80 | -        'max-jobs': 2,
 | |
| 81 | -        'notparallel': True
 | |
| 82 | 79 |      }, {
 | 
| 83 | -        'MAKEFLAGS': '-j%{max-jobs} -Wall',
 | |
| 84 | 80 |          'V': 2
 | 
| 85 | 81 |      })
 | 
| 86 | 82 |  | 
| ... | ... | @@ -93,13 +89,11 @@ def test_manual_element_noparallel(cli, tmpdir, datafiles): | 
| 93 | 89 |      with open(os.path.join(checkout, 'test')) as f:
 | 
| 94 | 90 |          text = f.read()
 | 
| 95 | 91 |  | 
| 96 | -    assert text == """-j1 -Wall
 | |
| 97 | -2
 | |
| 98 | -"""
 | |
| 92 | +    assert text == "2\n"
 | |
| 99 | 93 |  | 
| 100 | 94 |  | 
| 101 | 95 |  @pytest.mark.datafiles(DATA_DIR)
 | 
| 102 | -def test_manual_element_environment(cli, tmpdir, datafiles):
 | |
| 96 | +def test_manual_element_noparallel(cli, tmpdir, datafiles):
 | |
| 103 | 97 |      project = os.path.join(datafiles.dirname, datafiles.basename)
 | 
| 104 | 98 |      checkout = os.path.join(cli.directory, 'checkout')
 | 
| 105 | 99 |      element_path = os.path.join(project, 'elements')
 | 
| ... | ... | @@ -112,7 +106,7 @@ def test_manual_element_environment(cli, tmpdir, datafiles): | 
| 112 | 106 |              "cp test %{install-root}"
 | 
| 113 | 107 |          ]
 | 
| 114 | 108 |      }, {
 | 
| 115 | -        'max-jobs': 2
 | |
| 109 | +        'notparallel': True
 | |
| 116 | 110 |      }, {
 | 
| 117 | 111 |          'MAKEFLAGS': '-j%{max-jobs} -Wall',
 | 
| 118 | 112 |          'V': 2
 | 
| ... | ... | @@ -127,6 +121,6 @@ def test_manual_element_environment(cli, tmpdir, datafiles): | 
| 127 | 121 |      with open(os.path.join(checkout, 'test')) as f:
 | 
| 128 | 122 |          text = f.read()
 | 
| 129 | 123 |  | 
| 130 | -    assert text == """-j2 -Wall
 | |
| 124 | +    assert text == """-j1 -Wall
 | |
| 131 | 125 |  2
 | 
| 132 | 126 |  """ | 
| 1 | +import os
 | |
| 2 | +import pytest
 | |
| 3 | + | |
| 4 | +from buildstream import _yaml
 | |
| 5 | +from buildstream._exceptions import ErrorDomain, LoadErrorReason
 | |
| 6 | +from tests.testutils import cli
 | |
| 7 | + | |
| 8 | +DATA_DIR = os.path.join(
 | |
| 9 | +    os.path.dirname(os.path.realpath(__file__)),
 | |
| 10 | +    'variables',
 | |
| 11 | +)
 | |
| 12 | + | |
| 13 | +PROTECTED_VARIABLES = [('project-name'), ('element-name'), ('max-jobs')]
 | |
| 14 | + | |
| 15 | + | |
| 16 | +@pytest.mark.parametrize('protected_var', PROTECTED_VARIABLES)
 | |
| 17 | +@pytest.mark.datafiles(DATA_DIR)
 | |
| 18 | +def test_use_of_protected_var_project_conf(cli, tmpdir, datafiles, protected_var):
 | |
| 19 | +    project = os.path.join(str(datafiles), 'simple')
 | |
| 20 | + | |
| 21 | +    conf = {
 | |
| 22 | +        'name': 'test',
 | |
| 23 | +        'variables': {
 | |
| 24 | +            protected_var: 'some-value'
 | |
| 25 | +        }
 | |
| 26 | +    }
 | |
| 27 | +    _yaml.dump(conf, os.path.join(project, 'project.conf'))
 | |
| 28 | + | |
| 29 | +    element = {
 | |
| 30 | +        'kind': 'import',
 | |
| 31 | +        'sources': [
 | |
| 32 | +            {
 | |
| 33 | +                'kind': 'local',
 | |
| 34 | +                'path': 'foo.txt'
 | |
| 35 | +            }
 | |
| 36 | +        ],
 | |
| 37 | +    }
 | |
| 38 | +    _yaml.dump(element, os.path.join(project, 'target.bst'))
 | |
| 39 | + | |
| 40 | +    result = cli.run(project=project, args=['build', 'target.bst'])
 | |
| 41 | +    result.assert_main_error(ErrorDomain.LOAD,
 | |
| 42 | +                             LoadErrorReason.PROTECTED_VARIABLE_REDEFINED)
 | |
| 43 | + | |
| 44 | + | |
| 45 | +@pytest.mark.parametrize('protected_var', PROTECTED_VARIABLES)
 | |
| 46 | +@pytest.mark.datafiles(DATA_DIR)
 | |
| 47 | +def test_use_of_protected_var_element_overrides(cli, tmpdir, datafiles, protected_var):
 | |
| 48 | +    project = os.path.join(str(datafiles), 'simple')
 | |
| 49 | + | |
| 50 | +    conf = {
 | |
| 51 | +        'name': 'test',
 | |
| 52 | +        'elements': {
 | |
| 53 | +            'manual': {
 | |
| 54 | +                'variables': {
 | |
| 55 | +                    protected_var: 'some-value'
 | |
| 56 | +                }
 | |
| 57 | +            }
 | |
| 58 | +        }
 | |
| 59 | +    }
 | |
| 60 | +    _yaml.dump(conf, os.path.join(project, 'project.conf'))
 | |
| 61 | + | |
| 62 | +    element = {
 | |
| 63 | +        'kind': 'manual',
 | |
| 64 | +        'sources': [
 | |
| 65 | +            {
 | |
| 66 | +                'kind': 'local',
 | |
| 67 | +                'path': 'foo.txt'
 | |
| 68 | +            }
 | |
| 69 | +        ],
 | |
| 70 | +    }
 | |
| 71 | +    _yaml.dump(element, os.path.join(project, 'target.bst'))
 | |
| 72 | + | |
| 73 | +    result = cli.run(project=project, args=['build', 'target.bst'])
 | |
| 74 | +    result.assert_main_error(ErrorDomain.LOAD,
 | |
| 75 | +                             LoadErrorReason.PROTECTED_VARIABLE_REDEFINED)
 | |
| 76 | + | |
| 77 | + | |
| 78 | +@pytest.mark.parametrize('protected_var', PROTECTED_VARIABLES)
 | |
| 79 | +@pytest.mark.datafiles(DATA_DIR)
 | |
| 80 | +def test_use_of_protected_var_in_element(cli, tmpdir, datafiles, protected_var):
 | |
| 81 | +    project = os.path.join(str(datafiles), 'simple')
 | |
| 82 | + | |
| 83 | +    element = {
 | |
| 84 | +        'kind': 'import',
 | |
| 85 | +        'sources': [
 | |
| 86 | +            {
 | |
| 87 | +                'kind': 'local',
 | |
| 88 | +                'path': 'foo.txt'
 | |
| 89 | +            }
 | |
| 90 | +        ],
 | |
| 91 | +        'variables': {
 | |
| 92 | +            protected_var: 'some-value'
 | |
| 93 | +        }
 | |
| 94 | +    }
 | |
| 95 | +    _yaml.dump(element, os.path.join(project, 'target.bst'))
 | |
| 96 | + | |
| 97 | +    result = cli.run(project=project, args=['build', 'target.bst'])
 | |
| 98 | +    result.assert_main_error(ErrorDomain.LOAD,
 | |
| 99 | +                             LoadErrorReason.PROTECTED_VARIABLE_REDEFINED) | 
| 1 | +foo | 
| 1 | +name: foo | 
| ... | ... | @@ -26,24 +26,30 @@ class Git(Repo): | 
| 26 | 26 |  | 
| 27 | 27 |          super(Git, self).__init__(directory, subdir)
 | 
| 28 | 28 |  | 
| 29 | +    def _run_git(self, *args, **kwargs):
 | |
| 30 | +        argv = ['git']
 | |
| 31 | +        argv.extend(args)
 | |
| 32 | +        if 'env' not in kwargs:
 | |
| 33 | +            kwargs['env'] = dict(GIT_ENV, PWD=self.repo)
 | |
| 34 | +        kwargs.setdefault('cwd', self.repo)
 | |
| 35 | +        kwargs.setdefault('check', True)
 | |
| 36 | +        return subprocess.run(argv, **kwargs)
 | |
| 37 | + | |
| 29 | 38 |      def create(self, directory):
 | 
| 30 | 39 |          self.copy_directory(directory, self.repo)
 | 
| 31 | -        subprocess.call(['git', 'init', '.'], env=GIT_ENV, cwd=self.repo)
 | |
| 32 | -        subprocess.call(['git', 'add', '.'], env=GIT_ENV, cwd=self.repo)
 | |
| 33 | -        subprocess.call(['git', 'commit', '-m', 'Initial commit'], env=GIT_ENV, cwd=self.repo)
 | |
| 40 | +        self._run_git('init', '.')
 | |
| 41 | +        self._run_git('add', '.')
 | |
| 42 | +        self._run_git('commit', '-m', 'Initial commit')
 | |
| 34 | 43 |          return self.latest_commit()
 | 
| 35 | 44 |  | 
| 36 | 45 |      def add_commit(self):
 | 
| 37 | -        subprocess.call(['git', 'commit', '--allow-empty', '-m', 'Additional commit'],
 | |
| 38 | -                        env=GIT_ENV, cwd=self.repo)
 | |
| 46 | +        self._run_git('commit', '--allow-empty', '-m', 'Additional commit')
 | |
| 39 | 47 |          return self.latest_commit()
 | 
| 40 | 48 |  | 
| 41 | 49 |      def add_file(self, filename):
 | 
| 42 | 50 |          shutil.copy(filename, self.repo)
 | 
| 43 | -        subprocess.call(['git', 'add', os.path.basename(filename)], env=GIT_ENV, cwd=self.repo)
 | |
| 44 | -        subprocess.call([
 | |
| 45 | -            'git', 'commit', '-m', 'Added {}'.format(os.path.basename(filename))
 | |
| 46 | -        ], env=GIT_ENV, cwd=self.repo)
 | |
| 51 | +        self._run_git('add', os.path.basename(filename))
 | |
| 52 | +        self._run_git('commit', '-m', 'Added {}'.format(os.path.basename(filename)))
 | |
| 47 | 53 |          return self.latest_commit()
 | 
| 48 | 54 |  | 
| 49 | 55 |      def add_submodule(self, subdir, url=None, checkout=None):
 | 
| ... | ... | @@ -53,8 +59,8 @@ class Git(Repo): | 
| 53 | 59 |          if url is not None:
 | 
| 54 | 60 |              submodule['url'] = url
 | 
| 55 | 61 |          self.submodules[subdir] = submodule
 | 
| 56 | -        subprocess.call(['git', 'submodule', 'add', url, subdir], env=GIT_ENV, cwd=self.repo)
 | |
| 57 | -        subprocess.call(['git', 'commit', '-m', 'Added the submodule'], env=GIT_ENV, cwd=self.repo)
 | |
| 62 | +        self._run_git('submodule', 'add', url, subdir)
 | |
| 63 | +        self._run_git('commit', '-m', 'Added the submodule')
 | |
| 58 | 64 |          return self.latest_commit()
 | 
| 59 | 65 |  | 
| 60 | 66 |      def source_config(self, ref=None, checkout_submodules=None):
 | 
| ... | ... | @@ -74,10 +80,8 @@ class Git(Repo): | 
| 74 | 80 |          return config
 | 
| 75 | 81 |  | 
| 76 | 82 |      def latest_commit(self):
 | 
| 77 | -        output = subprocess.check_output([
 | |
| 78 | -            'git', 'rev-parse', 'master'
 | |
| 79 | -        ], env=GIT_ENV, cwd=self.repo)
 | |
| 83 | +        output = self._run_git('rev-parse', 'master', stdout=subprocess.PIPE).stdout
 | |
| 80 | 84 |          return output.decode('UTF-8').strip()
 | 
| 81 | 85 |  | 
| 82 | 86 |      def branch(self, branch_name):
 | 
| 83 | -        subprocess.call(['git', 'checkout', '-b', branch_name], env=GIT_ENV, cwd=self.repo) | |
| 87 | +        self._run_git('checkout', '-b', branch_name) | 
