Santiago Gil pushed to branch santigl/104-platform-matching at BuildGrid / buildgrid
Commits:
-
3fd9add5
by Jeremiah Bonney at 2019-02-07T17:01:49Z
-
5c10c0ad
by Jeremiah Bonney at 2019-02-07T17:01:52Z
-
af8a5816
by Martin Blanchard at 2019-02-07T17:33:44Z
-
fb812017
by Martin Blanchard at 2019-02-07T17:40:11Z
-
ee5542ab
by Martin Blanchard at 2019-02-07T17:40:22Z
-
3ad1d461
by Martin Blanchard at 2019-02-07T17:40:22Z
-
710da565
by Martin Blanchard at 2019-02-07T17:40:22Z
-
b7355f32
by Martin Blanchard at 2019-02-07T17:40:22Z
-
9717831a
by Santiago Gil at 2019-02-13T10:56:10Z
-
1d5f956a
by Santiago Gil at 2019-02-13T10:56:10Z
19 changed files:
- buildgrid/_app/cli.py
- + buildgrid/_app/commands/cmd_actioncache.py
- buildgrid/_app/commands/cmd_bot.py
- buildgrid/_app/commands/cmd_cas.py
- buildgrid/_app/commands/cmd_execute.py
- buildgrid/_app/settings/parser.py
- buildgrid/_app/settings/reference.yml
- + buildgrid/client/actioncache.py
- buildgrid/server/actioncache/storage.py
- buildgrid/server/bots/instance.py
- buildgrid/server/execution/instance.py
- buildgrid/server/job.py
- buildgrid/server/scheduler.py
- buildgrid/utils.py
- docs/source/reference_cli.rst
- tests/integration/action_cache_service.py
- tests/integration/bots_service.py
- tests/integration/execution_service.py
- tests/integration/operations_service.py
Changes:
| ... | ... | @@ -21,6 +21,7 @@ Any files in the commands/ folder with the name cmd_*.py |
| 21 | 21 |
will be attempted to be imported.
|
| 22 | 22 |
"""
|
| 23 | 23 |
|
| 24 |
+import importlib
|
|
| 24 | 25 |
import logging
|
| 25 | 26 |
import os
|
| 26 | 27 |
import sys
|
| ... | ... | @@ -123,21 +124,30 @@ cmd_folder = os.path.abspath(os.path.join(os.path.dirname(__file__), |
| 123 | 124 |
'commands'))
|
| 124 | 125 |
|
| 125 | 126 |
|
| 126 |
-class BuildGridCLI(click.MultiCommand):
|
|
| 127 |
+class App(click.MultiCommand):
|
|
| 127 | 128 |
|
| 128 | 129 |
def list_commands(self, context):
|
| 130 |
+ """Lists available command names."""
|
|
| 129 | 131 |
commands = []
|
| 130 | 132 |
for filename in os.listdir(cmd_folder):
|
| 131 |
- if filename.endswith('.py') and \
|
|
| 132 |
- filename.startswith('cmd_'):
|
|
| 133 |
+ if filename.endswith('.py') and filename.startswith('cmd_'):
|
|
| 133 | 134 |
commands.append(filename[4:-3])
|
| 134 | 135 |
commands.sort()
|
| 136 |
+ |
|
| 135 | 137 |
return commands
|
| 136 | 138 |
|
| 137 |
- def get_command(self, context, name):
|
|
| 138 |
- mod = __import__(name='buildgrid._app.commands.cmd_{}'.format(name),
|
|
| 139 |
- fromlist=['cli'])
|
|
| 140 |
- return mod.cli
|
|
| 139 |
+ def get_command(self, context, command_name):
|
|
| 140 |
+ """Looks-up and loads a particular command by name."""
|
|
| 141 |
+ command_name = command_name.replace('-', '')
|
|
| 142 |
+ try:
|
|
| 143 |
+ module = importlib.import_module(
|
|
| 144 |
+ 'buildgrid._app.commands.cmd_{}'.format(command_name))
|
|
| 145 |
+ |
|
| 146 |
+ except ImportError:
|
|
| 147 |
+ click.echo("Error: No such command: [{}].".format(command_name), err=True)
|
|
| 148 |
+ sys.exit(-1)
|
|
| 149 |
+ |
|
| 150 |
+ return module.cli
|
|
| 141 | 151 |
|
| 142 | 152 |
|
| 143 | 153 |
class DebugFilter(logging.Filter):
|
| ... | ... | @@ -192,10 +202,10 @@ def setup_logging(verbosity=0, debug_mode=False): |
| 192 | 202 |
root_logger.setLevel(logging.DEBUG)
|
| 193 | 203 |
|
| 194 | 204 |
|
| 195 |
-@click.command(cls=BuildGridCLI, context_settings=CONTEXT_SETTINGS)
|
|
| 205 |
+@click.command(cls=App, context_settings=CONTEXT_SETTINGS)
|
|
| 196 | 206 |
@pass_context
|
| 197 | 207 |
def cli(context):
|
| 198 |
- """BuildGrid App"""
|
|
| 208 |
+ """BuildGrid's client and server CLI front-end."""
|
|
| 199 | 209 |
root_logger = logging.getLogger()
|
| 200 | 210 |
|
| 201 | 211 |
# Clean-up root logger for any pre-configuration:
|
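For context, the renamed `App` class is click's standard lazy-loading `MultiCommand` pattern: each sub-command lives in its own `cmd_*.py` module and is only imported when invoked. A minimal standalone sketch of the same idea (the `myapp` package name is illustrative, not BuildGrid's):

    import importlib
    import os
    import sys

    import click

    # Folder holding one cmd_<name>.py module per sub-command (illustrative):
    cmd_folder = os.path.abspath(os.path.join(os.path.dirname(__file__), 'commands'))


    class App(click.MultiCommand):

        def list_commands(self, context):
            """Lists available command names, e.g. 'cmd_bot.py' -> 'bot'."""
            return sorted(filename[4:-3] for filename in os.listdir(cmd_folder)
                          if filename.startswith('cmd_') and filename.endswith('.py'))

        def get_command(self, context, command_name):
            """Loads cmd_<name>.py on demand and returns its 'cli' entry-point."""
            # 'action-cache' on the command line maps onto cmd_actioncache.py:
            command_name = command_name.replace('-', '')
            try:
                module = importlib.import_module(
                    'myapp.commands.cmd_{}'.format(command_name))
            except ImportError:
                click.echo("Error: No such command: [{}].".format(command_name), err=True)
                sys.exit(-1)
            return module.cli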
| 1 |
+# Copyright (C) 2019 Bloomberg LP
|
|
| 2 |
+#
|
|
| 3 |
+# Licensed under the Apache License, Version 2.0 (the "License");
|
|
| 4 |
+# you may not use this file except in compliance with the License.
|
|
| 5 |
+# You may obtain a copy of the License at
|
|
| 6 |
+#
|
|
| 7 |
+# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
| 8 |
+#
|
|
| 9 |
+# Unless required by applicable law or agreed to in writing, software
|
|
| 10 |
+# distributed under the License is distributed on an "AS IS" BASIS,
|
|
| 11 |
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
| 12 |
+# See the License for the specific language governing permissions and
|
|
| 13 |
+# limitations under the License.
|
|
| 14 |
+ |
|
| 15 |
+ |
|
| 16 |
+import os
|
|
| 17 |
+import sys
|
|
| 18 |
+from textwrap import indent
|
|
| 19 |
+ |
|
| 20 |
+import click
|
|
| 21 |
+from google.protobuf import json_format
|
|
| 22 |
+ |
|
| 23 |
+from buildgrid.client.actioncache import query
|
|
| 24 |
+from buildgrid.client.authentication import setup_channel
|
|
| 25 |
+from buildgrid.client.cas import download
|
|
| 26 |
+from buildgrid._exceptions import InvalidArgumentError
|
|
| 26 |
+from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
|
| 27 |
+from buildgrid.utils import create_digest, parse_digest
|
|
| 28 |
+ |
|
| 29 |
+from ..cli import pass_context
|
|
| 30 |
+ |
|
| 31 |
+ |
|
| 32 |
+@click.group(name='action-cache', short_help="Query and update the action cache service.")
|
|
| 33 |
+@click.option('--remote', type=click.STRING, default='http://localhost:50051', show_default=True,
|
|
| 34 |
+ help="Remote execution server's URL (port defaults to 50051 if no specified).")
|
|
| 35 |
+@click.option('--auth-token', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
| 36 |
+ help="Authorization token for the remote.")
|
|
| 37 |
+@click.option('--client-key', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
| 38 |
+ help="Private client key for TLS (PEM-encoded).")
|
|
| 39 |
+@click.option('--client-cert', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
| 40 |
+ help="Public client certificate for TLS (PEM-encoded).")
|
|
| 41 |
+@click.option('--server-cert', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
| 42 |
+ help="Public server certificate for TLS (PEM-encoded)")
|
|
| 43 |
+@click.option('--instance-name', type=click.STRING, default=None, show_default=True,
|
|
| 44 |
+ help="Targeted farm instance name.")
|
|
| 45 |
+@pass_context
|
|
| 46 |
+def cli(context, remote, instance_name, auth_token, client_key, client_cert, server_cert):
|
|
| 47 |
+ """Entry-point for the ``bgd action-cache`` CLI command group."""
|
|
| 48 |
+ try:
|
|
| 49 |
+ context.channel, _ = setup_channel(remote, auth_token=auth_token,
|
|
| 50 |
+ client_key=client_key, client_cert=client_cert,
|
|
| 51 |
+ server_cert=server_cert)
|
|
| 52 |
+ |
|
| 53 |
+ except InvalidArgumentError as e:
|
|
| 54 |
+ click.echo("Error: {}.".format(e), err=True)
|
|
| 55 |
+ sys.exit(-1)
|
|
| 56 |
+ |
|
| 57 |
+ context.instance_name = instance_name
|
|
| 58 |
+ |
|
| 59 |
+ |
|
| 60 |
+@cli.command('get', short_help="Retrieves a cached action-result.")
|
|
| 61 |
+@click.argument('action-digest-string', nargs=1, type=click.STRING, required=True)
|
|
| 62 |
+@click.option('--json', is_flag=True, show_default=True,
|
|
| 63 |
+ help="Print action result in JSON format.")
|
|
| 64 |
+@pass_context
|
|
| 65 |
+def get(context, action_digest_string, json):
|
|
| 66 |
+ """Entry-point of the ``bgd action-cache get`` CLI command.
|
|
| 67 |
+ |
|
| 68 |
+ Note:
|
|
| 69 |
+ Digest strings are expected to be like: ``{hash}/{size_bytes}``.
|
|
| 70 |
+ """
|
|
| 71 |
+ action_digest = parse_digest(action_digest_string)
|
|
| 72 |
+ if action_digest is None:
|
|
| 73 |
+ click.echo("Error: Invalid digest string '{}'.".format(action_digest_string), err=True)
|
|
| 74 |
+ sys.exit(-1)
|
|
| 75 |
+ |
|
| 76 |
+ # Simply hit the action cache with the given action digest:
|
|
| 77 |
+ with query(context.channel, instance=context.instance_name) as action_cache:
|
|
| 78 |
+ action_result = action_cache.get(action_digest)
|
|
| 79 |
+ |
|
| 80 |
+ if action_result is not None:
|
|
| 81 |
+ if not json:
|
|
| 82 |
+ action_result_digest = create_digest(action_result.SerializeToString())
|
|
| 83 |
+ |
|
| 84 |
+ click.echo("Hit: {}/{}: Result cached with digest=[{}/{}]"
|
|
| 85 |
+ .format(action_digest.hash[:8], action_digest.size_bytes,
|
|
| 86 |
+ action_result_digest.hash, action_result_digest.size_bytes))
|
|
| 87 |
+ |
|
| 88 |
+ # TODO: Print ActionResult details?
|
|
| 89 |
+ |
|
| 90 |
+ else:
|
|
| 91 |
+ click.echo(json_format.MessageToJson(action_result))
|
|
| 92 |
+ |
|
| 93 |
+ else:
|
|
| 94 |
+ click.echo("Miss: {}/{}: No associated result found in cache..."
|
|
| 95 |
+ .format(action_digest.hash[:8], action_digest.size_bytes))
|
|
| 96 |
+ |
|
| 97 |
+ |
|
| 98 |
+@cli.command('update', short_help="Maps an action to a given action-result.")
|
|
| 99 |
+@click.argument('action-digest-string', nargs=1, type=click.STRING, required=True)
|
|
| 100 |
+@click.argument('action-result-digest-string', nargs=1, type=click.STRING, required=True)
|
|
| 101 |
+@pass_context
|
|
| 102 |
+def update(context, action_digest_string, action_result_digest_string):
|
|
| 103 |
+ """Entry-point of the ``bgd action-cache update`` CLI command.
|
|
| 104 |
+ |
|
| 105 |
+ Note:
|
|
| 106 |
+ Digest strings are expected to be like: ``{hash}/{size_bytes}``.
|
|
| 107 |
+ """
|
|
| 108 |
+ action_digest = parse_digest(action_digest_string)
|
|
| 109 |
+ if action_digest is None:
|
|
| 110 |
+ click.echo("Error: Invalid digest string '{}'.".format(action_digest_string), err=True)
|
|
| 111 |
+ sys.exit(-1)
|
|
| 112 |
+ |
|
| 113 |
+ action_result_digest = parse_digest(action_result_digest_string)
|
|
| 114 |
+ if action_result_digest is None:
|
|
| 115 |
+ click.echo("Error: Invalid digest string '{}'.".format(action_result_digest_string), err=True)
|
|
| 116 |
+ sys.exit(-1)
|
|
| 117 |
+ |
|
| 118 |
+ # We have to download the ActionResult message from CAS first...
|
|
| 119 |
+ with download(context.channel, instance=context.instance_name) as downloader:
|
|
| 120 |
+ action_result = downloader.get_message(action_result_digest,
|
|
| 121 |
+ remote_execution_pb2.ActionResult())
|
|
| 122 |
+ |
|
| 123 |
+ # And only then can we update the action cache for the given digest:
|
|
| 124 |
+ with query(context.channel, instance=context.instance_name) as action_cache:
|
|
| 125 |
+ action_result = action_cache.update(action_digest, action_result)
|
|
| 126 |
+ |
|
| 127 |
+ if action_result is None:
|
|
| 128 |
+ click.echo("Error: Failed updating cache result for action="">
|
|
| 129 |
+ .format(action_digest.hash, action_digest.size_bytes), err=True)
|
|
| 130 |
+ sys.exit(-1)
|
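The digest-string arguments feed straight into the new client helper added below in buildgrid/client/actioncache.py. A rough sketch of the same path the `get` command takes (the endpoint and instance name are assumptions, and the digest shown is just the SHA-256 of the empty blob):

    import grpc

    from buildgrid.client.actioncache import query
    from buildgrid.utils import parse_digest

    channel = grpc.insecure_channel('localhost:50051')  # assumed endpoint

    # '{hash}/{size_bytes}' form; this is the SHA-256 of the empty blob:
    action_digest = parse_digest(
        'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855/0')

    with query(channel, instance='main') as action_cache:
        action_result = action_cache.get(action_digest)  # None on cache miss
        if action_result is not None:
            print(action_result.exit_code)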
| ... | ... | @@ -59,11 +59,14 @@ from ..cli import pass_context, setup_logging |
| 59 | 59 |
help="Time period for bot updates to the server in seconds.")
|
| 60 | 60 |
@click.option('--parent', type=click.STRING, default=None, show_default=True,
|
| 61 | 61 |
help="Targeted farm resource.")
|
| 62 |
+@click.option('-w', '--worker-property', nargs=2, type=(click.STRING, click.STRING), multiple=True,
|
|
| 63 |
+ help="List of key-value pairs of worker properties.")
|
|
| 62 | 64 |
@click.option('-v', '--verbose', count=True,
|
| 63 | 65 |
help='Increase log verbosity level.')
|
| 64 | 66 |
@pass_context
|
| 65 |
-def cli(context, parent, update_period, remote, auth_token, client_key, client_cert, server_cert,
|
|
| 66 |
- remote_cas, cas_client_key, cas_client_cert, cas_server_cert, verbose):
|
|
| 67 |
+def cli(context, parent, update_period, remote, auth_token, client_key,
|
|
| 68 |
+ client_cert, server_cert, remote_cas, cas_client_key, cas_client_cert,
|
|
| 69 |
+ cas_server_cert, worker_property, verbose):
|
|
| 67 | 70 |
setup_logging(verbosity=verbose)
|
| 68 | 71 |
# Setup the remote execution server channel:
|
| 69 | 72 |
try:
|
| ... | ... | @@ -90,8 +93,14 @@ def cli(context, parent, update_period, remote, auth_token, client_key, client_c |
| 90 | 93 |
|
| 91 | 94 |
bot_interface = interface.BotInterface(context.channel)
|
| 92 | 95 |
|
| 96 |
+ worker_properties_dict = {}
|
|
| 97 |
+ for property_name, property_value in worker_property:
|
|
| 98 |
+ if property_name not in worker_properties_dict:
|
|
| 99 |
+ worker_properties_dict[property_name] = set()
|
|
| 100 |
+ worker_properties_dict[property_name].add(property_value)
|
|
| 101 |
+ |
|
| 93 | 102 |
worker = Worker()
|
| 94 |
- worker.add_device(Device())
|
|
| 103 |
+ worker.add_device(Device(properties=worker_properties_dict))
|
|
| 95 | 104 |
hardware_interface = HardwareInterface(worker)
|
| 96 | 105 |
|
| 97 | 106 |
context.bot_interface = bot_interface
|
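Repeated `--worker-property` pairs with the same key merge into a single multi-valued capability. The accumulation reduces to this pattern (property names and values are examples):

    # click delivers repeated '-w KEY VALUE' options as a tuple of pairs:
    worker_property = (('os', 'linux'), ('isa', 'x86-64'), ('isa', 'x86-32'))

    worker_properties_dict = {}
    for property_name, property_value in worker_property:
        # Repeated keys accumulate into a set of values:
        worker_properties_dict.setdefault(property_name, set()).add(property_value)

    assert worker_properties_dict == {'os': {'linux'}, 'isa': {'x86-64', 'x86-32'}}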
| ... | ... | @@ -29,7 +29,7 @@ from buildgrid.client.authentication import setup_channel |
| 29 | 29 |
from buildgrid.client.cas import download, upload
|
| 30 | 30 |
from buildgrid._exceptions import InvalidArgumentError
|
| 31 | 31 |
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
| 32 |
-from buildgrid.utils import create_digest, merkle_tree_maker, read_file
|
|
| 32 |
+from buildgrid.utils import create_digest, parse_digest, merkle_tree_maker, read_file
|
|
| 33 | 33 |
|
| 34 | 34 |
from ..cli import pass_context
|
| 35 | 35 |
|
| ... | ... | @@ -65,15 +65,27 @@ def cli(context, remote, instance_name, auth_token, client_key, client_cert, ser |
| 65 | 65 |
@cli.command('upload-dummy', short_help="Upload a dummy action. Should be used with `execute dummy-request`")
|
| 66 | 66 |
@pass_context
|
| 67 | 67 |
def upload_dummy(context):
|
| 68 |
- action = remote_execution_pb2.Action(do_not_cache=True)
|
|
| 68 |
+ command = remote_execution_pb2.Command()
|
|
| 69 |
+ with upload(context.channel, instance=context.instance_name) as uploader:
|
|
| 70 |
+ command_digest = uploader.put_message(command)
|
|
| 71 |
+ |
|
| 72 |
+ if command_digest.ByteSize():
|
|
| 73 |
+ click.echo('Success: Pushed Command, digest=["{}/{}]"'
|
|
| 74 |
+ .format(command_digest.hash, command_digest.size_bytes))
|
|
| 75 |
+ else:
|
|
| 76 |
+ click.echo("Error: Failed pushing empty Command.", err=True)
|
|
| 77 |
+ |
|
| 78 |
+ action = remote_execution_pb2.Action(command_digest=command_digest,
|
|
| 79 |
+ do_not_cache=True)
|
|
| 80 |
+ |
|
| 69 | 81 |
with upload(context.channel, instance=context.instance_name) as uploader:
|
| 70 | 82 |
action_digest = uploader.put_message(action)
|
| 71 | 83 |
|
| 72 | 84 |
if action_digest.ByteSize():
|
| 73 |
- click.echo('Success: Pushed digest=["{}/{}]"'
|
|
| 85 |
+ click.echo('Success: Pushed Action, digest=["{}/{}]"'
|
|
| 74 | 86 |
.format(action_digest.hash, action_digest.size_bytes))
|
| 75 | 87 |
else:
|
| 76 |
- click.echo("Error: Failed pushing empty message.", err=True)
|
|
| 88 |
+ click.echo("Error: Failed pushing empty Action.", err=True)
|
|
| 77 | 89 |
|
| 78 | 90 |
|
| 79 | 91 |
@cli.command('upload-file', short_help="Upload files to the CAS server.")
|
| ... | ... | @@ -131,16 +143,6 @@ def upload_directory(context, directory_path, verify): |
| 131 | 143 |
click.echo("Error: Failed pushing path=[{}]".format(node_path), err=True)
|
| 132 | 144 |
|
| 133 | 145 |
|
| 134 |
-def _create_digest(digest_string):
|
|
| 135 |
- digest_hash, digest_size = digest_string.split('/')
|
|
| 136 |
- |
|
| 137 |
- digest = remote_execution_pb2.Digest()
|
|
| 138 |
- digest.hash = digest_hash
|
|
| 139 |
- digest.size_bytes = int(digest_size)
|
|
| 140 |
- |
|
| 141 |
- return digest
|
|
| 142 |
- |
|
| 143 |
- |
|
| 144 | 146 |
@cli.command('download-file', short_help="Download one or more files from the CAS server. "
|
| 145 | 147 |
"(Specified as a space-separated list of DIGEST FILE_PATH)")
|
| 146 | 148 |
@click.argument('digest-path-list', nargs=-1, type=str, required=True) # 'digest path' pairs
|
| ... | ... | @@ -158,7 +160,7 @@ def download_file(context, digest_path_list, verify): |
| 158 | 160 |
"path=[{}] already exists.".format(file_path), err=True)
|
| 159 | 161 |
continue
|
| 160 | 162 |
|
| 161 |
- digest = _create_digest(digest_string)
|
|
| 163 |
+ digest = parse_digest(digest_string)
|
|
| 162 | 164 |
|
| 163 | 165 |
downloader.download_file(digest, file_path)
|
| 164 | 166 |
downloaded_files[file_path] = digest
|
| ... | ... | @@ -191,7 +193,7 @@ def download_directory(context, digest_string, directory_path, verify): |
| 191 | 193 |
"path=[{}] already exists.".format(directory_path), err=True)
|
| 192 | 194 |
return
|
| 193 | 195 |
|
| 194 |
- digest = _create_digest(digest_string)
|
|
| 196 |
+ digest = parse_digest(digest_string)
|
|
| 195 | 197 |
with download(context.channel, instance=context.instance_name) as downloader:
|
| 196 | 198 |
downloader.download_directory(digest, directory_path)
|
| 197 | 199 |
|
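The upload-dummy change anticipates the server-side requirement introduced further down (the execution instance now fetches the Command behind every Action): a dummy Action must reference a real Command blob in CAS. Condensed, the two-step upload looks like this (endpoint and instance are placeholders):

    import grpc

    from buildgrid.client.cas import upload
    from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2

    channel = grpc.insecure_channel('localhost:50051')  # assumed endpoint

    with upload(channel, instance=None) as uploader:
        # Push an empty Command first so the server can fetch it back...
        command_digest = uploader.put_message(remote_execution_pb2.Command())

        # ...then a dummy Action that references it:
        action = remote_execution_pb2.Action(command_digest=command_digest,
                                             do_not_cache=True)
        action_digest = uploader.put_message(action)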
| ... | ... | @@ -72,7 +72,11 @@ def cli(context, remote, instance_name, auth_token, client_key, client_cert, ser |
| 72 | 72 |
def request_dummy(context, number, wait_for_completion):
|
| 73 | 73 |
|
| 74 | 74 |
click.echo("Sending execution request...")
|
| 75 |
- action = remote_execution_pb2.Action(do_not_cache=True)
|
|
| 75 |
+ command = remote_execution_pb2.Command()
|
|
| 76 |
+ command_digest = create_digest(command.SerializeToString())
|
|
| 77 |
+ |
|
| 78 |
+ action = remote_execution_pb2.Action(command_digest=command_digest,
|
|
| 79 |
+ do_not_cache=True)
|
|
| 76 | 80 |
action_digest = create_digest(action.SerializeToString())
|
| 77 | 81 |
|
| 78 | 82 |
stub = remote_execution_pb2_grpc.ExecutionStub(context.channel)
|
| ... | ... | @@ -107,23 +111,31 @@ def request_dummy(context, number, wait_for_completion): |
| 107 | 111 |
help="Tuple of expected output file and is-executeable flag.")
|
| 108 | 112 |
@click.option('--output-directory', default='testing', show_default=True,
|
| 109 | 113 |
help="Output directory for the output files.")
|
| 114 |
+@click.option('-p', '--platform-property', nargs=2, type=(click.STRING, click.STRING), multiple=True,
|
|
| 115 |
+ help="List of key-value pairs of required platform properties.")
|
|
| 110 | 116 |
@click.argument('input-root', nargs=1, type=click.Path(), required=True)
|
| 111 | 117 |
@click.argument('commands', nargs=-1, type=click.STRING, required=True)
|
| 112 | 118 |
@pass_context
|
| 113 |
-def run_command(context, input_root, commands, output_file, output_directory):
|
|
| 119 |
+def run_command(context, input_root, commands, output_file, output_directory,
|
|
| 120 |
+ platform_property):
|
|
| 114 | 121 |
stub = remote_execution_pb2_grpc.ExecutionStub(context.channel)
|
| 115 | 122 |
|
| 116 |
- output_executeables = []
|
|
| 123 |
+ output_executables = []
|
|
| 117 | 124 |
with upload(context.channel, instance=context.instance_name) as uploader:
|
| 118 | 125 |
command = remote_execution_pb2.Command()
|
| 119 | 126 |
|
| 120 | 127 |
for arg in commands:
|
| 121 | 128 |
command.arguments.extend([arg])
|
| 122 | 129 |
|
| 123 |
- for file, is_executeable in output_file:
|
|
| 130 |
+ for file, is_executable in output_file:
|
|
| 124 | 131 |
command.output_files.extend([file])
|
| 125 |
- if is_executeable:
|
|
| 126 |
- output_executeables.append(file)
|
|
| 132 |
+ if is_executable:
|
|
| 133 |
+ output_executables.append(file)
|
|
| 134 |
+ |
|
| 135 |
+ for attribute_name, attribute_value in platform_property:
|
|
| 136 |
+ new_property = command.platform.properties.add()
|
|
| 137 |
+ new_property.name = attribute_name
|
|
| 138 |
+ new_property.value = attribute_value
|
|
| 127 | 139 |
|
| 128 | 140 |
command_digest = uploader.put_message(command, queue=True)
|
| 129 | 141 |
|
| ... | ... | @@ -165,6 +177,6 @@ def run_command(context, input_root, commands, output_file, output_directory): |
| 165 | 177 |
downloader.download_file(output_file_response.digest, path)
|
| 166 | 178 |
|
| 167 | 179 |
for output_file_response in execute_response.result.output_files:
|
| 168 |
- if output_file_response.path in output_executeables:
|
|
| 180 |
+ if output_file_response.path in output_executables:
|
|
| 169 | 181 |
st = os.stat(path)
|
| 170 | 182 |
os.chmod(path, st.st_mode | stat.S_IXUSR)
|
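Each `--platform-property` pair becomes a `Platform.Property` entry on the Command message. In isolation the construction is (values are examples):

    from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2

    command = remote_execution_pb2.Command(arguments=['uname', '-a'])

    # Repeated '-p KEY VALUE' options become Platform.Property entries:
    for attribute_name, attribute_value in (('os', 'linux'), ('isa', 'x86-64')):
        new_property = command.platform.properties.add()
        new_property.name = attribute_name
        new_property.value = attribute_value

    # The scheduler later reads these back as the job's requirements.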
| ... | ... | @@ -247,12 +247,13 @@ class Action(YamlFactory): |
| 247 | 247 |
storage(:class:`buildgrid.server.cas.storage.storage_abc.StorageABC`): Instance of storage to use.
|
| 248 | 248 |
max_cached_refs(int): Max number of cached actions.
|
| 249 | 249 |
allow_updates(bool): Allow updates pushed to CAS. Defaults to ``True``.
|
| 250 |
+ cache_failed_actions(bool): Whether to store failed (non-zero exit code) actions. Defaults to ``True``.
|
|
| 250 | 251 |
"""
|
| 251 | 252 |
|
| 252 | 253 |
yaml_tag = u'!action-cache'
|
| 253 | 254 |
|
| 254 |
- def __new__(cls, storage, max_cached_refs, allow_updates=True):
|
|
| 255 |
- return ActionCache(storage, max_cached_refs, allow_updates)
|
|
| 255 |
+ def __new__(cls, storage, max_cached_refs, allow_updates=True, cache_failed_actions=True):
|
|
| 256 |
+ return ActionCache(storage, max_cached_refs, allow_updates, cache_failed_actions)
|
|
| 256 | 257 |
|
| 257 | 258 |
|
| 258 | 259 |
class Reference(YamlFactory):
|
| ... | ... | @@ -74,6 +74,9 @@ instances: |
| 74 | 74 |
##
|
| 75 | 75 |
# Whether or not writing to the cache is allowed.
|
| 76 | 76 |
allow-updates: true
|
| 77 |
+ ##
|
|
| 78 |
+ # Whether failed actions (non-zero exit code) are stored.
|
|
| 79 |
+ cache-failed-actions: true
|
|
| 77 | 80 |
|
| 78 | 81 |
- !execution
|
| 79 | 82 |
##
|
| 1 |
+# Copyright (C) 2019 Bloomberg LP
|
|
| 2 |
+#
|
|
| 3 |
+# Licensed under the Apache License, Version 2.0 (the "License");
|
|
| 4 |
+# you may not use this file except in compliance with the License.
|
|
| 5 |
+# You may obtain a copy of the License at
|
|
| 6 |
+#
|
|
| 7 |
+# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
| 8 |
+#
|
|
| 9 |
+# Unless required by applicable law or agreed to in writing, software
|
|
| 10 |
+# distributed under the License is distributed on an "AS IS" BASIS,
|
|
| 11 |
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
| 12 |
+# See the License for the specific language governing permissions and
|
|
| 13 |
+# limitations under the License.
|
|
| 14 |
+ |
|
| 15 |
+ |
|
| 16 |
+from contextlib import contextmanager
|
|
| 17 |
+ |
|
| 18 |
+import grpc
|
|
| 19 |
+ |
|
| 20 |
+from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
|
|
| 21 |
+ |
|
| 22 |
+ |
|
| 23 |
+@contextmanager
|
|
| 24 |
+def query(channel, instance=None):
|
|
| 25 |
+ """Context manager generator for the :class:`ActionCacheClient` class."""
|
|
| 26 |
+ client = ActionCacheClient(channel, instance=instance)
|
|
| 27 |
+ try:
|
|
| 28 |
+ yield client
|
|
| 29 |
+ finally:
|
|
| 30 |
+ client.close()
|
|
| 31 |
+ |
|
| 32 |
+ |
|
| 33 |
+class ActionCacheClient:
|
|
| 34 |
+ """Remote ActionCache service client helper.
|
|
| 35 |
+ |
|
| 36 |
+ The :class:`ActionCacheClient` class comes with a generator factory function
|
|
| 37 |
+ that can be used together with the `with` statement for context management::
|
|
| 38 |
+ |
|
| 39 |
+ from buildgrid.client.actioncache import query
|
|
| 40 |
+ |
|
| 41 |
+ with query(channel, instance='build') as action_cache:
|
|
| 42 |
+ action_result = action_cache.get(action_digest)
|
|
| 43 |
+ """
|
|
| 44 |
+ |
|
| 45 |
+ def __init__(self, channel, instance=None):
|
|
| 46 |
+ """Initializes a new :class:`ActionCacheClient` instance.
|
|
| 47 |
+ |
|
| 48 |
+ Args:
|
|
| 49 |
+ channel (grpc.Channel): a gRPC channel to the ActionCache endpoint.
|
|
| 50 |
+ instance (str, optional): the targeted instance's name.
|
|
| 51 |
+ """
|
|
| 52 |
+ self.channel = channel
|
|
| 53 |
+ |
|
| 54 |
+ self.instance_name = instance
|
|
| 55 |
+ |
|
| 56 |
+ self.__actioncache_stub = remote_execution_pb2_grpc.ActionCacheStub(self.channel)
|
|
| 57 |
+ |
|
| 58 |
+ # --- Public API ---
|
|
| 59 |
+ |
|
| 60 |
+ def get(self, action_digest):
|
|
| 61 |
+ """Retrieves the cached :obj:`ActionResult` for a given :obj:`Action`.
|
|
| 62 |
+ |
|
| 63 |
+ Args:
|
|
| 64 |
+ action_digest (:obj:`Digest`): the action's digest to query.
|
|
| 65 |
+ |
|
| 66 |
+ Returns:
|
|
| 67 |
+ :obj:`ActionResult`: the cached result or None if not found.
|
|
| 68 |
+ |
|
| 69 |
+ Raises:
|
|
| 70 |
+ grpc.RpcError: on any network or remote service error.
|
|
| 71 |
+ """
|
|
| 72 |
+ request = remote_execution_pb2.GetActionResultRequest()
|
|
| 73 |
+ if self.instance_name:
|
|
| 74 |
+ request.instance_name = self.instance_name
|
|
| 75 |
+ request.action_digest.CopyFrom(action_digest)
|
|
| 76 |
+ |
|
| 77 |
+ try:
|
|
| 78 |
+ return self.__actioncache_stub.GetActionResult(request)
|
|
| 79 |
+ |
|
| 80 |
+ except grpc.RpcError as e:
|
|
| 81 |
+ status_code = e.code()
|
|
| 82 |
+ if status_code != grpc.StatusCode.NOT_FOUND:
|
|
| 83 |
+ raise
|
|
| 84 |
+ |
|
| 85 |
+ return None
|
|
| 86 |
+ |
|
| 87 |
+ def update(self, action_digest, action_result):
|
|
| 88 |
+ """Maps in cache an :obj:`Action` to an :obj:`ActionResult`.
|
|
| 89 |
+ |
|
| 90 |
+ Args:
|
|
| 91 |
+ action_digest (:obj:`Digest`): the action's digest to update.
|
|
| 92 |
+ action_result (:obj:`ActionResult`): the action's result.
|
|
| 93 |
+ |
|
| 94 |
+ Returns:
|
|
| 95 |
+ :obj:`ActionResult`: the cached result or None on failure.
|
|
| 96 |
+ |
|
| 97 |
+ Raises:
|
|
| 98 |
+ grpc.RpcError: on any network or remote service error.
|
|
| 99 |
+ """
|
|
| 100 |
+ request = remote_execution_pb2.UpdateActionResultRequest()
|
|
| 101 |
+ if self.instance_name:
|
|
| 102 |
+ request.instance_name = self.instance_name
|
|
| 103 |
+ request.action_digest.CopyFrom(action_digest)
|
|
| 104 |
+ request.action_result.CopyFrom(action_result)
|
|
| 105 |
+ |
|
| 106 |
+ try:
|
|
| 107 |
+ return self.__actioncache_stub.UpdateActionResult(request)
|
|
| 108 |
+ |
|
| 109 |
+ except grpc.RpcError as e:
|
|
| 110 |
+ status_code = e.code()
|
|
| 111 |
+ if status_code != grpc.StatusCode.NOT_FOUND:
|
|
| 112 |
+ raise
|
|
| 113 |
+ |
|
| 114 |
+ return None
|
|
| 115 |
+ |
|
| 116 |
+ def close(self):
|
|
| 117 |
+ """Closes the underlying connection stubs."""
|
|
| 118 |
+ self.__actioncache_stub = None
|
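For the symmetric write path, a hedged sketch (the endpoint is assumed, and in a real session the digests would reference messages already stored in CAS):

    import grpc

    from buildgrid.client.actioncache import query
    from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
    from buildgrid.utils import create_digest

    channel = grpc.insecure_channel('localhost:50051')  # assumed endpoint

    action_digest = create_digest(b'placeholder-action')
    action_result = remote_execution_pb2.ActionResult(exit_code=0)

    with query(channel, instance=None) as action_cache:
        # Returns the stored ActionResult, or None if the update was refused:
        cached = action_cache.update(action_digest, action_result)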
| ... | ... | @@ -20,22 +20,50 @@ Action Cache |
| 20 | 20 |
Implements an in-memory action Cache
|
| 21 | 21 |
"""
|
| 22 | 22 |
|
| 23 |
+import logging
|
|
| 23 | 24 |
|
| 24 | 25 |
from ..referencestorage.storage import ReferenceCache
|
| 25 | 26 |
|
| 26 | 27 |
|
| 27 | 28 |
class ActionCache(ReferenceCache):
|
| 28 | 29 |
|
| 30 |
+ def __init__(self, storage, max_cached_refs, allow_updates=True, cache_failed_actions=True):
|
|
| 31 |
+ """ Initialises a new ActionCache instance.
|
|
| 32 |
+ |
|
| 33 |
+ Args:
|
|
| 34 |
+ storage (StorageABC): storage backend instance to be used. Passed to ReferenceCache.
|
|
| 35 |
+ max_cached_refs (int): maximum number of entries to be stored. Passed to ReferenceCache.
|
|
| 36 |
+ allow_updates (bool): allow the client to write to storage. Passed to ReferenceCache.
|
|
| 37 |
+ cache_failed_actions (bool): cache actions with non-zero exit codes.
|
|
| 38 |
+ """
|
|
| 39 |
+ super().__init__(storage, max_cached_refs, allow_updates)
|
|
| 40 |
+ |
|
| 41 |
+ self.__logger = logging.getLogger(__name__)
|
|
| 42 |
+ |
|
| 43 |
+ self._cache_failed_actions = cache_failed_actions
|
|
| 44 |
+ |
|
| 45 |
+ # --- Public API ---
|
|
| 46 |
+ |
|
| 29 | 47 |
def register_instance_with_server(self, instance_name, server):
|
| 30 | 48 |
server.add_action_cache_instance(self, instance_name)
|
| 31 | 49 |
|
| 32 | 50 |
def get_action_result(self, action_digest):
|
| 51 |
+ """Retrieves the cached result for an action."""
|
|
| 33 | 52 |
key = self._get_key(action_digest)
|
| 53 |
+ |
|
| 34 | 54 |
return self.get_action_reference(key)
|
| 35 | 55 |
|
| 36 | 56 |
def update_action_result(self, action_digest, action_result):
|
| 37 |
- key = self._get_key(action_digest)
|
|
| 38 |
- self.update_reference(key, action_result)
|
|
| 57 |
+ """Stores in cache a result for an action."""
|
|
| 58 |
+ if self._cache_failed_actions or action_result.exit_code == 0:
|
|
| 59 |
+ key = self._get_key(action_digest)
|
|
| 60 |
+ |
|
| 61 |
+ self.update_reference(key, action_result)
|
|
| 62 |
+ |
|
| 63 |
+ self.__logger.info("Result cached for action [%s/%s]",
|
|
| 64 |
+ action_digest.hash, action_digest.size_bytes)
|
|
| 65 |
+ |
|
| 66 |
+ # --- Private API ---
|
|
| 39 | 67 |
|
| 40 | 68 |
def _get_key(self, action_digest):
|
| 41 | 69 |
return (action_digest.hash, action_digest.size_bytes)
|
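Functionally, `cache_failed_actions=False` just turns `update_action_result` into a no-op for non-zero exit codes. Exercised in-process (storage size and digests are arbitrary; import paths mirror the test fixtures):

    from buildgrid.server.actioncache.storage import ActionCache
    from buildgrid.server.cas.storage import lru_memory_cache
    from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
    from buildgrid.utils import create_digest

    storage = lru_memory_cache.LRUMemoryCache(1024 * 1024)
    cache = ActionCache(storage, max_cached_refs=64,
                        allow_updates=True, cache_failed_actions=False)

    failed_result = remote_execution_pb2.ActionResult(exit_code=1)

    # Silently skipped: failed results are not written to the cache.
    cache.update_action_result(create_digest(b'some-action'), failed_result)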
| ... | ... | @@ -50,7 +50,6 @@ class BotsInterface: |
| 50 | 50 |
register with the service, the old one should be closed along
|
| 51 | 51 |
with all its jobs.
|
| 52 | 52 |
"""
|
| 53 |
- |
|
| 54 | 53 |
bot_id = bot_session.bot_id
|
| 55 | 54 |
|
| 56 | 55 |
if bot_id == "":
|
| ... | ... | @@ -100,10 +99,25 @@ class BotsInterface: |
| 100 | 99 |
return bot_session
|
| 101 | 100 |
|
| 102 | 101 |
def _request_leases(self, bot_session):
|
| 103 |
- # TODO: Send worker capabilities to the scheduler!
|
|
| 104 | 102 |
# Only send one lease at a time currently.
|
| 105 | 103 |
if not bot_session.leases:
|
| 106 |
- leases = self._scheduler.request_job_leases({})
|
|
| 104 |
+ worker_capabilities = {}
|
|
| 105 |
+ |
|
| 106 |
+ # TODO? Fail if there are no devices in the worker?
|
|
| 107 |
+ if bot_session.worker.devices:
|
|
| 108 |
+ # According to the spec:
|
|
| 109 |
+ # "The first device in the worker is the "primary device" -
|
|
| 110 |
+ # that is, the device running a bot and which is
|
|
| 111 |
+ # responsible for actually executing commands."
|
|
| 112 |
+ primary_device = bot_session.worker.devices[0]
|
|
| 113 |
+ |
|
| 114 |
+ for device_property in primary_device.properties:
|
|
| 115 |
+ if device_property.key not in worker_capabilities:
|
|
| 116 |
+ worker_capabilities[device_property.key] = set()
|
|
| 117 |
+ worker_capabilities[device_property.key].add(device_property.value)
|
|
| 118 |
+ |
|
| 119 |
+ leases = self._scheduler.request_job_leases(worker_capabilities)
|
|
| 120 |
+ |
|
| 107 | 121 |
if leases:
|
| 108 | 122 |
for lease in leases:
|
| 109 | 123 |
self._assigned_leases[bot_session.name].add(lease.id)
|
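Only the first (primary) device is consulted when building the capabilities dictionary, per the quoted spec. The extraction itself reduces to the following (the Device/Property stand-ins below are simplifications of the remote-workers protos):

    from collections import namedtuple

    # Simplified stand-ins for the remote-workers Device/Property messages:
    Property = namedtuple('Property', ['key', 'value'])
    Device = namedtuple('Device', ['properties'])

    devices = [Device([Property('os', 'linux'), Property('isa', 'x86-64')]),
               Device([Property('os', 'freebsd')])]  # secondary device: ignored

    worker_capabilities = {}
    if devices:
        # Only the primary device, the one running the bot, is consulted:
        for device_property in devices[0].properties:
            worker_capabilities.setdefault(device_property.key,
                                           set()).add(device_property.value)

    assert worker_capabilities == {'os': {'linux'}, 'isa': {'x86-64'}}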
| ... | ... | @@ -22,7 +22,7 @@ An instance of the Remote Execution Service. |
| 22 | 22 |
import logging
|
| 23 | 23 |
|
| 24 | 24 |
from buildgrid._exceptions import FailedPreconditionError, InvalidArgumentError, NotFoundError
|
| 25 |
-from buildgrid._protos.build.bazel.remote.execution.v2.remote_execution_pb2 import Action
|
|
| 25 |
+from buildgrid._protos.build.bazel.remote.execution.v2.remote_execution_pb2 import Action, Command
|
|
| 26 | 26 |
from buildgrid.utils import get_hash_type
|
| 27 | 27 |
|
| 28 | 28 |
|
| ... | ... | @@ -50,11 +50,22 @@ class ExecutionInstance: |
| 50 | 50 |
this action.
|
| 51 | 51 |
"""
|
| 52 | 52 |
action = self._storage.get_message(action_digest, Action)
|
| 53 |
- |
|
| 54 | 53 |
if not action:
|
| 55 | 54 |
raise FailedPreconditionError("Could not get action from storage.")
|
| 56 | 55 |
|
| 56 |
+ command = self._storage.get_message(action.command_digest, Command)
|
|
| 57 |
+ |
|
| 58 |
+ if not command:
|
|
| 59 |
+ raise FailedPreconditionError("Could not get command from storage.")
|
|
| 60 |
+ |
|
| 61 |
+ platform_requirements = {}
|
|
| 62 |
+ for platform_property in command.platform.properties:
|
|
| 63 |
+ if platform_property.name not in platform_requirements:
|
|
| 64 |
+ platform_requirements[platform_property.name] = set()
|
|
| 65 |
+ platform_requirements[platform_property.name].add(platform_property.value)
|
|
| 66 |
+ |
|
| 57 | 67 |
return self._scheduler.queue_job_action(action, action_digest,
|
| 68 |
+ platform_requirements=platform_requirements,
|
|
| 58 | 69 |
skip_cache_lookup=skip_cache_lookup)
|
| 59 | 70 |
|
| 60 | 71 |
def register_job_peer(self, job_name, peer, message_queue):
|
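Server-side, the requirements dictionary mirrors the client-side construction shown earlier. As a standalone round-trip (values are illustrative):

    from buildgrid._protos.build.bazel.remote.execution.v2.remote_execution_pb2 import Command

    command = Command()
    for name, value in (('os', 'linux'), ('isa', 'x86-64')):
        prop = command.platform.properties.add()
        prop.name = name
        prop.value = value

    # What queue_job_action() now receives as platform_requirements:
    platform_requirements = {}
    for platform_property in command.platform.properties:
        platform_requirements.setdefault(platform_property.name,
                                         set()).add(platform_property.value)

    assert platform_requirements == {'os': {'linux'}, 'isa': {'x86-64'}}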
| ... | ... | @@ -29,7 +29,7 @@ from buildgrid._protos.google.rpc import code_pb2 |
| 29 | 29 |
|
| 30 | 30 |
class Job:
|
| 31 | 31 |
|
| 32 |
- def __init__(self, action, action_digest, priority=0):
|
|
| 32 |
+ def __init__(self, action, action_digest, platform_requirements=None, priority=0):
|
|
| 33 | 33 |
self.__logger = logging.getLogger(__name__)
|
| 34 | 34 |
|
| 35 | 35 |
self._name = str(uuid.uuid4())
|
| ... | ... | @@ -59,6 +59,9 @@ class Job: |
| 59 | 59 |
self._do_not_cache = self._action.do_not_cache
|
| 60 | 60 |
self._n_tries = 0
|
| 61 | 61 |
|
| 62 |
+ self._platform_requirements = platform_requirements \
|
|
| 63 |
+ if platform_requirements else dict()
|
|
| 64 |
+ |
|
| 62 | 65 |
self._done = False
|
| 63 | 66 |
|
| 64 | 67 |
def __lt__(self, other):
|
| ... | ... | @@ -113,6 +116,10 @@ class Job: |
| 113 | 116 |
|
| 114 | 117 |
# --- Public API: REAPI ---
|
| 115 | 118 |
|
| 119 |
+ @property
|
|
| 120 |
+ def platform_requirements(self):
|
|
| 121 |
+ return self._platform_requirements
|
|
| 122 |
+ |
|
| 116 | 123 |
@property
|
| 117 | 124 |
def do_not_cache(self):
|
| 118 | 125 |
return self._do_not_cache
|
| ... | ... | @@ -145,7 +145,8 @@ class Scheduler: |
| 145 | 145 |
if not job.n_peers and job.done and not job.lease:
|
| 146 | 146 |
self._delete_job(job.name)
|
| 147 | 147 |
|
| 148 |
- def queue_job_action(self, action, action_digest, priority=0, skip_cache_lookup=False):
|
|
| 148 |
+ def queue_job_action(self, action, action_digest, platform_requirements=None,
|
|
| 149 |
+ priority=0, skip_cache_lookup=False):
|
|
| 149 | 150 |
"""Inserts a newly created job into the execution queue.
|
| 150 | 151 |
|
| 151 | 152 |
Warning:
|
| ... | ... | @@ -155,6 +156,9 @@ class Scheduler: |
| 155 | 156 |
Args:
|
| 156 | 157 |
action (Action): the given action to queue for execution.
|
| 157 | 158 |
action_digest (Digest): the digest of the given action.
|
| 159 |
+ platform_requirements (dict(set)): platform attributes that a worker
|
|
| 160 |
+ must satisfy in order to be assigned the job. (Each key can
|
|
| 161 |
+ have multiple values.)
|
|
| 158 | 162 |
priority (int): the execution job's priority.
|
| 159 | 163 |
skip_cache_lookup (bool): whether or not to look for pre-computed
|
| 160 | 164 |
result for the given action.
|
| ... | ... | @@ -178,7 +182,9 @@ class Scheduler: |
| 178 | 182 |
|
| 179 | 183 |
return job.name
|
| 180 | 184 |
|
| 181 |
- job = Job(action, action_digest, priority=priority)
|
|
| 185 |
+ job = Job(action, action_digest,
|
|
| 186 |
+ platform_requirements=platform_requirements,
|
|
| 187 |
+ priority=priority)
|
|
| 182 | 188 |
|
| 183 | 189 |
self.__logger.debug("Job created for action [%s]: [%s]",
|
| 184 | 190 |
action_digest.hash[:8], job.name)
|
| ... | ... | @@ -271,28 +277,29 @@ class Scheduler: |
| 271 | 277 |
"""Generates a list of the highest priority leases to be run.
|
| 272 | 278 |
|
| 273 | 279 |
Args:
|
| 274 |
- worker_capabilities (dict): a set of key-value pairs decribing the
|
|
| 280 |
+ worker_capabilities (dict): a set of key-value pairs describing the
|
|
| 275 | 281 |
worker properties, configuration and state at the time of the
|
| 276 | 282 |
request.
|
| 277 |
- |
|
| 278 |
- Warning: Worker capabilities handling is not implemented at the moment!
|
|
| 279 | 283 |
"""
|
| 280 | 284 |
if not self.__queue:
|
| 281 | 285 |
return []
|
| 282 | 286 |
|
| 283 |
- # TODO: Try to match worker_capabilities with jobs properties.
|
|
| 284 |
- job = self.__queue.pop()
|
|
| 287 |
+ # Looking for the first job that could be assigned to the worker...
|
|
| 288 |
+ for job_index, job in enumerate(self.__queue):
|
|
| 289 |
+ if self._worker_is_capable(worker_capabilities, job):
|
|
| 290 |
+ self.__logger.info("Job scheduled to run: [%s]", job.name)
|
|
| 285 | 291 |
|
| 286 |
- self.__logger.info("Job scheduled to run: [%s]", job.name)
|
|
| 292 |
+ lease = job.lease
|
|
| 287 | 293 |
|
| 288 |
- lease = job.lease
|
|
| 294 |
+ if not lease:
|
|
| 295 |
+ # For now, one lease at a time:
|
|
| 296 |
+ lease = job.create_lease()
|
|
| 289 | 297 |
|
| 290 |
- if not lease:
|
|
| 291 |
- # For now, one lease at a time:
|
|
| 292 |
- lease = job.create_lease()
|
|
| 298 |
+ if lease:
|
|
| 299 |
+ del self.__queue[job_index]
|
|
| 300 |
+ return [lease]
|
|
| 293 | 301 |
|
| 294 |
- if lease:
|
|
| 295 |
- return [lease]
|
|
| 302 |
+ return None
|
|
| 296 | 303 |
|
| 297 | 304 |
return None
|
| 298 | 305 |
|
| ... | ... | @@ -622,3 +629,28 @@ class Scheduler: |
| 622 | 629 |
|
| 623 | 630 |
for message_queue in self.__build_metadata_queues:
|
| 624 | 631 |
message_queue.put(message)
|
| 632 |
+ |
|
| 633 |
+ def _worker_is_capable(self, worker_capabilities, job):
|
|
| 634 |
+ """Returns whether the worker is suitable to run the job."""
|
|
| 635 |
+ # TODO: Replace this with the logic defined in the Platform msg. standard.
|
|
| 636 |
+ |
|
| 637 |
+ job_requirements = job.platform_requirements
|
|
| 638 |
+ # For now we'll only check OS and ISA properties.
|
|
| 639 |
+ |
|
| 640 |
+ if not job_requirements:
|
|
| 641 |
+ return True
|
|
| 642 |
+ |
|
| 643 |
+ # OS:
|
|
| 644 |
+ worker_oses = worker_capabilities.get('os', set())
|
|
| 645 |
+ job_oses = job_requirements.get('os', set())
|
|
| 646 |
+ if job_oses and not (job_oses & worker_oses):
|
|
| 647 |
+ return False
|
|
| 648 |
+ |
|
| 649 |
+ # ISAs:
|
|
| 650 |
+ worker_isas = worker_capabilities.get('isa', set())
|
|
| 651 |
+ job_isas = job_requirements.get('isa', None)
|
|
| 652 |
+ |
|
| 653 |
+ if job_isas and not (job_isas & worker_isas):
|
|
| 654 |
+ return False
|
|
| 655 |
+ |
|
| 656 |
+ return True
|
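The capability check is a per-property set intersection; its semantics can be condensed to the following (a sketch, not the class method itself):

    def worker_is_capable(worker_capabilities, job_requirements):
        # The worker qualifies if, for every checked property, it offers at
        # least one of the values the job requires ('os' and 'isa' for now):
        for key in ('os', 'isa'):
            required = job_requirements.get(key, set())
            offered = worker_capabilities.get(key, set())
            if required and not (required & offered):
                return False
        return True

    assert worker_is_capable({'os': {'linux'}, 'isa': {'x86-64'}}, {'os': {'linux'}})
    assert not worker_is_capable({'os': {'linux'}}, {'os': {'aix'}})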
| ... | ... | @@ -17,7 +17,7 @@ from operator import attrgetter |
| 17 | 17 |
import os
|
| 18 | 18 |
import socket
|
| 19 | 19 |
|
| 20 |
-from buildgrid.settings import HASH
|
|
| 20 |
+from buildgrid.settings import HASH, HASH_LENGTH
|
|
| 21 | 21 |
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
| 22 | 22 |
|
| 23 | 23 |
|
| ... | ... | @@ -53,6 +53,27 @@ def create_digest(bytes_to_digest): |
| 53 | 53 |
size_bytes=len(bytes_to_digest))
|
| 54 | 54 |
|
| 55 | 55 |
|
| 56 |
+def parse_digest(digest_string):
|
|
| 57 |
+ """Creates a :obj:`Digest` from a digest string.
|
|
| 58 |
+ |
|
| 59 |
+ A digest string should always be: ``{hash}/{size_bytes}``.
|
|
| 60 |
+ |
|
| 61 |
+ Args:
|
|
| 62 |
+ digest_string (str): the digest string.
|
|
| 63 |
+ |
|
| 64 |
+ Returns:
|
|
| 65 |
+ :obj:`Digest`: The :obj:`Digest` read from the string or None if
|
|
| 66 |
+ `digest_string` is not a valid digest string.
|
|
| 67 |
+ """
|
|
| 68 |
+ digest_hash, _, digest_size = digest_string.partition('/')
|
|
| 69 |
+ |
|
| 70 |
+ if len(digest_hash) == HASH_LENGTH and digest_size.isdigit():
|
|
| 71 |
+ return remote_execution_pb2.Digest(hash=digest_hash,
|
|
| 72 |
+ size_bytes=int(digest_size))
|
|
| 73 |
+ |
|
| 74 |
+ return None
|
|
| 75 |
+ |
|
| 76 |
+ |
|
| 56 | 77 |
def read_file(file_path):
|
| 57 | 78 |
"""Loads raw file content in memory.
|
| 58 | 79 |
|
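A quick round-trip between the two helpers illustrates the convention (assumes the default SHA-256 settings, so HASH_LENGTH is 64):

    from buildgrid.utils import create_digest, parse_digest

    digest = create_digest(b'')  # digest of the empty blob
    digest_string = '{}/{}'.format(digest.hash, digest.size_bytes)

    # '{hash}/{size_bytes}' parses back into an equivalent Digest message:
    assert parse_digest(digest_string) == digest

    # Malformed strings yield None instead of a Digest:
    assert parse_digest('abcd/42') is None  # hash too short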
| ... | ... | @@ -15,6 +15,27 @@ BuildGrid's Command Line Interface (CLI) reference documentation. |
| 15 | 15 |
|
| 16 | 16 |
----
|
| 17 | 17 |
|
| 18 |
+.. _invoking-bgd-action-cache:
|
|
| 19 |
+ |
|
| 20 |
+.. click:: buildgrid._app.commands.cmd_actioncache:cli
|
|
| 21 |
+ :prog: bgd action-cache
|
|
| 22 |
+ |
|
| 23 |
+----
|
|
| 24 |
+ |
|
| 25 |
+.. _invoking-bgd-action-cache-get:
|
|
| 26 |
+ |
|
| 27 |
+.. click:: buildgrid._app.commands.cmd_actioncache:get
|
|
| 28 |
+ :prog: bgd action-cache get
|
|
| 29 |
+ |
|
| 30 |
+----
|
|
| 31 |
+ |
|
| 32 |
+.. _invoking-bgd-action-cache-update:
|
|
| 33 |
+ |
|
| 34 |
+.. click:: buildgrid._app.commands.cmd_actioncache:update
|
|
| 35 |
+ :prog: bgd action-cache update
|
|
| 36 |
+ |
|
| 37 |
+----
|
|
| 38 |
+ |
|
| 18 | 39 |
.. _invoking-bgd-bot:
|
| 19 | 40 |
|
| 20 | 41 |
.. click:: buildgrid._app.commands.cmd_bot:cli
|
| ... | ... | @@ -137,4 +158,4 @@ BuildGrid's Command Line Interface (CLI) reference documentation. |
| 137 | 158 |
.. _invoking-bgd-server-start:
|
| 138 | 159 |
|
| 139 | 160 |
.. click:: buildgrid._app.commands.cmd_server:start
|
| 140 |
- :prog: bgd server start
|
|
| \ No newline at end of file | ||
| 161 |
+ :prog: bgd server start
|
| ... | ... | @@ -85,3 +85,37 @@ def test_disabled_update_action_result(context): |
| 85 | 85 |
ac_service.UpdateActionResult(request, context)
|
| 86 | 86 |
|
| 87 | 87 |
context.set_code.assert_called_once_with(grpc.StatusCode.UNIMPLEMENTED)
|
| 88 |
+ |
|
| 89 |
+ |
|
| 90 |
+def test_disabled_cache_failed_actions(cas, context):
|
|
| 91 |
+ disabled_failed_actions = ActionCache(cas, 50, True, False)
|
|
| 92 |
+ with mock.patch.object(service, 'remote_execution_pb2_grpc'):
|
|
| 93 |
+ ac_service = ActionCacheService(server)
|
|
| 94 |
+ ac_service.add_instance("", disabled_failed_actions)
|
|
| 95 |
+ |
|
| 96 |
+ failure_action_digest = remote_execution_pb2.Digest(hash='failure', size_bytes=4)
|
|
| 97 |
+ |
|
| 98 |
+ # Add a non-zero exit code ActionResult to the cache
|
|
| 99 |
+ action_result = remote_execution_pb2.ActionResult(stdout_raw=b'Failed', exit_code=1)
|
|
| 100 |
+ request = remote_execution_pb2.UpdateActionResultRequest(action_digest=failure_action_digest,
|
|
| 101 |
+ action_result=action_result)
|
|
| 102 |
+ ac_service.UpdateActionResult(request, context)
|
|
| 103 |
+ |
|
| 104 |
+ # The failed ActionResult should not have been cached: fetching it must fail
|
|
| 105 |
+ request = remote_execution_pb2.GetActionResultRequest(instance_name="",
|
|
| 106 |
+ action_digest=failure_action_digest)
|
|
| 107 |
+ ac_service.GetActionResult(request, context)
|
|
| 108 |
+ context.set_code.assert_called_once_with(grpc.StatusCode.NOT_FOUND)
|
|
| 109 |
+ |
|
| 110 |
+ success_action_digest = remote_execution_pb2.Digest(hash='success', size_bytes=4)
|
|
| 111 |
+ |
|
| 112 |
+ # Now add a zero exit code Action result to the cache, and check that fetching
|
|
| 113 |
+ # it is successful
|
|
| 114 |
+ success_action_result = remote_execution_pb2.ActionResult(stdout_raw=b'Successful')
|
|
| 115 |
+ request = remote_execution_pb2.UpdateActionResultRequest(action_digest=success_action_digest,
|
|
| 116 |
+ action_result=success_action_result)
|
|
| 117 |
+ ac_service.UpdateActionResult(request, context)
|
|
| 118 |
+ request = remote_execution_pb2.GetActionResultRequest(instance_name="",
|
|
| 119 |
+ action_digest=success_action_digest)
|
|
| 120 |
+ fetched_result = ac_service.GetActionResult(request, context)
|
|
| 121 |
+ assert fetched_result.stdout_raw == success_action_result.stdout_raw
|
| ... | ... | @@ -153,11 +153,27 @@ def test_post_bot_event_temp(context, instance): |
| 153 | 153 |
context.set_code.assert_called_once_with(grpc.StatusCode.UNIMPLEMENTED)
|
| 154 | 154 |
|
| 155 | 155 |
|
| 156 |
-def _inject_work(scheduler, action=None, action_digest=None):
|
|
| 156 |
+def test_unmet_platform_requirements(bot_session, context, instance):
|
|
| 157 |
+ request = bots_pb2.CreateBotSessionRequest(parent='',
|
|
| 158 |
+ bot_session=bot_session)
|
|
| 159 |
+ |
|
| 160 |
+ action_digest = remote_execution_pb2.Digest(hash='gaff')
|
|
| 161 |
+ _inject_work(instance._instances[""]._scheduler,
|
|
| 162 |
+ action_digest=action_digest,
|
|
| 163 |
+ platform_requirements={'os': {'wonderful-os'}})
|
|
| 164 |
+ |
|
| 165 |
+ response = instance.CreateBotSession(request, context)
|
|
| 166 |
+ |
|
| 167 |
+ assert len(response.leases) == 0
|
|
| 168 |
+ |
|
| 169 |
+ |
|
| 170 |
+def _inject_work(scheduler, action=None, action_digest=None,
|
|
| 171 |
+ platform_requirements=None):
|
|
| 157 | 172 |
if not action:
|
| 158 | 173 |
action = remote_execution_pb2.Action()
|
| 159 | 174 |
|
| 160 | 175 |
if not action_digest:
|
| 161 | 176 |
action_digest = remote_execution_pb2.Digest()
|
| 162 | 177 |
|
| 163 |
- scheduler.queue_job_action(action, action_digest, skip_cache_lookup=True)
|
|
| 178 |
+ scheduler.queue_job_action(action, action_digest, platform_requirements,
|
|
| 179 |
+ skip_cache_lookup=True)
|
| ... | ... | @@ -37,7 +37,12 @@ from buildgrid.server.execution.service import ExecutionService |
| 37 | 37 |
|
| 38 | 38 |
|
| 39 | 39 |
server = mock.create_autospec(grpc.server)
|
| 40 |
-action = remote_execution_pb2.Action(do_not_cache=True)
|
|
| 40 |
+ |
|
| 41 |
+command = remote_execution_pb2.Command()
|
|
| 42 |
+command_digest = create_digest(command.SerializeToString())
|
|
| 43 |
+ |
|
| 44 |
+action = remote_execution_pb2.Action(command_digest=command_digest,
|
|
| 45 |
+ do_not_cache=True)
|
|
| 41 | 46 |
action_digest = create_digest(action.SerializeToString())
|
| 42 | 47 |
|
| 43 | 48 |
|
| ... | ... | @@ -50,7 +55,13 @@ def context(): |
| 50 | 55 |
@pytest.fixture(params=["action-cache", "no-action-cache"])
|
| 51 | 56 |
def controller(request):
|
| 52 | 57 |
storage = lru_memory_cache.LRUMemoryCache(1024 * 1024)
|
| 58 |
+ |
|
| 59 |
+ write_session = storage.begin_write(command_digest)
|
|
| 60 |
+ write_session.write(command.SerializeToString())
|
|
| 61 |
+ storage.commit_write(command_digest, write_session)
|
|
| 62 |
+ |
|
| 53 | 63 |
write_session = storage.begin_write(action_digest)
|
| 64 |
+ write_session.write(action.SerializeToString())
|
|
| 54 | 65 |
storage.commit_write(action_digest, write_session)
|
| 55 | 66 |
|
| 56 | 67 |
if request.param == "action-cache":
|
| ... | ... | @@ -36,7 +36,12 @@ from buildgrid.utils import create_digest |
| 36 | 36 |
|
| 37 | 37 |
server = mock.create_autospec(grpc.server)
|
| 38 | 38 |
instance_name = "blade"
|
| 39 |
-action = remote_execution_pb2.Action(do_not_cache=True)
|
|
| 39 |
+ |
|
| 40 |
+command = remote_execution_pb2.Command()
|
|
| 41 |
+command_digest = create_digest(command.SerializeToString())
|
|
| 42 |
+ |
|
| 43 |
+action = remote_execution_pb2.Action(command_digest=command_digest,
|
|
| 44 |
+ do_not_cache=True)
|
|
| 40 | 45 |
action_digest = create_digest(action.SerializeToString())
|
| 41 | 46 |
|
| 42 | 47 |
|
| ... | ... | @@ -57,7 +62,13 @@ def execute_request(): |
| 57 | 62 |
@pytest.fixture
|
| 58 | 63 |
def controller():
|
| 59 | 64 |
storage = lru_memory_cache.LRUMemoryCache(1024 * 1024)
|
| 65 |
+ |
|
| 66 |
+ write_session = storage.begin_write(command_digest)
|
|
| 67 |
+ write_session.write(command.SerializeToString())
|
|
| 68 |
+ storage.commit_write(command_digest, write_session)
|
|
| 69 |
+ |
|
| 60 | 70 |
write_session = storage.begin_write(action_digest)
|
| 71 |
+ write_session.write(action.SerializeToString())
|
|
| 61 | 72 |
storage.commit_write(action_digest, write_session)
|
| 62 | 73 |
|
| 63 | 74 |
yield ExecutionController(None, storage)
|
