Francisco Redondo Marchena pushed to branch Qinusty/397 at BuildStream / buildstream
Commits:
- a0ad985f by Phil Dawson at 2018-07-25T13:03:23Z
- 95b4eae4 by Phil Dawson at 2018-07-25T13:03:23Z
- 889bf238 by Jim MacArthur at 2018-07-25T13:43:21Z
- 54fee6c8 by Phillip Smyth at 2018-07-25T14:15:36Z
- f62b6cb7 by Jonathan Maw at 2018-07-25T15:01:33Z
- acbb90ff by Josh Smith at 2018-07-26T12:53:45Z
- 66d341d2 by Josh Smith at 2018-07-26T12:53:45Z
- f0d53c7d by Josh Smith at 2018-07-26T12:53:45Z
- 9be2c1b3 by Josh Smith at 2018-07-26T12:53:45Z
- ad7e88f7 by Josh Smith at 2018-07-26T12:53:45Z
- b2f2f79b by Josh Smith at 2018-07-26T12:53:45Z
15 changed files:
- buildstream/_artifactcache/cascache.py
- buildstream/_exceptions.py
- buildstream/_frontend/app.py
- buildstream/_scheduler/jobs/job.py
- buildstream/_stream.py
- buildstream/element.py
- buildstream/plugin.py
- buildstream/plugins/sources/_downloadablefilesource.py
- buildstream/plugins/sources/git.py
- buildstream/source.py
- tests/frontend/show.py
- tests/frontend/workspace.py
- tests/sources/deb.py
- tests/sources/tar.py
- tests/sources/zip.py
Changes:
buildstream/_artifactcache/cascache.py:

@@ -340,7 +340,7 @@ class CASCache(ArtifactCache):
 
             except grpc.RpcError as e:
                 if e.code() != grpc.StatusCode.RESOURCE_EXHAUSTED:
-                    raise ArtifactError("Failed to push artifact {}: {}".format(refs, e)) from e
+                    raise ArtifactError("Failed to push artifact {}: {}".format(refs, e), temporary=True) from e
 
         return pushed
 
buildstream/_exceptions.py:

@@ -99,7 +99,7 @@ class ErrorDomain(Enum):
 #
 class BstError(Exception):
 
-    def __init__(self, message, *, detail=None, domain=None, reason=None):
+    def __init__(self, message, *, detail=None, domain=None, reason=None, temporary=False):
         global _last_exception
 
         super().__init__(message)
@@ -114,6 +114,11 @@ class BstError(Exception):
         #
         self.sandbox = None
 
+        # When this exception occurred during the handling of a job, indicate
+        # whether or not there is any point retrying the job.
+        #
+        self.temporary = temporary
+
         # Error domain and reason
         #
         self.domain = domain
@@ -131,8 +136,8 @@ class BstError(Exception):
 # or by the base :class:`.Plugin` element itself.
 #
 class PluginError(BstError):
-    def __init__(self, message, reason=None):
-        super().__init__(message, domain=ErrorDomain.PLUGIN, reason=reason)
+    def __init__(self, message, reason=None, temporary=False):
+        super().__init__(message, domain=ErrorDomain.PLUGIN, reason=reason, temporary=False)
 
 
 # LoadErrorReason
@@ -249,8 +254,8 @@ class SandboxError(BstError):
 # Raised when errors are encountered in the artifact caches
 #
 class ArtifactError(BstError):
-    def __init__(self, message, *, detail=None, reason=None):
-        super().__init__(message, detail=detail, domain=ErrorDomain.ARTIFACT, reason=reason)
+    def __init__(self, message, *, detail=None, reason=None, temporary=False):
+        super().__init__(message, detail=detail, domain=ErrorDomain.ARTIFACT, reason=reason, temporary=True)
 
 
 # PipelineError
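An illustrative sketch (not taken from the commits above) of how the new `temporary` keyword is meant to be consumed: an error raised with `temporary=True` marks the failing job as worth retrying, while the default `False` makes the failure permanent. The `ExampleError` class and `push_with_retries` helper below are hypothetical stand-ins, not BuildStream API:

    import time

    class ExampleError(Exception):
        """Stand-in for BstError: carries a 'temporary' hint for the scheduler."""
        def __init__(self, message, *, temporary=False):
            super().__init__(message)
            self.temporary = temporary

    def push_with_retries(push, max_retries=3):
        # Retry only while the raised error says the failure is temporary,
        # mirroring the _retry_flag handling added to Job in this branch.
        tries = 0
        while True:
            tries += 1
            try:
                return push()
            except ExampleError as e:
                if not e.temporary or tries > max_retries:
                    raise
                time.sleep(1)  # back off briefly before the next attempt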
buildstream/_frontend/app.py:

@@ -270,6 +270,10 @@ class App():
 
             # Exit with the error
             self._error_exit(e)
+        except RecursionError:
+            click.echo("RecursionError: Depency depth is too large. Maximum recursion depth exceeded.",
+                       err=True)
+            sys.exit(-1)
 
         else:
             # No exceptions occurred, print session time and summary
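For context, a minimal sketch of the pattern this frontend change follows: a RecursionError raised while resolving a very deep dependency chain is reported as a plain message rather than an unhandled traceback. The `run_with_depth_guard` wrapper and the `load_pipeline` callable are hypothetical:

    import sys
    import click

    def run_with_depth_guard(load_pipeline):
        # load_pipeline is a hypothetical callable that may recurse once per
        # dependency; exceeding the interpreter limit raises RecursionError.
        try:
            return load_pipeline()
        except RecursionError:
            click.echo("RecursionError: Dependency depth is too large. "
                       "Maximum recursion depth exceeded.", err=True)
            sys.exit(-1)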
buildstream/_scheduler/jobs/job.py:

@@ -35,6 +35,12 @@ from ..._exceptions import ImplError, BstError, set_last_task_error
 from ..._message import Message, MessageType, unconditional_messages
 from ... import _signals, utils
 
+# Return code values shutdown of job handling child processes
+#
+RC_OK = 0
+RC_FAIL = 1
+RC_PERM_FAIL = 2
+
 
 # Used to distinguish between status messages and return values
 class Envelope():
@@ -111,6 +117,10 @@ class Job():
         self._max_retries = max_retries   # Maximum number of automatic retries
         self._result = None               # Return value of child action in the parent
         self._tries = 0                   # Try count, for retryable jobs
+
+        # If False, a retry will not be attempted regardless of whether _tries is less than _max_retries.
+        #
+        self._retry_flag = True
         self._logfile = logfile
         self._task_id = None
 
@@ -388,8 +398,9 @@ class Job():
                 result = self.child_process()
             except BstError as e:
                 elapsed = datetime.datetime.now() - starttime
+                self._retry_flag = e.temporary
 
-                if self._tries <= self._max_retries:
+                if self._retry_flag and (self._tries <= self._max_retries):
                     self.message(MessageType.FAIL,
                                  "Try #{} failed, retrying".format(self._tries),
                                  elapsed=elapsed)
@@ -402,7 +413,10 @@ class Job():
 
                 # Report the exception to the parent (for internal testing purposes)
                 self._child_send_error(e)
-                self._child_shutdown(1)
+
+                # Set return code based on whether or not the error was temporary.
+                #
+                self._child_shutdown(RC_FAIL if self._retry_flag else RC_PERM_FAIL)
 
             except Exception as e:  # pylint: disable=broad-except
 
@@ -416,7 +430,7 @@ class Job():
                 self.message(MessageType.BUG, self.action_name,
                              elapsed=elapsed, detail=detail,
                              logfile=filename)
-                self._child_shutdown(1)
+                self._child_shutdown(RC_FAIL)
 
             else:
                 # No exception occurred in the action
@@ -430,7 +444,7 @@ class Job():
         # Shutdown needs to stay outside of the above context manager,
         # make sure we dont try to handle SIGTERM while the process
         # is already busy in sys.exit()
-        self._child_shutdown(0)
+        self._child_shutdown(RC_OK)
 
     # _child_send_error()
     #
@@ -495,7 +509,8 @@ class Job():
         message.action_name = self.action_name
         message.task_id = self._task_id
 
-        if message.message_type == MessageType.FAIL and self._tries <= self._max_retries:
+        if (message.message_type == MessageType.FAIL and
+                self._tries <= self._max_retries and self._retry_flag):
             # Job will be retried, display failures as warnings in the frontend
             message.message_type = MessageType.WARN
 
@@ -529,12 +544,17 @@ class Job():
     def _parent_child_completed(self, pid, returncode):
         self._parent_shutdown()
 
-        if returncode != 0 and self._tries <= self._max_retries:
+        # We don't want to retry if we got OK or a permanent fail.
+        # This is set in _child_action but must also be set for the parent.
+        #
+        self._retry_flag = returncode not in (RC_OK, RC_PERM_FAIL)
+
+        if self._retry_flag and (self._tries <= self._max_retries):
             self.spawn()
             return
 
-        self.parent_complete(returncode == 0, self._result)
-        self._scheduler.job_completed(self, returncode == 0)
+        self.parent_complete(returncode == RC_OK, self._result)
+        self._scheduler.job_completed(self, returncode == RC_OK)
 
     # _parent_process_envelope()
     #
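To summarise the new contract between the child process and the parent scheduler, a small sketch of the decision the parent now takes from the child's return code (the `decide` helper is hypothetical; the constants mirror the ones introduced above):

    RC_OK = 0         # child finished successfully
    RC_FAIL = 1       # child failed with a temporary error, retrying may help
    RC_PERM_FAIL = 2  # child failed permanently, retrying is pointless

    def decide(returncode, tries, max_retries):
        """Return 'retry', 'success' or 'fail' for a completed child process."""
        retry_flag = returncode not in (RC_OK, RC_PERM_FAIL)
        if retry_flag and tries <= max_retries:
            return 'retry'
        return 'success' if returncode == RC_OK else 'fail'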
buildstream/_stream.py:

@@ -476,7 +476,7 @@ class Stream():
 
         # Check for workspace config
         workspace = workspaces.get_workspace(target._get_full_name())
-        if workspace:
+        if workspace and not force:
             raise StreamError("Workspace '{}' is already defined at: {}"
                               .format(target.name, workspace.path))
 
@@ -495,6 +495,10 @@ class Stream():
                               "fetch the latest version of the " +
                              "source.")
 
+        if workspace:
+            workspaces.delete_workspace(target._get_full_name())
+            workspaces.save_config()
+            shutil.rmtree(directory)
         try:
             os.makedirs(directory, exist_ok=True)
         except OSError as e:
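A condensed restatement of the control flow behind `bst workspace open --force` after this change; the `prepare_workspace_dir` function and its arguments are simplified stand-ins rather than the real Stream API:

    import os
    import shutil

    def prepare_workspace_dir(existing_workspace_path, directory, force):
        # An already-open workspace only blocks 'workspace open' when --force
        # is not given; with --force the stale workspace is dropped and its
        # directory removed before the fresh checkout is staged.
        if existing_workspace_path and not force:
            raise RuntimeError("Workspace is already defined at: {}"
                               .format(existing_workspace_path))
        if existing_workspace_path:
            shutil.rmtree(directory)
        os.makedirs(directory, exist_ok=True)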
buildstream/element.py:

@@ -140,9 +140,10 @@ class ElementError(BstError):
        message (str): The error message to report to the user
        detail (str): A possibly multiline, more detailed error message
        reason (str): An optional machine readable reason string, used for test cases
+       temporary(bool): An indicator to whether the error may occur if the operation was run again.
     """
-    def __init__(self, message, *, detail=None, reason=None):
-        super().__init__(message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason)
+    def __init__(self, message, *, detail=None, reason=None, temporary=False):
+        super().__init__(message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason, temporary=temporary)
 
 
 class Element(Plugin):
buildstream/plugin.py:

@@ -478,13 +478,15 @@ class Plugin():
                                  silent_nested=silent_nested):
             yield
 
-    def call(self, *popenargs, fail=None, **kwargs):
+    def call(self, *popenargs, fail=None, fail_temporarily=False, **kwargs):
         """A wrapper for subprocess.call()
 
         Args:
            popenargs (list): Popen() arguments
           fail (str): A message to display if the process returns
                       a non zero exit code
+          fail_temporarily (bool): Whether any exceptions should
+                                   be raised as temporary. (*Added in 1.4*)
           rest_of_args (kwargs): Remaining arguments to subprocess.call()
 
         Returns:
@@ -507,7 +509,7 @@ class Plugin():
                  "Failed to download ponies from {}".format(
                      self.mirror_directory))
         """
-        exit_code, _ = self.__call(*popenargs, fail=fail, **kwargs)
+        exit_code, _ = self.__call(*popenargs, fail=fail, fail_temporarily=fail_temporarily, **kwargs)
         return exit_code
 
     def check_output(self, *popenargs, fail=None, **kwargs):
@@ -619,7 +621,7 @@ class Plugin():
 
     # Internal subprocess implementation for the call() and check_output() APIs
     #
-    def __call(self, *popenargs, collect_stdout=False, fail=None, **kwargs):
+    def __call(self, *popenargs, collect_stdout=False, fail=None, fail_temporarily=False, **kwargs):
 
         with self._output_file() as output_file:
             if 'stdout' not in kwargs:
@@ -634,7 +636,8 @@ class Plugin():
             exit_code, output = utils._call(*popenargs, **kwargs)
 
             if fail and exit_code:
-                raise PluginError("{plugin}: {message}".format(plugin=self, message=fail))
+                raise PluginError("{plugin}: {message}".format(plugin=self, message=fail),
+                                  temporary=fail_temporarily)
 
             return (exit_code, output)
 
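An illustrative restatement of the `call()` contract with the new keyword, using stand-ins (`ExamplePluginError`, `call`) rather than the real Plugin API: on a non-zero exit code the raised error carries `temporary=fail_temporarily`, which is what lets flaky network commands be retried.

    import subprocess

    class ExamplePluginError(Exception):
        """Stand-in for PluginError, carrying the 'temporary' hint."""
        def __init__(self, message, *, temporary=False):
            super().__init__(message)
            self.temporary = temporary

    def call(popenargs, *, fail=None, fail_temporarily=False, **kwargs):
        # On failure, raise an error whose 'temporary' attribute follows
        # fail_temporarily so the scheduler knows whether a retry makes sense.
        exit_code = subprocess.call(popenargs, **kwargs)
        if fail and exit_code != 0:
            raise ExamplePluginError(fail, temporary=fail_temporarily)
        return exit_code

    # Usage sketch: a network operation that may recover is worth retrying.
    # call(['git', 'fetch', 'origin'], fail="Failed to fetch", fail_temporarily=True)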
buildstream/plugins/sources/_downloadablefilesource.py:

@@ -150,11 +150,11 @@ class DownloadableFileSource(Source):
                 # we would have downloaded.
                 return self.ref
             raise SourceError("{}: Error mirroring {}: {}"
-                              .format(self, self.url, e)) from e
+                              .format(self, self.url, e), temporary=True) from e
 
         except (urllib.error.URLError, urllib.error.ContentTooShortError, OSError) as e:
             raise SourceError("{}: Error mirroring {}: {}"
-                              .format(self, self.url, e)) from e
+                              .format(self, self.url, e), temporary=True) from e
 
     def _get_mirror_dir(self):
         return os.path.join(self.get_mirror_directory(),

buildstream/plugins/sources/git.py:

@@ -113,7 +113,8 @@ class GitMirror():
             #
             with self.source.tempdir() as tmpdir:
                 self.source.call([self.source.host_git, 'clone', '--mirror', '-n', self.url, tmpdir],
-                                 fail="Failed to clone git repository {}".format(self.url))
+                                 fail="Failed to clone git repository {}".format(self.url),
+                                 fail_temporarily=True)
 
                 try:
                     shutil.move(tmpdir, self.mirror)
@@ -124,6 +125,7 @@ class GitMirror():
     def fetch(self):
         self.source.call([self.source.host_git, 'fetch', 'origin', '--prune'],
                          fail="Failed to fetch from remote git repository: {}".format(self.url),
+                         fail_temporarily=True,
                          cwd=self.mirror)
 
     def has_ref(self):
@@ -157,7 +159,8 @@ class GitMirror():
         # case we're just checking out a specific commit and then removing the .git/
         # directory.
         self.source.call([self.source.host_git, 'clone', '--no-checkout', '--shared', self.mirror, fullpath],
-                         fail="Failed to create git mirror {} in directory: {}".format(self.mirror, fullpath))
+                         fail="Failed to create git mirror {} in directory: {}".format(self.mirror, fullpath),
+                         fail_temporarily=True)
 
         self.source.call([self.source.host_git, 'checkout', '--force', self.ref],
                          fail="Failed to checkout git ref {}".format(self.ref),
@@ -170,7 +173,8 @@ class GitMirror():
         fullpath = os.path.join(directory, self.path)
 
         self.source.call([self.source.host_git, 'clone', '--no-checkout', self.mirror, fullpath],
-                         fail="Failed to clone git mirror {} in directory: {}".format(self.mirror, fullpath))
+                         fail="Failed to clone git mirror {} in directory: {}".format(self.mirror, fullpath),
+                         fail_temporarily=True)
 
         self.source.call([self.source.host_git, 'remote', 'set-url', 'origin', self.url],
                          fail='Failed to add remote origin "{}"'.format(self.url),

buildstream/source.py:

@@ -108,9 +108,10 @@ class SourceError(BstError):
        message (str): The breif error description to report to the user
        detail (str): A possibly multiline, more detailed error message
        reason (str): An optional machine readable reason string, used for test cases
+       temporary(bool): An indicator to whether the error may occur if the operation was run again.
     """
-    def __init__(self, message, *, detail=None, reason=None):
-        super().__init__(message, detail=detail, domain=ErrorDomain.SOURCE, reason=reason)
+    def __init__(self, message, *, detail=None, reason=None, temporary=False):
+        super().__init__(message, detail=detail, domain=ErrorDomain.SOURCE, reason=reason, temporary=temporary)
 
 
 class Source(Plugin):
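An illustrative sketch of the pattern the downloadable-file and git sources follow with this change: network-level exceptions are re-raised as errors flagged `temporary=True`, while configuration problems stay permanent. `ExampleSourceError` and `download` below are hypothetical stand-ins, not BuildStream API.

    import urllib.error
    import urllib.request

    class ExampleSourceError(Exception):
        """Stand-in for SourceError with the new 'temporary' keyword."""
        def __init__(self, message, *, temporary=False):
            super().__init__(message)
            self.temporary = temporary

    def download(url, dest):
        if not url.startswith(('http://', 'https://')):
            # A malformed URL will never work: fail permanently.
            raise ExampleSourceError("Unsupported URL: {}".format(url))
        try:
            with urllib.request.urlopen(url) as response, open(dest, 'wb') as f:
                f.write(response.read())
        except (urllib.error.URLError, OSError) as e:
            # The network or the mirror may recover, so mark the error as
            # temporary and let the scheduler retry the fetch job.
            raise ExampleSourceError("Error mirroring {}: {}".format(url, e),
                                     temporary=True) from e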
tests/frontend/show.py:

@@ -1,8 +1,9 @@
 import os
-import pytest
+import sys
+import shutil
 import itertools
+import pytest
 from tests.testutils import cli
-
 from buildstream import _yaml
 from buildstream._exceptions import ErrorDomain, LoadErrorReason
 
@@ -232,3 +233,58 @@ def test_fetched_junction(cli, tmpdir, datafiles, element_name):
 
     results = result.output.strip().splitlines()
     assert 'junction.bst:import-etc.bst-buildable' in results
+
+
+###############################################################
+#                   Testing recursion depth                   #
+###############################################################
+@pytest.mark.parametrize("dependency_depth", [100, 500, 1200])
+def test_exceed_max_recursion_depth(cli, tmpdir, dependency_depth):
+    project_name = "recursion-test"
+    path = str(tmpdir)
+    project_path = os.path.join(path, project_name)
+
+    def setup_test():
+        """
+        Creates a bst project with dependencydepth + 1 elements, each of which
+        depends of the previous element to be created. Each element created
+        is of type import and has an empty source file.
+        """
+        os.mkdir(project_path)
+
+        result = cli.run(project=project_path, silent=True,
+                         args=['init', '--project-name', project_name])
+        result.assert_success()
+
+        sourcefiles_path = os.path.join(project_path, "files")
+        os.mkdir(sourcefiles_path)
+
+        element_path = os.path.join(project_path, "elements")
+        for i in range(0, dependency_depth + 1):
+            element = {
+                'kind': 'import',
+                'sources': [{'kind': 'local',
+                             'path': 'files/source{}'.format(str(i))}],
+                'depends': ['element{}.bst'.format(str(i - 1))]
+            }
+            if i == 0:
+                del element['depends']
+            _yaml.dump(element, os.path.join(element_path, "element{}.bst".format(str(i))))
+
+            source = os.path.join(sourcefiles_path, "source{}".format(str(i)))
+            open(source, 'x').close()
+            assert os.path.exists(source)
+
+    setup_test()
+    result = cli.run(project=project_path, silent=True,
+                     args=['show', "element{}.bst".format(str(dependency_depth))])
+
+    recursion_limit = sys.getrecursionlimit()
+    if dependency_depth <= recursion_limit:
+        result.assert_success()
+    else:
+        # Assert exception is thown and handled
+        assert not result.unhandled_exception
+        assert result.exit_code == -1
+
+    shutil.rmtree(project_path)

tests/frontend/workspace.py:

@@ -123,6 +123,58 @@ def test_open_force(cli, tmpdir, datafiles, kind):
     result.assert_success()
 
 
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", repo_kinds)
+def test_open_force_open(cli, tmpdir, datafiles, kind):
+    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
+
+    # Assert the workspace dir exists
+    assert os.path.exists(workspace)
+
+    # Now open the workspace again with --force, this should happily succeed
+    result = cli.run(project=project, args=[
+        'workspace', 'open', '--force', element_name, workspace
+    ])
+    result.assert_success()
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", repo_kinds)
+def test_open_force_different_workspace(cli, tmpdir, datafiles, kind):
+    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False, "-alpha")
+
+    # Assert the workspace dir exists
+    assert os.path.exists(workspace)
+
+    hello_path = os.path.join(workspace, 'usr', 'bin', 'hello')
+    hello1_path = os.path.join(workspace, 'usr', 'bin', 'hello1')
+
+    tmpdir = os.path.join(str(tmpdir), "-beta")
+    shutil.move(hello_path, hello1_path)
+    element_name2, project2, workspace2 = open_workspace(cli, tmpdir, datafiles, kind, False, "-beta")
+
+    # Assert the workspace dir exists
+    assert os.path.exists(workspace2)
+
+    # Assert that workspace 1 contains the modified file
+    assert os.path.exists(hello1_path)
+
+    # Assert that workspace 2 contains the unmodified file
+    assert os.path.exists(os.path.join(workspace2, 'usr', 'bin', 'hello'))
+
+    # Now open the workspace again with --force, this should happily succeed
+    result = cli.run(project=project, args=[
+        'workspace', 'open', '--force', element_name2, workspace
+    ])
+
+    # Assert that the file in workspace 1 has been replaced
+    # With the file from workspace 2
+    assert os.path.exists(hello_path)
+    assert not os.path.exists(hello1_path)
+
+    result.assert_success()
+
+
 @pytest.mark.datafiles(DATA_DIR)
 @pytest.mark.parametrize("kind", repo_kinds)
 def test_close(cli, tmpdir, datafiles, kind):

tests/sources/deb.py:

@@ -45,7 +45,7 @@ def test_no_ref(cli, tmpdir, datafiles):
     assert cli.get_element_state(project, 'target.bst') == 'no reference'
 
 
-# Test that when I fetch a nonexistent URL, errors are handled gracefully.
+# Test that when I fetch a nonexistent URL, errors are handled gracefully and a retry is performed.
 @pytest.mark.skipif(HAVE_ARPY is False, reason="arpy is not available")
 @pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
 def test_fetch_bad_url(cli, tmpdir, datafiles):
@@ -56,6 +56,7 @@ def test_fetch_bad_url(cli, tmpdir, datafiles):
     result = cli.run(project=project, args=[
         'fetch', 'target.bst'
     ])
+    assert "Try #" in result.stderr
     result.assert_main_error(ErrorDomain.STREAM, None)
     result.assert_task_error(ErrorDomain.SOURCE, None)
 

tests/sources/tar.py:

@@ -56,7 +56,7 @@ def test_no_ref(cli, tmpdir, datafiles):
     assert cli.get_element_state(project, 'target.bst') == 'no reference'
 
 
-# Test that when I fetch a nonexistent URL, errors are handled gracefully.
+# Test that when I fetch a nonexistent URL, errors are handled gracefully and a retry is performed.
 @pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
 def test_fetch_bad_url(cli, tmpdir, datafiles):
     project = os.path.join(datafiles.dirname, datafiles.basename)
@@ -66,6 +66,7 @@ def test_fetch_bad_url(cli, tmpdir, datafiles):
     result = cli.run(project=project, args=[
         'fetch', 'target.bst'
     ])
+    assert "Try #" in result.stderr
     result.assert_main_error(ErrorDomain.STREAM, None)
     result.assert_task_error(ErrorDomain.SOURCE, None)
 

tests/sources/zip.py:

@@ -43,7 +43,7 @@ def test_no_ref(cli, tmpdir, datafiles):
     assert cli.get_element_state(project, 'target.bst') == 'no reference'
 
 
-# Test that when I fetch a nonexistent URL, errors are handled gracefully.
+# Test that when I fetch a nonexistent URL, errors are handled gracefully and a retry is performed.
 @pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
 def test_fetch_bad_url(cli, tmpdir, datafiles):
     project = os.path.join(datafiles.dirname, datafiles.basename)
@@ -53,6 +53,7 @@ def test_fetch_bad_url(cli, tmpdir, datafiles):
     result = cli.run(project=project, args=[
         'fetch', 'target.bst'
     ])
+    assert "Try #" in result.stderr
     result.assert_main_error(ErrorDomain.STREAM, None)
     result.assert_task_error(ErrorDomain.SOURCE, None)
 