Gökçen Nurlu pushed to branch gokcen/no_auto_fetch at BuildStream / buildstream
Commits:
- 516e990e by ctolentino8 at 2018-10-31T11:36:46Z
- b8a37a63 by Tristan Van Berkom at 2018-11-01T10:16:25Z
- b27b592a by Benjamin Schubert at 2018-11-01T10:49:57Z
- 89ace5d7 by Benjamin Schubert at 2018-11-01T11:16:36Z
- 4cfabce8 by Angelos Evripiotis at 2018-11-01T11:35:02Z
- 48860aac by Tristan Van Berkom at 2018-11-01T12:01:04Z
- d868b409 by Daniel Silverstone at 2018-11-01T13:40:24Z
- 7f79b9ce by Tristan Van Berkom at 2018-11-01T14:25:57Z
- 3f15801c by Gökçen Nurlu at 2018-11-02T11:45:36Z
- 28f53e33 by Gökçen Nurlu at 2018-11-02T11:45:36Z
- efff5259 by Gökçen Nurlu at 2018-11-02T11:45:36Z
- 781017b6 by Gökçen Nurlu at 2018-11-02T11:45:36Z
- 31f47d1f by Gökçen Nurlu at 2018-11-02T11:46:03Z
- e35d54be by Gökçen Nurlu at 2018-11-02T11:46:03Z
- 25578cec by Gökçen Nurlu at 2018-11-02T11:46:03Z
13 changed files:
- NEWS
- buildstream/_frontend/app.py
- buildstream/_frontend/cli.py
- buildstream/_pipeline.py
- buildstream/_stream.py
- buildstream/_versions.py
- buildstream/_yaml.py
- buildstream/plugins/elements/manual.yaml
- setup.py
- tests/frontend/buildcheckout.py
- tests/frontend/cross_junction_workspace.py
- tests/frontend/init.py
- tests/frontend/workspace.py
Changes:
| ... | ... | @@ -2,6 +2,12 @@ | 
| 2 | 2 |  buildstream 1.3.1
 | 
| 3 | 3 |  =================
 | 
| 4 | 4 |  | 
| 5 | +  o BREAKING CHANGE: The 'manual' element lost its default 'MAKEFLAGS' and 'V'
 | |
| 6 | +    environment variables. There is already a 'make' element with the same
 | |
| 7 | +    variables. Note that this is a breaking change: it will require users to
 | |
| 8 | +    update their .bst files if they expect these environment variables
 | |
| 9 | +    to be set.
 | |
| 10 | + | |
| 5 | 11 |    o Failed builds are included in the cache as well.
 | 
| 6 | 12 |      `bst checkout` will provide anything in `%{install-root}`.
 | 
| 7 | 13 |      A build including cached fails will cause any dependant elements
 | 
| ... | ... | @@ -305,7 +305,6 @@ class App(): | 
| 305 | 305 |          directory = self._main_options['directory']
 | 
| 306 | 306 |          directory = os.path.abspath(directory)
 | 
| 307 | 307 |          project_path = os.path.join(directory, 'project.conf')
 | 
| 308 | -        elements_path = os.path.join(directory, element_path)
 | |
| 309 | 308 |  | 
| 310 | 309 |          try:
 | 
| 311 | 310 |              # Abort if the project.conf already exists, unless `--force` was specified in `bst init`
 | 
| ... | ... | @@ -335,6 +334,7 @@ class App(): | 
| 335 | 334 |                  raise AppError("Error creating project directory {}: {}".format(directory, e)) from e
 | 
| 336 | 335 |  | 
| 337 | 336 |              # Create the elements sub-directory if it doesnt exist
 | 
| 337 | +            elements_path = os.path.join(directory, element_path)
 | |
| 338 | 338 |              try:
 | 
| 339 | 339 |                  os.makedirs(elements_path, exist_ok=True)
 | 
| 340 | 340 |              except IOError as e:
 | 
| ... | ... | @@ -395,8 +395,10 @@ def fetch(app, elements, deps, track_, except_, track_cross_junctions): | 
| 395 | 395 |                help="Allow crossing junction boundaries")
 | 
| 396 | 396 |  @click.argument('elements', nargs=-1,
 | 
| 397 | 397 |                  type=click.Path(readable=False))
 | 
| 398 | +@click.option('--fetch', 'fetch_', default=False, is_flag=True,
 | |
| 399 | +              help="Enable auto-fetching of junction(s) if the elements have dependencies")
 | |
| 398 | 400 |  @click.pass_obj
 | 
| 399 | -def track(app, elements, deps, except_, cross_junctions):
 | |
| 401 | +def track(app, elements, deps, except_, cross_junctions, fetch_):
 | |
| 400 | 402 |      """Consults the specified tracking branches for new versions available
 | 
| 401 | 403 |      to build and updates the project with any newly available references.
 | 
| 402 | 404 |  | 
| ... | ... | @@ -417,7 +419,8 @@ def track(app, elements, deps, except_, cross_junctions): | 
| 417 | 419 |          app.stream.track(elements,
 | 
| 418 | 420 |                           selection=deps,
 | 
| 419 | 421 |                           except_targets=except_,
 | 
| 420 | -                         cross_junctions=cross_junctions)
 | |
| 422 | +                         cross_junctions=cross_junctions,
 | |
| 423 | +                         fetch_subprojects=fetch_)
 | |
| 421 | 424 |  | 
| 422 | 425 |  | 
| 423 | 426 |  ##################################################################
 | 
| ... | ... | @@ -431,8 +434,10 @@ def track(app, elements, deps, except_, cross_junctions): | 
| 431 | 434 |                help="The URL of the remote cache (defaults to the first configured cache)")
 | 
| 432 | 435 |  @click.argument('elements', nargs=-1,
 | 
| 433 | 436 |                  type=click.Path(readable=False))
 | 
| 437 | +@click.option('--fetch', 'fetch_', default=False, is_flag=True,
 | |
| 438 | +              help="Enable auto-fetching of junction(s) if the element has dependencies")
 | |
| 434 | 439 |  @click.pass_obj
 | 
| 435 | -def pull(app, elements, deps, remote):
 | |
| 440 | +def pull(app, elements, deps, remote, fetch_):
 | |
| 436 | 441 |      """Pull a built artifact from the configured remote artifact cache.
 | 
| 437 | 442 |  | 
| 438 | 443 |      By default the artifact will be pulled one of the configured caches
 | 
| ... | ... | @@ -446,7 +451,7 @@ def pull(app, elements, deps, remote): | 
| 446 | 451 |          all:   All dependencies
 | 
| 447 | 452 |      """
 | 
| 448 | 453 |      with app.initialized(session_name="Pull"):
 | 
| 449 | -        app.stream.pull(elements, selection=deps, remote=remote)
 | |
| 454 | +        app.stream.pull(elements, selection=deps, remote=remote, fetch_subprojects=fetch_)
 | |
| 450 | 455 |  | 
| 451 | 456 |  | 
| 452 | 457 |  ##################################################################
 | 
| ... | ... | @@ -460,8 +465,10 @@ def pull(app, elements, deps, remote): | 
| 460 | 465 |                help="The URL of the remote cache (defaults to the first configured cache)")
 | 
| 461 | 466 |  @click.argument('elements', nargs=-1,
 | 
| 462 | 467 |                  type=click.Path(readable=False))
 | 
| 468 | +@click.option('--fetch', 'fetch_', default=False, is_flag=True,
 | |
| 469 | +              help="Enable auto-fetching of junction(s) if the element has dependencies")
 | |
| 463 | 470 |  @click.pass_obj
 | 
| 464 | -def push(app, elements, deps, remote):
 | |
| 471 | +def push(app, elements, deps, remote, fetch_):
 | |
| 465 | 472 |      """Push a built artifact to a remote artifact cache.
 | 
| 466 | 473 |  | 
| 467 | 474 |      The default destination is the highest priority configured cache. You can
 | 
| ... | ... | @@ -474,7 +481,7 @@ def push(app, elements, deps, remote): | 
| 474 | 481 |          all:   All dependencies
 | 
| 475 | 482 |      """
 | 
| 476 | 483 |      with app.initialized(session_name="Push"):
 | 
| 477 | -        app.stream.push(elements, selection=deps, remote=remote)
 | |
| 484 | +        app.stream.push(elements, selection=deps, remote=remote, fetch_subprojects=fetch_)
 | |
| 478 | 485 |  | 
| 479 | 486 |  | 
| 480 | 487 |  ##################################################################
 | 
| ... | ... | @@ -643,8 +650,10 @@ def shell(app, element, sysroot, mount, isolate, build_, command): | 
| 643 | 650 |  @click.argument('element',
 | 
| 644 | 651 |                  type=click.Path(readable=False))
 | 
| 645 | 652 |  @click.argument('location', type=click.Path())
 | 
| 653 | +@click.option('--fetch', 'fetch_', default=False, is_flag=True,
 | |
| 654 | +              help="Enable auto-fetching of junction(s) if the element has dependencies")
 | |
| 646 | 655 |  @click.pass_obj
 | 
| 647 | -def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
 | |
| 656 | +def checkout(app, element, location, force, deps, integrate, hardlinks, tar, fetch_):
 | |
| 648 | 657 |      """Checkout a built artifact to the specified location
 | 
| 649 | 658 |      """
 | 
| 650 | 659 |  | 
| ... | ... | @@ -659,7 +668,8 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar): | 
| 659 | 668 |                              deps=deps,
 | 
| 660 | 669 |                              integrate=integrate,
 | 
| 661 | 670 |                              hardlinks=hardlinks,
 | 
| 662 | -                            tar=tar)
 | |
| 671 | +                            tar=tar,
 | |
| 672 | +                            fetch_subprojects=fetch_)
 | |
| 663 | 673 |  | 
| 664 | 674 |  | 
| 665 | 675 |  ##################################################################
 | 
| ... | ... | @@ -684,8 +694,10 @@ def workspace(): | 
| 684 | 694 |  @click.argument('element',
 | 
| 685 | 695 |                  type=click.Path(readable=False))
 | 
| 686 | 696 |  @click.argument('directory', type=click.Path(file_okay=False))
 | 
| 697 | +@click.option('--fetch', 'fetch_', default=False, is_flag=True,
 | |
| 698 | +              help="Enable auto-fetching of the element and related junction(s)")
 | |
| 687 | 699 |  @click.pass_obj
 | 
| 688 | -def workspace_open(app, no_checkout, force, track_, element, directory):
 | |
| 700 | +def workspace_open(app, no_checkout, force, track_, element, directory, fetch_):
 | |
| 689 | 701 |      """Open a workspace for manual source modification"""
 | 
| 690 | 702 |  | 
| 691 | 703 |      if os.path.exists(directory):
 | 
| ... | ... | @@ -702,7 +714,8 @@ def workspace_open(app, no_checkout, force, track_, element, directory): | 
| 702 | 714 |          app.stream.workspace_open(element, directory,
 | 
| 703 | 715 |                                    no_checkout=no_checkout,
 | 
| 704 | 716 |                                    track_first=track_,
 | 
| 705 | -                                  force=force)
 | |
| 717 | +                                  force=force,
 | |
| 718 | +                                  fetch=fetch_)
 | |
| 706 | 719 |  | 
| 707 | 720 |  | 
| 708 | 721 |  ##################################################################
 | 
| ... | ... | @@ -764,8 +777,10 @@ def workspace_close(app, remove_dir, all_, elements): | 
| 764 | 777 |                help="Reset all open workspaces")
 | 
| 765 | 778 |  @click.argument('elements', nargs=-1,
 | 
| 766 | 779 |                  type=click.Path(readable=False))
 | 
| 780 | +@click.option('--fetch', 'fetch_', default=False, is_flag=True,
 | |
| 781 | +              help="Enable auto-fetching of the element and related junction(s)")
 | |
| 767 | 782 |  @click.pass_obj
 | 
| 768 | -def workspace_reset(app, soft, track_, all_, elements):
 | |
| 783 | +def workspace_reset(app, soft, track_, all_, elements, fetch_):
 | |
| 769 | 784 |      """Reset a workspace to its original state"""
 | 
| 770 | 785 |  | 
| 771 | 786 |      # Check that the workspaces in question exist
 | 
| ... | ... | @@ -785,7 +800,7 @@ def workspace_reset(app, soft, track_, all_, elements): | 
| 785 | 800 |          if all_:
 | 
| 786 | 801 |              elements = tuple(element_name for element_name, _ in app.context.get_workspaces().list())
 | 
| 787 | 802 |  | 
| 788 | -        app.stream.workspace_reset(elements, soft=soft, track_first=track_)
 | |
| 803 | +        app.stream.workspace_reset(elements, soft=soft, track_first=track_, fetch=fetch_)
 | |
| 789 | 804 |  | 
| 790 | 805 |  | 
| 791 | 806 |  ##################################################################
 | 
| ... | ... | @@ -818,9 +833,11 @@ def workspace_list(app): | 
| 818 | 833 |                help="The directory to write the tarball to")
 | 
| 819 | 834 |  @click.argument('element',
 | 
| 820 | 835 |                  type=click.Path(readable=False))
 | 
| 836 | +@click.option('--fetch', 'fetch_', default=False, is_flag=True,
 | |
| 837 | +              help="Enable auto-fetching of the element and related junction(s)")
 | |
| 821 | 838 |  @click.pass_obj
 | 
| 822 | 839 |  def source_bundle(app, element, force, directory,
 | 
| 823 | -                  track_, compression, except_):
 | |
| 840 | +                  track_, compression, except_, fetch_):
 | |
| 824 | 841 |      """Produce a source bundle to be manually executed
 | 
| 825 | 842 |      """
 | 
| 826 | 843 |      with app.initialized():
 | 
| ... | ... | @@ -828,4 +845,5 @@ def source_bundle(app, element, force, directory, | 
| 828 | 845 |                                   track_first=track_,
 | 
| 829 | 846 |                                   force=force,
 | 
| 830 | 847 |                                   compression=compression,
 | 
| 831 | -                                 except_targets=except_) | |
| 848 | +                                 except_targets=except_,
 | |
| 849 | +                                 fetch=fetch_) | 
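Taken together, the CLI changes above turn junction fetching into an explicit
opt-in rather than an implicit side effect. Illustrative invocations (element
names here are hypothetical): `bst track --fetch core.bst`, or
`bst workspace open --fetch element.bst workspace/`; the latter form appears
verbatim in the test updates at the end of this diff.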
| ... | ... | @@ -383,6 +383,33 @@ class Pipeline(): | 
| 383 | 383 |                  detail += "  " + element._get_full_name() + "\n"
 | 
| 384 | 384 |              raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline-workspaced")
 | 
| 385 | 385 |  | 
| 386 | +    # assert_sources_cached()
 | |
| 387 | +    #
 | |
| 388 | +    # Asserts that sources for the given list of elements are cached.
 | |
| 389 | +    #
 | |
| 390 | +    # Args:
 | |
| 391 | +    #    elements (list): The list of elements
 | |
| 392 | +    #
 | |
| 393 | +    def assert_sources_cached(self, elements):
 | |
| 394 | +        uncached = []
 | |
| 395 | +        with self._context.timed_activity("Checking sources"):
 | |
| 396 | +            for element in elements:
 | |
| 397 | +                if element._get_consistency() != Consistency.CACHED:
 | |
| 398 | +                    uncached.append(element)
 | |
| 399 | + | |
| 400 | +        if uncached:
 | |
| 401 | +            detail = "Sources are not cached for the following elements:\n\n"
 | |
| 402 | +            for element in uncached:
 | |
| 403 | +                detail += "  Following sources for element: {} are not cached:\n".format(element._get_full_name())
 | |
| 404 | +                for source in element.sources():
 | |
| 405 | +                    if source._get_consistency() != Consistency.CACHED:
 | |
| 406 | +                        detail += "    {}\n".format(source)
 | |
| 407 | +                detail += '\n'
 | |
| 408 | +            detail += "Try fetching these elements first with `bst fetch`,\n" + \
 | |
| 409 | +                      "or run this command with `--fetch` option\n"
 | |
| 410 | + | |
| 411 | +            raise PipelineError("Uncached sources", detail=detail, reason="uncached-sources")
 | |
| 412 | + | |
| 386 | 413 |      #############################################################
 | 
| 387 | 414 |      #                     Private Methods                       #
 | 
| 388 | 415 |      #############################################################
 | 
| ... | ... | @@ -256,6 +256,7 @@ class Stream(): | 
| 256 | 256 |      #    selection (PipelineSelection): The selection mode for the specified targets
 | 
| 257 | 257 |      #    except_targets (list of str): Specified targets to except from tracking
 | 
| 258 | 258 |      #    cross_junctions (bool): Whether tracking should cross junction boundaries
 | 
| 259 | +    #    fetch_subprojects (bool): Flag to enable auto-fetch of junctions if they are needed by `targets`
 | |
| 259 | 260 |      #
 | 
| 260 | 261 |      # If no error is encountered while tracking, then the project files
 | 
| 261 | 262 |      # are rewritten inline.
 | 
| ... | ... | @@ -263,7 +264,8 @@ class Stream(): | 
| 263 | 264 |      def track(self, targets, *,
 | 
| 264 | 265 |                selection=PipelineSelection.REDIRECT,
 | 
| 265 | 266 |                except_targets=None,
 | 
| 266 | -              cross_junctions=False):
 | |
| 267 | +              cross_junctions=False,
 | |
| 268 | +              fetch_subprojects):
 | |
| 267 | 269 |  | 
| 268 | 270 |          # We pass no target to build. Only to track. Passing build targets
 | 
| 269 | 271 |          # would fully load project configuration which might not be
 | 
| ... | ... | @@ -274,7 +276,7 @@ class Stream(): | 
| 274 | 276 |                         except_targets=except_targets,
 | 
| 275 | 277 |                         track_except_targets=except_targets,
 | 
| 276 | 278 |                         track_cross_junctions=cross_junctions,
 | 
| 277 | -                       fetch_subprojects=True)
 | |
| 279 | +                       fetch_subprojects=fetch_subprojects)
 | |
| 278 | 280 |  | 
| 279 | 281 |          track_queue = TrackQueue(self._scheduler)
 | 
| 280 | 282 |          self._add_queue(track_queue, track=True)
 | 
| ... | ... | @@ -289,13 +291,15 @@ class Stream(): | 
| 289 | 291 |      #    targets (list of str): Targets to pull
 | 
| 290 | 292 |      #    selection (PipelineSelection): The selection mode for the specified targets
 | 
| 291 | 293 |      #    remote (str): The URL of a specific remote server to pull from, or None
 | 
| 294 | +    #    fetch_subprojects (bool): Flag to enable auto-fetch of junctions if they are needed by `targets`
 | |
| 292 | 295 |      #
 | 
| 293 | 296 |      # If `remote` specified as None, then regular configuration will be used
 | 
| 294 | 297 |      # to determine where to pull artifacts from.
 | 
| 295 | 298 |      #
 | 
| 296 | 299 |      def pull(self, targets, *,
 | 
| 297 | 300 |               selection=PipelineSelection.NONE,
 | 
| 298 | -             remote=None):
 | |
| 301 | +             remote=None,
 | |
| 302 | +             fetch_subprojects):
 | |
| 299 | 303 |  | 
| 300 | 304 |          use_config = True
 | 
| 301 | 305 |          if remote:
 | 
| ... | ... | @@ -305,7 +309,7 @@ class Stream(): | 
| 305 | 309 |                                   selection=selection,
 | 
| 306 | 310 |                                   use_artifact_config=use_config,
 | 
| 307 | 311 |                                   artifact_remote_url=remote,
 | 
| 308 | -                                 fetch_subprojects=True)
 | |
| 312 | +                                 fetch_subprojects=fetch_subprojects)
 | |
| 309 | 313 |  | 
| 310 | 314 |          if not self._artifacts.has_fetch_remotes():
 | 
| 311 | 315 |              raise StreamError("No artifact caches available for pulling artifacts")
 | 
| ... | ... | @@ -323,13 +327,15 @@ class Stream(): | 
| 323 | 327 |      #    targets (list of str): Targets to push
 | 
| 324 | 328 |      #    selection (PipelineSelection): The selection mode for the specified targets
 | 
| 325 | 329 |      #    remote (str): The URL of a specific remote server to push to, or None
 | 
| 330 | +    #    fetch_subprojects (bool): Flag to enable auto-fetch of junctions if they are needed by `targets`
 | |
| 326 | 331 |      #
 | 
| 327 | 332 |      # If `remote` specified as None, then regular configuration will be used
 | 
| 328 | 333 |      # to determine where to push artifacts to.
 | 
| 329 | 334 |      #
 | 
| 330 | 335 |      def push(self, targets, *,
 | 
| 331 | 336 |               selection=PipelineSelection.NONE,
 | 
| 332 | -             remote=None):
 | |
| 337 | +             remote=None,
 | |
| 338 | +             fetch_subprojects):
 | |
| 333 | 339 |  | 
| 334 | 340 |          use_config = True
 | 
| 335 | 341 |          if remote:
 | 
| ... | ... | @@ -339,7 +345,7 @@ class Stream(): | 
| 339 | 345 |                                   selection=selection,
 | 
| 340 | 346 |                                   use_artifact_config=use_config,
 | 
| 341 | 347 |                                   artifact_remote_url=remote,
 | 
| 342 | -                                 fetch_subprojects=True)
 | |
| 348 | +                                 fetch_subprojects=fetch_subprojects)
 | |
| 343 | 349 |  | 
| 344 | 350 |          if not self._artifacts.has_push_remotes():
 | 
| 345 | 351 |              raise StreamError("No artifact caches available for pushing artifacts")
 | 
| ... | ... | @@ -366,6 +372,7 @@ class Stream(): | 
| 366 | 372 |      #                will be placed at the given location. If true and
 | 
| 367 | 373 |      #                location is '-', the tarball will be dumped on the
 | 
| 368 | 374 |      #                standard output.
 | 
| 375 | +    #    fetch_subprojects (bool): Flag to enable auto-fetch of junctions if they are needed by `targets`
 | |
| 369 | 376 |      #
 | 
| 370 | 377 |      def checkout(self, target, *,
 | 
| 371 | 378 |                   location=None,
 | 
| ... | ... | @@ -373,10 +380,11 @@ class Stream(): | 
| 373 | 380 |                   deps='run',
 | 
| 374 | 381 |                   integrate=True,
 | 
| 375 | 382 |                   hardlinks=False,
 | 
| 376 | -                 tar=False):
 | |
| 383 | +                 tar=False,
 | |
| 384 | +                 fetch_subprojects):
 | |
| 377 | 385 |  | 
| 378 | 386 |          # We only have one target in a checkout command
 | 
| 379 | -        elements, _ = self._load((target,), (), fetch_subprojects=True)
 | |
| 387 | +        elements, _ = self._load((target,), (), fetch_subprojects=fetch_subprojects)
 | |
| 380 | 388 |          target = elements[0]
 | 
| 381 | 389 |  | 
| 382 | 390 |          if not tar:
 | 
| ... | ... | @@ -453,11 +461,13 @@ class Stream(): | 
| 453 | 461 |      #    no_checkout (bool): Whether to skip checking out the source
 | 
| 454 | 462 |      #    track_first (bool): Whether to track and fetch first
 | 
| 455 | 463 |      #    force (bool): Whether to ignore contents in an existing directory
 | 
| 464 | +    #    fetch (bool): Enable auto-fetching of target and related junction(s)
 | |
| 456 | 465 |      #
 | 
| 457 | 466 |      def workspace_open(self, target, directory, *,
 | 
| 458 | 467 |                         no_checkout,
 | 
| 459 | 468 |                         track_first,
 | 
| 460 | -                       force):
 | |
| 469 | +                       force,
 | |
| 470 | +                       fetch):
 | |
| 461 | 471 |  | 
| 462 | 472 |          if track_first:
 | 
| 463 | 473 |              track_targets = (target,)
 | 
| ... | ... | @@ -466,7 +476,8 @@ class Stream(): | 
| 466 | 476 |  | 
| 467 | 477 |          elements, track_elements = self._load((target,), track_targets,
 | 
| 468 | 478 |                                                selection=PipelineSelection.REDIRECT,
 | 
| 469 | -                                              track_selection=PipelineSelection.REDIRECT)
 | |
| 479 | +                                              track_selection=PipelineSelection.REDIRECT,
 | |
| 480 | +                                              fetch_subprojects=fetch)
 | |
| 470 | 481 |          target = elements[0]
 | 
| 471 | 482 |          directory = os.path.abspath(directory)
 | 
| 472 | 483 |  | 
| ... | ... | @@ -486,20 +497,21 @@ class Stream(): | 
| 486 | 497 |              raise StreamError("Workspace '{}' is already defined at: {}"
 | 
| 487 | 498 |                                .format(target.name, workspace.get_absolute_path()))
 | 
| 488 | 499 |  | 
| 489 | -        # If we're going to checkout, we need at least a fetch,
 | |
| 490 | -        # if we were asked to track first, we're going to fetch anyway.
 | |
| 491 | -        #
 | |
| 492 | -        if not no_checkout or track_first:
 | |
| 500 | +        # If we're going to checkout, we need to handle fetching and tracking
 | |
| 501 | +        if not no_checkout:
 | |
| 493 | 502 |              track_elements = []
 | 
| 494 | 503 |              if track_first:
 | 
| 495 | 504 |                  track_elements = elements
 | 
| 496 | -            self._fetch(elements, track_elements=track_elements)
 | |
| 497 | 505 |  | 
| 498 | -        if not no_checkout and target._get_consistency() != Consistency.CACHED:
 | |
| 499 | -            raise StreamError("Could not stage uncached source. " +
 | |
| 500 | -                              "Use `--track` to track and " +
 | |
| 501 | -                              "fetch the latest version of the " +
 | |
| 502 | -                              "source.")
 | |
| 506 | +            fetch_elements = []
 | |
| 507 | +            if fetch:
 | |
| 508 | +                fetch_elements = elements
 | |
| 509 | + | |
| 510 | +            self._fetch(fetch_elements, track_elements=track_elements)
 | |
| 511 | +            # _fetch above might have fetched only, tracked only, done both,
 | |
| 512 | +            # or done nothing. We still need to ensure that the element's
 | |
| 513 | +            # referenced sources are available.
 | |
| 514 | +            self._pipeline.assert_sources_cached(elements)
 | |
| 503 | 515 |  | 
| 504 | 516 |          if workspace:
 | 
| 505 | 517 |              workspaces.delete_workspace(target._get_full_name())
 | 
| ... | ... | @@ -555,8 +567,9 @@ class Stream(): | 
| 555 | 567 |      #    targets (list of str): The target elements to reset the workspace for
 | 
| 556 | 568 |      #    soft (bool): Only reset workspace state
 | 
| 557 | 569 |      #    track_first (bool): Whether to also track the sources first
 | 
| 570 | +    #    fetch (bool): Enable auto-fetching of target and related junction(s)
 | |
| 558 | 571 |      #
 | 
| 559 | -    def workspace_reset(self, targets, *, soft, track_first):
 | |
| 572 | +    def workspace_reset(self, targets, *, soft, track_first, fetch):
 | |
| 560 | 573 |  | 
| 561 | 574 |          if track_first:
 | 
| 562 | 575 |              track_targets = targets
 | 
| ... | ... | @@ -565,7 +578,8 @@ class Stream(): | 
| 565 | 578 |  | 
| 566 | 579 |          elements, track_elements = self._load(targets, track_targets,
 | 
| 567 | 580 |                                                selection=PipelineSelection.REDIRECT,
 | 
| 568 | -                                              track_selection=PipelineSelection.REDIRECT)
 | |
| 581 | +                                              track_selection=PipelineSelection.REDIRECT,
 | |
| 582 | +                                              fetch_subprojects=fetch)
 | |
| 569 | 583 |  | 
| 570 | 584 |          nonexisting = []
 | 
| 571 | 585 |          for element in elements:
 | 
| ... | ... | @@ -574,9 +588,18 @@ class Stream(): | 
| 574 | 588 |          if nonexisting:
 | 
| 575 | 589 |              raise StreamError("Workspace does not exist", detail="\n".join(nonexisting))
 | 
| 576 | 590 |  | 
| 577 | -        # Do the tracking first
 | |
| 591 | +        to_track = []
 | |
| 578 | 592 |          if track_first:
 | 
| 579 | -            self._fetch(elements, track_elements=track_elements)
 | |
| 593 | +            to_track = track_elements
 | |
| 594 | + | |
| 595 | +        to_fetch = []
 | |
| 596 | +        if fetch:
 | |
| 597 | +            to_fetch = elements
 | |
| 598 | + | |
| 599 | +        if to_fetch or to_track:
 | |
| 600 | +            self._fetch(to_fetch, track_elements=to_track)
 | |
| 601 | + | |
| 602 | +        self._pipeline.assert_sources_cached(elements)
 | |
| 580 | 603 |  | 
| 581 | 604 |          workspaces = self._context.get_workspaces()
 | 
| 582 | 605 |  | 
| ... | ... | @@ -660,12 +683,14 @@ class Stream(): | 
| 660 | 683 |      #    track_first (bool): Track new source references before bundling
 | 
| 661 | 684 |      #    compression (str): The compression type to use
 | 
| 662 | 685 |      #    force (bool): Overwrite an existing tarball
 | 
| 686 | +    #    fetch (bool): Enable auto-fetching of target and related junction(s)
 | |
| 663 | 687 |      #
 | 
| 664 | 688 |      def source_bundle(self, target, directory, *,
 | 
| 665 | 689 |                        track_first=False,
 | 
| 666 | 690 |                        force=False,
 | 
| 667 | 691 |                        compression="gz",
 | 
| 668 | -                      except_targets=()):
 | |
| 692 | +                      except_targets=(),
 | |
| 693 | +                      fetch=False):
 | |
| 669 | 694 |  | 
| 670 | 695 |          if track_first:
 | 
| 671 | 696 |              track_targets = (target,)
 | 
| ... | ... | @@ -676,7 +701,7 @@ class Stream(): | 
| 676 | 701 |                                                selection=PipelineSelection.ALL,
 | 
| 677 | 702 |                                                except_targets=except_targets,
 | 
| 678 | 703 |                                                track_selection=PipelineSelection.ALL,
 | 
| 679 | -                                              fetch_subprojects=True)
 | |
| 704 | +                                              fetch_subprojects=fetch)
 | |
| 680 | 705 |  | 
| 681 | 706 |          # source-bundle only supports one target
 | 
| 682 | 707 |          target = self.targets[0]
 | 
| ... | ... | @@ -701,7 +726,12 @@ class Stream(): | 
| 701 | 726 |  | 
| 702 | 727 |          # Fetch and possibly track first
 | 
| 703 | 728 |          #
 | 
| 704 | -        self._fetch(elements, track_elements=track_elements)
 | |
| 729 | +        if fetch:
 | |
| 730 | +            self._fetch(elements, track_elements=track_elements)
 | |
| 731 | +        else:
 | |
| 732 | +            # No fetch, just tracking
 | |
| 733 | +            self._fetch([], track_elements=track_elements)
 | |
| 734 | +            self._pipeline.assert_sources_cached(elements)
 | |
| 705 | 735 |  | 
| 706 | 736 |          # We don't use the scheduler for this as it is almost entirely IO
 | 
| 707 | 737 |          # bound.
 | 
| ... | ... | @@ -23,7 +23,7 @@ | 
| 23 | 23 |  # This version is bumped whenever enhancements are made
 | 
| 24 | 24 |  # to the `project.conf` format or the core element format.
 | 
| 25 | 25 |  #
 | 
| 26 | -BST_FORMAT_VERSION = 17
 | |
| 26 | +BST_FORMAT_VERSION = 18
 | |
| 27 | 27 |  | 
| 28 | 28 |  | 
| 29 | 29 |  # The base BuildStream artifact version
 | 
| ... | ... | @@ -1049,6 +1049,12 @@ class ChainMap(collections.ChainMap): | 
| 1049 | 1049 |          for key in clearable:
 | 
| 1050 | 1050 |              del self[key]
 | 
| 1051 | 1051 |  | 
| 1052 | +    def get(self, key, default=None):
 | |
| 1053 | +        try:
 | |
| 1054 | +            return self[key]
 | |
| 1055 | +        except KeyError:
 | |
| 1056 | +            return default
 | |
| 1057 | + | |
| 1052 | 1058 |  | 
| 1053 | 1059 |  def node_chain_copy(source):
 | 
| 1054 | 1060 |      copy = ChainMap({}, source)
 | 
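The `get()` override above matters because `collections.ChainMap.get()` is
defined as `self[key] if key in self else default`: when a subclass's
`__getitem__` can raise KeyError for a key that membership testing still finds
in an underlying map (as the deletion-oriented `clearable` logic above suggests
is the case here), the inherited `get()` raises instead of returning the
default. A minimal standalone sketch of that failure mode, using a hypothetical
sentinel-based subclass rather than BuildStream's actual implementation:

    import collections

    _DELETED = object()  # sentinel recording a deletion in the top map


    class DeletableChainMap(collections.ChainMap):
        """Toy ChainMap where deletions are sentinel overrides."""

        def __delitem__(self, key):
            self.maps[0][key] = _DELETED

        def __getitem__(self, key):
            value = super().__getitem__(key)
            if value is _DELETED:
                raise KeyError(key)
            return value

        def get(self, key, default=None):
            # Without this, the inherited get() evaluates
            # "self[key] if key in self else default"; __contains__
            # still sees the deleted key in an underlying map, so
            # __getitem__ raises instead of falling back to default.
            try:
                return self[key]
            except KeyError:
                return default


    cm = DeletableChainMap({}, {'a': 1})
    del cm['a']
    assert cm.get('a', 'fallback') == 'fallback'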
| 1 | -# No variables added for the manual element by default, set
 | |
| 2 | -# this if you plan to use make, and the sources cannot handle
 | |
| 3 | -# parallelization.
 | |
| 4 | -#
 | |
| 5 | -# variables:
 | |
| 6 | -#
 | |
| 7 | -#   notparallel: True
 | |
| 8 | - | |
| 9 | 1 |  # Manual build element does not provide any default
 | 
| 10 | 2 |  # build commands
 | 
| 11 | 3 |  config:
 | 
| ... | ... | @@ -28,14 +20,3 @@ config: | 
| 28 | 20 |    strip-commands:
 | 
| 29 | 21 |    - |
 | 
| 30 | 22 |      %{strip-binaries} | 
| 31 | - | |
| 32 | -# Use max-jobs CPUs for building and enable verbosity
 | |
| 33 | -environment:
 | |
| 34 | -  MAKEFLAGS: -j%{max-jobs}
 | |
| 35 | -  V: 1
 | |
| 36 | - | |
| 37 | -# And dont consider MAKEFLAGS or V as something which may
 | |
| 38 | -# affect build output.
 | |
| 39 | -environment-nocache:
 | |
| 40 | -- MAKEFLAGS
 | |
| 41 | -- V | 
| ... | ... | @@ -39,6 +39,7 @@ if sys.version_info[0] != REQUIRED_PYTHON_MAJOR or sys.version_info[1] < REQUIRE | 
| 39 | 39 |  try:
 | 
| 40 | 40 |      from setuptools import setup, find_packages, Command
 | 
| 41 | 41 |      from setuptools.command.easy_install import ScriptWriter
 | 
| 42 | +    from setuptools.command.test import test as TestCommand
 | |
| 42 | 43 |  except ImportError:
 | 
| 43 | 44 |      print("BuildStream requires setuptools in order to build. Install it using"
 | 
| 44 | 45 |            " your package manager (usually python3-setuptools) or via pip (pip3"
 | 
| ... | ... | @@ -219,9 +220,48 @@ class BuildGRPC(Command): | 
| 219 | 220 |                          f.write(code)
 | 
| 220 | 221 |  | 
| 221 | 222 |  | 
| 223 | +#####################################################
 | |
| 224 | +#                   Pytest command                  #
 | |
| 225 | +#####################################################
 | |
| 226 | +class PyTest(TestCommand):
 | |
| 227 | +    """Defines a pytest command class to run tests from setup.py"""
 | |
| 228 | + | |
| 229 | +    user_options = TestCommand.user_options + [
 | |
| 230 | +        ("addopts=", None, "Arguments to pass to pytest"),
 | |
| 231 | +        ('index-url=''build_grpc': BuildGRPC,
 | |
| 264 | +        'pytest': PyTest,
 | |
| 225 | 265 |      }
 | 
| 226 | 266 |      cmdclass.update(versioneer.get_cmdclass())
 | 
| 227 | 267 |      return cmdclass
 | 
| ... | ... | @@ -305,6 +345,5 @@ setup(name='BuildStream', | 
| 305 | 345 |            'grpcio >= 1.10',
 | 
| 306 | 346 |        ],
 | 
| 307 | 347 |        entry_points=bst_install_entry_points,
 | 
| 308 | -      setup_requires=['pytest-runner'],
 | |
| 309 | 348 |        tests_require=dev_requires,
 | 
| 310 | 349 |        zip_safe=False) | 
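With `setup_requires=['pytest-runner']` removed, the new `PyTest` command class
presumably becomes the way to run the suite from setup.py, along the lines of
`python3 setup.py pytest --addopts='-k frontend'`. This is a sketch of the
invocation only: the body of the class, including how `addopts` and `index-url`
are consumed, is truncated in this excerpt.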
| ... | ... | @@ -510,6 +510,8 @@ def test_build_checkout_workspaced_junction(cli, tmpdir, datafiles): | 
| 510 | 510 |      # Now open a workspace on the junction
 | 
| 511 | 511 |      #
 | 
| 512 | 512 |      result = cli.run(project=project, args=['workspace', 'open', 'junction.bst', workspace])
 | 
| 513 | +    result.assert_main_error(ErrorDomain.PIPELINE, 'uncached-sources')
 | |
| 514 | +    result = cli.run(project=project, args=['workspace', 'open', 'junction.bst', workspace, '--fetch'])
 | |
| 513 | 515 |      result.assert_success()
 | 
| 514 | 516 |      filename = os.path.join(workspace, 'files', 'etc-files', 'etc', 'animal.conf')
 | 
| 515 | 517 |  | 
| ... | ... | @@ -47,7 +47,7 @@ def open_cross_junction(cli, tmpdir): | 
| 47 | 47 |      workspace = tmpdir.join("workspace")
 | 
| 48 | 48 |  | 
| 49 | 49 |      element = 'sub.bst:data.bst'
 | 
| 50 | -    args = ['workspace', 'open', element, str(workspace)]
 | |
| 50 | +    args = ['workspace', 'open', element, str(workspace), '--fetch']
 | |
| 51 | 51 |      result = cli.run(project=project, args=args)
 | 
| 52 | 52 |      result.assert_success()
 | 
| 53 | 53 |  | 
| ... | ... | @@ -3,6 +3,7 @@ import pytest | 
| 3 | 3 |  from tests.testutils import cli
 | 
| 4 | 4 |  | 
| 5 | 5 |  from buildstream import _yaml
 | 
| 6 | +from buildstream._frontend.app import App
 | |
| 6 | 7 |  from buildstream._exceptions import ErrorDomain, LoadErrorReason
 | 
| 7 | 8 |  from buildstream._versions import BST_FORMAT_VERSION
 | 
| 8 | 9 |  | 
| ... | ... | @@ -98,3 +99,34 @@ def test_bad_element_path(cli, tmpdir, element_path): | 
| 98 | 99 |          'init', '--project-name', 'foo', '--element-path', element_path
 | 
| 99 | 100 |      ])
 | 
| 100 | 101 |      result.assert_main_error(ErrorDomain.APP, 'invalid-element-path')
 | 
| 102 | + | |
| 103 | + | |
| 104 | +@pytest.mark.parametrize("element_path", [('foo'), ('foo/bar')])
 | |
| 105 | +def test_element_path_interactive(cli, tmp_path, monkeypatch, element_path):
 | |
| 106 | +    project = tmp_path
 | |
| 107 | +    project_conf_path = project.joinpath('project.conf')
 | |
| 108 | + | |
| 109 | +    class DummyInteractiveApp(App):
 | |
| 110 | +        def __init__(self, *args, **kwargs):
 | |
| 111 | +            super().__init__(*args, **kwargs)
 | |
| 112 | +            self.interactive = True
 | |
| 113 | + | |
| 114 | +        @classmethod
 | |
| 115 | +        def create(cls, *args, **kwargs):
 | |
| 116 | +            return DummyInteractiveApp(*args, **kwargs)
 | |
| 117 | + | |
| 118 | +        def _init_project_interactive(self, *args, **kwargs):
 | |
| 119 | +            return ('project_name', '0', element_path)
 | |
| 120 | + | |
| 121 | +    monkeypatch.setattr(App, 'create', DummyInteractiveApp.create)
 | |
| 122 | + | |
| 123 | +    result = cli.run(project=str(project), args=['init'])
 | |
| 124 | +    result.assert_success()
 | |
| 125 | + | |
| 126 | +    full_element_path = project.joinpath(element_path)
 | |
| 127 | +    assert full_element_path.exists()
 | |
| 128 | + | |
| 129 | +    project_conf = _yaml.load(str(project_conf_path))
 | |
| 130 | +    assert project_conf['name'] == 'project_name'
 | |
| 131 | +    assert project_conf['format-version'] == '0'
 | |
| 132 | +    assert project_conf['element-path'] == element_path | 
| ... | ... | @@ -85,7 +85,7 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir | 
| 85 | 85 |  | 
| 86 | 86 |      # Now open the workspace, this should have the effect of automatically
 | 
| 87 | 87 |      # tracking & fetching the source from the repo.
 | 
| 88 | -    args = ['workspace', 'open']
 | |
| 88 | +    args = ['workspace', 'open', '--fetch']
 | |
| 89 | 89 |      if track:
 | 
| 90 | 90 |          args.append('--track')
 | 
| 91 | 91 |      args.extend([element_name, workspace_dir])
 | 
