Angelos Evripiotis pushed to branch aevri/doc_arch_tweaks at BuildStream / buildstream
Commits:
- b8561fb1 by Chandan Singh at 2019-02-15T14:58:22Z
- 2699c77c by Chandan Singh at 2019-02-15T14:58:31Z
- 677fc6c5 by Chandan Singh at 2019-02-15T16:09:21Z
- 3b889eb8 by Javier Jardón at 2019-02-15T20:22:08Z
- 232906b1 by Javier Jardón at 2019-02-15T23:08:50Z
- ee2296ab by Tristan Van Berkom at 2019-02-18T08:41:57Z
- c79696fe by Tristan Van Berkom at 2019-02-18T09:43:22Z
- fb65af6c by Jürg Billeter at 2019-02-18T10:29:25Z
- f5660fa0 by Jim MacArthur at 2019-02-18T10:29:25Z
- cdcf0dc5 by Jürg Billeter at 2019-02-18T10:29:25Z
- f9dd6ea2 by Jim MacArthur at 2019-02-18T10:29:25Z
- 050249bb by Jürg Billeter at 2019-02-18T10:29:25Z
- 3b881efc by Jürg Billeter at 2019-02-18T10:29:25Z
- 3832c0d1 by Jim MacArthur at 2019-02-18T10:29:25Z
- ef85e3b2 by Jürg Billeter at 2019-02-18T11:18:10Z
- e0f0a01d by Chandan Singh at 2019-02-18T12:41:31Z
- f149fed2 by Tristan Van Berkom at 2019-02-18T13:43:28Z
- 4022234e by Tristan Van Berkom at 2019-02-18T13:44:29Z
- 86466e7e by Tristan Van Berkom at 2019-02-18T14:27:08Z
- c8cd24b9 by Jonathan Maw at 2019-02-18T17:37:00Z
- de70f8c0 by Jonathan Maw at 2019-02-18T18:57:45Z
- 00b86b19 by Chandan Singh at 2019-02-18T18:59:31Z
- 067a0c6b by Chandan Singh at 2019-02-18T20:31:10Z
- fca37d0d by Chandan Singh at 2019-02-19T06:37:04Z
- 5f0571d1 by Jürg Billeter at 2019-02-19T07:37:21Z
- e2074693 by Gökçen Nurlu at 2019-02-19T07:44:54Z
- cfbe409d by Jürg Billeter at 2019-02-19T09:00:31Z
- 1ae17968 by Valentin David at 2019-02-19T09:01:48Z
- afe823e8 by Jürg Billeter at 2019-02-19T10:33:59Z
- d70bfc38 by Adam Jones at 2019-02-19T10:34:37Z
- 857e7414 by Tom Pollard at 2019-02-19T11:37:15Z
- 9bc389a8 by Jürg Billeter at 2019-02-19T11:39:44Z
- 4a002bee by Jürg Billeter at 2019-02-19T13:17:00Z
- 1cd27edf by Jürg Billeter at 2019-02-19T13:19:43Z
- 69675d22 by Jürg Billeter at 2019-02-19T13:27:31Z
- 99764715 by Jürg Billeter at 2019-02-19T15:03:58Z
- fed74896 by Angelos Evripiotis at 2019-02-19T16:42:09Z
- c855b3b3 by Angelos Evripiotis at 2019-02-19T16:42:09Z
- 16287f51 by Angelos Evripiotis at 2019-02-19T16:42:09Z

22 changed files:
- .gitlab-ci.yml
- .gitlab/issue_templates/bst_bug.md
- .gitlab/issue_templates/bst_task.md
- CONTRIBUTING.rst
- NEWS
- README.rst
- buildstream/_artifactcache.py
- buildstream/_yaml.py
- buildstream/element.py
- buildstream/storage/_casbaseddirectory.py
- buildstream/storage/_filebaseddirectory.py
- buildstream/storage/directory.py
- contrib/bst-here
- doc/badges.py
- doc/source/arch_data_model.rst
- requirements/Makefile
- + tests/frontend/artifact.py
- tests/frontend/overlaps/a-whitelisted.bst
- tests/frontend/overlaps/b-whitelisted.bst
- tests/frontend/overlaps/c-whitelisted.bst
- tests/integration/artifact.py
- tests/integration/shell.py

Changes:
| ... | ... | @@ -160,7 +160,7 @@ tests-wsl: | 
| 160 | 160 | 
 | 
| 161 | 161 | 
   script:
 | 
| 162 | 162 | 
   - "${TEST_COMMAND}"
 | 
| 163 | 
-  when: manual
 | 
|
| 163 | 
+  allow_failure: true
 | 
|
| 164 | 164 | 
 | 
| 165 | 165 | 
 # Automatically build documentation for every commit, we want to know
 | 
| 166 | 166 | 
 # if building documentation fails even if we're not deploying it.
 | 
| ... | ... | @@ -281,10 +281,12 @@ coverage: | 
| 281 | 281 | 
     - tox -e coverage
 | 
| 282 | 282 | 
     - cp -a .coverage-reports/ ./coverage-report
 | 
| 283 | 283 | 
   dependencies:
 | 
| 284 | 
+  - tests-centos-7.6
 | 
|
| 284 | 285 | 
   - tests-debian-9
 | 
| 285 | 286 | 
   - tests-fedora-28
 | 
| 286 | 287 | 
   - tests-fedora-29
 | 
| 287 | 288 | 
   - tests-fedora-missing-deps
 | 
| 289 | 
+  - tests-fedora-update-deps
 | 
|
| 288 | 290 | 
   - tests-ubuntu-18.04
 | 
| 289 | 291 | 
   - tests-unix
 | 
| 290 | 292 | 
   except:
 | 
| ... | ... | @@ -33,4 +33,6 @@ | 
| 33 | 33 | 
 * BuildStream version affected: /milestone %BuildStream_v1.x
 | 
| 34 | 34 | 
 | 
| 35 | 35 | 
 ----
 | 
| 36 | 
+[//]: # (To review information about possible relevant labels for this issue please view the list of labels: https://gitlab.com/BuildStream/buildstream/labels)
 | 
|
| 37 | 
+  | 
|
| 36 | 38 | 
 /label ~bug
 | 
| ... | ... | @@ -15,3 +15,5 @@ | 
| 15 | 15 | 
 [//]: # (Acceptance criteria should follow the S.M.A.R.T. principle https://en.wikipedia.org/wiki/SMART_criteria )
 | 
| 16 | 16 | 
 | 
| 17 | 17 | 
 ----
 | 
| 18 | 
+[//]: # (To review information about possible relevant labels for this issue please view the list of labels: https://gitlab.com/BuildStream/buildstream/labels)
 | 
|
| 19 | 
+  | 
| ... | ... | @@ -1234,6 +1234,8 @@ This will give you a ``doc/build/html`` directory with the html docs which | 
| 1234 | 1234 | 
 you can view in your browser locally to test.
 | 
| 1235 | 1235 | 
 | 
| 1236 | 1236 | 
 | 
| 1237 | 
+.. _contributing_session_html:
 | 
|
| 1238 | 
+  | 
|
| 1237 | 1239 | 
 Regenerating session html
 | 
| 1238 | 1240 | 
 '''''''''''''''''''''''''
 | 
| 1239 | 1241 | 
 The documentation build will build the session files if they are missing,
 | 
| ... | ... | @@ -1252,6 +1254,8 @@ To force rebuild session html while building the doc, simply run `tox` with the | 
| 1252 | 1254 | 
   env BST_FORCE_SESSION_REBUILD=1 tox -e docs
 | 
| 1253 | 1255 | 
 | 
| 1254 | 1256 | 
 | 
| 1257 | 
+.. _contributing_man_pages:
 | 
|
| 1258 | 
+  | 
|
| 1255 | 1259 | 
 Man pages
 | 
| 1256 | 1260 | 
 ~~~~~~~~~
 | 
| 1257 | 1261 | 
 Unfortunately it is quite difficult to integrate the man pages build
 | 
| ... | ... | @@ -1779,3 +1783,257 @@ changing the ``.in`` file, run the following to update the matching ``.txt`` | 
| 1779 | 1783 | 
 file::
 | 
| 1780 | 1784 | 
 | 
| 1781 | 1785 | 
    make -C requirements
 | 
| 1786 | 
+  | 
|
| 1787 | 
+  | 
|
| 1788 | 
+Making releases
 | 
|
| 1789 | 
+---------------
 | 
|
| 1790 | 
+This is a checklist of activities which must be observed when creating
 | 
|
| 1791 | 
+BuildStream releases, it is important to keep this section up to date
 | 
|
| 1792 | 
+whenever the release process changes.
 | 
|
| 1793 | 
+  | 
|
| 1794 | 
+  | 
|
| 1795 | 
+Requirements
 | 
|
| 1796 | 
+~~~~~~~~~~~~
 | 
|
| 1797 | 
+A couple of requirements and accounts are needed in order
 | 
|
| 1798 | 
+to publish a release.
 | 
|
| 1799 | 
+  | 
|
| 1800 | 
+* Ability to send email to ``buildstream-list gnome org`` and
 | 
|
| 1801 | 
+  to ``gnome-announce-list gnome org``.
 | 
|
| 1802 | 
+  | 
|
| 1803 | 
+* Shell account at ``master.gnome.org``.
 | 
|
| 1804 | 
+  | 
|
| 1805 | 
+* Access to the `BuildStream project on PyPI <https://pypi.org/project/BuildStream/>`_.
 | 
|
| 1806 | 
+  | 
|
| 1807 | 
+* An email client which still knows how to send emails in plain text.
 | 
|
| 1808 | 
+  | 
|
| 1809 | 
+  | 
|
| 1810 | 
+Pre-release changes
 | 
|
| 1811 | 
+~~~~~~~~~~~~~~~~~~~
 | 
|
| 1812 | 
+Before actually rolling the release, here is a list of changes which
 | 
|
| 1813 | 
+might need to be done in preparation for the release.
 | 
|
| 1814 | 
+  | 
|
| 1815 | 
+* Ensure that the man pages are up to date
 | 
|
| 1816 | 
+  | 
|
| 1817 | 
+  The man pages are committed to the repository because we are
 | 
|
| 1818 | 
+  currently unable to integrate this generation into the setuptools
 | 
|
| 1819 | 
+  build phase, as outlined in issue #8.
 | 
|
| 1820 | 
+  | 
|
| 1821 | 
+  If any of the user-facing CLI has changed, or if any of the
 | 
|
| 1822 | 
+  related docstrings have changed, then you should
 | 
|
| 1823 | 
+  :ref:`regenerate the man pages <contributing_man_pages>` and
 | 
|
| 1824 | 
+  add/commit the results before wrapping a release.
 | 
|
| 1825 | 
+  | 
|
| 1826 | 
+* Ensure the documentation session HTML is up to date
 | 
|
| 1827 | 
+  | 
|
| 1828 | 
+  The session HTML files are committed to the repository for multiple
 | 
|
| 1829 | 
+  reasons, one of them being that the documentation must be buildable
 | 
|
| 1830 | 
+  from within a release build environment so that downstream distribution
 | 
|
| 1831 | 
+  packagers can easily create the docs package.
 | 
|
| 1832 | 
+  | 
|
| 1833 | 
+  This is currently only needed for the first stable release
 | 
|
| 1834 | 
+  in a stable line of releases; after this point the API is frozen
 | 
|
| 1835 | 
+  and will not change for the remainder of the stable release lifetime,
 | 
|
| 1836 | 
+  so nothing interesting will have changed in these session files.
 | 
|
| 1837 | 
+  | 
|
| 1838 | 
+  If regeneration is needed, follow :ref:`the instructions above <contributing_session_html>`.
 | 
|
| 1839 | 
+  | 
|
| 1840 | 
+* Ensure the NEWS entry is up to date and ready
 | 
|
| 1841 | 
+  | 
|
| 1842 | 
+  For a stable release where features have not been added, we
 | 
|
| 1843 | 
+  should at least add some entries about the issues which have
 | 
|
| 1844 | 
+  been fixed since the last stable release.
 | 
|
| 1845 | 
+  | 
|
| 1846 | 
+  For development releases, it is worthwhile going over the
 | 
|
| 1847 | 
+  existing entries and ensuring all the major feature additions
 | 
|
| 1848 | 
+  are mentioned and there are no redundancies.
 | 
|
| 1849 | 
+  | 
|
| 1850 | 
+* Push pre-release changes
 | 
|
| 1851 | 
+  | 
|
| 1852 | 
+  Now that any final pre-release changes to generated files or NEWS have
 | 
|
| 1853 | 
+  been made, push these directly to the upstream repository.
 | 
|
| 1854 | 
+  | 
|
| 1855 | 
+  Do not sit around waiting for CI or approval; these superficial changes
 | 
|
| 1856 | 
+  do not affect CI and you are expected to push these changes directly
 | 
|
| 1857 | 
+  to the upstream repository.
 | 
|
| 1858 | 
+  | 
|
| 1859 | 
+  | 
|
| 1860 | 
+Release process
 | 
|
| 1861 | 
+~~~~~~~~~~~~~~~
 | 
|
| 1862 | 
+  | 
|
| 1863 | 
+* Ensure that the latest commit is passing in CI
 | 
|
| 1864 | 
+  | 
|
| 1865 | 
+  Of course, we do not release software which does not pass it's own
 | 
|
| 1866 | 
+  tests.
 | 
|
| 1867 | 
+  | 
|
| 1868 | 
+* Get the list of contributors
 | 
|
| 1869 | 
+  | 
|
| 1870 | 
+  The list of contributors for a given list is a list of
 | 
|
| 1871 | 
+  any contributors who have landed any patches since the
 | 
|
| 1872 | 
+  last release.
 | 
|
| 1873 | 
+  | 
|
| 1874 | 
+  An easy way to get this list is to ask git to summarize
 | 
|
| 1875 | 
+  the authors of commits since the *last release tag*. For
 | 
|
| 1876 | 
+  example, if we are about to create the ``1.1.1`` release, then
 | 
|
| 1877 | 
+  we need to observe all of the commits since the ``1.1.0``
 | 
|
| 1878 | 
+  release:
 | 
|
| 1879 | 
+  | 
|
| 1880 | 
+  .. code:: shell
 | 
|
| 1881 | 
+  | 
|
| 1882 | 
+     git shortlog -s 1.1.0...@
 | 
|
| 1883 | 
+  | 
|
| 1884 | 
+  At times, the same contributor might make contributions from different
 | 
|
| 1885 | 
+  machines which they have setup their author names differently, you
 | 
|
| 1886 | 
+  can see that some of the authors are actually duplicates, then
 | 
|
| 1887 | 
+  remove the duplicates.
 | 
|
| 1888 | 
+  | 
|
| 1889 | 
+* Start composing the release announcement email
 | 
|
| 1890 | 
+  | 
|
| 1891 | 
+  The first thing to do when composing the release email is to
 | 
|
| 1892 | 
+  ensure your mail client has disabled any HTML formatting and will
 | 
|
| 1893 | 
+  safely use plain text only.
 | 
|
| 1894 | 
+  | 
|
| 1895 | 
+  Try to make the release announcement consistent with other release
 | 
|
| 1896 | 
+  announcements as much as possible, an example of the email
 | 
|
| 1897 | 
+  can be `found here <https://mail.gnome.org/archives/buildstream-list/2019-February/msg00039.html>`_.
 | 
|
| 1898 | 
+  | 
|
| 1899 | 
+  The recipients of the email are ``buildstream-list gnome org`` and
 | 
|
| 1900 | 
+  ``gnome-announce-list gnome org``, and the title of the email should
 | 
|
| 1901 | 
+  be of the form: ``BuildStream 1.1.1 released``, without any exclamation point.
 | 
|
| 1902 | 
+  | 
|
| 1903 | 
+  The format of the email is essentially::
 | 
|
| 1904 | 
+  | 
|
| 1905 | 
+    Hi all,
 | 
|
| 1906 | 
+  | 
|
| 1907 | 
+    This is the personalized message written to you about this
 | 
|
| 1908 | 
+    release.
 | 
|
| 1909 | 
+  | 
|
| 1910 | 
+    If this is an unstable release, this should include a warning
 | 
|
| 1911 | 
+    to this effect and an invitation to users to please help us
 | 
|
| 1912 | 
+    test this release.
 | 
|
| 1913 | 
+  | 
|
| 1914 | 
+    This is also a good place to highlight specific bug fixes which
 | 
|
| 1915 | 
+    users may have been waiting for, or highlight a new feature we
 | 
|
| 1916 | 
+    want users to try out.
 | 
|
| 1917 | 
+  | 
|
| 1918 | 
+  | 
|
| 1919 | 
+    What is BuildStream ?
 | 
|
| 1920 | 
+    =====================
 | 
|
| 1921 | 
+    This is a concise blurb which describes BuildStream in a couple of
 | 
|
| 1922 | 
+    sentences, and is taken from the the README.rst.
 | 
|
| 1923 | 
+  | 
|
| 1924 | 
+    The easiest thing is to just copy this over from the last release email.
 | 
|
| 1925 | 
+  | 
|
| 1926 | 
+  | 
|
| 1927 | 
+    =================
 | 
|
| 1928 | 
+    buildstream 1.1.1
 | 
|
| 1929 | 
+    =================
 | 
|
| 1930 | 
+    This section is directly copy pasted from the top of the NEWS file
 | 
|
| 1931 | 
+  | 
|
| 1932 | 
+  | 
|
| 1933 | 
+    Contributors
 | 
|
| 1934 | 
+    ============
 | 
|
| 1935 | 
+     - This is Where
 | 
|
| 1936 | 
+     - You Put
 | 
|
| 1937 | 
+     - The Contributor
 | 
|
| 1938 | 
+     - Names Which
 | 
|
| 1939 | 
+     - You Extracted
 | 
|
| 1940 | 
+     - Using git shortlog -s
 | 
|
| 1941 | 
+  | 
|
| 1942 | 
+  | 
|
| 1943 | 
+    Where can I get it ?
 | 
|
| 1944 | 
+    ====================
 | 
|
| 1945 | 
+    https://download.gnome.org/sources/BuildStream/1.1/
 | 
|
| 1946 | 
+  | 
|
| 1947 | 
+    For more information on the BuildStream project, visit our home page
 | 
|
| 1948 | 
+    at https://buildstream.build/
 | 
|
| 1949 | 
+  | 
|
| 1950 | 
+* Publish the release tag
 | 
|
| 1951 | 
+  | 
|
| 1952 | 
+  Now that any pre-release changes are upstream, create and push the
 | 
|
| 1953 | 
+  signed release tag like so:
 | 
|
| 1954 | 
+  | 
|
| 1955 | 
+  .. code:: shell
 | 
|
| 1956 | 
+  | 
|
| 1957 | 
+     git tag -s 1.1.1
 | 
|
| 1958 | 
+     git push origin 1.1.1
 | 
|
| 1959 | 
+  | 
|
| 1960 | 
+* Upload the release tarball
 | 
|
| 1961 | 
+  | 
|
| 1962 | 
+  First get yourself into a clean repository state; ensure that you
 | 
|
| 1963 | 
+  don't have any unfinished work or precious, uncommitted files lying
 | 
|
| 1964 | 
+  around in your checkout and then run:
 | 
|
| 1965 | 
+  | 
|
| 1966 | 
+  .. code:: shell
 | 
|
| 1967 | 
+  | 
|
| 1968 | 
+     git clean -xdff
 | 
|
| 1969 | 
+  | 
|
| 1970 | 
+  Create the tarball with the following command:
 | 
|
| 1971 | 
+  | 
|
| 1972 | 
+  .. code:: shell
 | 
|
| 1973 | 
+  | 
|
| 1974 | 
+     python3 setup.py sdist
 | 
|
| 1975 | 
+  | 
|
| 1976 | 
+  And upload the resulting tarball to the master GNOME server:
 | 
|
| 1977 | 
+  | 
|
| 1978 | 
+  .. code:: shell
 | 
|
| 1979 | 
+  | 
|
| 1980 | 
+     scp dist/BuildStream-1.1.1.tar.gz <user>@master.gnome.org:
 | 
|
| 1981 | 
+  | 
|
| 1982 | 
+  And finally log in to your account at ``master.gnome.org`` and run
 | 
|
| 1983 | 
+  the install scripts to publish the tarball and update the mirrors:
 | 
|
| 1984 | 
+  | 
|
| 1985 | 
+  .. code:: shell
 | 
|
| 1986 | 
+  | 
|
| 1987 | 
+     ftpadmin install BuildStream-1.1.1.tar.gz
 | 
|
| 1988 | 
+  | 
|
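  As an optional sanity check, which is an assumption rather than part of
  the documented process, a HEAD request can confirm the tarball is
  available once the mirrors have updated:

  .. code:: shell

     # Expect an HTTP 200 response once the tarball has been published
     curl -I https://download.gnome.org/sources/BuildStream/1.1/BuildStream-1.1.1.tar.gz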
| 1989 | 
+* Send the release email
 | 
|
| 1990 | 
+  | 
|
| 1991 | 
+  Now that the release tag is up and the tarball is published,
 | 
|
| 1992 | 
+  you can send the release email.
 | 
|
| 1993 | 
+  | 
|
| 1994 | 
+  | 
|
| 1995 | 
+Post-release activities
 | 
|
| 1996 | 
+~~~~~~~~~~~~~~~~~~~~~~~
 | 
|
| 1997 | 
+Once the release has been published, there are some activities
 | 
|
| 1998 | 
+which need to be done to ensure everything is up to date.
 | 
|
| 1999 | 
+  | 
|
| 2000 | 
+* If this is a stable release, then the tarball should also be
 | 
|
| 2001 | 
+  uploaded to PyPI.
 | 
|
| 2002 | 
+  | 
|
| 2003 | 
+  Make sure you have ``twine`` installed and upload the tarball
 | 
|
| 2004 | 
+  like so:
 | 
|
| 2005 | 
+  | 
|
| 2006 | 
+  .. code:: shell
 | 
|
| 2007 | 
+  | 
|
| 2008 | 
+     pip3 install --user twine
 | 
|
| 2009 | 
+     twine upload -r pypi dist/BuildStream-1.1.1.tar.gz
 | 
|
| 2010 | 
+  | 
|
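  Optionally, and assuming a recent enough version of ``twine``, the
  distribution metadata can be validated before uploading:

  .. code:: shell

     # Validate the metadata of the built tarball before publishing
     twine check dist/BuildStream-1.1.1.tar.gz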
| 2011 | 
+* Update the topic line in the #buildstream IRC channel if needed
 | 
|
| 2012 | 
+  | 
|
| 2013 | 
+  The IRC channel usually advertises the latest stable release
 | 
|
| 2014 | 
+  in the topic line; now is the right time to update it.
 | 
|
| 2015 | 
+  | 
|
| 2016 | 
+* Update the website repository
 | 
|
| 2017 | 
+  | 
|
| 2018 | 
+  The website wants to link to release announcements, but this
 | 
|
| 2019 | 
+  cannot be automated because we cannot guess what the link to
 | 
|
| 2020 | 
+  the release email will be in the mailing list archive.
 | 
|
| 2021 | 
+  | 
|
| 2022 | 
+  Find the URL to the announcement you just published
 | 
|
| 2023 | 
+  `in the mailing list archives <https://mail.gnome.org/archives/buildstream-list/>`_,
 | 
|
| 2024 | 
+  and use that URL to update the ``anouncements.json`` file in the website
 | 
|
| 2025 | 
+  repository.
 | 
|
| 2026 | 
+  | 
|
| 2027 | 
+  Commit and push this change to the ``anouncements.json`` file to
 | 
|
| 2028 | 
+  the upstream website repository, and GitLab will take care of automatically
 | 
|
| 2029 | 
+  updating the website accordingly.
 | 
|
| 2030 | 
+  | 
|
| 2031 | 
+* Regenerate BuildStream documentation
 | 
|
| 2032 | 
+  | 
|
| 2033 | 
+  In order to update the badges we use in various documentation
 | 
|
| 2034 | 
+  which reflect the latest stable release and the latest
 | 
|
| 2035 | 
+  development snapshot, we simply need to ensure a pipeline runs
 | 
|
| 2036 | 
+  for the master branch in the BuildStream repository.
 | 
|
| 2037 | 
+  | 
|
| 2038 | 
+  You can do this by using the "Run Pipeline" feature on the
 | 
|
| 2039 | 
+  `pipelines page in the GitLab UI <https://gitlab.com/BuildStream/buildstream/pipelines>`_.
 | 
| ... | ... | @@ -138,6 +138,9 @@ buildstream 1.3.1 | 
| 138 | 138 | 
   o BREAKING CHANGE: Symlinks are no longer resolved during staging and absolute
 | 
| 139 | 139 | 
     symlinks are now preserved instead of being converted to relative symlinks.
 | 
| 140 | 140 | 
 | 
| 141 | 
+  o BREAKING CHANGE: Overlap whitelists now require absolute paths. This allows
 | 
|
| 142 | 
+    use of variables such as %{prefix} and matches the documentation.
 | 
|
| 143 | 
+  | 
|
| 141 | 144 | 
 | 
| 142 | 145 | 
 =================
 | 
| 143 | 146 | 
 buildstream 1.1.5
 | 
| ... | ... | @@ -100,3 +100,9 @@ We also recommend exploring some existing BuildStream projects: | 
| 100 | 100 | 
 * https://gitlab.com/baserock/definitions
 | 
| 101 | 101 | 
 | 
| 102 | 102 | 
 If you have any questions please ask on our `#buildstream <irc://irc.gnome.org/buildstream>`_ channel in `irc.gnome.org <irc://irc.gnome.org>`_
 | 
| 103 | 
+  | 
|
| 104 | 
+  | 
|
| 105 | 
+Availability in distros
 | 
|
| 106 | 
+=======================
 | 
|
| 107 | 
+.. image:: https://repology.org/badge/vertical-allrepos/buildstream.svg
 | 
|
| 108 | 
+   :target: https://repology.org/metapackage/buildstream/versions
 | 
| ... | ... | @@ -588,13 +588,16 @@ class ArtifactCache(): | 
| 588 | 588 | 
     #
 | 
| 589 | 589 | 
     # Args:
 | 
| 590 | 590 | 
     #     element (Element): The Element commit an artifact for
 | 
| 591 | 
-    #     content (str): The element's content directory
 | 
|
| 591 | 
+    #     content (Directory): The element's content directory
 | 
|
| 592 | 592 | 
     #     keys (list): The cache keys to use
 | 
| 593 | 593 | 
     #
 | 
| 594 | 594 | 
     def commit(self, element, content, keys):
 | 
| 595 | 595 | 
         refs = [element.get_artifact_name(key) for key in keys]
 | 
| 596 | 596 | 
 | 
| 597 | 
-        self.cas.commit(refs, content)
 | 
|
| 597 | 
+        tree = content._get_digest()
 | 
|
| 598 | 
+  | 
|
| 599 | 
+        for ref in refs:
 | 
|
| 600 | 
+            self.cas.set_ref(ref, tree)
 | 
|
| 598 | 601 | 
 | 
| 599 | 602 | 
     # diff():
 | 
| 600 | 603 | 
     #
 | 
| ... | ... | @@ -940,7 +940,7 @@ def node_sanitize(node): | 
| 940 | 940 | 
         return [node_sanitize(elt) for elt in node]
 | 
| 941 | 941 | 
 | 
| 942 | 942 | 
     # Finally ChainMap and dict, and other Mappings need special handling
 | 
| 943 | 
-    if node_type in (dict, ChainMap) or isinstance(node, collections.Mapping):
 | 
|
| 943 | 
+    if node_type in (dict, ChainMap) or isinstance(node, collections.abc.Mapping):
 | 
|
| 944 | 944 | 
         result = SanitizedDict()
 | 
| 945 | 945 | 
 | 
| 946 | 946 | 
         key_list = [key for key, _ in node_items(node)]
 | 
| ... | ... | @@ -103,6 +103,7 @@ from .types import _KeyStrength, CoreWarnings | 
| 103 | 103 | 
 | 
| 104 | 104 | 
 from .storage.directory import Directory
 | 
| 105 | 105 | 
 from .storage._filebaseddirectory import FileBasedDirectory
 | 
| 106 | 
+from .storage._casbaseddirectory import CasBasedDirectory
 | 
|
| 106 | 107 | 
 from .storage.directory import VirtualDirectoryError
 | 
| 107 | 108 | 
 | 
| 108 | 109 | 
 | 
| ... | ... | @@ -1619,12 +1620,12 @@ class Element(Plugin): | 
| 1619 | 1620 | 
                 self.__dynamic_public = _yaml.node_copy(self.__public)
 | 
| 1620 | 1621 | 
 | 
| 1621 | 1622 | 
                 # Call the abstract plugin methods
 | 
| 1622 | 
-                try:
 | 
|
| 1623 | 
-                    # Step 1 - Configure
 | 
|
| 1624 | 
-                    self.__configure_sandbox(sandbox)
 | 
|
| 1625 | 
-                    # Step 2 - Stage
 | 
|
| 1626 | 
-                    self.stage(sandbox)
 | 
|
| 1627 | 1623 | 
 | 
| 1624 | 
+                # Step 1 - Configure
 | 
|
| 1625 | 
+                self.__configure_sandbox(sandbox)
 | 
|
| 1626 | 
+                # Step 2 - Stage
 | 
|
| 1627 | 
+                self.stage(sandbox)
 | 
|
| 1628 | 
+                try:
 | 
|
| 1628 | 1629 | 
                     if self.__batch_prepare_assemble:
 | 
| 1629 | 1630 | 
                         cm = sandbox.batch(self.__batch_prepare_assemble_flags,
 | 
| 1630 | 1631 | 
                                            collect=self.__batch_prepare_assemble_collect)
 | 
| ... | ... | @@ -1670,106 +1671,109 @@ class Element(Plugin): | 
| 1670 | 1671 | 
                     cleanup_rootdir()
 | 
| 1671 | 1672 | 
 | 
| 1672 | 1673 | 
     def _cache_artifact(self, rootdir, sandbox, collect):
 | 
| 1673 | 
-        if collect is not None:
 | 
|
| 1674 | 
-            try:
 | 
|
| 1675 | 
-                sandbox_vroot = sandbox.get_virtual_directory()
 | 
|
| 1676 | 
-                collectvdir = sandbox_vroot.descend(collect.lstrip(os.sep).split(os.sep))
 | 
|
| 1677 | 
-            except VirtualDirectoryError:
 | 
|
| 1678 | 
-                # No collect directory existed
 | 
|
| 1679 | 
-                collectvdir = None
 | 
|
| 1674 | 
+        with self.timed_activity("Caching artifact"):
 | 
|
| 1675 | 
+            if collect is not None:
 | 
|
| 1676 | 
+                try:
 | 
|
| 1677 | 
+                    sandbox_vroot = sandbox.get_virtual_directory()
 | 
|
| 1678 | 
+                    collectvdir = sandbox_vroot.descend(collect.lstrip(os.sep).split(os.sep))
 | 
|
| 1679 | 
+                except VirtualDirectoryError:
 | 
|
| 1680 | 
+                    # No collect directory existed
 | 
|
| 1681 | 
+                    collectvdir = None
 | 
|
| 1680 | 1682 | 
 | 
| 1681 | 
-        context = self._get_context()
 | 
|
| 1683 | 
+            context = self._get_context()
 | 
|
| 1682 | 1684 | 
 | 
| 1683 | 
-        # Create artifact directory structure
 | 
|
| 1684 | 
-        assembledir = os.path.join(rootdir, 'artifact')
 | 
|
| 1685 | 
-        filesdir = os.path.join(assembledir, 'files')
 | 
|
| 1686 | 
-        logsdir = os.path.join(assembledir, 'logs')
 | 
|
| 1687 | 
-        metadir = os.path.join(assembledir, 'meta')
 | 
|
| 1688 | 
-        buildtreedir = os.path.join(assembledir, 'buildtree')
 | 
|
| 1689 | 
-        os.mkdir(assembledir)
 | 
|
| 1690 | 
-        if collect is not None and collectvdir is not None:
 | 
|
| 1691 | 
-            os.mkdir(filesdir)
 | 
|
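  As a minimal sketch, assuming the upstream remote is named ``origin``
  and the pre-release changes landed on ``master``, this push amounts to:

  .. code:: shell

     # Push NEWS and regenerated-file updates straight upstream;
     # the remote and branch names here are assumptions.
     git push origin master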
| 1692 | 
-        os.mkdir(logsdir)
 | 
|
| 1693 | 
-        os.mkdir(metadir)
 | 
|
| 1694 | 
-        os.mkdir(buildtreedir)
 | 
|
| 1695 | 
-  | 
|
| 1696 | 
-        # Hard link files from collect dir to files directory
 | 
|
| 1697 | 
-        if collect is not None and collectvdir is not None:
 | 
|
| 1698 | 
-            collectvdir.export_files(filesdir, can_link=True)
 | 
|
| 1699 | 
-  | 
|
| 1700 | 
-        cache_buildtrees = context.cache_buildtrees
 | 
|
| 1701 | 
-        build_success = self.__build_result[0]
 | 
|
| 1702 | 
-  | 
|
| 1703 | 
-        # cache_buildtrees defaults to 'always', as such the
 | 
|
| 1704 | 
-        # default behaviour is to attempt to cache them. If only
 | 
|
| 1705 | 
-        # caching failed artifact buildtrees, then query the build
 | 
|
| 1706 | 
-        # result. Element types without a build-root dir will be cached
 | 
|
| 1707 | 
-        # with an empty buildtreedir regardless of this configuration.
 | 
|
| 1708 | 
-  | 
|
| 1709 | 
-        if cache_buildtrees == 'always' or (cache_buildtrees == 'failure' and not build_success):
 | 
|
| 1710 | 
-            try:
 | 
|
| 1685 | 
+            assemblevdir = CasBasedDirectory(cas_cache=context.artifactcache.cas, ref=None)
 | 
|
| 1686 | 
+            logsvdir = assemblevdir.descend("logs", create=True)
 | 
|
| 1687 | 
+            metavdir = assemblevdir.descend("meta", create=True)
 | 
|
| 1688 | 
+            buildtreevdir = assemblevdir.descend("buildtree", create=True)
 | 
|
| 1689 | 
+  | 
|
| 1690 | 
+            # Create artifact directory structure
 | 
|
| 1691 | 
+            assembledir = os.path.join(rootdir, 'artifact')
 | 
|
| 1692 | 
+            logsdir = os.path.join(assembledir, 'logs')
 | 
|
| 1693 | 
+            metadir = os.path.join(assembledir, 'meta')
 | 
|
| 1694 | 
+            os.mkdir(assembledir)
 | 
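  A possible way to spot and fold together duplicate identities, using
  only standard git (the names and addresses below are hypothetical):

  .. code:: shell

     # Include email addresses in the summary to reveal duplicate identities
     git shortlog -sen 1.1.0...@

     # A .mailmap entry in the repository root can merge a duplicate
     # identity into the canonical one, for example:
     #   Jane Doe <jane@example.com> Jane D <jane@laptop.invalid>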
|
| 1695 | 
+            os.mkdir(logsdir)
 | 
|
| 1696 | 
+            os.mkdir(metadir)
 | 
|
| 1697 | 
+  | 
|
| 1698 | 
+            if collect is not None and collectvdir is not None:
 | 
|
| 1699 | 
+                filesvdir = assemblevdir.descend("files", create=True)
 | 
|
| 1700 | 
+                filesvdir.import_files(collectvdir)
 | 
|
| 1701 | 
+  | 
|
| 1702 | 
+            cache_buildtrees = context.cache_buildtrees
 | 
|
| 1703 | 
+            build_success = self.__build_result[0]
 | 
|
| 1704 | 
+  | 
|
| 1705 | 
+            # cache_buildtrees defaults to 'always', as such the
 | 
|
| 1706 | 
+            # default behaviour is to attempt to cache them. If only
 | 
|
| 1707 | 
+            # caching failed artifact buildtrees, then query the build
 | 
|
| 1708 | 
+            # result. Element types without a build-root dir will be cached
 | 
|
| 1709 | 
+            # with an empty buildtreedir regardless of this configuration.
 | 
|
| 1710 | 
+  | 
|
| 1711 | 
+            if cache_buildtrees == 'always' or (cache_buildtrees == 'failure' and not build_success):
 | 
|
| 1711 | 1712 | 
                 sandbox_vroot = sandbox.get_virtual_directory()
 | 
| 1712 | 
-                sandbox_build_dir = sandbox_vroot.descend(
 | 
|
| 1713 | 
-                    self.get_variable('build-root').lstrip(os.sep).split(os.sep))
 | 
|
| 1714 | 
-                # Hard link files from build-root dir to buildtreedir directory
 | 
|
| 1715 | 
-                sandbox_build_dir.export_files(buildtreedir)
 | 
|
| 1716 | 
-            except VirtualDirectoryError:
 | 
|
| 1717 | 
-                # Directory could not be found. Pre-virtual
 | 
|
| 1718 | 
-                # directory behaviour was to continue silently
 | 
|
| 1719 | 
-                # if the directory could not be found.
 | 
|
| 1720 | 
-                pass
 | 
|
| 1713 | 
+                try:
 | 
|
| 1714 | 
+                    sandbox_build_dir = sandbox_vroot.descend(
 | 
|
| 1715 | 
+                        self.get_variable('build-root').lstrip(os.sep).split(os.sep))
 | 
|
| 1716 | 
+                    buildtreevdir.import_files(sandbox_build_dir)
 | 
|
| 1717 | 
+                except VirtualDirectoryError:
 | 
|
| 1718 | 
+                    # Directory could not be found. Pre-virtual
 | 
|
| 1719 | 
+                    # directory behaviour was to continue silently
 | 
|
| 1720 | 
+                    # if the directory could not be found.
 | 
|
| 1721 | 
+                    pass
 | 
|
| 1722 | 
+  | 
|
| 1723 | 
+            # Write some logs out to normal directories: logsdir and metadir
 | 
|
| 1724 | 
+            # Copy build log
 | 
|
| 1725 | 
+            log_filename = context.get_log_filename()
 | 
|
| 1726 | 
+            self._build_log_path = os.path.join(logsdir, 'build.log')
 | 
|
| 1727 | 
+            if log_filename:
 | 
|
| 1728 | 
+                shutil.copyfile(log_filename, self._build_log_path)
 | 
|
| 1729 | 
+  | 
|
| 1730 | 
+            # Store public data
 | 
|
| 1731 | 
+            _yaml.dump(_yaml.node_sanitize(self.__dynamic_public), os.path.join(metadir, 'public.yaml'))
 | 
|
| 1732 | 
+  | 
|
| 1733 | 
+            # Store result
 | 
|
| 1734 | 
+            build_result_dict = {"success": self.__build_result[0], "description": self.__build_result[1]}
 | 
|
| 1735 | 
+            if self.__build_result[2] is not None:
 | 
|
| 1736 | 
+                build_result_dict["detail"] = self.__build_result[2]
 | 
|
| 1737 | 
+            _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
 | 
|
| 1738 | 
+  | 
|
| 1739 | 
+            # ensure we have cache keys
 | 
|
| 1740 | 
+            self._assemble_done()
 | 
|
| 1741 | 
+  | 
|
| 1742 | 
+            # Store keys.yaml
 | 
|
| 1743 | 
+            _yaml.dump(_yaml.node_sanitize({
 | 
|
| 1744 | 
+                'strong': self._get_cache_key(),
 | 
|
| 1745 | 
+                'weak': self._get_cache_key(_KeyStrength.WEAK),
 | 
|
| 1746 | 
+            }), os.path.join(metadir, 'keys.yaml'))
 | 
|
| 1747 | 
+  | 
|
| 1748 | 
+            # Store dependencies.yaml
 | 
|
| 1749 | 
+            _yaml.dump(_yaml.node_sanitize({
 | 
|
| 1750 | 
+                e.name: e._get_cache_key() for e in self.dependencies(Scope.BUILD)
 | 
|
| 1751 | 
+            }), os.path.join(metadir, 'dependencies.yaml'))
 | 
|
| 1752 | 
+  | 
|
| 1753 | 
+            # Store workspaced.yaml
 | 
|
| 1754 | 
+            _yaml.dump(_yaml.node_sanitize({
 | 
|
| 1755 | 
+                'workspaced': bool(self._get_workspace())
 | 
|
| 1756 | 
+            }), os.path.join(metadir, 'workspaced.yaml'))
 | 
|
| 1757 | 
+  | 
|
| 1758 | 
+            # Store workspaced-dependencies.yaml
 | 
|
| 1759 | 
+            _yaml.dump(_yaml.node_sanitize({
 | 
|
| 1760 | 
+                'workspaced-dependencies': [
 | 
|
| 1761 | 
+                    e.name for e in self.dependencies(Scope.BUILD)
 | 
|
| 1762 | 
+                    if e._get_workspace()
 | 
|
| 1763 | 
+                ]
 | 
|
| 1764 | 
+            }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
 | 
|
| 1721 | 1765 | 
 | 
| 1722 | 
-        # Copy build log
 | 
|
| 1723 | 
-        log_filename = context.get_log_filename()
 | 
|
| 1724 | 
-        self._build_log_path = os.path.join(logsdir, 'build.log')
 | 
|
| 1725 | 
-        if log_filename:
 | 
|
| 1726 | 
-            shutil.copyfile(log_filename, self._build_log_path)
 | 
|
| 1727 | 
-  | 
|
| 1728 | 
-        # Store public data
 | 
|
| 1729 | 
-        _yaml.dump(_yaml.node_sanitize(self.__dynamic_public), os.path.join(metadir, 'public.yaml'))
 | 
|
| 1730 | 
-  | 
|
| 1731 | 
-        # Store result
 | 
|
| 1732 | 
-        build_result_dict = {"success": self.__build_result[0], "description": self.__build_result[1]}
 | 
|
| 1733 | 
-        if self.__build_result[2] is not None:
 | 
|
| 1734 | 
-            build_result_dict["detail"] = self.__build_result[2]
 | 
|
| 1735 | 
-        _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
 | 
|
| 1736 | 
-  | 
|
| 1737 | 
-        # ensure we have cache keys
 | 
|
| 1738 | 
-        self._assemble_done()
 | 
|
| 1739 | 
-  | 
|
| 1740 | 
-        # Store keys.yaml
 | 
|
| 1741 | 
-        _yaml.dump(_yaml.node_sanitize({
 | 
|
| 1742 | 
-            'strong': self._get_cache_key(),
 | 
|
| 1743 | 
-            'weak': self._get_cache_key(_KeyStrength.WEAK),
 | 
|
| 1744 | 
-        }), os.path.join(metadir, 'keys.yaml'))
 | 
|
| 1745 | 
-  | 
|
| 1746 | 
-        # Store dependencies.yaml
 | 
|
| 1747 | 
-        _yaml.dump(_yaml.node_sanitize({
 | 
|
| 1748 | 
-            e.name: e._get_cache_key() for e in self.dependencies(Scope.BUILD)
 | 
|
| 1749 | 
-        }), os.path.join(metadir, 'dependencies.yaml'))
 | 
|
| 1750 | 
-  | 
|
| 1751 | 
-        # Store workspaced.yaml
 | 
|
| 1752 | 
-        _yaml.dump(_yaml.node_sanitize({
 | 
|
| 1753 | 
-            'workspaced': bool(self._get_workspace())
 | 
|
| 1754 | 
-        }), os.path.join(metadir, 'workspaced.yaml'))
 | 
|
| 1755 | 
-  | 
|
| 1756 | 
-        # Store workspaced-dependencies.yaml
 | 
|
| 1757 | 
-        _yaml.dump(_yaml.node_sanitize({
 | 
|
| 1758 | 
-            'workspaced-dependencies': [
 | 
|
| 1759 | 
-                e.name for e in self.dependencies(Scope.BUILD)
 | 
|
| 1760 | 
-                if e._get_workspace()
 | 
|
| 1761 | 
-            ]
 | 
|
| 1762 | 
-        }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
 | 
|
| 1766 | 
+            metavdir.import_files(metadir)
 | 
|
| 1767 | 
+            logsvdir.import_files(logsdir)
 | 
|
| 1763 | 1768 | 
 | 
| 1764 | 
-        with self.timed_activity("Caching artifact"):
 | 
|
| 1765 | 
-            artifact_size = utils._get_dir_size(assembledir)
 | 
|
| 1766 | 
-            self.__artifacts.commit(self, assembledir, self.__get_cache_keys_for_commit())
 | 
|
| 1769 | 
+            artifact_size = assemblevdir.get_size()
 | 
|
| 1770 | 
+            self.__artifacts.commit(self, assemblevdir, self.__get_cache_keys_for_commit())
 | 
|
| 1767 | 1771 | 
 | 
| 1768 | 
-        if collect is not None and collectvdir is None:
 | 
|
| 1769 | 
-            raise ElementError(
 | 
|
| 1770 | 
-                "Directory '{}' was not found inside the sandbox, "
 | 
|
| 1771 | 
-                "unable to collect artifact contents"
 | 
|
| 1772 | 
-                .format(collect))
 | 
|
| 1772 | 
+            if collect is not None and collectvdir is None:
 | 
|
| 1773 | 
+                raise ElementError(
 | 
|
| 1774 | 
+                    "Directory '{}' was not found inside the sandbox, "
 | 
|
| 1775 | 
+                    "unable to collect artifact contents"
 | 
|
| 1776 | 
+                    .format(collect))
 | 
|
| 1773 | 1777 | 
 | 
| 1774 | 1778 | 
         return artifact_size
 | 
| 1775 | 1779 | 
 | 
| ... | ... | @@ -2605,7 +2609,7 @@ class Element(Plugin): | 
| 2605 | 2609 | 
             if include_file and not exclude_file:
 | 
| 2606 | 2610 | 
                 yield filename.lstrip(os.sep)
 | 
| 2607 | 2611 | 
 | 
| 2608 | 
-    def __file_is_whitelisted(self, pattern):
 | 
|
| 2612 | 
+    def __file_is_whitelisted(self, path):
 | 
|
| 2609 | 2613 | 
         # Considered storing the whitelist regex for re-use, but public data
 | 
| 2610 | 2614 | 
         # can be altered mid-build.
 | 
| 2611 | 2615 | 
         # Public data is not guaranteed to stay the same for the duration of
 | 
| ... | ... | @@ -2617,7 +2621,7 @@ class Element(Plugin): | 
| 2617 | 2621 | 
             whitelist_expressions = [utils._glob2re(self.__variables.subst(exp.strip())) for exp in whitelist]
 | 
| 2618 | 2622 | 
             _expression_ = ('^(?:' + '|'.join(whitelist_expressions) + ')$')
 | 
| 2619 | 2623 | 
             self.__whitelist_regex = re.compile(_expression_)
 | 
| 2620 | 
-        return self.__whitelist_regex.match(pattern)
 | 
|
| 2624 | 
+        return self.__whitelist_regex.match(os.path.join(os.sep, path))
 | 
|
| 2621 | 2625 | 
 | 
| 2622 | 2626 | 
     # __extract():
 | 
| 2623 | 2627 | 
     #
 | 
| ... | ... | @@ -136,10 +136,10 @@ class CasBasedDirectory(Directory): | 
| 136 | 136 | 
         the parent).
 | 
| 137 | 137 | 
 | 
| 138 | 138 | 
         """
 | 
| 139 | 
-        self.ref = self.cas_cache.add_object(buffer=self.pb2_directory.SerializeToString())
 | 
|
| 140 | 139 | 
         if caller:
 | 
| 141 | 140 | 
             old_dir = self._find_pb2_entry(caller.filename)
 | 
| 142 | 141 | 
             self.cas_cache.add_object(digest=old_dir.digest, buffer=caller.pb2_directory.SerializeToString())
 | 
| 142 | 
+        self.ref = self.cas_cache.add_object(buffer=self.pb2_directory.SerializeToString())
 | 
|
| 143 | 143 | 
         if self.parent:
 | 
| 144 | 144 | 
             self.parent._recalculate_recursing_up(self)
 | 
| 145 | 145 | 
 | 
| ... | ... | @@ -277,14 +277,6 @@ class CasBasedDirectory(Directory): | 
| 277 | 277 | 
                                                          directory_list))
 | 
| 278 | 278 | 
         return None
 | 
| 279 | 279 | 
 | 
| 280 | 
-    def find_root(self):
 | 
|
| 281 | 
-        """ Finds the root of this directory tree by following 'parent' until there is
 | 
|
| 282 | 
-        no parent. """
 | 
|
| 283 | 
-        if self.parent:
 | 
|
| 284 | 
-            return self.parent.find_root()
 | 
|
| 285 | 
-        else:
 | 
|
| 286 | 
-            return self
 | 
|
| 287 | 
-  | 
|
| 288 | 280 | 
     def _check_replacement(self, name, path_prefix, fileListResult):
 | 
| 289 | 281 | 
         """ Checks whether 'name' exists, and if so, whether we can overwrite it.
 | 
| 290 | 282 | 
         If we can, add the name to 'overwritten_files' and delete the existing entry.
 | 
| ... | ... | @@ -451,7 +443,7 @@ class CasBasedDirectory(Directory): | 
| 451 | 443 | 
                 files = external_pathspec.list_relative_paths()
 | 
| 452 | 444 | 
 | 
| 453 | 445 | 
         if isinstance(external_pathspec, FileBasedDirectory):
 | 
| 454 | 
-            source_directory = external_pathspec.get_underlying_directory()
 | 
|
| 446 | 
+            source_directory = external_pathspec._get_underlying_directory()
 | 
|
| 455 | 447 | 
             result = self._import_files_from_directory(source_directory, files=files)
 | 
| 456 | 448 | 
         elif isinstance(external_pathspec, str):
 | 
| 457 | 449 | 
             source_directory = external_pathspec
 | 
| ... | ... | @@ -635,6 +627,18 @@ class CasBasedDirectory(Directory): | 
| 635 | 627 | 
         self._recalculate_recursing_up()
 | 
| 636 | 628 | 
         self._recalculate_recursing_down()
 | 
| 637 | 629 | 
 | 
| 630 | 
+    def get_size(self):
 | 
|
| 631 | 
+        total = len(self.pb2_directory.SerializeToString())
 | 
|
| 632 | 
+        for i in self.index.values():
 | 
|
| 633 | 
+            if isinstance(i.buildstream_object, CasBasedDirectory):
 | 
|
| 634 | 
+                total += i.buildstream_object.get_size()
 | 
|
| 635 | 
+            elif isinstance(i.pb_object, remote_execution_pb2.FileNode):
 | 
|
| 636 | 
+                src_name = self.cas_cache.objpath(i.pb_object.digest)
 | 
|
| 637 | 
+                filesize = os.stat(src_name).st_size
 | 
|
| 638 | 
+                total += filesize
 | 
|
| 639 | 
+            # Symlink nodes are encoded as part of the directory serialization.
 | 
|
| 640 | 
+        return total
 | 
|
| 641 | 
+  | 
|
| 638 | 642 | 
     def _get_identifier(self):
 | 
| 639 | 643 | 
         path = ""
 | 
| 640 | 644 | 
         if self.parent:
 | 
| ... | ... | @@ -653,3 +657,15 @@ class CasBasedDirectory(Directory): | 
| 653 | 657 | 
         throw an exception. """
 | 
| 654 | 658 | 
         raise VirtualDirectoryError("_get_underlying_directory was called on a CAS-backed directory," +
 | 
| 655 | 659 | 
                                     " which has no underlying directory.")
 | 
| 660 | 
+  | 
|
| 661 | 
+    # _get_digest():
 | 
|
| 662 | 
+    #
 | 
|
| 663 | 
+    # Return the Digest for this directory.
 | 
|
| 664 | 
+    #
 | 
|
| 665 | 
+    # Returns:
 | 
|
| 666 | 
+    #   (Digest): The Digest protobuf object for the Directory protobuf
 | 
|
| 667 | 
+    #
 | 
|
| 668 | 
+    def _get_digest(self):
 | 
|
| 669 | 
+        if not self.ref:
 | 
|
| 670 | 
+            self.ref = self.cas_cache.add_object(buffer=self.pb2_directory.SerializeToString())
 | 
|
| 671 | 
+        return self.ref
 | 
| ... | ... | @@ -30,6 +30,7 @@ See also: :ref:`sandboxing`. | 
| 30 | 30 | 
 import os
 | 
| 31 | 31 | 
 import time
 | 
| 32 | 32 | 
 from .directory import Directory, VirtualDirectoryError
 | 
| 33 | 
+from .. import utils
 | 
|
| 33 | 34 | 
 from ..utils import link_files, copy_files, list_relative_paths, _get_link_mtime, _magic_timestamp
 | 
| 34 | 35 | 
 from ..utils import _set_deterministic_user, _set_deterministic_mtime
 | 
| 35 | 36 | 
 | 
| ... | ... | @@ -201,6 +202,9 @@ class FileBasedDirectory(Directory): | 
| 201 | 202 | 
 | 
| 202 | 203 | 
         return list_relative_paths(self.external_directory)
 | 
| 203 | 204 | 
 | 
| 205 | 
+    def get_size(self):
 | 
|
| 206 | 
+        return utils._get_dir_size(self.external_directory)
 | 
|
| 207 | 
+  | 
|
| 204 | 208 | 
     def __str__(self):
 | 
| 205 | 209 | 
         # This returns the whole path (since we don't know where the directory started)
 | 
| 206 | 210 | 
         # which exposes the sandbox directory; we will have to assume for the time being
 | 
| ... | ... | @@ -177,3 +177,9 @@ class Directory(): | 
| 177 | 177 | 
 | 
| 178 | 178 | 
         """
 | 
| 179 | 179 | 
         raise NotImplementedError()
 | 
| 180 | 
+  | 
|
| 181 | 
+    def get_size(self):
 | 
|
| 182 | 
+        """ Get an approximation of the storage space in bytes used by this directory
 | 
|
| 183 | 
+        and all files and subdirectories in it. Storage space varies by implementation
 | 
|
| 184 | 
+        and effective space used may be lower than this number due to deduplication. """
 | 
|
| 185 | 
+        raise NotImplementedError()
 | 
| ... | ... | @@ -25,16 +25,22 @@ | 
| 25 | 25 | 
 usage() {
 | 
| 26 | 26 | 
     cat <<EOF
 | 
| 27 | 27 | 
 | 
| 28 | 
-USAGE: $(basename "$0") [-i BST_HERE_IMAGE] [-p] [-t] [-T] [-v VOLUME ...] [-h] [COMMAND [ARG..]]
 | 
|
| 28 | 
+USAGE: $(basename "$0") [-i BST_HERE_IMAGE] [-j TAG] [-p] [-t] [-T] [-v VOLUME ...] [-h] [COMMAND [ARG..]]
 | 
|
| 29 | 29 | 
 | 
| 30 | 30 | 
 Run a bst command in a new BuildStream container.
 | 
| 31 | 31 | 
 | 
| 32 | 32 | 
 If no command is specified, an interactive shell is launched
 | 
| 33 | 33 | 
 using "/bin/bash -i".
 | 
| 34 | 34 | 
 | 
| 35 | 
+See https://hub.docker.com/r/buildstream/buildstream for details on image
 | 
|
| 36 | 
+variants.
 | 
|
| 37 | 
+  | 
|
| 35 | 38 | 
 OPTIONS:
 | 
| 36 | 39 | 
     -i IMAGE      Specify Docker image to use; can also be specified by setting
 | 
| 37 | 40 | 
                   BST_HERE_IMAGE environment variable.
 | 
| 41 | 
+                  (default: buildstream/buildstream)
 | 
|
| 42 | 
+    -j TAG        Specify the tag of the Docker image to use.
 | 
|
| 43 | 
+                  (default: latest)
 | 
|
| 38 | 44 | 
     -p            Pull the latest buildstream image before running.
 | 
| 39 | 45 | 
     -t            Force pseudo-terminal allocation.
 | 
| 40 | 46 | 
     -T            Disable pseudo-terminal allocation.
 | 
| ... | ... | @@ -46,7 +52,8 @@ EOF | 
| 46 | 52 | 
     exit "$1"
 | 
| 47 | 53 | 
 }
 | 
| 48 | 54 | 
 | 
| 49 | 
-bst_here_image="${BST_HERE_IMAGE:-buildstream/buildstream-fedora:latest}"
 | 
|
| 55 | 
+bst_here_image="${BST_HERE_IMAGE:-buildstream/buildstream}"
 | 
|
| 56 | 
+bst_here_tag=
 | 
|
| 50 | 57 | 
 | 
| 51 | 58 | 
 is_tty=
 | 
| 52 | 59 | 
 update=false
 | 
| ... | ... | @@ -57,12 +64,15 @@ then | 
| 57 | 64 | 
     is_tty=y
 | 
| 58 | 65 | 
 fi
 | 
| 59 | 66 | 
 | 
| 60 | 
-while getopts i:ptTv:h arg
 | 
|
| 67 | 
+while getopts i:j:ptTv:h arg
 | 
|
| 61 | 68 | 
 do
 | 
| 62 | 69 | 
     case $arg in
 | 
| 63 | 70 | 
     i)
 | 
| 64 | 71 | 
         bst_here_image="$OPTARG"
 | 
| 65 | 72 | 
         ;;
 | 
| 73 | 
+    j)
 | 
|
| 74 | 
+        bst_here_tag="$OPTARG"
 | 
|
| 75 | 
+        ;;
 | 
|
| 66 | 76 | 
     p)
 | 
| 67 | 77 | 
         update=true
 | 
| 68 | 78 | 
         ;;
 | 
| ... | ... | @@ -83,6 +93,10 @@ do | 
| 83 | 93 | 
     esac
 | 
| 84 | 94 | 
 done
 | 
| 85 | 95 | 
 | 
| 96 | 
+if [ -n "$bst_here_tag" ]; then
 | 
|
| 97 | 
+    bst_here_image="$bst_here_image:$bst_here_tag"
 | 
|
| 98 | 
+fi
 | 
|
| 99 | 
+  | 
|
| 86 | 100 | 
 test "$OPTIND" -gt 1 &&
 | 
| 87 | 101 | 
     shift $(( OPTIND - 1 ))
 | 
| 88 | 102 | 
 | 
| ... | ... | @@ -96,7 +96,7 @@ def parse_tag(tag): | 
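For illustration, a hypothetical invocation of the new -j option; the tag
and element name below are assumptions:

    # Run a build in a container using an explicit image tag
    bst-here -j latest bst build hello.bst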
| 96 | 96 | 
 def guess_version(release):
 | 
| 97 | 97 | 
     try:
 | 
| 98 | 98 | 
         tags_output = subprocess.check_output(['git', 'tag'])
 | 
| 99 | 
-    except CalledProcessError:
 | 
|
| 99 | 
+    except subprocess.CalledProcessError:
 | 
|
| 100 | 100 | 
         return (0, 0, 0)
 | 
| 101 | 101 | 
 | 
| 102 | 102 | 
     # Parse the `git tag` output into a list of integer tuples
 | 
| ... | ... | @@ -27,17 +27,17 @@ are the loaded representation of the ``.bst`` files loaded from the :ref:`projec | 
| 27 | 27 | 
 The *Element* is an abstract base class which cannot do anything on its own, its
 | 
| 28 | 28 | 
 concrete class is defined by *plugins* which are either included in the BuildStream
 | 
| 29 | 29 | 
 :ref:`core set of plugins <plugins>` or loaded from external sources :ref:`defined by the project
 | 
| 30 | 
-<project_plugins>`
 | 
|
| 30 | 
+<project_plugins>`.
 | 
|
| 31 | 31 | 
 | 
| 32 | 32 | 
 The responsibilities of an element include:
 | 
| 33 | 33 | 
 | 
| 34 | 
-* Loading the element's configuration from the core provided dictionary
 | 
|
| 34 | 
+* Loading the element's configuration from the core provided dictionary.
 | 
|
| 35 | 35 | 
 * Providing a unique key for any element specific configuration which might
 | 
| 36 | 
-  effect the output produced by the element
 | 
|
| 37 | 
-* Configuring the sandbox
 | 
|
| 36 | 
+  affect the output produced by the element.
 | 
|
| 37 | 
+* Configuring the sandbox.
 | 
|
| 38 | 38 | 
 * Staging the data into the sandbox, which might include Sources and
 | 
| 39 | 
-  the outputs of previous elements
 | 
|
| 40 | 
-* Assembling the output *artifact*
 | 
|
| 39 | 
+  the outputs of previous elements.
 | 
|
| 40 | 
+* Assembling the output *artifact*.
 | 
|
| 41 | 41 | 
 | 
| 42 | 42 | 
 | 
| 43 | 43 | 
 Element data structure
 | 
| ... | ... | @@ -72,9 +72,10 @@ deal of configurations understood by the *Element* is also understood by the cor | 
| 72 | 72 | 
 has default configurations built into BuildStream and configurable with the project
 | 
| 73 | 73 | 
 configuration. These include values such as *variables*, *environment*, *sandbox*, etc.
 | 
| 74 | 74 | 
 | 
| 75 | 
-As shown above, composition is performed in two stages, as we only need to composite
 | 
|
| 76 | 
-the data from the toplevel element declaration against the composition of previous
 | 
|
| 77 | 
-stages every time we instantiate an element.
 | 
|
| 75 | 
+As shown above, composition is performed in two stages for each element. First
 | 
|
| 76 | 
+we compose everything below the line; this happens just once per 'kind' of
 | 
|
| 77 | 
+element, and the result is re-used. Secondly, we compose the element declaration
 | 
|
| 78 | 
+on top.
 | 
|
| 78 | 79 | 
 | 
| 79 | 80 | 
 | 
| 80 | 81 | 
 Source
 | 
| ... | ... | @@ -87,7 +88,7 @@ of the host or build environment. | 
| 87 | 88 | 
 This is to say that:
 | 
| 88 | 89 | 
 | 
| 89 | 90 | 
 * User configuration on the host, or filesystem outside of BuildStream designated
 | 
| 90 | 
-  directories, must never be modified as a side effect of running BuildStream.
 | 
|
| 91 | 
+  directories, must never be modified as a side-effect of running BuildStream.
 | 
|
| 91 | 92 | 
 | 
| 92 | 93 | 
 * When the Source uses host tools, host side configurations must never result in
 | 
| 93 | 94 | 
   deviations of what is staged to a build directory. The Source must behave exactly
 | 
| ... | ... | @@ -95,12 +96,12 @@ This is to say that: | 
| 95 | 96 | 
 | 
| 96 | 97 | 
 The responsibilities of a source include:
 | 
| 97 | 98 | 
 | 
| 98 | 
-* Loading the source's configuration from the core provided dictionary
 | 
|
| 99 | 
+* Loading the source's configuration from the core provided dictionary.
 | 
|
| 99 | 100 | 
 * Providing a unique key for any source specific configuration which might
 | 
| 100 | 
-  effect the staged source
 | 
|
| 101 | 
-* Implement discovery of new versions of the source upstream (referred to as *"tracking"*)
 | 
|
| 102 | 
-* Staging the unpacked source to a given directory
 | 
|
| 103 | 
-* Preparing workspaces
 | 
|
| 101 | 
+  affect the staged source.
 | 
|
| 102 | 
+* Implement discovery of new versions of the source upstream (referred to as *"tracking"*).
 | 
|
| 103 | 
+* Staging the unpacked source to a given directory.
 | 
|
| 104 | 
+* Preparing workspaces.
 | 
|
| 104 | 105 | 
 | 
| 105 | 106 | 
 | 
| 106 | 107 | 
 Source data structure
 | 
| ... | ... | @@ -17,5 +17,5 @@ all: $(REQUIREMENTS_TXT) | 
| 17 | 17 | 
 	$(eval VENVDIR := $(shell mktemp -d $(CURDIR)/.bst-venv.XXXXXX))
 | 
| 18 | 18 | 
 	$(VENV) $(VENVDIR)
 | 
| 19 | 19 | 
 	$(VENV_PIP) install -r $^
 | 
| 20 | 
-	$(VENV_PIP) freeze -r $^ > $@
 | 
|
| 20 | 
+	$(VENV_PIP) freeze -r $^ | grep -v pkg-resources > $@
 | 
|
| 21 | 21 | 
 	rm -rf $(VENVDIR)
 | 
| 1 | 
+#
 | 
|
| 2 | 
+#  Copyright (C) 2018 Codethink Limited
 | 
|
| 3 | 
+#  Copyright (C) 2018 Bloomberg Finance LP
 | 
|
| 4 | 
+#
 | 
|
| 5 | 
+#  This program is free software; you can redistribute it and/or
 | 
|
| 6 | 
+#  modify it under the terms of the GNU Lesser General Public
 | 
|
| 7 | 
+#  License as published by the Free Software Foundation; either
 | 
|
| 8 | 
+#  version 2 of the License, or (at your option) any later version.
 | 
|
| 9 | 
+#
 | 
|
| 10 | 
+#  This library is distributed in the hope that it will be useful,
 | 
|
| 11 | 
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
 | 
|
| 12 | 
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 | 
|
| 13 | 
+#  Lesser General Public License for more details.
 | 
|
| 14 | 
+#
 | 
|
| 15 | 
+#  You should have received a copy of the GNU Lesser General Public
 | 
|
| 16 | 
+#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
 | 
|
| 17 | 
+#
 | 
|
| 18 | 
+#  Authors: Richard Maw <richard maw codethink co uk>
 | 
|
| 19 | 
+#
 | 
|
| 20 | 
+  | 
|
| 21 | 
+import os
 | 
|
| 22 | 
+import pytest
 | 
|
| 23 | 
+  | 
|
| 24 | 
+from buildstream.plugintestutils import cli
 | 
|
| 25 | 
+  | 
|
| 26 | 
+  | 
|
| 27 | 
+# Project directory
 | 
|
| 28 | 
+DATA_DIR = os.path.join(
 | 
|
| 29 | 
+    os.path.dirname(os.path.realpath(__file__)),
 | 
|
| 30 | 
+    "project",
 | 
|
| 31 | 
+)
 | 
|
| 32 | 
+  | 
|
| 33 | 
+  | 
|
| 34 | 
+@pytest.mark.datafiles(DATA_DIR)
 | 
|
| 35 | 
+def test_artifact_log(cli, tmpdir, datafiles):
 | 
|
| 36 | 
+    project = os.path.join(datafiles.dirname, datafiles.basename)
 | 
|
| 37 | 
+  | 
|
| 38 | 
+    # Get the cache key of our test element
 | 
|
| 39 | 
+    result = cli.run(project=project, silent=True, args=[
 | 
|
| 40 | 
+        '--no-colors',
 | 
|
| 41 | 
+        'show', '--deps', 'none', '--format', '%{full-key}',
 | 
|
| 42 | 
+        'target.bst'
 | 
|
| 43 | 
+    ])
 | 
|
| 44 | 
+    key = result.output.strip()
 | 
|
| 45 | 
+  | 
|
| 46 | 
+    # Ensure we have an artifact to read
 | 
|
| 47 | 
+    result = cli.run(project=project, args=['build', 'target.bst'])
 | 
|
| 48 | 
+    assert result.exit_code == 0
 | 
|
| 49 | 
+  | 
|
| 50 | 
+    # Read the log via the element name
 | 
|
| 51 | 
+    result = cli.run(project=project, args=['artifact', 'log', 'target.bst'])
 | 
|
| 52 | 
+    assert result.exit_code == 0
 | 
|
| 53 | 
+    log = result.output
 | 
|
| 54 | 
+  | 
|
| 55 | 
+    # Read the log via the key
 | 
|
| 56 | 
+    result = cli.run(project=project, args=['artifact', 'log', 'test/target/' + key])
 | 
|
| 57 | 
+    assert result.exit_code == 0
 | 
|
| 58 | 
+    assert log == result.output
 | 
|
| 59 | 
+  | 
|
| 60 | 
+    # Read the log via glob
 | 
|
| 61 | 
+    result = cli.run(project=project, args=['artifact', 'log', 'test/target/*'])
 | 
|
| 62 | 
+    assert result.exit_code == 0
 | 
|
| 63 | 
+    # The artifact is cached under both a strong key and a weak key
 | 
|
| 64 | 
+    assert (log + log) == result.output
 | 
| ... | ... | @@ -10,4 +10,4 @@ sources: | 
| 10 | 10 | 
 public:
 | 
| 11 | 11 | 
   bst:
 | 
| 12 | 12 | 
     overlap-whitelist:
 | 
| 13 | 
-    - "file*"
 | 
|
| 13 | 
+    - "/file*"
 | 
| ... | ... | @@ -8,9 +8,9 @@ sources: | 
| 8 | 8 | 
 - kind: local
 | 
| 9 | 9 | 
   path: "b"
 | 
| 10 | 10 | 
 variables:
 | 
| 11 | 
-  FILE: file
 | 
|
| 11 | 
+  FILE: /file
 | 
|
| 12 | 12 | 
 public:
 | 
| 13 | 13 | 
   bst:
 | 
| 14 | 14 | 
     overlap-whitelist:
 | 
| 15 | 
-    - file2
 | 
|
| 15 | 
+    - /file2
 | 
|
| 16 | 16 | 
     - "%{FILE}3"
 | 
| ... | ... | @@ -8,4 +8,4 @@ sources: | 
| 8 | 8 | 
 public:
 | 
| 9 | 9 | 
   bst:
 | 
| 10 | 10 | 
     overlap-whitelist:
 | 
| 11 | 
-    - "file*"
 | 
|
| 11 | 
+    - "/file*"
 | 
| ... | ... | @@ -37,40 +37,6 @@ DATA_DIR = os.path.join( | 
| 37 | 37 | 
 )
 | 
| 38 | 38 | 
 | 
| 39 | 39 | 
 | 
| 40 | 
-@pytest.mark.integration
 | 
|
| 41 | 
-@pytest.mark.datafiles(DATA_DIR)
 | 
|
| 42 | 
-def test_artifact_log(cli, tmpdir, datafiles):
 | 
|
| 43 | 
-    project = os.path.join(datafiles.dirname, datafiles.basename)
 | 
|
| 44 | 
-  | 
|
| 45 | 
-    # Get the cache key of our test element
 | 
|
| 46 | 
-    result = cli.run(project=project, silent=True, args=[
 | 
|
| 47 | 
-        '--no-colors',
 | 
|
| 48 | 
-        'show', '--deps', 'none', '--format', '%{full-key}',
 | 
|
| 49 | 
-        'base.bst'
 | 
|
| 50 | 
-    ])
 | 
|
| 51 | 
-    key = result.output.strip()
 | 
|
| 52 | 
-  | 
|
| 53 | 
-    # Ensure we have an artifact to read
 | 
|
| 54 | 
-    result = cli.run(project=project, args=['build', 'base.bst'])
 | 
|
| 55 | 
-    assert result.exit_code == 0
 | 
|
| 56 | 
-  | 
|
| 57 | 
-    # Read the log via the element name
 | 
|
| 58 | 
-    result = cli.run(project=project, args=['artifact', 'log', 'base.bst'])
 | 
|
| 59 | 
-    assert result.exit_code == 0
 | 
|
| 60 | 
-    log = result.output
 | 
|
| 61 | 
-  | 
|
| 62 | 
-    # Read the log via the key
 | 
|
| 63 | 
-    result = cli.run(project=project, args=['artifact', 'log', 'test/base/' + key])
 | 
|
| 64 | 
-    assert result.exit_code == 0
 | 
|
| 65 | 
-    assert log == result.output
 | 
|
| 66 | 
-  | 
|
| 67 | 
-    # Read the log via glob
 | 
|
| 68 | 
-    result = cli.run(project=project, args=['artifact', 'log', 'test/base/*'])
 | 
|
| 69 | 
-    assert result.exit_code == 0
 | 
|
| 70 | 
-    # The artifact is cached under both a strong key and a weak key
 | 
|
| 71 | 
-    assert (log + log) == result.output
 | 
|
| 72 | 
-  | 
|
| 73 | 
-  | 
|
| 74 | 40 | 
 # A test to capture the integration of the cachebuildtrees
 | 
| 75 | 41 | 
 # behaviour, which by default is to include the buildtree
 | 
| 76 | 42 | 
 # content of an element on caching.
 | 
| ... | ... | @@ -212,6 +212,7 @@ def test_host_files_expand_environ(cli, tmpdir, datafiles, path): | 
| 212 | 212 | 
 # Test that bind mounts defined in project.conf dont mount in isolation
 | 
| 213 | 213 | 
 @pytest.mark.parametrize("path", [("/etc/pony.conf"), ("/usr/share/pony/pony.txt")])
 | 
| 214 | 214 | 
 @pytest.mark.datafiles(DATA_DIR)
 | 
| 215 | 
+@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
 | 
|
| 215 | 216 | 
 def test_isolated_no_mount(cli, tmpdir, datafiles, path):
 | 
| 216 | 217 | 
     project = os.path.join(datafiles.dirname, datafiles.basename)
 | 
| 217 | 218 | 
     ponyfile = os.path.join(project, 'files', 'shell-mount', 'pony.txt')
 | 
| ... | ... | @@ -226,6 +227,8 @@ def test_isolated_no_mount(cli, tmpdir, datafiles, path): | 
| 226 | 227 | 
         }
 | 
| 227 | 228 | 
     })
 | 
| 228 | 229 | 
     assert result.exit_code != 0
 | 
| 230 | 
+    assert path in result.stderr
 | 
|
| 231 | 
+    assert 'No such file or directory' in result.stderr
 | 
|
| 229 | 232 | 
 | 
| 230 | 233 | 
 | 
| 231 | 234 | 
 # Test that we warn about non-existing files on the host if the mount is not
 | 
