This is an automated email from the ASF dual-hosted git repository.

root pushed a commit to branch tristan/multiple-caches
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 518ec59f73125a52c6c689dbc016f99d25c4e71d
Author: Sam Thursfield <[email protected]>
AuthorDate: Tue Nov 28 12:03:10 2017 +0000

    tests: Exercise the new multiple cache support
---
 tests/frontend/pull.py           | 132 ++++++++++++++++++++++++++++++++-------
 tests/frontend/push.py           |  23 +++----
 tests/testutils/artifactshare.py |  41 ++++++++----
 tests/testutils/runcli.py        |  19 ++++++
 4 files changed, 171 insertions(+), 44 deletions(-)

diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index 18a4b46..6920566 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -4,8 +4,6 @@ import pytest
 from tests.testutils import cli, create_artifact_share, configure_remote_caches
 from tests.testutils.site import IS_LINUX
 
-from buildstream import _yaml
-
 # Project directory
 DATA_DIR = os.path.join(
     os.path.dirname(os.path.realpath(__file__)),
@@ -25,16 +23,30 @@ def assert_shared(cli, share, project, element_name):
                              .format(share.repo, element_name))
 
 
+# Assert that a given artifact is NOT in the share
+#
+def assert_not_shared(cli, share, project, element_name):
+    # NOTE: 'test' here is the name of the project
+    # specified in the project.conf we are testing with.
+    #
+    cache_key = cli.get_element_key(project, element_name)
+    if share.has_artifact('test', element_name, cache_key):
+        raise AssertionError("Artifact share at {} unexpectedly contains the element {}"
+                             .format(share.repo, element_name))
+
+
 @pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
 @pytest.mark.parametrize(
-    'override_url, project_url, user_url',
+    'override_urls, project_urls, user_urls',
     [
-        pytest.param(None, None, 'share.repo', id='user-config'),
-        pytest.param(None, 'share.repo', None, id='project-config'),
-        pytest.param('share.repo', None, None, id='project-override-in-user-config'),
+        # The leftmost cache is the highest priority one.
+        pytest.param([], [], ['share.repo', '/tmp/do-not-use/user'], id='user-config'),
+        pytest.param([], ['share.repo', '/tmp/do-not-use/project'], ['/tmp/do-not-use/user'], id='project-config'),
+        pytest.param(['share.repo'], ['/tmp/do-not-use/project'], ['/tmp/do-not-use/user'],
+                     id='project-override-in-user-config'),
     ])
 @pytest.mark.datafiles(DATA_DIR)
-def test_push_pull(cli, tmpdir, datafiles, override_url, project_url, user_url):
+def test_push_pull(cli, tmpdir, datafiles, override_urls, project_urls, user_urls):
     project = os.path.join(datafiles.dirname, datafiles.basename)
     share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
 
@@ -45,25 +57,23 @@ def test_push_pull(cli, tmpdir, datafiles, override_url, project_url, user_url):
     # Assert that we are now cached locally
     assert cli.get_element_state(project, 'import-bin.bst') == 'cached'
 
-    override_url = share.repo if override_url == 'share.repo' else override_url
-    project_url = share.repo if project_url == 'share.repo' else project_url
-    user_url = share.repo if user_url == 'share.repo' else user_url
+    override_urls = [share.repo if url == 'share.repo' else url for url in override_urls]
+    project_urls = [share.repo if url == 'share.repo' else url for url in project_urls]
+    user_urls = [share.repo if url == 'share.repo' else url for url in user_urls]
 
+    # Configure artifact share
     project_conf_file = str(datafiles.join('project.conf'))
-    configure_remote_caches(cli, project_conf_file, override_url, project_url, user_url)
+    configure_remote_caches(cli, project_conf_file, override_urls, project_urls, user_urls)
+    share.update_summary()
 
-    # Now try bst push
+    # Now try bst push. This will push to the highest priority cache.
     result = cli.run(project=project, args=['push', 'import-bin.bst'])
     result.assert_success()
+    share.update_summary()
 
     # And finally assert that the artifact is in the share
     assert_shared(cli, share, project, 'import-bin.bst')
 
-    # Make sure we update the summary in our artifact share,
-    # we dont have a real server around to do it
-    #
-    share.update_summary()
-
     # Now we've pushed, delete the user's local artifact cache
     # directory and try to redownload it from the share
     #
@@ -116,17 +126,13 @@ def test_push_pull_all(cli, tmpdir, datafiles):
     # Now try bst push
     result = cli.run(project=project, args=['push', '--deps', 'all', 'target.bst'])
     result.assert_success()
+    share.update_summary()
 
     # And finally assert that the artifact is in the share
     all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst']
     for element_name in all_elements:
         assert_shared(cli, share, project, element_name)
 
-    # Make sure we update the summary in our artifact share,
-    # we dont have a real server around to do it
-    #
-    share.update_summary()
-
     # Now we've pushed, delete the user's local artifact cache
     # directory and try to redownload it from the share
     #
@@ -145,3 +151,85 @@ def test_push_pull_all(cli, tmpdir, datafiles):
     # And assert that it's again in the local cache, without having built
     for element_name in all_elements:
         assert cli.get_element_state(project, element_name) == 'cached'
+
+
[email protected](not IS_LINUX, reason='Only available on linux')
[email protected](DATA_DIR)
+def test_push_pull_specific_remote(cli, tmpdir, datafiles):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+
+    good_share = create_artifact_share(os.path.join(str(tmpdir), 'goodartifactshare'))
+    bad_share = create_artifact_share(os.path.join(str(tmpdir), 'badartifactshare'))
+
+    # First build it without the artifact cache configured
+    result = cli.run(project=project, args=['build', 'target.bst'])
+    assert result.exit_code == 0
+
+    # Assert that we are now cached locally
+    state = cli.get_element_state(project, 'target.bst')
+    assert state == 'cached'
+
+    # Configure only the artifact share that we want to avoid.
+    project_conf_file = str(datafiles.join('project.conf'))
+    configure_remote_caches(cli, project_conf_file, [bad_share.repo], [bad_share.repo], [bad_share.repo])
+
+    # Now try bst push
+    result = cli.run(project=project, args=[
+        'push', 'target.bst', '--remote', good_share.repo
+    ])
+    assert result.exit_code == 0
+    good_share.update_summary()
+    bad_share.update_summary()
+
+    # Assert that all the artifacts are in the share we pushed
+    # to, and not the other.
+    assert_shared(cli, good_share, project, 'target.bst')
+    assert_not_shared(cli, bad_share, project, 'target.bst')
+
+    # Now we've pushed, delete the user's local artifact cache
+    # directory and try to redownload it from the share
+    #
+    artifacts = os.path.join(cli.directory, 'artifacts')
+    shutil.rmtree(artifacts)
+
+    result = cli.run(project=project, args=['pull', 'target.bst', '--remote',
+                                            good_share.repo])
+    assert result.exit_code == 0
+
+    # And assert that it's again in the local cache, without having built
+    assert cli.get_element_state(project, 'target.bst') == 'cached'
+
+
[email protected](not IS_LINUX, reason='Only available on linux')
[email protected](DATA_DIR)
+def test_pull_secondary_cache(cli, tmpdir, datafiles):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+
+    share1 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1'))
+    share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))
+
+    # First build it without the artifact cache configured
+    result = cli.run(project=project, args=['build', 'target.bst'])
+    assert result.exit_code == 0
+
+    # Assert that we are now cached locally
+    state = cli.get_element_state(project, 'target.bst')
+    assert state == 'cached'
+
+    # bst push to secondary remote
+    result = cli.run(project=project, args=[
+        'push', 'target.bst', '--remote', share2.repo
+    ])
+    assert result.exit_code == 0
+    share2.update_summary()
+
+    # Now we've pushed, delete the user's local artifact cache
+    artifacts = os.path.join(cli.directory, 'artifacts')
+    shutil.rmtree(artifacts)
+
+    # Configure artifact shares
+    project_conf_file = str(datafiles.join('project.conf'))
+    configure_remote_caches(cli, project_conf_file, [], [share1.repo, share2.repo], [])
+
+    # And assert that it's found in share2
+    assert cli.get_element_state(project, 'target.bst') == 'downloadable'
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 9d897a8..06f53cf 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -3,8 +3,6 @@ import pytest
 from tests.testutils import cli, create_artifact_share, configure_remote_caches
 from tests.testutils.site import IS_LINUX
 
-from buildstream import _yaml
-
 # Project directory
 DATA_DIR = os.path.join(
     os.path.dirname(os.path.realpath(__file__)),
@@ -26,14 +24,16 @@ def assert_shared(cli, share, project, element_name):
 
 @pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
 @pytest.mark.parametrize(
-    'override_url, project_url, user_url',
+    'override_urls, project_urls, user_urls',
     [
-        pytest.param(None, None, 'share.repo', id='user-config'),
-        pytest.param(None, 'share.repo', None, id='project-config'),
-        pytest.param('share.repo', None, None, id='project-override-in-user-config'),
+        # The leftmost cache is the highest priority one.
+        pytest.param([], [], ['share.repo', '/tmp/do-not-use/user'], id='user-config'),
+        pytest.param([], ['share.repo', '/tmp/do-not-use/project'], ['/tmp/do-not-use/user'], id='project-config'),
+        pytest.param(['share.repo'], ['/tmp/do-not-use/project'], ['/tmp/do-not-use/user'],
+                     id='project-override-in-user-config'),
     ])
 @pytest.mark.datafiles(DATA_DIR)
-def test_push(cli, tmpdir, datafiles, override_url, user_url, project_url):
+def test_push(cli, tmpdir, datafiles, user_urls, project_urls, override_urls):
     project = str(datafiles)
     share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
 
@@ -44,12 +44,13 @@ def test_push(cli, tmpdir, datafiles, override_url, user_url, project_url):
     # Assert that we are now cached locally
     assert cli.get_element_state(project, 'target.bst') == 'cached'
 
-    override_url = share.repo if override_url == 'share.repo' else override_url
-    project_url = share.repo if project_url == 'share.repo' else project_url
-    user_url = share.repo if user_url == 'share.repo' else user_url
+    override_urls = [share.repo if url == 'share.repo' else url for url in override_urls]
+    project_urls = [share.repo if url == 'share.repo' else url for url in project_urls]
+    user_urls = [share.repo if url == 'share.repo' else url for url in user_urls]
 
+    # Configure artifact share
     project_conf_file = str(datafiles.join('project.conf'))
-    configure_remote_caches(cli, project_conf_file, override_url, project_url, user_url)
+    configure_remote_caches(cli, project_conf_file, override_urls, project_urls, user_urls)
 
     # Now try bst push
     result = cli.run(project=project, args=['push', 'target.bst'])
diff --git a/tests/testutils/artifactshare.py b/tests/testutils/artifactshare.py
index ebf38f3..f616e57 100644
--- a/tests/testutils/artifactshare.py
+++ b/tests/testutils/artifactshare.py
@@ -115,28 +115,47 @@ def create_artifact_share(directory):
 # User config is set through a helper on the 'cli' object, while the
 # project.conf file is updated manually using the _yaml module.
 #
-def configure_remote_caches(cli, project_conf_file, override_url, project_url=None, user_url=None):
+def configure_remote_caches(cli, project_conf_file, override_urls, project_urls=[], user_urls=[]):
     user_config = {}
-    if user_url is not None:
+    if len(user_urls) == 1:
         user_config['artifacts'] = {
-            'url': user_url
+            'url': user_urls[0]
         }
+    elif len(user_urls) > 1:
+        user_config['artifacts'] = [
+            {'url': url} for url in user_urls
+        ]
 
-    if override_url is not None:
+    if len(override_urls) == 1:
         user_config['projects'] = {
             'test': {
                 'artifacts': {
-                    'url': override_url,
+                    'url': override_urls[0],
                 }
             }
         }
+    elif len(override_urls) > 1:
+        user_config['projects'] = {
+            'test': {
+                'artifacts': [
+                    {'url': url} for url in override_urls
+                ]
+            }
+        }
     cli.configure(user_config)
 
-    if project_url is not None:
+    if len(project_urls) > 0:
         project_config = _yaml.load(project_conf_file)
-        project_config.update({
-            'artifacts': {
-                'url': project_url,
-            }
-        })
+        if len(project_urls) == 1:
+            project_config.update({
+                'artifacts': {
+                    'url': project_urls[0],
+                }
+            })
+        elif len(project_urls) > 1:
+            project_config.update({
+                'artifacts': [
+                    {'url': url} for url in project_urls
+                ]
+            })
         _yaml.dump(_yaml.node_sanitize(project_config), filename=project_conf_file)
diff --git a/tests/testutils/runcli.py b/tests/testutils/runcli.py
index d1b8c01..27aa21f 100644
--- a/tests/testutils/runcli.py
+++ b/tests/testutils/runcli.py
@@ -290,6 +290,24 @@ class Cli():
         result.assert_success()
-        return result.output.strip()
 
+        # Strip failure messages that result from pulling from invalid caches.
+        # These are harmless in some cases.
+        #
+        # There are two reasons for this ugly hack. Firstly, click.CliRunner
+        # makes it impossible for us to parse stdout independently of stderr.
+        # This is <https://github.com/pallets/click/issues/371>.
+        #
+        # Secondly, we can't use the normal BuildStream logging at the point
+        # that we need to record failure to contact a configured cache, so
+        # there's no easy way to hide the message. This will change soon and
+        # we should be able to remove this hack once we fix
+        # <https://gitlab.com/BuildStream/buildstream/issues/168>.
+        result_lines = []
+        for line in result.output.split('\n'):
+            if not line.startswith('Failed to fetch remote refs'):
+                result_lines.append(line)
+
+        return '\n'.join(result_lines).strip()
+
     # Fetch an element's cache key by invoking bst show
     # on the project with the CLI
     #

Reply via email to