This is an automated email from the ASF dual-hosted git repository.

akitouni pushed a commit to branch abderrahim/cascache-cleanup
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 6f5d61a59ac294b3b850c042db144b089f2a94fa
Author: Abderrahim Kitouni <[email protected]>
AuthorDate: Sun Apr 27 16:59:10 2025 +0100

    casremote: drop the push_message method
    
    It's now unused
---
 src/buildstream/_cas/casremote.py | 18 -------------
 tests/artifactcache/push.py       | 55 ---------------------------------------
 2 files changed, 73 deletions(-)
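
For context, the dropped method only serialized the protobuf message and handed the bytes to the local CAS, so any remaining caller could do the same through CASCache.add_object directly. A minimal sketch, assuming a connected CASRemote instance named `remote` and BuildStream's bundled REAPI protos (the `remote` handle and its setup are illustrative, not a public API):

    # Sketch: rough equivalent of the removed CASRemote.push_message().
    # Assumes `remote` is a CASRemote that was configured elsewhere.
    from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2

    command = remote_execution_pb2.Command(arguments=["/usr/bin/gcc", "--help"])

    remote.init()  # sets up the connection and local_cas_instance_name
    digest = remote.cascache.add_object(
        buffer=command.SerializeToString(),
        instance_name=remote.local_cas_instance_name,
    )
    # digest.hash and digest.size_bytes identify the stored message, which
    # is what the removed test_push_message asserted against the share.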

diff --git a/src/buildstream/_cas/casremote.py b/src/buildstream/_cas/casremote.py
index ae4aa9004..2db8033e4 100644
--- a/src/buildstream/_cas/casremote.py
+++ b/src/buildstream/_cas/casremote.py
@@ -61,24 +61,6 @@ class CASRemote(BaseRemote):
         response = local_cas.GetInstanceNameForRemotes(request)
         self.local_cas_instance_name = response.instance_name
 
-    # push_message():
-    #
-    # Push the given protobuf message to a remote.
-    #
-    # Args:
-    #     message (Message): A protobuf message to push.
-    #
-    # Raises:
-    #     (CASRemoteError): if there was an error
-    #
-    def push_message(self, message):
-
-        message_buffer = message.SerializeToString()
-
-        self.init()
-
-        return self.cascache.add_object(buffer=message_buffer, instance_name=self.local_cas_instance_name)
-
 
 # Represents a batch of blobs queued for fetching.
 #
diff --git a/tests/artifactcache/push.py b/tests/artifactcache/push.py
index 7c2160f6e..2b6f9274a 100644
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -131,58 +131,3 @@ def test_push_split(cli, tmpdir, datafiles):
             cli.get_artifact_name(project_dir, "test", "target.bst", cache_key=element_key)
         )
         assert storage.get_cas_files(proto) is not None
-
-
[email protected](DATA_DIR)
-def test_push_message(tmpdir, datafiles):
-    project_dir = str(datafiles)
-
-    # Set up an artifact cache.
-    artifactshare = os.path.join(str(tmpdir), "artifactshare")
-    with create_artifact_share(artifactshare) as share:
-        # Configure artifact share
-        rootcache_dir = os.path.join(str(tmpdir), "cache")
-        user_config_file = str(tmpdir.join("buildstream.conf"))
-        user_config = {
-            "scheduler": {"pushers": 1},
-            "artifacts": {
-                "servers": [
-                    {
-                        "url": share.repo,
-                        "push": True,
-                    }
-                ]
-            },
-            "cachedir": rootcache_dir,
-        }
-
-        # Write down the user configuration file
-        _yaml.roundtrip_dump(user_config, file=user_config_file)
-
-        with dummy_context(config=user_config_file) as context:
-            # Load the project manually
-            project = Project(project_dir, context)
-            project.ensure_fully_loaded()
-
-            # Create a local artifact cache handle
-            artifactcache = context.artifactcache
-
-            # Initialize remotes
-            context.initialize_remotes(True, True, None, None)
-            assert artifactcache.has_push_remotes()
-
-            command = remote_execution_pb2.Command(
-                arguments=["/usr/bin/gcc", "--help"],
-                working_directory="/buildstream-build",
-                output_directories=["/buildstream-install"],
-            )
-
-            # Push the message object
-            _, remotes = artifactcache.get_remotes(project.name, True)
-            assert len(remotes) == 1
-            command_digest = remotes[0].push_message(command)
-            message_hash, message_size = command_digest.hash, command_digest.size_bytes
-
-        assert message_hash and message_size
-        message_digest = remote_execution_pb2.Digest(hash=message_hash, size_bytes=message_size)
-        assert share.has_object(message_digest)
