Repository: cassandra-dtest
Updated Branches:
  refs/heads/master 77be87ecf -> a5f90890f


Verify that archiving of audit + FQL files works

Patch by marcuse; reviewed by Ariel Weisberg for CASSANDRA-14373


Project: http://git-wip-us.apache.org/repos/asf/cassandra-dtest/repo
Commit: http://git-wip-us.apache.org/repos/asf/cassandra-dtest/commit/a5f90890
Tree: http://git-wip-us.apache.org/repos/asf/cassandra-dtest/tree/a5f90890
Diff: http://git-wip-us.apache.org/repos/asf/cassandra-dtest/diff/a5f90890

Branch: refs/heads/master
Commit: a5f90890f3cdd516c3955c868234ea1ffc7bd093
Parents: 77be87e
Author: Marcus Eriksson <marc...@apache.org>
Authored: Fri Sep 21 12:46:27 2018 +0200
Committer: Marcus Eriksson <marc...@apache.org>
Committed: Thu Oct 11 13:17:47 2018 +0200

----------------------------------------------------------------------
 auditlog_test.py | 139 ++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 139 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/cassandra-dtest/blob/a5f90890/auditlog_test.py
----------------------------------------------------------------------
diff --git a/auditlog_test.py b/auditlog_test.py
new file mode 100644
index 0000000..7a57604
--- /dev/null
+++ b/auditlog_test.py
@@ -0,0 +1,139 @@
+import logging
+import os
+import os.path
+import pytest
+import shlex
+import stat
+import subprocess
+import tempfile
+import time
+
+from ccmlib.node import handle_external_tool_process
+from dtest import Tester
+
+since = pytest.mark.since
+logger = logging.getLogger(__name__)
+
@since('4.0')
class TestAuditlog(Tester):
    """
    Verify archiving of audit log and full query log (FQL) files (CASSANDRA-14373).

    Each test points the node at a fresh temp log directory and installs an
    archive command — a tiny shell script built by _create_script — that moves
    rolled chronicle (.cq4) segments into a second temp directory. The tests
    then assert the segments were moved out of the live log directory.
    """

    def test_archiving(self):
        """Audit log segments rolled by TEST_SECONDLY are archived, leaving no .cq4 behind."""
        cluster = self.cluster
        log_dir = tempfile.mkdtemp('logs')
        moved_log_dir, move_script = self._create_script()
        cluster.set_configuration_options(values={'audit_logging_options':
                                                  {'enabled': True,
                                                   'audit_logs_dir': log_dir,
                                                   'roll_cycle': 'TEST_SECONDLY',
                                                   'archive_command': '%s %%path' % (move_script,)}})
        cluster.populate(1).start(wait_for_binary_proto=True)
        node = cluster.nodelist()[0]
        node.stress(['write', 'n=100k', "no-warmup", "cl=ONE", "-rate", "threads=300"])
        node.nodetool("disableauditlog")
        # at least one segment must have been rolled and moved by the archive script
        assert len(os.listdir(moved_log_dir)) > 0
        self._assert_no_cq4(log_dir)

    def test_fql_nodetool_options(self):
        """nodetool enablefullquerylog options must override archive_command from cassandra.yaml."""
        cluster = self.cluster
        log_dir = tempfile.mkdtemp('logs')
        moved_log_dir, move_script = self._create_script()
        # deliberately configure a bogus yaml archive_command - the nodetool
        # arguments below must take precedence over it
        cluster.set_configuration_options(values={'full_query_logging_options':
                                                  {'log_dir': log_dir,
                                                   'archive_command': 'conf should not be used'}})
        cluster.populate(1).start(wait_for_binary_proto=True)
        node = cluster.nodelist()[0]
        node.nodetool("enablefullquerylog --archive-command \"%s %%path\" --roll-cycle=TEST_SECONDLY" % move_script)
        node.stress(['write', 'n=100k', "no-warmup", "cl=ONE", "-rate", "threads=300"])
        # make sure at least one file has been rolled and archived:
        assert node.grep_log("Executing archive command", filename="debug.log")
        assert len(os.listdir(moved_log_dir)) > 0
        node.nodetool("disablefullquerylog")
        self._assert_no_cq4(log_dir)
        # make sure the non-rolled file gets archived when we disable fql
        node.watch_log_for("Archiving existing file", filename="debug.log")

    def test_archiving_fql(self):
        """FQL segments rolled by TEST_SECONDLY are archived while logging is enabled."""
        cluster = self.cluster
        log_dir = tempfile.mkdtemp('logs')
        moved_log_dir, move_script = self._create_script()
        cluster.set_configuration_options(values={'full_query_logging_options':
                                                  {'log_dir': log_dir,
                                                   'roll_cycle': 'TEST_SECONDLY',
                                                   'archive_command': '%s %%path' % (move_script,)}})
        cluster.populate(1).start(wait_for_binary_proto=True)
        node = cluster.nodelist()[0]
        node.nodetool("enablefullquerylog")
        node.stress(['write', 'n=100k', "no-warmup", "cl=ONE", "-rate", "threads=300"])
        # make sure at least one file has been rolled and archived:
        assert node.grep_log("Executing archive command", filename="debug.log")
        assert len(os.listdir(moved_log_dir)) > 0
        node.nodetool("disablefullquerylog")
        self._assert_no_cq4(log_dir)
        # make sure the non-rolled file gets archived when we disable fql
        node.watch_log_for("Archiving existing file", filename="debug.log")

    def test_archive_on_startup(self):
        """Stale .cq4 files already in log_dir are archived when fql is enabled."""
        cluster = self.cluster
        log_dir = tempfile.mkdtemp('logs')
        moved_log_dir, move_script = self._create_script()
        files = self._create_fake_logfiles(log_dir, 10)

        cluster.set_configuration_options(values={'full_query_logging_options':
                                                  {'log_dir': log_dir,
                                                   'roll_cycle': 'TEST_SECONDLY',
                                                   'archive_command': '%s %%path' % (move_script,)}})
        cluster.populate(1).start(wait_for_binary_proto=True)
        node = cluster.nodelist()[0]
        node.nodetool("enablefullquerylog")

        self._assert_files_archived(files, moved_log_dir)

    def test_archive_on_shutdown(self):
        """.cq4 files left in log_dir are archived when fql is disabled."""
        cluster = self.cluster
        log_dir = tempfile.mkdtemp('logs')
        moved_log_dir, move_script = self._create_script()

        cluster.set_configuration_options(values={'full_query_logging_options':
                                                  {'log_dir': log_dir,
                                                   'roll_cycle': 'TEST_SECONDLY',
                                                   'archive_command': '%s %%path' % (move_script,)}})
        cluster.populate(1).start(wait_for_binary_proto=True)
        node = cluster.nodelist()[0]
        node.nodetool("enablefullquerylog")

        # adding a bunch of files after fql is enabled - these will get
        # archived when we disable
        files = self._create_fake_logfiles(log_dir, 10)

        node.nodetool("disablefullquerylog")

        self._assert_files_archived(files, moved_log_dir)

    def _assert_no_cq4(self, log_dir):
        # no chronicle segment may remain in the live log directory
        for f in os.listdir(log_dir):
            assert not f.endswith(".cq4")

    def _create_fake_logfiles(self, log_dir, count):
        # create empty .cq4 files that look like not-yet-archived chronicle segments
        files = []
        for _ in range(count):
            fd, fakelogfile = tempfile.mkstemp(dir=log_dir, suffix='.cq4')
            os.close(fd)  # mkstemp returns an open fd - close it to avoid leaking descriptors
            files.append(fakelogfile)
        for f in files:
            assert os.path.isfile(f)
        assert len(files) == count
        return files

    def _assert_files_archived(self, files, moved_log_dir):
        # every fake segment must have been moved (not copied) to the archive dir
        for f in files:
            assert not os.path.isfile(f)
            assert os.path.isfile(os.path.join(moved_log_dir, os.path.basename(f)))

    def _create_script(self):
        """
        Create an executable archive script and its target directory.

        The script moves its first argument into a fresh temp directory.
        Returns a (moved_log_dir, script_path) tuple.
        """
        moved_log_dir = tempfile.mkdtemp('movedlogs')
        with tempfile.NamedTemporaryFile(delete=False, mode='w') as f:
            # the shebang must be the very first bytes of the file or direct
            # exec of the script fails with ENOEXEC; quote "$1" so paths with
            # special characters survive
            f.write('#!/bin/sh\nmv "$1" "%s"\n' % moved_log_dir)
            move_script = f.name
        st = os.stat(move_script)
        # add the execute bit so the cassandra process can run the script
        os.chmod(move_script, st.st_mode | stat.S_IEXEC)
        return (moved_log_dir, move_script)


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@cassandra.apache.org
For additional commands, e-mail: commits-h...@cassandra.apache.org

Reply via email to