changeset 8db4d7414471 in /home/hg/repos/gajim

details: http://hg.gajim.org/gajim?cmd=changeset;node=8db4d7414471
description: Remove caps which have not been seen for three months from the db.

        Thanks Asterix for the initial version of this patch!

diffstat:

 src/common/caps.py        |  62 ++++++++++++++++++-------------
 src/common/check_paths.py |   3 +-
 src/common/defs.py        |   2 +-
 src/common/logger.py      |  19 ++++++++-
 src/common/optparser.py   |  22 +++++++++++
 test/unit/test_caps.py    |   4 +-
 6 files changed, 79 insertions(+), 33 deletions(-)

diffs (276 lines):

diff -r bbd9bfbbbaa2 -r 8db4d7414471 src/common/caps.py
--- a/src/common/caps.py        Wed Nov 11 21:38:39 2009 +0100
+++ b/src/common/caps.py        Wed Nov 11 23:14:51 2009 +0100
@@ -40,6 +40,10 @@
 # Features where we cannot safely assume that the other side supports them
 FEATURE_BLACKLIST = [NS_CHATSTATES, NS_XHTML_IM, NS_RECEIPTS, NS_ESESSION]
 
+# Query entry status codes
+NEW = 0
+QUERIED = 1
+CACHED = 2 # got the answer
 
 
################################################################################
 ### Public API of this module
@@ -58,7 +62,7 @@
        supported_features = cache_item.features
        if requested_feature in supported_features:
                return True
-       elif supported_features == [] and cache_item.queried in (0, 1):
+       elif supported_features == [] and cache_item.status in (NEW, QUERIED):
                # assume feature is supported, if we don't know yet, what the 
client
                # is capable of
                return requested_feature not in FEATURE_BLACKLIST
@@ -168,8 +172,8 @@
        def _is_hash_valid(self, identities, features, dataforms):
                ''' To be implemented by subclassess '''
                raise NotImplementedError()             
-       
-       
+               
+
 class ClientCaps(AbstractClientCaps):
        ''' The current XEP-115 implementation '''
        
@@ -188,7 +192,7 @@
                computed_hash = compute_caps_hash(identities, features,
                                dataforms=dataforms, 
hash_method=self._hash_method)
                return computed_hash == self._hash      
-       
+
        
 class OldClientCaps(AbstractClientCaps):
        ''' Old XEP-115 implemtation. Kept around for background competability. 
 '''
@@ -204,7 +208,7 @@
                
        def _is_hash_valid(self, identities, features, dataforms):
                return True     
-               
+
                
 class NullClientCaps(AbstractClientCaps):
        '''
@@ -220,7 +224,7 @@
        def _lookup_in_cache(self, caps_cache):
                # lookup something which does not exist to get a new CacheItem 
created
                cache_item = caps_cache[('dummy', '')]
-               assert cache_item.queried == 0
+               assert cache_item.status != CACHED
                return cache_item
        
        def _discover(self, connection, jid):
@@ -248,7 +252,7 @@
                        #   another object, and we will have plenty of 
identical long
                        #   strings. therefore we can cache them
                        __names = {}
-                       
+                                               
                        def __init__(self, hash_method, hash_, logger):
                                # cached into db
                                self.hash_method = hash_method
@@ -257,12 +261,8 @@
                                self._identities = []
                                self._logger = logger
 
-                               # not cached into db:
-                               # have we sent the query?
-                               # 0 == not queried
-                               # 1 == queried
-                               # 2 == got the answer
-                               self.queried = 0
+                               self.status = NEW
+                               self._recently_seen = False
 
                        def _get_features(self):
                                return self._features
@@ -304,19 +304,28 @@
                                self.features = features
                                self._logger.add_caps_entry(self.hash_method, 
self.hash,
                                        identities, features)
+                               self.status = CACHED
+                               
+                       def update_last_seen(self):
+                               if not self._recently_seen:
+                                       self._recently_seen = True
+                                       
self._logger.update_caps_time(self.hash_method, self.hash)
 
                self.__CacheItem = CacheItem
                self.logger = logger
 
        def initialize_from_db(self):
-               # get data from logger...
-               if self.logger is not None:
-                       for hash_method, hash_, identities, features in \
-                       self.logger.iter_caps_data():
-                               x = self[(hash_method, hash_)]
-                               x.identities = identities
-                               x.features = features
-                               x.queried = 2
+               self._remove_outdated_caps()
+               for hash_method, hash_, identities, features in \
+               self.logger.iter_caps_data():
+                       x = self[(hash_method, hash_)]
+                       x.identities = identities
+                       x.features = features
+                       x.status = CACHED
+       
+       def _remove_outdated_caps(self):
+               '''Removes outdated values from the db'''
+               self.logger.clean_caps_table()
 
        def __getitem__(self, caps):
                if caps in self.__cache:
@@ -336,13 +345,14 @@
                lookup_cache_item = client_caps.get_cache_lookup_strategy()
                q = lookup_cache_item(self)     
                
-               if q.queried == 0:
+               if q.status == NEW:
                        # do query for bare node+hash pair
                        # this will create proper object
-                       q.queried = 1
+                       q.status = QUERIED
                        discover = client_caps.get_discover_strategy()
                        discover(connection, jid)
-
+               else: 
+                       q.update_last_seen()
 
 
################################################################################
 ### Caps network coding
@@ -391,7 +401,7 @@
                                client_caps = OldClientCaps(caps_hash, node)
                        else:
                                client_caps = ClientCaps(caps_hash, node, 
hash_method)
-               
+
                capscache.query_client_of_jid_if_unknown(self, jid, client_caps)
                contact.client_caps = client_caps
 
@@ -409,7 +419,7 @@
                lookup = contact.client_caps.get_cache_lookup_strategy()
                cache_item = lookup(capscache)  
                                        
-               if cache_item.queried == 2:
+               if cache_item.status == CACHED:
                        return
                else:
                        validate = 
contact.client_caps.get_hash_validation_strategy()
diff -r bbd9bfbbbaa2 -r 8db4d7414471 src/common/check_paths.py
--- a/src/common/check_paths.py Wed Nov 11 21:38:39 2009 +0100
+++ b/src/common/check_paths.py Wed Nov 11 23:14:51 2009 +0100
@@ -91,7 +91,8 @@
                CREATE TABLE caps_cache (
                        hash_method TEXT,
                        hash TEXT,
-                       data BLOB);
+                       data BLOB,
+                       last_seen INTEGER);
 
                CREATE TABLE rooms_last_message_time(
                        jid_id INTEGER PRIMARY KEY UNIQUE,
diff -r bbd9bfbbbaa2 -r 8db4d7414471 src/common/defs.py
--- a/src/common/defs.py        Wed Nov 11 21:38:39 2009 +0100
+++ b/src/common/defs.py        Wed Nov 11 23:14:51 2009 +0100
@@ -27,7 +27,7 @@
 datadir = '../'
 localedir = '../po'
 
-version = '0.12.5.8-dev'
+version = '0.13.0.1-dev'
 
 import sys, os.path
 for base in ('.', 'common'):
diff -r bbd9bfbbbaa2 -r 8db4d7414471 src/common/logger.py
--- a/src/common/logger.py      Wed Nov 11 21:38:39 2009 +0100
+++ b/src/common/logger.py      Wed Nov 11 23:14:51 2009 +0100
@@ -838,14 +838,27 @@
                gzip.close()
                data = string.getvalue()
                self.cur.execute('''
-                       INSERT INTO caps_cache ( hash_method, hash, data )
-                       VALUES (?, ?, ?);
-                       ''', (hash_method, hash_, buffer(data))) # (1) -- note 
above
+                       INSERT INTO caps_cache ( hash_method, hash, data, 
last_seen )
+                       VALUES (?, ?, ?, ?);
+                       ''', (hash_method, hash_, buffer(data), 
int(time.time())))
+               # (1) -- note above
                try:
                        self.con.commit()
                except sqlite.OperationalError, e:
                        print >> sys.stderr, str(e)
 
+       def update_caps_time(self, method, hash_):
+               sql = '''UPDATE caps_cache SET last_seen = %d
+                       WHERE hash_method = "%s" and hash = "%s"''' % \
+                       (int(time.time()), method, hash_)
+               self.simple_commit(sql)
+
+       def clean_caps_table(self):
+               '''Remove caps which was not seen for 3 months'''
+               sql = '''DELETE FROM caps_cache WHERE last_seen < %d''' % \
+                       int(time.time() - 3*30*24*3600)
+               self.simple_commit(sql)
+
        def replace_roster(self, account_name, roster_version, roster):
                ''' Replace current roster in DB by a new one.
                accout_name is the name of the account to change
diff -r bbd9bfbbbaa2 -r 8db4d7414471 src/common/optparser.py
--- a/src/common/optparser.py   Wed Nov 11 21:38:39 2009 +0100
+++ b/src/common/optparser.py   Wed Nov 11 23:14:51 2009 +0100
@@ -29,6 +29,7 @@
 import os
 import locale
 import re
+from time import time
 from common import gajim
 from common import helpers
 from common import caps
@@ -218,6 +219,8 @@
                        self.update_config_to_01257()
                if old < [0, 12, 5, 8] and new >= [0, 12, 5, 8]:
                        self.update_config_to_01258()
+               if old < [0, 13, 0, 1] and new >= [0, 13, 0, 1]:
+                       self.update_config_to_01301()
 
                gajim.logger.init_vars()
                gajim.config.set('version', new_version)
@@ -817,4 +820,23 @@
                        'proxy.jabber.ru', 'proxy.jabbim.cz'])
                gajim.config.set('version', '0.12.5.8')
 
+       def update_config_to_01301(self):
+               back = os.getcwd()
+               os.chdir(logger.LOG_DB_FOLDER)
+               con = sqlite.connect(logger.LOG_DB_FILE)
+               os.chdir(back)
+               cur = con.cursor()
+               try:
+                       cur.executescript(
+                               '''
+                               ALTER TABLE caps_cache
+                               ADD last_seen INTEGER default %d;
+                               ''' % int(time())
+                       )
+                       con.commit()
+               except sqlite.OperationalError:
+                       pass
+               con.close()
+               gajim.config.set('version', '0.13.0.1')
+
 # vim: se ts=3:
diff -r bbd9bfbbbaa2 -r 8db4d7414471 test/unit/test_caps.py
--- a/test/unit/test_caps.py    Wed Nov 11 21:38:39 2009 +0100
+++ b/test/unit/test_caps.py    Wed Nov 11 23:14:51 2009 +0100
@@ -66,9 +66,9 @@
                
        def test_initialize_from_db(self):
                ''' Read cashed dummy data from db ''' 
-               self.assertEqual(self.cc[self.client_caps].queried, 0)
+               self.assertEqual(self.cc[self.client_caps].status, caps.NEW)
                self.cc.initialize_from_db()
-               self.assertEqual(self.cc[self.client_caps].queried, 2)
+               self.assertEqual(self.cc[self.client_caps].status, caps.CACHED)
 
        def test_preload_triggering_query(self):
                ''' Make sure that preload issues a disco '''
_______________________________________________
Commits mailing list
[email protected]
http://lists.gajim.org/cgi-bin/listinfo/commits

Reply via email to