20 dec 2015, 14:29, sanhua.zh:

> Here is the test result for selecting 100,000 items in original test  
> case.
>
>
> shared cache mode
> 2015-12-20 21:24:58.714 Test[1126:11609] cost 2.173480
> 2015-12-20 21:24:58.714 Test[1126:11610] cost 2.173449
> 2015-12-20 21:24:58.714 Test[1126:11608] cost 2.173768
> 2015-12-20 21:24:58.714 Test[1126:11611] cost 2.173169
>
>
> without shared cache mode
> 2015-12-20 21:28:49.647 Test[1286:13077] cost 0.028914
> 2015-12-20 21:28:49.647 Test[1286:13078] cost 0.028914
> 2015-12-20 21:28:49.647 Test[1286:13079] cost 0.028964
> 2015-12-20 21:28:49.647 Test[1286:13076] cost 0.028958
>
>
> May be your python code does not run the same thing as mine, I am  
> poor in python so that I could not figure it out.
Indeed, I had changed the query to just scan the table in the database
and not return all rows. Now I changed that and also set the
corresponding number of rows. Still cache sharing does not make such a
mega difference. Below are the timings.
I have no clue now for your mega difference.
SQLite version? 3.8 here
hardware? I have (only) Mac OS X 10.5.8 with a 1.22 GHz PowerPC

Timings for default mode:
$ python3 larry3.py
sqlite version 3.8.3.1
cache sharing 0 cache size 2000 rowcount 100000
after split 0.0037369728088378906
after start 0.8668131828308105
steps 100000 connect+fetch 4.39 connect 0.0
steps 100000 connect+fetch 4.52 connect 0.0
steps 100000 connect+fetch 4.62 connect 0.0
steps 100000 connect+fetch 4.51 connect 0.0
elapsed 5.21

Timings in shared cache mode:
sqlite version 3.8.3.1
cache sharing 1 cache size 2000 rowcount 100000
after split 0.0035581588745117188
after start 0.7083160877227783
steps 100000 connect+fetch 6.4 connect 0.0
steps 100000 connect+fetch 6.17 connect 0.0
steps 100000 connect+fetch 6.56 connect 0.0
steps 100000 connect+fetch 6.46 connect 0.0
elapsed 6.85

Python script:

import random, os, time, threading
import sqlite3 as sqlite
# Report the library version up front: shared-cache behaviour differs
# between SQLite releases, so this matters when comparing timings.
print ("sqlite version", sqlite.sqlite_version)

# Benchmark configuration.
TESTDB='larry.tmp'   # on-disk test database file
SHARED=0             # 1 = SQLite shared-cache mode, 0 = default (private caches)
SIZE=2000            # per-connection "pragma cache_size" value (pages)
ROWCOUNT=100000      # rows in the test table, i.e. rows fetched per thread
print('cache sharing', SHARED, 'cache size', SIZE, "rowcount", ROWCOUNT)
# NOTE(review): enable_shared_cache() is deprecated since Python 3.10 and
# removed in 3.12 -- this script targets older interpreters; confirm before
# running on a current Python.
sqlite.enable_shared_cache(SHARED)

def connect():
    """Open a connection to TESTDB and size its page cache.

    The connection is opened in autocommit mode (isolation_level=None)
    and without the same-thread check, since the benchmark hands
    connections between threads.
    """
    connection = sqlite.connect(
        TESTDB,
        isolation_level=None,
        check_same_thread=0,
    )
    connection.execute("pragma cache_size=%i"%(SIZE,))
    return connection

def e_str (e):
    """Format an exception as "ClassName: message" for the spool file."""
    cls_name = type(e).__name__
    return "%s: %s" % (cls_name, e)

class Testthread (threading.Thread):
    """Run one or more SQL statements (';'-separated) in a worker thread.

    Each fetched query's last row is spooled to a per-instance temp file;
    on error the formatted exception is spooled instead.  get_result()
    returns the spool-file contents.  The appearance of the spool file
    also doubles as a "thread has started executing SQL" signal that
    start() waits for.
    """

    def __init__ (self, qq, con = None):
        # qq:  query string, possibly several statements joined by ';'
        # con: optional pre-opened connection; when None the thread opens
        #      its own via connect(), so connect time is measured inside
        #      the thread.
        self.con = con
        self.qq = qq
        self.out = "thr%i.out" % id (self)
        # Touch-and-delete the spool file so a permission problem
        # surfaces here rather than silently inside the thread.
        open (self.out, 'w').close ()
        os.remove (self.out)
        threading.Thread.__init__ (
                self,
                target=self.__target,
                )

    def start (self):
        """Start the thread, then wait (with backoff) for its spool file."""
        threading.Thread.start (self)
        for retry in range (5):
            time.sleep (2 ** retry * .05)   # exponential backoff
            if os.access (self.out, os.R_OK):
                break
        else:
            print("Testthread: spoolfile does not appear")
        time.sleep (.10) # for the SQL to start

    def __target (self):
        """Thread body: connect (if needed), run the queries, spool results."""
        subt0=time.time()
        if not self.con:
            self.con = connect ()
        dt1=round(time.time()-subt0,2)   # connect-only time
        f = open (self.out, 'w')
        try:
            try:
                n=0
                for q in self.qq.split (';'):
                    i=None
                    for i in self.con.execute(q):
                        n+=1
                    f.write (str(i)+'\n') # write last row only
                dt2=round(time.time()-subt0,2)
                print("steps", n, "connect+fetch", dt2, "connect", dt1)
            except Exception as e:
                f.write (e_str (e) + '\n')
        finally:
            f.close()
            self.con.close ()

    def join (self, timeout=None):
        """Join the thread; without a timeout, poll every 7.5 s so a
        keyboard interrupt in the main thread still gets through.
        Returns self so calls can be chained (join().get_result())."""
        if timeout is not None:
            threading.Thread.join (self, timeout)
        else:
            timeout = 7.5 # respond to keyboard interrupts
            # BUG FIX: isAlive() was removed in Python 3.9; the renamed
            # is_alive() exists since 2.6, so this stays compatible.
            while self.is_alive ():
                threading.Thread.join (self, timeout)
        return self

    def get_result (self):
        """Return the stripped spool-file contents, or None if the file
        never appeared (thread failed before opening it)."""
        try:
            with open (self.out, 'r') as f:
                return f.read ().strip ()
        except IOError:
            return None

def main ():
    """Build the test table if needed, then fetch all of it from 4 threads.

    Prints per-thread and overall timings; complains if any thread's last
    fetched row is not (ROWCOUNT,).
    """
    # Reuse an existing database only if it holds exactly ROWCOUNT rows.
    con = connect ()
    try:
        n=con.execute("select count(*) from test").fetchone()[0]
    except sqlite.OperationalError:
        n=None   # table does not exist yet
    con.close()
    if n!=ROWCOUNT:
        # BUG FIX: this message was a string literal broken across two
        # source lines (a mail-wrap artifact) -- rejoined onto one line.
        print ("n!=ROWCOUNT", n, ROWCOUNT, "creating new test database..")
        # Touch-and-delete to verify the path is writable, then rebuild.
        open (TESTDB, 'w').close ()
        os.remove (TESTDB)
        con = connect ()
        con.execute ("create table test (id integer)")
        con.execute ("begin")
        con.executemany(
            "insert into test(id) values(:1)",
            ((i+1,) for i in range(ROWCOUNT)))
        con.execute ("end")
        con.close ()
        print ("done")
    qq="SELECT id  FROM test"
    t0=time.time()
    tthh=[]
    for i in range(4):
        # Pass con=None so each thread opens its own connection and the
        # connect time is measured inside the thread.
        con = None ###connect()
        tthh.append (Testthread (qq, con))
    print("after split", time.time()-t0)
    for th in tthh:
        th.start ()
    print("after start", time.time()-t0)
    # Each thread spools its last fetched row; it must be (ROWCOUNT,).
    ckres0="(%i,)"%(ROWCOUNT,)
    for th in tthh:
        res=th.join ().get_result().split('\n')
        if res[0]!=ckres0:
            print ("result", res[0],'!=', ckres0)
    dt=round(time.time()-t0,2)
    print('elapsed', dt)

# Script entry point: run the benchmark when executed directly.
if __name__ == "__main__":
     main ()

Reply via email to