Hi
I'm using a negative number so that I can search for the tables with the getTables
function (as recommended in a previous post). The problem also reproduces with 256,
-256 and 2000.
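
For reference, the only thing that changes between the runs shown further down is
the node-cache setting used when the file is opened, roughly as below. This is just
a sketch, not the actual driver; passing NODE_CACHE_SLOTS as a keyword to openFile
is an assumption based on PyTables 2.x, where entries from tables/parameters.py can
be overridden that way.

import tables as pytables

# Sketch only: vary the node cache size per run; omitting the keyword
# falls back to the library default of 256 slots.
ptFile = pytables.openFile("C:\\QSDI\\TSCache\\DTN2.h5", mode="a",
                           NODE_CACHE_SLOTS=-256)  # also ran with 256, 2000 and unset
ptFile.close()
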
I'm fairly sure the MemoryError is on the PyTables side - below is a standalone
script that reproduces it.
Apologies for the confusion over replying - I didn't realise that replying to an
existing email was different from starting a fresh one.
Regards
David
import tables as pytables


class TableSummary:
    def __init__(self, attributes, dtype, id, filename, tablename):
        self.attributes = attributes
        self.dtype = dtype
        self.id = id
        self.filename = filename
        self.tablename = tablename


def getTables(ptFile, attributes={}, where="/"):
    answer = []
    for node in ptFile.walkNodes(where):
        if isinstance(node, pytables.Leaf):
            matches = True
            for name, value in attributes.items():
                if name not in node._v_attrs:
                    matches = False
                    break
                #if ("%s" % node._v_attrs[name]) != ("%s" % value):
                if node._v_attrs[name] != value:
                    matches = False
                    break
            if matches:
                answer.append(node)
    return answer


def openDB(ptFilename):
    ptFile = pytables.openFile(ptFilename, mode="a")
    TSIDSeed = 0
    newTSByTSID = {}
    for table in getTables(ptFile, where="/data"):
        attributes = {}
        for name in table._v_attrs._f_list():
            # I'm not sure if the _v_attrs can be added in one go
            attributes[name] = table._v_attrs[name]
        TSIDSeed += 1
        ts = TableSummary(attributes, table.dtype, TSIDSeed, ptFilename,
                          table._v_pathname)
        ts.size = table.nrows
        if ts.size > 0:
            ts.firstDate = table.read(start=0, field="Date")[0]
            ts.lastDate = table.read(start=table.nrows - 1, field="Date")[0]
        newTSByTSID[TSIDSeed] = ts
    return newTSByTSID


def test():
    print len(openDB("C:\\QSDI\\TSCache\\DTN2.h5"))


test()
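
(Aside, on the comment in openDB about copying the attributes in one go: something
like the line below collects them in a single expression using the same calls as the
script above. I haven't checked whether the AttributeSet offers a more direct way, so
treat it as a sketch.)

attributes = dict((name, table._v_attrs[name]) for name in table._v_attrs._f_list())
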
NODE_CACHE_SLOTS not specified
Traceback (most recent call last):
  File "pytables_open.qvs", line 53, in <module>
    test()
  File "pytables_open.qvs", line 51, in test
    print len(openDB("C:\\QSDI\\TSCache\\DTN2.h5"))
  File "pytables_open.qvs", line 43, in openDB
    ts.firstDate = table.read(start=0, field="Date")[0]
  File "c:\python26qv\lib\site-packages\tables\table.py", line 1565, in read
    arr = self._read(start, stop, step, field)
  File "c:\python26qv\lib\site-packages\tables\table.py", line 1527, in _read
    self.row._fillCol(result, start, stop, step, field)
  File "c:\python26qv\lib\site-packages\tables\utils.py", line 228, in newfget
    mydict[name] = value = fget(self)
  File "c:\python26qv\lib\site-packages\tables\table.py", line 299, in row
    return tableExtension.Row(self)
  File "tableExtension.pyx", line 751, in tables.tableExtension.Row.__cinit__ (tables\tableExtension.c:6736)
  File "tableExtension.pyx", line 786, in tables.tableExtension.Row._newBuffer (tables\tableExtension.c:7192)
  File "c:\python26qv\lib\site-packages\tables\table.py", line 687, in _get_container
    return numpy.empty(shape=shape, dtype=self._v_dtype)
MemoryError
Closing remaining open files: C:\QSDI\TSCache\DTN2.h5... done
NODE_CACHE_SLOTS = 256
Traceback (most recent call last):
  File "pytables_open.qvs", line 53, in <module>
    test()
  File "pytables_open.qvs", line 51, in test
    openDB("C:\\QSDI\\TSCache\\DTN2.h5")
  File "pytables_open.qvs", line 43, in openDB
    ts.firstDate = table.read(start=0, field="Date")[0]
  File "c:\python26qv\lib\site-packages\tables\table.py", line 1565, in read
    arr = self._read(start, stop, step, field)
  File "c:\python26qv\lib\site-packages\tables\table.py", line 1527, in _read
    self.row._fillCol(result, start, stop, step, field)
  File "c:\python26qv\lib\site-packages\tables\utils.py", line 228, in newfget
    mydict[name] = value = fget(self)
  File "c:\python26qv\lib\site-packages\tables\table.py", line 299, in row
    return tableExtension.Row(self)
  File "tableExtension.pyx", line 751, in tables.tableExtension.Row.__cinit__ (tables\tableExtension.c:6736)
  File "tableExtension.pyx", line 786, in tables.tableExtension.Row._newBuffer (tables\tableExtension.c:7192)
  File "c:\python26qv\lib\site-packages\tables\table.py", line 687, in _get_container
    return numpy.empty(shape=shape, dtype=self._v_dtype)
MemoryError
Closing remaining open files: C:\QSDI\TSCache\DTN2.h5... done
NODE_CACHE_SLOTS = -256
c:\python26qv\lib\site-packages\tables\file.py:293: PerformanceWarning: the dictionary of alive nodes is exceeding the recommended maximum number (256); be ready to see PyTables asking for *lots* of memory and possibly slow I/O.
  PerformanceWarning)
Traceback (most recent call last):
  File "pytables_open.qvs", line 53, in <module>
    test()
  File "pytables_open.qvs", line 51, in test
    print len(openDB("C:\\QSDI\\TSCache\\DTN2.h5"))
  File "pytables_open.qvs", line 43, in openDB
    ts.firstDate = table.read(start=0, field="Date")[0]
  File "c:\python26qv\lib\site-packages\tables\table.py", line 1565, in read
    arr = self._read(start, stop, step, field)
  File "c:\python26qv\lib\site-packages\tables\table.py", line 1527, in _read
    self.row._fillCol(result, start, stop, step, field)
  File "c:\python26qv\lib\site-packages\tables\utils.py", line 228, in newfget
    mydict[name] = value = fget(self)
  File "c:\python26qv\lib\site-packages\tables\table.py", line 299, in row
    return tableExtension.Row(self)
  File "tableExtension.pyx", line 751, in tables.tableExtension.Row.__cinit__ (tables\tableExtension.c:6736)
  File "tableExtension.pyx", line 786, in tables.tableExtension.Row._newBuffer (tables\tableExtension.c:7192)
  File "c:\python26qv\lib\site-packages\tables\table.py", line 687, in _get_container
    return numpy.empty(shape=shape, dtype=self._v_dtype)
MemoryError
Closing remaining open files: C:\QSDI\TSCache\DTN2.h5... done
NODE_CACHE_SLOTS = 2000
c:\python26qv\lib\site-packages\tables\table.py:2518: PerformanceWarning: table ``/data/_2279`` is being preempted from alive nodes without its buffers being flushed or with some index being dirty. This may lead to very ineficient use of resources and even to fatal errors in certain situations. Please do a call to the .flush() or .reIndexDirty() methods on this table before start using other nodes.
  PerformanceWarning)
[the same PerformanceWarning is then emitted for tables /data/_2214, /data/_2203, /data/_2198, /data/_2140, /data/_2136, /data/_2013, /data/_1997, /data/_1994, /data/_1963, /data/_1906, /data/_1896, /data/_1870, /data/_1860, /data/_1825, /data/_1735, /data/_1680, /data/_1638 and /data/_1623]
Traceback (most recent call last):
  File "pytables_open.qvs", line 53, in <module>
    test()
  File "pytables_open.qvs", line 51, in test
    openDB("C:\\QSDI\\TSCache\\DTN2.h5")
  File "pytables_open.qvs", line 43, in openDB
    ts.firstDate = table.read(start=0, field="Date")[0]
  File "c:\python26qv\lib\site-packages\tables\table.py", line 1565, in read
    arr = self._read(start, stop, step, field)
  File "c:\python26qv\lib\site-packages\tables\table.py", line 1527, in _read
    self.row._fillCol(result, start, stop, step, field)
  File "c:\python26qv\lib\site-packages\tables\utils.py", line 228, in newfget
    mydict[name] = value = fget(self)
  File "c:\python26qv\lib\site-packages\tables\table.py", line 299, in row
    return tableExtension.Row(self)
  File "tableExtension.pyx", line 751, in tables.tableExtension.Row.__cinit__ (tables\tableExtension.c:6736)
  File "tableExtension.pyx", line 786, in tables.tableExtension.Row._newBuffer (tables\tableExtension.c:7192)
  File "c:\python26qv\lib\site-packages\tables\table.py", line 687, in _get_container
    return numpy.empty(shape=shape, dtype=self._v_dtype)
MemoryError
Closing remaining open files: C:\QSDI\TSCache\DTN2.h5... done
-----Original Message-----
From: Francesc Alted [mailto:[email protected]]
Sent: Wed 8-Dec-2010 09:44
To: [email protected]
Subject: Re: [Pytables-users] MemoryError on reading attributes
On Wednesday 08 December 2010 01:32:12, [email protected] wrote:
> Hi
>
> I have loaded around 2200 tables, a total of 1.1GB (I have a lot more to
> load - the csv size is 12GB). The load crashes with a memory error:
[clip]
> File "c:\python26qv\lib\site-packages\tables\table.py", line 687,
> in _get_container
> return numpy.empty(shape=shape, dtype=self._v_dtype)
> MemoryError
>
>
> When I open the db I wish to cycle through each table and store in
> memory the attributes, the first row of the first column and the last
> row of the last column, but no other table data.
>
> If I open the db with NODE_CACHE_SLOTS=-10000, Python uses about 1GB of
> RAM and then stops with a memory error; the same happens for
> NODE_CACHE_SLOTS=-2000. If I run it with NODE_CACHE_SLOTS=2000 then I
> still get a memory error, followed by a performance warning, thus:
May I ask why you insist on putting so many slots in the cache? Can you
reproduce the problem when using the default (256)?
Also, you should double-check that you are not creating a leak in your
own application (for example, by not removing data imported from the CSV
files).
In case you are sure that the problem is on the PyTables side, could you
please provide a self-contained script reproducing the problem?
Finally, when you post about a new subject, please do not reply to an
existing (and unrelated) post; that is confusing.
Cheers,
--
Francesc Alted