index 7dae5df67776641c5b47b78801bbe8e6495ffa7f..e1105d3488484e85709872189091dbdd7ac09dac 100644 (file)
-# $Id: back_sqlite.py,v 1.1 2002-09-18 05:07:47 richard Exp $
-__doc__ = '''
+# $Id: back_sqlite.py,v 1.17 2004-03-18 01:58:45 richard Exp $
+'''Implements a backend for SQLite.
+
See https://pysqlite.sourceforge.net/ for pysqlite info
'''
-import base64, marshal
-from roundup.backends.rdbms_common import *
+__docformat__ = 'restructuredtext'
+
+import os, base64, marshal
+
+from roundup import hyperdb
+from roundup.backends import rdbms_common
+from roundup.backends import locking
import sqlite
class Database(rdbms_common.Database):
    # character pysqlite uses for positional SQL parameters
    arg = '%s'

    def sql_open_connection(self):
        ''' Open a pysqlite connection to the 'db' file inside the
            tracker's DATABASE directory; return (connection, cursor).
        '''
        path = os.path.join(self.config.DATABASE, 'db')
        connection = sqlite.connect(db=path)
        return (connection, connection.cursor())
def open_connection(self):
# ensure files are group readable and writable
os.umask(0002)
+
+ # lock the database
db = os.path.join(self.config.DATABASE, 'db')
- self.conn = sqlite.connect(db=db)
- cursor = self.conn.cursor()
+ lockfilenm = db[:-3] + 'lck'
+ self.lockfile = locking.acquire_lock(lockfilenm)
+ self.lockfile.write(str(os.getpid()))
+ self.lockfile.flush()
+
+ (self.conn, self.cursor) = self.sql_open_connection()
+
try:
- self.database_schema = self.load_dbschema(cursor)
+ self.load_dbschema()
except sqlite.DatabaseError, error:
if str(error) != 'no such table: schema':
raise
- self.database_schema = {}
- cursor = self.conn.cursor()
- cursor.execute('create table schema (schema varchar)')
- cursor.execute('create table ids (name varchar, num integer)')
+ self.init_dbschema()
+ self.cursor.execute('create table schema (schema varchar)')
+ self.cursor.execute('create table ids (name varchar, num integer)')
+ self.cursor.execute('create index ids_name_idx on ids(name)')
+ self.create_version_2_tables()
- def __repr__(self):
- return '<roundlite 0x%x>'%id(self)
+ def close(self):
+ ''' Close off the connection.
+ '''
+ self.sql_close()
+ if self.lockfile is not None:
+ locking.release_lock(self.lockfile)
+ if self.lockfile is not None:
+ self.lockfile.close()
+ self.lockfile = None
+
+ def create_version_2_tables(self):
+ self.cursor.execute('create table otks (otk_key varchar, '
+ 'otk_value varchar, otk_time integer)')
+ self.cursor.execute('create index otks_key_idx on otks(otk_key)')
+ self.cursor.execute('create table sessions (session_key varchar, '
+ 'session_time integer, session_value varchar)')
+ self.cursor.execute('create index sessions_key_idx on '
+ 'sessions(session_key)')
+
+ def add_actor_column(self):
+ # update existing tables to have the new actor column
+ tables = self.database_schema['tables']
+ for classname, spec in self.classes.items():
+ if tables.has_key(classname):
+ dbspec = tables[classname]
+ self.update_class(spec, dbspec, force=1, adding_actor=1)
+
+ def update_class(self, spec, old_spec, force=0, adding_actor=0):
+ ''' Determine the differences between the current spec and the
+ database version of the spec, and update where necessary.
+
+ If 'force' is true, update the database anyway.
+
+ SQLite doesn't have ALTER TABLE, so we have to copy and
+ regenerate the tables with the new schema.
+ '''
+ new_has = spec.properties.has_key
+ new_spec = spec.schema()
+ new_spec[1].sort()
+ old_spec[1].sort()
+ if not force and new_spec == old_spec:
+ # no changes
+ return 0
+
+ if __debug__:
+ print >>hyperdb.DEBUG, 'update_class FIRING'
+
+ # detect multilinks that have been removed, and drop their table
+ old_has = {}
+ for name,prop in old_spec[1]:
+ old_has[name] = 1
+ if new_has(name) or not isinstance(prop, hyperdb.Multilink):
+ continue
+ # it's a multilink, and it's been removed - drop the old
+ # table. First drop indexes.
+ self.drop_multilink_table_indexes(spec.classname, ml)
+ sql = 'drop table %s_%s'%(spec.classname, prop)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'update_class', (self, sql)
+ self.cursor.execute(sql)
+ old_has = old_has.has_key
+
+ # now figure how we populate the new table
+ if adding_actor:
+ fetch = ['_activity', '_creation', '_creator']
+ else:
+ fetch = ['_actor', '_activity', '_creation', '_creator']
+ properties = spec.getprops()
+ for propname,x in new_spec[1]:
+ prop = properties[propname]
+ if isinstance(prop, hyperdb.Multilink):
+ if force or not old_has(propname):
+ # we need to create the new table
+ self.create_multilink_table(spec, propname)
+ elif old_has(propname):
+ # we copy this col over from the old table
+ fetch.append('_'+propname)
+
+ # select the data out of the old table
+ fetch.append('id')
+ fetch.append('__retired__')
+ fetchcols = ','.join(fetch)
+ cn = spec.classname
+ sql = 'select %s from _%s'%(fetchcols, cn)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'update_class', (self, sql)
+ self.cursor.execute(sql)
+ olddata = self.cursor.fetchall()
+
+ # TODO: update all the other index dropping code
+ self.drop_class_table_indexes(cn, old_spec[0])
+
+ # drop the old table
+ self.cursor.execute('drop table _%s'%cn)
+
+ # create the new table
+ self.create_class_table(spec)
+
+ if olddata:
+ # do the insert of the old data - the new columns will have
+ # NULL values
+ args = ','.join([self.arg for x in fetch])
+ sql = 'insert into _%s (%s) values (%s)'%(cn, fetchcols, args)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'update_class', (self, sql, olddata[0])
+ for entry in olddata:
+ self.cursor.execute(sql, tuple(entry))
+
+ return 1
- def sql_fetchone(self, cursor):
- ''' Fetch a single row. If there's nothing to fetch, return None.
+ def sql_close(self):
+ ''' Squash any error caused by us already having closed the
+ connection.
'''
- return cursor.fetchone()
+ try:
+ self.conn.close()
+ except sqlite.ProgrammingError, value:
+ if str(value) != 'close failed - Connection is closed.':
+ raise
+
+ def sql_rollback(self):
+ ''' Squash any error caused by us having closed the connection (and
+ therefore not having anything to roll back)
+ '''
+ try:
+ self.conn.rollback()
+ except sqlite.ProgrammingError, value:
+ if str(value) != 'rollback failed - Connection is closed.':
+ raise
+
+ def __repr__(self):
+ return '<roundlite 0x%x>'%id(self)
def sql_commit(self):
''' Actually commit to the database.
if str(error) != 'cannot commit - no transaction is active':
raise
- def save_dbschema(self, cursor, schema):
- ''' Save the schema definition that the database currently implements
- '''
- s = repr(self.database_schema)
- self.sql(cursor, 'insert into schema values (%s)', (s,))
+ def sql_index_exists(self, table_name, index_name):
+ self.cursor.execute('pragma index_list(%s)'%table_name)
+ for entry in self.cursor.fetchall():
+ if entry[1] == index_name:
+ return 1
+ return 0
- def load_dbschema(self, cursor):
- ''' Load the schema definition that the database currently implements
class sqliteClass:
    ''' Mix-in giving the rdbms_common class variants SQLite-specific
        filter() behaviour.
    '''
    def filter(self, search_matches, filterspec, sort=(None,None),
            group=(None,None)):
        ''' If there's NO matches to a fetch, sqlite returns NULL
            instead of nothing
        '''
        results = rdbms_common.Class.filter(self, search_matches,
            filterspec, sort=sort, group=group)
        # weed out the NULL (None) entries sqlite produces for empty matches
        return filter(None, results)
- def save_journal(self, cursor, classname, cols, nodeid, journaldate,
- journaltag, action, params):
- ''' Save the journal entry to the database
- '''
- # make the params db-friendly
- params = repr(params)
- entry = (nodeid, journaldate, journaltag, action, params)
-
- # do the insert
- a = self.arg
- sql = 'insert into %s__journal (%s) values (%s,%s,%s,%s,%s)'%(classname,
- cols, a, a, a, a, a)
- if __debug__:
- print >>hyperdb.DEBUG, 'addjournal', (self, sql, entry)
- cursor.execute(sql, entry)
class Class(sqliteClass, rdbms_common.Class):
    ''' SQLite-specific hyperdb Class. '''
    pass

class IssueClass(sqliteClass, rdbms_common.IssueClass):
    ''' SQLite-specific hyperdb IssueClass. '''
    pass

class FileClass(sqliteClass, rdbms_common.FileClass):
    ''' SQLite-specific hyperdb FileClass. '''
    pass
- def load_journal(self, cursor, classname, cols, nodeid):
- ''' Load the journal from the database
- '''
- # now get the journal entries
- sql = 'select %s from %s__journal where nodeid=%s'%(cols, classname,
- self.arg)
- if __debug__:
- print >>hyperdb.DEBUG, 'getjournal', (self, sql, nodeid)
- cursor.execute(sql, (nodeid,))
- res = []
- for nodeid, date_stamp, user, action, params in cursor.fetchall():
- params = eval(params)
- res.append((nodeid, date.Date(date_stamp), user, action, params))
- return res