1 #$Id: back_mysql.py,v 1.75 2008-02-27 08:32:50 richard Exp $
2 #
3 # Copyright (c) 2003 Martynas Sklyzmantas, Andrey Lebedev <andrey@micro.lt>
4 #
5 # This module is free software, and you may redistribute it and/or modify
6 # under the same terms as Python, so long as this copyright message and
7 # disclaimer are retained in their original form.
8 #
10 '''This module defines a backend implementation for MySQL.
13 How to implement AUTO_INCREMENT:
15 mysql> create table foo (num integer auto_increment primary key, name
16 varchar(255)) AUTO_INCREMENT=1 ENGINE=InnoDB;
mysql> insert into foo (name) values ('foo5');
19 Query OK, 1 row affected (0.00 sec)
21 mysql> SELECT num FROM foo WHERE num IS NULL;
22 +-----+
23 | num |
24 +-----+
25 | 4 |
26 +-----+
27 1 row in set (0.00 sec)
29 mysql> SELECT num FROM foo WHERE num IS NULL;
30 Empty set (0.00 sec)
32 NOTE: we don't need an index on the id column if it's PRIMARY KEY
34 '''
__docformat__ = 'restructuredtext'

# the generic RDBMS layer provides most of the implementation; this
# module only overrides the MySQL-specific bits
from roundup.backends.rdbms_common import *
from roundup.backends import rdbms_common
import MySQLdb
import os, shutil
from MySQLdb.constants import ER
import logging
def connection_dict(config, dbnamestr=None):
    '''Return the keyword arguments for MySQLdb.connect() derived from
    the tracker config.

    MySQLdb spells the password argument "passwd", so rename it, and it
    requires the port to be an int, so coerce it.
    '''
    d = rdbms_common.connection_dict(config, dbnamestr)
    # idiomatic membership test instead of the deprecated has_key()
    if 'password' in d:
        d['passwd'] = d['password']
        del d['password']
    if 'port' in d:
        d['port'] = int(d['port'])
    return d
def db_nuke(config):
    """Clear all database contents and drop database itself.

    Also removes the tracker's file-storage directory
    (config.DATABASE), so stored message/file content is removed
    along with the database.
    """
    if db_exists(config):
        kwargs = connection_dict(config)
        conn = MySQLdb.connect(**kwargs)
        try:
            conn.select_db(config.RDBMS_NAME)
        # narrowed from a bare "except:": an unknown database raises
        # OperationalError (see db_exists); anything else should propagate
        except MySQLdb.OperationalError:
            # no, it doesn't exist
            pass
        else:
            cursor = conn.cursor()
            cursor.execute("SHOW TABLES")
            tables = cursor.fetchall()
            # work around MySQL refusing to drop the database while
            # tables exist - drop all the tables first
            for table in tables:
                command = 'DROP TABLE `%s`'%table[0]
                if __debug__:
                    logging.getLogger('hyperdb').debug(command)
                cursor.execute(command)
            command = "DROP DATABASE %s"%config.RDBMS_NAME
            logging.getLogger('hyperdb').info(command)
            cursor.execute(command)
            conn.commit()
        conn.close()

    if os.path.exists(config.DATABASE):
        shutil.rmtree(config.DATABASE)
def db_create(config):
    """Create the database named by config.RDBMS_NAME."""
    connection = MySQLdb.connect(**connection_dict(config))
    cursor = connection.cursor()
    statement = "CREATE DATABASE %s"%config.RDBMS_NAME
    logging.getLogger('hyperdb').info(statement)
    cursor.execute(statement)
    connection.commit()
    connection.close()
def db_exists(config):
    """Check if database already exists; return 1 if so, else 0."""
    conn = MySQLdb.connect(**connection_dict(config))
    # nested try blocks: outer for cleanup, inner for the probe
    # (kept separate for old-Python compatibility)
    try:
        try:
            conn.select_db(config.RDBMS_NAME)
        except MySQLdb.OperationalError:
            # selecting an unknown database fails - it doesn't exist
            return 0
        return 1
    finally:
        conn.close()
class Database(Database):
    """MySQL-specific database backend, specialising the generic
    rdbms_common.Database with MySQL SQL dialect, schema-migration and
    id-generation behaviour.
    """
    # SQL parameter placeholder style used by MySQLdb
    arg = '%s'

    # used by some code to switch styles of query
    implements_intersect = 0

    # Backend for MySQL to use.
    # InnoDB is faster, but if you're running <4.0.16 then you'll need to
    # use BDB to pass all unit tests.
    mysql_backend = 'InnoDB'
    #mysql_backend = 'BDB'

    # map hyperdb property classes to MySQL column types
    hyperdb_to_sql_datatypes = {
        hyperdb.String : 'TEXT',
        hyperdb.Date : 'DATETIME',
        hyperdb.Link : 'INTEGER',
        hyperdb.Interval : 'VARCHAR(255)',
        hyperdb.Password : 'VARCHAR(255)',
        hyperdb.Boolean : 'BOOL',
        hyperdb.Number : 'REAL',
    }

    # map hyperdb property classes to converters producing the value
    # actually stored in the database
    hyperdb_to_sql_value = {
        hyperdb.String : str,
        # no fractional seconds for MySQL
        hyperdb.Date : lambda x: x.formal(sep=' '),
        hyperdb.Link : int,
        hyperdb.Interval : str,
        hyperdb.Password : str,
        hyperdb.Boolean : int,
        hyperdb.Number : lambda x: x,
        hyperdb.Multilink : lambda x: x, # used in journal marshalling
    }

    def sql_open_connection(self):
        '''Open a raw connection, returning (connection, cursor).

        Autocommit is switched off and a transaction started so all
        work is explicitly committed.  Raises DatabaseError if the
        connect fails.
        '''
        kwargs = connection_dict(self.config, 'db')
        logging.getLogger('hyperdb').info('open database %r'%(kwargs['db'],))
        try:
            conn = MySQLdb.connect(**kwargs)
        except MySQLdb.OperationalError, message:
            raise DatabaseError, message
        cursor = conn.cursor()
        cursor.execute("SET AUTOCOMMIT=0")
        cursor.execute("START TRANSACTION")
        return (conn, cursor)

    def open_connection(self):
        '''Open the database, creating it and the bookkeeping tables
        (schema, ids) if they don't exist yet.
        '''
        # make sure the database actually exists
        if not db_exists(self.config):
            db_create(self.config)

        self.conn, self.cursor = self.sql_open_connection()

        try:
            self.load_dbschema()
        except MySQLdb.OperationalError, message:
            # anything other than "no database" is a real problem
            if message[0] != ER.NO_DB_ERROR:
                raise
        except MySQLdb.ProgrammingError, message:
            if message[0] != ER.NO_SUCH_TABLE:
                raise DatabaseError, message
            # no schema table yet - initialise the database.  `schema`
            # is a MySQL reserved word, hence the backquotes.
            self.init_dbschema()
            self.sql("CREATE TABLE `schema` (`schema` TEXT) ENGINE=%s"%
                self.mysql_backend)
            self.sql('''CREATE TABLE ids (name VARCHAR(255),
                num INTEGER) ENGINE=%s'''%self.mysql_backend)
            self.sql('create index ids_name_idx on ids(name)')
            self.create_version_2_tables()

    def load_dbschema(self):
        ''' Load the schema definition that the database currently implements
        '''
        self.cursor.execute('select `schema` from `schema`')
        schema = self.cursor.fetchone()
        if schema:
            # stored as repr() text - see save_dbschema
            self.database_schema = eval(schema[0])
        else:
            self.database_schema = {}

    def save_dbschema(self):
        ''' Save the schema definition that the database currently implements
        '''
        s = repr(self.database_schema)
        self.sql('delete from `schema`')
        self.sql('insert into `schema` values (%s)', (s,))

    def create_version_2_tables(self):
        '''Create the version-2 auxiliary tables: one-time keys,
        sessions and the full-text index store.
        '''
        # OTK store
        self.sql('''CREATE TABLE otks (otk_key VARCHAR(255),
            otk_value TEXT, otk_time FLOAT(20))
            ENGINE=%s'''%self.mysql_backend)
        self.sql('CREATE INDEX otks_key_idx ON otks(otk_key)')

        # Sessions store
        self.sql('''CREATE TABLE sessions (session_key VARCHAR(255),
            session_time FLOAT(20), session_value TEXT)
            ENGINE=%s'''%self.mysql_backend)
        self.sql('''CREATE INDEX sessions_key_idx ON
            sessions(session_key)''')

        # full-text indexing store
        self.sql('''CREATE TABLE __textids (_class VARCHAR(255),
            _itemid VARCHAR(255), _prop VARCHAR(255), _textid INT)
            ENGINE=%s'''%self.mysql_backend)
        self.sql('''CREATE TABLE __words (_word VARCHAR(30),
            _textid INT) ENGINE=%s'''%self.mysql_backend)
        self.sql('CREATE INDEX words_word_ids ON __words(_word)')
        self.sql('CREATE INDEX words_by_id ON __words (_textid)')
        self.sql('CREATE UNIQUE INDEX __textids_by_props ON '
                 '__textids (_class, _itemid, _prop)')
        # seed the textid counter
        sql = 'insert into ids (name, num) values (%s,%s)'%(self.arg, self.arg)
        self.sql(sql, ('__textids', 1))

    def add_new_columns_v2(self):
        '''While we're adding the actor column, we need to update the
        tables to have the correct datatypes.'''
        for klass in self.classes.values():
            cn = klass.classname
            properties = klass.getprops()
            old_spec = self.database_schema['tables'][cn]

            # figure the non-Multilink properties to copy over
            propnames = ['activity', 'creation', 'creator']

            # figure actions based on data type
            for name, s_prop in old_spec[1]:
                # s_prop is a repr() string of a hyperdb type object
                if s_prop.find('Multilink') == -1:
                    if properties.has_key(name):
                        propnames.append(name)
                    continue
                # Multilink: lives in its own two-column table
                tn = '%s_%s'%(cn, name)

                if properties.has_key(name):
                    # grab the current values
                    sql = 'select linkid, nodeid from %s'%tn
                    self.sql(sql)
                    rows = self.cursor.fetchall()

                # drop the old table
                self.drop_multilink_table_indexes(cn, name)
                sql = 'drop table %s'%tn
                self.sql(sql)

                if properties.has_key(name):
                    # re-create and populate the new table
                    self.create_multilink_table(klass, name)
                    sql = '''insert into %s (linkid, nodeid) values
                        (%s, %s)'''%(tn, self.arg, self.arg)
                    for linkid, nodeid in rows:
                        self.sql(sql, (int(linkid), int(nodeid)))

            # figure the column names to fetch
            fetch = ['_%s'%name for name in propnames]

            # select the data out of the old table
            fetch.append('id')
            fetch.append('__retired__')
            fetchcols = ','.join(fetch)
            sql = 'select %s from _%s'%(fetchcols, cn)
            self.sql(sql)

            # unserialise the old data; 'cols' is built only on the
            # first row since every row has the same columns
            olddata = []
            propnames = propnames + ['id', '__retired__']
            cols = []
            first = 1
            for entry in self.cursor.fetchall():
                l = []
                olddata.append(l)
                for i in range(len(propnames)):
                    name = propnames[i]
                    v = entry[i]

                    if name in ('id', '__retired__'):
                        if first:
                            cols.append(name)
                        l.append(int(v))
                        continue
                    if first:
                        cols.append('_' + name)
                    prop = properties[name]
                    # re-hydrate the value into its hyperdb type
                    if isinstance(prop, Date) and v is not None:
                        v = date.Date(v)
                    elif isinstance(prop, Interval) and v is not None:
                        v = date.Interval(v)
                    elif isinstance(prop, Password) and v is not None:
                        v = password.Password(encrypted=v)
                    elif (isinstance(prop, Boolean) or
                            isinstance(prop, Number)) and v is not None:
                        v = float(v)

                    # convert to new MySQL data type
                    prop = properties[name]
                    if v is not None:
                        e = self.hyperdb_to_sql_value[prop.__class__](v)
                    else:
                        e = None
                    l.append(e)

                    # Intervals store the seconds value too
                    if isinstance(prop, Interval):
                        if first:
                            cols.append('__' + name + '_int__')
                        if v is not None:
                            l.append(v.as_seconds())
                        else:
                            l.append(e)
                first = 0

            self.drop_class_table_indexes(cn, old_spec[0])

            # drop the old table
            self.sql('drop table _%s'%cn)

            # create the new table
            self.create_class_table(klass)

            # do the insert of the old data
            args = ','.join([self.arg for x in cols])
            cols = ','.join(cols)
            sql = 'insert into _%s (%s) values (%s)'%(cn, cols, args)
            for entry in olddata:
                self.sql(sql, tuple(entry))

            # now load up the old journal data to migrate it
            cols = ','.join('nodeid date tag action params'.split())
            sql = 'select %s from %s__journal'%(cols, cn)
            self.sql(sql)

            # data conversions
            olddata = []
            for nodeid, journaldate, journaltag, action, params in \
                    self.cursor.fetchall():
                #nodeid = int(nodeid)
                journaldate = date.Date(journaldate)
                #params = eval(params)
                olddata.append((nodeid, journaldate, journaltag, action,
                    params))

            # drop journal table and indexes
            self.drop_journal_table_indexes(cn)
            sql = 'drop table %s__journal'%cn
            self.sql(sql)

            # re-create journal table
            self.create_journal_table(klass)
            dc = self.hyperdb_to_sql_value[hyperdb.Date]
            for nodeid, journaldate, journaltag, action, params in olddata:
                self.save_journal(cn, cols, nodeid, dc(journaldate),
                    journaltag, action, params)

            # make sure the normal schema update code doesn't try to
            # change things
            self.database_schema['tables'][cn] = klass.schema()

    def fix_version_2_tables(self):
        '''Upgrade version-2 column datatypes to the current ones.'''
        # Convert journal date column to TIMESTAMP, params column to TEXT
        self._convert_journal_tables()

        # Convert all String properties to TEXT
        self._convert_string_properties()

    def __repr__(self):
        return '<myroundsql 0x%x>'%id(self)

    def sql_fetchone(self):
        '''Fetch the next row of the current cursor's result set.'''
        return self.cursor.fetchone()

    def sql_fetchall(self):
        '''Fetch all remaining rows of the current cursor's result set.'''
        return self.cursor.fetchall()

    def sql_index_exists(self, table_name, index_name):
        '''Return 1 if the named index exists on the table, else 0.

        SHOW INDEX gives the index name in column 3 (Key_name).
        '''
        self.sql('show index from %s'%table_name)
        for index in self.cursor.fetchall():
            if index[2] == index_name:
                return 1
        return 0

    def create_class_table(self, spec, create_sequence=1):
        '''Create the class table for the given spec, returning the
        (columns, multilinks) determined from the properties.
        '''
        cols, mls = self.determine_columns(spec.properties.items())

        # add on our special columns
        cols.append(('id', 'INTEGER PRIMARY KEY'))
        cols.append(('__retired__', 'INTEGER DEFAULT 0'))

        # create the base table
        scols = ','.join(['%s %s'%x for x in cols])
        sql = 'create table _%s (%s) ENGINE=%s'%(spec.classname, scols,
            self.mysql_backend)
        self.sql(sql)

        self.create_class_table_indexes(spec)
        return cols, mls

    def create_class_table_indexes(self, spec):
        ''' create the indexes on the class table for the given spec
        '''
        # create __retired__ index
        index_sql2 = 'create index _%s_retired_idx on _%s(__retired__)'%(
                        spec.classname, spec.classname)
        self.sql(index_sql2)

        # create index for key property
        if spec.key:
            # MySQL needs a prefix length to index a TEXT column
            if isinstance(spec.properties[spec.key], String):
                idx = spec.key + '(255)'
            else:
                idx = spec.key
            index_sql3 = 'create index _%s_%s_idx on _%s(_%s)'%(
                        spec.classname, spec.key,
                        spec.classname, idx)
            self.sql(index_sql3)

        # TODO: create indexes on (selected?) Link property columns, as
        # they're more likely to be used for lookup

    def add_class_key_required_unique_constraint(self, cn, key):
        '''Add the unique (retired, key) index that enforces key
        uniqueness among non-retired items.
        '''
        # mysql requires sizes on TEXT indexes
        prop = self.classes[cn].getprops()[key]
        if isinstance(prop, String):
            sql = '''create unique index _%s_key_retired_idx
                on _%s(__retired__, _%s(255))'''%(cn, cn, key)
        else:
            sql = '''create unique index _%s_key_retired_idx
                on _%s(__retired__, _%s)'''%(cn, cn, key)
        self.sql(sql)

    def create_class_table_key_index(self, cn, key):
        '''Create the lookup index on the class's key property column.'''
        # mysql requires sizes on TEXT indexes
        prop = self.classes[cn].getprops()[key]
        if isinstance(prop, String):
            sql = 'create index _%s_%s_idx on _%s(_%s(255))'%(cn, key, cn, key)
        else:
            sql = 'create index _%s_%s_idx on _%s(_%s)'%(cn, key, cn, key)
        self.sql(sql)

    def drop_class_table_indexes(self, cn, key):
        '''Drop the class table's indexes (id, retired and key), if
        they exist.
        '''
        # drop the old table indexes first
        l = ['_%s_id_idx'%cn, '_%s_retired_idx'%cn]
        if key:
            l.append('_%s_%s_idx'%(cn, key))

        table_name = '_%s'%cn
        for index_name in l:
            if not self.sql_index_exists(table_name, index_name):
                continue
            index_sql = 'drop index %s on %s'%(index_name, table_name)
            self.sql(index_sql)

    def create_journal_table(self, spec):
        ''' create the journal table for a class given the spec and
            already-determined cols
        '''
        # journal table
        # NOTE(review): 'cols' is computed here but never used
        cols = ','.join(['%s varchar'%x
            for x in 'nodeid date tag action params'.split()])
        sql = '''create table %s__journal (
            nodeid integer, date datetime, tag varchar(255),
            action varchar(255), params text) ENGINE=%s'''%(
            spec.classname, self.mysql_backend)
        self.sql(sql)
        self.create_journal_table_indexes(spec)

    def drop_journal_table_indexes(self, classname):
        '''Drop the journal table's nodeid index, if it exists.'''
        index_name = '%s_journ_idx'%classname
        if not self.sql_index_exists('%s__journal'%classname, index_name):
            return
        index_sql = 'drop index %s on %s__journal'%(index_name, classname)
        self.sql(index_sql)

    def create_multilink_table(self, spec, ml):
        '''Create the two-column (linkid, nodeid) table for a Multilink
        property and its indexes.
        '''
        sql = '''CREATE TABLE `%s_%s` (linkid VARCHAR(255),
            nodeid VARCHAR(255)) ENGINE=%s'''%(spec.classname, ml,
                self.mysql_backend)
        self.sql(sql)
        self.create_multilink_table_indexes(spec, ml)

    def drop_multilink_table_indexes(self, classname, ml):
        '''Drop the linkid and nodeid indexes of a Multilink table,
        if they exist.
        '''
        l = [
            '%s_%s_l_idx'%(classname, ml),
            '%s_%s_n_idx'%(classname, ml)
        ]
        table_name = '%s_%s'%(classname, ml)
        for index_name in l:
            if not self.sql_index_exists(table_name, index_name):
                continue
            sql = 'drop index %s on %s'%(index_name, table_name)
            self.sql(sql)

    def drop_class_table_key_index(self, cn, key):
        '''Drop the class's key-property index, if it exists.'''
        table_name = '_%s'%cn
        index_name = '_%s_%s_idx'%(cn, key)
        if not self.sql_index_exists(table_name, index_name):
            return
        sql = 'drop index %s on %s'%(index_name, table_name)
        self.sql(sql)

    # old-skool id generation
    def newid(self, classname):
        ''' Generate a new id for the given class
        '''
        # get the next ID - "FOR UPDATE" will lock the row for us
        sql = 'select num from ids where name=%s FOR UPDATE'%self.arg
        self.sql(sql, (classname, ))
        newid = int(self.cursor.fetchone()[0])

        # update the counter
        sql = 'update ids set num=%s where name=%s'%(self.arg, self.arg)
        vals = (int(newid)+1, classname)
        self.sql(sql, vals)

        # return as string
        return str(newid)

    def setid(self, classname, setid):
        ''' Set the id counter: used during import of database

        We add one to make it behave like the sequences in postgres.
        '''
        sql = 'update ids set num=%s where name=%s'%(self.arg, self.arg)
        vals = (int(setid)+1, classname)
        self.sql(sql, vals)

    def clear(self):
        '''Clear the tracker's data and reset all the id counters.'''
        rdbms_common.Database.clear(self)

        # set the id counters to 0 (setid adds one) so we start at 1
        for cn in self.classes.keys():
            self.setid(cn, 0)

    def create_class(self, spec):
        '''Create a new class, seeding its id counter at 1.'''
        rdbms_common.Database.create_class(self, spec)
        sql = 'insert into ids (name, num) values (%s, %s)'
        vals = (spec.classname, 1)
        self.sql(sql, vals)

    def sql_commit(self, fail_ok=False):
        ''' Actually commit to the database.

        The fail_ok flag is accepted for interface compatibility but
        is not consulted here.
        '''
        logging.getLogger('hyperdb').info('commit')

        # MySQL commits don't seem to ever fail, the latest update winning.
        # makes you wonder why they have transactions...
        self.conn.commit()

        # open a new cursor for subsequent work
        self.cursor = self.conn.cursor()

        # make sure we're in a new transaction and not autocommitting
        self.sql("SET AUTOCOMMIT=0")
        self.sql("START TRANSACTION")

    def sql_close(self):
        '''Close the connection, ignoring "already closed" errors.'''
        logging.getLogger('hyperdb').info('close')
        try:
            self.conn.close()
        except MySQLdb.ProgrammingError, message:
            # closing twice is harmless; anything else is real
            if str(message) != 'closing a closed connection':
                raise
568 class MysqlClass:
569 def _subselect(self, classname, multilink_table):
570 ''' "I can't believe it's not a toy RDBMS"
571 see, even toy RDBMSes like gadfly and sqlite can do sub-selects...
572 '''
573 self.db.sql('select nodeid from %s'%multilink_table)
574 s = ','.join([x[0] for x in self.db.sql_fetchall()])
575 return '_%s.id not in (%s)'%(classname, s)
class Class(MysqlClass, rdbms_common.Class):
    # generic item class with the MySQL subselect workaround mixed in
    pass
class IssueClass(MysqlClass, rdbms_common.IssueClass):
    # issue class with the MySQL subselect workaround mixed in
    pass
class FileClass(MysqlClass, rdbms_common.FileClass):
    # file-content class with the MySQL subselect workaround mixed in
    pass
584 # vim: set et sts=4 sw=4 :