author    richard <richard@57a73879-2fb5-44c3-a270-3262357dd7e2>
          Wed, 24 Mar 2004 03:07:52 +0000 (03:07 +0000)
committer richard <richard@57a73879-2fb5-44c3-a270-3262357dd7e2>
          Wed, 24 Mar 2004 03:07:52 +0000 (03:07 +0000)
git-svn-id: http://svn.roundup-tracker.org/svnroot/roundup/trunk@2168 57a73879-2fb5-44c3-a270-3262357dd7e2
index 2a5c95780cf3f472ba8dbdb64a5a599cb5b2ce7c..517ef00c5622570515df6a21f50046989642847e 100644
--- a/demo.py
+++ b/demo.py
#
# Copyright (c) 2003 Richard Jones (richard@mechanicalcat.net)
#
-# $Id: demo.py,v 1.7 2003-11-06 14:24:57 jlgijsbers Exp $
+# $Id: demo.py,v 1.8 2004-03-24 03:07:51 richard Exp $
import sys, os, string, re, urlparse
import shutil, socket, errno, BaseHTTPServer
except os.error, error:
if error.errno != errno.ENOENT:
raise
- init.write_select_db(home, 'anydbm')
+ init.write_select_db(home, 'mysql')
# figure basic params for server
hostname = socket.gethostname()
s = f.read().replace('http://tracker.example/cgi-bin/roundup.cgi/bugs/',
url)
f.close()
+ s = s + """
+MYSQL_DBHOST = 'localhost'
+MYSQL_DBUSER = 'rounduptest'
+MYSQL_DBPASSWORD = 'rounduptest'
+MYSQL_DBNAME = 'rounduptest'
+MYSQL_DATABASE = (MYSQL_DBHOST, MYSQL_DBUSER, MYSQL_DBPASSWORD, MYSQL_DBNAME)
+"""
f = open(os.path.join(home, 'config.py'), 'w')
f.write(s)
f.close()
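
For context, the demo setup now both selects the MySQL backend and appends the
connection settings shown above to the generated config.py. A minimal sketch of
that append step, assuming a tracker home of 'demo' and the 'rounduptest'
credentials from the hunk above:

    import os

    home = 'demo'  # assumed tracker home directory created by the demo script
    settings = [
        "MYSQL_DBHOST = 'localhost'",
        "MYSQL_DBUSER = 'rounduptest'",
        "MYSQL_DBPASSWORD = 'rounduptest'",
        "MYSQL_DBNAME = 'rounduptest'",
        "MYSQL_DATABASE = (MYSQL_DBHOST, MYSQL_DBUSER, MYSQL_DBPASSWORD, MYSQL_DBNAME)",
    ]
    # append the settings so the demo tracker instance uses the MySQL database
    config = open(os.path.join(home, 'config.py'), 'a')
    config.write('\n' + '\n'.join(settings) + '\n')
    config.close()
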
index 6aa3a633d0b1c288e46d2a6fdf60b213a0c0db78..2efd853784463f084240068de362d1a3f8543255 100644
self.cursor.execute(sql, ('__textids', 1))
def add_actor_column(self):
- ''' While we're adding the actor column, we need to update the
+ '''While we're adding the actor column, we need to update the
tables to have the correct datatypes.'''
- assert 0, 'FINISH ME!'
-
- for spec in self.classes.values():
- new_has = spec.properties.has_key
- new_spec = spec.schema()
- new_spec[1].sort()
- old_spec[1].sort()
- if not force and new_spec == old_spec:
- # no changes
- return 0
-
- if __debug__:
- print >>hyperdb.DEBUG, 'update_class FIRING'
-
- # detect multilinks that have been removed, and drop their table
- old_has = {}
- for name,prop in old_spec[1]:
- old_has[name] = 1
- if new_has(name) or not isinstance(prop, hyperdb.Multilink):
+ for klass in self.classes.values():
+ cn = klass.classname
+ properties = klass.getprops()
+ old_spec = self.database_schema['tables'][cn]
+
+ execute = self.cursor.execute
+
+ # figure the non-Multilink properties to copy over
+ propnames = ['activity', 'creation', 'creator']
+
+ # figure actions based on data type
+ for name, s_prop in old_spec[1]:
+ # s_prop is a repr() string of a hyperdb type object
+ if s_prop.find('Multilink') == -1:
+ if properties.has_key(name):
+ propnames.append(name)
continue
- # it's a multilink, and it's been removed - drop the old
- # table. First drop indexes.
- self.drop_multilink_table_indexes(spec.classname, ml)
- sql = 'drop table %s_%s'%(spec.classname, prop)
+ tn = '%s_%s'%(cn, name)
+
+ if properties.has_key(name):
+                    # grab the current values
+ sql = 'select linkid, nodeid from %s'%tn
+ if __debug__:
+ print >>hyperdb.DEBUG, 'migration', (self, sql)
+ execute(sql)
+ rows = self.cursor.fetchall()
+
+ # drop the old table
+ self.drop_multilink_table_indexes(cn, name)
+ sql = 'drop table %s'%tn
if __debug__:
- print >>hyperdb.DEBUG, 'update_class', (self, sql)
- self.cursor.execute(sql)
- old_has = old_has.has_key
+ print >>hyperdb.DEBUG, 'migration', (self, sql)
+ execute(sql)
- # now figure how we populate the new table
- if adding_actor:
- fetch = ['_activity', '_creation', '_creator']
- else:
- fetch = ['_actor', '_activity', '_creation', '_creator']
- properties = spec.getprops()
- for propname,x in new_spec[1]:
- prop = properties[propname]
- if isinstance(prop, hyperdb.Multilink):
- if force or not old_has(propname):
- # we need to create the new table
- self.create_multilink_table(spec, propname)
- elif old_has(propname):
- # we copy this col over from the old table
- fetch.append('_'+propname)
+ if properties.has_key(name):
+ # re-create and populate the new table
+ self.create_multilink_table(klass, name)
+ sql = '''insert into %s (linkid, nodeid) values
+ (%s, %s)'''%(tn, self.arg, self.arg)
+ for linkid, nodeid in rows:
+ execute(sql, (int(linkid), int(nodeid)))
+
+ # figure the column names to fetch
+ fetch = ['_%s'%name for name in propnames]
# select the data out of the old table
fetch.append('id')
fetch.append('__retired__')
fetchcols = ','.join(fetch)
- cn = spec.classname
sql = 'select %s from _%s'%(fetchcols, cn)
if __debug__:
- print >>hyperdb.DEBUG, 'update_class', (self, sql)
+ print >>hyperdb.DEBUG, 'migration', (self, sql)
self.cursor.execute(sql)
- olddata = self.cursor.fetchall()
- # TODO: update all the other index dropping code
+ # unserialise the old data
+ olddata = []
+ propnames = propnames + ['id', '__retired__']
+ for entry in self.cursor.fetchall():
+ l = []
+ olddata.append(l)
+ for i in range(len(propnames)):
+ name = propnames[i]
+ v = entry[i]
+
+ if name in ('id', '__retired__'):
+ l.append(int(v))
+ continue
+ prop = properties[name]
+ if isinstance(prop, Date) and v is not None:
+ v = date.Date(v)
+ elif isinstance(prop, Interval) and v is not None:
+ v = date.Interval(v)
+ elif isinstance(prop, Password) and v is not None:
+ v = password.Password(encrypted=v)
+ elif (isinstance(prop, Boolean) or
+ isinstance(prop, Number)) and v is not None:
+ v = float(v)
+
+ # convert to new MySQL data type
+ prop = properties[name]
+ if v is not None:
+ v = self.hyperdb_to_sql_value[prop.__class__](v)
+ l.append(v)
+
self.drop_class_table_indexes(cn, old_spec[0])
# drop the old table
- self.cursor.execute('drop table _%s'%cn)
+ execute('drop table _%s'%cn)
# create the new table
- self.create_class_table(spec)
+ self.create_class_table(klass)
- # do the insert of the old data - the new columns will have
- # NULL values
+ # do the insert of the old data
args = ','.join([self.arg for x in fetch])
sql = 'insert into _%s (%s) values (%s)'%(cn, fetchcols, args)
if __debug__:
- print >>hyperdb.DEBUG, 'update_class', (self, sql, olddata[0])
+ print >>hyperdb.DEBUG, 'migration', (self, sql)
for entry in olddata:
- self.cursor.execute(sql, tuple(entry))
+ if __debug__:
+ print >>hyperdb.DEBUG, '... data', entry
+ execute(sql, tuple(entry))
- return 1
+ # now load up the old journal data
+ cols = ','.join('nodeid date tag action params'.split())
+ sql = 'select %s from %s__journal'%(cols, cn)
+ if __debug__:
+ print >>hyperdb.DEBUG, 'migration', (self, sql)
+ execute(sql)
+
+ olddata = []
+ for nodeid, journaldate, journaltag, action, params in \
+ self.cursor.fetchall():
+ nodeid = int(nodeid)
+ journaldate = date.Date(journaldate)
+ params = eval(params)
+ olddata.append((nodeid, journaldate, journaltag, action,
+ params))
+
+ # drop journal table and indexes
+ self.drop_journal_table_indexes(cn)
+ sql = 'drop table %s__journal'%cn
+ if __debug__:
+ print >>hyperdb.DEBUG, 'migration', (self, sql)
+ execute(sql)
+
+ # re-create journal table
+ self.create_journal_table(klass)
+ for nodeid, journaldate, journaltag, action, params in olddata:
+ self.save_journal(cn, cols, nodeid, journaldate,
+ journaltag, action, params)
+
+ # make sure the normal schema update code doesn't try to
+ # change things
+ self.database_schema['tables'][cn] = klass.schema()
def __repr__(self):
return '<myroundsql 0x%x>'%id(self)
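
The multilink handling in add_actor_column above follows a dump-and-restore
pattern: read the existing (linkid, nodeid) pairs, drop the old table and its
indexes, re-create the table with the new typed columns, and re-insert the
saved rows. A standalone sketch of that pattern against a plain DB-API cursor
(the function name and the create_sql parameter are illustrative, not the
backend's real helpers):

    def migrate_multilink_table(cursor, tablename, create_sql, arg='%s'):
        '''Dump and restore one multilink table so it picks up new column
        types. create_sql is assumed to hold the CREATE TABLE statement for
        the re-typed table; arg is the DB-API paramstyle placeholder.'''
        # grab the current values
        cursor.execute('select linkid, nodeid from %s' % tablename)
        rows = cursor.fetchall()

        # drop the old table (index dropping is omitted in this sketch)
        cursor.execute('drop table %s' % tablename)

        # re-create the table with the new datatypes and re-populate it
        cursor.execute(create_sql)
        sql = 'insert into %s (linkid, nodeid) values (%s, %s)' % (
            tablename, arg, arg)
        for linkid, nodeid in rows:
            cursor.execute(sql, (int(linkid), int(nodeid)))
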
index 56d0e1c0041a8a77c0bc5530b841a48e0a6ce541..07b0257591d9164a5d974315b1c2348847c62d0c 100644
-# $Id: rdbms_common.py,v 1.84 2004-03-22 07:45:39 richard Exp $
+# $Id: rdbms_common.py,v 1.85 2004-03-24 03:07:52 richard Exp $
''' Relational database (SQL) backend common code.
Basics:
return 0
if version == 1:
+ # change the schema structure
+ self.database_schema = {'tables': self.database_schema}
+
+ # version 1 didn't have the actor column (note that in
+ # MySQL this will also transition the tables to typed columns)
+ self.add_actor_column()
+
# version 1 doesn't have the OTK, session and indexing in the
# database
self.create_version_2_tables()
- # version 1 also didn't have the actor column
- self.add_actor_column()
self.database_schema['version'] = self.current_db_version
return 1
instance of a hyperdb "type" _or_ a string repr of that type.
'''
cols = [
- ('_actor', 'INTEGER'),
- ('_activity', 'DATE'),
- ('_creator', 'INTEGER'),
- ('_creation', 'DATE')
+ ('_actor', self.hyperdb_to_sql_datatypes[hyperdb.Link]),
+ ('_activity', self.hyperdb_to_sql_datatypes[hyperdb.Date]),
+ ('_creator', self.hyperdb_to_sql_datatypes[hyperdb.Link]),
+ ('_creation', self.hyperdb_to_sql_datatypes[hyperdb.Date]),
]
mls = []
# add the multilinks separately
return 1
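
In the determine_columns hunk above, the SQL types for the special columns are
now looked up in the backend's hyperdb_to_sql_datatypes mapping rather than
hard-coded as INTEGER and DATE. A purely illustrative mapping (the actual
types each backend chooses may differ):

    from roundup import hyperdb

    # illustrative only -- not the real per-backend mapping
    hyperdb_to_sql_datatypes = {
        hyperdb.String:   'TEXT',
        hyperdb.Date:     'DATETIME',
        hyperdb.Link:     'INTEGER',
        hyperdb.Interval: 'VARCHAR(255)',
        hyperdb.Number:   'REAL',
        hyperdb.Boolean:  'BOOLEAN',
    }
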
def create_class_table(self, spec):
- ''' create the class table for the given spec
- '''
+ '''Create the class table for the given Class "spec". Creates the
+ indexes too.'''
cols, mls = self.determine_columns(spec.properties.items())
# add on our special columns
def create_multilink_table_indexes(self, spec, ml):
# create index on linkid
index_sql = 'create index %s_%s_l_idx on %s_%s(linkid)'%(
- spec.classname, ml, spec.classname, ml)
+ spec.classname, ml, spec.classname, ml)
if __debug__:
print >>hyperdb.DEBUG, 'create_index', (self, index_sql)
self.cursor.execute(index_sql)
# create index on nodeid
index_sql = 'create index %s_%s_n_idx on %s_%s(nodeid)'%(
- spec.classname, ml, spec.classname, ml)
+ spec.classname, ml, spec.classname, ml)
if __debug__:
print >>hyperdb.DEBUG, 'create_index', (self, index_sql)
self.cursor.execute(index_sql)
sql_to_hyperdb_value = {
hyperdb.String : str,
- hyperdb.Date : date.Date,
+ hyperdb.Date : lambda x:date.Date(str(x).replace(' ', '.')),
# hyperdb.Link : int, # XXX numeric ids
hyperdb.Link : str,
hyperdb.Interval : date.Interval,
sql = 'insert into %s__journal (%s) values (%s,%s,%s,%s,%s)'%(
classname, cols, a, a, a, a, a)
if __debug__:
- print >>hyperdb.DEBUG, 'addjournal', (self, sql, entry)
+ print >>hyperdb.DEBUG, 'save_journal', (self, sql, entry)
self.cursor.execute(sql, entry)
def load_journal(self, classname, cols, nodeid):
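
The sql_to_hyperdb_value change for hyperdb.Date above converts the space
between the date and time parts of a database timestamp into the '.' separator
that roundup.date.Date expects (compare the usagespec in the date.py hunk
below). A quick illustration, assuming a typical 'YYYY-MM-DD HH:MM:SS' string
coming back from the driver:

    from roundup import date

    raw = '2004-03-24 03:07:52'    # value as returned by the database
    to_date = lambda x: date.Date(str(x).replace(' ', '.'))
    d = to_date(raw)               # parsed as the spec '2004-03-24.03:07:52'
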
index f3789f0e13e946bcd53c28d068f87f70a7ec2f88..0d644dd22d033dc9ba18662530dbf742e1ea7a54 100644
l = []
for entry in ids:
if num_re.match(entry):
- l.append(cl.get(entry, key))
+ l.append(linkcl.get(entry, key))
else:
l.append(entry)
return l
diff --git a/roundup/date.py b/roundup/date.py
index 60377117cb7da0afa4ff63ed26a983b5568df4a8..6b9a807e40f792ad3233a496efd225c3c8f444e8 100644
--- a/roundup/date.py
+++ b/roundup/date.py
# BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
#
-# $Id: date.py,v 1.61 2004-03-22 07:45:39 richard Exp $
+# $Id: date.py,v 1.62 2004-03-24 03:07:51 richard Exp $
"""Date, time and time interval handling.
"""
except:
raise ValueError, 'Unknown spec %r'%spec
- usagespec='[yyyy]-[mm]-[dd].[H]H:MM[:SS][offset]'
+ usagespec='[yyyy]-[mm]-[dd].[H]H:MM[:SS.SSS][offset]'
def set(self, spec, offset=0, date_re=re.compile(r'''
((?P<y>\d\d\d\d)([/-](?P<m>\d\d?)([/-](?P<d>\d\d?))?)? # yyyy[-mm[-dd]]
|(?P<a>\d\d?)[/-](?P<b>\d\d?))? # or mm-dd
(?P<n>\.)? # .
- (((?P<H>\d?\d):(?P<M>\d\d))?(:(?P<S>\d\d))?)? # hh:mm:ss
+ (((?P<H>\d?\d):(?P<M>\d\d))?(:(?P<S>\d\d(\.\d+)?))?)? # hh:mm:ss
(?P<o>.+)? # offset
''', re.VERBOSE), serialised_re=re.compile(r'''
- (\d{4})(\d\d)(\d\d)(\d\d)(\d\d)(\d\d)
+ (\d{4})(\d\d)(\d\d)(\d\d)(\d\d)(\d\d(\.\d+)?)
''', re.VERBOSE), add_granularity=0):
''' set the date to the value in spec
'''
if m is not None:
# we're serialised - easy!
g = m.groups()
- (self.year, self.month, self.day, self.hour, self.minute,
- self.second) = map(int, g[:6])
+ (self.year, self.month, self.day, self.hour, self.minute) = \
+ map(int, g[:5])
+ self.second = float(g[5])
return
# not serialised data, try usual format
H = int(info['H']) - offset
M = int(info['M'])
S = 0
- if info['S'] is not None: S = int(info['S'])
+ if info['S'] is not None:
+ S = float(info['S'])
if add_granularity:
S = S - 1
try:
self.applyInterval(Interval(info['o'], allowdate=0))
except ValueError:
- raise ValueError, _('Not a date spec: %s' % self.usagespec)
+ raise ValueError, _('%r not a date spec (%s)')%(spec,
+ self.usagespec)
def addInterval(self, interval):
''' Add the interval to this date, returning the date tuple
day = self.day + sign * interval.day
hour = self.hour + sign * interval.hour
minute = self.minute + sign * interval.minute
- second = self.second + sign * interval.second
+ # Intervals work on whole seconds
+ second = int(self.second) + sign * interval.second
# now cope with under- and over-flow
# first do the time
self.minute, self.second, 0, 0, 0))
b = calendar.timegm((other.year, other.month, other.day,
other.hour, other.minute, other.second, 0, 0, 0))
- diff = a - b
+ # intervals work in whole seconds
+ diff = int(a - b)
if diff > 0:
sign = 1
else:
if len(spec) == 7:
self.sign, self.year, self.month, self.day, self.hour, \
self.minute, self.second = spec
+ self.second = int(self.second)
else:
# old, buggy spec form
self.sign = sign
self.year, self.month, self.day, self.hour, self.minute, \
self.second = spec
+ self.second = int(self.second)
def set(self, spec, allowdate=1, interval_re=re.compile('''
\s*(?P<s>[-+])? # + or -
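
The date.py changes above extend both the human-readable spec and the
serialised form to carry fractional seconds, while Interval values stay in
whole seconds. A brief illustration with assumed example values:

    from roundup import date

    # fractional seconds are now accepted in the ordinary spec ...
    d = date.Date('2004-03-24.03:07:52.5')

    # ... and in the serialised form matched by the new serialised_re
    d2 = date.Date('20040324030752.5')
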