From 02bb637a7aabf6242b148d951ba123524e112d84 Mon Sep 17 00:00:00 2001
From: schlatterbeck
Date: Tue, 29 Sep 2009 07:27:17 +0000
Subject: [PATCH] New config option csv_field_size: Python's csv module (which
 is used for export/import) has a new field size limit starting with Python
 2.5. We now issue a warning during export if the limit is too small and use
 the csv_field_size configuration during import to set the limit for the csv
 module.

git-svn-id: http://svn.roundup-tracker.org/svnroot/roundup/roundup/trunk@4359 57a73879-2fb5-44c3-a270-3262357dd7e2
---
 CHANGES.txt              |  5 +++
 roundup/admin.py         | 26 ++++++++++++--
 roundup/configuration.py |  7 ++++
 test/db_test_base.py     | 73 +++++++++++++++++++++++++++++++++++-----
 4 files changed, 100 insertions(+), 11 deletions(-)

diff --git a/CHANGES.txt b/CHANGES.txt
index 2078365..7d0dfbe 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -12,6 +12,11 @@ Fixes:
   and stopwords (thanks Thomas Arendsen Hein, Bernhard Reiter)(issue 2550584)
 - fixed typos in the installation instructions (thanks Thomas Arendsen Hein)
   (issue 2550573)
+- New config option csv_field_size: Python's csv module (which is used
+  for export/import) has a new field size limit starting with Python 2.5.
+  We now issue a warning during export if the limit is too small and use
+  the csv_field_size configuration during import to set the limit for
+  the csv module.
 
 2009-08-10 1.4.9 (r4346)
 
diff --git a/roundup/admin.py b/roundup/admin.py
index 91273bc..83728c7 100644
--- a/roundup/admin.py
+++ b/roundup/admin.py
@@ -1099,6 +1099,9 @@ Erase it? Y/N: """))
         if not os.path.exists(dir):
             os.makedirs(dir)
 
+        # maximum csv field length exceeding configured size?
+        max_len = self.db.config.CSV_FIELD_SIZE
+
         # do all the classes specified
         for classname in classes:
             cl = self.get_class(classname)
@@ -1121,7 +1124,18 @@ Erase it? Y/N: """))
                 if self.verbose:
                     sys.stdout.write('\rExporting %s - %s'%(classname, nodeid))
                     sys.stdout.flush()
-                writer.writerow(cl.export_list(propnames, nodeid))
+                node = cl.getnode(nodeid)
+                exp = cl.export_list(propnames, nodeid)
+                lensum = sum(len(repr(node[p])) for p in propnames)
+                # for a safe upper bound of field length we add the
+                # difference between CSV len and sum of all field lengths
+                d = sum(len(x) for x in exp) - lensum
+                assert d > 0
+                for p in propnames:
+                    ll = len(repr(node[p])) + d
+                    if ll > max_len:
+                        max_len = ll
+                writer.writerow(exp)
                 if export_files and hasattr(cl, 'export_files'):
                     cl.export_files(dir, nodeid)
 
@@ -1136,6 +1150,9 @@ Erase it? Y/N: """))
             journals = csv.writer(jf, colon_separated)
             map(journals.writerow, cl.export_journals())
             jf.close()
+        if max_len > self.db.config.CSV_FIELD_SIZE:
+            print >> sys.stderr, \
+                "Warning: config csv_field_size should be at least %s"%max_len
         return 0
 
     def do_exporttables(self, args):
@@ -1177,6 +1194,9 @@ Erase it? Y/N: """))
             raise UsageError, _('Not enough arguments supplied')
         from roundup import hyperdb
 
+        if hasattr(csv, 'field_size_limit'):
+            csv.field_size_limit(self.db.config.CSV_FIELD_SIZE)
+
         # directory to import from
         dir = args[0]
 
@@ -1212,7 +1232,7 @@ Erase it? Y/N: """))
                 if hasattr(cl, 'import_files'):
                     cl.import_files(dir, nodeid)
                 maxid = max(maxid, int(nodeid))
-            print
+            print >> sys.stdout
             f.close()
 
             # import the journals
@@ -1222,7 +1242,7 @@ Erase it? Y/N: """))
             f.close()
 
             # set the id counter
-            print 'setting', classname, maxid+1
+            print >> sys.stdout, 'setting', classname, maxid+1
            self.db.setid(classname, str(maxid+1))
 
         self.db_uncommitted = True
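A note on the export hunk above, since the bound is subtle: do_export() measures
the csv overhead d once per row (the difference between what export_list()
emits and the raw repr() lengths of the properties) and then charges all of d
to every field, so the reported minimum for csv_field_size can only
overestimate, never undershoot. The same idea in a minimal standalone sketch --
the sample data and the names values/safe_max are illustrative only, and the
overhead here is measured against a plain written csv row rather than
Roundup's actual export encoding:

    import csv
    import io

    # one row of raw property values for a node (made-up sample data)
    values = ['plain', 'contains, a comma and "quotes"', 'X' * 500]

    # write the row as an exporter would, then measure the overhead the
    # csv layer added (delimiter commas, quote characters, escapes)
    buf = io.StringIO()
    csv.writer(buf).writerow(values)
    row_len = len(buf.getvalue().rstrip('\r\n'))

    lensum = sum(len(v) for v in values)
    d = row_len - lensum    # total csv overhead for the whole row
    assert d >= 0

    # charging the whole overhead to every field individually can only
    # overestimate, so the maximum is a safe value for csv_field_size
    safe_max = max(len(v) + d for v in values)
    print('csv_field_size should be at least', safe_max)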
diff --git a/roundup/configuration.py b/roundup/configuration.py
index b6571b0..bd2811e 100644
--- a/roundup/configuration.py
+++ b/roundup/configuration.py
@@ -530,6 +530,13 @@ SETTINGS = (
             "stop-words (eg. A,AND,ARE,AS,AT,BE,BUT,BY, ...)"),
         (OctalNumberOption, "umask", "02",
             "Defines the file creation mode mask."),
+        (IntegerNumberOption, 'csv_field_size', '131072',
+            "Maximum size of a csv field during import. Roundup's export\n"
+            "format is a csv (comma separated values) variant. The csv\n"
+            "reader has a limit on the size of individual fields\n"
+            "starting with Python 2.5. Set this to a higher value if you\n"
+            "get the error 'Error: field larger than field limit' during\n"
+            "import."),
     )),
     ("tracker", (
         (Option, "name", "Roundup issue tracker",
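A note on the new option: the default of 131072 matches the csv module's own
built-in field size limit, and do_import() guards the call with hasattr()
because csv.field_size_limit() only exists from Python 2.5 on. The failure
mode the option addresses can be reproduced with the csv module alone; a small
sketch, with an arbitrary oversized field (the name big is just for
illustration):

    import csv
    import io

    big = 'X' * 200000    # larger than the default 128 KiB limit

    data = io.StringIO('"%s",other\r\n' % big)
    try:
        next(csv.reader(data))
    except csv.Error as err:
        print('Error:', err)    # "field larger than field limit (131072)"

    # raising the limit first, as do_import() now does from the
    # configured csv_field_size, lets the oversized field through
    csv.field_size_limit(len(big) + 1024)
    data.seek(0)
    row = next(csv.reader(data))
    print(len(row[0]))    # 200000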
diff --git a/test/db_test_base.py b/test/db_test_base.py
index ce3d2ee..d5cada9 100644
--- a/test/db_test_base.py
+++ b/test/db_test_base.py
@@ -1613,6 +1613,18 @@ class DBTest(MyTestCase):
 
     # XXX add sorting tests for other types
 
+    # nuke and re-create db for restore
+    def nukeAndCreate(self):
+        # shut down this db and nuke it
+        self.db.close()
+        self.nuke_database()
+
+        # open a new, empty database
+        os.makedirs(config.DATABASE + '/files')
+        self.db = self.module.Database(config, 'admin')
+        setupSchema(self.db, 0, self.module)
+
+
     def testImportExport(self):
         # use the filtering setup to create a bunch of items
         ae, filt = self.filteringSetup()
@@ -1660,14 +1672,7 @@ class DBTest(MyTestCase):
                     klass.export_files('_test_export', id)
             journals[cn] = klass.export_journals()
 
-        # shut down this db and nuke it
-        self.db.close()
-        self.nuke_database()
-
-        # open a new, empty database
-        os.makedirs(config.DATABASE + '/files')
-        self.db = self.module.Database(config, 'admin')
-        setupSchema(self.db, 0, self.module)
+        self.nukeAndCreate()
 
         # import
         for cn, items in export.items():
@@ -1730,6 +1735,58 @@ class DBTest(MyTestCase):
         newid = self.db.user.create(username='testing')
         assert newid > maxid
 
+    # test import/export via admin interface
+    def testAdminImportExport(self):
+        import roundup.admin
+        import csv
+        # use the filtering setup to create a bunch of items
+        ae, filt = self.filteringSetup()
+        # create large field
+        self.db.priority.create(name='X' * 500)
+        self.db.config.CSV_FIELD_SIZE = 400
+        self.db.commit()
+        output = []
+        # ugly hack to get stderr output and disable stdout output
+        # during regression test. Depends on roundup.admin not using
+        # anything but stdout/stderr from sys (which is currently the
+        # case)
+        def stderrwrite(s):
+            output.append(s)
+        roundup.admin.sys = MockNull()
+        try:
+            roundup.admin.sys.stderr.write = stderrwrite
+            tool = roundup.admin.AdminTool()
+            home = '.'
+            tool.tracker_home = home
+            tool.db = self.db
+            tool.verbose = False
+            tool.do_export(['_test_export'])
+            self.assertEqual(len(output), 2)
+            self.assertEqual(output[1], '\n')
+            self.failUnless(output[0].startswith
+                ('Warning: config csv_field_size should be at least'))
+            self.failUnless(int(output[0].split()[-1]) > 500)
+
+            if hasattr(roundup.admin.csv, 'field_size_limit'):
+                self.nukeAndCreate()
+                self.db.config.CSV_FIELD_SIZE = 400
+                tool = roundup.admin.AdminTool()
+                tool.tracker_home = home
+                tool.db = self.db
+                tool.verbose = False
+                self.assertRaises(csv.Error, tool.do_import, ['_test_export'])
+
+            self.nukeAndCreate()
+            self.db.config.CSV_FIELD_SIZE = 3200
+            tool = roundup.admin.AdminTool()
+            tool.tracker_home = home
+            tool.db = self.db
+            tool.verbose = False
+            tool.do_import(['_test_export'])
+        finally:
+            roundup.admin.sys = sys
+        shutil.rmtree('_test_export')
+
     def testAddProperty(self):
         self.db.issue.create(title="spam", status='1')
         self.db.commit()
-- 
2.30.2