From 146d9de9520fc0da66309ca724782e4b6028a2cd Mon Sep 17 00:00:00 2001 From: "Karl O. Pinc" Date: Sat, 22 Jun 2024 20:56:10 -0500 Subject: [PATCH] Throw invalid column count errors where the insert statement is executed Psycopg 3 throws a generic ProgrammingError, so core.py has trouble reporting the problem. Perhaps this can be solved by creating a generic PreparedStmt class at the core level, but this works for the moment. --- src/pgwui_upload_core/views/upload.py | 84 ++++++++++++++++++++------- 1 file changed, 63 insertions(+), 21 deletions(-) diff --git a/src/pgwui_upload_core/views/upload.py b/src/pgwui_upload_core/views/upload.py index ba6b7a5..6184035 100644 --- a/src/pgwui_upload_core/views/upload.py +++ b/src/pgwui_upload_core/views/upload.py @@ -29,6 +29,8 @@ import logging import markupsafe import psycopg.errors +from html import escape as html_escape + from pgwui_core.core import ( UploadNullFileInitialPost, TabularFileUploadHandler, @@ -124,7 +126,7 @@ class BaseTableUploadHandler(TabularFileUploadHandler): self.write_double_key(response) return response - def _execute(self, stmt, tupl): + def execute(self, stmt, tupl): '''Execute a statement and express encoding errors ''' try: @@ -149,7 +151,7 @@ class BaseTableUploadHandler(TabularFileUploadHandler): if not resolvable. ''' try: - self._execute( + self.execute( ('SELECT nspname, relname' ' FROM pg_class' ' JOIN pg_namespace' @@ -212,7 +214,7 @@ class BaseTableUploadHandler(TabularFileUploadHandler): # tables.is_insertable_into does not reflect whether # there's an insert trigger on the table. 
" OR tables.table_type = 'VIEW')") - self._execute(sql, (table, schema)) + self.execute(sql, (table, schema)) return self.cur.fetchone() is not None def quote_columns(self, settings): @@ -281,7 +283,33 @@ class BaseTableUploadHandler(TabularFileUploadHandler): def build_insert_stmt( self, data, qualified_table, quotecols, column_quoter): - schema, table = self.validate_table(qualified_table) + insert_stmt = InsertStmt() + insert_stmt.build_insert_stmt( + self, data, qualified_table, quotecols, column_quoter) + return insert_stmt + + def factory(self, ue): + '''Make a db loader function from an UploadEngine. + + Input: + + Side Effects: + Assigns: self.ue, self.cur + And, lots of changes to the db + ''' + self.ue = ue + self.cur = ue.cur + + +@attrs.define +class InsertStmt: + stmt = attrs.field(default=None) + cols = attrs.field(default=None) + + def build_insert_stmt( + self, tuh, data, qualified_table, quotecols, column_quoter): + '''tuh: A TableUploadHandler''' + schema, table = tuh.validate_table(qualified_table) column_sql = ('SELECT 1 FROM information_schema.columns' ' WHERE columns.table_name = %s' @@ -291,37 +319,26 @@ class BaseTableUploadHandler(TabularFileUploadHandler): else: column_sql += ' AND columns.column_name = lower(%s::name)' - insert_stmt = f'INSERT INTO {self.quotetable(schema, table)} (' + self.stmt = f'INSERT INTO {tuh.quotetable(schema, table)} (' value_string = '' col_sep = '' bad_cols = [] for col_name in data.headers.tuples: # Check that colum name exists - self._execute(column_sql, (table, schema, col_name)) - if self.cur.fetchone() is None: + tuh.execute(column_sql, (table, schema, col_name)) + if tuh.cur.fetchone() is None: bad_cols.append(col_name) else: # Add column to sql statement - insert_stmt += col_sep + column_quoter(col_name) + self.stmt += col_sep + column_quoter(col_name) value_string += col_sep + '%s' col_sep = ', ' if bad_cols: self.report_bad_cols(qualified_table, bad_cols, quotecols) - return insert_stmt + ') 
VALUES({0})'.format(value_string) - - def factory(self, ue): - '''Make a db loader function from an UploadEngine. - - Input: - - Side Effects: - Assigns: self.ue, self.cur - And, lots of changes to the db - ''' - self.ue = ue - self.cur = ue.cur + self.stmt += ') VALUES({0})'.format(value_string) + self.cols = len(data.headers.tuples) def set_upload_response(component, request, response): @@ -335,3 +352,28 @@ def set_upload_response(component, request, response): response.setdefault('pgwui', dict()) response['pgwui']['upload_settings'] = upload_settings + + + def match_insert_to_dataline(udl, insert_stmt, source_file=None): + '''Make sure the UploadDataLine has the number of values expected + by the insert statement + ''' + data_cols = len(udl.tuples) + if data_cols != insert_stmt.cols: + descr = ('The number of data columns does not match the' + " number of headings given in the file's first line") + + source_detail = '' + if source_file is not None: + source_detail = f'File: ({html_escape(source_file)}); ' + detail = (f'{source_detail}' + f'Expected number of columns: {insert_stmt.cols}') + + if data_cols > insert_stmt.cols: + raise core_ex.TooManyColsError( + udl.lineno, 'Too many data columns', descr=descr, + detail=detail, data=udl.raw) + else: + raise core_ex.TooFewColsError( + udl.lineno, 'Too few data columns', descr=descr, + detail=detail, data=udl.raw) -- 2.34.1