From 99e556f6f581c3ff84cc9e116f487650fab2b51e Mon Sep 17 00:00:00 2001 From: silentninja Date: Tue, 1 Mar 2022 16:19:42 +0530 Subject: [PATCH 1/8] Replace column_index usage with attnum in Column creation and copying operations. --- db/columns/operations/alter.py | 21 ++++++---- db/columns/operations/create.py | 45 ++++++++++++---------- db/columns/operations/select.py | 23 +++++++++-- db/constraints/operations/create.py | 13 ++++--- db/constraints/operations/select.py | 4 +- db/tests/columns/operations/test_alter.py | 24 +++++++----- db/tests/columns/operations/test_create.py | 43 +++++++++++++-------- 7 files changed, 106 insertions(+), 67 deletions(-) diff --git a/db/columns/operations/alter.py b/db/columns/operations/alter.py index 8ca95d7aed..fb30e6c6a7 100644 --- a/db/columns/operations/alter.py +++ b/db/columns/operations/alter.py @@ -6,7 +6,9 @@ from db.columns.defaults import NAME, NULLABLE from db.columns.exceptions import InvalidDefaultError, InvalidTypeError, InvalidTypeOptionError -from db.columns.operations.select import get_column_attnum_from_name, get_column_default, get_column_index_from_name +from db.columns.operations.select import ( + get_column_attnum_from_name, get_column_default, get_column_name_from_attnum, +) from db.columns.utils import get_mathesar_column_with_engine, get_type_options from db.tables.operations.select import get_oid_from_table, reflect_table_from_oid from db.types.base import get_db_type_name @@ -72,13 +74,12 @@ def alter_column_type( # Re-reflect table so that column is accurate table = reflect_table_from_oid(table_oid, engine, connection) column = table.columns[column_name] - column_index = get_column_index_from_name(table_oid, column_name, engine, connection) column_attnum = get_column_attnum_from_name(table_oid, column_name, engine, connection) default = get_column_default(table_oid, column_attnum, engine, connection) if default is not None: default_text = column.server_default.arg.text - set_column_default(table, column_index, engine, connection, None) + set_column_default(table_oid, column_attnum, engine, connection, None) prepared_table_name = _preparer.format_table(table) prepared_column_name = _preparer.format_column(column) @@ -97,7 +98,7 @@ def alter_column_type( cast_stmt = f"{cast_function_name}({default_text})" default_stmt = select(text(cast_stmt)) new_default = str(execute_statement(engine, default_stmt, connection).first()[0]) - set_column_default(table, column_index, engine, connection, new_default) + set_column_default(table_oid, column_attnum, engine, connection, new_default) def retype_column( @@ -135,15 +136,19 @@ def retype_column( raise e.orig -def change_column_nullable(table, column_index, engine, connection, nullable): - column = table.columns[column_index] +def change_column_nullable(table_oid, column_attum, engine, connection, nullable): + table = reflect_table_from_oid(table_oid, engine) + column_name = get_column_name_from_attnum(table_oid, column_attum, engine) + column = table.columns[column_name] ctx = MigrationContext.configure(connection) op = Operations(ctx) op.alter_column(table.name, column.name, nullable=nullable, schema=table.schema) -def set_column_default(table, column_index, engine, connection, default): - column = table.columns[column_index] +def set_column_default(table_oid, column_attnum, engine, connection, default): + table = reflect_table_from_oid(table_oid, engine, connection) + column_name = get_column_name_from_attnum(table_oid, column_attnum, engine) + column = table.columns[column_name] 
default_clause = DefaultClause(str(default)) if default is not None else default try: ctx = MigrationContext.configure(connection) diff --git a/db/columns/operations/create.py b/db/columns/operations/create.py index 8a778aba06..310d98bd7c 100644 --- a/db/columns/operations/create.py +++ b/db/columns/operations/create.py @@ -9,7 +9,9 @@ from db.columns.defaults import DEFAULT, NAME, NULLABLE, TYPE from db.columns.exceptions import InvalidDefaultError, InvalidTypeError, InvalidTypeOptionError from db.columns.operations.alter import set_column_default, change_column_nullable -from db.columns.operations.select import get_column_attnum_from_name, get_column_default, get_column_index_from_name +from db.columns.operations.select import ( + get_column_attnum_from_name, get_column_default, get_column_name_from_attnum, +) from db.columns.utils import get_mathesar_column_with_engine from db.constraints.operations.create import copy_constraint from db.constraints.operations.select import get_column_constraints @@ -92,43 +94,45 @@ def compile_copy_column(element, compiler, **_): ) -def _duplicate_column_data(table_oid, from_column, to_column, engine): +def _duplicate_column_data(table_oid, from_column_attnum, to_column_attnum, engine): table = reflect_table_from_oid(table_oid, engine) - from_column_attnum = get_column_attnum_from_name(table_oid, table.c[from_column].name, engine) + from_column_name = get_column_name_from_attnum(table_oid, from_column_attnum, engine) + to_column_name = get_column_name_from_attnum(table_oid, to_column_attnum, engine) copy = CopyColumn( table.schema, table.name, - table.c[to_column].name, - table.c[from_column].name + to_column_name, + from_column_name, ) with engine.begin() as conn: conn.execute(copy) from_default = get_column_default(table_oid, from_column_attnum, engine) if from_default is not None: with engine.begin() as conn: - set_column_default(table, to_column, engine, conn, from_default) + set_column_default(table_oid, to_column_attnum, engine, conn, from_default) -def _duplicate_column_constraints(table_oid, from_column, to_column, engine, copy_nullable=True): +def _duplicate_column_constraints(table_oid, from_column_attnum, to_column_attnum, engine, copy_nullable=True): table = reflect_table_from_oid(table_oid, engine) + from_column_name = get_column_name_from_attnum(table_oid, from_column_attnum, engine) if copy_nullable: with engine.begin() as conn: - change_column_nullable(table, to_column, engine, conn, table.c[from_column].nullable) - - constraints = get_column_constraints(from_column, table_oid, engine) + change_column_nullable(table_oid, to_column_attnum, engine, conn, table.c[from_column_name].nullable) + constraints = get_column_constraints(from_column_attnum, table_oid, engine) for constraint in constraints: constraint_type = constraint_utils.get_constraint_type_from_char(constraint.contype) if constraint_type != constraint_utils.ConstraintType.UNIQUE.value: # Don't allow duplication of primary keys continue copy_constraint( - table, engine, constraint, from_column, to_column + table_oid, engine, constraint, from_column_attnum, to_column_attnum ) -def duplicate_column(table_oid, copy_from_index, engine, new_column_name=None, copy_data=True, copy_constraints=True): +def duplicate_column(table_oid, copy_from_attnum, engine, new_column_name=None, copy_data=True, copy_constraints=True): table = reflect_table_from_oid(table_oid, engine) - from_column = table.c[copy_from_index] + copy_from_name = get_column_name_from_attnum(table_oid, copy_from_attnum, 
engine)
+    from_column = table.c[copy_from_name]
     if new_column_name is None:
         new_column_name = _gen_col_name(table, from_column.name)
@@ -138,25 +142,24 @@ def duplicate_column(table_oid, copy_from_index, engine, new_column_name=None, c
         NULLABLE: True,
     }
     new_column = create_column(engine, table_oid, column_data)
-    new_column_index = get_column_index_from_name(table_oid, new_column.name, engine)
-
+    new_column_attnum = get_column_attnum_from_name(table_oid, new_column.name, engine)
     if copy_data:
         _duplicate_column_data(
             table_oid,
-            copy_from_index,
-            new_column_index,
+            copy_from_attnum,
+            new_column_attnum,
             engine
         )
 
     if copy_constraints:
         _duplicate_column_constraints(
             table_oid,
-            copy_from_index,
-            new_column_index,
+            copy_from_attnum,
+            new_column_attnum,
             engine,
             copy_nullable=copy_data
         )
 
     table = reflect_table_from_oid(table_oid, engine)
-    column_index = get_column_index_from_name(table_oid, new_column_name, engine)
-    return get_mathesar_column_with_engine(table.c[column_index], engine)
+    column_name = get_column_name_from_attnum(table_oid, new_column_attnum, engine)
+    return get_mathesar_column_with_engine(table.c[column_name], engine)
diff --git a/db/columns/operations/select.py b/db/columns/operations/select.py
index c5e1f780a0..ee7c69dc13 100644
--- a/db/columns/operations/select.py
+++ b/db/columns/operations/select.py
@@ -83,18 +83,33 @@ def get_column_indexes_from_table(table_oid, engine, connection_to_use=None):
     return results
 
 
-def get_column_name_from_attnum(table_oid, attnum, engine, connection_to_use=None):
+def _get_columns_name_from_attnums(table_oid, attnums, engine):
     with warnings.catch_warnings():
         warnings.filterwarnings("ignore", message="Did not recognize type")
         pg_attribute = Table("pg_attribute", MetaData(), autoload_with=engine)
     sel = select(pg_attribute.c.attname).where(
         and_(
             pg_attribute.c.attrelid == table_oid,
-            pg_attribute.c.attnum == attnum
+            pg_attribute.c.attnum.in_(attnums)
        )
     )
-    result = execute_statement(engine, sel, connection_to_use).scalar()
-    return result
+    return sel
+
+
+def get_columns_name_from_attnums(table_oid, attnums, engine, connection_to_use=None):
+    """
+    Returns the list of column names for the given attnums.
+    The order is based on the column order in the table, not the order of the attnums argument.
+ """ + statement = _get_columns_name_from_attnums(table_oid, attnums, engine, connection_to_use=None) + column_names_tuple = execute_statement(engine, statement, connection_to_use).fetchall() + column_names = [column_name_tuple[0] for column_name_tuple in column_names_tuple] + return column_names + + +def get_column_name_from_attnum(table_oid, attnum, engine, connection_to_use=None): + statement = _get_columns_name_from_attnums(table_oid, [attnum], engine, connection_to_use=None) + return execute_statement(engine, statement, connection_to_use).scalar() def get_column_default_dict(table_oid, attnum, engine, connection_to_use=None): diff --git a/db/constraints/operations/create.py b/db/constraints/operations/create.py index c232536002..9981a9187a 100644 --- a/db/constraints/operations/create.py +++ b/db/constraints/operations/create.py @@ -2,7 +2,9 @@ from alembic.operations import Operations from sqlalchemy import MetaData +from db.columns.operations.select import get_columns_name_from_attnums from db.constraints.utils import get_constraint_type_from_char, ConstraintType, naming_convention +from db.tables.operations.select import reflect_table_from_oid def create_unique_constraint(table_name, schema, engine, columns, constraint_name=None): @@ -16,14 +18,13 @@ def create_unique_constraint(table_name, schema, engine, columns, constraint_nam op.create_unique_constraint(constraint_name, table_name, columns, schema) -def copy_constraint(table, engine, constraint, from_column, to_column): +def copy_constraint(table_oid, engine, constraint, from_column_attnum, to_column_attnum): + table = reflect_table_from_oid(table_oid, engine) constraint_type = get_constraint_type_from_char(constraint.contype) if constraint_type == ConstraintType.UNIQUE.value: - column_idxs = [con - 1 for con in constraint.conkey] - columns = [ - table.c[to_column if idx == from_column else idx].name - for idx in column_idxs - ] + column_attnums = constraint.conkey + changed_column_attnums = [to_column_attnum if attnum == from_column_attnum else attnum for attnum in column_attnums] + columns = get_columns_name_from_attnums(table_oid, changed_column_attnums, engine) create_unique_constraint(table.name, table.schema, engine, columns) else: raise NotImplementedError diff --git a/db/constraints/operations/select.py b/db/constraints/operations/select.py index f46997740e..abace625b1 100644 --- a/db/constraints/operations/select.py +++ b/db/constraints/operations/select.py @@ -49,7 +49,7 @@ def get_constraint_oid_by_name_and_table_oid(name, table_oid, engine): return result['oid'] -def get_column_constraints(column_index, table_oid, engine): +def get_column_constraints(column_attnum, table_oid, engine): metadata = MetaData() with warnings.catch_warnings(): warnings.filterwarnings("ignore", message="Did not recognize type") @@ -62,7 +62,7 @@ def get_column_constraints(column_index, table_oid, engine): pg_constraint.c.conrelid == table_oid, # 'conkey' contains a list of the constrained column's indices # Here, we check if the column index appears in the conkey list - pg_constraint.c.conkey.bool_op("&&")(f"{{{column_index + 1}}}") + pg_constraint.c.conkey.bool_op("&&")(f"{{{column_attnum}}}") )) ) diff --git a/db/tests/columns/operations/test_alter.py b/db/tests/columns/operations/test_alter.py index 3f944b7686..346df01d3c 100644 --- a/db/tests/columns/operations/test_alter.py +++ b/db/tests/columns/operations/test_alter.py @@ -262,10 +262,12 @@ def test_change_column_nullable_changes(engine_with_schema, nullable_tup): 
Column(nontarget_column_name, String), ) table.create() + table_oid = get_oid_from_table(table_name, schema, engine) + target_column_attnum = get_column_attnum_from_name(table_oid, target_column_name, engine) with engine.begin() as conn: change_column_nullable( - table, - 0, + table_oid, + target_column_attnum, engine, conn, nullable_tup[1], @@ -289,6 +291,7 @@ def test_change_column_nullable_with_data(engine_with_schema, nullable_tup): Column(target_column_name, Integer, nullable=nullable_tup[0]), ) table.create() + table_oid = get_oid_from_table(table_name, schema, engine) ins = table.insert().values( [ {target_column_name: 1}, @@ -298,10 +301,11 @@ def test_change_column_nullable_with_data(engine_with_schema, nullable_tup): ) with engine.begin() as conn: conn.execute(ins) + target_column_attnum = get_column_attnum_from_name(table_oid, target_column_name, engine) with engine.begin() as conn: change_column_nullable( - table, - 0, + table_oid, + target_column_attnum, engine, conn, nullable_tup[1], @@ -324,6 +328,8 @@ def test_change_column_nullable_changes_raises_with_null_data(engine_with_schema Column(target_column_name, Integer, nullable=True), ) table.create() + table_oid = get_oid_from_table(table_name, schema, engine) + target_column_attnum = get_column_attnum_from_name(table_oid, target_column_name, engine) ins = table.insert().values( [ {target_column_name: 1}, @@ -336,8 +342,8 @@ def test_change_column_nullable_changes_raises_with_null_data(engine_with_schema with engine.begin() as conn: with pytest.raises(IntegrityError) as e: change_column_nullable( - table, - 0, + table_oid, + target_column_attnum, engine, conn, False, @@ -360,7 +366,7 @@ def test_column_default_create(engine_with_schema, col_type): table_oid = get_oid_from_table(table_name, schema, engine) column_attnum = get_column_attnum_from_name(table_oid, column_name, engine) with engine.begin() as conn: - set_column_default(table, 0, engine, conn, set_default) + set_column_default(table_oid, column_attnum, engine, conn, set_default) default = get_column_default(table_oid, column_attnum, engine) created_default = get_default(engine, table) @@ -384,7 +390,7 @@ def test_column_default_update(engine_with_schema, col_type): table_oid = get_oid_from_table(table_name, schema, engine) column_attnum = get_column_attnum_from_name(table_oid, column_name, engine) with engine.begin() as conn: - set_column_default(table, 0, engine, conn, set_default) + set_column_default(table_oid, column_attnum, engine, conn, set_default) default = get_column_default(table_oid, column_attnum, engine) created_default = get_default(engine, table) @@ -408,7 +414,7 @@ def test_column_default_delete(engine_with_schema, col_type): table_oid = get_oid_from_table(table_name, schema, engine) column_attnum = get_column_attnum_from_name(table_oid, column_name, engine) with engine.begin() as conn: - set_column_default(table, 0, engine, conn, None) + set_column_default(table_oid, column_attnum, engine, conn, None) default = get_column_default(table_oid, column_attnum, engine) created_default = get_default(engine, table) diff --git a/db/tests/columns/operations/test_create.py b/db/tests/columns/operations/test_create.py index 3d3903198c..ccbc1de645 100644 --- a/db/tests/columns/operations/test_create.py +++ b/db/tests/columns/operations/test_create.py @@ -2,7 +2,7 @@ from sqlalchemy import Integer, Column, Table, MetaData, Numeric, UniqueConstraint from db.columns.operations.create import create_column, duplicate_column -from db.columns.operations.select import 
get_column_attnum_from_name, get_column_default, get_column_index_from_name +from db.columns.operations.select import get_column_attnum_from_name, get_column_default from db.tables.operations.select import get_oid_from_table, reflect_table_from_oid from db.constraints.operations.select import get_column_constraints from db.tests.columns.utils import create_test_table @@ -35,14 +35,14 @@ def _check_duplicate_data(table_oid, engine, copy_data): def _check_duplicate_unique_constraint( - table_oid, col_index, con_idxs, engine, copy_constraints + table_oid, col_attnum, con_attnums, engine, copy_constraints ): - constraints_ = get_column_constraints(col_index, table_oid, engine) + constraints_ = get_column_constraints(col_attnum, table_oid, engine) if copy_constraints: assert len(constraints_) == 1 constraint = constraints_[0] assert constraint.contype == "u" - assert set([con - 1 for con in constraint.conkey]) == set(con_idxs) + assert set(constraint.conkey) == set(con_attnums) else: assert len(constraints_) == 0 @@ -215,15 +215,17 @@ def test_create_column_bad_options(engine_with_schema): def test_duplicate_column_name(engine_with_schema): engine, schema = engine_with_schema table_name = "atable" + filler_column_name = "Filler" new_col_name = "duplicated_column" table = Table( table_name, MetaData(bind=engine, schema=schema), - Column("Filler", Numeric) + Column(filler_column_name, Numeric) ) table.create() table_oid = get_oid_from_table(table_name, schema, engine) - duplicate_column(table_oid, 0, engine, new_col_name) + col_attnum = get_column_attnum_from_name(table_oid, filler_column_name, engine) + duplicate_column(table_oid, col_attnum, engine, new_col_name) table = reflect_table_from_oid(table_oid, engine) assert new_col_name in table.c @@ -239,14 +241,15 @@ def test_duplicate_column_single_unique(engine_with_schema, copy_data, copy_cons create_test_table(table_name, cols, insert_data, schema, engine) table_oid = get_oid_from_table(table_name, schema, engine) + target_col_attnum = get_column_attnum_from_name(table_oid, target_column_name, engine) duplicate_column( - table_oid, 0, engine, new_col_name, copy_data, copy_constraints + table_oid, target_col_attnum, engine, new_col_name, copy_data, copy_constraints ) - col_index = get_column_index_from_name(table_oid, new_col_name, engine) + col_attnum = get_column_attnum_from_name(table_oid, new_col_name, engine) _check_duplicate_data(table_oid, engine, copy_data) _check_duplicate_unique_constraint( - table_oid, col_index, [col_index], engine, copy_constraints + table_oid, col_attnum, [col_attnum], engine, copy_constraints ) @@ -256,23 +259,26 @@ def test_duplicate_column_multi_unique(engine_with_schema, copy_data, copy_const table_name = "atable" target_column_name = "columtoduplicate" new_col_name = "duplicated_column" + filler_col_name = "Filler" cols = [ Column(target_column_name, Numeric), - Column("Filler", Numeric), - UniqueConstraint(target_column_name, "Filler") + Column(filler_col_name, Numeric), + UniqueConstraint(target_column_name, filler_col_name) ] insert_data = [(1, 2), (2, 3), (3, 4)] create_test_table(table_name, cols, insert_data, schema, engine) table_oid = get_oid_from_table(table_name, schema, engine) + target_col_attnum = get_column_attnum_from_name(table_oid, target_column_name, engine) duplicate_column( - table_oid, 0, engine, new_col_name, copy_data, copy_constraints + table_oid, target_col_attnum, engine, new_col_name, copy_data, copy_constraints ) - col_index = get_column_index_from_name(table_oid, new_col_name, 
engine)
+    new_col_attnum = get_column_attnum_from_name(table_oid, new_col_name, engine)
+    filler_col_attnum = get_column_attnum_from_name(table_oid, filler_col_name, engine)
 
     _check_duplicate_data(table_oid, engine, copy_data)
     _check_duplicate_unique_constraint(
-        table_oid, col_index, [1, col_index], engine, copy_constraints
+        table_oid, new_col_attnum, [filler_col_attnum, new_col_attnum], engine, copy_constraints
     )
 
 
@@ -290,8 +296,9 @@ def test_duplicate_column_nullable(
     create_test_table(table_name, cols, insert_data, schema, engine)
     table_oid = get_oid_from_table(table_name, schema, engine)
+    target_col_attnum = get_column_attnum_from_name(table_oid, target_column_name, engine)
     col = duplicate_column(
-        table_oid, 0, engine, new_col_name, copy_data, copy_constraints
+        table_oid, target_col_attnum, engine, new_col_name, copy_data, copy_constraints
     )
 
     _check_duplicate_data(table_oid, engine, copy_data)
@@ -320,7 +327,8 @@ def test_duplicate_non_unique_constraint(engine_with_schema):
         conn.execute(table.insert().values(data))
 
     table_oid = get_oid_from_table(table_name, schema, engine)
-    col = duplicate_column(table_oid, 0, engine, new_col_name)
+    col_attnum = get_column_attnum_from_name(table_oid, target_column_name, engine)
+    col = duplicate_column(table_oid, col_attnum, engine, new_col_name)
 
     _check_duplicate_data(table_oid, engine, True)
     assert col.primary_key is False
@@ -337,8 +345,9 @@ def test_duplicate_column_default(engine_with_schema, copy_data, copy_constraint
     create_test_table(table_name, cols, [], schema, engine)
     table_oid = get_oid_from_table(table_name, schema, engine)
+    target_col_attnum = get_column_attnum_from_name(table_oid, target_column_name, engine)
     duplicate_column(
-        table_oid, 0, engine, new_col_name, copy_data, copy_constraints
+        table_oid, target_col_attnum, engine, new_col_name, copy_data, copy_constraints
     )
 
     column_attnum = get_column_attnum_from_name(table_oid, new_col_name, engine)

From e5f4559395813bbb954810944de875c42416d05e Mon Sep 17 00:00:00 2001
From: silentninja 
Date: Tue, 1 Mar 2022 23:54:27 +0530
Subject: [PATCH 2/8] Replace column_index usage with column attnum in
 service-layer calls to the db layer

---
 mathesar/api/serializers/columns.py | 2 +-
 mathesar/models.py                  | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/mathesar/api/serializers/columns.py b/mathesar/api/serializers/columns.py
index 3b27a077c4..c19bd99268 100644
--- a/mathesar/api/serializers/columns.py
+++ b/mathesar/api/serializers/columns.py
@@ -106,7 +106,7 @@ class Meta(SimpleColumnSerializer.Meta):
         )
 
     # From duplication fields
-    source_column = serializers.IntegerField(required=False, write_only=True)
+    source_column = serializers.PrimaryKeyRelatedField(queryset=Column.objects.all(), required=False, write_only=True)
     copy_source_data = serializers.BooleanField(default=True, write_only=True)
     copy_source_constraints = serializers.BooleanField(default=True, write_only=True)
 
diff --git a/mathesar/models.py b/mathesar/models.py
index 44d066b2b9..75a31d9784 100644
--- a/mathesar/models.py
+++ b/mathesar/models.py
@@ -245,10 +245,10 @@ def drop_column(self, column_attnum):
             self.schema._sa_engine,
         )
 
-    def duplicate_column(self, column_index, copy_data, copy_constraints, name=None):
+    def duplicate_column(self, column_attnum, copy_data, copy_constraints, name=None):
         return duplicate_column(
             self.oid,
-            column_index,
+            column_attnum,
             self.schema._sa_engine,
             new_column_name=name,
             copy_data=copy_data,

From f93545f6a608f068917888cfec5aa15b01b1626c Mon Sep 17 00:00:00 2001
From: silentninja 
Date: Wed, 2 Mar 2022 03:37:31 +0530 Subject: [PATCH 3/8] Fix wrong parameter error in `change_column_nullable` function Pass correct table oid in `alter_column` --- db/columns/operations/alter.py | 10 +- mathesar/tests/api/test_column_api.py | 159 ++++++++------------------ mathesar/tests/api/test_record_api.py | 18 +-- 3 files changed, 63 insertions(+), 124 deletions(-) diff --git a/db/columns/operations/alter.py b/db/columns/operations/alter.py index fb30e6c6a7..c187af26d9 100644 --- a/db/columns/operations/alter.py +++ b/db/columns/operations/alter.py @@ -39,14 +39,16 @@ def alter_column(engine, table_oid, column_index, column_data): table, column_index, engine, conn, type_options=column_data[TYPE_OPTIONS_KEY] ) - + column_name = table.columns[column_index].name + column_attnum = get_column_attnum_from_name(table_oid, column_name, engine) if NULLABLE_KEY in column_data: nullable = column_data[NULLABLE_KEY] - change_column_nullable(table, column_index, engine, conn, nullable) + change_column_nullable(table_oid, column_attnum, engine, conn, nullable) if DEFAULT_DICT in column_data: default_dict = column_data[DEFAULT_DICT] default = default_dict[DEFAULT_KEY] if default_dict is not None else None - set_column_default(table, column_index, engine, conn, default) + + set_column_default(table_oid, column_attnum, engine, conn, default) if NAME_KEY in column_data: # Name always needs to be the last item altered # since previous operations need the name to work @@ -137,7 +139,7 @@ def retype_column( def change_column_nullable(table_oid, column_attum, engine, connection, nullable): - table = reflect_table_from_oid(table_oid, engine) + table = reflect_table_from_oid(table_oid, engine, connection) column_name = get_column_name_from_attnum(table_oid, column_attum, engine) column = table.columns[column_name] ctx = MigrationContext.configure(connection) diff --git a/mathesar/tests/api/test_column_api.py b/mathesar/tests/api/test_column_api.py index d1ae5bc702..881544f863 100644 --- a/mathesar/tests/api/test_column_api.py +++ b/mathesar/tests/api/test_column_api.py @@ -42,6 +42,13 @@ def column_test_table(patent_schema): return table +def _get_columns_by_name(table, name_list): + columns_by_name_dict = { + col.name: col for col in ServiceLayerColumn.objects.filter(table=table) if col.name in name_list + } + return [columns_by_name_dict[col_name] for col_name in name_list] + + @pytest.fixture def column_test_table_with_service_layer_options(patent_schema): engine = patent_schema._sa_engine @@ -378,18 +385,13 @@ def test_column_update_name(column_test_table, client): cache.clear() name = "updatedname" data = {"name": name} - response = client.get( - f"/api/db/v0/tables/{column_test_table.id}/columns/" - ) - columns = response.json()['results'] - column_index = 1 - column_id = columns[column_index]['id'] + column = _get_columns_by_name(column_test_table, ['mycolumn1'])[0] response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column_id}/", data=data + f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", data=data ) assert response.json()["name"] == name response = client.get( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column_id}/" + f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/" ) assert response.json()["name"] == name @@ -440,14 +442,9 @@ def test_column_update_default(column_test_table, client): cache.clear() expt_default = 5 data = {"default": {"value": expt_default}} # Ensure we pass a int and not a str - response = client.get( - 
f"/api/db/v0/tables/{column_test_table.id}/columns/" - ) - columns = response.json()['results'] - column_index = 1 - column_id = columns[column_index]['id'] + column = _get_columns_by_name(column_test_table, ['mycolumn0'])[0] response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column_id}/", + f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", data=json.dumps(data), content_type="application/json", ) @@ -458,14 +455,9 @@ def test_column_update_delete_default(column_test_table, client): cache.clear() expt_default = None data = {"default": None} - response = client.get( - f"/api/db/v0/tables/{column_test_table.id}/columns/" - ) - columns = response.json()['results'] - column_index = 2 - column_id = columns[column_index]['id'] + column = _get_columns_by_name(column_test_table, ['mycolumn0'])[0] response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column_id}/", + f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", data=data, ) assert response.json()["default"] == expt_default @@ -474,14 +466,10 @@ def test_column_update_delete_default(column_test_table, client): def test_column_update_default_invalid_cast(column_test_table, client): cache.clear() data = {"default": {"value": "not an integer"}} - response = client.get( - f"/api/db/v0/tables/{column_test_table.id}/columns/" - ) - columns = response.json()['results'] - column_index = 1 - column_id = columns[column_index]['id'] + column = _get_columns_by_name(column_test_table, ['mycolumn0'])[0] + response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column_id}/", + f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", data=json.dumps(data), content_type="application/json" ) @@ -492,14 +480,9 @@ def test_column_update_type_dynamic_default(column_test_table, client): cache.clear() type_ = "NUMERIC" data = {"type": type_} - response = client.get( - f"/api/db/v0/tables/{column_test_table.id}/columns/" - ) - columns = response.json()['results'] - column_index = 0 - column_id = columns[column_index]['id'] + column = _get_columns_by_name(column_test_table, ['mycolumn0'])[0] response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column_id}/", data=data + f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", data=data ) assert response.status_code == 400 @@ -508,14 +491,9 @@ def test_column_update_type(column_test_table, client): cache.clear() type_ = "BOOLEAN" data = {"type": type_} - response = client.get( - f"/api/db/v0/tables/{column_test_table.id}/columns/" - ) - columns = response.json()['results'] - column_index = 3 - column_id = columns[column_index]['id'] + column = _get_columns_by_name(column_test_table, ['mycolumn3'])[0] response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column_id}/", data=data + f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", data=data ) assert response.json()["type"] == type_ @@ -525,14 +503,9 @@ def test_column_update_name_and_type(column_test_table, client): type_ = "BOOLEAN" new_name = 'new name' data = {"type": type_, "name": new_name} - response = client.get( - f"/api/db/v0/tables/{column_test_table.id}/columns/" - ) - columns = response.json()['results'] - column_index = 3 - column_id = columns[column_index]['id'] + column = _get_columns_by_name(column_test_table, ['mycolumn3'])[0] response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column_id}/", data=data + 
f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", data=data ) assert response.json()["type"] == type_ assert response.json()["name"] == new_name @@ -543,14 +516,10 @@ def test_column_update_name_type_nullable(column_test_table, client): type_ = "BOOLEAN" new_name = 'new name' data = {"type": type_, "name": new_name, "nullable": True} - response = client.get( - f"/api/db/v0/tables/{column_test_table.id}/columns/" - ) - columns = response.json()['results'] - column_index = 3 - column_id = columns[column_index]['id'] + column = _get_columns_by_name(column_test_table, ['mycolumn3'])[0] + response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column_id}/", data=data + f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", data=data ) assert response.json()["type"] == type_ assert response.json()["name"] == new_name @@ -567,14 +536,9 @@ def test_column_update_name_type_nullable_default(column_test_table, client): "nullable": True, "default": {"value": True}, } - response = client.get( - f"/api/db/v0/tables/{column_test_table.id}/columns/" - ) - columns = response.json()['results'] - column_index = 3 - column_id = columns[column_index]['id'] + column = _get_columns_by_name(column_test_table, ['mycolumn3'])[0] response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column_id}/", + f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", data=json.dumps(data), content_type='application/json' ) @@ -589,14 +553,9 @@ def test_column_update_type_options(column_test_table, client): type_ = "NUMERIC" type_options = {"precision": 3, "scale": 1} data = {"type": type_, "type_options": type_options} - response = client.get( - f"/api/db/v0/tables/{column_test_table.id}/columns/" - ) - columns = response.json()['results'] - column_index = 3 - column_id = columns[column_index]['id'] + column = _get_columns_by_name(column_test_table, ['mycolumn3'])[0] response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column_id}/", + f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", data, ) assert response.json()["type"] == type_ @@ -607,20 +566,15 @@ def test_column_update_type_options_no_type(column_test_table, client): cache.clear() type_ = "NUMERIC" data = {"type": type_} - response = client.get( - f"/api/db/v0/tables/{column_test_table.id}/columns/" - ) - columns = response.json()['results'] - column_index = 3 - column_id = columns[column_index]['id'] + column = _get_columns_by_name(column_test_table, ['mycolumn3'])[0] client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column_id}/", + f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", data, ) type_options = {"precision": 3, "scale": 1} type_option_data = {"type_options": type_options} response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column_id}/", + f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", type_option_data, ) assert response.json()["type"] == type_ @@ -650,14 +604,9 @@ def test_column_update_returns_table_dependent_fields(column_test_table, client) cache.clear() expt_default = 5 data = {"default": {"value": expt_default}} - response = client.get( - f"/api/db/v0/tables/{column_test_table.id}/columns/" - ) - columns = response.json()['results'] - column_index = 1 - column_id = columns[column_index]['id'] + column = _get_columns_by_name(column_test_table, ['mycolumn1'])[0] response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column_id}/", + 
f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", data=json.dumps(data), content_type="application/json" ) @@ -670,14 +619,9 @@ def test_column_update_type_invalid_options(column_test_table, client, type_opti cache.clear() type_ = "NUMERIC" data = {"type": type_, "type_options": type_options} - response = client.get( - f"/api/db/v0/tables/{column_test_table.id}/columns/" - ) - columns = response.json()['results'] - column_index = 3 - column_id = columns[column_index]['id'] + column = _get_columns_by_name(column_test_table, ['mycolumn3'])[0] response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column_id}/", + f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", data=data, ) assert response.status_code == 400 @@ -687,14 +631,9 @@ def test_column_update_type_invalid_cast(column_test_table, client): cache.clear() type_ = "MATHESAR_TYPES.EMAIL" data = {"type": type_} - response = client.get( - f"/api/db/v0/tables/{column_test_table.id}/columns/" - ) - columns = response.json()['results'] - column_index = 1 - column_id = columns[column_index]['id'] + column = _get_columns_by_name(column_test_table, ['mycolumn1'])[0] response = client.patch( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column_id}/", data=data + f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/", data=data ) assert response.status_code == 400 @@ -716,14 +655,9 @@ def test_column_destroy(column_test_table, client): cache.clear() num_columns = len(column_test_table.sa_columns) col_one_name = column_test_table.sa_columns[1].name - response = client.get( - f"/api/db/v0/tables/{column_test_table.id}/columns/" - ) - columns = response.json()['results'] - column_index = 1 - column_id = columns[column_index]['id'] + column = _get_columns_by_name(column_test_table, ['mycolumn1'])[0] response = client.delete( - f"/api/db/v0/tables/{column_test_table.id}/columns/{column_id}/" + f"/api/db/v0/tables/{column_test_table.id}/columns/{column.id}/" ) assert response.status_code == 204 new_columns_response = client.get( @@ -747,11 +681,11 @@ def test_column_destroy_when_missing(column_test_table, client): def test_column_duplicate(column_test_table, client): cache.clear() - target_col_idx = 2 - target_col = column_test_table.sa_columns[target_col_idx] + column = _get_columns_by_name(column_test_table, ['mycolumn1'])[0] + target_col = column_test_table.sa_columns[column.name] data = { "name": "new_col_name", - "source_column": target_col_idx, + "source_column": column.id, "copy_source_data": False, "copy_source_constraints": False, } @@ -768,7 +702,7 @@ def test_column_duplicate(column_test_table, client): assert mock_infer.call_args[0] == ( column_test_table.oid, - target_col_idx, + column, column_test_table.schema._sa_engine, ) assert mock_infer.call_args[1] == { @@ -787,7 +721,8 @@ def test_column_duplicate_when_missing(column_test_table, client): ) assert response.status_code == 400 response_data = response.json()[0] - assert "not found" in response_data['message'] + assert 2151 == response_data['code'] + assert "object does not exist" in response_data['message'] def test_column_duplicate_some_parameters(column_test_table, client): diff --git a/mathesar/tests/api/test_record_api.py b/mathesar/tests/api/test_record_api.py index 4c2e76f879..122c24b8bc 100644 --- a/mathesar/tests/api/test_record_api.py +++ b/mathesar/tests/api/test_record_api.py @@ -10,6 +10,13 @@ from mathesar.api.exceptions.error_codes import ErrorCodes +def _get_columns_by_name(table, name_list): + 
columns_by_name_dict = { + col.name: col for col in models.Column.objects.filter(table=table) if col.name in name_list + } + return [columns_by_name_dict[col_name] for col_name in name_list] + + def test_record_list(create_table, client): """ Desired format: @@ -353,15 +360,10 @@ def test_record_list_sort(create_table, client): def test_null_error_record_create(create_table, client): table_name = 'NASA Record Create' table = create_table(table_name) - response = client.get( - f"/api/db/v0/tables/{table.id}/columns/" - ) - columns = response.json()['results'] - column_index = 3 - column_id = columns[column_index]['id'] + column = _get_columns_by_name(table, ['Case Number'])[0] data = {"nullable": False} client.patch( - f"/api/db/v0/tables/{table.id}/columns/{column_id}/", data=data + f"/api/db/v0/tables/{table.id}/columns/{column.id}/", data=data ) data = { 'Center': 'NASA Example Space Center', @@ -376,7 +378,7 @@ def test_null_error_record_create(create_table, client): record_data = response.json() assert response.status_code == 400 assert 'null value in column "Case Number"' in record_data[0]['message'] - assert column_id == record_data[0]['detail']['column_id'] + assert column.id == record_data[0]['detail']['column_id'] @pytest.mark.parametrize('table_name,grouping,expected_groups', grouping_params) From 91d95b6d292730de13f1acd8c4df709c83842b6d Mon Sep 17 00:00:00 2001 From: silentninja Date: Thu, 3 Mar 2022 04:20:25 +0530 Subject: [PATCH 4/8] Use non-reflecting queryset manager in `mathesar/api/serializers/columns.py:source_column` queryset parameter --- mathesar/api/serializers/columns.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mathesar/api/serializers/columns.py b/mathesar/api/serializers/columns.py index c19bd99268..d7b1a68580 100644 --- a/mathesar/api/serializers/columns.py +++ b/mathesar/api/serializers/columns.py @@ -106,7 +106,7 @@ class Meta(SimpleColumnSerializer.Meta): ) # From duplication fields - source_column = serializers.PrimaryKeyRelatedField(queryset=Column.objects.all(), required=False, write_only=True) + source_column = serializers.PrimaryKeyRelatedField(queryset=Column.current_objects.all(), required=False, write_only=True) copy_source_data = serializers.BooleanField(default=True, write_only=True) copy_source_constraints = serializers.BooleanField(default=True, write_only=True) From 351797e97e167b1fb1a2d29cad9ab02e497e0383 Mon Sep 17 00:00:00 2001 From: silentninja Date: Thu, 3 Mar 2022 05:48:28 +0530 Subject: [PATCH 5/8] Remove stale exception --- mathesar/api/db/viewsets/columns.py | 21 ++++++--------------- 1 file changed, 6 insertions(+), 15 deletions(-) diff --git a/mathesar/api/db/viewsets/columns.py b/mathesar/api/db/viewsets/columns.py index ac9be1a299..6685e36f29 100644 --- a/mathesar/api/db/viewsets/columns.py +++ b/mathesar/api/db/viewsets/columns.py @@ -41,21 +41,12 @@ def create(self, request, table_pk=None): serializer.is_valid(raise_exception=True) if 'source_column' in serializer.validated_data: - try: - column = table.duplicate_column( - serializer.validated_data['source_column'], - serializer.validated_data['copy_source_data'], - serializer.validated_data['copy_source_constraints'], - serializer.validated_data.get('name'), - ) - except IndexError as e: - _col_idx = serializer.validated_data['source_column'] - raise base_api_exceptions.NotFoundAPIException( - e, - message=f'column index "{_col_idx}" not found', - field='source_column', - status_code=status.HTTP_400_BAD_REQUEST - ) + column = table.duplicate_column( + 
serializer.validated_data['source_column'], + serializer.validated_data['copy_source_data'], + serializer.validated_data['copy_source_constraints'], + serializer.validated_data.get('name'), + ) else: try: column = table.add_column(request.data) From 564848b099d155536e8b341c1f0442c170377967 Mon Sep 17 00:00:00 2001 From: Sean Colsen Date: Thu, 3 Mar 2022 10:33:33 -0500 Subject: [PATCH 6/8] Add code comments linking to discussions --- mathesar_ui/src/App.svelte | 11 ++++++----- .../pagination/Pagination.svelte | 7 +++++-- mathesar_ui/src/utils/routing.ts | 16 ++++++++++++++++ 3 files changed, 27 insertions(+), 7 deletions(-) diff --git a/mathesar_ui/src/App.svelte b/mathesar_ui/src/App.svelte index cf23f44695..bdb5b0674f 100644 --- a/mathesar_ui/src/App.svelte +++ b/mathesar_ui/src/App.svelte @@ -9,11 +9,12 @@ import { currentSchemaId } from '@mathesar/stores/schemas'; import { beginUpdatingUrlWhenSchemaChanges } from './utils/routing'; - // This is a bit of a hack to deal with our routing still being a patchwork of - // declarative and imperative logic. Without this call, the URL will not - // reliably set the query params when the schema changes. It actually _will_ - // set the query params _sometimes_, but we weren't able to figure out why the - // behavior is inconsistent. + // Why is this function called at such a high level, and not handled closer to + // the code point related to saving tab data or the code point related to + // switching schemas? + // + // Because we need to place this at a high level in order to avoid circular + // imports. beginUpdatingUrlWhenSchemaChanges(currentSchemaId); diff --git a/mathesar_ui/src/component-library/pagination/Pagination.svelte b/mathesar_ui/src/component-library/pagination/Pagination.svelte index c0eb4576a8..dab7d9868b 100644 --- a/mathesar_ui/src/component-library/pagination/Pagination.svelte +++ b/mathesar_ui/src/component-library/pagination/Pagination.svelte @@ -33,8 +33,11 @@ // TODO: @seancolsen says: // > Refactor `pageCount` to no longer be an exported prop. We're exporting it // > just so the parent component can access the calculation done within this - // > component. That's an unconventional flow of data. I'd rather do the - // > calculation in the parent and pass it down to this component. + // > component. That's an unconventional flow of data. + // + // See https://github.com/centerofci/mathesar/pull/1109#discussion_r818638950 + // for further discussion. @pavish and @seancolsen settled on an approach + // using a utils function. export let pageCount = 0; // ARIA Label for component diff --git a/mathesar_ui/src/utils/routing.ts b/mathesar_ui/src/utils/routing.ts index 3ece05a10f..1b6f246cc3 100644 --- a/mathesar_ui/src/utils/routing.ts +++ b/mathesar_ui/src/utils/routing.ts @@ -1,9 +1,25 @@ +/** + * @overview + * + * Our routing system is a mix of declarative logic (e.g. in App.svelte) and + * imperative logic (e.g. `saveTabData` within `tabDataSaver.ts`). We need the + * imperative logic because the query param which saves the tab data is quite + * complex and would be difficult to implement declaratively. + * + * This file provides helpers for the imperative logic. 
+ */
+
 import type { Unsubscriber, Writable } from 'svelte/store';
 import { get } from 'svelte/store';
 import { currentDBName } from '@mathesar/stores/databases';
 import { getTabsForSchema } from '@mathesar/stores/tabs/manager';
 import { saveTabData } from '@mathesar/stores/tabs/tabDataSaver';
 
+/**
+ * See https://github.com/centerofci/mathesar/pull/1109#discussion_r816957769
+ * for more information on the origin of the approach used in this function and
+ * why we need it.
+ */
 export function beginUpdatingUrlWhenSchemaChanges(
   currentSchemaId: Writable,
 ): Unsubscriber {

From ee92ec504e7c2edae732ef2fb12e7a367cff08a7 Mon Sep 17 00:00:00 2001
From: silentninja 
Date: Fri, 4 Mar 2022 05:13:36 +0530
Subject: [PATCH 7/8] Fix `mathesar.tests.api.test_column_api.test_column_update_display_options` to use table instance instead of a fixture when calling `mathesar.tests.api.test_column_api._get_columns_by_name`

---
 mathesar/tests/api/test_column_api.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mathesar/tests/api/test_column_api.py b/mathesar/tests/api/test_column_api.py
index 3f7be483ff..69ab2baa8b 100644
--- a/mathesar/tests/api/test_column_api.py
+++ b/mathesar/tests/api/test_column_api.py
@@ -402,7 +402,7 @@ def test_column_update_name(column_test_table, client):
 def test_column_update_display_options(column_test_table_with_service_layer_options, client):
     cache.clear()
     table, columns = column_test_table_with_service_layer_options
-    column = _get_columns_by_name(column_test_table, ['mycolumn1'])[0]
+    column = _get_columns_by_name(table, ['mycolumn1'])[0]
     column_id = column.id
     display_options = {"input": "dropdown", "custom_labels": {"TRUE": "yes", "FALSE": "no"}}
     display_options_data = {"display_options": display_options}

From 3fa6f5800bc6254620a533cf2e2e9a5f01b786c3 Mon Sep 17 00:00:00 2001
From: Sean Colsen 
Date: Fri, 4 Mar 2022 08:59:29 -0500
Subject: [PATCH 8/8] Upgrade pytest-playwright to 0.2.3

---
 Dockerfile.integ-tests | 2 +-
 requirements-dev.txt   | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/Dockerfile.integ-tests b/Dockerfile.integ-tests
index 86d6979616..0fa060841e 100644
--- a/Dockerfile.integ-tests
+++ b/Dockerfile.integ-tests
@@ -111,7 +111,7 @@ RUN wget https://github.com/jwilder/dockerize/releases/download/$DOCKERIZE_VERSI
     && rm dockerize-linux-amd64-$DOCKERIZE_VERSION.tar.gz
 
 RUN pip install playwright==1.18.2
-RUN pip install pytest-playwright==0.2.2
+RUN pip install pytest-playwright==0.2.3
 
 RUN playwright install
 RUN playwright install-deps
 
diff --git a/requirements-dev.txt b/requirements-dev.txt
index edbfcef7c3..f3e24f0f58 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -6,4 +6,4 @@ pytest-env==0.6.2
 pytest-cov==3.0.0
 Sphinx==3.5.3
 playwright==1.18.1
-pytest-playwright==0.2.2
\ No newline at end of file
+pytest-playwright==0.2.3
\ No newline at end of file
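
A note on the core technique in this series: PostgreSQL's pg_attribute catalog
assigns every column a stable attnum, which survives renames and is not
renumbered when other columns are added or dropped, whereas a position computed
from the reflected column list shifts as the table changes. The helpers
introduced in PATCH 1/8 reduce to a catalog lookup along the following lines.
This is an illustrative sketch, not code from the patches: the function name
column_names_by_attnum is hypothetical, and it assumes a SQLAlchemy engine
connected to Postgres plus a table_oid such as the one returned by
get_oid_from_table in the tests above.

import warnings

from sqlalchemy import MetaData, Table, and_, select


def column_names_by_attnum(engine, table_oid, attnums):
    # Reflect the pg_attribute catalog. The warnings filter mirrors the one
    # used in db/columns/operations/select.py, since SQLAlchemy does not
    # recognize the types of some catalog columns.
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", message="Did not recognize type")
        pg_attribute = Table("pg_attribute", MetaData(), autoload_with=engine)
    sel = select(pg_attribute.c.attnum, pg_attribute.c.attname).where(
        and_(
            pg_attribute.c.attrelid == table_oid,  # rows for this table only
            pg_attribute.c.attnum.in_(attnums),
        )
    )
    with engine.connect() as conn:
        # Map attnum -> name. Unlike a positional index, a column's attnum
        # stays valid after other columns are dropped or renamed.
        return {row.attnum: row.attname for row in conn.execute(sel)}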